hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 958k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72ab179dcc3caf0aecde8a069b2cd8ed3626836 | 2,754 | py | Python | src/main/resources/classes/assassin/multihit.py | WynnLab/WynnLab | 9950bc1485fa187394c1b1326fa0b5c6b6a1ac96 | [
"MIT"
] | 2 | 2021-03-17T19:28:36.000Z | 2021-03-26T09:31:22.000Z | src/main/resources/classes/assassin/multihit.py | FauxKiwi/Wynnlab | 9950bc1485fa187394c1b1326fa0b5c6b6a1ac96 | [
"MIT"
] | 5 | 2021-06-08T12:13:40.000Z | 2021-08-09T15:04:23.000Z | src/main/resources/classes/assassin/multihit.py | FauxKiwi/Wynnlab | 9950bc1485fa187394c1b1326fa0b5c6b6a1ac96 | [
"MIT"
] | 4 | 2021-08-09T15:17:23.000Z | 2022-03-05T14:08:26.000Z | from org.bukkit import Particle, Sound
from org.bukkit.potion import PotionEffectType
from com.wynnlab.spells import PySpell
from com.wynnlab.util import BukkitUtils
class Spell(PySpell):
    """Assassin "Multihit" spell (Bukkit/Jython).

    On cast it sweeps mobs in front of the player, drags them along the
    caster's view direction for up to 20 ticks while dealing repeated small
    hits, then launches them with a finishing blow.
    """

    def __init__(self):
        self.l = None           # anchor location one block in front of the caster
        self.entities = None    # mobs caught by the initial sweep
        self.shift = False      # whether the caster was sneaking when the spell started

    def init(self):
        # Sneaking reduces the drag applied during the hit chain
        # (.05 instead of .3) and suppresses the final launch.
        self.shift = self.player.isSneaking()

    def tick(self):
        # NOTE(review): self.t appears to be the tick counter maintained by
        # PySpell -- confirm. Work is only done on even ticks.
        if self.t % 2 != 0:
            return
        if self.t == 0:
            # First tick: cast sounds, locate targets in front of the player.
            if self.player.hasPotionEffect(PotionEffectType.INVISIBILITY):
                # Breaking invisibility chains into another assassin spell (id 5).
                self.castSpell('ASSASSIN', 5)
            self.sound(Sound.ENTITY_PLAYER_ATTACK_STRONG, .5, 1)
            self.sound(Sound.ENTITY_IRON_GOLEM_HURT, 1, 1.5)
            if self.clone:
                self.sound(Sound.ENTITY_BLAZE_AMBIENT, .3, 1.5)
            # Horizontal view direction, used to place the sweep in front of the caster.
            v = BukkitUtils.normalizeOnXZ(self.player.getEyeLocation().getDirection())
            self.l = self.player.getLocation().clone().add(v).add(0, .5, 0)
            self.particle(self.l, Particle.SWEEP_ATTACK, 5, .5, .5, .5, .1)
            # Collect every mob in a 3x3x3 box around the anchor point.
            self.entities = self.nearbyMobs(self.l, 3, 3, 3)
            # Two further sweeps, each one block deeper along the view direction
            # (add(v) mutates self.l in place).
            self.particle(self.l.add(v), Particle.SWEEP_ATTACK, 5, .5, .5, .5, .1)
            self.particle(self.l.add(v), Particle.SWEEP_ATTACK, 5, .5, .5, .5, .1)
        elif self.t <= 20:
            # Hit chain: drag the targets along, spin them around Y and chip damage.
            for e in self.entities:
                e.setVelocity(self.player.getEyeLocation().getDirection().multiply(.05 if self.shift else .3).setY(.2).rotateAroundY((.1 * self.t) if self.t % 2 == 0 else (-.1 * self.t)))
                self.particle(e.getLocation(), Particle.SWEEP_ATTACK, 5, .5, .5, .5, .5)
                if self.clone:
                    self.particle(e.getLocation(), Particle.SPELL_WITCH, 7, .5, .5, .5, .2)
                    self.particle(e.getLocation(), Particle.SQUID_INK, 6, .5, .5, .5, .1)
                self.particle(e.getLocation(), Particle.CRIT, 7 if self.clone else 10, .5, .5, .5, .1)
                self.sound(e.getLocation(), Sound.ENTITY_PLAYER_ATTACK_SWEEP, 1, 1.3)
                self.sound(e.getLocation(), Sound.ENTITY_PLAYER_ATTACK_CRIT, .8, 1.6)
                # Small per-tick damage (27% of base).
                self.damage(e, False, .27)
        else:
            # Final tick: launch (unless sneaking) and apply the finishing damage.
            for e in self.entities:
                if not self.shift:
                    e.setVelocity(self.player.getEyeLocation().getDirection().setY(.5))
                self.sound(e.getLocation(), Sound.ENTITY_PLAYER_ATTACK_KNOCKBACK, 1, 1.3)
                self.damage(e, False, 1.2, .2, 0, .3, .5, 0, 0)
                if self.clone:
                    self.sound(e.getLocation(), Sound.ENTITY_BLAZE_DEATH, 1, 1.2)
                    self.sound(e.getLocation(), Sound.ENTITY_BLAZE_AMBIENT, 1, 1.6)
                self.sound(e.getLocation(), Sound.ENTITY_FIREWORK_ROCKET_BLAST_FAR, .5, 1)
| 41.727273 | 187 | 0.576253 | from org.bukkit import Particle, Sound
from org.bukkit.potion import PotionEffectType
from com.wynnlab.spells import PySpell
from com.wynnlab.util import BukkitUtils
class Spell(PySpell):
def __init__(self):
self.l = None
self.entities = None
self.shift = False
def init(self):
self.shift = self.player.isSneaking()
def tick(self):
if self.t % 2 != 0:
return
if self.t == 0:
if self.player.hasPotionEffect(PotionEffectType.INVISIBILITY):
self.castSpell('ASSASSIN', 5)
self.sound(Sound.ENTITY_PLAYER_ATTACK_STRONG, .5, 1)
self.sound(Sound.ENTITY_IRON_GOLEM_HURT, 1, 1.5)
if self.clone:
self.sound(Sound.ENTITY_BLAZE_AMBIENT, .3, 1.5)
v = BukkitUtils.normalizeOnXZ(self.player.getEyeLocation().getDirection())
self.l = self.player.getLocation().clone().add(v).add(0, .5, 0)
self.particle(self.l, Particle.SWEEP_ATTACK, 5, .5, .5, .5, .1)
self.entities = self.nearbyMobs(self.l, 3, 3, 3)
self.particle(self.l.add(v), Particle.SWEEP_ATTACK, 5, .5, .5, .5, .1)
self.particle(self.l.add(v), Particle.SWEEP_ATTACK, 5, .5, .5, .5, .1)
elif self.t <= 20:
for e in self.entities:
e.setVelocity(self.player.getEyeLocation().getDirection().multiply(.05 if self.shift else .3).setY(.2).rotateAroundY((.1 * self.t) if self.t % 2 == 0 else (-.1 * self.t)))
self.particle(e.getLocation(), Particle.SWEEP_ATTACK, 5, .5, .5, .5, .5)
if self.clone:
self.particle(e.getLocation(), Particle.SPELL_WITCH, 7, .5, .5, .5, .2)
self.particle(e.getLocation(), Particle.SQUID_INK, 6, .5, .5, .5, .1)
self.particle(e.getLocation(), Particle.CRIT, 7 if self.clone else 10, .5, .5, .5, .1)
self.sound(e.getLocation(), Sound.ENTITY_PLAYER_ATTACK_SWEEP, 1, 1.3)
self.sound(e.getLocation(), Sound.ENTITY_PLAYER_ATTACK_CRIT, .8, 1.6)
self.damage(e, False, .27)
else:
for e in self.entities:
if not self.shift:
e.setVelocity(self.player.getEyeLocation().getDirection().setY(.5))
self.sound(e.getLocation(), Sound.ENTITY_PLAYER_ATTACK_KNOCKBACK, 1, 1.3)
self.damage(e, False, 1.2, .2, 0, .3, .5, 0, 0)
if self.clone:
self.sound(e.getLocation(), Sound.ENTITY_BLAZE_DEATH, 1, 1.2)
self.sound(e.getLocation(), Sound.ENTITY_BLAZE_AMBIENT, 1, 1.6)
self.sound(e.getLocation(), Sound.ENTITY_FIREWORK_ROCKET_BLAST_FAR, .5, 1)
| true | true |
f72ab1a21ac26d83b6dfe2d7a8390897f1a6f645 | 5,138 | py | Python | DSPdu.py | Francisobiagwu/SecureDocumentSharing | d8fe27f3ca4d1b470a8cbe6d3e475226bdb796c1 | [
"MIT"
] | 2 | 2018-06-21T18:06:15.000Z | 2021-08-19T15:27:55.000Z | DSPdu.py | Francisobiagwu/DocumentSharing | d8fe27f3ca4d1b470a8cbe6d3e475226bdb796c1 | [
"MIT"
] | null | null | null | DSPdu.py | Francisobiagwu/DocumentSharing | d8fe27f3ca4d1b470a8cbe6d3e475226bdb796c1 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
@author: Francis Obiagwu
@software: SecureDocumentSharing
@file: DSPdu.py
@time: 6/6/18 7:16 PM
"""
import binascii
import struct
from datetime import datetime
from DSCodes import DSCode
class DSPdu:
    """Generic protocol-data-unit (PDU) builder for the document-sharing protocol.

    The PDU layout is declared once in ``__init__`` and packed/unpacked with
    the :mod:`struct` module.  Fields, in order (``struct`` format codes):

        MESSAGE_TYPE  12s   TIMESTAMP  32s   ERROR_CODES  i    FLAG  6s
        CHANGED_SECTION q   SECTION-ID q
        RESERVED-1/2/3  32s each
        DATA  100s          DATA_SIZE  q     CHECKSUM  q
    """

    def __init__(self):
        """Build the field table, the combined struct format and the size caches."""
        array = [('MESSAGE_TYPE', '12s'), ('TIMESTAMP', '32s'), ('ERROR_CODES', 'i'), ('FLAG', '6s'), ('CHANGED_SECTION', 'q'),
                 ('SECTION-ID', 'q'), ('RESERVED-1', '32s'), ('RESERVED-2', '32s'), ('RESERVED-3', '32s'),
                 ('DATA', '100s'), ('DATA_SIZE', 'q'), ('CHECKSUM', 'q')]

        self.pdu_dic = {}        # field name -> field size in bytes
        self.format = ''         # combined struct format string
        self.null_bytes = b'\x00'
        self.parts_index = []    # positional index of each field
        # Fix: get_pdu_part_names() referenced self.pdu_part_list, which was
        # never assigned anywhere and raised AttributeError when called.
        self.pdu_part_list = [name for name, _ in array]

        for index, (name, size) in enumerate(array):
            self.parts_index.append(index)
            self.format += ' ' + size
            self.pdu_dic[name] = struct.Struct(size).size

        # Compile the full-PDU Struct once, after the format string is complete
        # (the original rebuilt it on every loop iteration).
        self.s = struct.Struct(self.format)
        self.size = self.s.size
        self.data_size = self.pdu_dic.get('DATA')

    def get_pdu_parts_index(self):
        """Return the positional indices of the PDU fields (0..11)."""
        return self.parts_index

    def get_data_size(self):
        """Return the size in bytes of the DATA field."""
        return self.data_size

    def get_other_pdu_parts(self, request, data):
        """Assemble the client-controlled parts of an outgoing PDU.

        :param bytes request: message type / request code
        :param bytes data: payload bytes
        :return: ``[request, checksum, timestamp, DSCode.OK, data]``
            (clients are only allowed to send ``DSCode.OK``)
        :rtype: list
        """
        timestamp = self.get_time()
        checksum = self.get_checksum(timestamp, data)
        return [request, checksum, timestamp, DSCode.OK, data]

    @staticmethod
    def get_time():
        """Return the current local time as a UTF-8 encoded string."""
        return str(datetime.now()).encode('utf-8')

    @staticmethod
    def get_checksum(timestamp, data):
        """Return the CRC32 checksum of ``timestamp + data``.

        :raises TypeError: if either argument is not bytes-like.  Fix: the
            original swallowed the error after printing and implicitly
            returned ``None``, silently corrupting the PDU; re-raise instead.
        """
        try:
            return binascii.crc32(timestamp + data)
        except TypeError:
            print('This value {} is not a byte'.format(data))
            raise

    def get_reserved_1(self):
        """Placeholder bytes for the RESERVED-1 field."""
        return self.null_bytes

    def get_reserved_2(self):
        """Placeholder bytes for the RESERVED-2 field."""
        return self.null_bytes

    def get_reserved_3(self):
        """Placeholder bytes for the RESERVED-3 field."""
        return self.null_bytes

    def get_flag(self):
        """Not implemented yet; flag semantics are application-defined."""
        pass

    def pack(self, array):
        """Pack the field values in *array* into a binary PDU.

        :param array: sequence of 12 field values matching the layout
        :return: packed bytes of length ``self.size``
        """
        return self.s.pack(*array)

    def unpack(self, packed_pdu):
        """Unpack a binary PDU into a tuple of field values.

        :param bytes packed_pdu: a buffer of exactly ``self.size`` bytes
        :return: tuple of 12 field values
        """
        return self.s.unpack(packed_pdu)

    def get_pdu_part_names(self):
        """Return the PDU field names, in layout order."""
        return self.pdu_part_list

    @staticmethod
    def remove_padding(unpacked_pdu):
        """Strip NUL padding from the string fields of an unpacked PDU.

        Bytes items are decoded as UTF-8 and truncated at the first NUL byte;
        non-bytes items pass through unchanged.  Fix: the original used
        ``padding_index > 0`` so an all-padding (empty) field kept its NULs
        instead of becoming the empty string.
        """
        array = []
        for item in unpacked_pdu:
            if type(item) is bytes:
                item = item.decode('utf-8')
                padding_index = item.find('\x00')
                if padding_index >= 0:
                    array.append(item[:padding_index])
                else:
                    array.append(item)
            else:
                array.append(item)
        return array

    def get_buffer_size(self):
        """Return the total size in bytes of a packed PDU."""
        return self.size
| 30.583333 | 127 | 0.527053 |
import binascii
import struct
from datetime import datetime
from DSCodes import DSCode
class DSPdu:
def __init__( self ):
elf ):
pass
def pack( self, array ):
self.s = struct.Struct(self.format)
self.size = self.s.size
return self.s.pack(*array)
def unpack( self, packed_pdu ):
self.s = struct.Struct(self.format)
return self.s.unpack(packed_pdu)
def get_pdu_part_names( self ):
return self.pdu_part_list
@staticmethod
def remove_padding( unpacked_pdu ):
array = []
for item in unpacked_pdu:
if type(item) is bytes:
item = item.decode('utf-8')
padding_index = item.find('\x00')
if padding_index > 0:
array.append(item[:padding_index])
else:
array.append(item)
else:
array.append(item)
return array
def get_buffer_size( self ):
return self.size
| true | true |
f72ab2180c0e9b438ab38e4580406e4f2106a777 | 731 | py | Python | main.py | beetrandahiya/project-Zurich | e46584c1e036ec95a9f612d04a3855349568e082 | [
"MIT"
] | null | null | null | main.py | beetrandahiya/project-Zurich | e46584c1e036ec95a9f612d04a3855349568e082 | [
"MIT"
] | null | null | null | main.py | beetrandahiya/project-Zurich | e46584c1e036ec95a9f612d04a3855349568e082 | [
"MIT"
] | null | null | null | import numpy as np
#test inputs
inputs = [1, 2, 3, 2.5]
weights = [[0.2, 0.8, -0.5, 1],
[0.5, -0.91, 0.26, -0.5],
[-0.26, -0.27, 0.17, 0.87]]
biases = [2, 3, 0.5]
def neuron_output(inputs, weights, bias):
    """Return the weighted sum of *inputs* and *weights*, offset by *bias*.

    Equivalent to a dot product plus bias (np.dot(weights, inputs) + bias).
    """
    acc = 0
    for idx, value in enumerate(inputs):
        acc += value * weights[idx]
    return acc + bias
#this can also be done with numpy because its just the dot product of weights and inputs
#np.dot(weights,inputs) + bias
def neuron_layer_output(inputs, weights, biases):
    """Forward pass of one dense layer: one weighted-sum output per neuron.

    *weights* holds one row per neuron; *biases* holds one bias per neuron.
    """
    return [
        neuron_output(inputs, weights[neuron], biases[neuron])
        for neuron in range(len(biases))
    ]
# Print the three layer outputs for the sample defined above.
print(neuron_layer_output(inputs, weights, biases))
# For batched inputs we would switch to matrix operations (e.g. a numpy
# matrix product) to compute every sample's outputs at once.
| 22.151515 | 88 | 0.674419 | import numpy as np
inputs = [1, 2, 3, 2.5]
weights = [[0.2, 0.8, -0.5, 1],
[0.5, -0.91, 0.26, -0.5],
[-0.26, -0.27, 0.17, 0.87]]
biases = [2, 3, 0.5]
def neuron_output(inputs, weights,bias):
return sum(inputs[i] * weights[i] for i in range(len(inputs)))+ bias
def neuron_layer_output(inputs, weights, biases):
outputs=[]
for i in range(len(biases)):
outputs.append(neuron_output(inputs,weights[i],biases[i]))
return outputs
print(neuron_layer_output(inputs, weights, biases))
| true | true |
f72ab2e85de44330df2bccc1d1ebf94901b9c48b | 387 | py | Python | students/K33401/Goncharov_Vladimir/Lr3/hotel/hotel/asgi.py | ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021 | bb91c91a56d21cec2b12ae4cc722eaa652a88420 | [
"MIT"
] | 4 | 2020-09-03T15:41:42.000Z | 2021-12-24T15:28:20.000Z | students/K33401/Goncharov_Vladimir/Lr3/hotel/hotel/asgi.py | ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021 | bb91c91a56d21cec2b12ae4cc722eaa652a88420 | [
"MIT"
] | 48 | 2020-09-13T20:22:42.000Z | 2021-04-30T11:13:30.000Z | students/K33401/Goncharov_Vladimir/Lr3/hotel/hotel/asgi.py | ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021 | bb91c91a56d21cec2b12ae4cc722eaa652a88420 | [
"MIT"
] | 69 | 2020-09-06T10:32:37.000Z | 2021-11-28T18:13:17.000Z | """
ASGI config for hotel project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project's settings module before building the app.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hotel.settings')
# Module-level ASGI callable that ASGI servers (daphne, uvicorn, ...) import.
application = get_asgi_application()
| 22.764706 | 78 | 0.782946 |
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hotel.settings')
application = get_asgi_application()
| true | true |
f72ab3141e4951a0fbf2744f08280c033d6a9acf | 13,023 | py | Python | imgcls/modeling/backbone/mobilenet.py | TuranSKT/detectron2_class | c90e68abbd39afa8c34d83ac760cabf3b5d02868 | [
"MIT"
] | 22 | 2020-06-09T11:06:15.000Z | 2022-03-29T16:24:23.000Z | imgcls/modeling/backbone/mobilenet.py | TuranSKT/detectron2_class | c90e68abbd39afa8c34d83ac760cabf3b5d02868 | [
"MIT"
] | 4 | 2020-07-09T16:39:48.000Z | 2020-11-25T13:34:52.000Z | imgcls/modeling/backbone/mobilenet.py | TuranSKT/detectron2_class | c90e68abbd39afa8c34d83ac760cabf3b5d02868 | [
"MIT"
] | 9 | 2020-06-10T09:55:09.000Z | 2021-08-20T12:55:26.000Z | '''
@Copyright (c) tkianai All Rights Reserved.
@Author : tkianai
@Github : https://github.com/tkianai
@Date : 2020-04-26 14:14:18
@FilePath : /ImageCls.detectron2/imgcls/modeling/backbone/mobilenet.py
@Description :
'''
import torch
import torch.nn as nn
from detectron2.layers import Conv2d, ShapeSpec
from detectron2.modeling.backbone.build import BACKBONE_REGISTRY
from detectron2.modeling.backbone import Backbone
from detectron2.modeling.backbone.fpn import FPN, LastLevelMaxPool, LastLevelP6P7
__all__ = [
'build_mnetv1_backbone',
'build_mnetv2_backbone',
]
def conv_bn_leaky(inp, oup, stride=1, leaky=0):
    """3x3 conv (pad 1, no bias) -> BatchNorm -> LeakyReLU."""
    layers = [
        Conv2d(inp, oup, 3, stride, 1, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope=leaky, inplace=True),
    ]
    return nn.Sequential(*layers)
def conv_dw_leaky(inp, oup, stride, leaky=0.1):
    """Depthwise-separable block: depthwise 3x3 conv then pointwise 1x1 conv,
    each followed by BatchNorm and LeakyReLU."""
    depthwise = [
        Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
        nn.BatchNorm2d(inp),
        nn.LeakyReLU(negative_slope=leaky, inplace=True),
    ]
    pointwise = [
        Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope=leaky, inplace=True),
    ]
    return nn.Sequential(*(depthwise + pointwise))
class MobileNetV1(Backbone):
    """MobileNetV1 feature extractor wrapped as a detectron2 ``Backbone``.

    Builds the standard stem + depthwise-separable stack and can expose
    intermediate feature maps ("stem", "mob2".."mob5") and/or a linear
    classification head ("linear").
    """

    def __init__(self, cfg, data_channel, width_mult=1.0, out_features=None, num_classes=None):
        """
        Args:
            cfg: detectron2 config node (not read here; accepted for builder symmetry).
            data_channel: number of channels of the input image.
            width_mult: width multiplier applied to every stage's channel count.
            out_features: names of outputs ``forward`` should return; defaults
                to ``["linear"]`` when a classifier head is built.
            num_classes: if not None, append avgpool + linear classifier head.
        """
        super().__init__()
        self.num_classes = num_classes
        input_channel = 32
        # scale input channel by the width multiplier
        input_channel = int(input_channel * width_mult)
        # stem: stride-2 conv, overall stride 2 so far
        current_stride = 2
        name = "stem"
        self.stem = conv_bn_leaky(
            data_channel, input_channel, current_stride, leaky=0.1)
        self._out_feature_strides = {name: current_stride}
        self._out_feature_channels = {name: input_channel}
        # body configuration: (output channels c, repeats n, first-layer stride s)
        dw_setting = [
            # c, n, s
            [64, 1, 1],
            [128, 2, 2],
            [256, 2, 2],
            [512, 6, 2],
            [1024, 2, 2],
        ]
        # 1-based layer counts after which a feature map is exported
        # (mapped to names "mob2".."mob5" below)
        self.return_features_indices = [3, 5, 11, 13]
        self.features = nn.ModuleList([])
        # building depthwise conv blocks
        for c, n, s in dw_setting:
            output_channel = int(c * width_mult)
            for i in range(n):
                # only the first repeat of a stage applies the stage stride
                if i == 0:
                    self.features.append(conv_dw_leaky(
                        input_channel, output_channel, s))
                else:
                    self.features.append(conv_dw_leaky(
                        input_channel, output_channel, 1))
                # update input channel for the next block
                input_channel = output_channel
                # export this feature map?
                if len(self.features) in self.return_features_indices:
                    name = "mob{}".format(
                        self.return_features_indices.index(len(self.features)) + 2)
                    self._out_feature_channels.update({
                        name: output_channel
                    })
                    # each exported map is one stride-2 stage deeper: 4, 8, 16, 32
                    current_stride *= 2
                    self._out_feature_strides.update({
                        name: current_stride
                    })
        if num_classes is not None:
            # classification head: global average pool + linear layer
            self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
            self.linear = nn.Linear(input_channel, num_classes)
            nn.init.normal_(self.linear.weight, std=0.01)
            name = "linear"
            if out_features is None:
                out_features = [name]
        self._out_features = out_features
        assert len(self._out_features)
        self._initialize_weights()

    def _initialize_weights(self):
        """He-style init for convs, constant init for BN, normal init for linears."""
        for m in self.modules():
            if isinstance(m, Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, (2. / n) ** 0.5)
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                # n = m.weight.size(1)
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()

    def freeze(self, freeze_at):
        """Freeze the stem (freeze_at >= 1) and, for freeze_at >= 2, the feature
        layers up to ``return_features_indices[freeze_at - 2]``.  Returns self."""
        if freeze_at > 0:
            # freeze stem
            for p in self.stem.parameters():
                p.requires_grad = False
        if freeze_at > 1:
            # freeze feature layers; clamp to the last export index
            freeze_at = freeze_at - 2
            freeze_layers = self.return_features_indices[freeze_at] if freeze_at < len(
                self.return_features_indices) else self.return_features_indices[-1]
            for layer_index in range(freeze_layers):
                for p in self.features[layer_index].parameters():
                    p.requires_grad = False
        return self

    def forward(self, x):
        """Run the network and return a dict of the requested outputs."""
        outputs = {}
        x = self.stem(x)
        if "stem" in self._out_features:
            outputs["stem"] = x
        for i, m in enumerate(self.features, 1):
            x = m(x)
            if i in self.return_features_indices:
                name = "mob{}".format(
                    self.return_features_indices.index(i) + 2)
                if name in self._out_features:
                    outputs[name] = x
        if self.num_classes is not None:
            x = self.avgpool(x)
            x = torch.flatten(x, 1)
            x = self.linear(x)
            if "linear" in self._out_features:
                outputs["linear"] = x
        return outputs
def conv_bn(inp, oup, stride):
    """3x3 conv (pad 1, no bias) -> BatchNorm -> ReLU6."""
    layers = [
        Conv2d(inp, oup, 3, stride, 1, bias=False),
        nn.BatchNorm2d(oup),
        nn.ReLU6(inplace=True),
    ]
    return nn.Sequential(*layers)
def conv_1x1_bn(inp, oup):
    """1x1 pointwise conv (no bias) -> BatchNorm -> ReLU6."""
    layers = [
        Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        nn.ReLU6(inplace=True),
    ]
    return nn.Sequential(*layers)
class InvertedResidual(nn.Module):
    """MobileNetV2 inverted-residual block.

    Expands channels by ``expand_ratio`` (pointwise conv), applies a depthwise
    3x3 conv, then projects back down with a linear pointwise conv.  A residual
    connection is used when the block keeps both stride and channel count.
    """

    def __init__(self, inp, oup, stride, expand_ratio):
        """
        Args:
            inp: input channel count.
            oup: output channel count.
            stride: stride of the depthwise conv (1 or 2).
            expand_ratio: channel expansion factor for the hidden layer.
        """
        super().__init__()
        self.stride = stride
        assert stride in [1, 2]

        hidden_dim = int(round(inp * expand_ratio))
        # residual only when spatial size and channels are preserved
        self.use_res_connect = self.stride == 1 and inp == oup

        if expand_ratio == 1:
            # no expansion: skip the first pointwise conv
            self.conv = nn.Sequential(
                # dw
                Conv2d(inp, hidden_dim, 3, stride, 1,
                       groups=hidden_dim, bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # pw-linear (no activation after projection)
                Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
        else:
            self.conv = nn.Sequential(
                # pw (expand)
                Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # dw
                Conv2d(hidden_dim, hidden_dim, 3, stride,
                       1, groups=hidden_dim, bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # pw-linear (no activation after projection)
                Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )

    def forward(self, x):
        # residual add when shapes allow it, plain transform otherwise
        if self.use_res_connect:
            return x + self.conv(x)
        else:
            return self.conv(x)
class MobileNetV2(Backbone):
    """MobileNetV2 feature extractor wrapped as a detectron2 ``Backbone``.

    Builds the stem + inverted-residual stack and can expose intermediate
    feature maps ("stem", "mob2".."mob5") and/or a linear classification
    head ("linear").
    """

    def __init__(self, cfg, data_channel, width_mult=1.0, out_features=None, num_classes=None):
        """
        Args:
            cfg: detectron2 config node (not read here; accepted for builder symmetry).
            data_channel: number of channels of the input image.
            width_mult: width multiplier applied to every stage's channel count.
            out_features: names of outputs ``forward`` should return; defaults
                to ``["linear"]`` when a classifier head is built.
            num_classes: if not None, append avgpool + linear classifier head.
        """
        super().__init__()
        self.num_classes = num_classes
        input_channel = 32
        # scale input channel by the width multiplier
        input_channel = int(input_channel * width_mult)
        # stem: stride-2 conv, overall stride 2 so far
        current_stride = 2
        name = "stem"
        self.stem = conv_bn(data_channel, input_channel, current_stride)
        self._out_feature_strides = {name: current_stride}
        self._out_feature_channels = {name: input_channel}
        # body configuration: (expand ratio t, output channels c, repeats n, stride s)
        block = InvertedResidual
        inverted_residual_setting = [
            # t, c, n, s
            [1, 16, 1, 1],
            [6, 24, 2, 2],
            [6, 32, 3, 2],
            [6, 64, 4, 2],
            [6, 96, 3, 1],
            [6, 160, 3, 2],
            [6, 320, 1, 1],
        ]
        # 1-based layer counts after which a feature map is exported
        # (mapped to names "mob2".."mob5" below)
        self.return_features_indices = [3, 6, 13, 17]
        self.features = nn.ModuleList([])
        # building inverted residual blocks
        for t, c, n, s in inverted_residual_setting:
            output_channel = int(c * width_mult)
            for i in range(n):
                # only the first repeat of a stage applies the stage stride
                if i == 0:
                    self.features.append(
                        block(input_channel, output_channel, s, expand_ratio=t))
                else:
                    self.features.append(
                        block(input_channel, output_channel, 1, expand_ratio=t))
                # update input channel for the next block
                input_channel = output_channel
                # export this feature map?
                if len(self.features) in self.return_features_indices:
                    name = "mob{}".format(
                        self.return_features_indices.index(len(self.features)) + 2)
                    self._out_feature_channels.update({
                        name: output_channel
                    })
                    # each exported map is one stride-2 stage deeper: 4, 8, 16, 32
                    current_stride *= 2
                    self._out_feature_strides.update({
                        name: current_stride
                    })
        if num_classes is not None:
            # classification head: global average pool + linear layer
            self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
            self.linear = nn.Linear(input_channel, num_classes)
            nn.init.normal_(self.linear.weight, std=0.01)
            name = "linear"
            if out_features is None:
                out_features = [name]
        self._out_features = out_features
        assert len(self._out_features)
        self._initialize_weights()

    def _initialize_weights(self):
        """He-style init for convs, constant init for BN, normal init for linears."""
        for m in self.modules():
            if isinstance(m, Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, (2. / n) ** 0.5)
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                # n = m.weight.size(1)
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()

    def freeze(self, freeze_at):
        """Freeze the stem (freeze_at >= 1) and, for freeze_at >= 2, the feature
        layers up to ``return_features_indices[freeze_at - 2]``.  Returns self."""
        if freeze_at > 0:
            # freeze stem
            for p in self.stem.parameters():
                p.requires_grad = False
        if freeze_at > 1:
            # freeze feature layers; clamp to the last export index
            freeze_at = freeze_at - 2
            freeze_layers = self.return_features_indices[freeze_at] if freeze_at < len(
                self.return_features_indices) else self.return_features_indices[-1]
            for layer_index in range(freeze_layers):
                for p in self.features[layer_index].parameters():
                    p.requires_grad = False
        return self

    def forward(self, x):
        """Run the network and return a dict of the requested outputs."""
        outputs = {}
        x = self.stem(x)
        if "stem" in self._out_features:
            outputs["stem"] = x
        # mob2 -> stride 2**2, mob3 -> stride 2**3, ...
        # output downsample strides: [4, 8, 16, 32]
        for i, m in enumerate(self.features, 1):
            x = m(x)
            if i in self.return_features_indices:
                name = "mob{}".format(
                    self.return_features_indices.index(i) + 2)
                if name in self._out_features:
                    outputs[name] = x
        if self.num_classes is not None:
            x = self.avgpool(x)
            x = torch.flatten(x, 1)
            x = self.linear(x)
            if "linear" in self._out_features:
                outputs["linear"] = x
        return outputs
@BACKBONE_REGISTRY.register()
def build_mnetv1_backbone(cfg, input_shape: ShapeSpec):
    """Build a MobileNetV1 backbone from the config and freeze its early stages."""
    num_classes = cfg.MODEL.CLSNET.NUM_CLASSES if cfg.MODEL.CLSNET.ENABLE else None
    backbone = MobileNetV1(
        cfg,
        input_shape.channels,
        width_mult=cfg.MODEL.MNET.WIDTH_MULT,
        out_features=cfg.MODEL.MNET.OUT_FEATURES,
        num_classes=num_classes,
    )
    return backbone.freeze(cfg.MODEL.BACKBONE.FREEZE_AT)
@BACKBONE_REGISTRY.register()
def build_mnetv2_backbone(cfg, input_shape: ShapeSpec):
    """Build a MobileNetV2 backbone from the config and freeze its early stages."""
    num_classes = cfg.MODEL.CLSNET.NUM_CLASSES if cfg.MODEL.CLSNET.ENABLE else None
    backbone = MobileNetV2(
        cfg,
        input_shape.channels,
        width_mult=cfg.MODEL.MNET.WIDTH_MULT,
        out_features=cfg.MODEL.MNET.OUT_FEATURES,
        num_classes=num_classes,
    )
    return backbone.freeze(cfg.MODEL.BACKBONE.FREEZE_AT)
| 35.581967 | 95 | 0.543807 |
import torch
import torch.nn as nn
from detectron2.layers import Conv2d, ShapeSpec
from detectron2.modeling.backbone.build import BACKBONE_REGISTRY
from detectron2.modeling.backbone import Backbone
from detectron2.modeling.backbone.fpn import FPN, LastLevelMaxPool, LastLevelP6P7
__all__ = [
'build_mnetv1_backbone',
'build_mnetv2_backbone',
]
def conv_bn_leaky(inp, oup, stride=1, leaky=0):
return nn.Sequential(
Conv2d(inp, oup, 3, stride, 1, bias=False),
nn.BatchNorm2d(oup),
nn.LeakyReLU(negative_slope=leaky, inplace=True)
)
def conv_dw_leaky(inp, oup, stride, leaky=0.1):
return nn.Sequential(
Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
nn.BatchNorm2d(inp),
nn.LeakyReLU(negative_slope=leaky, inplace=True),
Conv2d(inp, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
nn.LeakyReLU(negative_slope=leaky, inplace=True),
)
class MobileNetV1(Backbone):
def __init__(self, cfg, data_channel, width_mult=1.0, out_features=None, num_classes=None):
super().__init__()
self.num_classes = num_classes
input_channel = 32
input_channel = int(input_channel * width_mult)
current_stride = 2
name = "stem"
self.stem = conv_bn_leaky(
data_channel, input_channel, current_stride, leaky=0.1)
self._out_feature_strides = {name: current_stride}
self._out_feature_channels = {name: input_channel}
dw_setting = [
[64, 1, 1],
[128, 2, 2],
[256, 2, 2],
[512, 6, 2],
[1024, 2, 2],
]
self.return_features_indices = [3, 5, 11, 13]
self.features = nn.ModuleList([])
for c, n, s in dw_setting:
output_channel = int(c * width_mult)
for i in range(n):
if i == 0:
self.features.append(conv_dw_leaky(
input_channel, output_channel, s))
else:
self.features.append(conv_dw_leaky(
input_channel, output_channel, 1))
input_channel = output_channel
if len(self.features) in self.return_features_indices:
name = "mob{}".format(
self.return_features_indices.index(len(self.features)) + 2)
self._out_feature_channels.update({
name: output_channel
})
current_stride *= 2
self._out_feature_strides.update({
name: current_stride
})
if num_classes is not None:
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.linear = nn.Linear(input_channel, num_classes)
nn.init.normal_(self.linear.weight, std=0.01)
name = "linear"
if out_features is None:
out_features = [name]
self._out_features = out_features
assert len(self._out_features)
self._initialize_weights()
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, (2. / n) ** 0.5)
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
def freeze(self, freeze_at):
if freeze_at > 0:
for p in self.stem.parameters():
p.requires_grad = False
if freeze_at > 1:
freeze_at = freeze_at - 2
freeze_layers = self.return_features_indices[freeze_at] if freeze_at < len(
self.return_features_indices) else self.return_features_indices[-1]
for layer_index in range(freeze_layers):
for p in self.features[layer_index].parameters():
p.requires_grad = False
return self
def forward(self, x):
outputs = {}
x = self.stem(x)
if "stem" in self._out_features:
outputs["stem"] = x
for i, m in enumerate(self.features, 1):
x = m(x)
if i in self.return_features_indices:
name = "mob{}".format(
self.return_features_indices.index(i) + 2)
if name in self._out_features:
outputs[name] = x
if self.num_classes is not None:
x = self.avgpool(x)
x = torch.flatten(x, 1)
x = self.linear(x)
if "linear" in self._out_features:
outputs["linear"] = x
return outputs
def conv_bn(inp, oup, stride):
return nn.Sequential(
Conv2d(inp, oup, 3, stride, 1, bias=False),
nn.BatchNorm2d(oup),
nn.ReLU6(inplace=True)
)
def conv_1x1_bn(inp, oup):
return nn.Sequential(
Conv2d(inp, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
nn.ReLU6(inplace=True)
)
class InvertedResidual(nn.Module):
def __init__(self, inp, oup, stride, expand_ratio):
super().__init__()
self.stride = stride
assert stride in [1, 2]
hidden_dim = int(round(inp * expand_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
if expand_ratio == 1:
self.conv = nn.Sequential(
Conv2d(inp, hidden_dim, 3, stride, 1,
groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
nn.ReLU6(inplace=True),
Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
else:
self.conv = nn.Sequential(
Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
nn.BatchNorm2d(hidden_dim),
nn.ReLU6(inplace=True),
Conv2d(hidden_dim, hidden_dim, 3, stride,
1, groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
nn.ReLU6(inplace=True),
Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
class MobileNetV2(Backbone):
def __init__(self, cfg, data_channel, width_mult=1.0, out_features=None, num_classes=None):
super().__init__()
self.num_classes = num_classes
input_channel = 32
input_channel = int(input_channel * width_mult)
current_stride = 2
name = "stem"
self.stem = conv_bn(data_channel, input_channel, current_stride)
self._out_feature_strides = {name: current_stride}
self._out_feature_channels = {name: input_channel}
block = InvertedResidual
inverted_residual_setting = [
[1, 16, 1, 1],
[6, 24, 2, 2],
[6, 32, 3, 2],
[6, 64, 4, 2],
[6, 96, 3, 1],
[6, 160, 3, 2],
[6, 320, 1, 1],
]
self.return_features_indices = [3, 6, 13, 17]
self.features = nn.ModuleList([])
for t, c, n, s in inverted_residual_setting:
output_channel = int(c * width_mult)
for i in range(n):
if i == 0:
self.features.append(
block(input_channel, output_channel, s, expand_ratio=t))
else:
self.features.append(
block(input_channel, output_channel, 1, expand_ratio=t))
input_channel = output_channel
if len(self.features) in self.return_features_indices:
name = "mob{}".format(
self.return_features_indices.index(len(self.features)) + 2)
self._out_feature_channels.update({
name: output_channel
})
current_stride *= 2
self._out_feature_strides.update({
name: current_stride
})
if num_classes is not None:
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.linear = nn.Linear(input_channel, num_classes)
nn.init.normal_(self.linear.weight, std=0.01)
name = "linear"
if out_features is None:
out_features = [name]
self._out_features = out_features
assert len(self._out_features)
self._initialize_weights()
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, (2. / n) ** 0.5)
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
def freeze(self, freeze_at):
if freeze_at > 0:
for p in self.stem.parameters():
p.requires_grad = False
if freeze_at > 1:
freeze_at = freeze_at - 2
freeze_layers = self.return_features_indices[freeze_at] if freeze_at < len(
self.return_features_indices) else self.return_features_indices[-1]
for layer_index in range(freeze_layers):
for p in self.features[layer_index].parameters():
p.requires_grad = False
return self
def forward(self, x):
    """Run the backbone and return a dict of the requested feature maps.

    Keys follow ``self._out_features``: "stem", "mob2".."mob5" at the
    recorded stage boundaries, and "linear" when the classification head
    is enabled.
    """
    outputs = {}
    x = self.stem(x)
    if "stem" in self._out_features:
        outputs["stem"] = x
    for idx, layer in enumerate(self.features, 1):
        x = layer(x)
        if idx not in self.return_features_indices:
            continue
        stage_name = "mob{}".format(self.return_features_indices.index(idx) + 2)
        if stage_name in self._out_features:
            outputs[stage_name] = x
    if self.num_classes is not None:
        # Classification head: global pool -> flatten -> fully connected.
        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.linear(x)
        if "linear" in self._out_features:
            outputs["linear"] = x
    return outputs
@BACKBONE_REGISTRY.register()
def build_mnetv1_backbone(cfg, input_shape: ShapeSpec):
    """Build a MobileNetV1 backbone from a detectron2-style config.

    Reads MODEL.MNET.* and MODEL.BACKBONE.FREEZE_AT; the classification
    head is attached only when MODEL.CLSNET.ENABLE is set.
    """
    num_classes = cfg.MODEL.CLSNET.NUM_CLASSES if cfg.MODEL.CLSNET.ENABLE else None
    backbone = MobileNetV1(
        cfg,
        input_shape.channels,
        width_mult=cfg.MODEL.MNET.WIDTH_MULT,
        out_features=cfg.MODEL.MNET.OUT_FEATURES,
        num_classes=num_classes,
    )
    return backbone.freeze(cfg.MODEL.BACKBONE.FREEZE_AT)
@BACKBONE_REGISTRY.register()
def build_mnetv2_backbone(cfg, input_shape: ShapeSpec):
    """Build a MobileNetV2 backbone from a detectron2-style config.

    Mirrors build_mnetv1_backbone: MODEL.MNET.* controls width/outputs,
    MODEL.BACKBONE.FREEZE_AT controls layer freezing, and the classifier
    head is attached only when MODEL.CLSNET.ENABLE is set.
    """
    num_classes = cfg.MODEL.CLSNET.NUM_CLASSES if cfg.MODEL.CLSNET.ENABLE else None
    backbone = MobileNetV2(
        cfg,
        input_shape.channels,
        width_mult=cfg.MODEL.MNET.WIDTH_MULT,
        out_features=cfg.MODEL.MNET.OUT_FEATURES,
        num_classes=num_classes,
    )
    return backbone.freeze(cfg.MODEL.BACKBONE.FREEZE_AT)
| true | true |
f72ab477380e68f511e89a12fe5e0154052fb2b7 | 854 | py | Python | sleap/io/format/text.py | jens-k/sleap | 4e99ed037f1f7f41d9f15e2efaac638fc7e12b09 | [
"BSD-3-Clause-Clear"
] | null | null | null | sleap/io/format/text.py | jens-k/sleap | 4e99ed037f1f7f41d9f15e2efaac638fc7e12b09 | [
"BSD-3-Clause-Clear"
] | null | null | null | sleap/io/format/text.py | jens-k/sleap | 4e99ed037f1f7f41d9f15e2efaac638fc7e12b09 | [
"BSD-3-Clause-Clear"
] | null | null | null | from .adaptor import Adaptor, SleapObjectType
from .filehandle import FileHandle
class TextAdaptor(Adaptor):
    """Adaptor for reading and writing plain text files."""

    @property
    def handles(self):
        """This adaptor handles miscellaneous (non-dataset) objects."""
        return SleapObjectType.misc

    @property
    def default_ext(self):
        return "txt"

    @property
    def all_exts(self):
        return ["txt", "log"]

    @property
    def name(self):
        return "Text file"

    def can_read_file(self, file: FileHandle):
        # FIXME: accepts every file; no content sniffing is done yet.
        return True

    def can_write_filename(self, filename: str) -> bool:
        return True

    def does_read(self) -> bool:
        return True

    def does_write(self) -> bool:
        return True

    def read(self, file: FileHandle, *args, **kwargs):
        """Return the file's full contents as a string."""
        return file.text

    def write(self, filename: str, source_object: str):
        """Write *source_object* out to *filename* as text."""
        with open(filename, "w") as handle:
            handle.write(source_object)
| 21.35 | 56 | 0.619438 | from .adaptor import Adaptor, SleapObjectType
from .filehandle import FileHandle
class TextAdaptor(Adaptor):
@property
def handles(self):
return SleapObjectType.misc
@property
def default_ext(self):
return "txt"
@property
def all_exts(self):
return ["txt", "log"]
@property
def name(self):
return "Text file"
def can_read_file(self, file: FileHandle):
return True
def can_write_filename(self, filename: str) -> bool:
return True
def does_read(self) -> bool:
return True
def does_write(self) -> bool:
return True
def read(self, file: FileHandle, *args, **kwargs):
return file.text
def write(self, filename: str, source_object: str):
with open(filename, "w") as f:
f.write(source_object)
| true | true |
f72ab504565970994d8e7ad4fc8bc28fa7d14daa | 14,614 | py | Python | tests/unit/anchore_engine/services/policy_engine/policy/test_parameters.py | dspalmer99/anchore-engine | 8c61318be6fec5d767426fa4ccd98472cc85b5cd | [
"Apache-2.0"
] | 1 | 2019-06-27T08:47:48.000Z | 2019-06-27T08:47:48.000Z | tests/unit/anchore_engine/services/policy_engine/policy/test_parameters.py | dspalmer99/anchore-engine | 8c61318be6fec5d767426fa4ccd98472cc85b5cd | [
"Apache-2.0"
] | 4 | 2020-11-07T00:16:02.000Z | 2020-11-08T20:52:06.000Z | tests/unit/anchore_engine/services/policy_engine/policy/test_parameters.py | dspalmer99/anchore-engine | 8c61318be6fec5d767426fa4ccd98472cc85b5cd | [
"Apache-2.0"
] | 1 | 2019-11-23T03:39:28.000Z | 2019-11-23T03:39:28.000Z | import unittest
from anchore_engine.services.policy_engine.engine.policy.params import JsonSchemaValidator, BooleanStringValidator, TypeValidator, CommaDelimitedNumberListValidator, EnumValidator, \
DelimitedEnumStringValidator, IntegerValidator, NameVersionListValidator, PipeDelimitedStringListValidator, CommaDelimitedStringListValidator, RegexParamValidator, nested_item_delim_parser, \
delim_parser, LinkedValidator
from anchore_engine.services.policy_engine.engine.policy import params
from anchore_engine.services.policy_engine.engine.policy import gate
from anchore_engine.services.policy_engine.engine.policy.exceptions import ParameterValueInvalidError, ValidationError, RequiredParameterNotSetError
class ValidatorTestMixin(object):
    """
    Mixin for helpers for parameter validation tests
    """

    def run_matrix_test(self, value_matrix, validator):
        """Run *validator* over ``(value, should_pass)`` pairs.

        Values expected to pass must validate truthily; values expected to
        fail must raise ValidationError. The loop variable is named
        ``value`` (not ``input``) so the builtin is not shadowed, and the
        unused exception binding has been dropped.
        """
        for value, expected in value_matrix:
            print(('Testing value: {} with expected output: {}'.format(value, expected)))
            if expected:
                self.assertTrue(validator.validate(value), msg='Expected true for input: {}'.format(value))
            else:
                with self.assertRaises(ValidationError, msg='Expected exception for input: {}'.format(value)):
                    validator.validate(value)
class TestParamParsers(unittest.TestCase):
    """Tests for the delimiter-based parameter string parsers."""

    def _run_test_table(self, table, fn):
        # Each table row maps an input string ('test') to its parsed form ('result').
        for t in table:
            self.assertEqual(t['result'], fn(t['test']))

    def testDelimParser(self):
        # Default delimiter is ','; items are whitespace-stripped, but a
        # trailing delimiter yields an empty trailing item.
        test_table = [
            {'test': 'a,b', 'result': ['a', 'b']},
            {'test': ' a , b ', 'result': ['a', 'b']},
            {'test': 'a,b,', 'result': ['a', 'b', '']}
        ]
        self._run_test_table(test_table, delim_parser)
        # Same behavior with an explicit '|' delimiter.
        test_table = [
            {'test': 'a|b', 'result': ['a', 'b']},
            {'test': ' a | b ', 'result': ['a', 'b']},
            {'test': 'a|b|', 'result': ['a', 'b', '']}
        ]
        self._run_test_table(test_table, lambda x: delim_parser(param_value=x, item_delimiter='|'))

    def testBarsplitCommaDelimParser(self):
        # Comma-separated 'key|value' pairs parse into a dict.
        test_table = [
            {'test': 'a|b,c|d', 'result': {'a': 'b', 'c': 'd'}},
            {'test': ' a|b , c|d ', 'result': {'a': 'b', 'c': 'd'}},
            {'test': ' a|b,c|d ', 'result': {'a': 'b', 'c': 'd'}},
            {'test': ' a-b.c-09-e|b,c|d ', 'result': {'a-b.c-09-e': 'b', 'c': 'd'}},
        ]
        self._run_test_table(test_table, nested_item_delim_parser)
class TestTypeValidator(unittest.TestCase, ValidatorTestMixin):
    """TypeValidator: the value must already BE the JSON type (no coercion).

    Each matrix pairs a candidate value with whether validation should pass.
    """

    def test_boolean(self):
        # Only real bools pass; the strings 'true'/'True' etc. do not.
        matrix = [
            (True, True),
            (False, True),
            ('true', False),
            ('True', False),
            ('false', False),
            ('False', False),
            ('abc', False),
            (1, False),
            (['a'], False),
            ({'a': 'b'}, False)
        ]
        self.run_matrix_test(value_matrix=matrix, validator=TypeValidator("boolean"))

    def test_object(self):
        # Only dicts (JSON objects) pass.
        matrix = [
            ('blah', False),
            (1, False),
            (['a'], False),
            ({}, True),
            ({'a': 'b'}, True)
        ]
        self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('object'))

    def test_string(self):
        # Any str passes, including the empty string.
        matrix = [
            ('blah', True),
            ('', True),
            (1, False),
            (['a'], False),
            ({}, False),
            ({'a': 'b'}, False)
        ]
        self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('string'))

    def test_array(self):
        # Only lists pass; the string 'null' is just a string.
        matrix = [
            ('blah', False),
            (1, False),
            (['a'], True),
            ([], True),
            ({'a': 'b'}, False),
            ('null', False)
        ]
        self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('array'))

    def test_integer(self):
        # 'integer' excludes floats, even integral ones like 1.0.
        matrix = [
            ('blah', False),
            (1, True),
            (1.0, False),
            (['a'], False),
            ({}, False),
            ({'a': 'b'}, False)
        ]
        self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('integer'))

    def test_number(self):
        # 'number' accepts both ints and floats.
        matrix = [
            ('blah', False),
            (1, True),
            (1.0, True),
            (['a'], False),
            ({}, False),
            ({'a': 'b'}, False)
        ]
        self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('number'))
class TestBooleanStringValidator(unittest.TestCase, ValidatorTestMixin):
    """BooleanStringValidator accepts 'true'/'false' strings in any casing."""

    def test_boolean_strings(self):
        # Numeric truthiness ('1', '1.0', 1) is deliberately rejected.
        matrix = [
            ('True', True),
            ('False', True),
            ('true', True),
            ('TRUE', True),
            ('FALSE', True),
            ('false', True),
            ('blah', False),
            (1, False),
            ('1.0', False),
            ('1', False),
            ({'a': 'b'}, False),
            (['a'], False)
        ]
        self.run_matrix_test(matrix, BooleanStringValidator())
class TestJsonSchemaValidator(unittest.TestCase, ValidatorTestMixin):
    """Validation against a custom JsonSchemaValidator subclass."""

    class CustomValidator(JsonSchemaValidator):
        # Requires 'id' and 'name' strings; 'count' is an optional integer.
        __validation_schema__ = {
            'type': 'object',
            'required': ['id', 'name'],
            'properties': {
                'id': {
                    'type': 'string'
                },
                'name': {
                    'type': 'string'
                },
                'count': {
                    'type': 'integer'
                }
            }
        }

    def test_json(self):
        # Extra keys are allowed (additionalProperties is not restricted).
        matrix = [
            ({'id': 'abc', 'name': 'testname', 'count': 123}, True),
            ({'id': 'abc', 'name': 'test'}, True),
            ('a', False),
            (1.0, False),
            ('1.1', False),
            (['a', 1, 1], False),
            ({'name': 'testname', 'count': 123}, False),  # Missing a required key
            ({'id': 'v1', 'name': 'v2', 'count': 123, 'blah': 'hello'}, True)
        ]
        v = TestJsonSchemaValidator.CustomValidator()
        self.run_matrix_test(matrix, v)
class TestRegexValidator(unittest.TestCase, ValidatorTestMixin):
    """RegexParamValidator: string values must match the given pattern."""

    def test_regex(self):
        # '.*' matches any string (including empty) but rejects non-strings.
        v = RegexParamValidator('.*')
        matrix = [
            ('abadfasd.asdfonweo;ianvoaisealnefq;olq23--=23512=5=-w=215', True),
            (1, False),
            ('', True)
        ]
        self.run_matrix_test(matrix, v)
        # '[0-9]+' requires at least one digit; empty/whitespace fail.
        v = RegexParamValidator('[0-9]+')
        matrix = [
            ('1231231', True),
            ('abc', False),
            ('', False),
            (' ', False)
        ]
        self.run_matrix_test(matrix, v)
class TestRegexRelatedValidators(unittest.TestCase, ValidatorTestMixin):
    """Validators built on regex/delimiter parsing of parameter strings."""

    def test_commadelim_numberlist_validator(self):
        # Comma-separated numbers; surrounding whitespace is fine,
        # empty items and non-numeric items are not.
        v = CommaDelimitedNumberListValidator()
        matrix = [
            ('1,2,3', True),
            (' 1, 2, 3 ', True),
            ('1', True),
            ('a', False),
            ('1,2,c', False),
            ('1,,2', False)
        ]
        self.run_matrix_test(matrix, v)

    def test_nameversion_list_validator(self):
        # Comma-separated 'name|version' pairs; both halves must be
        # non-empty and space-separated pairs are rejected.
        v = NameVersionListValidator()
        matrix = [
            ('a|1.0,b|2.0', True),
            ('a|b,c|defefes|', False),
            ('a|b', True),
            ('a|b,c|d', True),
            ('a,b', False),
            ('|a', False),
            ('a,', False),
            ('a||', False),
            ('a|,c|d', False),
            ('a', False),
            ('a,b', False),
            ('pkg1|0.1.1.1 pkg2|1.2.', False)
        ]
        self.run_matrix_test(matrix, v)

    def test_commadelim_stringlist_validator(self):
        # Every comma-delimited item must be non-empty.
        v = CommaDelimitedStringListValidator()
        matrix = [
            ('a,b,c', True),
            ('aa,,bb', False),
            (',a', False),
            ('a,', False)
        ]
        self.run_matrix_test(matrix, v)

    def test_pipe_delim_validator(self):
        # Same shape as above but with '|' as the delimiter.
        v = PipeDelimitedStringListValidator()
        matrix = [
            ('ab', True),
            ('abc|c', True),
            ('ab|c|d', True),
            ('|a', False),
            ('a|', False)
        ]
        self.run_matrix_test(matrix, v)

    def test_integer_validator(self):
        # A single integer only; lists are rejected.
        v = IntegerValidator()
        matrix = [
            ('1', True),
            ('1,2,3', False),
            ('a,b,c', False),
            ('a', False),
            ('1,2,c', False)
        ]
        self.run_matrix_test(matrix, v)

    def test_enum_validator(self):
        # Exactly one of the allowed enum values; no lists.
        v = EnumValidator(['value1', 'value2'])
        matrix = [
            ('value1', True),
            ('value2', True),
            ('3', False),
            ('value1,value2', False)
        ]
        self.run_matrix_test(matrix, v)

    def test_enum_list_validator(self):
        # A comma-delimited list where every item is an allowed enum value.
        v = DelimitedEnumStringValidator(['value1', 'value2'])
        matrix = [
            ('value1', True),
            ('value2', True),
            ('value1,value2', True),
            ('value3', False),
            ('value1,value3', False)
        ]
        self.run_matrix_test(matrix, v)
class FakeTrigger(gate.BaseTrigger):
    """Minimal trigger fixture for exercising parameter plumbing."""
    __trigger_name__ = 'TestingTrigger'
    __description__ = 'Not real'
    __trigger_id__ = 'Blah123'
    # Optional string parameter; set via the 'param_test' kwarg at construction.
    param1 = params.TriggerParameter(name='param_test', example_str='somevalue', description='Test parameter', validator=TypeValidator("string"), is_required=False)

    def test1(self):
        # Print the runtime type of param1 as seen from an instance.
        print((type(self.param1)))
class FakeGate(gate.Gate):
    """Minimal gate fixture exposing only FakeTrigger."""
    __gate_name__ = 'Somegate'
    __triggers__ = [FakeTrigger]
class TestTriggerParams(unittest.TestCase):
    """Tests for TriggerParameter set/get behavior and trigger integration."""

    def test_param_basics(self):
        """A string-typed parameter accepts strings and rejects ints."""
        p = params.TriggerParameter('TestParam1', description='Param for testing basic strings', validator=TypeValidator("string"), related_to='ThisOtherParam')
        print('Trying string that should pass validation')
        # Should pass validation
        print((p.set_value('somestring')))
        print(('Got value: {}'.format(p.value())))
        print('Trying an int that should fail validation')
        # Should fail validation
        with self.assertRaises(ValidationError) as ex:
            print((p.set_value(10)))
        print(('Correctly got exception {}'.format(ex.exception)))

    def test_param_integration(self):
        """A kwarg passed at trigger construction reaches the parameter."""
        t = FakeTrigger(parent_gate_cls=FakeGate, param_test='blah')
        # print('Inst value: {}'.format(t.eval_params.get(t.param1.name)))
        print(('Inst value: {}'.format(t.param1.value())))
        print(('Class value: {}'.format(t.__class__.param1.value())))
        t.test1()
class ValidatedParameterTestMixin(object):
    """
    Mixin for helpers for parameter validation tests
    """

    def run_matrix_test(self, value_matrix, parameter):
        """Exercise a parameter object over ``(value, expected)`` pairs.

        A truthy ``expected`` means ``set_value`` must succeed and
        ``value()`` must round-trip to ``expected``; a falsy one means
        ``set_value`` must raise ValidationError. The loop variable is
        named ``value`` so the ``input`` builtin is not shadowed, and the
        unused exception binding has been dropped.
        """
        for value, expected in value_matrix:
            print(('Testing value: {} with expected output: {}'.format(value, expected)))
            if expected:
                parameter.set_value(value)
                output = parameter.value()
                self.assertEqual(output, expected)
            else:
                with self.assertRaises(ValidationError):
                    parameter.set_value(value)
class TestParameters(unittest.TestCase, ValidatedParameterTestMixin):
    """End-to-end parameter objects: set_value() then value() round-trips."""

    def test_nameversion_stringlist_parameter(self):
        # 'name|version' pairs parse into a dict keyed by name;
        # space-separated pairs and malformed items must raise.
        p = params.NameVersionStringListParameter(name='test1', description='test_description', is_required=False)
        test_matrix = [
            ('a|b,c|d', {'a': 'b', 'c': 'd'}),
            ('pkg1|0.1.1-abc,pkg2|1.3.5-asdf0', {'pkg1': '0.1.1-abc', 'pkg2': '1.3.5-asdf0'}),
            (' a|b , c|d', {'a': 'b', 'c': 'd'}),
            ('a,b', False),
            ('a b c', False),
            ('a|b,c,d', False),
            ('a|b|c|d', False),
            ('pkg1|0.1.1.1 pkg2|1.2.', False)
        ]
        self.run_matrix_test(test_matrix, p)

    def test_enum_string_parameter(self):
        # Exactly one enum value; lists, blanks, and unknown values raise.
        p = params.EnumStringParameter(name='test1', description='test1_description', is_required=False, enum_values=['value1', 'value2'])
        test_matrix = [
            ('value1', 'value1'),
            ('value2', 'value2'),
            ('value3', False),
            ('value1,value2', False),
            (' ', False),
            ('', False)
        ]
        self.run_matrix_test(test_matrix, p)

    def test_enumcomma_stringlist_parameter(self):
        # Comma-delimited enum values parse into a list (duplicates kept,
        # whitespace tolerated).
        p = params.EnumCommaDelimStringListParameter(name='test1', description='test1_description', is_required=False, enum_values=['value1', 'value2'])
        test_matrix = [
            ('value1', ['value1']),
            ('value1,value2', ['value1', 'value2']),
            ('value1 , value2', ['value1', 'value2']),
            ('value1, value2', ['value1', 'value2']),
            ('value1, value2, value1', ['value1', 'value2', 'value1']),
            ('value3', False),
            (' ', False),
            ('', False)
        ]
        self.run_matrix_test(test_matrix, p)
class TestLinkedValidator(unittest.TestCase, ValidatedParameterTestMixin):
    """LinkedValidator: a discriminator parameter selects the validator
    applied to a downstream parameter."""

    def test_linked(self):
        """Downstream validation switches as the discriminator changes."""
        p1 = params.EnumStringParameter(name='attribute', description='Testing123', enum_values=['a', 'b'], is_required=True)
        p2 = params.SimpleStringParameter(name='downstream', validator=LinkedValidator(discriminator_parameter='attribute', default_validator=TypeValidator('string'), value_map={'a': BooleanStringValidator(), 'b': IntegerValidator()}), description='test123')
        print(p2.validator.validation_criteria())
        #p1.set_value('a')
        # No discriminator injected yet -> the default (string) validator applies.
        p2.validator.inject_discriminator(None)
        test_matrix = [
            ('true', 'true'),
            ('blah', 'blah') # p1 not set, so uses default
        ]
        self.run_matrix_test(test_matrix, p2)
        # Discriminator 'a' -> BooleanStringValidator applies downstream.
        p1._param_value = None
        p2._param_value = None
        p2.validator.inject_discriminator('a')
        p1.set_value('a')
        test_matrix = [
            ('true', 'true'),
            ('blah', False) # should fail now that p1 has a value
        ]
        self.run_matrix_test(test_matrix, p2)
        # Discriminator 'b' -> IntegerValidator applies downstream.
        p1._param_value = None
        p2._param_value = None
        p1.set_value('b')
        p2.validator.inject_discriminator('b')
        test_matrix = [
            ('true', False),
            ('blah', False),
            ('123', '123')
        ]
        self.run_matrix_test(test_matrix, p2)

    def test_multiple(self):
        """Two trigger instances keep independent parameter values."""
        trig1 = FakeTrigger(parent_gate_cls=FakeGate, param_test="somevalue")
        trig2 = FakeTrigger(parent_gate_cls=FakeGate, param_test="someothervalue")
        print('{} {}'.format(trig1.json(), trig2.json()))
if __name__ == '__main__':
    # Allow running this module directly with the stdlib unittest runner.
    unittest.main()
| 32.189427 | 258 | 0.531066 | import unittest
from anchore_engine.services.policy_engine.engine.policy.params import JsonSchemaValidator, BooleanStringValidator, TypeValidator, CommaDelimitedNumberListValidator, EnumValidator, \
DelimitedEnumStringValidator, IntegerValidator, NameVersionListValidator, PipeDelimitedStringListValidator, CommaDelimitedStringListValidator, RegexParamValidator, nested_item_delim_parser, \
delim_parser, LinkedValidator
from anchore_engine.services.policy_engine.engine.policy import params
from anchore_engine.services.policy_engine.engine.policy import gate
from anchore_engine.services.policy_engine.engine.policy.exceptions import ParameterValueInvalidError, ValidationError, RequiredParameterNotSetError
class ValidatorTestMixin(object):
def run_matrix_test(self, value_matrix, validator):
for input, expected in value_matrix:
print(('Testing value: {} with expected output: {}'.format(input, expected)))
if expected:
self.assertTrue(validator.validate(input), msg='Expected true for input: {}'.format(input))
else:
with self.assertRaises(ValidationError, msg='Expected exception for input: {}'.format(input)) as e:
validator.validate(input)
class TestParamParsers(unittest.TestCase):
def _run_test_table(self, table, fn):
for t in table:
self.assertEqual(t['result'], fn(t['test']))
def testDelimParser(self):
test_table = [
{'test': 'a,b', 'result': ['a', 'b']},
{'test': ' a , b ', 'result': ['a', 'b']},
{'test': 'a,b,', 'result': ['a', 'b', '']}
]
self._run_test_table(test_table, delim_parser)
test_table = [
{'test': 'a|b', 'result': ['a', 'b']},
{'test': ' a | b ', 'result': ['a', 'b']},
{'test': 'a|b|', 'result': ['a', 'b', '']}
]
self._run_test_table(test_table, lambda x: delim_parser(param_value=x, item_delimiter='|'))
def testBarsplitCommaDelimParser(self):
test_table = [
{'test': 'a|b,c|d', 'result': {'a': 'b', 'c': 'd'}},
{'test': ' a|b , c|d ', 'result': {'a': 'b', 'c': 'd'}},
{'test': ' a|b,c|d ', 'result': {'a': 'b', 'c': 'd'}},
{'test': ' a-b.c-09-e|b,c|d ', 'result': {'a-b.c-09-e': 'b', 'c': 'd'}},
]
self._run_test_table(test_table, nested_item_delim_parser)
class TestTypeValidator(unittest.TestCase, ValidatorTestMixin):
def test_boolean(self):
matrix = [
(True, True),
(False, True),
('true', False),
('True', False),
('false', False),
('False', False),
('abc', False),
(1, False),
(['a'], False),
({'a': 'b'}, False)
]
self.run_matrix_test(value_matrix=matrix, validator=TypeValidator("boolean"))
def test_object(self):
matrix = [
('blah', False),
(1, False),
(['a'], False),
({}, True),
({'a': 'b'}, True)
]
self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('object'))
def test_string(self):
matrix = [
('blah', True),
('', True),
(1, False),
(['a'], False),
({}, False),
({'a': 'b'}, False)
]
self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('string'))
def test_array(self):
matrix = [
('blah', False),
(1, False),
(['a'], True),
([], True),
({'a': 'b'}, False),
('null', False)
]
self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('array'))
def test_integer(self):
matrix = [
('blah', False),
(1, True),
(1.0, False),
(['a'], False),
({}, False),
({'a': 'b'}, False)
]
self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('integer'))
def test_number(self):
matrix = [
('blah', False),
(1, True),
(1.0, True),
(['a'], False),
({}, False),
({'a': 'b'}, False)
]
self.run_matrix_test(value_matrix=matrix, validator=TypeValidator('number'))
class TestBooleanStringValidator(unittest.TestCase, ValidatorTestMixin):
def test_boolean_strings(self):
matrix = [
('True', True),
('False', True),
('true', True),
('TRUE', True),
('FALSE', True),
('false', True),
('blah', False),
(1, False),
('1.0', False),
('1', False),
({'a': 'b'}, False),
(['a'], False)
]
self.run_matrix_test(matrix, BooleanStringValidator())
class TestJsonSchemaValidator(unittest.TestCase, ValidatorTestMixin):
class CustomValidator(JsonSchemaValidator):
__validation_schema__ = {
'type': 'object',
'required': ['id', 'name'],
'properties': {
'id': {
'type': 'string'
},
'name': {
'type': 'string'
},
'count': {
'type': 'integer'
}
}
}
def test_json(self):
matrix = [
({'id': 'abc', 'name': 'testname', 'count': 123}, True),
({'id': 'abc', 'name': 'test'}, True),
('a', False),
(1.0, False),
('1.1', False),
(['a', 1, 1], False),
({'name': 'testname', 'count': 123}, False),
({'id': 'v1', 'name': 'v2', 'count': 123, 'blah': 'hello'}, True)
]
v = TestJsonSchemaValidator.CustomValidator()
self.run_matrix_test(matrix, v)
class TestRegexValidator(unittest.TestCase, ValidatorTestMixin):
def test_regex(self):
v = RegexParamValidator('.*')
matrix = [
('abadfasd.asdfonweo;ianvoaisealnefq;olq23--=23512=5=-w=215', True),
(1, False),
('', True)
]
self.run_matrix_test(matrix, v)
v = RegexParamValidator('[0-9]+')
matrix = [
('1231231', True),
('abc', False),
('', False),
(' ', False)
]
self.run_matrix_test(matrix, v)
class TestRegexRelatedValidators(unittest.TestCase, ValidatorTestMixin):
def test_commadelim_numberlist_validator(self):
v = CommaDelimitedNumberListValidator()
matrix = [
('1,2,3', True),
(' 1, 2, 3 ', True),
('1', True),
('a', False),
('1,2,c', False),
('1,,2', False)
]
self.run_matrix_test(matrix, v)
def test_nameversion_list_validator(self):
v = NameVersionListValidator()
matrix = [
('a|1.0,b|2.0', True),
('a|b,c|defefes|', False),
('a|b', True),
('a|b,c|d', True),
('a,b', False),
('|a', False),
('a,', False),
('a||', False),
('a|,c|d', False),
('a', False),
('a,b', False),
('pkg1|0.1.1.1 pkg2|1.2.', False)
]
self.run_matrix_test(matrix, v)
def test_commadelim_stringlist_validator(self):
v = CommaDelimitedStringListValidator()
matrix = [
('a,b,c', True),
('aa,,bb', False),
(',a', False),
('a,', False)
]
self.run_matrix_test(matrix, v)
def test_pipe_delim_validator(self):
v = PipeDelimitedStringListValidator()
matrix = [
('ab', True),
('abc|c', True),
('ab|c|d', True),
('|a', False),
('a|', False)
]
self.run_matrix_test(matrix, v)
def test_integer_validator(self):
v = IntegerValidator()
matrix = [
('1', True),
('1,2,3', False),
('a,b,c', False),
('a', False),
('1,2,c', False)
]
self.run_matrix_test(matrix, v)
def test_enum_validator(self):
v = EnumValidator(['value1', 'value2'])
matrix = [
('value1', True),
('value2', True),
('3', False),
('value1,value2', False)
]
self.run_matrix_test(matrix, v)
def test_enum_list_validator(self):
v = DelimitedEnumStringValidator(['value1', 'value2'])
matrix = [
('value1', True),
('value2', True),
('value1,value2', True),
('value3', False),
('value1,value3', False)
]
self.run_matrix_test(matrix, v)
class FakeTrigger(gate.BaseTrigger):
__trigger_name__ = 'TestingTrigger'
__description__ = 'Not real'
__trigger_id__ = 'Blah123'
param1 = params.TriggerParameter(name='param_test', example_str='somevalue', description='Test parameter', validator=TypeValidator("string"), is_required=False)
def test1(self):
print((type(self.param1)))
class FakeGate(gate.Gate):
__gate_name__ = 'Somegate'
__triggers__ = [FakeTrigger]
class TestTriggerParams(unittest.TestCase):
def test_param_basics(self):
p = params.TriggerParameter('TestParam1', description='Param for testing basic strings', validator=TypeValidator("string"), related_to='ThisOtherParam')
print('Trying string that should pass validation')
print((p.set_value('somestring')))
print(('Got value: {}'.format(p.value())))
print('Trying an int that should fail validation')
with self.assertRaises(ValidationError) as ex:
print((p.set_value(10)))
print(('Correctly got exception {}'.format(ex.exception)))
def test_param_integration(self):
t = FakeTrigger(parent_gate_cls=FakeGate, param_test='blah')
print(('Inst value: {}'.format(t.param1.value())))
print(('Class value: {}'.format(t.__class__.param1.value())))
t.test1()
class ValidatedParameterTestMixin(object):
def run_matrix_test(self, value_matrix, parameter):
for input, expected in value_matrix:
print(('Testing value: {} with expected output: {}'.format(input, expected)))
if expected:
parameter.set_value(input)
output = parameter.value()
self.assertEqual(output, expected)
else:
with self.assertRaises(ValidationError) as e:
parameter.set_value(input)
class TestParameters(unittest.TestCase, ValidatedParameterTestMixin):
def test_nameversion_stringlist_parameter(self):
p = params.NameVersionStringListParameter(name='test1', description='test_description', is_required=False)
test_matrix = [
('a|b,c|d', {'a': 'b', 'c': 'd'}),
('pkg1|0.1.1-abc,pkg2|1.3.5-asdf0', {'pkg1': '0.1.1-abc', 'pkg2': '1.3.5-asdf0'}),
(' a|b , c|d', {'a': 'b', 'c': 'd'}),
('a,b', False),
('a b c', False),
('a|b,c,d', False),
('a|b|c|d', False),
('pkg1|0.1.1.1 pkg2|1.2.', False)
]
self.run_matrix_test(test_matrix, p)
def test_enum_string_parameter(self):
p = params.EnumStringParameter(name='test1', description='test1_description', is_required=False, enum_values=['value1', 'value2'])
test_matrix = [
('value1', 'value1'),
('value2', 'value2'),
('value3', False),
('value1,value2', False),
(' ', False),
('', False)
]
self.run_matrix_test(test_matrix, p)
def test_enumcomma_stringlist_parameter(self):
p = params.EnumCommaDelimStringListParameter(name='test1', description='test1_description', is_required=False, enum_values=['value1', 'value2'])
test_matrix = [
('value1', ['value1']),
('value1,value2', ['value1', 'value2']),
('value1 , value2', ['value1', 'value2']),
('value1, value2', ['value1', 'value2']),
('value1, value2, value1', ['value1', 'value2', 'value1']),
('value3', False),
(' ', False),
('', False)
]
self.run_matrix_test(test_matrix, p)
class TestLinkedValidator(unittest.TestCase, ValidatedParameterTestMixin):
def test_linked(self):
p1 = params.EnumStringParameter(name='attribute', description='Testing123', enum_values=['a', 'b'], is_required=True)
p2 = params.SimpleStringParameter(name='downstream', validator=LinkedValidator(discriminator_parameter='attribute', default_validator=TypeValidator('string'), value_map={'a': BooleanStringValidator(), 'b': IntegerValidator()}), description='test123')
print(p2.validator.validation_criteria())
p2.validator.inject_discriminator(None)
test_matrix = [
('true', 'true'),
('blah', 'blah')
]
self.run_matrix_test(test_matrix, p2)
p1._param_value = None
p2._param_value = None
p2.validator.inject_discriminator('a')
p1.set_value('a')
test_matrix = [
('true', 'true'),
('blah', False)
]
self.run_matrix_test(test_matrix, p2)
p1._param_value = None
p2._param_value = None
p1.set_value('b')
p2.validator.inject_discriminator('b')
test_matrix = [
('true', False),
('blah', False),
('123', '123')
]
self.run_matrix_test(test_matrix, p2)
def test_multiple(self):
trig1 = FakeTrigger(parent_gate_cls=FakeGate, param_test="somevalue")
trig2 = FakeTrigger(parent_gate_cls=FakeGate, param_test="someothervalue")
print('{} {}'.format(trig1.json(), trig2.json()))
if __name__ == '__main__':
unittest.main()
| true | true |
f72ab6805d5b4e650b8e6b745b9ad9b0ed680de0 | 329 | py | Python | clingine/clock.py | avancayetano/clingine | 55e8bd6366aad3ae8e7ac9537fa3ae85efab9ddc | [
"MIT"
] | 12 | 2020-04-10T09:10:29.000Z | 2022-03-12T03:45:08.000Z | clingine/clock.py | avancayetano/clingine | 55e8bd6366aad3ae8e7ac9537fa3ae85efab9ddc | [
"MIT"
] | 6 | 2020-04-11T10:47:01.000Z | 2020-10-19T14:15:55.000Z | clingine/clock.py | avancayetano/clingine | 55e8bd6366aad3ae8e7ac9537fa3ae85efab9ddc | [
"MIT"
] | 1 | 2021-09-04T00:40:34.000Z | 2021-09-04T00:40:34.000Z | import time
class Clock:
    """Wall-clock timer: tracks total elapsed time and time since the
    last update() call."""

    def __init__(self):
        # Both anchors start at construction time.
        self.start_time = time.time()
        self.current_time = time.time()

    def get_time(self):
        """Seconds elapsed since this clock was created."""
        return time.time() - self.start_time

    def get_dt(self):
        """Seconds elapsed since the last update() (or construction)."""
        return time.time() - self.current_time

    def update(self):
        """Reset the delta anchor to now."""
        self.current_time = time.time()
def delay(self, sec):
time.sleep(sec) | 19.352941 | 40 | 0.702128 | import time
class Clock:
def __init__(self):
self.start_time = time.time()
self.current_time = time.time()
def get_time(self):
return time.time() - self.start_time
def get_dt(self):
return time.time() - self.current_time
def update(self):
self.current_time = time.time()
def delay(self, sec):
time.sleep(sec) | true | true |
f72ab6e6434d9b5f426cef3c89cc2fec38e25ed5 | 1,703 | py | Python | scripts/maf_covered_regions.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | scripts/maf_covered_regions.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | scripts/maf_covered_regions.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Read a maf file and print the regions covered to a set of bed files (one for
each sequence source referenced in the maf). Only blocks with a positive
percent identity are written out.
TODO: Can this be generalized to be made more useful?
usage: %prog bed_outfile_prefix < maf
"""
from __future__ import division, print_function
import sys
import bx.align.maf
import psyco_full
def block_pid( comp1, comp2 ):
    """Percent identity between two aligned components.

    Columns with a gap ('-') in either sequence are ignored; comparison is
    case-insensitive. Returns a fraction in [0, 1], or None when there are
    no non-gap columns to compare.

    Assumes the two texts have equal length, as MAF alignment blocks
    guarantee; ``zip`` truncates to the shorter text otherwise.
    """
    match = 0
    total = 0
    t1 = comp1.text.lower()
    t2 = comp2.text.lower()
    # Walk aligned columns pairwise instead of indexing by position.
    for a, b in zip( t1, t2 ):
        if a == '-' or b == '-':
            continue
        if a == b:
            match += 1
        total += 1
    if total == 0:
        return None
    return match / total
def main():
    """Stream MAF blocks from stdin and emit one BED file per species.

    BED files are named <out_prefix><species>.bed; only blocks with a
    positive percent identity are written.
    """
    out_prefix = sys.argv[1]
    print(out_prefix)
    bed_files = {}
    for block in bx.align.maf.Reader( sys.stdin ):
        reference = block.components[0]
        ref_chrom = reference.src.split('.')[1]
        for comp in block.components[1:]:
            species, chrom = comp.src.split('.')[:2]
            # Lazily open one output file per species on first sight.
            if species not in bed_files:
                bed_files[species] = open( "%s%s.bed" % ( out_prefix, species ), "w" )
            pid = block_pid( reference, comp )
            # Skips both None (no columns) and 0.0 identity.
            if pid:
                bed_files[species].write( "%s\t%d\t%d\t%s:%d-%d,%s\t%f\n" %
                    ( ref_chrom, reference.forward_strand_start, reference.forward_strand_end,
                      chrom, comp.start, comp.end, comp.strand, pid ) )
    for handle in bed_files.values():
        handle.close()
if __name__ == "__main__":
    # Usage: maf_covered_regions.py <bed_outfile_prefix> < alignment.maf
    main()
| 28.864407 | 107 | 0.570757 |
from __future__ import division, print_function
import sys
import bx.align.maf
import psyco_full
def block_pid( comp1, comp2 ):
match = 0
total = 0
t1 = comp1.text.lower()
t2 = comp2.text.lower()
for i in range( 0, len(t1) ):
a, b = t1[i], t2[i]
if a == '-' or b == '-':
continue
elif a == b:
match += 1
total += 1
if total == 0: return None
return ( match / total )
def main():
out_prefix = sys.argv[1]
print(out_prefix)
out_files = dict()
for block in bx.align.maf.Reader( sys.stdin ):
ref_comp = block.components[0]
ref_chrom = ref_comp.src.split('.')[1]
for comp in block.components[1:]:
comp_species, comp_chrom = comp.src.split('.')[:2]
if comp_species not in out_files:
f = open( "%s%s.bed" % ( out_prefix, comp_species ), "w" )
out_files[comp_species] = f
pid = block_pid( ref_comp, comp )
if pid:
out_files[comp_species].write( "%s\t%d\t%d\t%s:%d-%d,%s\t%f\n" %
( ref_chrom, ref_comp.forward_strand_start, ref_comp.forward_strand_end, \
comp_chrom, comp.start, comp.end, comp.strand, pid ) )
for f in out_files.values():
f.close()
if __name__ == "__main__":
main()
| true | true |
f72ab71ef7ac9e4ef3368da23f0af720b87fc67f | 448 | py | Python | M101P/week1/pymongo_exception_processing/mongo_exception.py | lambdaxymox/mongodb-university | fbab1dfa61b0c422f0d45209d0047261da3525c9 | [
"Unlicense"
] | 1 | 2020-04-08T03:03:16.000Z | 2020-04-08T03:03:16.000Z | M101P/week1/pymongo_exception_processing/mongo_exception.py | lambdaxymox/mongodb-university | fbab1dfa61b0c422f0d45209d0047261da3525c9 | [
"Unlicense"
] | null | null | null | M101P/week1/pymongo_exception_processing/mongo_exception.py | lambdaxymox/mongodb-university | fbab1dfa61b0c422f0d45209d0047261da3525c9 | [
"Unlicense"
] | 1 | 2020-04-08T03:03:18.000Z | 2020-04-08T03:03:18.000Z |
# NOTE: this is Python 2 code (print statements). It demonstrates pymongo
# exception handling against a local mongod; 'sys' is imported but unused.
import sys
import pymongo

# Connect to the default local MongoDB instance; use the 'test' database
# and its 'users' collection.
connection = pymongo.MongoClient("mongodb://localhost")
db = connection.test
users = db.users

doc = {'firstname':'Andrew', 'lastname':'Erlichson'}
print doc
print "about to insert the document"
try:
    users.insert_one(doc)
except Exception as e:
    print "insert failed:", e
# The driver adds the generated _id to the local doc, so printing it again
# shows the new field.
print doc
print "inserting again"
try:
    # Re-inserting the same doc (now carrying an _id) collides with the
    # unique _id index, so this second insert is expected to fail.
    users.insert_one(doc)
except Exception as e:
    print "second insert failed:", e
print doc
| 15.448276 | 55 | 0.716518 |
import sys
import pymongo
connection = pymongo.MongoClient("mongodb://localhost")
db = connection.test
users = db.users
doc = {'firstname':'Andrew', 'lastname':'Erlichson'}
print doc
print "about to insert the document"
try:
users.insert_one(doc)
except Exception as e:
print "insert failed:", e
print doc
print "inserting again"
try:
users.insert_one(doc)
except Exception as e:
print "second insert failed:", e
print doc
| false | true |
f72ab7e4fe69751d46adb928a0232848fd36398f | 4,962 | py | Python | apps/log_extract/handlers/thread.py | yiqiwang-17/bk-log | 7b356fced63b667baea300cfd194ad70a842c3ee | [
"MIT"
] | null | null | null | apps/log_extract/handlers/thread.py | yiqiwang-17/bk-log | 7b356fced63b667baea300cfd194ad70a842c3ee | [
"MIT"
] | null | null | null | apps/log_extract/handlers/thread.py | yiqiwang-17/bk-log | 7b356fced63b667baea300cfd194ad70a842c3ee | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-LOG 蓝鲸日志平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-LOG 蓝鲸日志平台 is licensed under the MIT License.
License for BK-LOG 蓝鲸日志平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging # noqa
from functools import partial # noqa
from multiprocessing.pool import ThreadPool as _ThreadPool # noqa
from django import db # noqa
from django.utils import timezone, translation # noqa
from apps.utils.local import activate_request, get_request # noqa
from .local import local # noqa
# Module-level logger; used by ThreadPool.map_ignore_exception to report
# swallowed worker exceptions.
logger = logging.getLogger(__name__)
def run_func_with_local(items, tz, lang, request, func, *args, **kwargs):
    """Execute ``func`` in a worker thread with the caller's context restored.

    Replays the caller's thread-local items, timezone, language and API
    request into the worker thread, invokes ``func``, then closes the
    thread's DB connections and clears the thread-local storage.

    :param items: thread-local (name, value) pairs captured in the caller
    :param tz: timezone name to activate
    :param lang: language code to activate
    :param request: added by jairwu — API request to re-activate
    :param func: callable to execute
    :param args: positional arguments for ``func``
    :param kwargs: keyword arguments for ``func``
    :return: whatever ``func`` returns
    """
    # Replay the caller's thread-local data into this worker thread.
    for item in items:
        setattr(local, item[0], item[1])
    # Restore timezone, language and the current request.
    timezone.activate(tz)
    translation.activate(lang)
    activate_request(request)
    try:
        # Idiom fix: the original `except Exception as e: raise e` added a
        # useless frame; let exceptions propagate and rely on `finally`.
        return func(*args, **kwargs)
    finally:
        # Close DB connections owned by this thread.
        db.connections.close_all()
        # Clear thread-local data. Snapshot the items first so we do not
        # delete attributes from `local` while iterating over it.
        for item in list(local):
            delattr(local, item[0])
class ThreadPool(_ThreadPool):
    """Thread pool that propagates the caller's thread-local context.

    Every submitted callable is wrapped so that the submitting thread's
    local data, timezone, language and current request are restored inside
    the worker thread before the callable runs.
    """

    @staticmethod
    def get_func_with_local(func):
        # Capture the calling thread's context so it can be replayed in
        # the worker (see run_func_with_local).
        tz = timezone.get_current_timezone().zone
        lang = translation.get_language()
        items = [item for item in local]
        request = get_request()
        return partial(run_func_with_local, items, tz, lang, request, func)

    def map_ignore_exception(self, func, iterable, return_exception=False):
        """Like ``map`` but failed calls are logged instead of propagating.

        :param return_exception: when True, a failed call contributes the
            raised exception object to the result list; otherwise it is
            dropped.
        """
        futures = []
        for params in iterable:
            if not isinstance(params, (tuple, list)):
                params = (params,)
            futures.append(self.apply_async(func, args=params))

        results = []
        for future in futures:
            try:
                results.append(future.get())
            except Exception as e:
                if return_exception:
                    results.append(e)
                logger.exception(e)
        return results

    def map_async(self, func, iterable, chunksize=None, callback=None):
        return super(ThreadPool, self).map_async(
            self.get_func_with_local(func), iterable, chunksize=chunksize, callback=callback
        )

    def apply_async(self, func, args=(), kwds={}, callback=None):
        return super(ThreadPool, self).apply_async(
            self.get_func_with_local(func), args=args, kwds=kwds, callback=callback
        )

    def imap(self, func, iterable, chunksize=1):
        return super(ThreadPool, self).imap(self.get_func_with_local(func), iterable, chunksize)

    def imap_unordered(self, func, iterable, chunksize=1):
        # BUG FIX: the original pre-wrapped ``func`` with
        # ``partial(run_func_with_local, func, local)`` — wrong argument
        # order — and then wrapped it again with get_func_with_local, so
        # the worker crashed trying to iterate the function as `items`.
        return super(ThreadPool, self).imap_unordered(
            self.get_func_with_local(func), iterable, chunksize=chunksize
        )
| 38.169231 | 116 | 0.689238 |
import logging
from functools import partial
from multiprocessing.pool import ThreadPool as _ThreadPool
from django import db
from django.utils import timezone, translation
from apps.utils.local import activate_request, get_request
from .local import local
logger = logging.getLogger(__name__)
def run_func_with_local(items, tz, lang, request, func, *args, **kwargs):
for item in items:
setattr(local, item[0], item[1])
timezone.activate(tz)
translation.activate(lang)
activate_request(request)
try:
data = func(*args, **kwargs)
except Exception as e:
raise e
finally:
db.connections.close_all()
for item in local:
delattr(local, item[0])
return data
class ThreadPool(_ThreadPool):
@staticmethod
def get_func_with_local(func):
tz = timezone.get_current_timezone().zone
lang = translation.get_language()
items = [item for item in local]
request = get_request()
return partial(run_func_with_local, items, tz, lang, request, func)
def map_ignore_exception(self, func, iterable, return_exception=False):
futures = []
for params in iterable:
if not isinstance(params, (tuple, list)):
params = (params,)
futures.append(self.apply_async(func, args=params))
results = []
for future in futures:
try:
results.append(future.get())
except Exception as e:
if return_exception:
results.append(e)
logger.exception(e)
return results
def map_async(self, func, iterable, chunksize=None, callback=None):
return super(ThreadPool, self).map_async(
self.get_func_with_local(func), iterable, chunksize=chunksize, callback=callback
)
def apply_async(self, func, args=(), kwds={}, callback=None):
return super(ThreadPool, self).apply_async(
self.get_func_with_local(func), args=args, kwds=kwds, callback=callback
)
def imap(self, func, iterable, chunksize=1):
return super(ThreadPool, self).imap(self.get_func_with_local(func), iterable, chunksize)
def imap_unordered(self, func, iterable, chunksize=1):
func = partial(run_func_with_local, func, local)
return super(ThreadPool, self).imap_unordered(self.get_func_with_local(func), iterable, chunksize=chunksize)
| true | true |
f72ab8481e4f48f3a7a7d665752d25ae94efa665 | 3,571 | py | Python | basic/string1.py | hmln/google-python-exercises | c9b55063708ea22a99914a3ad14fd2aae54336f2 | [
"Apache-2.0"
] | null | null | null | basic/string1.py | hmln/google-python-exercises | c9b55063708ea22a99914a3ad14fd2aae54336f2 | [
"Apache-2.0"
] | null | null | null | basic/string1.py | hmln/google-python-exercises | c9b55063708ea22a99914a3ad14fd2aae54336f2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
    """Return 'Number of donuts: <count>', using 'many' when count >= 10."""
    quantity = 'many' if count >= 10 else count
    return 'Number of donuts: %s' % quantity
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
    """Return the first 2 + last 2 chars of s, or '' if len(s) < 2."""
    return s[:2] + s[-2:] if len(s) >= 2 else ''
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
    """Replace later occurrences of s[0] with '*', keeping the first char."""
    first, rest = s[0], s[1:]
    return first + rest.replace(first, '*')
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
    """Return '<a> <b>' with the first two chars of each string swapped."""
    swapped_a = b[:2] + a[2:]
    swapped_b = a[:2] + b[2:]
    return swapped_a + ' ' + swapped_b
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print('%s got: %s expected: %s' % (prefix, repr(got), repr(expected)))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
    """Run each exercise function against known inputs, printing results."""
    print('donuts')
    for count, want in ((4, 'Number of donuts: 4'),
                        (9, 'Number of donuts: 9'),
                        (10, 'Number of donuts: many'),
                        (99, 'Number of donuts: many')):
        test(donuts(count), want)
    print()

    print('both_ends')
    for word, want in (('spring', 'spng'), ('Hello', 'Helo'),
                       ('a', ''), ('xyz', 'xyyz')):
        test(both_ends(word), want)
    print()

    print('fix_start')
    for word, want in (('babble', 'ba**le'), ('aardvark', 'a*rdv*rk'),
                       ('google', 'goo*le'), ('donut', 'donut')):
        test(fix_start(word), want)
    print()

    print('mix_up')
    for (left, right), want in ((('mix', 'pod'), 'pox mid'),
                                (('dog', 'dinner'), 'dig donner'),
                                (('gnash', 'sport'), 'spash gnort'),
                                (('pezzy', 'firm'), 'fizzy perm')):
        test(mix_up(left, right), want)
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
| 32.463636 | 80 | 0.659199 |
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
def donuts(count):
return 'Number of donuts: {}'.format(count if count < 10 else 'many')
def both_ends(s):
if len(s) < 2:
return ''
return s[0:2] + s[-2:]
def fix_start(s):
return s[0] + s[1:].replace(s[0], '*')
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
return '{} {}'.format(b[:2] + a[2:], a[:2] + b[2:])
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print('%s got: %s expected: %s' % (prefix, repr(got), repr(expected)))
def main():
print('donuts')
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print()
print('both_ends')
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print()
print('fix_start')
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print()
print('mix_up')
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
if __name__ == '__main__':
main()
| true | true |
f72ab89546778e858c6dc70b6873b930fa6fde29 | 518 | py | Python | tests/test_config.py | kraeki/openair-jac | 760b1b1be7efebde1146b31cf0a9326a7362a82c | [
"BSD-3-Clause"
] | null | null | null | tests/test_config.py | kraeki/openair-jac | 760b1b1be7efebde1146b31cf0a9326a7362a82c | [
"BSD-3-Clause"
] | null | null | null | tests/test_config.py | kraeki/openair-jac | 760b1b1be7efebde1146b31cf0a9326a7362a82c | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Test configs."""
from openair.app import create_app
from openair.settings import DevConfig, ProdConfig
def test_production_config():
    """ProdConfig marks the env as prod and disables all debugging."""
    cfg = create_app(ProdConfig).config
    assert cfg['ENV'] == 'prod'
    assert cfg['DEBUG'] is False
    assert cfg['DEBUG_TB_ENABLED'] is False
def test_dev_config():
    """DevConfig marks the env as dev and enables debugging."""
    cfg = create_app(DevConfig).config
    assert cfg['ENV'] == 'dev'
    assert cfg['DEBUG'] is True
| 25.9 | 50 | 0.675676 |
from openair.app import create_app
from openair.settings import DevConfig, ProdConfig
def test_production_config():
app = create_app(ProdConfig)
assert app.config['ENV'] == 'prod'
assert app.config['DEBUG'] is False
assert app.config['DEBUG_TB_ENABLED'] is False
def test_dev_config():
app = create_app(DevConfig)
assert app.config['ENV'] == 'dev'
assert app.config['DEBUG'] is True
| true | true |
f72ab8a2448743b933326291b648e8d737b17a76 | 142 | py | Python | config/prd.py | by46/camel | b1ac2609bc5d1cd22933c07c9fce7b935f2d9394 | [
"MIT"
] | null | null | null | config/prd.py | by46/camel | b1ac2609bc5d1cd22933c07c9fce7b935f2d9394 | [
"MIT"
] | null | null | null | config/prd.py | by46/camel | b1ac2609bc5d1cd22933c07c9fce7b935f2d9394 | [
"MIT"
] | null | null | null | # PRD environment setting
# Flask-NegLog Settings
# Flask-NegLog logging settings for the PRD environment.
# NOTE(review): a 'debug' log level looks unusually verbose for
# production — confirm this is intentional.
LOG_LEVEL = 'debug'
# Errors are written to this file; console output is disabled in PRD.
LOG_FILENAME = "/var/camel/error.log"
LOG_ENABLE_CONSOLE = False
| 20.285714 | 38 | 0.739437 |
LOG_LEVEL = 'debug'
LOG_FILENAME = "/var/camel/error.log"
LOG_ENABLE_CONSOLE = False
| true | true |
f72ab9e2e5e78bac6263ebb24b7540ab94fc5895 | 1,304 | py | Python | clean_prediction.py | richardanarfi/Recsys-Challenge-2018-TeamFL | 81e00a2417d530ea1033dcb22fbe29b7ceb12bb2 | [
"Apache-2.0"
] | null | null | null | clean_prediction.py | richardanarfi/Recsys-Challenge-2018-TeamFL | 81e00a2417d530ea1033dcb22fbe29b7ceb12bb2 | [
"Apache-2.0"
] | null | null | null | clean_prediction.py | richardanarfi/Recsys-Challenge-2018-TeamFL | 81e00a2417d530ea1033dcb22fbe29b7ceb12bb2 | [
"Apache-2.0"
] | null | null | null | from gensim.models import Word2Vec
from sklearn.decomposition import PCA
from matplotlib import pyplot
import string
import fnmatch
# define training data
#sentences = open('new_file_sentence.txt', 'r', encoding='utf-8')
# Filter prediction rows down to spotify track URIs only.
# Input: comma-separated rows in predictions_v11_1500_clean.txt.
# Output: one comma-terminated URI list per row in predictions_v11_500.txt.
path = 'predictions_v11_1500_clean.txt'
output_file = open("predictions_v11_500.txt", "w")
with open(path) as f:
    lines = f.read().split('\n')
# Process at most 1,000,000 rows; the slice also drops the trailing
# empty line produced by the final newline.
for line in lines[: min(1000000, len(lines) - 1)]:
    # BUG FIX: the original shadowed the builtin `str` and blindly
    # indexed `line[i]` for i in range(2000), raising IndexError on any
    # row with fewer than 2000 tokens. Iterate the (capped) tokens
    # directly instead.
    tokens = line.replace(' ', '').split(',')
    kept = []
    for token in tokens[:2000]:
        if 'spotify:track:' in token:
            kept.append(token)
            print(token)
    # Each kept URI is followed by a comma, matching the original format.
    output_file.write(''.join(t + ',' for t in kept))
    output_file.write('\n')
output_file.close()
#y = not (fnmatch.filter(line, x))
# print(y)
#print(line[i])
#print(line)
#print(x for x in line if 'spotify' in x)
#if "spotify" not in line:
# print(line)
# line=line[i].replace(line[i], '')
#print(line)
#input_texts.append(line)
#output_file.write(input_texts)
#output_file.write('\n')
#import fnmatch
#l = ['RT07010534.txt', 'RT07010533.txt', 'RT02010534.txt']
#pattern = 'RT0701*.txt'
#matching = fnmatch.filter(l, pattern)
#print(matching)
#print(sample1)
| 26.612245 | 66 | 0.595859 | from gensim.models import Word2Vec
from sklearn.decomposition import PCA
from matplotlib import pyplot
import string
import fnmatch
path = 'predictions_v11_1500_clean.txt'
output_file = open("predictions_v11_500.txt", "w")
input_texts = ()
with open(path) as f:
lines = f.read().split('\n')
for line in lines[: min(1000000, len(lines) - 1)]:
line = line.replace(' ','').split(',')
str = ''
for i in range(2000):
if 'spotify:track:' in line[i]:
str += line[i]
str += ','
print(line[i])
output_file.write(str)
output_file.write('\n')
| true | true |
f72aba22fc109af958a6de438269df6a2c4a6b07 | 1,733 | py | Python | tests/test_data/test_structured.py | el/elizabeth | dc82cd9d2bb230acdb2f1a49bc16b1c3d12077ff | [
"MIT"
] | null | null | null | tests/test_data/test_structured.py | el/elizabeth | dc82cd9d2bb230acdb2f1a49bc16b1c3d12077ff | [
"MIT"
] | null | null | null | tests/test_data/test_structured.py | el/elizabeth | dc82cd9d2bb230acdb2f1a49bc16b1c3d12077ff | [
"MIT"
] | 1 | 2019-12-27T19:34:17.000Z | 2019-12-27T19:34:17.000Z | # -*- coding: utf-8 -*-
import re
import csv
from elizabeth.core.providers import Structured
from unittest import TestCase
from elizabeth.core import interdata as common
from ._patterns import STR_REGEX
class StructuredBaseTest(TestCase):
    """Unit tests for the Structured data provider (English locale)."""

    def setUp(self):
        self.structured = Structured('en')

    def tearDown(self):
        del self.structured

    def test_str(self):
        self.assertTrue(re.match(STR_REGEX, self.structured.__str__()))

    def test_css(self):
        result = self.structured.css()
        self.assertIsInstance(result, str)  # returns string
        self.assertIn(":", result)  # contains property assignments
        self.assertEqual(result[-1], "}")  # closed at end
        self.assertEqual(result.split(" ")[1][0], "{")  # opened after selector

    def test_css_property(self):
        result = self.structured.css_property()
        self.assertEqual(len(result.split(" ")), 2)  # contains one property assignment
        self.assertIn(":", result)  # contains any property assignments

    def test_html_attribute_value(self):
        result = self.structured.html_attribute_value("a", "href")
        self.assertEqual(result[0:4], "http")
        with self.assertRaises(NotImplementedError):
            self.structured.html_attribute_value("a", "bogus")
        # BUG FIX: the original mutated the shared HTML_CONTAINER_TAGS
        # mapping without restoring it, leaking "bogus" into every test
        # that ran afterwards. Save and restore the original value.
        original = common.HTML_CONTAINER_TAGS['div']['class']
        try:
            common.HTML_CONTAINER_TAGS['div']['class'] = "bogus"
            with self.assertRaises(NotImplementedError):
                from elizabeth.core.providers import Structured
                Structured('en').html_attribute_value("div", "class")
        finally:
            common.HTML_CONTAINER_TAGS['div']['class'] = original

    def test_html(self):
        result = self.structured.html()
        self.assertEqual(result[0], "<")  # tag is enclosed
        self.assertEqual(result[-1], ">")  # tag is enclosed
| 36.104167 | 87 | 0.663589 |
import re
import csv
from elizabeth.core.providers import Structured
from unittest import TestCase
from elizabeth.core import interdata as common
from ._patterns import STR_REGEX
class StructuredBaseTest(TestCase):
def setUp(self):
self.structured = Structured('en')
def tearDown(self):
del self.structured
def test_str(self):
self.assertTrue(re.match(STR_REGEX, self.structured.__str__()))
def test_css(self):
result = self.structured.css()
self.assertIsInstance(result, str)
self.assertIn(":", result)
self.assertEqual(result[-1], "}")
self.assertEqual(result.split(" ")[1][0], "{")
def test_css_property(self):
result = self.structured.css_property()
self.assertEqual(len(result.split(" ")), 2)
self.assertIn(":", result)
def test_html_attribute_value(self):
result = self.structured.html_attribute_value("a", "href")
self.assertEqual(result[0:4], "http")
with self.assertRaises(NotImplementedError):
self.structured.html_attribute_value("a", "bogus")
with self.assertRaises(NotImplementedError):
common.HTML_CONTAINER_TAGS['div']['class'] = "bogus"
from elizabeth.core.providers import Structured
Structured('en').html_attribute_value("div", "class")
def test_html(self):
result = self.structured.html()
self.assertEqual(result[0], "<")
self.assertEqual(result[-1], ">")
| true | true |
f72aba59680a1148f9878e622e1a32e4cbb7706a | 212 | py | Python | mayan/apps/document_states/managers.py | eshbeata/open-paperless | 6b9ed1f21908116ad2795b3785b2dbd66713d66e | [
"Apache-2.0"
] | 2,743 | 2017-12-18T07:12:30.000Z | 2022-03-27T17:21:25.000Z | mayan/apps/document_states/managers.py | kyper999/mayan-edms | ca7b8301a1f68548e8e718d42a728a500d67286e | [
"Apache-2.0"
] | 15 | 2020-06-06T00:00:48.000Z | 2022-03-12T00:03:54.000Z | mayan/apps/document_states/managers.py | kyper999/mayan-edms | ca7b8301a1f68548e8e718d42a728a500d67286e | [
"Apache-2.0"
] | 257 | 2017-12-18T03:12:58.000Z | 2022-03-25T08:59:10.000Z | from django.db import models
class WorkflowManager(models.Manager):
def launch_for(self, document):
for workflow in document.document_type.workflows.all():
workflow.launch_for(document)
| 26.5 | 63 | 0.726415 | from django.db import models
class WorkflowManager(models.Manager):
def launch_for(self, document):
for workflow in document.document_type.workflows.all():
workflow.launch_for(document)
| true | true |
f72aba799455f6cc85c2295c96a774ff725ab946 | 18,200 | py | Python | tests/onnx/test_onnx_model_export.py | kokoff/mlflow | 062722b172f403e613c41f9bb024b3e1673dfe31 | [
"Apache-2.0"
] | 1 | 2020-08-17T21:50:32.000Z | 2020-08-17T21:50:32.000Z | tests/onnx/test_onnx_model_export.py | kokoff/mlflow | 062722b172f403e613c41f9bb024b3e1673dfe31 | [
"Apache-2.0"
] | null | null | null | tests/onnx/test_onnx_model_export.py | kokoff/mlflow | 062722b172f403e613c41f9bb024b3e1673dfe31 | [
"Apache-2.0"
] | null | null | null | import sys
import os
import pytest
import mock
from keras.models import Sequential
from keras.layers import Dense
import sklearn.datasets as datasets
import pandas as pd
import numpy as np
import yaml
import tensorflow as tf
import mlflow.pyfunc.scoring_server as pyfunc_scoring_server
from mlflow import pyfunc
from mlflow.models import infer_signature, Model
from mlflow.models.utils import _read_example
from mlflow.utils.file_utils import TempDir
from tests.helper_functions import pyfunc_serve_and_score_model
from mlflow.tracking.artifact_utils import _download_artifact_from_uri
from mlflow.utils.environment import _mlflow_conda_env
from mlflow.utils.model_utils import _get_flavor_configuration
# Skip every test in this module on Python 2 interpreters.
pytestmark = pytest.mark.skipif(
    (sys.version_info < (3, 6)), reason="Tests require Python 3 to run!"
)
@pytest.fixture(scope="module")
def data():
    """Iris dataset as a (features, target) pair of pandas objects."""
    iris = datasets.load_iris()
    frame = pd.DataFrame(
        data=np.c_[iris["data"], iris["target"]], columns=iris["feature_names"] + ["target"]
    )
    target = frame["target"]
    features = frame.drop("target", axis=1)
    return features, target
@pytest.fixture(scope="module")
def model(data):
    """A tiny Keras regression model (4 -> 3 -> 1) trained on the iris data."""
    x, y = data
    model = Sequential()
    model.add(Dense(3, input_dim=4))
    model.add(Dense(1))
    model.compile(loss="mean_squared_error", optimizer="SGD")
    model.fit(x, y)
    return model
@pytest.fixture(scope="module")
def onnx_model(model):
    """The Keras model converted to ONNX via onnxmltools."""
    import onnxmltools
    return onnxmltools.convert_keras(model)
@pytest.fixture(scope="module")
def sklearn_model(data):
    """A scikit-learn LogisticRegression fit on the iris data."""
    from sklearn.linear_model import LogisticRegression
    x, y = data
    model = LogisticRegression()
    model.fit(x, y)
    return model
@pytest.fixture(scope="module")
def onnx_sklearn_model(sklearn_model):
    """The sklearn model converted to ONNX with a float32 [None, 4] input."""
    import onnxmltools
    from skl2onnx.common.data_types import FloatTensorType

    initial_type = [("float_input", FloatTensorType([None, 4]))]
    onx = onnxmltools.convert_sklearn(sklearn_model, initial_types=initial_type)
    return onx
@pytest.fixture(scope="module")
def predicted(model, data):
    """Reference predictions from the original (pre-conversion) Keras model."""
    return model.predict(data[0])
@pytest.fixture(scope="module")
def tf_model_multiple_inputs_float64():
    """A TF1 graph multiplying two float64 placeholders of length 10."""
    graph = tf.Graph()
    with graph.as_default():
        t_in1 = tf.placeholder(tf.float64, 10, name="first_input")
        t_in2 = tf.placeholder(tf.float64, 10, name="second_input")
        t_out = tf.multiply(t_in1, t_in2)
        tf.identity(t_out, name="output")
    return graph
@pytest.fixture(scope="module")
def tf_model_multiple_inputs_float32():
    """Same two-input multiply graph as above, but with float32 inputs."""
    graph = tf.Graph()
    with graph.as_default():
        t_in1 = tf.placeholder(tf.float32, 10, name="first_input")
        t_in2 = tf.placeholder(tf.float32, 10, name="second_input")
        t_out = tf.multiply(t_in1, t_in2)
        tf.identity(t_out, name="output")
    return graph
@pytest.fixture(scope="module")
def onnx_model_multiple_inputs_float64(tf_model_multiple_inputs_float64):
    """The float64 two-input TF graph converted to an ONNX ModelProto."""
    import tf2onnx

    sess = tf.Session(graph=tf_model_multiple_inputs_float64)
    onnx_graph = tf2onnx.tfonnx.process_tf_graph(
        sess.graph, input_names=["first_input:0", "second_input:0"], output_names=["output:0"]
    )
    model_proto = onnx_graph.make_model("test")
    return model_proto
@pytest.fixture(scope="module")
def onnx_model_multiple_inputs_float32(tf_model_multiple_inputs_float32):
    """The float32 two-input TF graph converted to an ONNX ModelProto."""
    import tf2onnx

    sess = tf.Session(graph=tf_model_multiple_inputs_float32)
    onnx_graph = tf2onnx.tfonnx.process_tf_graph(
        sess.graph, input_names=["first_input:0", "second_input:0"], output_names=["output:0"]
    )
    model_proto = onnx_graph.make_model("test")
    return model_proto
@pytest.fixture(scope="module")
def data_multiple_inputs():
    """Random inputs keyed by the graph's tensor names.

    NOTE(review): the RNG is unseeded, so values differ between test
    sessions; the expected output fixture is derived from the same data,
    so the tests stay self-consistent — confirm this is intentional.
    """
    return pd.DataFrame(
        {"first_input:0": np.random.random(10), "second_input:0": np.random.random(10)}
    )
@pytest.fixture(scope="module")
def predicted_multiple_inputs(data_multiple_inputs):
    """Expected model output: elementwise product of the two inputs."""
    return pd.DataFrame(
        data_multiple_inputs["first_input:0"] * data_multiple_inputs["second_input:0"]
    )
@pytest.fixture
def model_path(tmpdir):
    """A per-test path for saving models."""
    return os.path.join(tmpdir.strpath, "model")
@pytest.fixture
def onnx_custom_env(tmpdir):
    """A conda environment YAML with the deps these tests need installed."""
    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
    _mlflow_conda_env(
        conda_env,
        additional_conda_deps=["pytest", "keras"],
        additional_pip_deps=["onnx", "onnxmltools"],
    )
    return conda_env
@pytest.mark.large
def test_cast_float64_to_float32():
    """_cast_float64_to_float32 downcasts float64 columns and leaves bools alone."""
    import mlflow.onnx

    df = pd.DataFrame([[1.0, 2.1], [True, False]], columns=["col1", "col2"])
    df["col1"] = df["col1"].astype(np.float64)
    # BUG FIX: the deprecated ``np.bool`` alias was removed in NumPy 1.24;
    # the builtin ``bool`` is the supported equivalent and compares equal
    # to the resulting dtype.
    df["col2"] = df["col2"].astype(bool)
    df2 = mlflow.onnx._OnnxModelWrapper._cast_float64_to_float32(df, df.columns)
    assert df2["col1"].dtype == np.float32 and df2["col2"].dtype == bool
# TODO: Use the default conda environment once MLflow's Travis build supports the onnxruntime
# library
@pytest.mark.large
def test_model_save_load(onnx_model, model_path, onnx_custom_env):
    """A saved ONNX model loads back and is validated via onnx.checker."""
    import onnx
    import mlflow.onnx

    mlflow.onnx.save_model(onnx_model, model_path, conda_env=onnx_custom_env)
    # Loading ONNX model; mock the checker to verify load_model invokes it.
    onnx.checker.check_model = mock.Mock()
    mlflow.onnx.load_model(model_path)
    assert onnx.checker.check_model.called
@pytest.mark.large
def test_signature_and_examples_are_saved_correctly(onnx_model, data, onnx_custom_env):
    """Every combination of (signature, input example) round-trips via save_model."""
    import mlflow.onnx

    model = onnx_model
    signature_ = infer_signature(*data)
    example_ = data[0].head(3)
    # Exercise all four combinations: with/without signature x with/without example.
    for signature in (None, signature_):
        for example in (None, example_):
            with TempDir() as tmp:
                path = tmp.path("model")
                mlflow.onnx.save_model(
                    model,
                    path=path,
                    conda_env=onnx_custom_env,
                    signature=signature,
                    input_example=example,
                )
                mlflow_model = Model.load(path)
                assert signature == mlflow_model.signature
                if example is None:
                    assert mlflow_model.saved_input_example_info is None
                else:
                    assert all((_read_example(mlflow_model, path) == example).all())
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_save_load_evaluate_pyfunc_format(onnx_model, model_path, data, predicted):
    """The pyfunc flavor of a saved ONNX model reproduces the Keras predictions."""
    import mlflow.onnx

    x = data[0]
    mlflow.onnx.save_model(onnx_model, model_path)
    # Loading pyfunc model
    pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_path)
    assert np.allclose(pyfunc_loaded.predict(x).values, predicted, rtol=1e-05, atol=1e-05)
    # pyfunc serve: the scoring server must return the same predictions.
    scoring_response = pyfunc_serve_and_score_model(
        model_uri=os.path.abspath(model_path),
        data=x,
        content_type=pyfunc_scoring_server.CONTENT_TYPE_JSON_SPLIT_ORIENTED,
    )
    assert np.allclose(
        pd.read_json(scoring_response.content, orient="records").values.astype(np.float32),
        predicted,
        rtol=1e-05,
        atol=1e-05,
    )
# TODO: Use the default conda environment once MLflow's Travis build supports the onnxruntime
# library
@pytest.mark.large
def test_model_save_load_multiple_inputs(
    onnx_model_multiple_inputs_float64, model_path, onnx_custom_env
):
    """A multi-input ONNX model saves and loads (checker invocation verified)."""
    import onnx
    import mlflow.onnx

    mlflow.onnx.save_model(
        onnx_model_multiple_inputs_float64, model_path, conda_env=onnx_custom_env
    )
    # Loading ONNX model; mock the checker to verify load_model invokes it.
    onnx.checker.check_model = mock.Mock()
    mlflow.onnx.load_model(model_path)
    assert onnx.checker.check_model.called
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_save_load_evaluate_pyfunc_format_multiple_inputs(
    onnx_model_multiple_inputs_float64, data_multiple_inputs, predicted_multiple_inputs, model_path
):
    """The pyfunc flavor of a multi-input model matches the expected products."""
    import mlflow.onnx

    mlflow.onnx.save_model(onnx_model_multiple_inputs_float64, model_path)
    # Loading pyfunc model
    pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_path)
    assert np.allclose(
        pyfunc_loaded.predict(data_multiple_inputs).values,
        predicted_multiple_inputs.values,
        rtol=1e-05,
        atol=1e-05,
    )
    # pyfunc serve: the scoring server must return the same predictions.
    scoring_response = pyfunc_serve_and_score_model(
        model_uri=os.path.abspath(model_path),
        data=data_multiple_inputs,
        content_type=pyfunc_scoring_server.CONTENT_TYPE_JSON_SPLIT_ORIENTED,
    )
    assert np.allclose(
        pd.read_json(scoring_response.content, orient="records").values,
        predicted_multiple_inputs.values,
        rtol=1e-05,
        atol=1e-05,
    )
# TODO: Remove test, along with explicit casting, when https://github.com/mlflow/mlflow/issues/1286
# is fixed.
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_pyfunc_representation_of_float32_model_casts_and_evalutes_float64_inputs(
    onnx_model_multiple_inputs_float32, model_path, data_multiple_inputs, predicted_multiple_inputs
):
    """
    The ``python_function`` representation of an MLflow model with the ONNX flavor
    casts 64-bit floats to 32-bit floats automatically before evaluating, as opposed
    to throwing an unexpected type exception. This behavior is implemented due
    to the issue described in https://github.com/mlflow/mlflow/issues/1286 where
    the JSON representation of a Pandas DataFrame does not always preserve float
    precision (e.g., 32-bit floats may be converted to 64-bit floats when persisting a
    DataFrame as JSON).
    """
    import mlflow.onnx

    mlflow.onnx.save_model(onnx_model_multiple_inputs_float32, model_path)
    # Loading pyfunc model; float64 input must be auto-cast to float32.
    pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_path)
    assert np.allclose(
        pyfunc_loaded.predict(data_multiple_inputs.astype("float64")).values,
        predicted_multiple_inputs.astype("float32").values,
        rtol=1e-05,
        atol=1e-05,
    )
    # Only float64 is auto-cast; other mismatched dtypes must still raise.
    with pytest.raises(RuntimeError):
        pyfunc_loaded.predict(data_multiple_inputs.astype("int32"))
# TODO: Use the default conda environment once MLflow's Travis build supports the onnxruntime
# library
@pytest.mark.large
def test_model_log(onnx_model, onnx_custom_env):
    """log_model works both inside an explicit run and with an auto-started run."""
    # pylint: disable=unused-argument
    import onnx
    import mlflow.onnx

    # should_start_run tests whether or not calling log_model() automatically starts a run.
    for should_start_run in [False, True]:
        try:
            if should_start_run:
                mlflow.start_run()
            artifact_path = "onnx_model"
            mlflow.onnx.log_model(
                onnx_model=onnx_model, artifact_path=artifact_path, conda_env=onnx_custom_env
            )
            model_uri = "runs:/{run_id}/{artifact_path}".format(
                run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
            )
            # Load model; mock the checker to verify load_model invokes it.
            onnx.checker.check_model = mock.Mock()
            mlflow.onnx.load_model(model_uri)
            assert onnx.checker.check_model.called
        finally:
            mlflow.end_run()
def test_log_model_calls_register_model(onnx_model, onnx_custom_env):
    """Passing registered_model_name to log_model triggers model registration."""
    import mlflow.onnx

    artifact_path = "model"
    register_model_patch = mock.patch("mlflow.register_model")
    with mlflow.start_run(), register_model_patch:
        mlflow.onnx.log_model(
            onnx_model=onnx_model,
            artifact_path=artifact_path,
            conda_env=onnx_custom_env,
            registered_model_name="AdsModel1",
        )
        model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
        )
        mlflow.register_model.assert_called_once_with(model_uri, "AdsModel1")
def test_log_model_no_registered_model_name(onnx_model, onnx_custom_env):
import mlflow.onnx
artifact_path = "model"
register_model_patch = mock.patch("mlflow.register_model")
with mlflow.start_run(), register_model_patch:
mlflow.onnx.log_model(
onnx_model=onnx_model, artifact_path=artifact_path, conda_env=onnx_custom_env
)
mlflow.register_model.assert_not_called()
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_log_evaluate_pyfunc_format(onnx_model, data, predicted):
import mlflow.onnx
x = data[0]
# should_start_run tests whether or not calling log_model() automatically starts a run.
for should_start_run in [False, True]:
try:
if should_start_run:
mlflow.start_run()
artifact_path = "onnx_model"
mlflow.onnx.log_model(onnx_model=onnx_model, artifact_path=artifact_path)
model_uri = "runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
# Loading pyfunc model
pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_uri=model_uri)
assert np.allclose(pyfunc_loaded.predict(x).values, predicted, rtol=1e-05, atol=1e-05)
finally:
mlflow.end_run()
@pytest.mark.large
def test_model_save_persists_specified_conda_env_in_mlflow_model_directory(
onnx_model, model_path, onnx_custom_env
):
import mlflow.onnx
mlflow.onnx.save_model(onnx_model=onnx_model, path=model_path, conda_env=onnx_custom_env)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
saved_conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
assert os.path.exists(saved_conda_env_path)
assert saved_conda_env_path != onnx_custom_env
with open(onnx_custom_env, "r") as f:
onnx_custom_env_parsed = yaml.safe_load(f)
with open(saved_conda_env_path, "r") as f:
saved_conda_env_parsed = yaml.safe_load(f)
assert saved_conda_env_parsed == onnx_custom_env_parsed
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_save_accepts_conda_env_as_dict(onnx_model, model_path):
import mlflow.onnx
conda_env = dict(mlflow.onnx.get_default_conda_env())
conda_env["dependencies"].append("pytest")
mlflow.onnx.save_model(onnx_model=onnx_model, path=model_path, conda_env=conda_env)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
saved_conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
assert os.path.exists(saved_conda_env_path)
with open(saved_conda_env_path, "r") as f:
saved_conda_env_parsed = yaml.safe_load(f)
assert saved_conda_env_parsed == conda_env
@pytest.mark.large
def test_model_log_persists_specified_conda_env_in_mlflow_model_directory(
onnx_model, onnx_custom_env
):
import mlflow.onnx
artifact_path = "model"
with mlflow.start_run():
mlflow.onnx.log_model(
onnx_model=onnx_model, artifact_path=artifact_path, conda_env=onnx_custom_env
)
model_path = _download_artifact_from_uri(
"runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
saved_conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
assert os.path.exists(saved_conda_env_path)
assert saved_conda_env_path != onnx_custom_env
with open(onnx_custom_env, "r") as f:
onnx_custom_env_parsed = yaml.safe_load(f)
with open(saved_conda_env_path, "r") as f:
saved_conda_env_parsed = yaml.safe_load(f)
assert saved_conda_env_parsed == onnx_custom_env_parsed
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_save_without_specified_conda_env_uses_default_env_with_expected_dependencies(
onnx_model, model_path
):
import mlflow.onnx
mlflow.onnx.save_model(onnx_model=onnx_model, path=model_path, conda_env=None)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
with open(conda_env_path, "r") as f:
conda_env = yaml.safe_load(f)
assert conda_env == mlflow.onnx.get_default_conda_env()
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_log_without_specified_conda_env_uses_default_env_with_expected_dependencies(
onnx_model,
):
import mlflow.onnx
artifact_path = "model"
with mlflow.start_run():
mlflow.onnx.log_model(onnx_model=onnx_model, artifact_path=artifact_path, conda_env=None)
model_path = _download_artifact_from_uri(
"runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
with open(conda_env_path, "r") as f:
conda_env = yaml.safe_load(f)
assert conda_env == mlflow.onnx.get_default_conda_env()
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_pyfunc_predict_supports_models_with_list_outputs(onnx_sklearn_model, model_path, data):
"""
https://github.com/mlflow/mlflow/issues/2499
User encountered issue where an sklearn model, converted to onnx, would return a list response.
The issue resulted in an error because MLflow assumed it would be a numpy array. Therefore,
the this test validates the service does not receive that error when using such a model.
"""
import mlflow.onnx
x = data[0]
mlflow.onnx.save_model(onnx_sklearn_model, model_path)
wrapper = mlflow.pyfunc.load_model(model_path)
wrapper.predict(pd.DataFrame(x))
| 34.469697 | 99 | 0.720879 | import sys
import os
import pytest
import mock
from keras.models import Sequential
from keras.layers import Dense
import sklearn.datasets as datasets
import pandas as pd
import numpy as np
import yaml
import tensorflow as tf
import mlflow.pyfunc.scoring_server as pyfunc_scoring_server
from mlflow import pyfunc
from mlflow.models import infer_signature, Model
from mlflow.models.utils import _read_example
from mlflow.utils.file_utils import TempDir
from tests.helper_functions import pyfunc_serve_and_score_model
from mlflow.tracking.artifact_utils import _download_artifact_from_uri
from mlflow.utils.environment import _mlflow_conda_env
from mlflow.utils.model_utils import _get_flavor_configuration
pytestmark = pytest.mark.skipif(
(sys.version_info < (3, 6)), reason="Tests require Python 3 to run!"
)
@pytest.fixture(scope="module")
def data():
iris = datasets.load_iris()
data = pd.DataFrame(
data=np.c_[iris["data"], iris["target"]], columns=iris["feature_names"] + ["target"]
)
y = data["target"]
x = data.drop("target", axis=1)
return x, y
@pytest.fixture(scope="module")
def model(data):
x, y = data
model = Sequential()
model.add(Dense(3, input_dim=4))
model.add(Dense(1))
model.compile(loss="mean_squared_error", optimizer="SGD")
model.fit(x, y)
return model
@pytest.fixture(scope="module")
def onnx_model(model):
import onnxmltools
return onnxmltools.convert_keras(model)
@pytest.fixture(scope="module")
def sklearn_model(data):
from sklearn.linear_model import LogisticRegression
x, y = data
model = LogisticRegression()
model.fit(x, y)
return model
@pytest.fixture(scope="module")
def onnx_sklearn_model(sklearn_model):
import onnxmltools
from skl2onnx.common.data_types import FloatTensorType
initial_type = [("float_input", FloatTensorType([None, 4]))]
onx = onnxmltools.convert_sklearn(sklearn_model, initial_types=initial_type)
return onx
@pytest.fixture(scope="module")
def predicted(model, data):
return model.predict(data[0])
@pytest.fixture(scope="module")
def tf_model_multiple_inputs_float64():
graph = tf.Graph()
with graph.as_default():
t_in1 = tf.placeholder(tf.float64, 10, name="first_input")
t_in2 = tf.placeholder(tf.float64, 10, name="second_input")
t_out = tf.multiply(t_in1, t_in2)
tf.identity(t_out, name="output")
return graph
@pytest.fixture(scope="module")
def tf_model_multiple_inputs_float32():
graph = tf.Graph()
with graph.as_default():
t_in1 = tf.placeholder(tf.float32, 10, name="first_input")
t_in2 = tf.placeholder(tf.float32, 10, name="second_input")
t_out = tf.multiply(t_in1, t_in2)
tf.identity(t_out, name="output")
return graph
@pytest.fixture(scope="module")
def onnx_model_multiple_inputs_float64(tf_model_multiple_inputs_float64):
import tf2onnx
sess = tf.Session(graph=tf_model_multiple_inputs_float64)
onnx_graph = tf2onnx.tfonnx.process_tf_graph(
sess.graph, input_names=["first_input:0", "second_input:0"], output_names=["output:0"]
)
model_proto = onnx_graph.make_model("test")
return model_proto
@pytest.fixture(scope="module")
def onnx_model_multiple_inputs_float32(tf_model_multiple_inputs_float32):
import tf2onnx
sess = tf.Session(graph=tf_model_multiple_inputs_float32)
onnx_graph = tf2onnx.tfonnx.process_tf_graph(
sess.graph, input_names=["first_input:0", "second_input:0"], output_names=["output:0"]
)
model_proto = onnx_graph.make_model("test")
return model_proto
@pytest.fixture(scope="module")
def data_multiple_inputs():
return pd.DataFrame(
{"first_input:0": np.random.random(10), "second_input:0": np.random.random(10)}
)
@pytest.fixture(scope="module")
def predicted_multiple_inputs(data_multiple_inputs):
return pd.DataFrame(
data_multiple_inputs["first_input:0"] * data_multiple_inputs["second_input:0"]
)
@pytest.fixture
def model_path(tmpdir):
return os.path.join(tmpdir.strpath, "model")
@pytest.fixture
def onnx_custom_env(tmpdir):
conda_env = os.path.join(str(tmpdir), "conda_env.yml")
_mlflow_conda_env(
conda_env,
additional_conda_deps=["pytest", "keras"],
additional_pip_deps=["onnx", "onnxmltools"],
)
return conda_env
@pytest.mark.large
def test_cast_float64_to_float32():
import mlflow.onnx
df = pd.DataFrame([[1.0, 2.1], [True, False]], columns=["col1", "col2"])
df["col1"] = df["col1"].astype(np.float64)
df["col2"] = df["col2"].astype(np.bool)
df2 = mlflow.onnx._OnnxModelWrapper._cast_float64_to_float32(df, df.columns)
assert df2["col1"].dtype == np.float32 and df2["col2"].dtype == np.bool
# library
@pytest.mark.large
def test_model_save_load(onnx_model, model_path, onnx_custom_env):
import onnx
import mlflow.onnx
mlflow.onnx.save_model(onnx_model, model_path, conda_env=onnx_custom_env)
# Loading ONNX model
onnx.checker.check_model = mock.Mock()
mlflow.onnx.load_model(model_path)
assert onnx.checker.check_model.called
@pytest.mark.large
def test_signature_and_examples_are_saved_correctly(onnx_model, data, onnx_custom_env):
import mlflow.onnx
model = onnx_model
signature_ = infer_signature(*data)
example_ = data[0].head(3)
for signature in (None, signature_):
for example in (None, example_):
with TempDir() as tmp:
path = tmp.path("model")
mlflow.onnx.save_model(
model,
path=path,
conda_env=onnx_custom_env,
signature=signature,
input_example=example,
)
mlflow_model = Model.load(path)
assert signature == mlflow_model.signature
if example is None:
assert mlflow_model.saved_input_example_info is None
else:
assert all((_read_example(mlflow_model, path) == example).all())
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_save_load_evaluate_pyfunc_format(onnx_model, model_path, data, predicted):
import mlflow.onnx
x = data[0]
mlflow.onnx.save_model(onnx_model, model_path)
pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_path)
assert np.allclose(pyfunc_loaded.predict(x).values, predicted, rtol=1e-05, atol=1e-05)
scoring_response = pyfunc_serve_and_score_model(
model_uri=os.path.abspath(model_path),
data=x,
content_type=pyfunc_scoring_server.CONTENT_TYPE_JSON_SPLIT_ORIENTED,
)
assert np.allclose(
pd.read_json(scoring_response.content, orient="records").values.astype(np.float32),
predicted,
rtol=1e-05,
atol=1e-05,
)
# library
@pytest.mark.large
def test_model_save_load_multiple_inputs(
onnx_model_multiple_inputs_float64, model_path, onnx_custom_env
):
import onnx
import mlflow.onnx
mlflow.onnx.save_model(
onnx_model_multiple_inputs_float64, model_path, conda_env=onnx_custom_env
)
# Loading ONNX model
onnx.checker.check_model = mock.Mock()
mlflow.onnx.load_model(model_path)
assert onnx.checker.check_model.called
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_save_load_evaluate_pyfunc_format_multiple_inputs(
onnx_model_multiple_inputs_float64, data_multiple_inputs, predicted_multiple_inputs, model_path
):
import mlflow.onnx
mlflow.onnx.save_model(onnx_model_multiple_inputs_float64, model_path)
pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_path)
assert np.allclose(
pyfunc_loaded.predict(data_multiple_inputs).values,
predicted_multiple_inputs.values,
rtol=1e-05,
atol=1e-05,
)
scoring_response = pyfunc_serve_and_score_model(
model_uri=os.path.abspath(model_path),
data=data_multiple_inputs,
content_type=pyfunc_scoring_server.CONTENT_TYPE_JSON_SPLIT_ORIENTED,
)
assert np.allclose(
pd.read_json(scoring_response.content, orient="records").values,
predicted_multiple_inputs.values,
rtol=1e-05,
atol=1e-05,
)
@pytest.mark.release
def test_pyfunc_representation_of_float32_model_casts_and_evalutes_float64_inputs(
onnx_model_multiple_inputs_float32, model_path, data_multiple_inputs, predicted_multiple_inputs
):
import mlflow.onnx
mlflow.onnx.save_model(onnx_model_multiple_inputs_float32, model_path)
# Loading pyfunc model
pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_path)
assert np.allclose(
pyfunc_loaded.predict(data_multiple_inputs.astype("float64")).values,
predicted_multiple_inputs.astype("float32").values,
rtol=1e-05,
atol=1e-05,
)
with pytest.raises(RuntimeError):
pyfunc_loaded.predict(data_multiple_inputs.astype("int32"))
# TODO: Use the default conda environment once MLflow's Travis build supports the onnxruntime
@pytest.mark.large
def test_model_log(onnx_model, onnx_custom_env):
import onnx
import mlflow.onnx
for should_start_run in [False, True]:
try:
if should_start_run:
mlflow.start_run()
artifact_path = "onnx_model"
mlflow.onnx.log_model(
onnx_model=onnx_model, artifact_path=artifact_path, conda_env=onnx_custom_env
)
model_uri = "runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
onnx.checker.check_model = mock.Mock()
mlflow.onnx.load_model(model_uri)
assert onnx.checker.check_model.called
finally:
mlflow.end_run()
def test_log_model_calls_register_model(onnx_model, onnx_custom_env):
import mlflow.onnx
artifact_path = "model"
register_model_patch = mock.patch("mlflow.register_model")
with mlflow.start_run(), register_model_patch:
mlflow.onnx.log_model(
onnx_model=onnx_model,
artifact_path=artifact_path,
conda_env=onnx_custom_env,
registered_model_name="AdsModel1",
)
model_uri = "runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
mlflow.register_model.assert_called_once_with(model_uri, "AdsModel1")
def test_log_model_no_registered_model_name(onnx_model, onnx_custom_env):
import mlflow.onnx
artifact_path = "model"
register_model_patch = mock.patch("mlflow.register_model")
with mlflow.start_run(), register_model_patch:
mlflow.onnx.log_model(
onnx_model=onnx_model, artifact_path=artifact_path, conda_env=onnx_custom_env
)
mlflow.register_model.assert_not_called()
@pytest.mark.release
def test_model_log_evaluate_pyfunc_format(onnx_model, data, predicted):
import mlflow.onnx
x = data[0]
# should_start_run tests whether or not calling log_model() automatically starts a run.
for should_start_run in [False, True]:
try:
if should_start_run:
mlflow.start_run()
artifact_path = "onnx_model"
mlflow.onnx.log_model(onnx_model=onnx_model, artifact_path=artifact_path)
model_uri = "runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
# Loading pyfunc model
pyfunc_loaded = mlflow.pyfunc.load_pyfunc(model_uri=model_uri)
assert np.allclose(pyfunc_loaded.predict(x).values, predicted, rtol=1e-05, atol=1e-05)
finally:
mlflow.end_run()
@pytest.mark.large
def test_model_save_persists_specified_conda_env_in_mlflow_model_directory(
onnx_model, model_path, onnx_custom_env
):
import mlflow.onnx
mlflow.onnx.save_model(onnx_model=onnx_model, path=model_path, conda_env=onnx_custom_env)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
saved_conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
assert os.path.exists(saved_conda_env_path)
assert saved_conda_env_path != onnx_custom_env
with open(onnx_custom_env, "r") as f:
onnx_custom_env_parsed = yaml.safe_load(f)
with open(saved_conda_env_path, "r") as f:
saved_conda_env_parsed = yaml.safe_load(f)
assert saved_conda_env_parsed == onnx_custom_env_parsed
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_save_accepts_conda_env_as_dict(onnx_model, model_path):
import mlflow.onnx
conda_env = dict(mlflow.onnx.get_default_conda_env())
conda_env["dependencies"].append("pytest")
mlflow.onnx.save_model(onnx_model=onnx_model, path=model_path, conda_env=conda_env)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
saved_conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
assert os.path.exists(saved_conda_env_path)
with open(saved_conda_env_path, "r") as f:
saved_conda_env_parsed = yaml.safe_load(f)
assert saved_conda_env_parsed == conda_env
@pytest.mark.large
def test_model_log_persists_specified_conda_env_in_mlflow_model_directory(
onnx_model, onnx_custom_env
):
import mlflow.onnx
artifact_path = "model"
with mlflow.start_run():
mlflow.onnx.log_model(
onnx_model=onnx_model, artifact_path=artifact_path, conda_env=onnx_custom_env
)
model_path = _download_artifact_from_uri(
"runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
saved_conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
assert os.path.exists(saved_conda_env_path)
assert saved_conda_env_path != onnx_custom_env
with open(onnx_custom_env, "r") as f:
onnx_custom_env_parsed = yaml.safe_load(f)
with open(saved_conda_env_path, "r") as f:
saved_conda_env_parsed = yaml.safe_load(f)
assert saved_conda_env_parsed == onnx_custom_env_parsed
@pytest.mark.release
def test_model_save_without_specified_conda_env_uses_default_env_with_expected_dependencies(
onnx_model, model_path
):
import mlflow.onnx
mlflow.onnx.save_model(onnx_model=onnx_model, path=model_path, conda_env=None)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
with open(conda_env_path, "r") as f:
conda_env = yaml.safe_load(f)
assert conda_env == mlflow.onnx.get_default_conda_env()
# TODO: Mark this as large once MLflow's Travis build supports the onnxruntime library
@pytest.mark.release
def test_model_log_without_specified_conda_env_uses_default_env_with_expected_dependencies(
onnx_model,
):
import mlflow.onnx
artifact_path = "model"
with mlflow.start_run():
mlflow.onnx.log_model(onnx_model=onnx_model, artifact_path=artifact_path, conda_env=None)
model_path = _download_artifact_from_uri(
"runs:/{run_id}/{artifact_path}".format(
run_id=mlflow.active_run().info.run_id, artifact_path=artifact_path
)
)
pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
with open(conda_env_path, "r") as f:
conda_env = yaml.safe_load(f)
assert conda_env == mlflow.onnx.get_default_conda_env()
@pytest.mark.release
def test_pyfunc_predict_supports_models_with_list_outputs(onnx_sklearn_model, model_path, data):
import mlflow.onnx
x = data[0]
mlflow.onnx.save_model(onnx_sklearn_model, model_path)
wrapper = mlflow.pyfunc.load_model(model_path)
wrapper.predict(pd.DataFrame(x))
| true | true |
f72abaa0b0eceda9463707c365f8a64adba68be2 | 30,975 | py | Python | .happydoc.fsa.py | osteele/pyfsa | 58a44106d3e3918a17a5a106584d1a91636f9d52 | [
"Artistic-1.0-Perl"
] | 7 | 2015-11-25T10:52:43.000Z | 2018-09-11T21:35:25.000Z | .happydoc.fsa.py | osteele/pyfsa | 58a44106d3e3918a17a5a106584d1a91636f9d52 | [
"Artistic-1.0-Perl"
] | null | null | null | .happydoc.fsa.py | osteele/pyfsa | 58a44106d3e3918a17a5a106584d1a91636f9d52 | [
"Artistic-1.0-Perl"
] | 7 | 2015-12-23T05:22:20.000Z | 2021-07-13T19:17:32.000Z | (S'822675c38199b44f85699c1653abb0fc'
p1
(ihappydoclib.parseinfo.moduleinfo
ModuleInfo
p2
(dp3
S'_namespaces'
p4
((dp5
S'FSA'
p6
(ihappydoclib.parseinfo.classinfo
ClassInfo
p7
(dp8
g4
((dp9
(dp10
S'nextStates'
p11
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp12
g4
((dp13
(dp14
tsS'_exception_info'
p15
(dsS'_parameter_names'
p16
(S'self'
p17
S'state'
p18
S'input'
p19
tsS'_parameter_info'
p20
(dp21
g19
(NNNtsg17
(NNNtsg18
(NNNtssS'_filename'
p22
S'fsa.py'
p23
sS'_docstring'
p24
S''
sS'_name'
p25
g11
sS'_parent'
p26
g7
sS'_function_info'
p27
g14
sS'_configuration_values'
p28
(dsS'_class_info'
p29
g13
sS'_comment_info'
p30
(dp31
(S'FSA'
p32
S'labelMatches'
tS' \n Accepting\n \n'
p33
s(g32
S'sorted'
tS' \n Reductions\n \n'
p34
s(g32
S'create'
tS' \n Copying\n \n'
p35
s(g32
S'complement'
tS' \n FSA operations\n \n'
p36
s(g32
S'hasArcMetadata'
tS' \n Arc Metadata Accessors\n \n'
p37
s(g32
S'__repr__'
tS' \n Presentation Methods\n \n'
p38
s(g32
S'makeStateTable'
tS' \n Initialization\n \n'
p39
s(g32
S'isEmpty'
tS' \n Predicates\n \n'
p40
s(g32
S'epsilonClosure'
tS' \n Accessors\n \n'
p41
ssS'_comments'
p42
S''
sbsS'makeStateTable'
p43
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp44
g4
((dp45
(dp46
tsg15
(dsg16
(S'self'
p47
S'default'
p48
tsg20
(dp49
g48
(I1
S'None'
Ntsg47
(NNNtssg22
g23
sg24
S''
sg25
g43
sg26
g7
sg27
g46
sg28
(dsg29
g45
sg30
g31
sg42
g39
sbsS'tuple'
p50
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp51
g4
((dp52
(dp53
tsg15
(dsg16
(S'self'
p54
tsg20
(dp55
g54
(NNNtssg22
g23
sg24
S''
sg25
g50
sg26
g7
sg27
g53
sg28
(dsg29
g52
sg30
g31
sg42
S''
sbsS'collectStates'
p56
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp57
g4
((dp58
(dp59
tsg15
(dsg16
(S'self'
p60
S'transitions'
p61
S'initialState'
p62
S'finalStates'
p63
tsg20
(dp64
g60
(NNNtsg61
(NNNtsg63
(NNNtsg62
(NNNtssg22
g23
sg24
S''
sg25
g56
sg26
g7
sg27
g59
sg28
(dsg29
g58
sg30
g31
sg42
S''
sbsS'complement'
p65
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp66
g4
((dp67
(dp68
tsg15
(dsg16
(S'self'
p69
tsg20
(dp70
g69
(NNNtssg22
g23
sg24
S''
sg25
g65
sg26
g7
sg27
g68
sg28
(dsg29
g67
sg30
g31
sg42
g36
sbsS'labels'
p71
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp72
g4
((dp73
(dp74
tsg15
(dsg16
(S'self'
p75
tsg20
(dp76
g75
(NNNtssg22
g23
sg24
S'Returns a list of transition labels.'
sg25
g71
sg26
g7
sg27
g74
sg28
(dsg29
g73
sg30
g31
sg42
S''
sbsS'determinized'
p77
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp78
g4
((dp79
(dp80
tsg15
(dsg16
(S'self'
p81
tsg20
(dp82
g81
(NNNtssg22
g23
sg24
S'Returns a deterministic FSA that accepts the same language.'
sg25
g77
sg26
g7
sg27
g80
sg28
(dsg29
g79
sg30
g31
sg42
S''
sbsS'minimized'
p83
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp84
g4
((dp85
(dp86
tsg15
(dsg16
(S'self'
p87
tsg20
(dp88
g87
(NNNtssg22
g23
sg24
S'Returns a minimal FSA that accepts the same language.'
sg25
g83
sg26
g7
sg27
g86
sg28
(dsg29
g85
sg30
g31
sg42
S''
sbsS'initializeTransitionTables'
p89
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp90
g4
((dp91
(dp92
tsg15
(dsg16
(S'self'
p93
tsg20
(dp94
g93
(NNNtssg22
g23
sg24
S''
sg25
g89
sg26
g7
sg27
g92
sg28
(dsg29
g91
sg30
g31
sg42
S''
sbsS'coerce'
p95
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp96
g4
((dp97
(dp98
tsg15
(dsg16
(S'self'
p99
S'klass'
p100
tsg20
(dp101
g99
(NNNtsg100
(NNNtssg22
g23
sg24
S''
sg25
g95
sg26
g7
sg27
g98
sg28
(dsg29
g97
sg30
g31
sg42
S''
sbsS'hasArcMetadata'
p102
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp103
g4
((dp104
(dp105
tsg15
(dsg16
(S'self'
p106
tsg20
(dp107
g106
(NNNtssg22
g23
sg24
S''
sg25
g102
sg26
g7
sg27
g105
sg28
(dsg29
g104
sg30
g31
sg42
g37
sbsS'__str__'
p108
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp109
g4
((dp110
(dp111
tsg15
(dsg16
(S'self'
p112
tsg20
(dp113
g112
(NNNtssg22
g23
sg24
S''
sg25
g108
sg26
g7
sg27
g111
sg28
(dsg29
g110
sg30
g31
sg42
S''
sbsS'stateLabelString'
p114
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp115
g4
((dp116
(dp117
tsg15
(dsg16
(S'self'
p118
S'state'
p119
tsg20
(dp120
g118
(NNNtsg119
(NNNtssg22
g23
sg24
S"A template method for specifying a state's label, for use in dot\n diagrams. If this returns None, the default (the string representation\n of the state) is used."
sg25
g114
sg26
g7
sg27
g117
sg28
(dsg29
g116
sg30
g31
sg42
S''
sbsS'labelMatches'
p121
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp122
g4
((dp123
(dp124
tsg15
(dsg16
(S'self'
p125
S'label'
p126
S'input'
p127
tsg20
(dp128
g127
(NNNtsg125
(NNNtsg126
(NNNtssg22
g23
sg24
S''
sg25
g121
sg26
g7
sg27
g124
sg28
(dsg29
g123
sg30
g31
sg42
g33
sbsS'computeEpsilonClosure'
p129
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp130
g4
((dp131
(dp132
tsg15
(dsg16
(S'self'
p133
S'state'
p134
tsg20
(dp135
g133
(NNNtsg134
(NNNtssg22
g23
sg24
S''
sg25
g129
sg26
g7
sg27
g132
sg28
(dsg29
g131
sg30
g31
sg42
S''
sbsS'sorted'
p136
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp137
g4
((dp138
(dp139
tsg15
(dsg16
(S'self'
p140
S'initial'
p141
tsg20
(dp142
g140
(NNNtsg141
(I1
S'0'
Ntssg22
g23
sg24
S''
sg25
g136
sg26
g7
sg27
g139
sg28
(dsg29
g138
sg30
g31
sg42
g34
sbsS'copy'
p143
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp144
g4
((dp145
(dp146
tsg15
(dsg16
(S'self'
p147
S'*args'
p148
tsg20
(dp149
g148
(NNNtsg147
(NNNtssg22
g23
sg24
S''
sg25
g143
sg26
g7
sg27
g146
sg28
(dsg29
g145
sg30
g31
sg42
S''
sbsS'addArcMetadataFor'
p150
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp151
g4
((dp152
(dp153
tsg15
(dsg16
(S'self'
p154
S'transition'
p155
S'data'
p156
tsg20
(dp157
g154
(NNNtsg155
(NNNtsg156
(NNNtssg22
g23
sg24
S''
sg25
g150
sg26
g7
sg27
g153
sg28
(dsg29
g152
sg30
g31
sg42
S''
sbsS'__init__'
p158
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp159
g4
((dp160
(dp161
tsg15
(dsg16
(S'self'
p162
S'states'
p163
S'alphabet'
p164
S'transitions'
p165
S'initialState'
p166
S'finalStates'
p167
S'arcMetadata'
p168
tsg20
(dp169
g163
(NNNtsg167
(NNNtsg164
(NNNtsg162
(NNNtsg165
(NNNtsg168
(I1
S'[]'
Ntsg166
(NNNtssg22
g23
sg24
S''
sg25
g158
sg26
g7
sg27
g161
sg28
(dsg29
g160
sg30
g31
sg42
S''
sbsS'getArcMetadata'
p170
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp171
g4
((dp172
(dp173
tsg15
(dsg16
(S'self'
p174
tsg20
(dp175
g174
(NNNtssg22
g23
sg24
S''
sg25
g170
sg26
g7
sg27
g173
sg28
(dsg29
g172
sg30
g31
sg42
S''
sbsS'setArcMetadataFor'
p176
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp177
g4
((dp178
(dp179
tsg15
(dsg16
(S'self'
p180
S'transition'
p181
S'data'
p182
tsg20
(dp183
g180
(NNNtsg181
(NNNtsg182
(NNNtssg22
g23
sg24
S''
sg25
g176
sg26
g7
sg27
g179
sg28
(dsg29
g178
sg30
g31
sg42
S''
sbsS'withoutEpsilons'
p184
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp185
g4
((dp186
(dp187
tsg15
(dsg16
(S'self'
p188
tsg20
(dp189
g188
(NNNtssg22
g23
sg24
S''
sg25
g184
sg26
g7
sg27
g187
sg28
(dsg29
g186
sg30
g31
sg42
S''
sbsS'addArcMetadata'
p190
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp191
g4
((dp192
(dp193
tsg15
(dsg16
(S'self'
p194
S'list'
p195
tsg20
(dp196
g194
(NNNtsg195
(NNNtssg22
g23
sg24
S''
sg25
g190
sg26
g7
sg27
g193
sg28
(dsg29
g192
sg30
g31
sg42
S''
sbsS'epsilonClosure'
p197
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp198
g4
((dp199
(dp200
tsg15
(dsg16
(S'self'
p201
S'state'
p202
tsg20
(dp203
g201
(NNNtsg202
(NNNtssg22
g23
sg24
S''
sg25
g197
sg26
g7
sg27
g200
sg28
(dsg29
g199
sg30
g31
sg42
g41
sbsS'additionalTransitionInfoString'
p204
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp205
g4
((dp206
(dp207
tsg15
(dsg16
(S'self'
p208
S'transition'
p209
tsg20
(dp210
g208
(NNNtsg209
(NNNtssg22
g23
sg24
S''
sg25
g204
sg26
g7
sg27
g207
sg28
(dsg29
g206
sg30
g31
sg42
S''
sbsS'create'
p211
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp212
g4
((dp213
(dp214
tsg15
(dsg16
(S'self'
p215
S'*args'
p216
tsg20
(dp217
g216
(NNNtsg215
(NNNtssg22
g23
sg24
S''
sg25
g211
sg26
g7
sg27
g214
sg28
(dsg29
g213
sg30
g31
sg42
g35
sbsS'isEmpty'
p218
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp219
g4
((dp220
(dp221
tsg15
(dsg16
(S'self'
p222
tsg20
(dp223
g222
(NNNtssg22
g23
sg24
S''
sg25
g218
sg26
g7
sg27
g221
sg28
(dsg29
g220
sg30
g31
sg42
g40
sbsS'accepts'
p224
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp225
g4
((dp226
(dp227
tsg15
(dsg16
(S'self'
p228
S'sequence'
p229
tsg20
(dp230
g228
(NNNtsg229
(NNNtssg22
g23
sg24
S''
sg25
g224
sg26
g7
sg27
g227
sg28
(dsg29
g226
sg30
g31
sg42
S''
sbsS'getArcMetadataFor'
p231
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp232
g4
((dp233
(dp234
tsg15
(dsg16
(S'self'
p235
S'transition'
p236
S'default'
p237
tsg20
(dp238
g237
(I1
S'None'
Ntsg235
(NNNtsg236
(NNNtssg22
g23
sg24
S''
sg25
g231
sg26
g7
sg27
g234
sg28
(dsg29
g233
sg30
g31
sg42
S''
sbsS'nextState'
p239
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp240
g4
((dp241
(dp242
tsg15
(dsg16
(S'self'
p243
S'state'
p244
S'input'
p245
tsg20
(dp246
g245
(NNNtsg243
(NNNtsg244
(NNNtssg22
g23
sg24
S''
sg25
g239
sg26
g7
sg27
g242
sg28
(dsg29
g241
sg30
g31
sg42
S''
sbsS'trimmed'
p247
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp248
g4
((dp249
(dp250
tsg15
(dsg16
(S'self'
p251
tsg20
(dp252
g251
(NNNtssg22
g23
sg24
S"Returns an equivalent FSA that doesn't include unreachable states,\n or states that only lead to dead states."
sg25
g247
sg26
g7
sg27
g250
sg28
(dsg29
g249
sg30
g31
sg42
S''
sbsS'isFSA'
p253
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp254
g4
((dp255
(dp256
tsg15
(dsg16
(S'self'
p257
tsg20
(dp258
g257
(NNNtssg22
g23
sg24
S''
sg25
g253
sg26
g7
sg27
g256
sg28
(dsg29
g255
sg30
g31
sg42
S''
sbsS'creationArgs'
p259
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp260
g4
((dp261
(dp262
tsg15
(dsg16
(S'self'
p263
tsg20
(dp264
g263
(NNNtssg22
g23
sg24
S''
sg25
g259
sg26
g7
sg27
g262
sg28
(dsg29
g261
sg30
g31
sg42
S''
sbsS'__repr__'
p265
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp266
g4
((dp267
(dp268
tsg15
(dsg16
(S'self'
p269
tsg20
(dp270
g269
(NNNtssg22
g23
sg24
S''
sg25
g265
sg26
g7
sg27
g268
sg28
(dsg29
g267
sg30
g31
sg42
g38
sbsS'setArcMetadata'
p271
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp272
g4
((dp273
(dp274
tsg15
(dsg16
(S'self'
p275
S'list'
p276
tsg20
(dp277
g275
(NNNtsg276
(NNNtssg22
g23
sg24
S''
sg25
g271
sg26
g7
sg27
g274
sg28
(dsg29
g273
sg30
g31
sg42
S''
sbsS'nextAvailableState'
p278
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp279
g4
((dp280
(dp281
tsg15
(dsg16
(S'self'
p282
tsg20
(dp283
g282
(NNNtssg22
g23
sg24
S''
sg25
g278
sg26
g7
sg27
g281
sg28
(dsg29
g280
sg30
g31
sg42
S''
sbsS'computeEpsilonClosures'
p284
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp285
g4
((dp286
(dp287
tsg15
(dsg16
(S'self'
p288
tsg20
(dp289
g288
(NNNtssg22
g23
sg24
S''
sg25
g284
sg26
g7
sg27
g287
sg28
(dsg29
g286
sg30
g31
sg42
S''
sbsS'view'
p290
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp291
g4
((dp292
(dp293
tsg15
(dsg16
(S'self'
p294
tsg20
(dp295
g294
(NNNtssg22
g23
sg24
S''
sg25
g290
sg26
g7
sg27
g293
sg28
(dsg29
g292
sg30
g31
sg42
S''
sbsS'nextStateSet'
p296
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp297
g4
((dp298
(dp299
tsg15
(dsg16
(S'self'
p300
S'states'
p301
S'input'
p302
tsg20
(dp303
g301
(NNNtsg302
(NNNtsg300
(NNNtssg22
g23
sg24
S''
sg25
g296
sg26
g7
sg27
g299
sg28
(dsg29
g298
sg30
g31
sg42
S''
sbsS'toDotString'
p304
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp305
g4
((dp306
(dp307
tsg15
(dsg16
(S'self'
p308
tsg20
(dp309
g308
(NNNtssg22
g23
sg24
S'Returns a string that can be printed by the DOT tool at\n http://www.research.att.com/sw/tools/graphviz/ .'
sg25
g304
sg26
g7
sg27
g307
sg28
(dsg29
g306
sg30
g31
sg42
S''
sbsS'transitionsFrom'
p310
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp311
g4
((dp312
(dp313
tsg15
(dsg16
(S'self'
p314
S'state'
p315
tsg20
(dp316
g314
(NNNtsg315
(NNNtssg22
g23
sg24
S''
sg25
g310
sg26
g7
sg27
g313
sg28
(dsg29
g312
sg30
g31
sg42
S''
sbstsg22
g23
sg24
S''
sS'_class_member_info'
p317
(lsg25
g6
sg26
g2
sg27
g10
sg42
S''
sg28
(dsg29
g9
sg30
g31
sS'_base_class_info'
p318
(lsbs(dp319
S'trim'
p320
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp321
g4
((dp322
(dp323
tsg15
(dsg16
(S'fsa'
p324
tsg20
(dp325
g324
(NNNtssg22
g23
sg24
S''
sg25
g320
sg26
g2
sg27
g323
sg28
(dsg29
g322
sg30
g31
sg42
S''
sbsS'completion'
p326
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp327
g4
((dp328
(dp329
tsg15
(dsg16
(S'fsa'
p330
tsg20
(dp331
g330
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts the same language as the argument, but that\n lands in a defined state for every input.'
sg25
g326
sg26
g2
sg27
g329
sg28
(dsg29
g328
sg30
g31
sg42
S''
sbsS'singleton'
p332
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp333
g4
((dp334
(dp335
tsg15
(dsg16
(S'symbol'
p336
S'alphabet'
p337
S'arcMetadata'
p338
tsg20
(dp339
g337
(I1
S'None'
Ntsg336
(NNNtsg338
(I1
S'None'
Ntssg22
g23
sg24
S''
sg25
g332
sg26
g2
sg27
g335
sg28
(dsg29
g334
sg30
g31
sg42
S''
sbsS'option'
p340
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp341
g4
((dp342
(dp343
tsg15
(dsg16
(S'fsa'
p344
tsg20
(dp345
g344
(NNNtssg22
g23
sg24
S''
sg25
g340
sg26
g2
sg27
g343
sg28
(dsg29
g342
sg30
g31
sg42
S''
sbsS'sequence'
p346
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp347
g4
((dp348
(dp349
tsg15
(dsg16
(S'sequence'
p350
S'alphabet'
p351
tsg20
(dp352
g351
(I1
S'None'
Ntsg350
(NNNtssg22
g23
sg24
S''
sg25
g346
sg26
g2
sg27
g349
sg28
(dsg29
g348
sg30
g31
sg42
S''
sbsS'equivalent'
p353
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp354
g4
((dp355
(dp356
tsg15
(dsg16
(S'a'
S'b'
tsg20
(dp357
S'a'
(NNNtsS'b'
(NNNtssg22
g23
sg24
S'Return true ifff a and b accept the same language.'
sg25
g353
sg26
g2
sg27
g356
sg28
(dsg29
g355
sg30
g31
sg42
S''
sbsS'unionLabelSets'
p358
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp359
g4
((dp360
(dp361
tsg15
(dsg16
(S'alist'
p362
S'blist'
p363
S'alphabet'
p364
tsg20
(dp365
g364
(I1
S'None'
Ntsg363
(NNNtsg362
(NNNtssg22
g23
sg24
S''
sg25
g358
sg26
g2
sg27
g361
sg28
(dsg29
g360
sg30
g31
sg42
S''
sbsS'symbolComplement'
p366
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp367
g4
((dp368
(dp369
tsg15
(dsg16
(S'symbol'
p370
tsg20
(dp371
g370
(NNNtssg22
g23
sg24
S''
sg25
g366
sg26
g2
sg27
g369
sg28
(dsg29
g368
sg30
g31
sg42
S''
sbsS'concatenation'
p372
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp373
g4
((dp374
(dp375
tsg15
(dsg16
(S'a'
S'*args'
p376
tsg20
(dp377
S'a'
(NNNtsg376
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts the language consisting of the concatenation\n of strings recognized by the arguments.'
sg25
g372
sg26
g2
sg27
g375
sg28
(dsg29
g374
sg30
g31
sg42
S''
sbsS'sort'
p378
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp379
g4
((dp380
(dp381
tsg15
(dsg16
(S'fsa'
p382
tsg20
(dp383
g382
(NNNtssg22
g23
sg24
S''
sg25
g378
sg26
g2
sg27
g381
sg28
(dsg29
g380
sg30
g31
sg42
S''
sbsS'labelIntersection'
p384
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp385
g4
((dp386
(dp387
tsg15
(dsg16
(S'l1'
p388
S'l2'
p389
tsg20
(dp390
g389
(NNNtsg388
(NNNtssg22
g23
sg24
S''
sg25
g384
sg26
g2
sg27
g387
sg28
(dsg29
g386
sg30
g31
sg42
S''
sbsS'intersectLabelSets'
p391
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp392
g4
((dp393
(dp394
tsg15
(dsg16
(S'alist'
p395
S'blist'
p396
tsg20
(dp397
g396
(NNNtsg395
(NNNtssg22
g23
sg24
S''
sg25
g391
sg26
g2
sg27
g394
sg28
(dsg29
g393
sg30
g31
sg42
S''
sbsS'labelString'
p398
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp399
g4
((dp400
(dp401
tsg15
(dsg16
(S'label'
p402
tsg20
(dp403
g402
(NNNtssg22
g23
sg24
S''
sg25
g398
sg26
g2
sg27
g401
sg28
(dsg29
g400
sg30
g31
sg42
S''
sbsS'compileItem'
p404
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp405
g4
((dp406
(dp407
tsg15
(dp408
S"'unimplemented'"
Nssg16
(S'str'
p409
S'index'
p410
S'options'
p411
tsg20
(dp412
g410
(NNNtsg411
(NNNtsg409
(NNNtssg22
g23
sg24
S''
sg25
g404
sg26
g2
sg27
g407
sg28
(dsg29
g406
sg30
g31
sg42
S''
sbsS'labelComplement'
p413
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp414
g4
((dp415
(dp416
tsg15
(dsg16
(S'label'
p417
S'alphabet'
p418
tsg20
(dp419
g418
(NNNtsg417
(NNNtssg22
g23
sg24
S''
sg25
g413
sg26
g2
sg27
g416
sg28
(dsg29
g415
sg30
g31
sg42
S''
sbsS'removeDuplicates'
p420
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp421
g4
((dp422
(dp423
tsg15
(dsg16
(S'sequence'
p424
tsg20
(dp425
g424
(NNNtssg22
g23
sg24
S''
sg25
g420
sg26
g2
sg27
g423
sg28
(dsg29
g422
sg30
g31
sg42
S''
sbsS'difference'
p426
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp427
g4
((dp428
(dp429
tsg15
(dsg16
(S'a'
S'b'
tsg20
(dp430
S'a'
(NNNtsS'b'
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts those strings accepted by the first\n argument, but not the second.'
sg25
g426
sg26
g2
sg27
g429
sg28
(dsg29
g428
sg30
g31
sg42
S''
sbsS'compileREExpr'
p431
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp432
g4
((dp433
(dp434
tsg15
(dsg16
(S'str'
p435
S'index'
p436
S'options'
p437
tsg20
(dp438
g436
(NNNtsg437
(NNNtsg435
(NNNtssg22
g23
sg24
S''
sg25
g431
sg26
g2
sg27
g434
sg28
(dsg29
g433
sg30
g31
sg42
S''
sbsS'complementLabelSet'
p439
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp440
g4
((dp441
(dp442
tsg15
(dsg16
(S'labels'
p443
S'alphabet'
p444
tsg20
(dp445
g444
(I1
S'None'
Ntsg443
(NNNtssg22
g23
sg24
S''
sg25
g439
sg26
g2
sg27
g442
sg28
(dsg29
g441
sg30
g31
sg42
S''
sbsS'compileRE'
p446
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp447
g4
((dp448
(dp449
tsg15
(dp450
S"'extra ' + ` ')' `"
Nssg16
(S's'
S'**options'
p451
tsg20
(dp452
S's'
(NNNtsg451
(NNNtssg22
g23
sg24
S''
sg25
g446
sg26
g2
sg27
g449
sg28
(dsg29
g448
sg30
g31
sg42
S''
sbsS'closure'
p453
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp454
g4
((dp455
(dp456
tsg15
(dsg16
(S'arg'
p457
tsg20
(dp458
g457
(NNNtssg22
g23
sg24
S''
sg25
g453
sg26
g2
sg27
g456
sg28
(dsg29
g455
sg30
g31
sg42
S''
sbsS'labelComplements'
p459
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp460
g4
((dp461
(dp462
tsg15
(dsg16
(S'label'
p463
S'alphabet'
p464
tsg20
(dp465
g464
(NNNtsg463
(NNNtssg22
g23
sg24
S''
sg25
g459
sg26
g2
sg27
g462
sg28
(dsg29
g461
sg30
g31
sg42
S''
sbsS'intersection'
p466
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp467
g4
((dp468
(dp469
tsg15
(dsg16
(S'a'
S'b'
tsg20
(dp470
S'a'
(NNNtsS'b'
(NNNtssg22
g23
sg24
S'Returns the intersection of two FSAs'
sg25
g466
sg26
g2
sg27
g469
sg28
(dsg29
g468
sg30
g31
sg42
S''
sbsS'reverse'
p471
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp472
g4
((dp473
(dp474
tsg15
(dsg16
(S'fsa'
p475
tsg20
(dp476
g475
(NNNtssg22
g23
sg24
S''
sg25
g471
sg26
g2
sg27
g474
sg28
(dsg29
g473
sg30
g31
sg42
S''
sbsS'determinize'
p477
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp478
g4
((dp479
(dp480
tsg15
(dsg16
(S'fsa'
p481
tsg20
(dp482
g481
(NNNtssg22
g23
sg24
S''
sg25
g477
sg26
g2
sg27
g480
sg28
(dsg29
g479
sg30
g31
sg42
S''
sbsS'union'
p483
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp484
g4
((dp485
(dp486
tsg15
(dsg16
(S'*args'
p487
tsg20
(dp488
g487
(NNNtssg22
g23
sg24
S''
sg25
g483
sg26
g2
sg27
g486
sg28
(dsg29
g485
sg30
g31
sg42
S''
sbsS'symbolIntersection'
p489
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp490
g4
((dp491
(dp492
tsg15
(dsg16
(S's1'
p493
S's2'
p494
tsg20
(dp495
g494
(NNNtsg493
(NNNtssg22
g23
sg24
S''
sg25
g489
sg26
g2
sg27
g492
sg28
(dsg29
g491
sg30
g31
sg42
S''
sbsS'compileSequence'
p496
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp497
g4
((dp498
(dp499
tsg15
(dsg16
(S'str'
p500
S'index'
p501
S'options'
p502
tsg20
(dp503
g501
(NNNtsg502
(NNNtsg500
(NNNtssg22
g23
sg24
S''
sg25
g496
sg26
g2
sg27
g499
sg28
(dsg29
g498
sg30
g31
sg42
S''
sbsS'iteration'
p504
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp505
g4
((dp506
(dp507
tsg15
(dsg16
(S'fsa'
p508
S'min'
p509
S'max'
p510
tsg20
(dp511
g508
(NNNtsg510
(I1
S'None'
Ntsg509
(I1
S'1'
Ntssg22
g23
sg24
S"\n >>> equivalent(iteration(singleton('a', 0, 2)), compileRE('|a|aa'))\n >>> equivalent(iteration(singleton('a', 1, 2)), compileRE('a|aa'))\n >>> equivalent(iteration(singleton('a', 1)), compileRE('aa*'))\n "
sg25
g504
sg26
g2
sg27
g507
sg28
(dsg29
g506
sg30
g31
sg42
S''
sbsS'constructLabelMap'
p512
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp513
g4
((dp514
(dp515
tsg15
(dsg16
(S'labels'
p516
S'alphabet'
p517
S'includeComplements'
p518
tsg20
(dp519
g517
(NNNtsg516
(NNNtsg518
(I1
S'0'
Ntssg22
g23
sg24
S'Return a list of (newLabel, positives), where newLabel is an\n intersection of elements from labels and their complemens, and positives is\n a list of labels that have non-empty intersections with newLabel.'
sg25
g512
sg26
g2
sg27
g515
sg28
(dsg29
g514
sg30
g31
sg42
S''
sbsS'toFSA'
p520
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp521
g4
((dp522
(dp523
tsg15
(dsg16
(S'arg'
p524
tsg20
(dp525
g524
(NNNtssg22
g23
sg24
S''
sg25
g520
sg26
g2
sg27
g523
sg28
(dsg29
g522
sg30
g31
sg42
S''
sbsS'compileConjunction'
p526
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp527
g4
((dp528
(dp529
tsg15
(dsg16
(S'str'
p530
S'index'
p531
S'options'
p532
tsg20
(dp533
g531
(NNNtsg532
(NNNtsg530
(NNNtssg22
g23
sg24
S''
sg25
g526
sg26
g2
sg27
g529
sg28
(dsg29
g528
sg30
g31
sg42
S''
sbsS'minimize'
p534
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp535
g4
((dp536
(dp537
tsg15
(dsg16
(S'fsa'
p538
tsg20
(dp539
g538
(NNNtssg22
g23
sg24
S''
sg25
g534
sg26
g2
sg27
g537
sg28
(dsg29
g536
sg30
g31
sg42
S''
sbsS'consolidateTransitions'
p540
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp541
g4
((dp542
(dp543
tsg15
(dsg16
(S'transitions'
p544
tsg20
(dp545
g544
(NNNtssg22
g23
sg24
S''
sg25
g540
sg26
g2
sg27
g543
sg28
(dsg29
g542
sg30
g31
sg42
S''
sbsS'containment'
p546
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp547
g4
((dp548
(dp549
tsg15
(dsg16
(S'arg'
p550
S'occurrences'
p551
tsg20
(dp552
g551
(I1
S'1'
Ntsg550
(NNNtssg22
g23
sg24
S'Returns an FSA that matches sequences containing at least _count_\n occurrences\n of _symbol_.'
sg25
g546
sg26
g2
sg27
g549
sg28
(dsg29
g548
sg30
g31
sg42
S''
sbsS'labelMatches'
p553
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp554
g4
((dp555
(dp556
tsg15
(dsg16
(S'label'
p557
S'input'
p558
tsg20
(dp559
g558
(NNNtsg557
(NNNtssg22
g23
sg24
S''
sg25
g553
sg26
g2
sg27
g556
sg28
(dsg29
g555
sg30
g31
sg42
S''
sbsS'_labelIntersection'
p560
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp561
g4
((dp562
(dp563
tsg15
(dsg16
(S'l1'
p564
S'l2'
p565
tsg20
(dp566
g565
(NNNtsg564
(NNNtssg22
g23
sg24
S''
sg25
g560
sg26
g2
sg27
g563
sg28
(dsg29
g562
sg30
g31
sg42
S''
sbsS'complement'
p567
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp568
g4
((dp569
(dp570
tsg15
(dsg16
(S'arg'
p571
tsg20
(dp572
g571
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts exactly those strings that the argument does\n not.'
sg25
g567
sg26
g2
sg27
g570
sg28
(dsg29
g569
sg30
g31
sg42
S''
sbsS'view'
p573
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp574
g4
((dp575
(dp576
tsg15
(dsg16
(S'str'
p577
tsg20
(dp578
g577
(NNNtssg22
g23
sg24
S''
sg25
g573
sg26
g2
sg27
g576
sg28
(dsg29
g575
sg30
g31
sg42
S''
sbstsS'_import_info'
p579
(ihappydoclib.parseinfo.imports
ImportInfo
(dp580
S'_named_imports'
p581
(dp582
S'types'
(lp583
S'InstanceType'
aS'ListType'
aS'IntType'
aS'LongType'
assS'_straight_imports'
p584
(lsbsg22
g23
sg24
S'""" methods to manipulate finite-state automata\n\nThis module defines an FSA class, for representing and operating on\nfinite-state automata (FSAs). FSAs can be used to represent regular\nexpressions and to test sequences for membership in the languages\ndescribed by regular expressions.\n\nFSAs can be deterministic or nondeterministic, and they can contain\nepsilon transitions. Methods to determinize an automaton (also\neliminating its epsilon transitions), and to minimize an automaton,\nare provided.\n\nThe transition labels for an FSA can be symbols from an alphabet, as\nin the standard formal definition of an FSA, but they can also be\ninstances which represent predicates. If these instances implement\ninstance.matches(), then the FSA nextState() function and accepts()\npredicate can be used. If they implement instance.complement() and\ninstance.intersection(), the FSA can be be determinized and minimized,\nto find a minimal deterministic FSA that accepts an equivalent\nlanguage.\n\n\nQuick Start\n----------\nInstances of FSA can be created out of labels (for instance, strings)\nby the singleton() function, and combined to create more complex FSAs\nthrough the complement(), closure(), concatenation(), union(), and\nother constructors. For example, concatenation(singleton(\'a\'),\nunion(singleton(\'b\'), closure(singleton(\'c\')))) creates an FSA that\naccepts the strings \'a\', \'ab\', \'ac\', \'acc\', \'accc\', and so on.\n\nInstances of FSA can also be created with the compileRE() function,\nwhich compiles a simple regular expression (using only \'*\', \'?\', \'+\',\n\'|\', \'(\', and \')\' as metacharacters) into an FSA. For example,\ncompileRE(\'a(b|c*)\') returns an FSA equivalent to the example in the\nprevious paragraph.\n\nFSAs can be determinized, to create equivalent FSAs (FSAs accepting\nthe same language) with unique successor states for each input, and\nminimized, to create an equivalent deterministic FSA with the smallest\nnumber of states. 
FSAs can also be complemented, intersected, unioned,\nand so forth as described under \'FSA Functions\' below.\n\n\nFSA Methods\n-----------\nThe class FSA defines the following methods.\n\nAcceptance\n``````````\nfsa.nextStates(state, input)\n returns a list of states\nfsa.nextState(state, input)\n returns None or a single state if\n |nextStates| <= 1, otherwise it raises an exception\nfsa.nextStateSet(states, input)\n returns a list of states\nfsa.accepts(sequence)\n returns true or false\n\nAccessors and predicates\n````````````````````````\nisEmpty()\n returns true iff the language accepted by the FSA is the empty language\nlabels()\n returns a list of labels that are used in any transition\nnextAvailableState()\n returns an integer n such that no states in the FSA\n are numeric values >= n\n\nReductions\n``````````\nsorted(initial=0)\n returns an equivalent FSA whose states are numbered\n upwards from 0\ndeterminized()\n returns an equivalent deterministic FSA\nminimized()\n returns an equivalent minimal FSA\ntrimmed()\n returns an equivalent FSA that contains no unreachable or dead\n states\n\nPresentation\n````````````\ntoDotString()\n returns a string suitable as *.dot file for the \'dot\'\n program from AT&T GraphViz\nview()\n views the FSA with a gs viewer, if gs and dot are installed\n\n\nFSA Functions\n------------\nConstruction from FSAs\n``````````````````````\ncomplement(a)\n returns an fsa that accepts exactly those sequences that its\n argument does not\nclosure(a)\n returns an fsa that accepts sequences composed of zero or more\n concatenations of sequences accepted by the argument\nconcatenation(a, b)\n returns an fsa that accepts sequences composed of a\n sequence accepted by a, followed by a sequence accepted by b\ncontainment(a, occurrences=1)\n returns an fsa that accepts sequences that\n contain at least occurrences occurrences of a subsequence recognized by the\n argument.\ndifference(a, b)\n returns an fsa that accepts those sequences 
accepted by a\n but not b\nintersection(a, b)\n returns an fsa that accepts sequences accepted by both a\n and b\niteration(a, min=1, max=None)\n returns an fsa that accepts sequences\n consisting of from min to max (or any number, if max is None) of sequences\n accepted by its first argument\noption(a)\n equivalent to union(a, EMPTY_STRING_FSA)\nreverse(a)\n returns an fsa that accepts strings whose reversal is accepted by\n the argument\nunion(a, b)\n returns an fsa that accepts sequences accepted by both a and b\n\nPredicates\n``````````\nequivalent(a, b)\n returns true iff a and b accept the same language\n\nReductions (these equivalent to the similarly-named methods)\n````````````````````````````````````````````````````````````\ndeterminize(fsa)\n returns an equivalent deterministic FSA\nminimize(fsa)\n returns an equivalent minimal FSA\nsort(fsa, initial=0)\n returns an equivalent FSA whose states are numbered from\n initial\ntrim(fsa)\n returns an equivalent FSA that contains no dead or unreachable\n states\n\nConstruction from labels\n````````````````````````\ncompileRE(string)\n returns an FSA that accepts the language described by\n string, where string is a list of symbols and \'*\', \'+\', \'?\', and \'|\' operators,\n with \'(\' and \')\' to control precedence.\nsequence(sequence)\n returns an fsa that accepts sequences that are matched by\n the elements of the argument. For example, sequence(\'abc\') returns an fsa that\n accepts \'abc\' and [\'a\', \'b\', \'c\'].\nsingleton(label)\n returns an fsa that accepts singletons whose elements are\n matched by label. 
For example, singleton(\'a\') returns an fsa that accepts only\n the string \'a\'.\n\n\nFSA Constants\n------------\nEMPTY_STRING_FSA is an FSA that accepts the language consisting only\nof the empty string.\n\nNULL_FSA is an FSA that accepts the null language.\n\nUNIVERSAL_FSA is an FSA that accepts S*, where S is any object.\n\n\nFSA instance creation\n---------------------\nFSA is initialized with a list of states, an alphabet, a list of\ntransition, an initial state, and a list of final states. If fsa is an\nFSA, fsa.tuple() returns these values in that order, i.e. (states,\nalphabet, transitions, initialState, finalStates). They\'re also\navailable as fields of fsa with those names.\n\nEach element of transition is a tuple of a start state, an end state,\nand a label: (startState, endSTate, label).\n\nIf the list of states is None, it\'s computed from initialState,\nfinalStates, and the states in transitions.\n\nIf alphabet is None, an open alphabet is used: labels are assumed to\nbe objects that implements label.matches(input), label.complement(),\nand label.intersection() as follows:\n\n - label.matches(input) returns true iff label matches input\n - label.complement() returnseither a label or a list of labels which,\n together with the receiver, partition the input alphabet\n - label.intersection(other) returns either None (if label and other don\'t\n both match any symbol), or a label that matches the set of symbols that\n both label and other match\n\nAs a special case, strings can be used as labels. If a strings \'a\' and\n\'b\' are used as a label and there\'s no alphabet, \'~a\' and \'~b\' are\ntheir respective complements, and \'~a&~b\' is the intersection of \'~a\'\nand \'~b\'. 
(The intersections of \'a\' and \'b\', \'a\' and \'~b\', and \'~a\'\nand \'b\' are, respectively, None, \'a\', and \'b\'.)\n\n\nGoals\n-----\nDesign Goals:\n\n- easy to use\n- easy to read (simple implementation, direct expression of algorithms)\n- extensible\n\nNon-Goals:\n\n- efficiency\n"""'
sg25
S'fsa'
sg26
Nsg27
g319
sg28
(dp585
S'include_comments'
p586
I1
sS'cacheFilePrefix'
p587
S'.happydoc.'
p588
sS'useCache'
p589
I1
sS'docStringFormat'
p590
S'StructuredText'
p591
ssg29
g5
sg30
g31
sg42
S''
sbt. | 10.604245 | 7,695 | 0.761195 | (S'822675c38199b44f85699c1653abb0fc'
p1
(ihappydoclib.parseinfo.moduleinfo
ModuleInfo
p2
(dp3
S'_namespaces'
p4
((dp5
S'FSA'
p6
(ihappydoclib.parseinfo.classinfo
ClassInfo
p7
(dp8
g4
((dp9
(dp10
S'nextStates'
p11
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp12
g4
((dp13
(dp14
tsS'_exception_info'
p15
(dsS'_parameter_names'
p16
(S'self'
p17
S'state'
p18
S'input'
p19
tsS'_parameter_info'
p20
(dp21
g19
(NNNtsg17
(NNNtsg18
(NNNtssS'_filename'
p22
S'fsa.py'
p23
sS'_docstring'
p24
S''
sS'_name'
p25
g11
sS'_parent'
p26
g7
sS'_function_info'
p27
g14
sS'_configuration_values'
p28
(dsS'_class_info'
p29
g13
sS'_comment_info'
p30
(dp31
(S'FSA'
p32
S'labelMatches'
tS' \n Accepting\n \n'
p33
s(g32
S'sorted'
tS' \n Reductions\n \n'
p34
s(g32
S'create'
tS' \n Copying\n \n'
p35
s(g32
S'complement'
tS' \n FSA operations\n \n'
p36
s(g32
S'hasArcMetadata'
tS' \n Arc Metadata Accessors\n \n'
p37
s(g32
S'__repr__'
tS' \n Presentation Methods\n \n'
p38
s(g32
S'makeStateTable'
tS' \n Initialization\n \n'
p39
s(g32
S'isEmpty'
tS' \n Predicates\n \n'
p40
s(g32
S'epsilonClosure'
tS' \n Accessors\n \n'
p41
ssS'_comments'
p42
S''
sbsS'makeStateTable'
p43
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp44
g4
((dp45
(dp46
tsg15
(dsg16
(S'self'
p47
S'default'
p48
tsg20
(dp49
g48
(I1
S'None'
Ntsg47
(NNNtssg22
g23
sg24
S''
sg25
g43
sg26
g7
sg27
g46
sg28
(dsg29
g45
sg30
g31
sg42
g39
sbsS'tuple'
p50
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp51
g4
((dp52
(dp53
tsg15
(dsg16
(S'self'
p54
tsg20
(dp55
g54
(NNNtssg22
g23
sg24
S''
sg25
g50
sg26
g7
sg27
g53
sg28
(dsg29
g52
sg30
g31
sg42
S''
sbsS'collectStates'
p56
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp57
g4
((dp58
(dp59
tsg15
(dsg16
(S'self'
p60
S'transitions'
p61
S'initialState'
p62
S'finalStates'
p63
tsg20
(dp64
g60
(NNNtsg61
(NNNtsg63
(NNNtsg62
(NNNtssg22
g23
sg24
S''
sg25
g56
sg26
g7
sg27
g59
sg28
(dsg29
g58
sg30
g31
sg42
S''
sbsS'complement'
p65
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp66
g4
((dp67
(dp68
tsg15
(dsg16
(S'self'
p69
tsg20
(dp70
g69
(NNNtssg22
g23
sg24
S''
sg25
g65
sg26
g7
sg27
g68
sg28
(dsg29
g67
sg30
g31
sg42
g36
sbsS'labels'
p71
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp72
g4
((dp73
(dp74
tsg15
(dsg16
(S'self'
p75
tsg20
(dp76
g75
(NNNtssg22
g23
sg24
S'Returns a list of transition labels.'
sg25
g71
sg26
g7
sg27
g74
sg28
(dsg29
g73
sg30
g31
sg42
S''
sbsS'determinized'
p77
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp78
g4
((dp79
(dp80
tsg15
(dsg16
(S'self'
p81
tsg20
(dp82
g81
(NNNtssg22
g23
sg24
S'Returns a deterministic FSA that accepts the same language.'
sg25
g77
sg26
g7
sg27
g80
sg28
(dsg29
g79
sg30
g31
sg42
S''
sbsS'minimized'
p83
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp84
g4
((dp85
(dp86
tsg15
(dsg16
(S'self'
p87
tsg20
(dp88
g87
(NNNtssg22
g23
sg24
S'Returns a minimal FSA that accepts the same language.'
sg25
g83
sg26
g7
sg27
g86
sg28
(dsg29
g85
sg30
g31
sg42
S''
sbsS'initializeTransitionTables'
p89
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp90
g4
((dp91
(dp92
tsg15
(dsg16
(S'self'
p93
tsg20
(dp94
g93
(NNNtssg22
g23
sg24
S''
sg25
g89
sg26
g7
sg27
g92
sg28
(dsg29
g91
sg30
g31
sg42
S''
sbsS'coerce'
p95
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp96
g4
((dp97
(dp98
tsg15
(dsg16
(S'self'
p99
S'klass'
p100
tsg20
(dp101
g99
(NNNtsg100
(NNNtssg22
g23
sg24
S''
sg25
g95
sg26
g7
sg27
g98
sg28
(dsg29
g97
sg30
g31
sg42
S''
sbsS'hasArcMetadata'
p102
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp103
g4
((dp104
(dp105
tsg15
(dsg16
(S'self'
p106
tsg20
(dp107
g106
(NNNtssg22
g23
sg24
S''
sg25
g102
sg26
g7
sg27
g105
sg28
(dsg29
g104
sg30
g31
sg42
g37
sbsS'__str__'
p108
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp109
g4
((dp110
(dp111
tsg15
(dsg16
(S'self'
p112
tsg20
(dp113
g112
(NNNtssg22
g23
sg24
S''
sg25
g108
sg26
g7
sg27
g111
sg28
(dsg29
g110
sg30
g31
sg42
S''
sbsS'stateLabelString'
p114
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp115
g4
((dp116
(dp117
tsg15
(dsg16
(S'self'
p118
S'state'
p119
tsg20
(dp120
g118
(NNNtsg119
(NNNtssg22
g23
sg24
S"A template method for specifying a state's label, for use in dot\n diagrams. If this returns None, the default (the string representation\n of the state) is used."
sg25
g114
sg26
g7
sg27
g117
sg28
(dsg29
g116
sg30
g31
sg42
S''
sbsS'labelMatches'
p121
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp122
g4
((dp123
(dp124
tsg15
(dsg16
(S'self'
p125
S'label'
p126
S'input'
p127
tsg20
(dp128
g127
(NNNtsg125
(NNNtsg126
(NNNtssg22
g23
sg24
S''
sg25
g121
sg26
g7
sg27
g124
sg28
(dsg29
g123
sg30
g31
sg42
g33
sbsS'computeEpsilonClosure'
p129
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp130
g4
((dp131
(dp132
tsg15
(dsg16
(S'self'
p133
S'state'
p134
tsg20
(dp135
g133
(NNNtsg134
(NNNtssg22
g23
sg24
S''
sg25
g129
sg26
g7
sg27
g132
sg28
(dsg29
g131
sg30
g31
sg42
S''
sbsS'sorted'
p136
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp137
g4
((dp138
(dp139
tsg15
(dsg16
(S'self'
p140
S'initial'
p141
tsg20
(dp142
g140
(NNNtsg141
(I1
S'0'
Ntssg22
g23
sg24
S''
sg25
g136
sg26
g7
sg27
g139
sg28
(dsg29
g138
sg30
g31
sg42
g34
sbsS'copy'
p143
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp144
g4
((dp145
(dp146
tsg15
(dsg16
(S'self'
p147
S'*args'
p148
tsg20
(dp149
g148
(NNNtsg147
(NNNtssg22
g23
sg24
S''
sg25
g143
sg26
g7
sg27
g146
sg28
(dsg29
g145
sg30
g31
sg42
S''
sbsS'addArcMetadataFor'
p150
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp151
g4
((dp152
(dp153
tsg15
(dsg16
(S'self'
p154
S'transition'
p155
S'data'
p156
tsg20
(dp157
g154
(NNNtsg155
(NNNtsg156
(NNNtssg22
g23
sg24
S''
sg25
g150
sg26
g7
sg27
g153
sg28
(dsg29
g152
sg30
g31
sg42
S''
sbsS'__init__'
p158
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp159
g4
((dp160
(dp161
tsg15
(dsg16
(S'self'
p162
S'states'
p163
S'alphabet'
p164
S'transitions'
p165
S'initialState'
p166
S'finalStates'
p167
S'arcMetadata'
p168
tsg20
(dp169
g163
(NNNtsg167
(NNNtsg164
(NNNtsg162
(NNNtsg165
(NNNtsg168
(I1
S'[]'
Ntsg166
(NNNtssg22
g23
sg24
S''
sg25
g158
sg26
g7
sg27
g161
sg28
(dsg29
g160
sg30
g31
sg42
S''
sbsS'getArcMetadata'
p170
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp171
g4
((dp172
(dp173
tsg15
(dsg16
(S'self'
p174
tsg20
(dp175
g174
(NNNtssg22
g23
sg24
S''
sg25
g170
sg26
g7
sg27
g173
sg28
(dsg29
g172
sg30
g31
sg42
S''
sbsS'setArcMetadataFor'
p176
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp177
g4
((dp178
(dp179
tsg15
(dsg16
(S'self'
p180
S'transition'
p181
S'data'
p182
tsg20
(dp183
g180
(NNNtsg181
(NNNtsg182
(NNNtssg22
g23
sg24
S''
sg25
g176
sg26
g7
sg27
g179
sg28
(dsg29
g178
sg30
g31
sg42
S''
sbsS'withoutEpsilons'
p184
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp185
g4
((dp186
(dp187
tsg15
(dsg16
(S'self'
p188
tsg20
(dp189
g188
(NNNtssg22
g23
sg24
S''
sg25
g184
sg26
g7
sg27
g187
sg28
(dsg29
g186
sg30
g31
sg42
S''
sbsS'addArcMetadata'
p190
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp191
g4
((dp192
(dp193
tsg15
(dsg16
(S'self'
p194
S'list'
p195
tsg20
(dp196
g194
(NNNtsg195
(NNNtssg22
g23
sg24
S''
sg25
g190
sg26
g7
sg27
g193
sg28
(dsg29
g192
sg30
g31
sg42
S''
sbsS'epsilonClosure'
p197
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp198
g4
((dp199
(dp200
tsg15
(dsg16
(S'self'
p201
S'state'
p202
tsg20
(dp203
g201
(NNNtsg202
(NNNtssg22
g23
sg24
S''
sg25
g197
sg26
g7
sg27
g200
sg28
(dsg29
g199
sg30
g31
sg42
g41
sbsS'additionalTransitionInfoString'
p204
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp205
g4
((dp206
(dp207
tsg15
(dsg16
(S'self'
p208
S'transition'
p209
tsg20
(dp210
g208
(NNNtsg209
(NNNtssg22
g23
sg24
S''
sg25
g204
sg26
g7
sg27
g207
sg28
(dsg29
g206
sg30
g31
sg42
S''
sbsS'create'
p211
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp212
g4
((dp213
(dp214
tsg15
(dsg16
(S'self'
p215
S'*args'
p216
tsg20
(dp217
g216
(NNNtsg215
(NNNtssg22
g23
sg24
S''
sg25
g211
sg26
g7
sg27
g214
sg28
(dsg29
g213
sg30
g31
sg42
g35
sbsS'isEmpty'
p218
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp219
g4
((dp220
(dp221
tsg15
(dsg16
(S'self'
p222
tsg20
(dp223
g222
(NNNtssg22
g23
sg24
S''
sg25
g218
sg26
g7
sg27
g221
sg28
(dsg29
g220
sg30
g31
sg42
g40
sbsS'accepts'
p224
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp225
g4
((dp226
(dp227
tsg15
(dsg16
(S'self'
p228
S'sequence'
p229
tsg20
(dp230
g228
(NNNtsg229
(NNNtssg22
g23
sg24
S''
sg25
g224
sg26
g7
sg27
g227
sg28
(dsg29
g226
sg30
g31
sg42
S''
sbsS'getArcMetadataFor'
p231
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp232
g4
((dp233
(dp234
tsg15
(dsg16
(S'self'
p235
S'transition'
p236
S'default'
p237
tsg20
(dp238
g237
(I1
S'None'
Ntsg235
(NNNtsg236
(NNNtssg22
g23
sg24
S''
sg25
g231
sg26
g7
sg27
g234
sg28
(dsg29
g233
sg30
g31
sg42
S''
sbsS'nextState'
p239
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp240
g4
((dp241
(dp242
tsg15
(dsg16
(S'self'
p243
S'state'
p244
S'input'
p245
tsg20
(dp246
g245
(NNNtsg243
(NNNtsg244
(NNNtssg22
g23
sg24
S''
sg25
g239
sg26
g7
sg27
g242
sg28
(dsg29
g241
sg30
g31
sg42
S''
sbsS'trimmed'
p247
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp248
g4
((dp249
(dp250
tsg15
(dsg16
(S'self'
p251
tsg20
(dp252
g251
(NNNtssg22
g23
sg24
S"Returns an equivalent FSA that doesn't include unreachable states,\n or states that only lead to dead states."
sg25
g247
sg26
g7
sg27
g250
sg28
(dsg29
g249
sg30
g31
sg42
S''
sbsS'isFSA'
p253
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp254
g4
((dp255
(dp256
tsg15
(dsg16
(S'self'
p257
tsg20
(dp258
g257
(NNNtssg22
g23
sg24
S''
sg25
g253
sg26
g7
sg27
g256
sg28
(dsg29
g255
sg30
g31
sg42
S''
sbsS'creationArgs'
p259
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp260
g4
((dp261
(dp262
tsg15
(dsg16
(S'self'
p263
tsg20
(dp264
g263
(NNNtssg22
g23
sg24
S''
sg25
g259
sg26
g7
sg27
g262
sg28
(dsg29
g261
sg30
g31
sg42
S''
sbsS'__repr__'
p265
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp266
g4
((dp267
(dp268
tsg15
(dsg16
(S'self'
p269
tsg20
(dp270
g269
(NNNtssg22
g23
sg24
S''
sg25
g265
sg26
g7
sg27
g268
sg28
(dsg29
g267
sg30
g31
sg42
g38
sbsS'setArcMetadata'
p271
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp272
g4
((dp273
(dp274
tsg15
(dsg16
(S'self'
p275
S'list'
p276
tsg20
(dp277
g275
(NNNtsg276
(NNNtssg22
g23
sg24
S''
sg25
g271
sg26
g7
sg27
g274
sg28
(dsg29
g273
sg30
g31
sg42
S''
sbsS'nextAvailableState'
p278
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp279
g4
((dp280
(dp281
tsg15
(dsg16
(S'self'
p282
tsg20
(dp283
g282
(NNNtssg22
g23
sg24
S''
sg25
g278
sg26
g7
sg27
g281
sg28
(dsg29
g280
sg30
g31
sg42
S''
sbsS'computeEpsilonClosures'
p284
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp285
g4
((dp286
(dp287
tsg15
(dsg16
(S'self'
p288
tsg20
(dp289
g288
(NNNtssg22
g23
sg24
S''
sg25
g284
sg26
g7
sg27
g287
sg28
(dsg29
g286
sg30
g31
sg42
S''
sbsS'view'
p290
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp291
g4
((dp292
(dp293
tsg15
(dsg16
(S'self'
p294
tsg20
(dp295
g294
(NNNtssg22
g23
sg24
S''
sg25
g290
sg26
g7
sg27
g293
sg28
(dsg29
g292
sg30
g31
sg42
S''
sbsS'nextStateSet'
p296
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp297
g4
((dp298
(dp299
tsg15
(dsg16
(S'self'
p300
S'states'
p301
S'input'
p302
tsg20
(dp303
g301
(NNNtsg302
(NNNtsg300
(NNNtssg22
g23
sg24
S''
sg25
g296
sg26
g7
sg27
g299
sg28
(dsg29
g298
sg30
g31
sg42
S''
sbsS'toDotString'
p304
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp305
g4
((dp306
(dp307
tsg15
(dsg16
(S'self'
p308
tsg20
(dp309
g308
(NNNtssg22
g23
sg24
S'Returns a string that can be printed by the DOT tool at\n http://www.research.att.com/sw/tools/graphviz/ .'
sg25
g304
sg26
g7
sg27
g307
sg28
(dsg29
g306
sg30
g31
sg42
S''
sbsS'transitionsFrom'
p310
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp311
g4
((dp312
(dp313
tsg15
(dsg16
(S'self'
p314
S'state'
p315
tsg20
(dp316
g314
(NNNtsg315
(NNNtssg22
g23
sg24
S''
sg25
g310
sg26
g7
sg27
g313
sg28
(dsg29
g312
sg30
g31
sg42
S''
sbstsg22
g23
sg24
S''
sS'_class_member_info'
p317
(lsg25
g6
sg26
g2
sg27
g10
sg42
S''
sg28
(dsg29
g9
sg30
g31
sS'_base_class_info'
p318
(lsbs(dp319
S'trim'
p320
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp321
g4
((dp322
(dp323
tsg15
(dsg16
(S'fsa'
p324
tsg20
(dp325
g324
(NNNtssg22
g23
sg24
S''
sg25
g320
sg26
g2
sg27
g323
sg28
(dsg29
g322
sg30
g31
sg42
S''
sbsS'completion'
p326
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp327
g4
((dp328
(dp329
tsg15
(dsg16
(S'fsa'
p330
tsg20
(dp331
g330
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts the same language as the argument, but that\n lands in a defined state for every input.'
sg25
g326
sg26
g2
sg27
g329
sg28
(dsg29
g328
sg30
g31
sg42
S''
sbsS'singleton'
p332
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp333
g4
((dp334
(dp335
tsg15
(dsg16
(S'symbol'
p336
S'alphabet'
p337
S'arcMetadata'
p338
tsg20
(dp339
g337
(I1
S'None'
Ntsg336
(NNNtsg338
(I1
S'None'
Ntssg22
g23
sg24
S''
sg25
g332
sg26
g2
sg27
g335
sg28
(dsg29
g334
sg30
g31
sg42
S''
sbsS'option'
p340
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp341
g4
((dp342
(dp343
tsg15
(dsg16
(S'fsa'
p344
tsg20
(dp345
g344
(NNNtssg22
g23
sg24
S''
sg25
g340
sg26
g2
sg27
g343
sg28
(dsg29
g342
sg30
g31
sg42
S''
sbsS'sequence'
p346
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp347
g4
((dp348
(dp349
tsg15
(dsg16
(S'sequence'
p350
S'alphabet'
p351
tsg20
(dp352
g351
(I1
S'None'
Ntsg350
(NNNtssg22
g23
sg24
S''
sg25
g346
sg26
g2
sg27
g349
sg28
(dsg29
g348
sg30
g31
sg42
S''
sbsS'equivalent'
p353
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp354
g4
((dp355
(dp356
tsg15
(dsg16
(S'a'
S'b'
tsg20
(dp357
S'a'
(NNNtsS'b'
(NNNtssg22
g23
sg24
S'Return true ifff a and b accept the same language.'
sg25
g353
sg26
g2
sg27
g356
sg28
(dsg29
g355
sg30
g31
sg42
S''
sbsS'unionLabelSets'
p358
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp359
g4
((dp360
(dp361
tsg15
(dsg16
(S'alist'
p362
S'blist'
p363
S'alphabet'
p364
tsg20
(dp365
g364
(I1
S'None'
Ntsg363
(NNNtsg362
(NNNtssg22
g23
sg24
S''
sg25
g358
sg26
g2
sg27
g361
sg28
(dsg29
g360
sg30
g31
sg42
S''
sbsS'symbolComplement'
p366
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp367
g4
((dp368
(dp369
tsg15
(dsg16
(S'symbol'
p370
tsg20
(dp371
g370
(NNNtssg22
g23
sg24
S''
sg25
g366
sg26
g2
sg27
g369
sg28
(dsg29
g368
sg30
g31
sg42
S''
sbsS'concatenation'
p372
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp373
g4
((dp374
(dp375
tsg15
(dsg16
(S'a'
S'*args'
p376
tsg20
(dp377
S'a'
(NNNtsg376
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts the language consisting of the concatenation\n of strings recognized by the arguments.'
sg25
g372
sg26
g2
sg27
g375
sg28
(dsg29
g374
sg30
g31
sg42
S''
sbsS'sort'
p378
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp379
g4
((dp380
(dp381
tsg15
(dsg16
(S'fsa'
p382
tsg20
(dp383
g382
(NNNtssg22
g23
sg24
S''
sg25
g378
sg26
g2
sg27
g381
sg28
(dsg29
g380
sg30
g31
sg42
S''
sbsS'labelIntersection'
p384
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp385
g4
((dp386
(dp387
tsg15
(dsg16
(S'l1'
p388
S'l2'
p389
tsg20
(dp390
g389
(NNNtsg388
(NNNtssg22
g23
sg24
S''
sg25
g384
sg26
g2
sg27
g387
sg28
(dsg29
g386
sg30
g31
sg42
S''
sbsS'intersectLabelSets'
p391
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp392
g4
((dp393
(dp394
tsg15
(dsg16
(S'alist'
p395
S'blist'
p396
tsg20
(dp397
g396
(NNNtsg395
(NNNtssg22
g23
sg24
S''
sg25
g391
sg26
g2
sg27
g394
sg28
(dsg29
g393
sg30
g31
sg42
S''
sbsS'labelString'
p398
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp399
g4
((dp400
(dp401
tsg15
(dsg16
(S'label'
p402
tsg20
(dp403
g402
(NNNtssg22
g23
sg24
S''
sg25
g398
sg26
g2
sg27
g401
sg28
(dsg29
g400
sg30
g31
sg42
S''
sbsS'compileItem'
p404
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp405
g4
((dp406
(dp407
tsg15
(dp408
S"'unimplemented'"
Nssg16
(S'str'
p409
S'index'
p410
S'options'
p411
tsg20
(dp412
g410
(NNNtsg411
(NNNtsg409
(NNNtssg22
g23
sg24
S''
sg25
g404
sg26
g2
sg27
g407
sg28
(dsg29
g406
sg30
g31
sg42
S''
sbsS'labelComplement'
p413
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp414
g4
((dp415
(dp416
tsg15
(dsg16
(S'label'
p417
S'alphabet'
p418
tsg20
(dp419
g418
(NNNtsg417
(NNNtssg22
g23
sg24
S''
sg25
g413
sg26
g2
sg27
g416
sg28
(dsg29
g415
sg30
g31
sg42
S''
sbsS'removeDuplicates'
p420
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp421
g4
((dp422
(dp423
tsg15
(dsg16
(S'sequence'
p424
tsg20
(dp425
g424
(NNNtssg22
g23
sg24
S''
sg25
g420
sg26
g2
sg27
g423
sg28
(dsg29
g422
sg30
g31
sg42
S''
sbsS'difference'
p426
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp427
g4
((dp428
(dp429
tsg15
(dsg16
(S'a'
S'b'
tsg20
(dp430
S'a'
(NNNtsS'b'
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts those strings accepted by the first\n argument, but not the second.'
sg25
g426
sg26
g2
sg27
g429
sg28
(dsg29
g428
sg30
g31
sg42
S''
sbsS'compileREExpr'
p431
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp432
g4
((dp433
(dp434
tsg15
(dsg16
(S'str'
p435
S'index'
p436
S'options'
p437
tsg20
(dp438
g436
(NNNtsg437
(NNNtsg435
(NNNtssg22
g23
sg24
S''
sg25
g431
sg26
g2
sg27
g434
sg28
(dsg29
g433
sg30
g31
sg42
S''
sbsS'complementLabelSet'
p439
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp440
g4
((dp441
(dp442
tsg15
(dsg16
(S'labels'
p443
S'alphabet'
p444
tsg20
(dp445
g444
(I1
S'None'
Ntsg443
(NNNtssg22
g23
sg24
S''
sg25
g439
sg26
g2
sg27
g442
sg28
(dsg29
g441
sg30
g31
sg42
S''
sbsS'compileRE'
p446
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp447
g4
((dp448
(dp449
tsg15
(dp450
S"'extra ' + ` ')' `"
Nssg16
(S's'
S'**options'
p451
tsg20
(dp452
S's'
(NNNtsg451
(NNNtssg22
g23
sg24
S''
sg25
g446
sg26
g2
sg27
g449
sg28
(dsg29
g448
sg30
g31
sg42
S''
sbsS'closure'
p453
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp454
g4
((dp455
(dp456
tsg15
(dsg16
(S'arg'
p457
tsg20
(dp458
g457
(NNNtssg22
g23
sg24
S''
sg25
g453
sg26
g2
sg27
g456
sg28
(dsg29
g455
sg30
g31
sg42
S''
sbsS'labelComplements'
p459
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp460
g4
((dp461
(dp462
tsg15
(dsg16
(S'label'
p463
S'alphabet'
p464
tsg20
(dp465
g464
(NNNtsg463
(NNNtssg22
g23
sg24
S''
sg25
g459
sg26
g2
sg27
g462
sg28
(dsg29
g461
sg30
g31
sg42
S''
sbsS'intersection'
p466
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp467
g4
((dp468
(dp469
tsg15
(dsg16
(S'a'
S'b'
tsg20
(dp470
S'a'
(NNNtsS'b'
(NNNtssg22
g23
sg24
S'Returns the intersection of two FSAs'
sg25
g466
sg26
g2
sg27
g469
sg28
(dsg29
g468
sg30
g31
sg42
S''
sbsS'reverse'
p471
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp472
g4
((dp473
(dp474
tsg15
(dsg16
(S'fsa'
p475
tsg20
(dp476
g475
(NNNtssg22
g23
sg24
S''
sg25
g471
sg26
g2
sg27
g474
sg28
(dsg29
g473
sg30
g31
sg42
S''
sbsS'determinize'
p477
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp478
g4
((dp479
(dp480
tsg15
(dsg16
(S'fsa'
p481
tsg20
(dp482
g481
(NNNtssg22
g23
sg24
S''
sg25
g477
sg26
g2
sg27
g480
sg28
(dsg29
g479
sg30
g31
sg42
S''
sbsS'union'
p483
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp484
g4
((dp485
(dp486
tsg15
(dsg16
(S'*args'
p487
tsg20
(dp488
g487
(NNNtssg22
g23
sg24
S''
sg25
g483
sg26
g2
sg27
g486
sg28
(dsg29
g485
sg30
g31
sg42
S''
sbsS'symbolIntersection'
p489
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp490
g4
((dp491
(dp492
tsg15
(dsg16
(S's1'
p493
S's2'
p494
tsg20
(dp495
g494
(NNNtsg493
(NNNtssg22
g23
sg24
S''
sg25
g489
sg26
g2
sg27
g492
sg28
(dsg29
g491
sg30
g31
sg42
S''
sbsS'compileSequence'
p496
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp497
g4
((dp498
(dp499
tsg15
(dsg16
(S'str'
p500
S'index'
p501
S'options'
p502
tsg20
(dp503
g501
(NNNtsg502
(NNNtsg500
(NNNtssg22
g23
sg24
S''
sg25
g496
sg26
g2
sg27
g499
sg28
(dsg29
g498
sg30
g31
sg42
S''
sbsS'iteration'
p504
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp505
g4
((dp506
(dp507
tsg15
(dsg16
(S'fsa'
p508
S'min'
p509
S'max'
p510
tsg20
(dp511
g508
(NNNtsg510
(I1
S'None'
Ntsg509
(I1
S'1'
Ntssg22
g23
sg24
S"\n >>> equivalent(iteration(singleton('a', 0, 2)), compileRE('|a|aa'))\n >>> equivalent(iteration(singleton('a', 1, 2)), compileRE('a|aa'))\n >>> equivalent(iteration(singleton('a', 1)), compileRE('aa*'))\n "
sg25
g504
sg26
g2
sg27
g507
sg28
(dsg29
g506
sg30
g31
sg42
S''
sbsS'constructLabelMap'
p512
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp513
g4
((dp514
(dp515
tsg15
(dsg16
(S'labels'
p516
S'alphabet'
p517
S'includeComplements'
p518
tsg20
(dp519
g517
(NNNtsg516
(NNNtsg518
(I1
S'0'
Ntssg22
g23
sg24
S'Return a list of (newLabel, positives), where newLabel is an\n intersection of elements from labels and their complemens, and positives is\n a list of labels that have non-empty intersections with newLabel.'
sg25
g512
sg26
g2
sg27
g515
sg28
(dsg29
g514
sg30
g31
sg42
S''
sbsS'toFSA'
p520
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp521
g4
((dp522
(dp523
tsg15
(dsg16
(S'arg'
p524
tsg20
(dp525
g524
(NNNtssg22
g23
sg24
S''
sg25
g520
sg26
g2
sg27
g523
sg28
(dsg29
g522
sg30
g31
sg42
S''
sbsS'compileConjunction'
p526
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp527
g4
((dp528
(dp529
tsg15
(dsg16
(S'str'
p530
S'index'
p531
S'options'
p532
tsg20
(dp533
g531
(NNNtsg532
(NNNtsg530
(NNNtssg22
g23
sg24
S''
sg25
g526
sg26
g2
sg27
g529
sg28
(dsg29
g528
sg30
g31
sg42
S''
sbsS'minimize'
p534
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp535
g4
((dp536
(dp537
tsg15
(dsg16
(S'fsa'
p538
tsg20
(dp539
g538
(NNNtssg22
g23
sg24
S''
sg25
g534
sg26
g2
sg27
g537
sg28
(dsg29
g536
sg30
g31
sg42
S''
sbsS'consolidateTransitions'
p540
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp541
g4
((dp542
(dp543
tsg15
(dsg16
(S'transitions'
p544
tsg20
(dp545
g544
(NNNtssg22
g23
sg24
S''
sg25
g540
sg26
g2
sg27
g543
sg28
(dsg29
g542
sg30
g31
sg42
S''
sbsS'containment'
p546
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp547
g4
((dp548
(dp549
tsg15
(dsg16
(S'arg'
p550
S'occurrences'
p551
tsg20
(dp552
g551
(I1
S'1'
Ntsg550
(NNNtssg22
g23
sg24
S'Returns an FSA that matches sequences containing at least _count_\n occurrences\n of _symbol_.'
sg25
g546
sg26
g2
sg27
g549
sg28
(dsg29
g548
sg30
g31
sg42
S''
sbsS'labelMatches'
p553
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp554
g4
((dp555
(dp556
tsg15
(dsg16
(S'label'
p557
S'input'
p558
tsg20
(dp559
g558
(NNNtsg557
(NNNtssg22
g23
sg24
S''
sg25
g553
sg26
g2
sg27
g556
sg28
(dsg29
g555
sg30
g31
sg42
S''
sbsS'_labelIntersection'
p560
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp561
g4
((dp562
(dp563
tsg15
(dsg16
(S'l1'
p564
S'l2'
p565
tsg20
(dp566
g565
(NNNtsg564
(NNNtssg22
g23
sg24
S''
sg25
g560
sg26
g2
sg27
g563
sg28
(dsg29
g562
sg30
g31
sg42
S''
sbsS'complement'
p567
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp568
g4
((dp569
(dp570
tsg15
(dsg16
(S'arg'
p571
tsg20
(dp572
g571
(NNNtssg22
g23
sg24
S'Returns an FSA that accepts exactly those strings that the argument does\n not.'
sg25
g567
sg26
g2
sg27
g570
sg28
(dsg29
g569
sg30
g31
sg42
S''
sbsS'view'
p573
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
(dp574
g4
((dp575
(dp576
tsg15
(dsg16
(S'str'
p577
tsg20
(dp578
g577
(NNNtssg22
g23
sg24
S''
sg25
g573
sg26
g2
sg27
g576
sg28
(dsg29
g575
sg30
g31
sg42
S''
sbstsS'_import_info'
p579
(ihappydoclib.parseinfo.imports
ImportInfo
(dp580
S'_named_imports'
p581
(dp582
S'types'
(lp583
S'InstanceType'
aS'ListType'
aS'IntType'
aS'LongType'
assS'_straight_imports'
p584
(lsbsg22
g23
sg24
S'""" methods to manipulate finite-state automata\n\nThis module defines an FSA class, for representing and operating on\nfinite-state automata (FSAs). FSAs can be used to represent regular\nexpressions and to test sequences for membership in the languages\ndescribed by regular expressions.\n\nFSAs can be deterministic or nondeterministic, and they can contain\nepsilon transitions. Methods to determinize an automaton (also\neliminating its epsilon transitions), and to minimize an automaton,\nare provided.\n\nThe transition labels for an FSA can be symbols from an alphabet, as\nin the standard formal definition of an FSA, but they can also be\ninstances which represent predicates. If these instances implement\ninstance.matches(), then the FSA nextState() function and accepts()\npredicate can be used. If they implement instance.complement() and\ninstance.intersection(), the FSA can be be determinized and minimized,\nto find a minimal deterministic FSA that accepts an equivalent\nlanguage.\n\n\nQuick Start\n----------\nInstances of FSA can be created out of labels (for instance, strings)\nby the singleton() function, and combined to create more complex FSAs\nthrough the complement(), closure(), concatenation(), union(), and\nother constructors. For example, concatenation(singleton(\'a\'),\nunion(singleton(\'b\'), closure(singleton(\'c\')))) creates an FSA that\naccepts the strings \'a\', \'ab\', \'ac\', \'acc\', \'accc\', and so on.\n\nInstances of FSA can also be created with the compileRE() function,\nwhich compiles a simple regular expression (using only \'*\', \'?\', \'+\',\n\'|\', \'(\', and \')\' as metacharacters) into an FSA. For example,\ncompileRE(\'a(b|c*)\') returns an FSA equivalent to the example in the\nprevious paragraph.\n\nFSAs can be determinized, to create equivalent FSAs (FSAs accepting\nthe same language) with unique successor states for each input, and\nminimized, to create an equivalent deterministic FSA with the smallest\nnumber of states. 
FSAs can also be complemented, intersected, unioned,\nand so forth as described under \'FSA Functions\' below.\n\n\nFSA Methods\n-----------\nThe class FSA defines the following methods.\n\nAcceptance\n``````````\nfsa.nextStates(state, input)\n returns a list of states\nfsa.nextState(state, input)\n returns None or a single state if\n |nextStates| <= 1, otherwise it raises an exception\nfsa.nextStateSet(states, input)\n returns a list of states\nfsa.accepts(sequence)\n returns true or false\n\nAccessors and predicates\n````````````````````````\nisEmpty()\n returns true iff the language accepted by the FSA is the empty language\nlabels()\n returns a list of labels that are used in any transition\nnextAvailableState()\n returns an integer n such that no states in the FSA\n are numeric values >= n\n\nReductions\n``````````\nsorted(initial=0)\n returns an equivalent FSA whose states are numbered\n upwards from 0\ndeterminized()\n returns an equivalent deterministic FSA\nminimized()\n returns an equivalent minimal FSA\ntrimmed()\n returns an equivalent FSA that contains no unreachable or dead\n states\n\nPresentation\n````````````\ntoDotString()\n returns a string suitable as *.dot file for the \'dot\'\n program from AT&T GraphViz\nview()\n views the FSA with a gs viewer, if gs and dot are installed\n\n\nFSA Functions\n------------\nConstruction from FSAs\n``````````````````````\ncomplement(a)\n returns an fsa that accepts exactly those sequences that its\n argument does not\nclosure(a)\n returns an fsa that accepts sequences composed of zero or more\n concatenations of sequences accepted by the argument\nconcatenation(a, b)\n returns an fsa that accepts sequences composed of a\n sequence accepted by a, followed by a sequence accepted by b\ncontainment(a, occurrences=1)\n returns an fsa that accepts sequences that\n contain at least occurrences occurrences of a subsequence recognized by the\n argument.\ndifference(a, b)\n returns an fsa that accepts those sequences 
accepted by a\n but not b\nintersection(a, b)\n returns an fsa that accepts sequences accepted by both a\n and b\niteration(a, min=1, max=None)\n returns an fsa that accepts sequences\n consisting of from min to max (or any number, if max is None) of sequences\n accepted by its first argument\noption(a)\n equivalent to union(a, EMPTY_STRING_FSA)\nreverse(a)\n returns an fsa that accepts strings whose reversal is accepted by\n the argument\nunion(a, b)\n returns an fsa that accepts sequences accepted by both a and b\n\nPredicates\n``````````\nequivalent(a, b)\n returns true iff a and b accept the same language\n\nReductions (these equivalent to the similarly-named methods)\n````````````````````````````````````````````````````````````\ndeterminize(fsa)\n returns an equivalent deterministic FSA\nminimize(fsa)\n returns an equivalent minimal FSA\nsort(fsa, initial=0)\n returns an equivalent FSA whose states are numbered from\n initial\ntrim(fsa)\n returns an equivalent FSA that contains no dead or unreachable\n states\n\nConstruction from labels\n````````````````````````\ncompileRE(string)\n returns an FSA that accepts the language described by\n string, where string is a list of symbols and \'*\', \'+\', \'?\', and \'|\' operators,\n with \'(\' and \')\' to control precedence.\nsequence(sequence)\n returns an fsa that accepts sequences that are matched by\n the elements of the argument. For example, sequence(\'abc\') returns an fsa that\n accepts \'abc\' and [\'a\', \'b\', \'c\'].\nsingleton(label)\n returns an fsa that accepts singletons whose elements are\n matched by label. 
For example, singleton(\'a\') returns an fsa that accepts only\n the string \'a\'.\n\n\nFSA Constants\n------------\nEMPTY_STRING_FSA is an FSA that accepts the language consisting only\nof the empty string.\n\nNULL_FSA is an FSA that accepts the null language.\n\nUNIVERSAL_FSA is an FSA that accepts S*, where S is any object.\n\n\nFSA instance creation\n---------------------\nFSA is initialized with a list of states, an alphabet, a list of\ntransition, an initial state, and a list of final states. If fsa is an\nFSA, fsa.tuple() returns these values in that order, i.e. (states,\nalphabet, transitions, initialState, finalStates). They\'re also\navailable as fields of fsa with those names.\n\nEach element of transition is a tuple of a start state, an end state,\nand a label: (startState, endSTate, label).\n\nIf the list of states is None, it\'s computed from initialState,\nfinalStates, and the states in transitions.\n\nIf alphabet is None, an open alphabet is used: labels are assumed to\nbe objects that implements label.matches(input), label.complement(),\nand label.intersection() as follows:\n\n - label.matches(input) returns true iff label matches input\n - label.complement() returnseither a label or a list of labels which,\n together with the receiver, partition the input alphabet\n - label.intersection(other) returns either None (if label and other don\'t\n both match any symbol), or a label that matches the set of symbols that\n both label and other match\n\nAs a special case, strings can be used as labels. If a strings \'a\' and\n\'b\' are used as a label and there\'s no alphabet, \'~a\' and \'~b\' are\ntheir respective complements, and \'~a&~b\' is the intersection of \'~a\'\nand \'~b\'. 
(The intersections of \'a\' and \'b\', \'a\' and \'~b\', and \'~a\'\nand \'b\' are, respectively, None, \'a\', and \'b\'.)\n\n\nGoals\n-----\nDesign Goals:\n\n- easy to use\n- easy to read (simple implementation, direct expression of algorithms)\n- extensible\n\nNon-Goals:\n\n- efficiency\n"""'
sg25
S'fsa'
sg26
Nsg27
g319
sg28
(dp585
S'include_comments'
p586
I1
sS'cacheFilePrefix'
p587
S'.happydoc.'
p588
sS'useCache'
p589
I1
sS'docStringFormat'
p590
S'StructuredText'
p591
ssg29
g5
sg30
g31
sg42
S''
sbt. | false | true |
f72ababf067d4c75b7546894366ccba2992a76c1 | 8,329 | py | Python | test/functional/feature_proxy.py | KingricharVD/DSW | 7281f6ed5c102687805d2bca707e675cbce7dd4d | [
"MIT"
] | 3 | 2020-10-02T13:11:53.000Z | 2021-11-06T18:02:32.000Z | test/functional/feature_proxy.py | KingricharVD/DSW | 7281f6ed5c102687805d2bca707e675cbce7dd4d | [
"MIT"
] | 3 | 2020-08-06T17:35:37.000Z | 2021-07-22T01:37:56.000Z | test/functional/feature_proxy.py | KingricharVD/DSW | 7281f6ed5c102687805d2bca707e675cbce7dd4d | [
"MIT"
] | 6 | 2020-10-09T16:42:49.000Z | 2021-07-05T20:57:23.000Z | #!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoind with different proxy configuration.
Test plan:
- Start nesteggd's with different proxy configurations
- Use addnode to initiate connections
- Verify that proxies are connected to, and the right connection command is given
- Proxy configurations to test on nesteggd side:
- `-proxy` (proxy everything)
- `-onion` (proxy just onions)
- `-proxyrandomize` Circuit randomization
- Proxy configurations to test on proxy side,
- support no authentication (other proxy)
- support no authentication + user/pass authentication (Tor)
- proxy on IPv6
- Create various proxies (as threads)
- Create nesteggds that connect to them
- Manipulate the nesteggds using addnode (onetry) an observe effects
addnode connect to IPv4
addnode connect to IPv6
addnode connect to onion
addnode connect to generic DNS name
"""
import socket
import os
from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework.test_framework import PivxTestFramework
from test_framework.util import (
PORT_MIN,
PORT_RANGE,
assert_equal,
)
from test_framework.netutil import test_ipv6_local
RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE # Start after p2p and rpc ports
class ProxyTest(PivxTestFramework):
    """Check that -proxy/-onion/-proxyrandomize route connections through SOCKS5."""

    def set_test_params(self):
        self.num_nodes = 4

    def setup_nodes(self):
        self.have_ipv6 = test_ipv6_local()
        # Create two proxies on different ports
        # ... one unauthenticated
        self.conf1 = Socks5Configuration()
        self.conf1.addr = ('127.0.0.1', RANGE_BEGIN + (os.getpid() % 1000))
        self.conf1.unauth = True
        self.conf1.auth = False
        # ... one supporting authenticated and unauthenticated (Tor)
        self.conf2 = Socks5Configuration()
        self.conf2.addr = ('127.0.0.1', RANGE_BEGIN + 1000 + (os.getpid() % 1000))
        self.conf2.unauth = True
        self.conf2.auth = True
        if self.have_ipv6:
            # ... one on IPv6 with similar configuration
            self.conf3 = Socks5Configuration()
            self.conf3.af = socket.AF_INET6
            self.conf3.addr = ('::1', RANGE_BEGIN + 2000 + (os.getpid() % 1000))
            self.conf3.unauth = True
            self.conf3.auth = True
        else:
            self.log.warning("Testing without local IPv6 support")

        self.serv1 = Socks5Server(self.conf1)
        self.serv1.start()
        self.serv2 = Socks5Server(self.conf2)
        self.serv2.start()
        if self.have_ipv6:
            self.serv3 = Socks5Server(self.conf3)
            self.serv3.start()

        # Note: proxies are not used to connect to local nodes.
        # This is because the proxy to use is based on CService.GetNetwork(),
        # which returns NET_UNROUTABLE for localhost.
        args = [
            ['-listen', '-proxy=%s:%i' % (self.conf1.addr), '-proxyrandomize=1'],
            ['-listen', '-proxy=%s:%i' % (self.conf1.addr), '-onion=%s:%i' % (self.conf2.addr), '-proxyrandomize=0'],
            ['-listen', '-proxy=%s:%i' % (self.conf2.addr), '-proxyrandomize=1'],
            []
        ]
        if self.have_ipv6:
            args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr), '-proxyrandomize=0', '-noonion']
        self.add_nodes(self.num_nodes, extra_args=args)
        self.start_nodes()

    def _check_proxied_connect(self, proxy, expected_addr, expected_port, auth):
        """Pop the next SOCKS5 command from *proxy* and verify its destination.

        Returns the Socks5Command so callers can inspect its credentials.
        """
        cmd = proxy.queue.get()
        assert isinstance(cmd, Socks5Command)
        # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME,
        # even if connecting directly to IPv4/IPv6.
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, expected_addr)
        assert_equal(cmd.port, expected_port)
        if not auth:
            # Without credential randomization no username/password is sent.
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        return cmd

    def node_test(self, node, proxies, auth, test_onion=True):
        """Drive one node through IPv4/IPv6/onion/DNS connects; verify each goes via its proxy."""
        rv = []

        # Test: outgoing IPv4 connection through node
        node.addnode("15.61.23.23:1234", "onetry")
        rv.append(self._check_proxied_connect(proxies[0], b"15.61.23.23", 1234, auth))

        if self.have_ipv6:
            # Test: outgoing IPv6 connection through node
            node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
            rv.append(self._check_proxied_connect(
                proxies[1], b"1233:3432:2434:2343:3234:2345:6546:4534", 5443, auth))

        if test_onion:
            # Test: outgoing onion connection through node
            node.addnode("bitcoinostk4e4re.onion:8333", "onetry")
            rv.append(self._check_proxied_connect(proxies[2], b"bitcoinostk4e4re.onion", 8333, auth))

        # Test: outgoing DNS name connection through node
        node.addnode("node.noumenon:8333", "onetry")
        rv.append(self._check_proxied_connect(proxies[3], b"node.noumenon", 8333, auth))

        return rv

    def run_test(self):
        # basic -proxy
        self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)

        # -proxy plus -onion
        self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)

        # -proxy plus -onion, -proxyrandomize
        rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
        # Check that credentials as used for -proxyrandomize connections are unique
        credentials = set((x.username, x.password) for x in rv)
        assert_equal(len(credentials), len(rv))

        if self.have_ipv6:
            # proxy on IPv6 localhost
            self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False)

        def networks_dict(d):
            """Index getnetworkinfo()['networks'] entries by network name."""
            return {x['name']: x for x in d['networks']}

        # test RPC getnetworkinfo
        n0 = networks_dict(self.nodes[0].getnetworkinfo())
        for net in ['ipv4', 'ipv6', 'onion']:
            assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n0[net]['proxy_randomize_credentials'], True)
        assert_equal(n0['onion']['reachable'], True)

        n1 = networks_dict(self.nodes[1].getnetworkinfo())
        for net in ['ipv4', 'ipv6']:
            assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n1[net]['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
        assert_equal(n1['onion']['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['reachable'], True)

        n2 = networks_dict(self.nodes[2].getnetworkinfo())
        for net in ['ipv4', 'ipv6', 'onion']:
            assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
            assert_equal(n2[net]['proxy_randomize_credentials'], True)
        assert_equal(n2['onion']['reachable'], True)

        if self.have_ipv6:
            n3 = networks_dict(self.nodes[3].getnetworkinfo())
            for net in ['ipv4', 'ipv6']:
                assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr))
                assert_equal(n3[net]['proxy_randomize_credentials'], False)
            assert_equal(n3['onion']['reachable'], False)


if __name__ == '__main__':
    ProxyTest().main()
| 41.232673 | 121 | 0.625405 |
import socket
import os
from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework.test_framework import PivxTestFramework
from test_framework.util import (
PORT_MIN,
PORT_RANGE,
assert_equal,
)
from test_framework.netutil import test_ipv6_local
RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE
class ProxyTest(PivxTestFramework):
def set_test_params(self):
self.num_nodes = 4
def setup_nodes(self):
self.have_ipv6 = test_ipv6_local()
self.conf1 = Socks5Configuration()
self.conf1.addr = ('127.0.0.1', RANGE_BEGIN + (os.getpid() % 1000))
self.conf1.unauth = True
self.conf1.auth = False
self.conf2 = Socks5Configuration()
self.conf2.addr = ('127.0.0.1', RANGE_BEGIN + 1000 + (os.getpid() % 1000))
self.conf2.unauth = True
self.conf2.auth = True
if self.have_ipv6:
self.conf3 = Socks5Configuration()
self.conf3.af = socket.AF_INET6
self.conf3.addr = ('::1', RANGE_BEGIN + 2000 + (os.getpid() % 1000))
self.conf3.unauth = True
self.conf3.auth = True
else:
self.log.warning("Testing without local IPv6 support")
self.serv1 = Socks5Server(self.conf1)
self.serv1.start()
self.serv2 = Socks5Server(self.conf2)
self.serv2.start()
if self.have_ipv6:
self.serv3 = Socks5Server(self.conf3)
self.serv3.start()
args = [
['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),'-proxyrandomize=0'],
['-listen', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
[]
]
if self.have_ipv6:
args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0', '-noonion']
self.add_nodes(self.num_nodes, extra_args=args)
self.start_nodes()
def node_test(self, node, proxies, auth, test_onion=True):
rv = []
node.addnode("15.61.23.23:1234", "onetry")
cmd = proxies[0].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"15.61.23.23")
assert_equal(cmd.port, 1234)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
if self.have_ipv6:
# Test: outgoing IPv6 connection through node
node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
cmd = proxies[1].queue.get()
assert(isinstance(cmd, Socks5Command))
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
assert_equal(cmd.port, 5443)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
if test_onion:
node.addnode("bitcoinostk4e4re.onion:8333", "onetry")
cmd = proxies[2].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"bitcoinostk4e4re.onion")
assert_equal(cmd.port, 8333)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
node.addnode("node.noumenon:8333", "onetry")
cmd = proxies[3].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"node.noumenon")
assert_equal(cmd.port, 8333)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
return rv
def run_test(self):
self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)
self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)
rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
credentials = set((x.username,x.password) for x in rv)
assert_equal(len(credentials), len(rv))
if self.have_ipv6:
self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False)
def networks_dict(d):
r = {}
for x in d['networks']:
r[x['name']] = x
return r
n0 = networks_dict(self.nodes[0].getnetworkinfo())
for net in ['ipv4','ipv6','onion']:
assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr))
assert_equal(n0[net]['proxy_randomize_credentials'], True)
assert_equal(n0['onion']['reachable'], True)
n1 = networks_dict(self.nodes[1].getnetworkinfo())
for net in ['ipv4','ipv6']:
assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
assert_equal(n1[net]['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
assert_equal(n1['onion']['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['reachable'], True)
n2 = networks_dict(self.nodes[2].getnetworkinfo())
for net in ['ipv4','ipv6','onion']:
assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
assert_equal(n2[net]['proxy_randomize_credentials'], True)
assert_equal(n2['onion']['reachable'], True)
if self.have_ipv6:
n3 = networks_dict(self.nodes[3].getnetworkinfo())
for net in ['ipv4','ipv6']:
assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr))
assert_equal(n3[net]['proxy_randomize_credentials'], False)
assert_equal(n3['onion']['reachable'], False)
if __name__ == '__main__':
ProxyTest().main()
| true | true |
f72abb4a157ff48785fee482319d874695a9722b | 10,815 | py | Python | tensorflow/python/training/tracking/resource.py | EricRemmerswaal/tensorflow | 141ff27877579c81a213fa113bd1b474c1749aca | [
"Apache-2.0"
] | 7 | 2022-03-04T21:14:47.000Z | 2022-03-22T23:07:39.000Z | tensorflow/python/training/tracking/resource.py | EricRemmerswaal/tensorflow | 141ff27877579c81a213fa113bd1b474c1749aca | [
"Apache-2.0"
] | 1 | 2022-03-08T18:28:46.000Z | 2022-03-08T18:37:20.000Z | tensorflow/python/training/tracking/resource.py | EricRemmerswaal/tensorflow | 141ff27877579c81a213fa113bd1b474c1749aca | [
"Apache-2.0"
] | 1 | 2022-03-22T00:45:15.000Z | 2022-03-22T00:45:15.000Z | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definitions for resource-type trackable object classes."""
import contextlib
import copy
import weakref
import six
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import ops
from tensorflow.python.training.tracking import base
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# global _RESOURCE_TRACKER_STACK
_RESOURCE_TRACKER_STACK = []
class ResourceTracker(object):
  """Records resources in the order they are created.

  Pair with `resource_tracker_scope` to collect every resource constructed
  within a block of code.
  """

  __slots__ = ["_resources"]

  def __init__(self):
    self._resources = []

  @property
  def resources(self):
    """The list of resources recorded so far, oldest first."""
    return self._resources

  def add_resource(self, resource):
    """Appends `resource` to the recorded list."""
    self._resources.append(resource)
@tf_contextlib.contextmanager
def resource_tracker_scope(resource_tracker):
  """Pushes `resource_tracker` onto the active-tracker stack for a block.

  Use this in order to collect up all resources created within a block of
  code. Example usage:

  ```python
  resource_tracker = ResourceTracker()
  with resource_tracker_scope(resource_tracker):
    resource = TrackableResource()

  assert resource_tracker.resources == [resource]
  ```

  Args:
    resource_tracker: The passed in ResourceTracker object

  Yields:
    A scope in which the resource_tracker is active.
  """
  global _RESOURCE_TRACKER_STACK
  saved_stack = list(_RESOURCE_TRACKER_STACK)
  _RESOURCE_TRACKER_STACK.append(resource_tracker)
  try:
    yield
  finally:
    # Restore the snapshot taken on entry. This rebinds the module-level name
    # rather than popping, so any nested pushes left behind are discarded.
    _RESOURCE_TRACKER_STACK = saved_stack
def _make_getter(captured_getter, captured_previous):
"""To avoid capturing loop variables."""
def getter(*args, **kwargs):
return captured_getter(captured_previous, *args, **kwargs)
return getter
class _ResourceMetaclass(type):
  """Metaclass for CapturableResource.

  Routes instantiation through the default graph's resource-creator stack so
  registered creator callbacks can intercept or wrap construction.
  """

  def __call__(cls, *args, **kwargs):
    # Innermost creator: plain construction, equivalent to type.__call__.
    def default_resource_creator(next_creator, *a, **kw):
      assert next_creator is None
      obj = cls.__new__(cls, *a, **kw)
      obj.__init__(*a, **kw)
      return obj

    previous_getter = lambda *a, **kw: default_resource_creator(None, *a, **kw)
    resource_creator_stack = ops.get_default_graph()._resource_creator_stack
    # Each creator in the stack wraps the chain built so far, so entries later
    # in the stack end up outermost.
    for getter in resource_creator_stack[cls._resource_type()]:
      previous_getter = _make_getter(getter, previous_getter)
    return previous_getter(*args, **kwargs)
class CapturableResource(six.with_metaclass(_ResourceMetaclass,
                                            base.Trackable)):
  """Holds a Tensor which a tf.function can capture.

  `CapturableResource`s are discovered by traversing the graph of object
  attributes, e.g. during `tf.saved_model.save`. They are excluded from the
  scope-based tracking of `TrackableResource`; generally things that require
  initialization should inherit from `TrackableResource` instead of
  `CapturableResource` directly.
  """

  def __init__(self, device=""):
    """Initialize the `CapturableResource`.

    Args:
      device: A string indicating a required placement for this resource,
        e.g. "CPU" if this resource must be created on a CPU device. A blank
        device allows the user to place resource creation, so generally this
        should be blank unless the resource only makes sense on one device.
    """
    # Lazily populated by the `resource_handle` property.
    self._resource_handle_value = None
    self._resource_device = device
    # Entered during __del__ so destruction runs in the same eager/graph
    # context that was active at construction time.
    self._self_destruction_context = (
        context.eager_mode if context.executing_eagerly()
        else ops.get_default_graph().as_default)

  @classmethod
  def _resource_type(cls):
    # Key used to look up creator callbacks in the graph's
    # `_resource_creator_stack` (see `_ResourceMetaclass`).
    return cls.__name__

  @property
  def _destruction_context(self):
    # getattr with a fallback: presumably guards against destruction of a
    # partially-constructed object whose __init__ never ran — confirm.
    return getattr(self, "_self_destruction_context",
                   # no-op context
                   contextlib.suppress)

  @_destruction_context.setter
  def _destruction_context(self, destruction_context):
    self._self_destruction_context = destruction_context

  def _create_resource(self):
    """A function that creates a resource handle. Subclasses must override."""
    raise NotImplementedError("TrackableResource._create_resource not "
                              "implemented.")

  @property
  def _resource_handle(self):
    return self._resource_handle_value

  @_resource_handle.setter
  def _resource_handle(self, value):
    if isinstance(value, (ops.Tensor, ops.EagerTensor)):
      # Weak back-reference from the handle tensor to its owning trackable;
      # weakref avoids creating a reference cycle.
      value._parent_trackable = weakref.ref(self)  # pylint: disable=protected-access
    self._resource_handle_value = value

  def _initialize(self):
    """A function that initializes the resource. Optional."""
    pass

  def _destroy_resource(self):
    """A function that destroys the resource. Optional."""
    pass

  @property
  def resource_handle(self):
    """Returns the resource handle associated with this Resource."""
    if self._resource_handle is None:
      # Created on first access, on the device requested at construction.
      with ops.device(self._resource_device):
        self._resource_handle = self._create_resource()
    return self._resource_handle

  def _map_resources(self, _):
    """For implementing `Trackable`."""
    # Shallow-copies self and gives the copy its own freshly created resource,
    # returning maps from original object/handle to the new ones.
    new_obj = copy.copy(self)
    # pylint: disable=protected-access
    with ops.device(self._resource_device):
      new_resource = new_obj._create_resource()
    new_obj._resource_handle = new_resource
    # pylint: enable=protected-access
    obj_map = {self: new_obj}
    resource_map = {self.resource_handle: new_resource}
    return obj_map, resource_map

  def _trackable_children(self, save_type, **kwargs):
    """Adds create/initialize/destroy tf.functions when saving a SavedModel."""
    children = super()._trackable_children(save_type, **kwargs)
    if save_type == "savedmodel":
      @def_function.function(input_signature=[], autograph=False)
      def _creator():
        resource = self._create_resource()
        return resource

      @def_function.function(input_signature=[], autograph=False)
      def _initializer():
        self._initialize()
        return 1  # Dummy return

      @def_function.function(input_signature=[], autograph=False)
      def _destroyer():
        self._destroy_resource()
        return 1  # Dummy return

      children.update({
          "_create_resource": _creator,
          "_initialize": _initializer,
          "_destroy_resource": _destroyer,
      })
    return children

  def __del__(self):
    try:
      # Outer race condition: on program exit, the destruction context may be
      # deleted before this __del__ is called. At this point we can safely
      # exit without calling _destroy_resource() and let Python handle things.
      with self._destruction_context():
        # Inner race condition: possible between this and `ScopedTFFunction`
        # whereby if an entire garbage collection chain containing both
        # objects is moved to unreachable during the same garbage collection
        # cycle, the __del__ for `ScopedTFFunction` can be collected before
        # this method is called. In that case, we can't do much but
        # continue.
        self._destroy_resource()
    except Exception:  # pylint: disable=broad-except
      # Silence all error logs that occur when attempting to destroy this
      # resource.
      pass
@tf_export("saved_model.experimental.TrackableResource")
class TrackableResource(CapturableResource):
  """Holds a Tensor which a tf.function can capture.

  A TrackableResource is most useful for stateful Tensors that require
  initialization, such as `tf.lookup.StaticHashTable`. `TrackableResource`s
  are discovered by traversing the graph of object attributes, e.g. during
  `tf.saved_model.save`.

  A TrackableResource has three methods to override:

  * `_create_resource` should create the resource tensor handle.
  * `_initialize` should initialize the resource held at `self.resource_handle`.
  * `_destroy_resource` is called upon a `TrackableResource`'s destruction
    and should decrement the resource's ref count. For most resources, this
    should be done with a call to `tf.raw_ops.DestroyResourceOp`.

  Example usage:

  >>> class DemoResource(tf.saved_model.experimental.TrackableResource):
  ...   def __init__(self):
  ...     super().__init__()
  ...     self._initialize()
  ...   def _create_resource(self):
  ...     return tf.raw_ops.VarHandleOp(dtype=tf.float32, shape=[2])
  ...   def _initialize(self):
  ...     tf.raw_ops.AssignVariableOp(
  ...         resource=self.resource_handle, value=tf.ones([2]))
  ...   def _destroy_resource(self):
  ...     tf.raw_ops.DestroyResourceOp(resource=self.resource_handle)
  >>> class DemoModule(tf.Module):
  ...   def __init__(self):
  ...     self.resource = DemoResource()
  ...   def increment(self, tensor):
  ...     return tensor + tf.raw_ops.ReadVariableOp(
  ...         resource=self.resource.resource_handle, dtype=tf.float32)
  >>> demo = DemoModule()
  >>> demo.increment([5, 1])
  <tf.Tensor: shape=(2,), dtype=float32, numpy=array([6., 2.], dtype=float32)>
  """

  def __init__(self, device=""):
    """Initialize the `TrackableResource`.

    Args:
      device: A string indicating a required placement for this resource,
        e.g. "CPU" if this resource must be created on a CPU device. A blank
        device allows the user to place resource creation, so generally this
        should be blank unless the resource only makes sense on one device.
    """
    global _RESOURCE_TRACKER_STACK
    # Register with every active tracker so `resource_tracker_scope` callers
    # observe this resource's creation (outer scopes included).
    for resource_tracker in _RESOURCE_TRACKER_STACK:
      resource_tracker.add_resource(self)
    super(TrackableResource, self).__init__(device=device)
# TODO(b/124205571,b/124092991): Solve destruction of resources.
class RestoredResource(TrackableResource):
  """Restored SavedResource."""

  def __init__(self, device=""):
    super(RestoredResource, self).__init__(device=device)

  @classmethod
  def _deserialize_from_proto(cls, object_proto, dependencies, **unused_kwargs):
    """Recreates the resource from its SavedModel proto and dependencies.

    Args:
      object_proto: SavedObject proto; `object_proto.resource.device` supplies
        the placement for the restored resource.
      dependencies: Mapping of child names to restored objects; the
        "_create_resource" entry, when present, is the saved creator function.

    Returns:
      The restored `RestoredResource` instance.
    """
    obj = cls(device=object_proto.resource.device)
    resource_creator = dependencies.get("_create_resource")
    if resource_creator is not None:
      # Replace the abstract creator with the function saved alongside the
      # resource so `resource_handle` can rebuild it after loading.
      obj._create_resource = resource_creator  # pylint: disable=protected-access
    return obj

  def _add_trackable_child(self, name, value):
    setattr(self, name, value)
    if (isinstance(value, base.Trackable) and
        not isinstance(value, def_function.Function)):
      # NOTE(review): Function-valued children are deliberately excluded from
      # explicit tracking here — presumably tracked elsewhere; confirm.
      self._track_trackable(value, name)
| 34.887097 | 85 | 0.713269 |
import contextlib
import copy
import weakref
import six
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import ops
from tensorflow.python.training.tracking import base
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
_RESOURCE_TRACKER_STACK = []
class ResourceTracker(object):
__slots__ = ["_resources"]
def __init__(self):
self._resources = []
@property
def resources(self):
return self._resources
def add_resource(self, resource):
self._resources.append(resource)
@tf_contextlib.contextmanager
def resource_tracker_scope(resource_tracker):
global _RESOURCE_TRACKER_STACK
old = list(_RESOURCE_TRACKER_STACK)
_RESOURCE_TRACKER_STACK.append(resource_tracker)
try:
yield
finally:
_RESOURCE_TRACKER_STACK = old
def _make_getter(captured_getter, captured_previous):
def getter(*args, **kwargs):
return captured_getter(captured_previous, *args, **kwargs)
return getter
class _ResourceMetaclass(type):
def __call__(cls, *args, **kwargs):
def default_resource_creator(next_creator, *a, **kw):
assert next_creator is None
obj = cls.__new__(cls, *a, **kw)
obj.__init__(*a, **kw)
return obj
previous_getter = lambda *a, **kw: default_resource_creator(None, *a, **kw)
resource_creator_stack = ops.get_default_graph()._resource_creator_stack
for getter in resource_creator_stack[cls._resource_type()]:
previous_getter = _make_getter(getter, previous_getter)
return previous_getter(*args, **kwargs)
class CapturableResource(six.with_metaclass(_ResourceMetaclass,
base.Trackable)):
def __init__(self, device=""):
self._resource_handle_value = None
self._resource_device = device
self._self_destruction_context = (
context.eager_mode if context.executing_eagerly()
else ops.get_default_graph().as_default)
@classmethod
def _resource_type(cls):
return cls.__name__
@property
def _destruction_context(self):
return getattr(self, "_self_destruction_context",
contextlib.suppress)
@_destruction_context.setter
def _destruction_context(self, destruction_context):
self._self_destruction_context = destruction_context
def _create_resource(self):
raise NotImplementedError("TrackableResource._create_resource not "
"implemented.")
@property
def _resource_handle(self):
return self._resource_handle_value
@_resource_handle.setter
def _resource_handle(self, value):
if isinstance(value, (ops.Tensor, ops.EagerTensor)):
value._parent_trackable = weakref.ref(self)
self._resource_handle_value = value
def _initialize(self):
pass
def _destroy_resource(self):
pass
@property
def resource_handle(self):
if self._resource_handle is None:
with ops.device(self._resource_device):
self._resource_handle = self._create_resource()
return self._resource_handle
def _map_resources(self, _):
new_obj = copy.copy(self)
with ops.device(self._resource_device):
new_resource = new_obj._create_resource()
new_obj._resource_handle = new_resource
obj_map = {self: new_obj}
resource_map = {self.resource_handle: new_resource}
return obj_map, resource_map
def _trackable_children(self, save_type, **kwargs):
children = super()._trackable_children(save_type, **kwargs)
if save_type == "savedmodel":
@def_function.function(input_signature=[], autograph=False)
def _creator():
resource = self._create_resource()
return resource
@def_function.function(input_signature=[], autograph=False)
def _initializer():
self._initialize()
return 1
@def_function.function(input_signature=[], autograph=False)
def _destroyer():
self._destroy_resource()
return 1
children.update({
"_create_resource": _creator,
"_initialize": _initializer,
"_destroy_resource": _destroyer,
})
return children
def __del__(self):
try:
with self._destruction_context():
# continue.
self._destroy_resource()
except Exception: # pylint: disable=broad-except
# Silence all error logs that occur when attempting to destroy this
# resource.
pass
@tf_export("saved_model.experimental.TrackableResource")
class TrackableResource(CapturableResource):
def __init__(self, device=""):
global _RESOURCE_TRACKER_STACK
for resource_tracker in _RESOURCE_TRACKER_STACK:
resource_tracker.add_resource(self)
super(TrackableResource, self).__init__(device=device)
# TODO(b/124205571,b/124092991): Solve destruction of resources.
class RestoredResource(TrackableResource):
def __init__(self, device=""):
super(RestoredResource, self).__init__(device=device)
@classmethod
def _deserialize_from_proto(cls, object_proto, dependencies, **unused_kwargs):
obj = cls(device=object_proto.resource.device)
resource_creator = dependencies.get("_create_resource")
if resource_creator is not None:
obj._create_resource = resource_creator # pylint: disable=protected-access
return obj
def _add_trackable_child(self, name, value):
setattr(self, name, value)
if (isinstance(value, base.Trackable) and
not isinstance(value, def_function.Function)):
self._track_trackable(value, name)
| true | true |
f72abbd3ee1fb7ddc9a51049416b8d1194ab3660 | 9,235 | py | Python | remove_code/sotas/SSAH-adversarial-attack-main/utils/fid_score.py | JohnZhang000/adaptive-jpeg-compression | f54e4798c01169812958f4d5539a03927dbdc313 | [
"MIT"
] | 9 | 2022-03-15T02:59:32.000Z | 2022-03-26T09:16:44.000Z | remove_code/sotas/SSAH-adversarial-attack-main/utils/fid_score.py | JohnZhang000/adaptive-jpeg-compression | f54e4798c01169812958f4d5539a03927dbdc313 | [
"MIT"
] | 1 | 2022-03-30T02:59:55.000Z | 2022-03-30T02:59:55.000Z | remove_code/sotas/SSAH-adversarial-attack-main/utils/fid_score.py | JohnZhang000/adaptive-jpeg-compression | f54e4798c01169812958f4d5539a03927dbdc313 | [
"MIT"
] | 1 | 2022-03-20T12:19:26.000Z | 2022-03-20T12:19:26.000Z | """Calculates the Frechet Inception Distance (FID) to evalulate GANs
The FID metric calculates the distance between two distributions of images.
Typically, we have summary statistics (mean & covariance matrix) of one
of these distributions, while the 2nd distribution is given by a GAN.
When run as a stand-alone program, it compares the distribution of
images that are stored as PNG/JPEG at a specified location with a
distribution given by summary statistics (in pickle format).
The FID is calculated by assuming that X_1 and X_2 are the activations of
the pool_3 layer of the inception net for generated samples and real world
samples respectively.
See --help to see further details.
Code apapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead
of Tensorflow
Copyright 2018 Institute of Bioinformatics, JKU Linz
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import pathlib
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from multiprocessing import cpu_count
import numpy as np
import torch
import torchvision.transforms as TF
from PIL import Image
from scipy import linalg
from torch.nn.functional import adaptive_avg_pool2d
try:
from tqdm import tqdm
except ImportError:
# If tqdm is not available, provide a mock version of it
def tqdm(x):
return x
from utils.inception import InceptionV3
# NOTE(review): debug print executed at import time — consider removing or
# routing through logging.
print(InceptionV3.BLOCK_INDEX_BY_DIM)
# File extensions treated as images when scanning a directory for FID inputs.
IMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',
                    'tif', 'tiff', 'webp'}
class ImagePathDataset(torch.utils.data.Dataset):
    """Dataset that loads images from disk, given their file paths.

    Each item is the RGB image at ``files[i]``, passed through ``transforms``
    when one is provided.
    """

    def __init__(self, files, transforms=None):
        self.files = files
        self.transforms = transforms

    def __len__(self):
        return len(self.files)

    def __getitem__(self, i):
        img = Image.open(self.files[i]).convert('RGB')
        if self.transforms is None:
            return img
        return self.transforms(img)
def get_activations(files, model, batch_size=50, dims=2048, device='cuda'):
    """Calculates the activations of the pool_3 layer for all images.

    Params:
    -- files       : List of image files paths
    -- model       : Instance of inception model
    -- batch_size  : Batch size of images for the model to process at once.
                     Clamped down to len(files) when larger; the dataloader
                     uses drop_last=False, so every sample is processed.
    -- dims        : Dimensionality of features returned by Inception
    -- device      : Device to run calculations

    Returns:
    -- A numpy array of dimension (num images, dims) that contains the
       activations of the given tensor when feeding inception with the
       query tensor.
    """
    model.eval()
    # NOTE(review): debug print — consider removing or routing to logging.
    print(len(files), batch_size)
    if batch_size > len(files):
        print(('Warning: batch size is bigger than the data size. '
               'Setting batch size to data size'))
        batch_size = len(files)
    dataset = ImagePathDataset(files, transforms=TF.ToTensor())
    dataloader = torch.utils.data.DataLoader(dataset,
                                             batch_size=batch_size,
                                             shuffle=False,
                                             drop_last=False,
                                             num_workers=cpu_count())
    # Activations are written batch-by-batch into a preallocated array.
    pred_arr = np.empty((len(files), dims))
    start_idx = 0
    for batch in tqdm(dataloader):
        batch = batch.to(device)
        with torch.no_grad():
            pred = model(batch)[0]
        # If model output is not scalar, apply global spatial average pooling.
        # This happens if you choose a dimensionality not equal 2048.
        if pred.size(2) != 1 or pred.size(3) != 1:
            pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
        pred = pred.squeeze(3).squeeze(2).cpu().numpy()
        pred_arr[start_idx:start_idx + pred.shape[0]] = pred
        start_idx = start_idx + pred.shape[0]
    return pred_arr
def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
    """Numpy implementation of the Frechet Distance.

    The Frechet distance between two multivariate Gaussians
    X_1 ~ N(mu_1, C_1) and X_2 ~ N(mu_2, C_2) is

        d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).

    Stable version by Dougal J. Sutherland.

    Params:
    -- mu1   : Activation mean for the first (e.g. generated) distribution.
    -- sigma1: Activation covariance for the first distribution.
    -- mu2   : Activation mean for the second (reference) distribution.
    -- sigma2: Activation covariance for the second distribution.
    -- eps   : Diagonal jitter added when the covariance product is singular.

    Returns:
    --       : The Frechet Distance.
    """
    mu1, mu2 = np.atleast_1d(mu1), np.atleast_1d(mu2)
    sigma1, sigma2 = np.atleast_2d(sigma1), np.atleast_2d(sigma2)

    assert mu1.shape == mu2.shape, \
        'Training and test mean vectors have different lengths'
    assert sigma1.shape == sigma2.shape, \
        'Training and test covariances have different dimensions'

    mean_diff = mu1 - mu2

    # Matrix square root of the covariance product; may be singular.
    sqrt_prod, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
    if not np.isfinite(sqrt_prod).all():
        print(('fid calculation produces singular product; '
               'adding %s to diagonal of cov estimates') % eps)
        jitter = np.eye(sigma1.shape[0]) * eps
        sqrt_prod = linalg.sqrtm((sigma1 + jitter).dot(sigma2 + jitter))

    # Numerical error can introduce a tiny imaginary component.
    if np.iscomplexobj(sqrt_prod):
        if not np.allclose(np.diagonal(sqrt_prod).imag, 0, atol=1e-3):
            m = np.max(np.abs(sqrt_prod.imag))
            raise ValueError('Imaginary component {}'.format(m))
        sqrt_prod = sqrt_prod.real

    return (mean_diff.dot(mean_diff) + np.trace(sigma1) + np.trace(sigma2)
            - 2 * np.trace(sqrt_prod))
def calculate_activation_statistics(files, model, batch_size=50, dims=2048,
                                    device='cuda'):
    """Computes the mean and covariance of Inception pool_3 activations.

    Params:
    -- files       : List of image files paths
    -- model       : Instance of inception model
    -- batch_size  : Batch size used when running the model; a reasonable
                     value depends on the hardware.
    -- dims        : Dimensionality of features returned by Inception
    -- device      : Device to run calculations

    Returns:
    -- mu    : The mean over samples of the activations of the pool_3 layer.
    -- sigma : The covariance matrix of those activations.
    """
    features = get_activations(files, model, batch_size, dims, device)
    return features.mean(axis=0), np.cov(features, rowvar=False)
def compute_statistics_of_path(path, model, batch_size, dims, device):
    """Returns (mu, sigma) for `path`.

    A path ending in `.npz` is treated as precomputed statistics; any other
    path is scanned (non-recursively) for image files, whose statistics are
    computed with the model.
    """
    if path.endswith('.npz'):
        with np.load(path) as f:
            return f['mu'][:], f['sigma'][:]

    image_dir = pathlib.Path(path)
    files = sorted(image_file
                   for ext in IMAGE_EXTENSIONS
                   for image_file in image_dir.glob('*.{}'.format(ext)))
    return calculate_activation_statistics(files, model, batch_size,
                                           dims, device)
def calculate_fid_given_paths(paths, batch_size, device, dims):
    """Calculates the FID between two paths (image dirs or .npz stat files)."""
    print('paths is :', paths)
    for path in paths:
        if not os.path.exists(path):
            raise RuntimeError('Invalid path: %s' % path)

    model = InceptionV3([InceptionV3.BLOCK_INDEX_BY_DIM[dims]]).to(device)

    m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,
                                        dims, device)
    m2, s2 = compute_statistics_of_path(paths[1], model, batch_size,
                                        dims, device)
    return calculate_frechet_distance(m1, s1, m2, s2)
def return_fid(path1, path2):
    """Computes the FID between two image directories (or .npz stat files),
    using CUDA when available."""
    use_cuda = torch.cuda.is_available()
    device = torch.device('cuda' if use_cuda else 'cpu')
    return calculate_fid_given_paths(paths=[path1, path2],
                                     batch_size=50,
                                     device=device,
                                     dims=2048)
| 35.794574 | 78 | 0.636492 | import os
import pathlib
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from multiprocessing import cpu_count
import numpy as np
import torch
import torchvision.transforms as TF
from PIL import Image
from scipy import linalg
from torch.nn.functional import adaptive_avg_pool2d
try:
from tqdm import tqdm
except ImportError:
def tqdm(x):
return x
from utils.inception import InceptionV3
print(InceptionV3.BLOCK_INDEX_BY_DIM)
IMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',
'tif', 'tiff', 'webp'}
class ImagePathDataset(torch.utils.data.Dataset):
def __init__(self, files, transforms=None):
self.files = files
self.transforms = transforms
def __len__(self):
return len(self.files)
def __getitem__(self, i):
path = self.files[i]
img = Image.open(path).convert('RGB')
if self.transforms is not None:
img = self.transforms(img)
return img
def get_activations(files, model, batch_size=50, dims=2048, device='cuda'):
model.eval()
print(len(files), batch_size)
if batch_size > len(files):
print(('Warning: batch size is bigger than the data size. '
'Setting batch size to data size'))
batch_size = len(files)
dataset = ImagePathDataset(files, transforms=TF.ToTensor())
dataloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False,
drop_last=False,
num_workers=cpu_count())
pred_arr = np.empty((len(files), dims))
start_idx = 0
for batch in tqdm(dataloader):
batch = batch.to(device)
with torch.no_grad():
pred = model(batch)[0]
if pred.size(2) != 1 or pred.size(3) != 1:
pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
pred = pred.squeeze(3).squeeze(2).cpu().numpy()
pred_arr[start_idx:start_idx + pred.shape[0]] = pred
start_idx = start_idx + pred.shape[0]
return pred_arr
def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
mu1 = np.atleast_1d(mu1)
mu2 = np.atleast_1d(mu2)
sigma1 = np.atleast_2d(sigma1)
sigma2 = np.atleast_2d(sigma2)
assert mu1.shape == mu2.shape, \
'Training and test mean vectors have different lengths'
assert sigma1.shape == sigma2.shape, \
'Training and test covariances have different dimensions'
diff = mu1 - mu2
covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
if not np.isfinite(covmean).all():
msg = ('fid calculation produces singular product; '
'adding %s to diagonal of cov estimates') % eps
print(msg)
offset = np.eye(sigma1.shape[0]) * eps
covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
if np.iscomplexobj(covmean):
if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
m = np.max(np.abs(covmean.imag))
raise ValueError('Imaginary component {}'.format(m))
covmean = covmean.real
tr_covmean = np.trace(covmean)
return (diff.dot(diff) + np.trace(sigma1)
+ np.trace(sigma2) - 2 * tr_covmean)
def calculate_activation_statistics(files, model, batch_size=50, dims=2048,
device='cuda'):
act = get_activations(files, model, batch_size, dims, device)
mu = np.mean(act, axis=0)
sigma = np.cov(act, rowvar=False)
return mu, sigma
def compute_statistics_of_path(path, model, batch_size, dims, device):
if path.endswith('.npz'):
with np.load(path) as f:
m, s = f['mu'][:], f['sigma'][:]
else:
path = pathlib.Path(path)
files = sorted([file for ext in IMAGE_EXTENSIONS
for file in path.glob('*.{}'.format(ext))])
m, s = calculate_activation_statistics(files, model, batch_size,
dims, device)
return m, s
def calculate_fid_given_paths(paths, batch_size, device, dims):
print('paths is :', paths)
for p in paths:
if not os.path.exists(p):
raise RuntimeError('Invalid path: %s' % p)
block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
model = InceptionV3([block_idx]).to(device)
m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,
dims, device)
m2, s2 = compute_statistics_of_path(paths[1], model, batch_size,
dims, device)
fid_value = calculate_frechet_distance(m1, s1, m2, s2)
return fid_value
def return_fid(path1, path2):
device = torch.device('cuda' if (torch.cuda.is_available()) else 'cpu')
fid_value = calculate_fid_given_paths(paths=[path1, path2],
batch_size=50,
device=device,
dims=2048)
return fid_value
| true | true |
f72abcf519c3d777dae73575160a3505946609c2 | 421 | py | Python | test/command_line/test_plot_Fo_vs_Fc.py | TiankunZhou/dials | bd5c95b73c442cceb1c61b1690fd4562acf4e337 | [
"BSD-3-Clause"
] | 58 | 2015-10-15T09:28:20.000Z | 2022-03-28T20:09:38.000Z | test/command_line/test_plot_Fo_vs_Fc.py | TiankunZhou/dials | bd5c95b73c442cceb1c61b1690fd4562acf4e337 | [
"BSD-3-Clause"
] | 1,741 | 2015-11-24T08:17:02.000Z | 2022-03-31T15:46:42.000Z | test/command_line/test_plot_Fo_vs_Fc.py | TiankunZhou/dials | bd5c95b73c442cceb1c61b1690fd4562acf4e337 | [
"BSD-3-Clause"
] | 45 | 2015-10-14T13:44:16.000Z | 2022-03-22T14:45:56.000Z | import procrunner
def test(dials_data, tmp_path):
    """Smoke test for `dials.plot_Fo_vs_Fc`: runs the tool on a refined MTZ
    and checks that the PDF plot is written and the reported value appears in
    stdout."""
    mtz_file = dials_data("lysozyme_electron_diffraction").join("refmac_final.mtz")
    result = procrunner.run(
        ["dials.plot_Fo_vs_Fc", "hklin=" + mtz_file.strpath], working_directory=tmp_path
    )
    # The command must succeed without writing anything to stderr.
    assert not result.returncode and not result.stderr
    assert tmp_path.joinpath("Fo_vs_Fc.pdf").is_file()
    # NOTE(review): golden value assumed to match this reference dataset —
    # confirm if the test data or the tool's output format changes.
    assert "|Fe| = 42.0" in result.stdout.decode()
| 35.083333 | 88 | 0.719715 | import procrunner
def test(dials_data, tmp_path):
mtz_file = dials_data("lysozyme_electron_diffraction").join("refmac_final.mtz")
result = procrunner.run(
["dials.plot_Fo_vs_Fc", "hklin=" + mtz_file.strpath], working_directory=tmp_path
)
assert not result.returncode and not result.stderr
assert tmp_path.joinpath("Fo_vs_Fc.pdf").is_file()
assert "|Fe| = 42.0" in result.stdout.decode()
| true | true |
f72ac027d54393cfbc8f4c4a085d814d8add6b01 | 99 | py | Python | algo228/shooter_game.py | voidwalker-so2/vasya228 | cf766ee40341aa46799a461a246fa1f8f24df0ec | [
"BSD-2-Clause"
] | null | null | null | algo228/shooter_game.py | voidwalker-so2/vasya228 | cf766ee40341aa46799a461a246fa1f8f24df0ec | [
"BSD-2-Clause"
] | null | null | null | algo228/shooter_game.py | voidwalker-so2/vasya228 | cf766ee40341aa46799a461a246fa1f8f24df0ec | [
"BSD-2-Clause"
] | null | null | null | #Создай собственный Шутер!
from pygame import *
dfgshfhsdljfvhs
ssdkgvkshdv
sdhvljsdhv
sljgvksjdg
| 12.375 | 26 | 0.848485 |
from pygame import *
dfgshfhsdljfvhs
ssdkgvkshdv
sdhvljsdhv
sljgvksjdg
| true | true |
f72ac04ea85e822cd8063706b8bc88973fb8d216 | 7,842 | py | Python | src/python/pants/backend/jvm/tasks/classpath_util.py | AllClearID/pants | c4fdf00a3bdf9f26f876e85c46909d0729f7132c | [
"Apache-2.0"
] | 1 | 2021-11-11T14:04:24.000Z | 2021-11-11T14:04:24.000Z | src/python/pants/backend/jvm/tasks/classpath_util.py | AllClearID/pants | c4fdf00a3bdf9f26f876e85c46909d0729f7132c | [
"Apache-2.0"
] | 2 | 2016-10-13T21:37:42.000Z | 2018-07-20T20:14:33.000Z | src/python/pants/backend/jvm/tasks/classpath_util.py | AllClearID/pants | c4fdf00a3bdf9f26f876e85c46909d0729f7132c | [
"Apache-2.0"
] | 1 | 2018-03-08T22:21:44.000Z | 2018-03-08T22:21:44.000Z | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import itertools
import os
from collections import OrderedDict
from twitter.common.collections import OrderedSet
from pants.util.contextutil import open_zip
from pants.util.dirutil import fast_relpath, safe_walk
from pants.util.strutil import ensure_text
class ClasspathUtil(object):
  """Utility methods for constructing JVM classpaths from classpath products."""

  @classmethod
  def compute_classpath(cls, targets, classpath_products, extra_classpath_tuples, confs):
    """Return the list of classpath entries for a classpath covering the passed targets.

    Filters and adds paths from extra_classpath_tuples to the end of the resulting list.

    :param targets: The targets to generate a classpath for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param extra_classpath_tuples: Additional classpath entries as (conf, path) tuples.
    :param confs: The list of confs for use by this classpath.
    :returns: The classpath as a list of path elements.
    :rtype: list of string
    """
    classpath_iter = cls._classpath_iter(targets, classpath_products, confs=confs)
    total_classpath = OrderedSet(classpath_iter)
    filtered_extra_classpath_iter = cls._filtered_classpath_by_confs_iter(extra_classpath_tuples,
                                                                          confs)
    extra_classpath_iter = cls._entries_iter(filtered_extra_classpath_iter)
    total_classpath.update(extra_classpath_iter)
    return list(total_classpath)

  @classmethod
  def classpath(cls, targets, classpath_products, confs=('default',)):
    """Return the classpath as a list of paths covering all the passed targets.

    :param targets: Targets to build an aggregated classpath for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param confs: The list of confs for use by this classpath.
    :returns: The classpath as a list of path elements.
    :rtype: list of string
    """
    classpath_iter = cls._classpath_iter(targets, classpath_products, confs=confs)
    return list(classpath_iter)

  @classmethod
  def _classpath_iter(cls, targets, classpath_products, confs=('default',)):
    classpath_tuples = classpath_products.get_for_targets(targets)
    filtered_tuples_iter = cls._filtered_classpath_by_confs_iter(classpath_tuples, confs)
    return cls._entries_iter(filtered_tuples_iter)

  @classmethod
  def internal_classpath(cls, targets, classpath_products, confs=('default',)):
    """Return the list of internal classpath entries for a classpath covering all `targets`.

    Any classpath entries contributed by external dependencies will be omitted.

    :param targets: Targets to build an aggregated classpath for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param confs: The list of confs for use by this classpath.
    :returns: The classpath as a list of path elements.
    :rtype: list of string
    """
    classpath_tuples = classpath_products.get_internal_classpath_entries_for_targets(targets)
    filtered_tuples_iter = cls._filtered_classpath_by_confs_iter(classpath_tuples, confs)
    return [entry.path for entry in cls._entries_iter(filtered_tuples_iter)]

  @classmethod
  def classpath_by_targets(cls, targets, classpath_products, confs=('default',)):
    """Return classpath entries grouped by their targets for the given `targets`.

    :param targets: The targets to lookup classpath products for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param confs: The list of confs for use by this classpath.
    :returns: The ordered (target, classpath) mappings.
    :rtype: OrderedDict
    """
    classpath_target_tuples = classpath_products.get_product_target_mappings_for_targets(targets)
    # The builtin `filter` (rather than the Python 2-only `itertools.ifilter`) keeps this
    # code working on both Python 2 and Python 3.
    filtered_items_iter = filter(cls._accept_conf_filter(confs, lambda x: x[0][0]),
                                 classpath_target_tuples)

    # Group (classpath_entry, target) tuples by target, preserving discovery order.
    target_to_classpath = OrderedDict()
    for classpath_entry, target in filtered_items_iter:
      _, entry = classpath_entry
      # setdefault avoids the double lookup of a `target not in` membership test.
      target_to_classpath.setdefault(target, []).append(entry)
    return target_to_classpath

  @classmethod
  def _accept_conf_filter(cls, confs, unpack_func=None):
    # Returns a predicate accepting any item whose conf (extracted by unpack_func)
    # is in `confs`; a `confs` of None accepts everything.
    def accept_conf_in_item(item):
      conf = unpack_func(item)
      return confs is None or conf in confs

    unpack_func = unpack_func or (lambda x: x)
    return accept_conf_in_item

  @classmethod
  def _filtered_classpath_by_confs_iter(cls, classpath_tuples, confs):
    filter_func = cls._accept_conf_filter(confs, unpack_func=lambda x: x[0])
    # Builtin `filter` works on both Python 2 (list) and Python 3 (iterator); callers
    # only ever iterate the result.
    return filter(filter_func, classpath_tuples)

  @classmethod
  def _entries_iter(cls, classpath):
    # Drop the conf component of each (conf, entry) tuple.
    for conf, entry in classpath:
      yield entry

  @classmethod
  def classpath_contents(cls, targets, classpath_products, confs=('default',)):
    """Provide a generator over the contents (classes/resources) of a classpath.

    :param targets: Targets to iterate the contents classpath for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param confs: The list of confs for use by this classpath.
    :returns: An iterator over all classpath contents, one directory, class or resource relative
              path per iteration step.
    :rtype: :class:`collections.Iterator` of string
    """
    classpath_iter = cls._classpath_iter(targets, classpath_products, confs=confs)
    for f in cls.classpath_entries_contents(classpath_iter):
      yield f

  @classmethod
  def classpath_entries_contents(cls, classpath_entries):
    """Provide a generator over the contents (classes/resources) of a classpath.

    Subdirectories are included and differentiated via a trailing forward slash (for symmetry
    across ZipFile.namelist and directory walks).

    :param classpath_entries: A sequence of classpath_entries. Non-jars/dirs are ignored.
    :returns: An iterator over all classpath contents, one directory, class or resource relative
              path per iteration step.
    :rtype: :class:`collections.Iterator` of string
    """
    for entry in classpath_entries:
      if cls.is_jar(entry):
        # Walk the jar namelist.
        with open_zip(entry, mode='r') as jar:
          for name in jar.namelist():
            yield ensure_text(name)
      elif os.path.isdir(entry):
        # Walk the directory, including subdirs.
        def rel_walk_name(abs_sub_dir, name):
          return fast_relpath(os.path.join(abs_sub_dir, name), entry)
        for abs_sub_dir, dirnames, filenames in safe_walk(entry):
          for name in dirnames:
            yield '{}/'.format(rel_walk_name(abs_sub_dir, name))
          for name in filenames:
            yield rel_walk_name(abs_sub_dir, name)
      else:
        # Non-jar and non-directory classpath entries should be ignored.
        pass

  @classmethod
  def classname_for_rel_classfile(cls, class_file_name):
    """Return the class name for the given relative-to-a-classpath-entry file, or None."""
    if not class_file_name.endswith('.class'):
      return None
    return class_file_name[:-len('.class')].replace('/', '.')

  @classmethod
  def is_jar(cls, path):
    """True if the given path represents an existing jar or zip file."""
    return path.endswith(('.jar', '.zip')) and os.path.isfile(path)

  @classmethod
  def is_dir(cls, path):
    """True if the given path represents an existing directory."""
    return os.path.isdir(path)
| 43.087912 | 97 | 0.733614 |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import itertools
import os
from collections import OrderedDict
from twitter.common.collections import OrderedSet
from pants.util.contextutil import open_zip
from pants.util.dirutil import fast_relpath, safe_walk
from pants.util.strutil import ensure_text
class ClasspathUtil(object):
  """Utility methods for constructing JVM classpaths from classpath products."""
  @classmethod
  def compute_classpath(cls, targets, classpath_products, extra_classpath_tuples, confs):
    """Return the list of classpath entries for a classpath covering the passed targets.

    Filters and adds paths from extra_classpath_tuples to the end of the resulting list.
    """
    classpath_iter = cls._classpath_iter(targets, classpath_products, confs=confs)
    total_classpath = OrderedSet(classpath_iter)
    filtered_extra_classpath_iter = cls._filtered_classpath_by_confs_iter(extra_classpath_tuples,
                                                                          confs)
    extra_classpath_iter = cls._entries_iter(filtered_extra_classpath_iter)
    total_classpath.update(extra_classpath_iter)
    return list(total_classpath)
  @classmethod
  def classpath(cls, targets, classpath_products, confs=('default',)):
    """Return the classpath as a list of paths covering all the passed targets."""
    classpath_iter = cls._classpath_iter(targets, classpath_products, confs=confs)
    return list(classpath_iter)
  @classmethod
  def _classpath_iter(cls, targets, classpath_products, confs=('default',)):
    # Conf-filtered iterator over the bare entries for `targets`.
    classpath_tuples = classpath_products.get_for_targets(targets)
    filtered_tuples_iter = cls._filtered_classpath_by_confs_iter(classpath_tuples, confs)
    return cls._entries_iter(filtered_tuples_iter)
  @classmethod
  def internal_classpath(cls, targets, classpath_products, confs=('default',)):
    """Return the list of internal classpath entries for a classpath covering all `targets`.

    Any classpath entries contributed by external dependencies will be omitted.
    """
    classpath_tuples = classpath_products.get_internal_classpath_entries_for_targets(targets)
    filtered_tuples_iter = cls._filtered_classpath_by_confs_iter(classpath_tuples, confs)
    return [entry.path for entry in cls._entries_iter(filtered_tuples_iter)]
  @classmethod
  def classpath_by_targets(cls, targets, classpath_products, confs=('default',)):
    """Return classpath entries grouped by their targets for the given `targets`.

    :returns: The ordered (target, classpath) mappings.
    :rtype: OrderedDict
    """
    # NOTE(review): itertools.ifilter is Python 2-only; the builtin `filter` would
    # work on both interpreters.
    classpath_target_tuples = classpath_products.get_product_target_mappings_for_targets(targets)
    filtered_items_iter = itertools.ifilter(cls._accept_conf_filter(confs, lambda x: x[0][0]),
                                            classpath_target_tuples)
    # Group (classpath_entry, target) tuples by target.
    target_to_classpath = OrderedDict()
    for classpath_entry, target in filtered_items_iter:
      _, entry = classpath_entry
      if not target in target_to_classpath:
        target_to_classpath[target] = []
      target_to_classpath[target].append(entry)
    return target_to_classpath
  @classmethod
  def _accept_conf_filter(cls, confs, unpack_func=None):
    # Returns a predicate accepting items whose conf is in `confs`; `confs` of
    # None accepts everything.
    def accept_conf_in_item(item):
      conf = unpack_func(item)
      return confs is None or conf in confs
    unpack_func = unpack_func or (lambda x: x)
    return accept_conf_in_item
  @classmethod
  def _filtered_classpath_by_confs_iter(cls, classpath_tuples, confs):
    filter_func = cls._accept_conf_filter(confs, unpack_func=lambda x: x[0])
    return itertools.ifilter(filter_func, classpath_tuples)
  @classmethod
  def _entries_iter(cls, classpath):
    # Drop the conf component of each (conf, entry) tuple.
    for conf, entry in classpath:
      yield entry
  @classmethod
  def classpath_contents(cls, targets, classpath_products, confs=('default',)):
    """Provide a generator over the contents (classes/resources) of a classpath."""
    classpath_iter = cls._classpath_iter(targets, classpath_products, confs=confs)
    for f in cls.classpath_entries_contents(classpath_iter):
      yield f
  @classmethod
  def classpath_entries_contents(cls, classpath_entries):
    """Provide a generator over the contents (classes/resources) of a classpath.

    Subdirectories are included and differentiated via a trailing forward slash (for
    symmetry across ZipFile.namelist and directory walks).  Non-jar/non-dir entries
    are ignored.
    """
    for entry in classpath_entries:
      if cls.is_jar(entry):
        # Walk the jar namelist.
        with open_zip(entry, mode='r') as jar:
          for name in jar.namelist():
            yield ensure_text(name)
      elif os.path.isdir(entry):
        # Walk the directory, including subdirs.
        def rel_walk_name(abs_sub_dir, name):
          return fast_relpath(os.path.join(abs_sub_dir, name), entry)
        for abs_sub_dir, dirnames, filenames in safe_walk(entry):
          for name in dirnames:
            yield '{}/'.format(rel_walk_name(abs_sub_dir, name))
          for name in filenames:
            yield rel_walk_name(abs_sub_dir, name)
      else:
        # Non-jar and non-directory classpath entries are ignored.
        pass
  @classmethod
  def classname_for_rel_classfile(cls, class_file_name):
    """Return the class name for the given relative-to-a-classpath-entry file, or None."""
    if not class_file_name.endswith('.class'):
      return None
    return class_file_name[:-len('.class')].replace('/', '.')
  @classmethod
  def is_jar(cls, path):
    """True if the given path represents an existing jar or zip file."""
    return path.endswith(('.jar', '.zip')) and os.path.isfile(path)
  @classmethod
  def is_dir(cls, path):
    """True if the given path represents an existing directory."""
    return os.path.isdir(path)
| true | true |
f72ac0bed67e590b7732695c441a21acb5828469 | 2,176 | py | Python | Slider_Trinkey/Hue_Brightness_Python_Code/Hue_Brightness_Python_code.py | albinger/Adafruit_Learning_System_Guides | 4fe2da261fe5d1ca282b86bd3b93ee1466346fa7 | [
"MIT"
] | null | null | null | Slider_Trinkey/Hue_Brightness_Python_Code/Hue_Brightness_Python_code.py | albinger/Adafruit_Learning_System_Guides | 4fe2da261fe5d1ca282b86bd3b93ee1466346fa7 | [
"MIT"
] | null | null | null | Slider_Trinkey/Hue_Brightness_Python_Code/Hue_Brightness_Python_code.py | albinger/Adafruit_Learning_System_Guides | 4fe2da261fe5d1ca282b86bd3b93ee1466346fa7 | [
"MIT"
] | null | null | null | # SPDX-FileCopyrightText: 2021 Kattni Rembor for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""
Slider Trinkey Hue Brightness Python Example
(Requires Hue and Monitor Brightness CircuitPython example to be running on the Slider Trinkey)
"""
import sys
from phue import Bridge
import serial
from serial.tools import list_ports
# Room, zone or individual lamp to control; edit to match your Hue setup.
LAMP_OR_GROUP_NAME = "Office"
# IP address of your Hue Bridge on the local network.
b = Bridge("0.0.0.0")

# Scan the serial ports for the Slider Trinkey (USB PID 0x8102).
slider_trinkey_port = None
for port_info in list_ports.comports(include_links=False):
    if port_info.pid is None:
        continue
    print("Port:", port_info.device, "-", hex(port_info.pid), end="\t")
    if port_info.pid == 0x8102:
        slider_trinkey_port = port_info
        print("Found Slider Trinkey!")
        trinkey = serial.Serial(port_info.device)
        break
else:
    print("Did not find Slider Trinkey port :(")
    sys.exit()

# If the app is not registered and the button on the Hue Bridge has not been
# pressed, press the button and call connect() (only needed a single time).
b.connect()
b.get_api()

is_group = False
light = None
# First, check whether the configured name matches a group.
for group_info in b.get_group().values():
    if group_info["name"] == LAMP_OR_GROUP_NAME:
        print("Found group with name", LAMP_OR_GROUP_NAME)
        is_group = True
# Otherwise, look the lamp up by name.
if not is_group:
    light = b.get_light_objects("name")[LAMP_OR_GROUP_NAME]
    print("Found light with name", LAMP_OR_GROUP_NAME)

current_brightness = None
while True:
    reading = trinkey.readline().decode("utf-8")
    if not reading.startswith("Slider: "):
        continue
    # Map the Trinkey's 0-100 slider output onto Hue's 0-254 brightness scale.
    brightness = int((float(reading.split(": ")[1]) / 100) * 254)
    # Only talk to the bridge when the value actually changed.
    if brightness != current_brightness:
        print("Setting brightness to:", brightness)
        if is_group:
            b.set_group(LAMP_OR_GROUP_NAME, {"bri": brightness})
        else:
            light.brightness = brightness
        current_brightness = brightness
| 31.085714 | 96 | 0.688879 |
import sys
from phue import Bridge
import serial
from serial.tools import list_ports
# Room, zone or individual lamp to control; edit to match your Hue setup.
LAMP_OR_GROUP_NAME = "Office"
# IP address of your Hue Bridge on the local network.
b = Bridge("0.0.0.0")
# Scan the serial ports for the Slider Trinkey (USB PID 0x8102).
slider_trinkey_port = None
ports = list_ports.comports(include_links=False)
for p in ports:
    if p.pid is not None:
        print("Port:", p.device, "-", hex(p.pid), end="\t")
        if p.pid == 0x8102:
            slider_trinkey_port = p
            print("Found Slider Trinkey!")
            trinkey = serial.Serial(p.device)
            break
else:
    # for/else: runs only when the loop finished without a break.
    print("Did not find Slider Trinkey port :(")
    sys.exit()
# If the app is not registered and the bridge button has not been pressed,
# press the button and call connect() (only needed a single time).
b.connect()
b.get_api()
is_group = False
light = None
# First, check whether the configured name matches a group.
for group_data in b.get_group().values():
    if group_data["name"] == LAMP_OR_GROUP_NAME:
        print("Found group with name", LAMP_OR_GROUP_NAME)
        is_group = True
# If it's not a group, find the lamp by name.
if not is_group:
    light_names = b.get_light_objects("name")
    light = light_names[LAMP_OR_GROUP_NAME]
    print("Found light with name", LAMP_OR_GROUP_NAME)
current_brightness = None
while True:
    x = trinkey.readline().decode("utf-8")
    if not x.startswith("Slider: "):
        continue
    # Convert the Slider Trinkey output value of 0-100 to 0-254.
    brightness_value = int((float(x.split(": ")[1]) / 100) * 254)
    # Only talk to the bridge when the value actually changed.
    if current_brightness is None or brightness_value != current_brightness:
        print("Setting brightness to:", brightness_value)
        if is_group:
            b.set_group(LAMP_OR_GROUP_NAME, {"bri": brightness_value})
        else:
            light.brightness = brightness_value
        current_brightness = brightness_value
| true | true |
f72ac1123188353e94ecd664682ea810ce628d26 | 2,748 | py | Python | mira/auth.py | Bl4ck4/mira-1 | 2b907c1a4c09585f0c68223e0435cc7414eab3c5 | [
"MIT"
] | null | null | null | mira/auth.py | Bl4ck4/mira-1 | 2b907c1a4c09585f0c68223e0435cc7414eab3c5 | [
"MIT"
] | null | null | null | mira/auth.py | Bl4ck4/mira-1 | 2b907c1a4c09585f0c68223e0435cc7414eab3c5 | [
"MIT"
] | 1 | 2021-10-02T10:36:21.000Z | 2021-10-02T10:36:21.000Z | """Mira 2020."""
import functools
import requests
from flask import (
Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from werkzeug.security import check_password_hash, generate_password_hash
# All auth views in this module hang off the /auth URL prefix.
BLUEPRINT = Blueprint('auth', __name__, url_prefix='/auth')
@BLUEPRINT.route('/login', methods = ['GET', 'POST'])
def login():
    """Render the login form and authenticate the user against the auth API.

    On a valid POST, forwards the credentials to the backend /login endpoint
    and, on success, stores the returned token pair in the session before
    redirecting to the index page.  Validation and API errors are re-rendered
    on the login template.
    """
    error = ""
    if request.method == 'POST':
        email = request.form['email']
        password = request.form['password']
        if not email:
            error = 'Email is required.'
        elif not password:
            error = 'Password is required.'
        # Only hit the auth service once local validation has passed.  The
        # previous code posted unconditionally and used `error is ""` (an
        # identity check on a literal), which let the server's error message
        # clobber the local validation error.
        if not error:
            data = {"email": email, "password": password}
            response = requests.post("http://localhost:5000/login", json=data)
            if response.json().get('status') == "success":
                data = response.json().get('data')
                session.clear()
                session['access_token'] = data.get('access_token')
                session['refresh_token'] = data.get('refresh_token')
                return redirect(url_for('index'))
            error = response.json().get('message')
    return render_template('auth/login.html', error=error)
@BLUEPRINT.route('/register', methods = ['GET', 'POST'])
def register():
    """Render the registration form and create the account via the auth API.

    Validates that username, email and password are present, forwards them to
    the backend /register endpoint, and redirects to the login view on
    success.  Validation and API errors are re-rendered on the template.
    """
    error = ""
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        email = request.form['email']
        if not username:
            error = 'Username is required.'
        elif not email:
            error = 'Email is required.'
        elif not password:
            error = 'Password is required.'
        # `error is ""` compared identity of a string literal (fragile and a
        # SyntaxWarning on Python 3.8+); truthiness is the correct check.
        if not error:
            data = {"username": username, "email": email, "password": password}
            response = requests.post("http://localhost:5000/register", json=data)
            if response.json().get("status") == "success":
                return redirect(url_for('auth.login'))
            error = response.json().get("message")
    return render_template('auth/register.html', error=error)
@BLUEPRINT.route('/forgot_password', methods = ['GET', 'POST'])
def forgot_password():
    """Render the password-reset form.

    NOTE(review): POST is accepted but no reset logic exists yet; the
    template is simply re-rendered.
    """
    return render_template('auth/forgot_password.html')
@BLUEPRINT.route('/logout')
def logout():
    """Destroy and clear the session of the logged-in user, then return to login."""
    session.clear()
    return redirect(url_for('auth.login'))
def login_required(view):
    """Decorator for views that require the user to be logged in.

    Anonymous users (``g.user`` is None) are redirected to the login page;
    otherwise the wrapped view runs normally.

    Fixes a NameError: the module imports ``functools`` but the decorator
    referenced a misspelled ``funtools``.
    """
    @functools.wraps(view)
    def wrapped_view(**kwargs):
        if g.user is None:
            return redirect(url_for('auth.login'))
        return view(**kwargs)
    return wrapped_view
| 31.953488 | 81 | 0.612445 | import functools
import requests
from flask import (
Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from werkzeug.security import check_password_hash, generate_password_hash
# All auth views in this module hang off the /auth URL prefix.
BLUEPRINT = Blueprint('auth', __name__, url_prefix='/auth')
@BLUEPRINT.route('/login', methods = ['GET', 'POST'])
def login():
    """Render the login form and authenticate the user against the auth API."""
    error = ""
    if request.method == 'POST':
        email = request.form['email']
        password = request.form['password']
        if not email:
            error = 'Email is required.'
        elif not password:
            error = 'Password is required.'
        # NOTE(review): the request below is sent even when validation above
        # failed, and `error is ""` relies on string interning — prefer
        # guarding with `if not error:` before posting.
        data = {"email": email, "password": password}
        response = requests.post("http://localhost:5000/login", json=data)
        if error is "" and response.json().get('status') == "success":
            # Store the returned token pair in a fresh session.
            data = response.json().get('data')
            session.clear()
            session['access_token'] = data.get('access_token')
            session['refresh_token'] = data.get('refresh_token')
            return redirect(url_for('index'))
        error = response.json().get('message')
    return render_template('auth/login.html', error=error)
@BLUEPRINT.route('/register', methods = ['GET', 'POST'])
def register():
    """Render the registration form and create the account via the auth API."""
    error = ""
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        email = request.form['email']
        if not username:
            error = 'Username is required.'
        elif not email:
            error = 'Email is required.'
        elif not password:
            error = 'Password is required.'
        # NOTE(review): `error is ""` is an identity check on a literal
        # (SyntaxWarning on Python 3.8+); `if not error:` is the safe form.
        if error is "":
            data = {"username": username, "email": email, "password": password}
            response = requests.post("http://localhost:5000/register", json=data)
            if response.json().get("status") == "success":
                return redirect(url_for('auth.login'))
            error = response.json().get("message")
    return render_template('auth/register.html', error=error)
@BLUEPRINT.route('/forgot_password', methods = ['GET', 'POST'])
def forgot_password():
    """Render the password-reset form (no reset logic implemented yet)."""
    return render_template('auth/forgot_password.html')
@BLUEPRINT.route('/logout')
def logout():
    """Destroy and clear the session of the logged-in user, then return to login."""
    session.clear()
    return redirect(url_for('auth.login'))
def login_required(view):
    """Decorator for views that require the user to be logged in.

    Anonymous users (``g.user`` is None) are redirected to the login page.
    Fixes a NameError: the module imports ``functools`` but the decorator
    referenced a misspelled ``funtools``.
    """
    @functools.wraps(view)
    def wrapped_view(**kwargs):
        if g.user is None:
            return redirect(url_for('auth.login'))
        return view(**kwargs)
    return wrapped_view
| true | true |
f72ac2161ec154a6fbc2d4c0db4116346291b457 | 9,690 | py | Python | homeassistant/components/zha/core/discovery.py | twrecked/core | d3ae8a938cdea9b6e0d443c91c37ac3dbbd459ab | [
"Apache-2.0"
] | 2 | 2021-09-13T21:44:02.000Z | 2021-12-17T21:20:51.000Z | homeassistant/components/zha/core/discovery.py | twrecked/core | d3ae8a938cdea9b6e0d443c91c37ac3dbbd459ab | [
"Apache-2.0"
] | 5 | 2021-02-08T20:55:25.000Z | 2022-03-12T00:51:18.000Z | homeassistant/components/zha/core/discovery.py | twrecked/core | d3ae8a938cdea9b6e0d443c91c37ac3dbbd459ab | [
"Apache-2.0"
] | 2 | 2020-11-04T07:40:01.000Z | 2021-09-13T21:44:03.000Z | """Device discovery functions for Zigbee Home Automation."""
from collections import Counter
import logging
from typing import Callable, List, Tuple
from homeassistant import const as ha_const
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import HomeAssistantType
from . import const as zha_const, registries as zha_regs, typing as zha_typing
from .. import ( # noqa: F401 pylint: disable=unused-import,
binary_sensor,
cover,
device_tracker,
fan,
light,
lock,
sensor,
switch,
)
from .channels import base
# Module-level logger shared by all discovery helpers below.
_LOGGER = logging.getLogger(__name__)
# NOTE(review): @callback on a coroutine function looks suspect (HA callbacks
# are ordinarily synchronous) — confirm against current HA core conventions.
@callback
async def async_add_entities(
    _async_add_entities: Callable,
    entities: List[
        Tuple[
            zha_typing.ZhaEntityType,
            Tuple[str, zha_typing.ZhaDeviceType, List[zha_typing.ChannelType]],
        ]
    ],
) -> None:
    """Instantiate pending entities and hand them to the platform callback.

    ``entities`` holds ``(entity_class, constructor_args)`` pairs; the list is
    drained once the constructed entities have been forwarded.
    """
    if not entities:
        return
    constructed = [entity_cls(*ctor_args) for entity_cls, ctor_args in entities]
    _async_add_entities(constructed, update_before_add=True)
    entities.clear()
class ProbeEndpoint:
    """Discover entities for the channels found on a zigpy endpoint."""

    def __init__(self):
        """Initialize instance."""
        self._device_configs = {}

    @callback
    def discover_entities(self, channel_pool: zha_typing.ChannelPoolType) -> None:
        """Process an endpoint on a zigpy device.

        Device-type based discovery runs first so that the primary entity can
        claim its channels before the per-cluster fallback pass.
        """
        self.discover_by_device_type(channel_pool)
        self.discover_by_cluster_id(channel_pool)

    @callback
    def discover_by_device_type(self, channel_pool: zha_typing.ChannelPoolType) -> None:
        """Create the entity matching the endpoint's registered device type."""
        unique_id = channel_pool.unique_id

        # A user-supplied device config may force the component type.
        component = self._device_configs.get(unique_id, {}).get(ha_const.CONF_TYPE)
        if component is None:
            endpoint = channel_pool.endpoint
            component = zha_regs.DEVICE_CLASS[endpoint.profile_id].get(
                endpoint.device_type
            )

        if not component or component not in zha_const.COMPONENTS:
            return
        candidate_channels = channel_pool.unclaimed_channels()
        entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
            component, channel_pool.manufacturer, channel_pool.model, candidate_channels
        )
        if entity_class is None:
            return
        channel_pool.claim_channels(claimed)
        channel_pool.async_new_entity(component, entity_class, unique_id, claimed)

    @callback
    def discover_by_cluster_id(self, channel_pool: zha_typing.ChannelPoolType) -> None:
        """Create single-cluster entities for any channels left unclaimed."""
        # Registry entries keyed by class (rather than cluster id) act as
        # isinstance() fallbacks below.
        class_keyed_matches = {
            klass: match
            for klass, match in zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.items()
            if not isinstance(klass, int)
        }

        for channel in channel_pool.unclaimed_channels():
            cluster_id = channel.cluster.cluster_id
            if cluster_id in zha_regs.CHANNEL_ONLY_CLUSTERS:
                channel_pool.claim_channels([channel])
                continue

            component = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.get(cluster_id)
            if component is None:
                component = next(
                    (
                        match
                        for klass, match in class_keyed_matches.items()
                        if isinstance(channel.cluster, klass)
                    ),
                    None,
                )
            self.probe_single_cluster(component, channel, channel_pool)

        # until we can get rid off registries
        self.handle_on_off_output_cluster_exception(channel_pool)

    @staticmethod
    def probe_single_cluster(
        component: str,
        channel: zha_typing.ChannelType,
        ep_channels: zha_typing.ChannelPoolType,
    ) -> None:
        """Probe specified cluster for specific component."""
        if component is None or component not in zha_const.COMPONENTS:
            return
        unique_id = f"{ep_channels.unique_id}-{channel.cluster.cluster_id}"
        entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
            component, ep_channels.manufacturer, ep_channels.model, [channel]
        )
        if entity_class is None:
            return
        ep_channels.claim_channels(claimed)
        ep_channels.async_new_entity(component, entity_class, unique_id, claimed)

    def handle_on_off_output_cluster_exception(
        self, ep_channels: zha_typing.ChannelPoolType
    ) -> None:
        """Create entities for output clusters of non-remote endpoints."""
        endpoint = ep_channels.endpoint
        if endpoint.device_type in zha_regs.REMOTE_DEVICE_TYPES.get(
            endpoint.profile_id, []
        ):
            return

        for cluster_id, cluster in endpoint.out_clusters.items():
            component = zha_regs.SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.get(
                cluster.cluster_id
            )
            if component is None:
                continue
            channel_type = zha_regs.ZIGBEE_CHANNEL_REGISTRY.get(
                cluster_id, base.ZigbeeChannel
            )
            self.probe_single_cluster(
                component, channel_type(cluster, ep_channels), ep_channels
            )

    def initialize(self, hass: HomeAssistantType) -> None:
        """Update device overrides config."""
        zha_config = hass.data[zha_const.DATA_ZHA].get(zha_const.DATA_ZHA_CONFIG, {})
        device_overrides = zha_config.get(zha_const.CONF_DEVICE_CONFIG)
        if device_overrides:
            self._device_configs.update(device_overrides)
class GroupProbe:
    """Determine the appropriate component for a group."""

    def __init__(self):
        """Initialize instance."""
        self._hass = None
        self._unsubs = []

    def initialize(self, hass: HomeAssistantType) -> None:
        """Initialize the group probe."""
        self._hass = hass
        unsubscribe = async_dispatcher_connect(
            hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group
        )
        self._unsubs.append(unsubscribe)

    def cleanup(self):
        """Disconnect all dispatcher listeners when zha shuts down."""
        # Iterate over a snapshot since the list is mutated while looping.
        for unsubscribe in self._unsubs[:]:
            unsubscribe()
            self._unsubs.remove(unsubscribe)

    def _reprobe_group(self, group_id: int) -> None:
        """Reprobe a group for entities after its members change."""
        gateway = self._hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
        group = gateway.groups.get(group_id)
        if group is None:
            return
        self.discover_group_entities(group)

    @callback
    def discover_group_entities(self, group: zha_typing.ZhaGroupType) -> None:
        """Process a group and create any entities that are needed."""
        # A group entity only makes sense once at least two devices belong to it.
        if len(group.members) < 2:
            _LOGGER.debug(
                "Group: %s:0x%04x has less than 2 members - skipping entity discovery",
                group.name,
                group.group_id,
            )
            return

        domains = GroupProbe.determine_entity_domains(self._hass, group)
        if not domains:
            return

        gateway = self._hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
        for domain in domains:
            group_entity_class = zha_regs.ZHA_ENTITIES.get_group_entity(domain)
            if group_entity_class is None:
                continue
            ctor_args = (
                group.get_domain_entity_ids(domain),
                f"{domain}_zha_group_0x{group.group_id:04x}",
                group.group_id,
                gateway.coordinator_zha_device,
            )
            self._hass.data[zha_const.DATA_ZHA][domain].append(
                (group_entity_class, ctor_args)
            )
        async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES)

    @staticmethod
    def determine_entity_domains(
        hass: HomeAssistantType, group: zha_typing.ZhaGroupType
    ) -> List[str]:
        """Determine the entity domains for this group."""
        gateway = hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]

        domain_occurrences: List[str] = []
        for member in group.members:
            # The coordinator itself does not contribute group entities.
            if member.device.is_coordinator:
                continue
            registry_entries = async_entries_for_device(
                gateway.ha_entity_registry, member.device.device_id
            )
            domain_occurrences.extend(
                entry.domain
                for entry in registry_entries
                if entry.domain in zha_regs.GROUP_ENTITY_DOMAINS
            )
        if not domain_occurrences:
            return []

        # Only domains represented by two or more member entities qualify.
        entity_domains = [
            domain for domain, count in Counter(domain_occurrences).items() if count >= 2
        ]
        _LOGGER.debug(
            "The entity domains are: %s for group: %s:0x%04x",
            entity_domains,
            group.name,
            group.group_id,
        )
        return entity_domains
# Shared singleton instances used by the ZHA integration.
PROBE = ProbeEndpoint()
GROUP_PROBE = GroupProbe()
| 36.022305 | 88 | 0.637771 |
from collections import Counter
import logging
from typing import Callable, List, Tuple
from homeassistant import const as ha_const
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import HomeAssistantType
from . import const as zha_const, registries as zha_regs, typing as zha_typing
from .. import (
binary_sensor,
cover,
device_tracker,
fan,
light,
lock,
sensor,
switch,
)
from .channels import base
# Module-level logger shared by all discovery helpers below.
_LOGGER = logging.getLogger(__name__)
@callback
async def async_add_entities(
    _async_add_entities: Callable,
    entities: List[
        Tuple[
            zha_typing.ZhaEntityType,
            Tuple[str, zha_typing.ZhaDeviceType, List[zha_typing.ChannelType]],
        ]
    ],
) -> None:
    """Instantiate pending (entity_class, args) pairs, add them, and drain the list."""
    if not entities:
        return
    to_add = [ent_cls(*args) for ent_cls, args in entities]
    _async_add_entities(to_add, update_before_add=True)
    entities.clear()
class ProbeEndpoint:
    """All discovered channels and entities of an endpoint."""
    def __init__(self):
        """Initialize instance."""
        self._device_configs = {}
    @callback
    def discover_entities(self, channel_pool: zha_typing.ChannelPoolType) -> None:
        """Process an endpoint on a zigpy device."""
        self.discover_by_device_type(channel_pool)
        self.discover_by_cluster_id(channel_pool)
    @callback
    def discover_by_device_type(self, channel_pool: zha_typing.ChannelPoolType) -> None:
        """Create the entity matching the endpoint's registered device type."""
        unique_id = channel_pool.unique_id
        # A user-supplied device config may force the component type.
        component = self._device_configs.get(unique_id, {}).get(ha_const.CONF_TYPE)
        if component is None:
            ep_profile_id = channel_pool.endpoint.profile_id
            ep_device_type = channel_pool.endpoint.device_type
            component = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type)
        if component and component in zha_const.COMPONENTS:
            channels = channel_pool.unclaimed_channels()
            entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
                component, channel_pool.manufacturer, channel_pool.model, channels
            )
            if entity_class is None:
                return
            channel_pool.claim_channels(claimed)
            channel_pool.async_new_entity(component, entity_class, unique_id, claimed)
    @callback
    def discover_by_cluster_id(self, channel_pool: zha_typing.ChannelPoolType) -> None:
        """Create single-cluster entities for any channels left unclaimed."""
        items = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.items()
        # Registry entries keyed by class (rather than cluster id) act as
        # isinstance() fallbacks below.
        single_input_clusters = {
            cluster_class: match
            for cluster_class, match in items
            if not isinstance(cluster_class, int)
        }
        remaining_channels = channel_pool.unclaimed_channels()
        for channel in remaining_channels:
            if channel.cluster.cluster_id in zha_regs.CHANNEL_ONLY_CLUSTERS:
                channel_pool.claim_channels([channel])
                continue
            component = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.get(
                channel.cluster.cluster_id
            )
            if component is None:
                for cluster_class, match in single_input_clusters.items():
                    if isinstance(channel.cluster, cluster_class):
                        component = match
                        break
            self.probe_single_cluster(component, channel, channel_pool)
        self.handle_on_off_output_cluster_exception(channel_pool)
    @staticmethod
    def probe_single_cluster(
        component: str,
        channel: zha_typing.ChannelType,
        ep_channels: zha_typing.ChannelPoolType,
    ) -> None:
        """Probe specified cluster for specific component."""
        if component is None or component not in zha_const.COMPONENTS:
            return
        channel_list = [channel]
        unique_id = f"{ep_channels.unique_id}-{channel.cluster.cluster_id}"
        entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
            component, ep_channels.manufacturer, ep_channels.model, channel_list
        )
        if entity_class is None:
            return
        ep_channels.claim_channels(claimed)
        ep_channels.async_new_entity(component, entity_class, unique_id, claimed)
    def handle_on_off_output_cluster_exception(
        self, ep_channels: zha_typing.ChannelPoolType
    ) -> None:
        """Create entities for output clusters of non-remote endpoints."""
        profile_id = ep_channels.endpoint.profile_id
        device_type = ep_channels.endpoint.device_type
        if device_type in zha_regs.REMOTE_DEVICE_TYPES.get(profile_id, []):
            return
        for cluster_id, cluster in ep_channels.endpoint.out_clusters.items():
            component = zha_regs.SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.get(
                cluster.cluster_id
            )
            if component is None:
                continue
            channel_class = zha_regs.ZIGBEE_CHANNEL_REGISTRY.get(
                cluster_id, base.ZigbeeChannel
            )
            channel = channel_class(cluster, ep_channels)
            self.probe_single_cluster(component, channel, ep_channels)
    def initialize(self, hass: HomeAssistantType) -> None:
        """Update device overrides config."""
        zha_config = hass.data[zha_const.DATA_ZHA].get(zha_const.DATA_ZHA_CONFIG, {})
        overrides = zha_config.get(zha_const.CONF_DEVICE_CONFIG)
        if overrides:
            self._device_configs.update(overrides)
class GroupProbe:
    """Determine the appropriate component for a group."""
    def __init__(self):
        """Initialize instance."""
        self._hass = None
        self._unsubs = []
    def initialize(self, hass: HomeAssistantType) -> None:
        """Initialize the group probe."""
        self._hass = hass
        self._unsubs.append(
            async_dispatcher_connect(
                hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group
            )
        )
    def cleanup(self):
        """Disconnect all dispatcher listeners when zha shuts down."""
        # Iterate over a snapshot since the list is mutated while looping.
        for unsub in self._unsubs[:]:
            unsub()
            self._unsubs.remove(unsub)
    def _reprobe_group(self, group_id: int) -> None:
        """Reprobe a group for entities after its members change."""
        zha_gateway = self._hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
        zha_group = zha_gateway.groups.get(group_id)
        if zha_group is None:
            return
        self.discover_group_entities(zha_group)
    @callback
    def discover_group_entities(self, group: zha_typing.ZhaGroupType) -> None:
        """Process a group and create any entities that are needed."""
        # Only create a group entity if there are 2 or more members in a group.
        if len(group.members) < 2:
            _LOGGER.debug(
                "Group: %s:0x%04x has less than 2 members - skipping entity discovery",
                group.name,
                group.group_id,
            )
            return
        entity_domains = GroupProbe.determine_entity_domains(self._hass, group)
        if not entity_domains:
            return
        zha_gateway = self._hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
        for domain in entity_domains:
            entity_class = zha_regs.ZHA_ENTITIES.get_group_entity(domain)
            if entity_class is None:
                continue
            self._hass.data[zha_const.DATA_ZHA][domain].append(
                (
                    entity_class,
                    (
                        group.get_domain_entity_ids(domain),
                        f"{domain}_zha_group_0x{group.group_id:04x}",
                        group.group_id,
                        zha_gateway.coordinator_zha_device,
                    ),
                )
            )
        async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES)
    @staticmethod
    def determine_entity_domains(
        hass: HomeAssistantType, group: zha_typing.ZhaGroupType
    ) -> List[str]:
        """Determine the entity domains for this group."""
        entity_domains: List[str] = []
        zha_gateway = hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
        all_domain_occurrences = []
        for member in group.members:
            # The coordinator itself does not contribute group entities.
            if member.device.is_coordinator:
                continue
            entities = async_entries_for_device(
                zha_gateway.ha_entity_registry, member.device.device_id
            )
            all_domain_occurrences.extend(
                [
                    entity.domain
                    for entity in entities
                    if entity.domain in zha_regs.GROUP_ENTITY_DOMAINS
                ]
            )
        if not all_domain_occurrences:
            return entity_domains
        # Get all domains we care about if there are more than 2 entities of this domain.
        counts = Counter(all_domain_occurrences)
        entity_domains = [domain[0] for domain in counts.items() if domain[1] >= 2]
        _LOGGER.debug(
            "The entity domains are: %s for group: %s:0x%04x",
            entity_domains,
            group.name,
            group.group_id,
        )
        return entity_domains
# Module-level singleton probes used by the ZHA component during discovery.
PROBE = ProbeEndpoint()
GROUP_PROBE = GroupProbe()
| true | true |
f72ac2c60476f898867047bfebd012f5f4feae2c | 3,209 | py | Python | autocalibration/lib/python2.7/site-packages/matplotlib/tests/test_units.py | prcalopa/reactable-autocalibration | eb67a5b5ee0e50f1effa773f6f3f934b5fda6fcf | [
"MIT"
] | 5 | 2017-11-15T10:33:42.000Z | 2021-11-16T02:21:31.000Z | matplotlib/tests/test_units.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 2 | 2017-10-28T03:30:26.000Z | 2017-10-28T03:31:00.000Z | matplotlib/tests/test_units.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 6 | 2017-11-30T00:34:20.000Z | 2021-05-20T02:58:02.000Z | from matplotlib.cbook import iterable
import matplotlib.pyplot as plt
from matplotlib.testing.decorators import image_comparison
import matplotlib.units as munits
import numpy as np
try:
# mock in python 3.3+
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
# Minimal pint-like facade over a numpy array that carries a unit label.
class Quantity(object):
    def __init__(self, data, units):
        self.magnitude = data
        self.units = units

    def to(self, new_units):
        """Return a new Quantity converted to *new_units*."""
        # Pairwise conversion factors; an unknown pair raises KeyError.
        conversion_table = {
            ('hours', 'seconds'): 3600,
            ('minutes', 'hours'): 1 / 60,
            ('minutes', 'seconds'): 60,
            ('feet', 'miles'): 1 / 5280.,
            ('feet', 'inches'): 12,
            ('miles', 'inches'): 12 * 5280,
        }
        if self.units == new_units:
            return Quantity(self.magnitude, self.units)
        scale = conversion_table[self.units, new_units]
        return Quantity(scale * self.magnitude, new_units)

    def __getattr__(self, attr):
        # Delegate anything we do not define to the wrapped magnitude.
        return getattr(self.magnitude, attr)

    def __getitem__(self, item):
        return Quantity(self.magnitude[item], self.units)

    def __array__(self):
        return np.asarray(self.magnitude)
# Tests that the conversion machinery works properly for classes that
# work as a facade over numpy arrays (like pint)
@image_comparison(baseline_images=['plot_pint'],
                  extensions=['png'], remove_text=False, style='mpl20')
def test_numpy_facade():
    # Create an instance of the conversion interface and
    # mock so we can check methods called
    qc = munits.ConversionInterface()
    def convert(value, unit, axis):
        # Convert Quantity scalars/sequences; plain numbers are wrapped with
        # the axis' current units before converting.
        if hasattr(value, 'units'):
            return value.to(unit).magnitude
        elif iterable(value):
            try:
                return [v.to(unit).magnitude for v in value]
            except AttributeError:
                return [Quantity(v, axis.get_units()).to(unit).magnitude
                        for v in value]
        else:
            return Quantity(value, axis.get_units()).to(unit).magnitude
    # Wrap the interface methods in mocks so the calls can be asserted below.
    qc.convert = MagicMock(side_effect=convert)
    qc.axisinfo = MagicMock(side_effect=lambda u, a: munits.AxisInfo(label=u))
    qc.default_units = MagicMock(side_effect=lambda x, a: x.units)
    # Register the class
    munits.registry[Quantity] = qc
    # Simple test
    y = Quantity(np.linspace(0, 30), 'miles')
    x = Quantity(np.linspace(0, 5), 'hours')
    fig, ax = plt.subplots()
    fig.subplots_adjust(left=0.15)  # Make space for label
    ax.plot(x, y, 'tab:blue')
    ax.axhline(Quantity(26400, 'feet'), color='tab:red')
    ax.axvline(Quantity(120, 'minutes'), color='tab:green')
    # Changing axis units must route through the registered converter.
    ax.yaxis.set_units('inches')
    ax.xaxis.set_units('seconds')
    assert qc.convert.called
    assert qc.axisinfo.called
    assert qc.default_units.called
# Regression test for gh-8908: masked arrays wrapped in a unit-carrying
# class must plot without errors.
@image_comparison(baseline_images=['plot_masked_units'],
                  extensions=['png'], remove_text=True, style='mpl20')
def test_plot_masked_units():
    values = np.linspace(-5, 5)
    mask = (values > -2) & (values < 2)
    masked_quantity = Quantity(np.ma.array(values, mask=mask), 'meters')
    fig, ax = plt.subplots()
    ax.plot(masked_quantity)
| 33.778947 | 78 | 0.644126 | from matplotlib.cbook import iterable
import matplotlib.pyplot as plt
from matplotlib.testing.decorators import image_comparison
import matplotlib.units as munits
import numpy as np
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
class Quantity(object):
    """Numpy-array facade that carries a unit label (pint-like test helper)."""
    def __init__(self, data, units):
        self.magnitude = data
        self.units = units
    def to(self, new_units):
        """Return a new Quantity converted to *new_units*."""
        # Pairwise conversion factors; an unknown pair raises KeyError.
        factors = {('hours', 'seconds'): 3600, ('minutes', 'hours'): 1 / 60,
                   ('minutes', 'seconds'): 60, ('feet', 'miles'): 1 / 5280.,
                   ('feet', 'inches'): 12, ('miles', 'inches'): 12 * 5280}
        if self.units != new_units:
            mult = factors[self.units, new_units]
            return Quantity(mult * self.magnitude, new_units)
        else:
            return Quantity(self.magnitude, self.units)
    def __getattr__(self, attr):
        # Delegate unknown attributes to the wrapped magnitude.
        return getattr(self.magnitude, attr)
    def __getitem__(self, item):
        return Quantity(self.magnitude[item], self.units)
    def __array__(self):
        return np.asarray(self.magnitude)
@image_comparison(baseline_images=['plot_pint'],
                  extensions=['png'], remove_text=False, style='mpl20')
def test_numpy_facade():
    # Tests that the conversion machinery works for classes that act as a
    # facade over numpy arrays (like pint quantities).
    qc = munits.ConversionInterface()
    def convert(value, unit, axis):
        # Convert Quantity scalars/sequences; plain numbers are wrapped with
        # the axis' current units before converting.
        if hasattr(value, 'units'):
            return value.to(unit).magnitude
        elif iterable(value):
            try:
                return [v.to(unit).magnitude for v in value]
            except AttributeError:
                return [Quantity(v, axis.get_units()).to(unit).magnitude
                        for v in value]
        else:
            return Quantity(value, axis.get_units()).to(unit).magnitude
    # Wrap the interface methods in mocks so the calls can be asserted below.
    qc.convert = MagicMock(side_effect=convert)
    qc.axisinfo = MagicMock(side_effect=lambda u, a: munits.AxisInfo(label=u))
    qc.default_units = MagicMock(side_effect=lambda x, a: x.units)
    # Register the converter for Quantity instances.
    munits.registry[Quantity] = qc
    y = Quantity(np.linspace(0, 30), 'miles')
    x = Quantity(np.linspace(0, 5), 'hours')
    fig, ax = plt.subplots()
    fig.subplots_adjust(left=0.15)  # make room for the axis label
    ax.plot(x, y, 'tab:blue')
    ax.axhline(Quantity(26400, 'feet'), color='tab:red')
    ax.axvline(Quantity(120, 'minutes'), color='tab:green')
    # Changing axis units must route through the registered converter.
    ax.yaxis.set_units('inches')
    ax.xaxis.set_units('seconds')
    assert qc.convert.called
    assert qc.axisinfo.called
    assert qc.default_units.called
@image_comparison(baseline_images=['plot_masked_units'],
                  extensions=['png'], remove_text=True, style='mpl20')
def test_plot_masked_units():
    # Regression test for gh-8908: masked arrays wrapped in a unit-carrying
    # class must plot without errors.
    data = np.linspace(-5, 5)
    data_masked = np.ma.array(data, mask=(data > -2) & (data < 2))
    data_masked_units = Quantity(data_masked, 'meters')
    fig, ax = plt.subplots()
    ax.plot(data_masked_units)
| true | true |
f72ac3357d035fb96b484046450f998989af2f98 | 36,873 | py | Python | src/unity/python/turicreate/toolkits/drawing_classifier/drawing_classifier.py | jolinlaw/turicreate | 6b2057dc29533da225d18138e93cc15680eea85d | [
"BSD-3-Clause"
] | null | null | null | src/unity/python/turicreate/toolkits/drawing_classifier/drawing_classifier.py | jolinlaw/turicreate | 6b2057dc29533da225d18138e93cc15680eea85d | [
"BSD-3-Clause"
] | null | null | null | src/unity/python/turicreate/toolkits/drawing_classifier/drawing_classifier.py | jolinlaw/turicreate | 6b2057dc29533da225d18138e93cc15680eea85d | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright © 2019 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import turicreate as _tc
import numpy as _np
import time as _time
from turicreate.toolkits._model import CustomModel as _CustomModel
from turicreate.toolkits._model import PythonProxy as _PythonProxy
from turicreate.toolkits import evaluation as _evaluation
import turicreate.toolkits._internal_utils as _tkutl
from turicreate.toolkits._main import ToolkitError as _ToolkitError
from turicreate import extensions as _extensions
from .. import _pre_trained_models
# Drawings are fed to the network as 28x28 grayscale bitmaps.
BITMAP_WIDTH = 28
BITMAP_HEIGHT = 28
# Fraction of the data kept for training when the 'auto' validation split is used.
TRAIN_VALIDATION_SPLIT = .95
def _raise_error_if_not_drawing_classifier_input_sframe(
    dataset, feature, target):
    """
    Validate the SFrame passed to `turicreate.drawing_classifier.create`,
    raising a ToolkitError when a required column is missing, has the wrong
    type, or the dataset is empty.
    """
    from turicreate.toolkits._internal_utils import _raise_error_if_not_sframe
    _raise_error_if_not_sframe(dataset)
    columns = dataset.column_names()
    if feature not in columns:
        raise _ToolkitError("Feature column '%s' does not exist" % feature)
    if target not in columns:
        raise _ToolkitError("Target column '%s' does not exist" % target)
    # Drawings may be bitmaps (tc.Image) or stroke lists.
    feature_dtype = dataset[feature].dtype
    if feature_dtype != _tc.Image and feature_dtype != list:
        raise _ToolkitError("Feature column must contain images"
            + " or stroke-based drawings encoded as lists of strokes"
            + " where each stroke is a list of points and"
            + " each point is stored as a dictionary")
    target_dtype = dataset[target].dtype
    if target_dtype != int and target_dtype != str:
        raise _ToolkitError("Target column contains " + str(target_dtype)
            + " but it must contain strings or integers to represent"
            + " labels for drawings.")
    if len(dataset) == 0:
        raise _ToolkitError("Input Dataset is empty!")
def create(input_dataset, target, feature=None, validation_set='auto',
           warm_start='auto', batch_size=256,
           max_iterations=100, verbose=True):
    """
    Create a :class:`DrawingClassifier` model.

    Parameters
    ----------
    input_dataset : SFrame
        Input data. The columns named by the ``feature`` and ``target``
        parameters will be extracted for training the drawing classifier.

    target : string
        Name of the column containing the target variable. The values in this
        column must be of string or integer type.

    feature : string, optional
        Name of the column containing the input drawings. 'None' (the default)
        indicates the column in `dataset` named "drawing" should be used as the
        feature.
        The feature column can contain both bitmap-based drawings as well as
        stroke-based drawings. Bitmap-based drawing input can be a grayscale
        tc.Image of any size.
        Stroke-based drawing input must be in the following format:
        Every drawing must be represented by a list of strokes, where each
        stroke must be a list of points in the order in which they were drawn
        on the canvas.
        Each point must be a dictionary with two keys, "x" and "y", and their
        respective values must be numerical, i.e. either integer or float.

    validation_set : SFrame, optional
        A dataset for monitoring the model's generalization performance.
        The format of this SFrame must be the same as the training set.
        By default this argument is set to 'auto' and a validation set is
        automatically sampled and used for progress printing. If
        validation_set is set to None, then no additional metrics
        are computed. The default value is 'auto'.

    warm_start : string, optional
        A string to denote which pretrained model to use. Set to "auto"
        by default which uses a model trained on 245 of the 345 classes in the
        Quick, Draw! dataset. To disable warm start, pass in None to this
        argument. Here is a list of all the pretrained models that
        can be passed in as this argument:
        "auto": Uses quickdraw_245_v0
        "quickdraw_245_v0": Uses a model trained on 245 of the 345 classes in
        the Quick, Draw! dataset.
        None: No Warm Start

    batch_size : int, optional
        The number of drawings per training step. If not set, a default
        value of 256 will be used. If you are getting memory errors,
        try decreasing this value. If you have a powerful computer, increasing
        this value may improve performance.

    max_iterations : int, optional
        The maximum number of allowed passes through the data. More passes over
        the data can result in a more accurately trained model.

    verbose : bool, optional
        If True, print progress updates and model details.

    Returns
    -------
    out : DrawingClassifier
        A trained :class:`DrawingClassifier` model.

    See Also
    --------
    DrawingClassifier

    Examples
    --------
    .. sourcecode:: python

        # Train a drawing classifier model
        >>> model = turicreate.drawing_classifier.create(data)

        # Make predictions on the training set and as column to the SFrame
        >>> data['predictions'] = model.predict(data)
    """
    import mxnet as _mx
    from mxnet import autograd as _autograd
    from ._model_architecture import Model as _Model
    from ._sframe_loader import SFrameClassifierIter as _SFrameClassifierIter
    from .._mxnet import _mxnet_utils

    start_time = _time.time()
    accepted_values_for_warm_start = ["auto", "quickdraw_245_v0", None]

    # @TODO: Should be able to automatically choose number of iterations
    # based on data size: Tracked in Github Issue #1576

    # automatically infer feature column
    if feature is None:
        feature = _tkutl._find_only_drawing_column(input_dataset)

    _raise_error_if_not_drawing_classifier_input_sframe(
        input_dataset, feature, target)

    if batch_size is not None and not isinstance(batch_size, int):
        raise TypeError("'batch_size' must be an integer >= 1")
    if batch_size is not None and batch_size < 1:
        raise ValueError("'batch_size' must be >= 1")
    if max_iterations is not None and not isinstance(max_iterations, int):
        raise TypeError("'max_iterations' must be an integer >= 1")
    if max_iterations is not None and max_iterations < 1:
        raise ValueError("'max_iterations' must be >= 1")

    # Stroke-based input is rasterized into bitmaps by the C++ extension.
    is_stroke_input = (input_dataset[feature].dtype != _tc.Image)
    dataset = _extensions._drawing_classifier_prepare_data(
        input_dataset, feature) if is_stroke_input else input_dataset

    iteration = 0

    # Sort the classes so the class-to-index mapping is deterministic.
    classes = dataset[target].unique()
    classes = sorted(classes)
    class_to_index = {name: index for index, name in enumerate(classes)}

    validation_set_corrective_string = ("'validation_set' parameter must be "
        + "an SFrame, or None, or must be set to 'auto' for the toolkit to "
        + "automatically create a validation set.")
    if isinstance(validation_set, _tc.SFrame):
        # User-supplied validation data gets the same checks/preparation.
        _raise_error_if_not_drawing_classifier_input_sframe(
            validation_set, feature, target)
        is_validation_stroke_input = (validation_set[feature].dtype != _tc.Image)
        validation_dataset = _extensions._drawing_classifier_prepare_data(
            validation_set, feature) if is_validation_stroke_input else validation_set
    elif isinstance(validation_set, str):
        if validation_set == 'auto':
            # Only carve out a validation split when there is enough data.
            if dataset.num_rows() >= 100:
                if verbose:
                    print ( "PROGRESS: Creating a validation set from 5 percent of training data. This may take a while.\n"
                            "          You can set ``validation_set=None`` to disable validation tracking.\n")
                dataset, validation_dataset = dataset.random_split(TRAIN_VALIDATION_SPLIT, exact=True)
            else:
                validation_set = None
                validation_dataset = _tc.SFrame()
        else:
            raise _ToolkitError("Unrecognized value for 'validation_set'. "
                + validation_set_corrective_string)
    elif validation_set is None:
        validation_dataset = _tc.SFrame()
    else:
        raise TypeError("Unrecognized type for 'validation_set'."
            + validation_set_corrective_string)

    # Three loaders: one for training (max_iterations passes), and two
    # single-pass loaders used to measure train/validation accuracy.
    train_loader = _SFrameClassifierIter(dataset, batch_size,
                                         feature_column=feature,
                                         target_column=target,
                                         class_to_index=class_to_index,
                                         load_labels=True,
                                         shuffle=True,
                                         iterations=max_iterations)
    train_loader_to_compute_accuracy = _SFrameClassifierIter(dataset, batch_size,
                                                             feature_column=feature,
                                                             target_column=target,
                                                             class_to_index=class_to_index,
                                                             load_labels=True,
                                                             shuffle=True,
                                                             iterations=1)
    validation_loader = _SFrameClassifierIter(validation_dataset, batch_size,
                                              feature_column=feature,
                                              target_column=target,
                                              class_to_index=class_to_index,
                                              load_labels=True,
                                              shuffle=True,
                                              iterations=1)
    if verbose and iteration == 0:
        column_names = ['iteration', 'train_loss', 'train_accuracy', 'time']
        column_titles = ['Iteration', 'Training Loss', 'Training Accuracy', 'Elapsed Time (seconds)']
        if validation_set is not None:
            column_names.insert(3, 'validation_accuracy')
            column_titles.insert(3, 'Validation Accuracy')
        table_printer = _tc.util._ProgressTablePrinter(
            column_names, column_titles)

    ctx = _mxnet_utils.get_mxnet_context(max_devices=batch_size)
    model = _Model(num_classes = len(classes), prefix="drawing_")
    model_params = model.collect_params()
    model_params.initialize(_mx.init.Xavier(), ctx=ctx)

    if warm_start is not None:
        if type(warm_start) is not str:
            raise TypeError("'warm_start' must be a string or None. "
                + "'warm_start' can take in the following values: "
                + str(accepted_values_for_warm_start))
        if warm_start not in accepted_values_for_warm_start:
            raise _ToolkitError("Unrecognized value for 'warm_start': "
                + warm_start + ". 'warm_start' can take in the following "
                + "values: " + str(accepted_values_for_warm_start))
        # Load pretrained weights; allow_missing because the final layer
        # depends on the number of classes in this dataset.
        pretrained_model = _pre_trained_models.DrawingClassifierPreTrainedModel(
            warm_start)
        pretrained_model_params_path = pretrained_model.get_model_path()
        model.load_params(pretrained_model_params_path,
            ctx=ctx,
            allow_missing=True)
    softmax_cross_entropy = _mx.gluon.loss.SoftmaxCrossEntropyLoss()
    model.hybridize()
    trainer = _mx.gluon.Trainer(model.collect_params(), 'adam')

    train_accuracy = _mx.metric.Accuracy()
    validation_accuracy = _mx.metric.Accuracy()

    def get_data_and_label_from_batch(batch):
        # Trim loader padding (last batch) and shard across available devices.
        if batch.pad is not None:
            size = batch_size - batch.pad
            sliced_data = _mx.nd.slice_axis(batch.data[0], axis=0, begin=0, end=size)
            sliced_label = _mx.nd.slice_axis(batch.label[0], axis=0, begin=0, end=size)
            num_devices = min(sliced_data.shape[0], len(ctx))
            batch_data = _mx.gluon.utils.split_and_load(sliced_data, ctx_list=ctx[:num_devices], even_split=False)
            batch_label = _mx.gluon.utils.split_and_load(sliced_label, ctx_list=ctx[:num_devices], even_split=False)
        else:
            batch_data = _mx.gluon.utils.split_and_load(batch.data[0], ctx_list=ctx, batch_axis=0)
            batch_label = _mx.gluon.utils.split_and_load(batch.label[0], ctx_list=ctx, batch_axis=0)
        return batch_data, batch_label

    def compute_accuracy(accuracy_metric, batch_loader):
        # One full pass over batch_loader, accumulating into accuracy_metric.
        batch_loader.reset()
        accuracy_metric.reset()
        for batch in batch_loader:
            batch_data, batch_label = get_data_and_label_from_batch(batch)
            outputs = []
            for x, y in zip(batch_data, batch_label):
                if x is None or y is None: continue
                z = model(x)
                outputs.append(z)
            accuracy_metric.update(batch_label, outputs)

    for train_batch in train_loader:
        train_batch_data, train_batch_label = get_data_and_label_from_batch(train_batch)
        with _autograd.record():
            # Inside training scope
            for x, y in zip(train_batch_data, train_batch_label):
                z = model(x)
                # Computes softmax cross entropy loss.
                loss = softmax_cross_entropy(z, y)
                # Backpropagate the error for one iteration.
                loss.backward()
        # Make one step of parameter update. Trainer needs to know the
        # batch size of data to normalize the gradient by 1/batch_size.
        trainer.step(train_batch.data[0].shape[0])
        # calculate training metrics
        train_loss = loss.mean().asscalar()
        train_time = _time.time() - start_time

        # Metrics are only recomputed once per epoch (when the loader moves
        # on to the next iteration), not for every batch.
        if train_batch.iteration > iteration:
            # Compute training accuracy
            compute_accuracy(train_accuracy, train_loader_to_compute_accuracy)
            # Compute validation accuracy
            if validation_set is not None:
                compute_accuracy(validation_accuracy, validation_loader)
            iteration = train_batch.iteration
            if verbose:
                kwargs = { "iteration": iteration,
                           "train_loss": float(train_loss),
                           "train_accuracy": train_accuracy.get()[1],
                           "time": train_time}
                if validation_set is not None:
                    kwargs["validation_accuracy"] = validation_accuracy.get()[1]
                table_printer.print_row(**kwargs)

    state = {
        '_model': model,
        '_class_to_index': class_to_index,
        'num_classes': len(classes),
        'classes': classes,
        'input_image_shape': (1, BITMAP_WIDTH, BITMAP_HEIGHT),
        'batch_size': batch_size,
        'training_loss': train_loss,
        'training_accuracy': train_accuracy.get()[1],
        'training_time': train_time,
        'validation_accuracy': validation_accuracy.get()[1],
        # nan if validation_set=None
        'max_iterations': max_iterations,
        'target': target,
        'feature': feature,
        'num_examples': len(input_dataset)
    }
    return DrawingClassifier(state)
class DrawingClassifier(_CustomModel):
"""
A trained model that is ready to use for classification, and to be
exported to Core ML.
This model should not be constructed directly.
"""
_PYTHON_DRAWING_CLASSIFIER_VERSION = 1
    def __init__(self, state):
        # All model state (network, classes, training stats) is held behind a
        # PythonProxy produced by create() or _load_version().
        self.__proxy__ = _PythonProxy(state)
    @classmethod
    def _native_name(cls):
        # Identifier under which this model type is registered for save/load.
        return "drawing_classifier"
    def _get_native_state(self):
        """Return the model state with the MXNet network made serializable."""
        from .._mxnet import _mxnet_utils
        state = self.__proxy__.get_state()
        mxnet_params = state['_model'].collect_params()
        # Replace the live gluon network with a picklable parameter snapshot.
        state['_model'] = _mxnet_utils.get_gluon_net_params_state(mxnet_params)
        return state
    def _get_version(self):
        # Serialization format version; bump when the native state layout changes.
        return self._PYTHON_DRAWING_CLASSIFIER_VERSION
    @classmethod
    def _load_version(cls, state, version):
        """Rebuild a DrawingClassifier from a saved native state dict."""
        _tkutl._model_version_check(version,
                                    cls._PYTHON_DRAWING_CLASSIFIER_VERSION)
        from ._model_architecture import Model as _Model
        from .._mxnet import _mxnet_utils
        # Recreate the network and load the serialized parameters into it.
        net = _Model(num_classes = len(state['classes']), prefix = 'drawing_')
        ctx = _mxnet_utils.get_mxnet_context(max_devices=state['batch_size'])
        net_params = net.collect_params()
        _mxnet_utils.load_net_params_from_state(
            net_params, state['_model'], ctx=ctx
        )
        state['_model'] = net
        # For a model trained on integer classes, when saved and loaded back,
        # the classes are loaded as floats. The following if statement casts
        # the loaded "float" classes back to int.
        if len(state['classes']) > 0 and isinstance(state['classes'][0], float):
            state['classes'] = list(map(int, state['classes']))
        return DrawingClassifier(state)
def __str__(self):
"""
Return a string description of the model to the ``print`` method.
Returns
-------
out : string
A description of the DrawingClassifier.
"""
return self.__repr__()
def __repr__(self):
"""
Returns a string description of the model when the model name is
entered in the terminal.
"""
width = 40
sections, section_titles = self._get_summary_struct()
out = _tkutl._toolkit_repr_print(self, sections, section_titles,
width=width)
return out
def _get_summary_struct(self):
"""
Returns a structured description of the model, including (where
relevant) the schema of the training data, description of the training
data, training statistics, and model hyperparameters.
Returns
-------
sections : list (of list of tuples)
A list of summary sections.
Each section is a list.
Each item in a section list is a tuple of the form:
('<label>','<field>')
section_titles: list
A list of section titles.
The order matches that of the 'sections' object.
"""
model_fields = [
('Number of classes', 'num_classes'),
('Feature column', 'feature'),
('Target column', 'target')
]
training_fields = [
('Training Iterations', 'max_iterations'),
('Training Accuracy', 'training_accuracy'),
('Validation Accuracy', 'validation_accuracy'),
('Training Time', 'training_time'),
('Number of Examples', 'num_examples'),
('Batch Size', 'batch_size'),
('Final Loss (specific to model)', 'training_loss')
]
section_titles = ['Schema', 'Training summary']
return([model_fields, training_fields], section_titles)
    def export_coreml(self, filename, verbose=False):
        """
        Save the model in Core ML format. The Core ML model takes a grayscale
        drawing of fixed size as input and produces two outputs:
        `classLabel` and `labelProbabilities`.

        The first one, `classLabel` is an integer or string (depending on the
        classes the model was trained on) to store the label of the top
        prediction by the model.

        The second one, `labelProbabilities`, is a dictionary with all the
        class labels in the dataset as the keys, and their respective
        probabilities as the values.

        See Also
        --------
        save

        Parameters
        ----------
        filename : string
            The path of the file where we want to save the Core ML model.

        verbose : bool, optional
            If True, prints export progress.

        Examples
        --------
        >>> model.export_coreml('drawing_classifier.mlmodel')
        """
        import mxnet as _mx
        from .._mxnet._mxnet_to_coreml import _mxnet_converter
        import coremltools as _coremltools

        # Build a symbolic graph over a single 1x28x28 grayscale input.
        batch_size = 1
        image_shape = (batch_size,) + (1, BITMAP_WIDTH, BITMAP_HEIGHT)
        s_image = _mx.sym.Variable(self.feature,
            shape=image_shape, dtype=_np.float32)

        from copy import copy as _copy
        net = _copy(self._model)
        s_ymap = net(s_image)

        mod = _mx.mod.Module(symbol=s_ymap, label_names=None, data_names=[self.feature])
        mod.bind(for_training=False, data_shapes=[(self.feature, image_shape)])
        mod.init_params()

        # Overwrite the module's freshly-initialized parameters with the
        # trained values from the gluon network.
        arg_params, aux_params = mod.get_params()
        net_params = net.collect_params()

        new_arg_params = {}
        for k, param in arg_params.items():
            new_arg_params[k] = net_params[k].data(net_params[k].list_ctx()[0])
        new_aux_params = {}
        for k, param in aux_params.items():
            new_aux_params[k] = net_params[k].data(net_params[k].list_ctx()[0])
        mod.set_params(new_arg_params, new_aux_params)

        # Convert to Core ML; inputs are images scaled from [0, 255] to [0, 1].
        coreml_model = _mxnet_converter.convert(mod, mode='classifier',
                                class_labels=self.classes,
                                input_shape=[(self.feature, image_shape)],
                                builder=None, verbose=verbose,
                                preprocessor_args={
                                    'image_input_names': [self.feature],
                                    'image_scale': 1.0/255
                                })

        # Rename the probability output to "<target>Probabilities" everywhere
        # it appears in the generated spec.
        DESIRED_OUTPUT_NAME = self.target + "Probabilities"
        spec = coreml_model._spec
        class_label_output_index = 0 if spec.description.output[0].name == "classLabel" else 1
        probabilities_output_index = 1-class_label_output_index
        spec.neuralNetworkClassifier.labelProbabilityLayerName = DESIRED_OUTPUT_NAME
        spec.neuralNetworkClassifier.layers[-1].name = DESIRED_OUTPUT_NAME
        spec.neuralNetworkClassifier.layers[-1].output[0] = DESIRED_OUTPUT_NAME
        spec.description.predictedProbabilitiesName = DESIRED_OUTPUT_NAME
        spec.description.output[probabilities_output_index].name = DESIRED_OUTPUT_NAME

        # Fill in human-readable metadata, then write the spec to disk.
        from turicreate.toolkits import _coreml_utils
        model_type = "drawing classifier"
        spec.description.metadata.shortDescription = _coreml_utils._mlmodel_short_description(model_type)
        spec.description.input[0].shortDescription = self.feature
        spec.description.output[probabilities_output_index].shortDescription = 'Prediction probabilities'
        spec.description.output[class_label_output_index].shortDescription = 'Class Label of Top Prediction'

        from coremltools.models.utils import save_spec as _save_spec
        _save_spec(spec, filename)
def _predict_with_probabilities(self, input_dataset, batch_size=None,
verbose=True):
"""
Predict with probabilities. The core prediction part that both
`evaluate` and `predict` share.
Returns an SFrame with two columns, self.target, and "probabilities".
The column with column name, self.target, contains the predictions made
by the model for the provided dataset.
The "probabilities" column contains the probabilities for each class
that the model predicted for the data provided to the function.
"""
from .._mxnet import _mxnet_utils
import mxnet as _mx
from ._sframe_loader import SFrameClassifierIter as _SFrameClassifierIter
is_stroke_input = (input_dataset[self.feature].dtype != _tc.Image)
dataset = _extensions._drawing_classifier_prepare_data(
input_dataset, self.feature) if is_stroke_input else input_dataset
batch_size = self.batch_size if batch_size is None else batch_size
loader = _SFrameClassifierIter(dataset, batch_size,
class_to_index=self._class_to_index,
feature_column=self.feature,
target_column=self.target,
load_labels=False,
shuffle=False,
iterations=1)
dataset_size = len(dataset)
ctx = _mxnet_utils.get_mxnet_context()
index = 0
last_time = 0
done = False
from turicreate import SArrayBuilder
from array import array
classes = self.classes
all_predicted_builder = SArrayBuilder(dtype=type(classes[0]))
all_probabilities_builder = SArrayBuilder(dtype=array)
for batch in loader:
if batch.pad is not None:
size = batch_size - batch.pad
batch_data = _mx.nd.slice_axis(batch.data[0],
axis=0, begin=0, end=size)
else:
batch_data = batch.data[0]
size = batch_size
num_devices = min(batch_data.shape[0], len(ctx))
split_data = _mx.gluon.utils.split_and_load(batch_data, ctx_list=ctx[:num_devices], even_split=False)
for data in split_data:
z = self._model(data).asnumpy()
predicted = list(map(lambda x: classes[x], z.argmax(axis=1)))
split_length = z.shape[0]
all_predicted_builder.append_multiple(predicted)
all_probabilities_builder.append_multiple(z.tolist())
index += split_length
if index == dataset_size - 1:
done = True
cur_time = _time.time()
# Do not print progress if only a few samples are predicted
if verbose and (dataset_size >= 5
and cur_time > last_time + 10 or done):
print('Predicting {cur_n:{width}d}/{max_n:{width}d}'.format(
cur_n = index + 1,
max_n = dataset_size,
width = len(str(dataset_size))))
last_time = cur_time
return (_tc.SFrame({self.target: all_predicted_builder.close(),
'probability': all_probabilities_builder.close()}))
def evaluate(self, dataset, metric='auto', batch_size=None, verbose=True):
"""
Evaluate the model by making predictions of target values and comparing
these to actual values.
Parameters
----------
dataset : SFrame
Dataset of new observations. Must include columns with the same
names as the feature and target columns used for model training.
Additional columns are ignored.
metric : str, optional
Name of the evaluation metric. Possible values are:
- 'auto' : Returns all available metrics.
- 'accuracy' : Classification accuracy (micro average).
- 'auc' : Area under the ROC curve (macro average)
- 'precision' : Precision score (macro average)
- 'recall' : Recall score (macro average)
- 'f1_score' : F1 score (macro average)
- 'confusion_matrix' : An SFrame with counts of possible
prediction/true label combinations.
- 'roc_curve' : An SFrame containing information needed for an
ROC curve
verbose : bool, optional
If True, prints prediction progress.
Returns
-------
out : dict
Dictionary of evaluation results where the key is the name of the
evaluation metric (e.g. `accuracy`) and the value is the evaluation
score.
See Also
----------
create, predict
Examples
----------
.. sourcecode:: python
>>> results = model.evaluate(data)
>>> print(results['accuracy'])
"""
if self.target not in dataset.column_names():
raise _ToolkitError("Must provide ground truth column, '"
+ self.target + "' in the evaluation dataset.")
predicted = self._predict_with_probabilities(dataset, batch_size, verbose)
avail_metrics = ['accuracy', 'auc', 'precision', 'recall',
'f1_score', 'confusion_matrix', 'roc_curve']
_tkutl._check_categorical_option_type(
'metric', metric, avail_metrics + ['auto'])
metrics = avail_metrics if metric == 'auto' else [metric]
ret = {}
if 'accuracy' in metrics:
ret['accuracy'] = _evaluation.accuracy(
dataset[self.target], predicted[self.target])
if 'auc' in metrics:
ret['auc'] = _evaluation.auc(
dataset[self.target], predicted['probability'],
index_map=self._class_to_index)
if 'precision' in metrics:
ret['precision'] = _evaluation.precision(
dataset[self.target], predicted[self.target])
if 'recall' in metrics:
ret['recall'] = _evaluation.recall(
dataset[self.target], predicted[self.target])
if 'f1_score' in metrics:
ret['f1_score'] = _evaluation.f1_score(
dataset[self.target], predicted[self.target])
if 'confusion_matrix' in metrics:
ret['confusion_matrix'] = _evaluation.confusion_matrix(
dataset[self.target], predicted[self.target])
if 'roc_curve' in metrics:
ret['roc_curve'] = _evaluation.roc_curve(
dataset[self.target], predicted['probability'],
index_map=self._class_to_index)
return ret
def predict_topk(self, dataset, output_type="probability", k=3,
batch_size=None):
"""
Return top-k predictions for the ``dataset``, using the trained model.
Predictions are returned as an SFrame with three columns: `id`,
`class`, and `probability` or `rank`, depending on the ``output_type``
parameter.
Parameters
----------
dataset : SFrame | SArray | turicreate.Image | list
Drawings to be classified.
If dataset is an SFrame, it must include columns with the same
names as the features used for model training, but does not require
a target column. Additional columns are ignored.
output_type : {'probability', 'rank'}, optional
Choose the return type of the prediction:
- `probability`: Probability associated with each label in the
prediction.
- `rank` : Rank associated with each label in the prediction.
k : int, optional
Number of classes to return for each input example.
batch_size : int, optional
If you are getting memory errors, try decreasing this value. If you
have a powerful computer, increasing this value may improve
performance.
Returns
-------
out : SFrame
An SFrame with model predictions.
See Also
--------
predict, evaluate
Examples
--------
>>> pred = m.predict_topk(validation_data, k=3)
>>> pred
+----+-------+-------------------+
| id | class | probability |
+----+-------+-------------------+
| 0 | 4 | 0.995623886585 |
| 0 | 9 | 0.0038311756216 |
| 0 | 7 | 0.000301006948575 |
| 1 | 1 | 0.928708016872 |
| 1 | 3 | 0.0440889261663 |
| 1 | 2 | 0.0176190119237 |
| 2 | 3 | 0.996967732906 |
| 2 | 2 | 0.00151345680933 |
| 2 | 7 | 0.000637513934635 |
| 3 | 1 | 0.998070061207 |
| .. | ... | ... |
+----+-------+-------------------+
[35688 rows x 3 columns]
"""
_tkutl._check_categorical_option_type("output_type", output_type,
["probability", "rank"])
if not isinstance(k, int):
raise TypeError("'k' must be an integer >= 1")
if k <= 0:
raise ValueError("'k' must be >= 1")
if batch_size is not None and not isinstance(batch_size, int):
raise TypeError("'batch_size' must be an integer >= 1")
if batch_size is not None and batch_size < 1:
raise ValueError("'batch_size' must be >= 1")
prob_vector = self.predict(
dataset, output_type='probability_vector', batch_size=batch_size)
classes = self.classes
if output_type == 'probability':
results = prob_vector.apply(lambda p: [
{'class': classes[i], 'probability': p[i]}
for i in reversed(_np.argsort(p)[-k:])]
)
else:
assert(output_type == 'rank')
results = prob_vector.apply(lambda p: [
{'class': classes[index], 'rank': rank}
for rank, index in enumerate(reversed(_np.argsort(p)[-k:]))]
)
results = _tc.SFrame({'X': results})
results = results.add_row_number()
results = results.stack('X', new_column_name='X')
results = results.unpack('X', column_name_prefix='')
return results
def predict(self, data, output_type='class', batch_size=None, verbose=True):
"""
Predict on an SFrame or SArray of drawings, or on a single drawing.
Parameters
----------
data : SFrame | SArray | tc.Image | list
The drawing(s) on which to perform drawing classification.
If dataset is an SFrame, it must have a column with the same name
as the feature column during training. Additional columns are
ignored.
If the data is a single drawing, it can be either of type tc.Image,
in which case it is a bitmap-based drawing input,
or of type list, in which case it is a stroke-based drawing input.
output_type : {'probability', 'class', 'probability_vector'}, optional
Form of the predictions which are one of:
- 'class': Class prediction. For multi-class classification, this
returns the class with maximum probability.
- 'probability': Prediction probability associated with the True
class (not applicable for multi-class classification)
- 'probability_vector': Prediction probability associated with each
class as a vector. Label ordering is dictated by the ``classes``
member variable.
batch_size : int, optional
If you are getting memory errors, try decreasing this value. If you
have a powerful computer, increasing this value may improve
performance.
verbose : bool, optional
If True, prints prediction progress.
Returns
-------
out : SArray
An SArray with model predictions. Each element corresponds to
a drawing and contains a single value corresponding to the
predicted label. Each prediction will have type integer or string
depending on the type of the classes the model was trained on.
If `data` is a single drawing, the return value will be a single
prediction.
See Also
--------
evaluate
Examples
--------
.. sourcecode:: python
# Make predictions
>>> pred = model.predict(data)
# Print predictions, for a better overview
>>> print(pred)
dtype: int
Rows: 10
[3, 4, 3, 3, 4, 5, 8, 8, 8, 4]
"""
_tkutl._check_categorical_option_type("output_type", output_type,
["probability", "class", "probability_vector"])
if isinstance(data, _tc.SArray):
predicted = self._predict_with_probabilities(
_tc.SFrame({
self.feature: data
}),
batch_size,
verbose
)
elif isinstance(data, _tc.SFrame):
predicted = self._predict_with_probabilities(data, batch_size, verbose)
else:
# single input
predicted = self._predict_with_probabilities(
_tc.SFrame({
self.feature: [data]
}),
batch_size,
verbose
)
if output_type == "class":
return predicted[self.target]
elif output_type == "probability":
_class_to_index = self._class_to_index
target = self.target
return predicted.apply(
lambda row: row["probability"][_class_to_index[row[target]]])
else:
assert (output_type == "probability_vector")
return predicted["probability"]
| 41.901136 | 123 | 0.609389 |
import turicreate as _tc
import numpy as _np
import time as _time
from turicreate.toolkits._model import CustomModel as _CustomModel
from turicreate.toolkits._model import PythonProxy as _PythonProxy
from turicreate.toolkits import evaluation as _evaluation
import turicreate.toolkits._internal_utils as _tkutl
from turicreate.toolkits._main import ToolkitError as _ToolkitError
from turicreate import extensions as _extensions
from .. import _pre_trained_models
# Drawings are rasterized onto fixed-size 28x28 single-channel bitmaps
# (see `input_image_shape` stored on the trained model).
BITMAP_WIDTH = 28
BITMAP_HEIGHT = 28
# Fraction of rows kept for training when a validation set is auto-created;
# the remaining ~5% becomes the validation split.
TRAIN_VALIDATION_SPLIT = .95
def _raise_error_if_not_drawing_classifier_input_sframe(
        dataset, feature, target):
    """
    Validate a drawing-classifier input SFrame.

    Checks that `dataset` is a non-empty SFrame containing both the
    `feature` and `target` columns, that the feature column holds images
    or stroke-based drawings (lists of strokes), and that the target
    column holds str or int labels.

    :raises ToolkitError: on any violation.
    """
    from turicreate.toolkits._internal_utils import _raise_error_if_not_sframe
    _raise_error_if_not_sframe(dataset)
    columns = dataset.column_names()
    if feature not in columns:
        raise _ToolkitError("Feature column '%s' does not exist" % feature)
    if target not in columns:
        raise _ToolkitError("Target column '%s' does not exist" % target)
    feature_dtype = dataset[feature].dtype
    if feature_dtype != _tc.Image and feature_dtype != list:
        raise _ToolkitError("Feature column must contain images"
                            + " or stroke-based drawings encoded as lists of strokes"
                            + " where each stroke is a list of points and"
                            + " each point is stored as a dictionary")
    target_dtype = dataset[target].dtype
    if target_dtype != int and target_dtype != str:
        raise _ToolkitError("Target column contains " + str(target_dtype)
                            + " but it must contain strings or integers to represent"
                            + " labels for drawings.")
    if len(dataset) == 0:
        raise _ToolkitError("Input Dataset is empty!")
def create(input_dataset, target, feature=None, validation_set='auto',
           warm_start='auto', batch_size=256,
           max_iterations=100, verbose=True):
    """
    Create a :class:`DrawingClassifier` by training an MXNet network on
    `input_dataset`.

    :param input_dataset: SFrame with a drawing feature column (bitmaps or
        stroke lists) and a label column named `target`.
    :param target: Name of the label column (str or int labels).
    :param feature: Name of the drawing column; auto-detected when None.
    :param validation_set: SFrame, None, or 'auto'. With 'auto', ~5% of the
        data is split off for validation when the dataset has >= 100 rows.
    :param warm_start: Pretrained model name ('auto', 'quickdraw_245_v0') or
        None to train from randomly-initialized weights.
    :param batch_size: Mini-batch size, integer >= 1.
    :param max_iterations: Number of passes the data loader makes, >= 1.
    :param verbose: If True, prints a progress table while training.
    :return: A trained DrawingClassifier.
    """
    import mxnet as _mx
    from mxnet import autograd as _autograd
    from ._model_architecture import Model as _Model
    from ._sframe_loader import SFrameClassifierIter as _SFrameClassifierIter
    from .._mxnet import _mxnet_utils
    start_time = _time.time()
    accepted_values_for_warm_start = ["auto", "quickdraw_245_v0", None]
    # Auto-detect the drawing column when the caller did not name one.
    if feature is None:
        feature = _tkutl._find_only_drawing_column(input_dataset)
    _raise_error_if_not_drawing_classifier_input_sframe(
        input_dataset, feature, target)
    # Validate scalar hyperparameters before touching the data.
    if batch_size is not None and not isinstance(batch_size, int):
        raise TypeError("'batch_size' must be an integer >= 1")
    if batch_size is not None and batch_size < 1:
        raise ValueError("'batch_size' must be >= 1")
    if max_iterations is not None and not isinstance(max_iterations, int):
        raise TypeError("'max_iterations' must be an integer >= 1")
    if max_iterations is not None and max_iterations < 1:
        raise ValueError("'max_iterations' must be >= 1")
    # Stroke-based drawings are rasterized into bitmaps up front.
    is_stroke_input = (input_dataset[feature].dtype != _tc.Image)
    dataset = _extensions._drawing_classifier_prepare_data(
        input_dataset, feature) if is_stroke_input else input_dataset
    iteration = 0
    # Sorted, deduplicated label set and its label -> index mapping.
    classes = dataset[target].unique()
    classes = sorted(classes)
    class_to_index = {name: index for index, name in enumerate(classes)}
    validation_set_corrective_string = ("'validation_set' parameter must be "
        + "an SFrame, or None, or must be set to 'auto' for the toolkit to "
        + "automatically create a validation set.")
    # Resolve validation_set: an explicit SFrame, 'auto' (split off 5% when
    # there is enough data), or None (no validation tracking).
    if isinstance(validation_set, _tc.SFrame):
        _raise_error_if_not_drawing_classifier_input_sframe(
            validation_set, feature, target)
        is_validation_stroke_input = (validation_set[feature].dtype != _tc.Image)
        validation_dataset = _extensions._drawing_classifier_prepare_data(
            validation_set, feature) if is_validation_stroke_input else validation_set
    elif isinstance(validation_set, str):
        if validation_set == 'auto':
            if dataset.num_rows() >= 100:
                if verbose:
                    print ( "PROGRESS: Creating a validation set from 5 percent of training data. This may take a while.\n"
                            "          You can set ``validation_set=None`` to disable validation tracking.\n")
                dataset, validation_dataset = dataset.random_split(TRAIN_VALIDATION_SPLIT, exact=True)
            else:
                # Too little data to split; fall back to no validation.
                validation_set = None
                validation_dataset = _tc.SFrame()
        else:
            raise _ToolkitError("Unrecognized value for 'validation_set'. "
                + validation_set_corrective_string)
    elif validation_set is None:
        validation_dataset = _tc.SFrame()
    else:
        raise TypeError("Unrecognized type for 'validation_set'."
            + validation_set_corrective_string)
    # Three iterators over the same data: one for training (max_iterations
    # passes, shuffled), one single-pass iterator to measure training
    # accuracy, and one single-pass iterator over the validation split.
    train_loader = _SFrameClassifierIter(dataset, batch_size,
                                         feature_column=feature,
                                         target_column=target,
                                         class_to_index=class_to_index,
                                         load_labels=True,
                                         shuffle=True,
                                         iterations=max_iterations)
    train_loader_to_compute_accuracy = _SFrameClassifierIter(dataset, batch_size,
                                         feature_column=feature,
                                         target_column=target,
                                         class_to_index=class_to_index,
                                         load_labels=True,
                                         shuffle=True,
                                         iterations=1)
    validation_loader = _SFrameClassifierIter(validation_dataset, batch_size,
                                              feature_column=feature,
                                              target_column=target,
                                              class_to_index=class_to_index,
                                              load_labels=True,
                                              shuffle=True,
                                              iterations=1)
    if verbose and iteration == 0:
        column_names = ['iteration', 'train_loss', 'train_accuracy', 'time']
        column_titles = ['Iteration', 'Training Loss', 'Training Accuracy', 'Elapsed Time (seconds)']
        if validation_set is not None:
            column_names.insert(3, 'validation_accuracy')
            column_titles.insert(3, 'Validation Accuracy')
        table_printer = _tc.util._ProgressTablePrinter(
            column_names, column_titles)
    # Build the network and initialize (or warm-start) its parameters.
    ctx = _mxnet_utils.get_mxnet_context(max_devices=batch_size)
    model = _Model(num_classes = len(classes), prefix="drawing_")
    model_params = model.collect_params()
    model_params.initialize(_mx.init.Xavier(), ctx=ctx)
    if warm_start is not None:
        if type(warm_start) is not str:
            raise TypeError("'warm_start' must be a string or None. "
                + "'warm_start' can take in the following values: "
                + str(accepted_values_for_warm_start))
        if warm_start not in accepted_values_for_warm_start:
            raise _ToolkitError("Unrecognized value for 'warm_start': "
                + warm_start + ". 'warm_start' can take in the following "
                + "values: " + str(accepted_values_for_warm_start))
        pretrained_model = _pre_trained_models.DrawingClassifierPreTrainedModel(
            warm_start)
        pretrained_model_params_path = pretrained_model.get_model_path()
        # allow_missing: the pretrained net may not cover every parameter
        # (e.g. the final layer for a different number of classes).
        model.load_params(pretrained_model_params_path,
            ctx=ctx,
            allow_missing=True)
    softmax_cross_entropy = _mx.gluon.loss.SoftmaxCrossEntropyLoss()
    model.hybridize()
    trainer = _mx.gluon.Trainer(model.collect_params(), 'adam')
    train_accuracy = _mx.metric.Accuracy()
    validation_accuracy = _mx.metric.Accuracy()
    def get_data_and_label_from_batch(batch):
        # Trim loader padding from the last batch, then shard the batch
        # across the available devices.
        if batch.pad is not None:
            size = batch_size - batch.pad
            sliced_data  = _mx.nd.slice_axis(batch.data[0], axis=0, begin=0, end=size)
            sliced_label = _mx.nd.slice_axis(batch.label[0], axis=0, begin=0, end=size)
            num_devices = min(sliced_data.shape[0], len(ctx))
            batch_data  = _mx.gluon.utils.split_and_load(sliced_data, ctx_list=ctx[:num_devices], even_split=False)
            batch_label = _mx.gluon.utils.split_and_load(sliced_label, ctx_list=ctx[:num_devices], even_split=False)
        else:
            batch_data  = _mx.gluon.utils.split_and_load(batch.data[0], ctx_list=ctx, batch_axis=0)
            batch_label = _mx.gluon.utils.split_and_load(batch.label[0], ctx_list=ctx, batch_axis=0)
        return batch_data, batch_label
    def compute_accuracy(accuracy_metric, batch_loader):
        # One full pass over batch_loader, accumulating into accuracy_metric.
        batch_loader.reset()
        accuracy_metric.reset()
        for batch in batch_loader:
            batch_data, batch_label = get_data_and_label_from_batch(batch)
            outputs = []
            for x, y in zip(batch_data, batch_label):
                if x is None or y is None: continue
                z = model(x)
                outputs.append(z)
            accuracy_metric.update(batch_label, outputs)
    # Main training loop: forward/backward per device shard, then a single
    # optimizer step per batch; metrics are refreshed once per iteration.
    for train_batch in train_loader:
        train_batch_data, train_batch_label = get_data_and_label_from_batch(train_batch)
        with _autograd.record():
            for x, y in zip(train_batch_data, train_batch_label):
                z = model(x)
                loss = softmax_cross_entropy(z, y)
                loss.backward()
        trainer.step(train_batch.data[0].shape[0])
        train_loss = loss.mean().asscalar()
        train_time = _time.time() - start_time
        if train_batch.iteration > iteration:
            # A new iteration has started: recompute accuracies and report.
            compute_accuracy(train_accuracy, train_loader_to_compute_accuracy)
            if validation_set is not None:
                compute_accuracy(validation_accuracy, validation_loader)
            iteration = train_batch.iteration
            if verbose:
                kwargs = { "iteration": iteration,
                           "train_loss": float(train_loss),
                           "train_accuracy": train_accuracy.get()[1],
                           "time": train_time}
                if validation_set is not None:
                    kwargs["validation_accuracy"] = validation_accuracy.get()[1]
                table_printer.print_row(**kwargs)
    # Package everything the model proxy needs for prediction/serialization.
    state = {
        '_model': model,
        '_class_to_index': class_to_index,
        'num_classes': len(classes),
        'classes': classes,
        'input_image_shape': (1, BITMAP_WIDTH, BITMAP_HEIGHT),
        'batch_size': batch_size,
        'training_loss': train_loss,
        'training_accuracy': train_accuracy.get()[1],
        'training_time': train_time,
        'validation_accuracy': validation_accuracy.get()[1],
        'max_iterations': max_iterations,
        'target': target,
        'feature': feature,
        'num_examples': len(input_dataset)
    }
    return DrawingClassifier(state)
class DrawingClassifier(_CustomModel):
    """
    Model type for drawing classification.

    Wraps a trained MXNet gluon network together with its metadata
    (classes, feature/target column names, batch size) behind a
    ``_PythonProxy``, and exposes prediction, evaluation, and Core ML
    export. Instances should be created via :func:`create`.
    """

    _PYTHON_DRAWING_CLASSIFIER_VERSION = 1

    def __init__(self, state):
        # All persisted fields (target, feature, classes, _model,
        # _class_to_index, batch_size, ...) live behind the proxy.
        self.__proxy__ = _PythonProxy(state)

    @classmethod
    def _native_name(cls):
        return "drawing_classifier"

    def _get_native_state(self):
        """Return a serializable state dict; the live gluon network is
        replaced by an exported parameter dump."""
        from .._mxnet import _mxnet_utils
        state = self.__proxy__.get_state()
        mxnet_params = state['_model'].collect_params()
        state['_model'] = _mxnet_utils.get_gluon_net_params_state(mxnet_params)
        return state

    def _get_version(self):
        return self._PYTHON_DRAWING_CLASSIFIER_VERSION

    @classmethod
    def _load_version(cls, state, version):
        """
        Rebuild a DrawingClassifier from a `state` dict produced by
        `_get_native_state`.

        :param state: Serialized model state.
        :param version: Serialization version, checked against
            `_PYTHON_DRAWING_CLASSIFIER_VERSION`.
        """
        _tkutl._model_version_check(version,
                                    cls._PYTHON_DRAWING_CLASSIFIER_VERSION)
        from ._model_architecture import Model as _Model
        from .._mxnet import _mxnet_utils
        net = _Model(num_classes=len(state['classes']), prefix='drawing_')
        ctx = _mxnet_utils.get_mxnet_context(max_devices=state['batch_size'])
        net_params = net.collect_params()
        _mxnet_utils.load_net_params_from_state(
            net_params, state['_model'], ctx=ctx)
        state['_model'] = net
        # Older exports may have stored integer class labels as floats.
        if len(state['classes']) > 0 and isinstance(state['classes'][0], float):
            state['classes'] = list(map(int, state['classes']))
        return DrawingClassifier(state)

    def __str__(self):
        return self.__repr__()

    def __repr__(self):
        """Printable summary of the model, used by the REPL."""
        width = 40
        sections, section_titles = self._get_summary_struct()
        return _tkutl._toolkit_repr_print(self, sections, section_titles,
                                          width=width)

    def _get_summary_struct(self):
        """
        Return the model description grouped into sections of
        (human-readable name, proxy field) pairs, plus section titles.
        """
        model_fields = [
            ('Number of classes', 'num_classes'),
            ('Feature column', 'feature'),
            ('Target column', 'target')
        ]
        training_fields = [
            ('Training Iterations', 'max_iterations'),
            ('Training Accuracy', 'training_accuracy'),
            ('Validation Accuracy', 'validation_accuracy'),
            ('Training Time', 'training_time'),
            ('Number of Examples', 'num_examples'),
            ('Batch Size', 'batch_size'),
            ('Final Loss (specific to model)', 'training_loss')
        ]
        section_titles = ['Schema', 'Training summary']
        return ([model_fields, training_fields], section_titles)

    def export_coreml(self, filename, verbose=False):
        """
        Save the model as a Core ML classifier at `filename`.

        The gluon network is re-bound as a symbolic MXNet module for a
        single 1x28x28 image input, converted to Core ML, and the output
        layers are renamed to `<target>Probabilities`.
        """
        import mxnet as _mx
        from .._mxnet._mxnet_to_coreml import _mxnet_converter
        import coremltools as _coremltools

        batch_size = 1
        image_shape = (batch_size,) + (1, BITMAP_WIDTH, BITMAP_HEIGHT)
        s_image = _mx.sym.Variable(self.feature,
                                   shape=image_shape, dtype=_np.float32)

        from copy import copy as _copy
        net = _copy(self._model)
        s_ymap = net(s_image)

        mod = _mx.mod.Module(symbol=s_ymap, label_names=None, data_names=[self.feature])
        mod.bind(for_training=False, data_shapes=[(self.feature, image_shape)])
        mod.init_params()

        arg_params, aux_params = mod.get_params()
        net_params = net.collect_params()

        # Copy the trained gluon parameters into the symbolic module.
        new_arg_params = {}
        for k in arg_params:
            new_arg_params[k] = net_params[k].data(net_params[k].list_ctx()[0])
        new_aux_params = {}
        for k in aux_params:
            new_aux_params[k] = net_params[k].data(net_params[k].list_ctx()[0])
        mod.set_params(new_arg_params, new_aux_params)

        coreml_model = _mxnet_converter.convert(mod, mode='classifier',
                                class_labels=self.classes,
                                input_shape=[(self.feature, image_shape)],
                                builder=None, verbose=verbose,
                                preprocessor_args={
                                    'image_input_names': [self.feature],
                                    'image_scale': 1.0/255
                                })

        DESIRED_OUTPUT_NAME = self.target + "Probabilities"
        spec = coreml_model._spec
        # The converter emits two outputs; find which one is the label.
        class_label_output_index = 0 if spec.description.output[0].name == "classLabel" else 1
        probabilities_output_index = 1 - class_label_output_index
        spec.neuralNetworkClassifier.labelProbabilityLayerName = DESIRED_OUTPUT_NAME
        spec.neuralNetworkClassifier.layers[-1].name = DESIRED_OUTPUT_NAME
        spec.neuralNetworkClassifier.layers[-1].output[0] = DESIRED_OUTPUT_NAME
        spec.description.predictedProbabilitiesName = DESIRED_OUTPUT_NAME
        spec.description.output[probabilities_output_index].name = DESIRED_OUTPUT_NAME

        from turicreate.toolkits import _coreml_utils
        model_type = "drawing classifier"
        spec.description.metadata.shortDescription = _coreml_utils._mlmodel_short_description(model_type)
        spec.description.input[0].shortDescription = self.feature
        spec.description.output[probabilities_output_index].shortDescription = 'Prediction probabilities'
        spec.description.output[class_label_output_index].shortDescription = 'Class Label of Top Prediction'

        from coremltools.models.utils import save_spec as _save_spec
        _save_spec(spec, filename)

    def _predict_with_probabilities(self, input_dataset, batch_size=None,
                                    verbose=True):
        """
        Core prediction path shared by `predict` and `evaluate`.

        Returns an SFrame with a `<target>` column of predicted labels and
        a `probability` column of per-class probability vectors.
        """
        from .._mxnet import _mxnet_utils
        import mxnet as _mx
        from ._sframe_loader import SFrameClassifierIter as _SFrameClassifierIter

        # Stroke-based input must first be rasterized to bitmaps.
        is_stroke_input = (input_dataset[self.feature].dtype != _tc.Image)
        dataset = _extensions._drawing_classifier_prepare_data(
            input_dataset, self.feature) if is_stroke_input else input_dataset

        batch_size = self.batch_size if batch_size is None else batch_size
        loader = _SFrameClassifierIter(dataset, batch_size,
                                       class_to_index=self._class_to_index,
                                       feature_column=self.feature,
                                       target_column=self.target,
                                       load_labels=False,
                                       shuffle=False,
                                       iterations=1)

        dataset_size = len(dataset)
        ctx = _mxnet_utils.get_mxnet_context()

        index = 0
        last_time = 0

        from turicreate import SArrayBuilder
        from array import array

        classes = self.classes
        all_predicted_builder = SArrayBuilder(dtype=type(classes[0]))
        all_probabilities_builder = SArrayBuilder(dtype=array)

        for batch in loader:
            # Drop the padding rows appended by the loader to fill the
            # final batch.
            if batch.pad is not None:
                size = batch_size - batch.pad
                batch_data = _mx.nd.slice_axis(batch.data[0],
                                               axis=0, begin=0, end=size)
            else:
                batch_data = batch.data[0]
                size = batch_size

            num_devices = min(batch_data.shape[0], len(ctx))
            split_data = _mx.gluon.utils.split_and_load(
                batch_data, ctx_list=ctx[:num_devices], even_split=False)

            for data in split_data:
                z = self._model(data).asnumpy()
                predicted = list(map(lambda x: classes[x], z.argmax(axis=1)))
                split_length = z.shape[0]
                all_predicted_builder.append_multiple(predicted)
                all_probabilities_builder.append_multiple(z.tolist())
                index += split_length

                # BUG FIX: `index` counts examples already processed, so it
                # ends at dataset_size; the previous `index == dataset_size - 1`
                # check could never fire, which both skipped the guaranteed
                # final progress line and overcounted progress by one.
                done = index >= dataset_size

                cur_time = _time.time()
                # Skip progress output for tiny datasets; throttle updates to
                # roughly one line per 10 seconds; always print on completion.
                if verbose and (dataset_size >= 5
                                and cur_time > last_time + 10 or done):
                    print('Predicting {cur_n:{width}d}/{max_n:{width}d}'.format(
                        cur_n=index,
                        max_n=dataset_size,
                        width=len(str(dataset_size))))
                    last_time = cur_time

        return (_tc.SFrame({self.target: all_predicted_builder.close(),
                            'probability': all_probabilities_builder.close()}))

    def evaluate(self, dataset, metric='auto', batch_size=None, verbose=True):
        """
        Evaluate the model on `dataset`, which must contain the ground
        truth `target` column.

        :param metric: One of 'auto', 'accuracy', 'auc', 'precision',
            'recall', 'f1_score', 'confusion_matrix', 'roc_curve'; 'auto'
            computes all of them.
        :return: dict mapping metric name to its result.
        """
        if self.target not in dataset.column_names():
            raise _ToolkitError("Must provide ground truth column, '"
                                + self.target + "' in the evaluation dataset.")
        predicted = self._predict_with_probabilities(dataset, batch_size, verbose)
        avail_metrics = ['accuracy', 'auc', 'precision', 'recall',
                         'f1_score', 'confusion_matrix', 'roc_curve']
        _tkutl._check_categorical_option_type(
            'metric', metric, avail_metrics + ['auto'])
        metrics = avail_metrics if metric == 'auto' else [metric]
        ret = {}
        # Label-based metrics compare predicted labels against ground truth;
        # 'auc' and 'roc_curve' use the full probability vectors instead.
        if 'accuracy' in metrics:
            ret['accuracy'] = _evaluation.accuracy(
                dataset[self.target], predicted[self.target])
        if 'auc' in metrics:
            ret['auc'] = _evaluation.auc(
                dataset[self.target], predicted['probability'],
                index_map=self._class_to_index)
        if 'precision' in metrics:
            ret['precision'] = _evaluation.precision(
                dataset[self.target], predicted[self.target])
        if 'recall' in metrics:
            ret['recall'] = _evaluation.recall(
                dataset[self.target], predicted[self.target])
        if 'f1_score' in metrics:
            ret['f1_score'] = _evaluation.f1_score(
                dataset[self.target], predicted[self.target])
        if 'confusion_matrix' in metrics:
            ret['confusion_matrix'] = _evaluation.confusion_matrix(
                dataset[self.target], predicted[self.target])
        if 'roc_curve' in metrics:
            ret['roc_curve'] = _evaluation.roc_curve(
                dataset[self.target], predicted['probability'],
                index_map=self._class_to_index)
        return ret

    def predict_topk(self, dataset, output_type="probability", k=3,
                     batch_size=None):
        """
        Return top-k predictions for `dataset` as an SFrame with `id`,
        `class`, and either `probability` or `rank` columns.

        :param output_type: 'probability' or 'rank'.
        :param k: Number of classes to return per example (>= 1).
        :param batch_size: Prediction batch size; lower it on memory errors.
        """
        _tkutl._check_categorical_option_type("output_type", output_type,
                                              ["probability", "rank"])
        if not isinstance(k, int):
            raise TypeError("'k' must be an integer >= 1")
        if k <= 0:
            raise ValueError("'k' must be >= 1")
        if batch_size is not None and not isinstance(batch_size, int):
            raise TypeError("'batch_size' must be an integer >= 1")
        if batch_size is not None and batch_size < 1:
            raise ValueError("'batch_size' must be >= 1")
        prob_vector = self.predict(
            dataset, output_type='probability_vector', batch_size=batch_size)
        classes = self.classes
        # Map each probability vector to its k most likely entries,
        # most probable first.
        if output_type == 'probability':
            results = prob_vector.apply(lambda p: [
                {'class': classes[i], 'probability': p[i]}
                for i in reversed(_np.argsort(p)[-k:])]
            )
        else:
            assert (output_type == 'rank')
            results = prob_vector.apply(lambda p: [
                {'class': classes[index], 'rank': rank}
                for rank, index in enumerate(reversed(_np.argsort(p)[-k:]))]
            )
        # Flatten the per-row lists into one row per (id, class) pair.
        results = _tc.SFrame({'X': results})
        results = results.add_row_number()
        results = results.stack('X', new_column_name='X')
        results = results.unpack('X', column_name_prefix='')
        return results

    def predict(self, data, output_type='class', batch_size=None, verbose=True):
        """
        Predict labels for an SFrame/SArray of drawings or a single
        drawing (tc.Image bitmap or stroke list).

        :param output_type: 'class', 'probability' (binary only), or
            'probability_vector' (ordered like `classes`).
        :param batch_size: Prediction batch size; lower it on memory errors.
        :param verbose: If True, prints prediction progress.
        :return: SArray of predictions, one element per input drawing.
        """
        _tkutl._check_categorical_option_type("output_type", output_type,
                                              ["probability", "class", "probability_vector"])
        # Normalize all accepted input shapes to an SFrame.
        if isinstance(data, _tc.SArray):
            predicted = self._predict_with_probabilities(
                _tc.SFrame({self.feature: data}), batch_size, verbose)
        elif isinstance(data, _tc.SFrame):
            predicted = self._predict_with_probabilities(data, batch_size, verbose)
        else:
            # Single drawing input.
            predicted = self._predict_with_probabilities(
                _tc.SFrame({self.feature: [data]}), batch_size, verbose)
        if output_type == "class":
            return predicted[self.target]
        elif output_type == "probability":
            _class_to_index = self._class_to_index
            target = self.target
            return predicted.apply(
                lambda row: row["probability"][_class_to_index[row[target]]])
        else:
            assert (output_type == "probability_vector")
            return predicted["probability"]
| true | true |
f72ac42fd9ceac1af5051c46c0355962da805968 | 15,671 | py | Python | restio/model.py | eduardostarling/restio | 66bdb0f86105bf090d7f109da2dd37cbd0096da7 | [
"MIT"
] | 3 | 2019-11-11T14:18:26.000Z | 2020-09-04T20:50:11.000Z | restio/model.py | eduardostarling/restio | 66bdb0f86105bf090d7f109da2dd37cbd0096da7 | [
"MIT"
] | 16 | 2019-11-19T14:39:30.000Z | 2021-06-26T15:08:21.000Z | restio/model.py | eduardostarling/restio | 66bdb0f86105bf090d7f109da2dd37cbd0096da7 | [
"MIT"
] | null | null | null | from __future__ import annotations
from collections.abc import Iterable
from reprlib import Repr
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, Type
from uuid import UUID, uuid4
from restio.event import EventListener
from restio.fields.base import Field, T_co
from restio.shared import (
CURRENT_SESSION,
MODEL_INSTANTIATED_EVENT,
MODEL_PRE_UPDATE_EVENT,
MODEL_TYPE_REGISTRY,
MODEL_UPDATE_EVENT,
)
from restio.state import ModelState
if TYPE_CHECKING:
from restio.session import Session
def _check_model_type(obj: Optional[BaseModel]):
    """
    Guard helper validating that *obj* is a BaseModel instance.

    :param obj: Candidate object; None also fails the check.
    :raises TypeError: If *obj* is not a BaseModel.
    """
    if isinstance(obj, BaseModel):
        return
    raise TypeError("The provided object is not of type BaseModel.")
class ModelMeta:
    """
    Per-model-type configuration holder populated by the model metaclass.

    Tracks constructor behaviour flags, the registry of declared fields,
    the subset of fields marked as primary keys, and an optional alias
    under which the model type is registered.
    """

    __slots__ = ("init", "init_ignore_extra", "repr", "fields", "primary_keys", "alias")

    init: bool
    init_ignore_extra: bool
    repr: bool
    fields: Dict[str, Field]
    primary_keys: Dict[str, Field]
    alias: Optional[str]

    def __init__(self):
        # Constructor-related behaviour is enabled by default.
        self.init = True
        self.init_ignore_extra = True
        self.repr = True
        # No alias until a model class declares one in its Meta.
        self.alias = None
        # Field registries start empty; the metaclass fills them in.
        self.fields = {}
        self.primary_keys = {}
# Meta attributes that are never inherited from parent model classes
# (each class must declare its own alias to avoid registry collisions).
__MODEL_META_NOT_INHERITED__ = ("alias",)
# Read-only meta attributes: populated by the metaclass while processing
# fields, never copied from a model's own Meta class.
__MODEL_META_READONLY__ = ("fields", "primary_keys")
class BaseModelMeta(type):
    __slots__ = ()

    # NOTE(review): this string sits after __slots__, so it is a no-op
    # expression statement rather than the class docstring — confirm intent.
    """
    BaseModel metaclass. Responsible to internally cache the data schema in a BaseModel
    subclass by identifying fields and primary keys.
    """

    def __new__(cls, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]):
        # internal fields not initialized in BaseModel; forced to None on
        # every subclass so instances always have them defined
        dct["_internal_id"] = None
        dct["_hash"] = None
        dct["_listener"] = None
        dct["_persistent_values"] = None

        # prepares metadata for the model type
        meta = ModelMeta()
        dct["_meta"] = meta

        def _update_meta(
            _meta: Optional[ModelMeta],
            extend: bool,
            not_inherited: Tuple[str, ...] = tuple(),
        ):
            # Copies configuration attributes from `_meta` into the new
            # type's meta; `extend` additionally merges field registries.
            if not _meta:
                return

            propagate_meta = (
                set(meta.__slots__) - set(__MODEL_META_READONLY__) - set(not_inherited)
            )
            for meta_attribute in propagate_meta:
                if not hasattr(_meta, meta_attribute):
                    continue
                setattr(meta, meta_attribute, getattr(_meta, meta_attribute))

            # excluded meta, needs to be propagated manually
            if extend:
                meta.fields.update(_meta.fields)
                meta.primary_keys.update(_meta.primary_keys)

        # inherit meta from all bases, in MRO-declaration order
        base: Type[BaseModel]
        for base in bases:
            if not hasattr(base, "_meta"):
                continue
            _update_meta(base._meta, True, __MODEL_META_NOT_INHERITED__)

        # the class's own Meta overrides inherited flags (but not fields)
        _update_meta(dct.get("Meta", None), False)

        # process class fields: every Field descriptor declared on the class
        # is registered, with pk=True fields also tracked as primary keys
        for field_name, field_value in dct.items():
            if not isinstance(field_value, Field):
                continue

            meta.fields[field_name] = field_value
            if field_value.pk:
                meta.primary_keys[field_name] = field_value

        # set alias name to class name when None
        name_alias = meta.alias or name

        # validate if the alias is not duplicate
        # the caveat here is that two classes with the same name in two
        # different files will have a name collision and fail initializing
        if name_alias in MODEL_TYPE_REGISTRY:
            raise ValueError(
                f"Model alias `{name_alias}` is already used by another class."
            )

        cls_object = super().__new__(cls, name, bases, dct)

        # set the model alias to the model type (the abstract base itself
        # is intentionally kept out of the registry)
        if name_alias != "BaseModel":
            MODEL_TYPE_REGISTRY[name_alias] = cls_object

        return cls_object

    def __call__(self, *args, **kwargs):
        # Runs after every model instantiation, regardless of the
        # subclass's own __init__.
        instance: BaseModel = super().__call__(*args, **kwargs)

        # stores the default after the constructor, if nothing has been set yet
        # this is implemented here so that this is always called, regardless of the
        # models with custom constructors calling or not super().__init__()
        for field in instance._meta.fields.values():
            field._store_default(instance, force=False)

        # identity and bookkeeping attributes, assigned once per instance
        instance._internal_id = uuid4()
        instance._hash = hash((instance.__class__, str(instance._internal_id)))
        instance._persistent_values = {}
        instance._listener = EventListener()
        instance._initialized = True

        # notify the active session (if any) that a model was instantiated
        session = CURRENT_SESSION.get()
        if session:
            session._listener.dispatch(MODEL_INSTANTIATED_EVENT, instance)

        return instance
# Shared reprlib.Repr instance; caps rendered values at 200 characters so
# model representations stay readable.
_repr_obj: Repr = Repr()
_repr_obj.maxother = 200
class BaseModel(metaclass=BaseModelMeta):
"""
A representation of a remote object model.
BaseModel is an abstract class that should be extended to represent models incoming
from or outgoing to a remote REST API.
Models can exist independently from Sessions but contain an internal state that
indicates the status of the model within the current context. The Sessions are
responsible to control this state. Also, each model contains a set of control
attributes that indicate which fields are watched by restio internals. By default,
all Field descriptors in the model will become field attributes. Fields declared
with pk=True will be used by restio to optimize the caching of the models in a
Session.
Models that change over time will contain an internal dictionary with the latest
know persistent value of each field. This is done to guarantee fast rollback of the
values when the Session is invalid, and to also indicate which values might have
changed within the session scope. If a field is modified directly, the model will
intercept the change and save the older value into the persistent dictionary until
`_persist` is called. During a `_rollback` call, however, the stored values are
re-assigned to their original attributes. Each attribute change will also dispatch
an update event so that the session is aware of changes and manages the model's
internal state accordingly. The persistent dictionary (through the helper method
`is_field_modified`) can also be used by DAO's to verify which values where updated
prior to sending a request through the REST API, thus allowing for proper
optimization and minimizing chances of conflicting changes on the remote object.
All models automatically generate a random internal UUID when created. This UUID is
used internally for comparison purposes, and externally as an identity. Although
this attribute is not explicitly set as private, it should never be modified.
"""
# these are all initialized by the metaclass
_meta: ModelMeta
__state: ModelState = ModelState.UNBOUND
__primary_keys: Optional[Dict[str, Any]] = None
_initialized: bool = False
_internal_id: UUID
_hash: int
_persistent_values: Dict[str, Any]
_listener: EventListener
def __init__(self, **kwargs: T_co):
"""
Instantiates the model by matching `kwargs` parameters to field names.
Behavior is disabled when init=False in the model Meta class.
:param kwargs: The dictionary of keyword arguments matching the field names of
the model class.
:raises ValueError: When invalid arguments are provided.
"""
meta = self._meta
if not meta.init:
return
for arg_name, value in kwargs.items():
field_object = meta.fields.get(arg_name, None)
if not field_object:
if not meta.init_ignore_extra:
raise ValueError(
"Invalid argument provided to constructor of"
f" `{self.__class__.__name__}`: {arg_name}"
)
continue # pragma: no cover
if not field_object.init:
if not meta.init_ignore_extra:
raise ValueError(f"Attribute `{arg_name}` cannot be initialized.")
continue # pragma: no cover
field_object.__set__(self, value)
    @property
    def _state(self) -> ModelState:
        """
        Returns the state of the current model.

        :return: The ModelState representation.
        """
        return self.__state

    @_state.setter
    def _state(self, state: ModelState):
        # State transitions are driven by Sessions; models should not
        # normally change their own state.
        self.__state = state
    @property
    def primary_keys(self) -> Dict[str, T_co]:
        """
        Returns a dictionary containing all primary keys. The keys will be
        ordered in the same order as they are declared in the model type,
        also following the order in which they appear in class inheritance.

        This property is optimized to minimize the number of iterations done
        in the model instance by internalizing a cache with the latest retrieved
        primary keys. This cache is reset for every modification of a primary
        key and recovered during the next call to the property.

        :return: The ordered dictionary of primary key names to values.
        """
        if self.__primary_keys is None:
            # Cache miss: collect the current values from the instance.
            self.__primary_keys = self._load_primary_keys()

        return self.__primary_keys
def _load_primary_keys(self) -> Dict[str, T_co]:
"""
Returns a dictionary containing the primary key fields (keys) and their
current values in the model (values). This operation will inspect the
instance and collect all current values on-spot.
:return: Dictionary of primary keys values.
"""
return {key: getattr(self, key) for key in self._meta.primary_keys}
def _reset_primary_keys(self):
"""
Resets the internal cache of primary keys for the instance.
"""
self.__primary_keys = None
def get_children(
self,
recursive: bool = False,
children: Optional[Set[BaseModel]] = None,
top_level: Optional[BaseModel] = None,
) -> Set[BaseModel]:
"""
Returns the list of all children of the current model. This algorithm checks in
runtime for all objects refered by the instance and that are part of fields
marked with depends_on=True. When `recursive` is True, then the algorithm will
recursively search through all children.
`children` and `top_level` are control variables that indicate which models
have already been inspected by this function, in order to avoid infinite
recursion if any circular dependency exists. In most cases, they should be left
empty.
:param recursive: If True, recursively searches for children. Returns only
first degree relationships otherwise. Defaults to False.
:param children: List of existing models already inspected.
:param top_level: The top-level model from where inspection started.
:return: The list of children.
"""
if children is None:
children = set()
if top_level:
if self == top_level:
return children
children.add(self)
else:
top_level = self
for value in self.dependency_fields.values():
def check(child: Optional[BaseModel]):
# this can happen when the field allows none
if not child or child in children: # type: ignore
return
if recursive:
child.get_children(recursive, children, top_level)
else:
children.add(child)
# iterables are only supported if the values are not iterables - there is
# no recursiveness
if isinstance(value, Iterable):
value: Iterable[Any]
for item in value:
check(item)
else:
check(value)
return children
@property
def fields(self) -> Dict[str, Any]:
"""
Returns the values of each field in the model instance.
:return: A dict with keys containing the string names of the fields,
and values containing the value of the corresponding field.
"""
return {k: getattr(self, k) for k in self._filter_fields(lambda v: True)}
@property
def dependency_fields(self) -> Dict[str, Any]:
"""
Returns the values of each field that have relationship with other models.
:return: The dictionary of fields and their values
"""
return {
k: getattr(self, k) for k in self._filter_fields(lambda v: v.depends_on)
}
def is_field_modified(self, field_name: str) -> bool:
"""
Indicates of field with name `field_name` has been modified.
:param field_name: The name of the field.
:raises ValueError: When the field name does not exist.
:return: True if field is modified, False otherwise.
"""
if field_name not in self._meta.fields:
raise ValueError(
f"Field `{field_name}` does not exist in model"
" `{self.__class__.__name__}`."
)
return field_name in self._persistent_values
def _filter_fields(self, filt: Callable[[Field], bool]):
return {k: v for k, v in self._meta.fields.items() if filt(v)}
def _rollback(self):
"""
Restore the persistent values in the model to their original attributes.
"""
for attr, value in list(self._persistent_values.items()):
setattr(self, attr, value)
self._persist()
def _persist(self):
"""
Persists the current attribute values by emptying the internal persistent
dictionary. Once this is called, it is not possible to rollback to the old
values anymore. It is recommended that this method should only be called by the
party that persisted the values on the remote server.
"""
self._persistent_values = {}
def _pre_update(self, field: Field[T_co], value: T_co):
self._listener.dispatch(MODEL_PRE_UPDATE_EVENT, self, field, value)
def _update(self, field: Field[T_co], value: T_co):
if field.pk:
self._reset_primary_keys()
self._listener.dispatch(MODEL_UPDATE_EVENT, self, field, value)
def _update_persistent_values(self, field: Field[T_co], value: T_co):
name: str = field.name
if name in self._persistent_values:
if value == self._persistent_values[name]:
del self._persistent_values[name]
else:
mutable_fields = self.fields
if value != mutable_fields[name]:
self._persistent_values[name] = mutable_fields[name]
def __eq__(self, other: BaseModel) -> bool:
return isinstance(other, self.__class__) and self._hash == other._hash
def __repr__(self) -> str:
if not self._meta.repr:
return super().__repr__()
def get_field_repr(field: str):
value = getattr(self, field)
return f"{field}={_repr_obj.repr(value)}"
repr_args: List[str] = [
get_field_repr(n) for n in self._filter_fields(lambda x: x.repr)
]
return f"{self.__class__.__name__}({', '.join(repr_args)})"
def __hash__(self) -> int:
return self._hash
| 35.942661 | 88 | 0.639972 | from __future__ import annotations
from collections.abc import Iterable
from reprlib import Repr
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, Type
from uuid import UUID, uuid4
from restio.event import EventListener
from restio.fields.base import Field, T_co
from restio.shared import (
CURRENT_SESSION,
MODEL_INSTANTIATED_EVENT,
MODEL_PRE_UPDATE_EVENT,
MODEL_TYPE_REGISTRY,
MODEL_UPDATE_EVENT,
)
from restio.state import ModelState
if TYPE_CHECKING:
from restio.session import Session
def _check_model_type(obj: Optional[BaseModel]):
    """Validate that *obj* is a BaseModel instance; raise TypeError otherwise."""
    if isinstance(obj, BaseModel):
        return
    raise TypeError("The provided object is not of type BaseModel.")
class ModelMeta:
    """Per-model configuration collected and merged by BaseModelMeta."""

    __slots__ = ("init", "init_ignore_extra", "repr", "fields", "primary_keys", "alias")
    init: bool  # enable the keyword-argument constructor behavior
    init_ignore_extra: bool  # silently drop unknown constructor kwargs
    repr: bool  # enable the field-based __repr__
    fields: Dict[str, Field]  # all declared fields, by name
    primary_keys: Dict[str, Field]  # subset of fields declared with pk=True
    alias: Optional[str]  # registry alias; falls back to the class name

    def __init__(self):
        # Defaults applied before options from bases / class Meta are merged.
        self.init = True
        self.init_ignore_extra = True
        self.repr = True
        self.fields = dict()
        self.primary_keys = dict()
        self.alias = None
# Meta attributes that are never propagated from base classes to subclasses.
__MODEL_META_NOT_INHERITED__ = ("alias",)
# Read-only meta attributes, can't be modified by model class
__MODEL_META_READONLY__ = ("fields", "primary_keys")
class BaseModelMeta(type):
    """
    Metaclass for BaseModel.

    At class-creation time it collects Field declarations and ``class Meta``
    options into a ModelMeta object and registers the class by alias.
    At instantiation time it finalizes per-instance bookkeeping (identity,
    hash, listener) after ``__init__`` runs.
    """

    __slots__ = ()

    def __new__(cls, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]):
        # Reserve the per-instance attributes on the class so they always exist.
        dct["_internal_id"] = None
        dct["_hash"] = None
        dct["_listener"] = None
        dct["_persistent_values"] = None
        meta = ModelMeta()
        dct["_meta"] = meta

        def _update_meta(
            _meta: Optional[ModelMeta],
            extend: bool,
            not_inherited: Tuple[str, ...] = tuple(),
        ):
            # Copy writable meta attributes from a base class or `class Meta`
            # declaration into `meta`; when `extend` is set, also merge its
            # fields and primary keys.
            if not _meta:
                return
            propagate_meta = (
                set(meta.__slots__) - set(__MODEL_META_READONLY__) - set(not_inherited)
            )
            for meta_attribute in propagate_meta:
                if not hasattr(_meta, meta_attribute):
                    continue
                setattr(meta, meta_attribute, getattr(_meta, meta_attribute))
            if extend:
                meta.fields.update(_meta.fields)
                meta.primary_keys.update(_meta.primary_keys)

        base: Type[BaseModel]
        for base in bases:
            # Inherit meta from every base that has one (minus non-inherited
            # attributes such as the alias).
            if not hasattr(base, "_meta"):
                continue
            _update_meta(base._meta, True, __MODEL_META_NOT_INHERITED__)
        # The class's own `class Meta` overrides inherited options last.
        _update_meta(dct.get("Meta", None), False)
        for field_name, field_value in dct.items():
            if not isinstance(field_value, Field):
                continue
            meta.fields[field_name] = field_value
            if field_value.pk:
                meta.primary_keys[field_name] = field_value
        # Each concrete model must have a unique alias in the global registry.
        name_alias = meta.alias or name
        if name_alias in MODEL_TYPE_REGISTRY:
            raise ValueError(
                f"Model alias `{name_alias}` is already used by another class."
            )
        cls_object = super().__new__(cls, name, bases, dct)
        if name_alias != "BaseModel":
            MODEL_TYPE_REGISTRY[name_alias] = cls_object
        return cls_object

    def __call__(self, *args, **kwargs):
        # Runs after the instance's __init__: store field defaults, assign
        # identity and hash, wire the event listener, and announce the new
        # instance to the current session (if any).
        instance: BaseModel = super().__call__(*args, **kwargs)
        for field in instance._meta.fields.values():
            field._store_default(instance, force=False)
        instance._internal_id = uuid4()
        instance._hash = hash((instance.__class__, str(instance._internal_id)))
        instance._persistent_values = {}
        instance._listener = EventListener()
        instance._initialized = True
        session = CURRENT_SESSION.get()
        if session:
            session._listener.dispatch(MODEL_INSTANTIATED_EVENT, instance)
        return instance
# Shared Repr instance used by BaseModel.__repr__ to truncate long values.
_repr_obj: Repr = Repr()
_repr_obj.maxother = 200
class BaseModel(metaclass=BaseModelMeta):
    """
    Base class for framework models.

    Fields are declared as class attributes of type Field; the metaclass
    collects them into `_meta`, assigns per-instance identity, and wires
    event dispatching for field updates.
    """

    # All of these are initialized by the metaclass.
    _meta: ModelMeta
    __state: ModelState = ModelState.UNBOUND
    __primary_keys: Optional[Dict[str, Any]] = None
    _initialized: bool = False
    _internal_id: UUID
    _hash: int
    _persistent_values: Dict[str, Any]
    _listener: EventListener

    def __init__(self, **kwargs: T_co):
        """Populate fields from kwargs; disabled when Meta.init is False."""
        meta = self._meta
        if not meta.init:
            return
        for arg_name, value in kwargs.items():
            field_object = meta.fields.get(arg_name, None)
            if not field_object:
                if not meta.init_ignore_extra:
                    raise ValueError(
                        "Invalid argument provided to constructor of"
                        f" `{self.__class__.__name__}`: {arg_name}"
                    )
                continue
            if not field_object.init:
                if not meta.init_ignore_extra:
                    raise ValueError(f"Attribute `{arg_name}` cannot be initialized.")
                continue
            field_object.__set__(self, value)

    @property
    def _state(self) -> ModelState:
        """Current lifecycle state of the model."""
        return self.__state

    @_state.setter
    def _state(self, state: ModelState):
        self.__state = state

    @property
    def primary_keys(self) -> Dict[str, T_co]:
        """Ordered pk-name -> value mapping, cached until a pk changes."""
        if self.__primary_keys is None:
            self.__primary_keys = self._load_primary_keys()
        return self.__primary_keys

    def _load_primary_keys(self) -> Dict[str, T_co]:
        """Collect the current value of every primary-key field on-spot."""
        return {key: getattr(self, key) for key in self._meta.primary_keys}

    def _reset_primary_keys(self):
        """Drop the cached primary-key mapping."""
        self.__primary_keys = None

    def get_children(
        self,
        recursive: bool = False,
        children: Optional[Set[BaseModel]] = None,
        top_level: Optional[BaseModel] = None,
    ) -> Set[BaseModel]:
        """
        Collect the models referenced by depends_on=True fields.

        `children` and `top_level` track visited models to break cycles
        and should normally be left unset by callers.
        """
        if children is None:
            children = set()
        if top_level:
            if self == top_level:
                return children
            children.add(self)
        else:
            top_level = self
        for value in self.dependency_fields.values():
            def check(child: Optional[BaseModel]):
                # child may be None when the field allows it; skip visited.
                if not child or child in children:
                    return
                if recursive:
                    child.get_children(recursive, children, top_level)
                else:
                    children.add(child)
            # Iterables of models are supported one level deep only.
            if isinstance(value, Iterable):
                value: Iterable[Any]
                for item in value:
                    check(item)
            else:
                check(value)
        return children

    @property
    def fields(self) -> Dict[str, Any]:
        """Mapping of every field name to its current value."""
        return {k: getattr(self, k) for k in self._filter_fields(lambda v: True)}

    @property
    def dependency_fields(self) -> Dict[str, Any]:
        """Mapping of relationship (depends_on=True) field names to values."""
        return {
            k: getattr(self, k) for k in self._filter_fields(lambda v: v.depends_on)
        }

    def is_field_modified(self, field_name: str) -> bool:
        """True when `field_name` currently has a pending (unpersisted) change."""
        if field_name not in self._meta.fields:
            # NOTE(review): the second string below is missing the `f` prefix,
            # so the class-name placeholder is emitted literally -- confirm
            # and fix in a behavior-affecting change.
            raise ValueError(
                f"Field `{field_name}` does not exist in model"
                " `{self.__class__.__name__}`."
            )
        return field_name in self._persistent_values

    def _filter_fields(self, filt: Callable[[Field], bool]):
        """Return the subset of declared fields for which *filt* is true."""
        return {k: v for k, v in self._meta.fields.items() if filt(v)}

    def _rollback(self):
        """Restore persisted values onto the instance, then clear the store."""
        for attr, value in list(self._persistent_values.items()):
            setattr(self, attr, value)
        self._persist()

    def _persist(self):
        """Accept current values; rollback is no longer possible afterwards."""
        self._persistent_values = {}

    def _pre_update(self, field: Field[T_co], value: T_co):
        """Notify listeners just before *field* is assigned *value*."""
        self._listener.dispatch(MODEL_PRE_UPDATE_EVENT, self, field, value)

    def _update(self, field: Field[T_co], value: T_co):
        """Invalidate the pk cache when needed and notify update listeners."""
        if field.pk:
            self._reset_primary_keys()
        self._listener.dispatch(MODEL_UPDATE_EVENT, self, field, value)

    def _update_persistent_values(self, field: Field[T_co], value: T_co):
        """Record/cancel the original value of *field* for rollback tracking."""
        name: str = field.name
        if name in self._persistent_values:
            # Re-assigning the original value cancels the pending change.
            if value == self._persistent_values[name]:
                del self._persistent_values[name]
        else:
            mutable_fields = self.fields
            if value != mutable_fields[name]:
                self._persistent_values[name] = mutable_fields[name]

    def __eq__(self, other: BaseModel) -> bool:
        return isinstance(other, self.__class__) and self._hash == other._hash

    def __repr__(self) -> str:
        if not self._meta.repr:
            return super().__repr__()
        def get_field_repr(field: str):
            value = getattr(self, field)
            return f"{field}={_repr_obj.repr(value)}"
        repr_args: List[str] = [
            get_field_repr(n) for n in self._filter_fields(lambda x: x.repr)
        ]
        return f"{self.__class__.__name__}({', '.join(repr_args)})"

    def __hash__(self) -> int:
        return self._hash
| true | true |
f72ac444a8eab9e84fe6a3ecf0f61835271a6e97 | 4,638 | py | Python | opencv3_align_images.py | jaydenmedia/OpenCV3-Python | e0bfed6582447c567f100c507f5a8c59b621dfe1 | [
"MIT"
] | null | null | null | opencv3_align_images.py | jaydenmedia/OpenCV3-Python | e0bfed6582447c567f100c507f5a8c59b621dfe1 | [
"MIT"
] | null | null | null | opencv3_align_images.py | jaydenmedia/OpenCV3-Python | e0bfed6582447c567f100c507f5a8c59b621dfe1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Input data files are available in the "../input/" directory.
# For example, running this (by clicking run or pressing Shift+Enter) will
# list the files in the input directory from subprocess import check_output
#print(check_output(["ls", "../input"]).decode("utf8"))
#ORB is basically a fusion of FAST keypoint detector and BRIEF descriptor with
# many modifications to enhance the performance. First it use FAST to find
# keypoints, then apply Harris corner measure to find top N points among them.
#For any feature set of n binary tests at location (x_i, y_i),
# define a 2 \times n matrix, S which contains the coordinates of these pixels.
# Then using the orientation of patch, \theta, its rotation matrix is found
# and rotates the S to get steered(rotated) version S_\theta.
#ORB runs a greedy search among all possible binary tests to find the ones that
# have both high variance and means close to 0.5, as well as being uncorrelated.
# Any results write to the current directory are saved as output.
import numpy as np # linear algebra
import cv2
import os
import csv
import sys
from time import sleep
def im_align_orb(imp1, imp2, nf=10000):
    """
    Align image 2 onto image 1 using ORB keypoints and a RANSAC homography.

    :param imp1: reference image file path
    :param imp2: file path of the image to be warped
    :param nf: max number of ORB key points
    :return: image 2 warped into image 1's frame (same size as image 1)
    """
    img1 = cv2.imread(imp1, 0)
    img2 = cv2.imread(imp2, 0)
    orb = cv2.ORB_create(nfeatures=nf, WTA_K=2)
    kp1, des1 = orb.detectAndCompute(img1, None)
    kp2, des2 = orb.detectAndCompute(img2, None)
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=False)
    matches = bf.knnMatch(des1, des2, 2)
    # Lowe's ratio test: keep a match only when clearly better than its
    # runner-up (distance < 0.75 * second-best distance).
    matches_ = []
    for m in matches:
        if len(m) == 2 and m[0].distance < m[1].distance * 0.75:
            matches_.append((m[0].trainIdx, m[0].queryIdx))
    kp1_ = np.float32([kp1[m[1]].pt for m in matches_]).reshape(-1, 1, 2)
    kp2_ = np.float32([kp2[m[0]].pt for m in matches_]).reshape(-1, 1, 2)
    # Homography mapping image-2 points onto image-1 points (RANSAC, 1px).
    H, mask = cv2.findHomography(kp2_, kp1_, cv2.RANSAC, 1.0)
    # Warp the full-color version of image 2 into image 1's frame.
    # (Removed the unused `h2, w2` locals from the original.)
    h1, w1 = img1.shape[:2]
    img2 = cv2.warpPerspective(cv2.imread(imp2), H, (w1, h1))
    return img2
def align_set_by_id(setid, setvalue, isTrain=True, nFeatures=20000):
    """
    Align every image of one set against the set's first image.

    Bug fixes vs. the original: the function now uses its own parameters
    (`setid`, `setvalue`) instead of leaking the module-level loop globals
    `key`/`elem`/`om_len`, and the test branch no longer builds the
    reference path from the train folder.

    :param setid: image set id
    :param setvalue: list of image ids belonging to the set
    :param isTrain: read from the train (True) or test (False) folder
    :param nFeatures: max ORB keypoints forwarded to im_align_orb
    :return: list of aligned RGB images, reference image first
    """
    train_path = '../output/train_sm/'
    test_path = '../output/test_sm/'
    counter = 0
    if isTrain:
        image_path = train_path
        outputpath = "./train_output/"
    else:
        image_path = test_path
        outputpath = "./test_output/"
    # Reference image: first id of the set.
    fn1 = image_path + "set" + str(setid) + "_" + str(setvalue[0]) + ".jpg"
    if not isTrain:
        print(fn1)
    result = list()
    result.append(cv2.cvtColor(cv2.imread(fn1), cv2.COLOR_BGR2RGB))
    total = len(setvalue)
    for image_id in setvalue:
        fn2 = image_path + "set" + str(setid) + "_" + str(image_id) + ".jpg"
        print("fn1=%s, fn2=%s" % (os.path.basename(fn1), os.path.basename(fn2)))
        im = im_align_orb(fn1, fn2, nFeatures)
        cv2.imwrite(outputpath + os.path.basename(fn2), im)
        result.append(cv2.cvtColor(im, cv2.COLOR_BGR2RGB))
        counter += 1
        # Animated progress bar (kept from the original implementation).
        for i in range(21):
            sys.stdout.write('\r')
            sys.stdout.write(
                '[%-20s] %d%% %d/%d ' % ('=' * i, 5 * i, counter, total)
            )
            sys.stdout.flush()
            sleep(0.25)
    return result
def align_all_set(path, isTrain=True):
    """
    Group images under *path* by set id and align each set in turn.

    File names are expected to look like ``set<ID>_<IMGID>.jpg``.

    Bug fixes vs. the original: align_set_by_id was called without its
    required `setvalue` argument (a guaranteed TypeError), and the
    `isTrain` parameter was ignored.

    :param path: directory containing the images
    :param isTrain: forwarded to align_set_by_id
    """
    allfiles = os.listdir(path)
    allfiles = [
        os.path.basename(name) for name in allfiles if name.startswith('set')]
    groups = {}
    for filename in allfiles:
        stem = filename.split(".")[0]
        setid, _, image_id = stem.replace("set", "", 1).partition("_")
        groups.setdefault(setid, []).append(image_id)
    for setid, ids in sorted(groups.items()):
        align_set_by_id(setid, ids, isTrain=isTrain, nFeatures=20000)
#align_all_set(path='../output/train_sm')
def csv_lists(path):
    """
    Group a two-column CSV by its first column.

    Values from the second column are accumulated per key; once a key has
    collected five values, the entry is flushed into the result as
    ``matrix[key] = [v1..v5]``.

    :param path: path to a CSV file with rows of (set_id, image_id)
    :return: dict mapping each set id to the list of its first five values
    """
    row = []
    matrix = {}
    with open(path) as f:
        for val in csv.reader(f):
            if not row:
                row = [val[0]]
            if row[0] == val[0]:
                row.append(val[1])
            else:
                # New key: start a fresh accumulator for it.
                row = [val[0], val[1]]
            # Bug fix: the original used `len(row) is 6` -- an identity
            # comparison on an int, which only works by CPython accident.
            if len(row) == 6:
                matrix.update({row[0]: row[1:]})
    return matrix
# Build {set_id: [first five image ids]} from the precomputed features CSV
# and align each set.  NOTE(review): align_set_by_id's progress bar reads
# `om_len`, `key` and `elem` as globals -- confirm before refactoring.
outputmatrix = csv_lists('../output/features_means_train.csv')
om_len = len(outputmatrix)
for key, elem in list(outputmatrix.items()):
    align_set_by_id(key, elem, isTrain=True, nFeatures=15000)
import numpy as np
import cv2
import os
import csv
import sys
from time import sleep
def im_align_orb(imp1, imp2, nf=10000):
    # Align image 2 onto image 1: ORB keypoints -> ratio-test matches ->
    # RANSAC homography -> perspective warp into image 1's frame.
    img1 = cv2.imread(imp1, 0)
    img2 = cv2.imread(imp2, 0)
    h2, w2 = img2.shape[:2]  # NOTE(review): unused -- candidate for removal
    orb = cv2.ORB_create(nfeatures=nf, WTA_K=2)
    kp1, des1 = orb.detectAndCompute(img1, None)
    kp2, des2 = orb.detectAndCompute(img2, None)
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=False)
    matches = bf.knnMatch(des1, des2, 2)
    matches_ = []
    # Lowe's ratio test: keep only clearly-best matches.
    for m in matches:
        if len(m) == 2 and m[0].distance < m[1].distance * 0.75:
            matches_.append((m[0].trainIdx, m[0].queryIdx))
    kp1_ = np.float32([kp1[m[1]].pt for m in matches_]).reshape(-1, 1, 2)
    kp2_ = np.float32([kp2[m[0]].pt for m in matches_]).reshape(-1, 1, 2)
    H, mask = cv2.findHomography(kp2_, kp1_, cv2.RANSAC, 1.0)
    h1, w1 = img1.shape[:2]
    img2 = cv2.warpPerspective(cv2.imread(imp2), H, (w1, h1))
    return img2
def align_set_by_id(setid, setvalue, isTrain=True, nFeatures=20000):
    # Align every image of one set against the set's first image.
    # NOTE(review): `key`, `elem` and `om_len` below are module globals set
    # by the driver loop, not the parameters -- `setvalue` is unused and the
    # test branch builds fn1 from train_path.  Confirm and fix.
    train_path = '../output/train_sm/'
    test_path = '../output/test_sm/'
    counter = 0
    if isTrain:
        image_path = train_path
        fn1 = train_path + "set" + key + "_" + elem[0] + ".jpg"
        outputpath = "./train_output/"
    else:
        image_path = test_path
        fn1 = train_path + "set" + key + "_" + elem[0] + ".jpg"
        print(fn1)
        outputpath = "./test_output/"
    result = list()
    result.append(cv2.cvtColor(cv2.imread(fn1), cv2.COLOR_BGR2RGB))
    for id in elem:
        fn2 = image_path + "set" + str(setid) + "_" + str(id) + ".jpg"
        print("fn1=%s, fn2=%s" % (os.path.basename(fn1), os.path.basename(fn2)))
        im = im_align_orb(fn1, fn2, nFeatures)
        cv2.imwrite(outputpath + os.path.basename(fn2), im)
        result.append(cv2.cvtColor(im, cv2.COLOR_BGR2RGB))
        counter += 1
        # Animated progress bar; denominator is the global set count.
        for i in range(21):
            sys.stdout.write('\r')
            sys.stdout.write(
                '[%-20s] %d%% %d/%d ' % ('=' * i, 5 * i, counter, om_len)
            )
            sys.stdout.flush()
            sleep(0.25)
    return result
def align_all_set(path, isTrain=True):
    # Discover set ids from file names like "set<ID>_<IMGID>.jpg".
    # NOTE(review): align_set_by_id requires a `setvalue` argument that is
    # not passed here, so this call raises TypeError; `isTrain` is also
    # hard-coded to True.  Confirm before use.
    allfiles = os.listdir(path)
    allfiles = [
        os.path.basename(file) for file in allfiles if file.startswith('set')]
    allsets = np.unique([f.split("_")[0].replace("set", "") for f in allfiles])
    for s in allsets:
        align_set_by_id(s, isTrain=True, nFeatures=20000)
def csv_lists(path):
    # Group a two-column CSV by its first column; flush an entry once a key
    # has collected five values (key + 5 values -> len(row) == 6).
    row = []
    matrix = {}
    with open(path) as f:
        csv_reader = csv.reader(f)
        csv_list = list(csv_reader)
        for idx, val in enumerate(csv_list):
            if not row:
                row.extend([val[0]])
            if row[0] == val[0]:
                row.extend([val[1]])
            elif row != val[0]:
                # `row` is a list compared to a string, so this branch is
                # effectively an unconditional `else`.
                row = [val[0]]
                row.extend([val[1]])
            # NOTE(review): `is 6` is an identity test on an int and only
            # works via CPython's small-int cache -- should be `== 6`.
            if len(row) is 6:
                matrix.update({row[0]: row[1:]})
    return matrix
# Driver: read the per-set image lists and align each set.
outputmatrix = csv_lists('../output/features_means_train.csv')
om_len = len(outputmatrix)
for key, elem in list(outputmatrix.items()):
    align_set_by_id(key, elem, isTrain=True, nFeatures=15000)
f72ac5724f4c0949289c5827a02bc25b216cc4ef | 687 | py | Python | setup.py | DocNow/twarc-hashtags | 2a8ab84c9585b6efe9696194b6030ce5486a9e7e | [
"MIT"
] | 3 | 2021-09-09T06:22:39.000Z | 2022-02-25T13:51:29.000Z | setup.py | DocNow/twarc-hashtags | 2a8ab84c9585b6efe9696194b6030ce5486a9e7e | [
"MIT"
] | 1 | 2022-01-25T11:07:05.000Z | 2022-01-27T01:33:00.000Z | setup.py | DocNow/twarc-hashtags | 2a8ab84c9585b6efe9696194b6030ce5486a9e7e | [
"MIT"
] | null | null | null | import setuptools
# The PyPI long description comes straight from the README.
with open("README.md") as f:
    long_description = f.read()

setuptools.setup(
    name='twarc-hashtags',
    version='0.0.5',
    url='https://github.com/docnow/twarc-hashtags',
    author='Ed Summers',
    author_email='ehs@pobox.com',
    py_modules=['twarc_hashtags'],
    description='A twarc plugin to extract hashtags from Twitter data',
    long_description=long_description,
    long_description_content_type="text/markdown",
    python_requires='>=3.3',
    install_requires=['twarc>=2.1.1'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
    # Register the subcommand with twarc's plugin entry-point group.
    entry_points='''
        [twarc.plugins]
        hashtags=twarc_hashtags:hashtags
    '''
)
| 27.48 | 71 | 0.6754 | import setuptools
# Read the README once so it can be reused as the long description.
with open("README.md") as f:
    long_description = f.read()

setuptools.setup(
    name='twarc-hashtags',
    version='0.0.5',
    url='https://github.com/docnow/twarc-hashtags',
    author='Ed Summers',
    author_email='ehs@pobox.com',
    py_modules=['twarc_hashtags'],
    description='A twarc plugin to extract hashtags from Twitter data',
    long_description=long_description,
    long_description_content_type="text/markdown",
    python_requires='>=3.3',
    install_requires=['twarc>=2.1.1'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
    entry_points='''
        [twarc.plugins]
        hashtags=twarc_hashtags:hashtags
    '''
)
| true | true |
f72ac585b2ba49e680b69313a2fa0d0a5d6a749c | 137 | py | Python | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 219 | 2018-06-17T19:47:22.000Z | 2022-03-27T15:28:56.000Z | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 2 | 2020-08-12T16:47:41.000Z | 2020-12-15T17:05:57.000Z | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 182 | 2018-12-12T21:36:50.000Z | 2022-03-26T17:49:51.000Z | import re
regex_pattern = r'M{0,3}(C[MD]|D?C{0,3})(X[CL]|L?X{0,3})(I[VX]|V?I{0,3})$'
print(str(bool(re.match(regex_pattern, input())))) | 27.4 | 74 | 0.605839 | import re
# Validate one Roman numeral (1..3999) read from stdin against the pattern.
regex_pattern = r'M{0,3}(C[MD]|D?C{0,3})(X[CL]|L?X{0,3})(I[VX]|V?I{0,3})$'
print(str(bool(re.match(regex_pattern, input()))))
f72ac6556032482e4ba83a528d58e88c2de8f5b6 | 3,955 | py | Python | SimpleServer.py | wanzhiguo/mininero | 7dd71b02a4613478b59b2670ccf7c74a22cc2ffd | [
"BSD-3-Clause"
] | 64 | 2015-06-12T19:29:51.000Z | 2022-01-03T17:14:56.000Z | SimpleServer.py | wanzhiguo/mininero | 7dd71b02a4613478b59b2670ccf7c74a22cc2ffd | [
"BSD-3-Clause"
] | 4 | 2015-11-27T18:49:40.000Z | 2017-12-14T21:32:48.000Z | SimpleServer.py | wanzhiguo/mininero | 7dd71b02a4613478b59b2670ccf7c74a22cc2ffd | [
"BSD-3-Clause"
] | 39 | 2016-02-07T08:47:02.000Z | 2022-03-07T06:07:10.000Z | import MiniNero
import ed25519
import binascii
import PaperWallet
import cherrypy
import os
import time
import bitmonerod
import SimpleXMR2
lasttime = 0
def HexSigningPubKey(s):
return binascii.hexlify(ed25519.publickey(ed25519.encodeint(MiniNero.hexToInt(s))))
def Signature(m, sk):
#note this seems to return nicely sized version of the signature
#contrast with, i.e. tweetnacl..
sk2 = ed25519.encodeint(MiniNero.hexToInt(sk))
pk = ed25519.publickey(sk2)
return binascii.hexlify(ed25519.signature(m, sk2, pk))
def Verify(sig, m, pk):
return ed25519.checkvalid(binascii.unhexlify(sig), m, binascii.unhexlify(pk))
class MiniNeroServer:
exposed = True
def GET(self, id=None):
times = str(int(time.time()))
return (times)
def POST(self, signature, Type, timestamp, amount=None, destination=None, pid=None, mixin=None):
times= int(time.time())
pubkey = MiniNeroPk
global lasttime
if (abs(times - int(timestamp)) > 30):
ver = False
return ('fail based on timestamp too old')
else:
if Type == 'address':
message = Type+timestamp
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver):
print("getting address")
address = bitmonerod.myAddress()
return (str(address))
if Type == 'balance':
message = Type+timestamp
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver):
print("getting balance")
balance = bitmonerod.balance()
return (str(float(balance)/1000000000000))
if Type == 'send':
message = Type+amount.replace('.', 'd')+timestamp+destination
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver) and (abs(times - lasttime >30 )):
#create xmr2 order async, return uuid
uuid, xmr_amount, xmr_addr, xmr_pid = SimpleXMR2.btc2xmr(destination, amount)
bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
lasttime = times
return ('order uuid: '+uuid)
if Type == 'sendXMR':
message = Type+amount.replace('.', 'd')+timestamp+destination
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver) and (abs(times - lasttime >30 )):
#create xmr2 order async, return uuid
#uuid, xmr_amount, xmr_addr, xmr_pid = SimpleXMR2.btc2xmr(destination, amount)
lasttime = times
xmr_amount = amount
xmr_addr = destination
xmr_pid = pid
bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
return ('sent')
if __name__ == '__main__':
    # Load the API signing pubkey if one was saved previously; otherwise
    # generate a fresh keypair.  Bug fix: key generation used to live
    # inside the os.path.isfile branch, so a missing MiniNeroPubKey.py
    # left `MiniNeroPk` undefined and the server crashed on startup.
    if os.path.isfile('MiniNeroPubKey.py'):
        from MiniNeroPubKey import *
    try:
        MiniNeroPk
    except NameError:
        MiniNeroSk = PaperWallet.skGen()
        MiniNeroPk = HexSigningPubKey(MiniNeroSk)
        print("Your new api secret key is:")
        print(MiniNeroSk)
        print("You should save this in a password manager")
        print("Your pubkey will be stored in MiniNeroPubKey.py")
        # Persist only the public half; close the handle deterministically.
        with open('MiniNeroPubKey.py', 'w') as f:
            f.write("MiniNeroPk = \'"+MiniNeroPk+"\'")
    print("Your MiniNeroServer PubKey is:")
    print(MiniNeroPk)
    lasttime = 0
    # Launch CherryPy with a MethodDispatcher so GET/POST map to methods.
    cherrypy.tree.mount(
        MiniNeroServer(), '/api/mininero',
        {'/':
            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
        }
    )
    cherrypy.server.socket_host = '0.0.0.0'  # run on metal
    cherrypy.engine.start()
    cherrypy.engine.block()
| 35.3125 | 100 | 0.588369 | import MiniNero
import ed25519
import binascii
import PaperWallet
import cherrypy
import os
import time
import bitmonerod
import SimpleXMR2
# Replay/rate guard shared with MiniNeroServer.POST.
lasttime = 0

def HexSigningPubKey(s):
    # Hex secret scalar -> hex-encoded ed25519 public key.
    return binascii.hexlify(ed25519.publickey(ed25519.encodeint(MiniNero.hexToInt(s))))

def Signature(m, sk):
    sk2 = ed25519.encodeint(MiniNero.hexToInt(sk))
    pk = ed25519.publickey(sk2)
    return binascii.hexlify(ed25519.signature(m, sk2, pk))

def Verify(sig, m, pk):
    # Check an ed25519 signature (all arguments hex-encoded except m).
    return ed25519.checkvalid(binascii.unhexlify(sig), m, binascii.unhexlify(pk))
class MiniNeroServer:
    """Signed wallet API resource (see the commented variant above in the
    original file for a description of each message type)."""
    exposed = True

    def GET(self, id=None):
        # Server time for clients building fresh request timestamps.
        times = str(int(time.time()))
        return (times)

    def POST(self, signature, Type, timestamp, amount=None, destination=None, pid=None, mixin=None):
        times= int(time.time())
        pubkey = MiniNeroPk  # module global defined in __main__
        global lasttime
        if (abs(times - int(timestamp)) > 30):
            ver = False
            return ('fail based on timestamp too old')
        else:
            if Type == 'address':
                message = Type+timestamp
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                if (ver):
                    print("getting address")
                    address = bitmonerod.myAddress()
                    return (str(address))
            if Type == 'balance':
                message = Type+timestamp
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                if (ver):
                    print("getting balance")
                    balance = bitmonerod.balance()
                    # Atomic units -> XMR.
                    return (str(float(balance)/1000000000000))
            if Type == 'send':
                message = Type+amount.replace('.', 'd')+timestamp+destination
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                if (ver) and (abs(times - lasttime >30 )):
                    uuid, xmr_amount, xmr_addr, xmr_pid = SimpleXMR2.btc2xmr(destination, amount)
                    bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
                    lasttime = times
                    return ('order uuid: '+uuid)
            if Type == 'sendXMR':
                message = Type+amount.replace('.', 'd')+timestamp+destination
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                if (ver) and (abs(times - lasttime >30 )):
                    lasttime = times
                    xmr_amount = amount
                    xmr_addr = destination
                    xmr_pid = pid
                    bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
                    return ('sent')
if __name__ == '__main__':
    # NOTE(review): when MiniNeroPubKey.py does not exist, nothing defines
    # MiniNeroPk and the print below raises NameError -- the key-generation
    # branch is only reachable when the file exists but lacks the name.
    if(os.path.isfile('MiniNeroPubKey.py')):
        from MiniNeroPubKey import *
        try:
            MiniNeroPk
        except NameError:
            MiniNeroSk= PaperWallet.skGen()
            MiniNeroPk= HexSigningPubKey(MiniNeroSk)
            print("Your new api secret key is:")
            print(MiniNeroSk)
            print("You should save this in a password manager")
            print("Your pubkey will be stored in MiniNeroPubKey.py")
            f = open('MiniNeroPubKey.py', 'w')
            f.write("MiniNeroPk = \'"+MiniNeroPk+"\'")
    print("Your MiniNeroServer PubKey is:")
    print(MiniNeroPk)
    lasttime = 0
    # Mount the resource with a MethodDispatcher (GET/POST -> methods).
    cherrypy.tree.mount(
        MiniNeroServer(), '/api/mininero',
        {'/':
            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
        }
    )
    cherrypy.server.socket_host = '0.0.0.0'  # listen on all interfaces
    cherrypy.engine.start()
    cherrypy.engine.block()
| true | true |
f72ac71ab4bf2592bbd31344ee98206db5efb0b0 | 1,390 | py | Python | dvc/command/run.py | IlyaKisil/dvc | 1f549d665944a314331282a132b1ba3cc3a835f5 | [
"Apache-2.0"
] | null | null | null | dvc/command/run.py | IlyaKisil/dvc | 1f549d665944a314331282a132b1ba3cc3a835f5 | [
"Apache-2.0"
] | null | null | null | dvc/command/run.py | IlyaKisil/dvc | 1f549d665944a314331282a132b1ba3cc3a835f5 | [
"Apache-2.0"
] | null | null | null | import dvc.logger as logger
from dvc.command.base import CmdBase
from dvc.exceptions import DvcException
class CmdRun(CmdBase):
    """CLI handler for `dvc run`: build the shell command and execute it."""

    def _joined_cmd(self):
        """Join argv chunks into one shell string, quoting multi-word chunks.

        For two or more chunks the result keeps a leading space, matching
        the historical output format.
        """
        command = self.args.command
        if len(command) == 0:
            return ''
        if len(command) == 1:
            return command[0]
        joined = ''
        for part in command:
            template = ' {}' if len(part.split()) == 1 else ' "{}"'
            joined += template.format(part)
        return joined

    def run(self):
        """Execute the stage; return 0 on success, 1 on DvcException."""
        overwrite = self.args.yes or self.args.overwrite_dvcfile
        try:
            self.project.run(
                cmd=self._joined_cmd(),
                outs=self.args.outs,
                outs_no_cache=self.args.outs_no_cache,
                metrics_no_cache=self.args.metrics_no_cache,
                deps=self.args.deps,
                fname=self.args.file,
                cwd=self.args.cwd,
                no_exec=self.args.no_exec,
                overwrite=overwrite,
                ignore_build_cache=self.args.ignore_build_cache,
                remove_outs=self.args.remove_outs,
            )
        except DvcException:
            logger.error('failed to run command')
            return 1
        return 0
| 33.095238 | 77 | 0.488489 | import dvc.logger as logger
from dvc.command.base import CmdBase
from dvc.exceptions import DvcException
class CmdRun(CmdBase):
    # CLI handler for `dvc run`.

    def _joined_cmd(self):
        # Join argv chunks into one shell string; multi-word chunks are
        # wrapped in double quotes.  Note: for >1 chunks the result keeps a
        # leading space.
        if len(self.args.command) == 0:
            return ''
        if len(self.args.command) == 1:
            return self.args.command[0]
        cmd = ''
        for chunk in self.args.command:
            if len(chunk.split()) != 1:
                fmt = ' "{}"'
            else:
                fmt = ' {}'
            cmd += fmt.format(chunk)
        return cmd

    def run(self):
        # Execute the stage; 0 on success, 1 when DvcException is raised.
        overwrite = (self.args.yes or self.args.overwrite_dvcfile)
        try:
            self.project.run(cmd=self._joined_cmd(),
                             outs=self.args.outs,
                             outs_no_cache=self.args.outs_no_cache,
                             metrics_no_cache=self.args.metrics_no_cache,
                             deps=self.args.deps,
                             fname=self.args.file,
                             cwd=self.args.cwd,
                             no_exec=self.args.no_exec,
                             overwrite=overwrite,
                             ignore_build_cache=self.args.ignore_build_cache,
                             remove_outs=self.args.remove_outs)
        except DvcException:
            logger.error('failed to run command')
            return 1
        return 0
| true | true |
f72ac72145f9cff31e471c1a682180a9ab441579 | 1,584 | py | Python | python/misc.py | dnbh/kpg | c9e79b8092434919e9ac90dc199f49845403c2ba | [
"MIT"
] | 69 | 2018-01-08T19:56:55.000Z | 2022-03-05T17:14:05.000Z | python/misc.py | dnbaker/emp | c9e79b8092434919e9ac90dc199f49845403c2ba | [
"MIT"
] | 6 | 2018-04-14T21:09:51.000Z | 2021-07-17T21:08:54.000Z | python/misc.py | dnbaker/emp | c9e79b8092434919e9ac90dc199f49845403c2ba | [
"MIT"
] | 11 | 2018-03-21T19:28:35.000Z | 2021-06-29T17:33:34.000Z | #!/usr/bin/env python
import sys
import string
from collections import defaultdict
def freq(iterable):
    """
    Returns a dictionary of counts for each item in an iterable.

    >>>freq("ACGTTTAAA")
    {'A': 4, 'C': 1, 'G': 1, 'T': 3}
    """
    counts = defaultdict(int)
    for item in iterable:
        counts[item] = counts[item] + 1
    return counts
# Prefer cytoolz's C-accelerated `frequencies` when it is installed; the
# pure-Python implementation above remains the fallback.
try:
    from cytoolz import frequencies as freq
except ImportError:
    pass
    # Don't sweat it
# Translation table mapping each base to its complement (N maps to itself);
# str.maketrans on Python 3, string.maketrans on Python 2.
REV_CMP_TABLE = (str if sys.version_info[0] == 3
                 else string).maketrans("ACGTN", "TGCAN")


def revcmp(seq):
    """
    Returns the reverse complement of a sequence.

    >>>revcmp("ACGTNTTTAAATTT")
    'AAATTTAAANACGT'
    """
    reversed_seq = seq[::-1]
    return reversed_seq.translate(REV_CMP_TABLE)
def xopen(path):
"""
Stolen from Dooplicity. (https://github.com/nellore/rail/),
then stripped to only open files with open or gzip to open
based on magic number presence.
"""
import gzip
fh = (gzip.open(path, "rb") if open(path, 'rb').read(2) == '\x1f\x8b'
else open(path, "r"))
try:
yield fh
finally:
fh.close()
# Public API of the module.  Bug fix: __all__ must contain *names* as
# strings -- listing the objects themselves makes `from misc import *`
# raise a TypeError.
__all__ = ["revcmp", "REV_CMP_TABLE", "freq", "xopen"]
if __name__ == "__main__":
    """
    Unit tests
    """
    import unittest

    class Test(unittest.TestCase):
        # Sanity checks for the two pure helpers above.
        def test_revcmp(self):
            self.assertEqual(revcmp("ACGTACCTTATATATATA"),
                             "TATATATATAAGGTACGT")

        def test_freq(self):
            self.assertEqual(freq("ACGTTTAAA"),
                             {'A': 4, 'C': 1, 'G': 1, 'T': 3})
    unittest.main()
| 22.628571 | 73 | 0.571338 |
import sys
import string
from collections import defaultdict
def freq(iterable):
    """Tally how many times each element occurs in *iterable*.

    Returns a ``defaultdict(int)`` keyed by element.
    """
    tally = defaultdict(int)
    for element in iterable:
        tally[element] += 1
    return tally
try:
from cytoolz import frequencies as freq
except ImportError:
pass
# Base -> complement translation table; built with str.maketrans on
# Python 3 and string.maketrans on Python 2 (N is its own complement).
REV_CMP_TABLE = (str if sys.version_info[0] == 3
                 else string).maketrans("ACGTN", "TGCAN")


def revcmp(seq):
    """Return the reverse complement of *seq*."""
    # Complementing then reversing is equivalent to reversing then
    # complementing, since the mapping is applied per character.
    return seq.translate(REV_CMP_TABLE)[::-1]
def xopen(path):
import gzip
fh = (gzip.open(path, "rb") if open(path, 'rb').read(2) == '\x1f\x8b'
else open(path, "r"))
try:
yield fh
finally:
fh.close()
# __all__ entries must be strings; listing the objects themselves makes
# ``from misc import *`` raise a TypeError on the non-string names.
__all__ = ["revcmp", "REV_CMP_TABLE", "freq", "xopen"]
if __name__ == "__main__":
    # Minimal self-test harness; executed only when run as a script.
    import unittest

    class Test(unittest.TestCase):
        def test_revcmp(self):
            # Reverse the order and complement every base.
            self.assertEqual(revcmp("ACGTACCTTATATATATA"),
                             "TATATATATAAGGTACGT")

        def test_freq(self):
            # A defaultdict(int) compares equal to a plain dict.
            self.assertEqual(freq("ACGTTTAAA"),
                             {'A': 4, 'C': 1, 'G': 1, 'T': 3})

    # unittest.main() parses sys.argv and raises SystemExit when done.
    unittest.main()
| true | true |
f72ac86bdcf9c11af4e34184f7bc61e8e47c1475 | 1,781 | py | Python | dex/dextIR/CommandListIR.py | jmorse/dexter | 79cefa890d041dfc927aea2a84737aa704ddd35c | [
"MIT"
] | null | null | null | dex/dextIR/CommandListIR.py | jmorse/dexter | 79cefa890d041dfc927aea2a84737aa704ddd35c | [
"MIT"
] | null | null | null | dex/dextIR/CommandListIR.py | jmorse/dexter | 79cefa890d041dfc927aea2a84737aa704ddd35c | [
"MIT"
] | null | null | null | # DExTer : Debugging Experience Tester
# ~~~~~~ ~ ~~ ~ ~~
#
# Copyright (c) 2018 by SN Systems Ltd., Sony Interactive Entertainment Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Serialization of the DExTer commands embedded within the files under test.
"""
from dex.dextIR.CommandIR import CommandIR
from dex.utils.serialize import SrField, SrObject
class CommandListIR(SrObject):
    """Serializable container for the ordered list of DExTer commands
    embedded in a file under test.
    """

    sr_fields = [
        SrField('command_list',
                CommandIR,
                list_of=True,
                required_in_init=False,
                default_value=list),
    ]

    def __getitem__(self, idx):
        """Return the command at position *idx*."""
        return self.command_list[idx]

    def append(self, item):
        """Append *item* to the command list (returns None, like list.append)."""
        return self.command_list.append(item)
| 39.577778 | 79 | 0.718136 |
from dex.dextIR.CommandIR import CommandIR
from dex.utils.serialize import SrField, SrObject
class CommandListIR(SrObject):
    """Serializable list of DExTer commands embedded in a test file."""
    # Single serialized field: an optional list of CommandIR objects,
    # defaulting to an empty list.
    sr_fields = [
        SrField(
            'command_list',
            CommandIR,
            list_of=True,
            required_in_init=False,
            default_value=list),
    ]

    def __getitem__(self, idx):
        """Return the command at position *idx*."""
        return getattr(self, 'command_list')[idx]

    def append(self, item):
        """Append *item* to the command list (returns None, like list.append)."""
        return getattr(self, 'command_list').append(item)
| true | true |
f72ac92ca104149447f8f64cf75ef595d16ca300 | 9,128 | py | Python | tests/operators/test_gcs_to_s3.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2019-09-06T09:55:18.000Z | 2019-09-06T09:55:18.000Z | tests/operators/test_gcs_to_s3.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | tests/operators/test_gcs_to_s3.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2019-12-09T08:41:32.000Z | 2019-12-09T08:41:32.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from airflow.operators.gcs_to_s3 import GoogleCloudStorageToS3Operator
from airflow.hooks.S3_hook import S3Hook
from tests.compat import mock
try:
from moto import mock_s3
except ImportError:
mock_s3 = None
# Shared fixtures: the mocked GCS hook reports MOCK_FILES under
# gs://test-bucket (prefix TEST, suffix .csv) and uploads land in s3://bucket/.
TASK_ID = 'test-gcs-list-operator'
GCS_BUCKET = 'test-bucket'
DELIMITER = '.csv'
PREFIX = 'TEST'
S3_BUCKET = 's3://bucket/'
MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
class TestGoogleCloudStorageToS3Operator(unittest.TestCase):
    """Tests for GoogleCloudStorageToS3Operator sync behaviour.

    Each case mocks the GCS hooks so that listing returns MOCK_FILES and
    every download yields b'testing', then checks which keys the operator
    uploads into a moto-mocked S3 bucket. The five scenarios only differ in
    the ``replace`` flag and which files already exist at the destination,
    so the shared machinery is factored into private helpers below.
    """

    @staticmethod
    def _configure_mocks(mock_hook, mock_hook2):
        # GCS listing yields MOCK_FILES; every download returns b'testing'.
        mock_hook.return_value.list.return_value = MOCK_FILES
        mock_hook.return_value.download.return_value = b"testing"
        mock_hook2.return_value.list.return_value = MOCK_FILES

    @staticmethod
    def _build_operator(replace):
        # Operator under test; only the ``replace`` flag varies per case.
        return GoogleCloudStorageToS3Operator(task_id=TASK_ID,
                                              bucket=GCS_BUCKET,
                                              prefix=PREFIX,
                                              delimiter=DELIMITER,
                                              dest_aws_conn_id=None,
                                              dest_s3_key=S3_BUCKET,
                                              replace=replace)

    @staticmethod
    def _create_dest_bucket(existing_files):
        # Create the destination bucket pre-populated with ``existing_files``.
        hook = S3Hook(aws_conn_id=None)
        bucket = hook.get_bucket('bucket')
        bucket.create()
        for mock_file in existing_files:
            bucket.put_object(Key=mock_file, Body=b'testing')
        return hook

    def _run_and_check(self, replace, existing_files, expected_uploads):
        # Shared scenario driver: build the operator, set up S3, execute,
        # then verify the uploaded keys and that the destination bucket
        # finally holds all of MOCK_FILES.
        operator = self._build_operator(replace)
        hook = self._create_dest_bucket(existing_files)
        uploaded_files = operator.execute(None)
        self.assertEqual(sorted(expected_uploads), sorted(uploaded_files))
        self.assertEqual(sorted(MOCK_FILES),
                         sorted(hook.list_keys('bucket', delimiter='/')))

    # Test1: incremental behaviour (just some files missing)
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_incremental(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # Only the first file pre-exists, so the other two get uploaded.
        self._run_and_check(replace=False,
                            existing_files=MOCK_FILES[:1],
                            expected_uploads=MOCK_FILES[1:])

    # Test2: all files already in origin and destination, without replace
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_without_replace(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # Everything already present and replace=False: nothing is uploaded.
        self._run_and_check(replace=False,
                            existing_files=MOCK_FILES,
                            expected_uploads=[])

    # Test3: there are no files in the destination bucket
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # Empty destination: every file is uploaded.
        self._run_and_check(replace=False,
                            existing_files=[],
                            expected_uploads=MOCK_FILES)

    # Test4: destination and origin in sync, but replace all files
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_with_replace(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # replace=True re-uploads everything even though it already exists.
        self._run_and_check(replace=True,
                            existing_files=MOCK_FILES,
                            expected_uploads=MOCK_FILES)

    # Test5: incremental sync with replace
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_incremental_with_replace(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # replace=True uploads all files even when some already exist.
        self._run_and_check(replace=True,
                            existing_files=MOCK_FILES[:2],
                            expected_uploads=MOCK_FILES)
| 48.296296 | 86 | 0.594106 |
import unittest
from airflow.operators.gcs_to_s3 import GoogleCloudStorageToS3Operator
from airflow.hooks.S3_hook import S3Hook
from tests.compat import mock
try:
from moto import mock_s3
except ImportError:
mock_s3 = None
# Shared fixtures: the mocked GCS hook reports MOCK_FILES under
# gs://test-bucket (prefix TEST, suffix .csv) and uploads land in s3://bucket/.
TASK_ID = 'test-gcs-list-operator'
GCS_BUCKET = 'test-bucket'
DELIMITER = '.csv'
PREFIX = 'TEST'
S3_BUCKET = 's3://bucket/'
MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
class TestGoogleCloudStorageToS3Operator(unittest.TestCase):
    """Tests for GoogleCloudStorageToS3Operator sync behaviour.

    Each case mocks the GCS hooks so that listing returns MOCK_FILES and
    every download yields b'testing', then checks which keys the operator
    uploads into a moto-mocked S3 bucket. The five scenarios only differ in
    the ``replace`` flag and which files already exist at the destination,
    so the shared machinery is factored into private helpers below.
    """

    @staticmethod
    def _configure_mocks(mock_hook, mock_hook2):
        # GCS listing yields MOCK_FILES; every download returns b'testing'.
        mock_hook.return_value.list.return_value = MOCK_FILES
        mock_hook.return_value.download.return_value = b"testing"
        mock_hook2.return_value.list.return_value = MOCK_FILES

    @staticmethod
    def _build_operator(replace):
        # Operator under test; only the ``replace`` flag varies per case.
        return GoogleCloudStorageToS3Operator(task_id=TASK_ID,
                                              bucket=GCS_BUCKET,
                                              prefix=PREFIX,
                                              delimiter=DELIMITER,
                                              dest_aws_conn_id=None,
                                              dest_s3_key=S3_BUCKET,
                                              replace=replace)

    @staticmethod
    def _create_dest_bucket(existing_files):
        # Create the destination bucket pre-populated with ``existing_files``.
        hook = S3Hook(aws_conn_id=None)
        bucket = hook.get_bucket('bucket')
        bucket.create()
        for mock_file in existing_files:
            bucket.put_object(Key=mock_file, Body=b'testing')
        return hook

    def _run_and_check(self, replace, existing_files, expected_uploads):
        # Shared scenario driver: build the operator, set up S3, execute,
        # then verify the uploaded keys and that the destination bucket
        # finally holds all of MOCK_FILES.
        operator = self._build_operator(replace)
        hook = self._create_dest_bucket(existing_files)
        uploaded_files = operator.execute(None)
        self.assertEqual(sorted(expected_uploads), sorted(uploaded_files))
        self.assertEqual(sorted(MOCK_FILES),
                         sorted(hook.list_keys('bucket', delimiter='/')))

    # Incremental behaviour (just some files missing).
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_incremental(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # Only the first file pre-exists, so the other two get uploaded.
        self._run_and_check(replace=False,
                            existing_files=MOCK_FILES[:1],
                            expected_uploads=MOCK_FILES[1:])

    # All files already in origin and destination, without replace.
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_without_replace(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # Everything already present and replace=False: nothing is uploaded.
        self._run_and_check(replace=False,
                            existing_files=MOCK_FILES,
                            expected_uploads=[])

    # No files in the destination bucket.
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # Empty destination: every file is uploaded.
        self._run_and_check(replace=False,
                            existing_files=[],
                            expected_uploads=MOCK_FILES)

    # Destination and origin in sync, but replace all files.
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_with_replace(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # replace=True re-uploads everything even though it already exists.
        self._run_and_check(replace=True,
                            existing_files=MOCK_FILES,
                            expected_uploads=MOCK_FILES)

    # Incremental sync with replace.
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_incremental_with_replace(self, mock_hook, mock_hook2):
        self._configure_mocks(mock_hook, mock_hook2)
        # replace=True uploads all files even when some already exist.
        self._run_and_check(replace=True,
                            existing_files=MOCK_FILES[:2],
                            expected_uploads=MOCK_FILES)
| true | true |
f72ac9963aa7ac311bd31b4b62e26bdb62abf353 | 4,930 | py | Python | apps/fig8.py | songhan/Halide | e7f78ac4ed6e154474732b1d53b9418fe353b0c0 | [
"MIT"
] | null | null | null | apps/fig8.py | songhan/Halide | e7f78ac4ed6e154474732b1d53b9418fe353b0c0 | [
"MIT"
] | null | null | null | apps/fig8.py | songhan/Halide | e7f78ac4ed6e154474732b1d53b9418fe353b0c0 | [
"MIT"
] | 1 | 2021-02-18T14:18:09.000Z | 2021-02-18T14:18:09.000Z | #/usr/bin/env python
from pygg import *
import pandas
from sqlalchemy import create_engine
from tempfile import mkstemp
import sys, os
resfname='fig8.csv'
res = pandas.read_csv(resfname)
"""
t = theme(axis.line=element_blank(),
axis.text.x=element_blank(),
axis.text.y=element_blank(),
axis.ticks=element_blank(),
axis.title.x=element_blank(),
axis.title.y=element_blank(),
legend.position="none",
panel.background=element_blank(),
panel.border=element_blank(),
panel.grid.major=element_blank(),
panel.grid.minor=element_blank(),
plot.background=element_blank())
"""
# R/ggplot2 preamble emitted once per plot: loads the libraries, reads the
# per-plot CSV (path substituted via .format(csvfile=...)), fixes the factor
# orderings, and defines the shared minimalist theme ``t``.
prolog = """
library(ggplot2)
require(grid)
require(gridExtra)
data = read.csv('{csvfile}',sep=',')
data$version <- factor(data$version, levels=c('naive','ref','auto'))
data$threads <- factor(data$threads)
data$app <- factor(data$app)
t = theme(
axis.title.x=element_blank(),
axis.title.y=element_blank(),
axis.line = element_line(colour = "grey20", size = 0.15),
axis.text.x = element_text(colour="grey20",size=2, face="plain"),
axis.text.y = element_text(colour="grey20",size=2, face="plain"),
axis.ticks=element_blank(),
panel.grid.major=element_blank(),
panel.background=element_blank(),
panel.grid.minor=element_blank(),
panel.border=element_blank(),
axis.ticks.margin = unit(1,'pt'),
axis.ticks.length = unit(0,'pt'),
panel.margin=unit(0,'pt'),
plot.title = element_text(size=2.5),
plot.margin= unit(c(0, 0, 0, 0), "lines"),
plot.background=element_blank(),
legend.position="none"
)
"""
# axis.line=element_blank(),
# axis.text.x=element_blank(),
# axis.text.y=element_blank(),
# panel.background=element_rect(fill='grey97'),
# panel.grid.major=element_line(size=0.25),
# panel.border=element_rect(color='grey90', fill=NA, size=0.5),
# Maps app directory names to the labels printed above each subplot.
printable_name = {
    'blur': 'BLUR',
    'unsharp': 'UNSHARP',
    'harris': 'HARRIS',
    'camera_pipe': 'CAMERA',
    'non_local_means': 'NLMEANS',
    'max_filter': 'MAXFILTER',
    'interpolate': 'MSCALE_INTERP',
    'local_laplacian': 'LOCAL_LAPLACIAN',
    'lens_blur': 'LENS_BLUR',
    'bilateral_grid': 'BILATERAL',
    'hist': 'HIST_EQ',
    'conv_layer': 'CONVLAYER',
    'vgg': 'VGG',
    'mat_mul': 'MATMUL'
}
def plot(app):
    """Build the two-bar (ARM vs GPU) chart for one app and return its R source."""
    subset_expr = ("subset(data, (data$app == '{0}') & "
                   "(data$threads == 'cpu' | data$threads == 'gpu'))".format(app))
    chart = ggplot(subset_expr, aes(x='threads', y='throughput_norm')) + ylim(0, 1)
    chart += geom_bar(aes(fill='version'), width='0.5', stat="'identity'",
                      position="position_dodge(width=0.6)")
    # Bar colours in factor-level order (naive / ref / auto).
    chart += scale_fill_manual('values=c("#b3b3b3","#f5c46c","#F95738")')
    chart += ggtitle("'{0}'".format(printable_name[app]))
    chart += scale_x_discrete('expand=c(0, 0.5), labels=c("ARM", "GPU")')
    chart += scale_y_continuous('expand=c(0, 0), breaks=c(0, 0.5, 1), labels = c("0", "0.5", "1")')
    chart += coord_fixed(ratio=1.25)
    return str(chart)
# app_name_norm = app.replace(' ', '_').lower()
# fname = 'fig1-{0}.png'.format(app_name_norm)
# ggsave('fig1-{0}.png'.format(app_name_norm),
# pl,
# #data=res[(res.app == app) & ((res.threads == 1) | (res.threads == 4))],
# prefix="""
# data = subset(read.csv('benchmarks.csv',sep=','), (threads == 1 | threads == 4))
# data$version <- factor(data$version, levels=c('naive','auto','ref'))
# data$threads <- factor(data$threads)
# """.format(app))
# NOTE(review): this early exit makes everything below unreachable; it looks
# like a debugging leftover -- confirm before relying on the code that follows.
sys.exit()
# NOTE(review): dead code -- never reached because of the sys.exit() above.
# The Python 2 print statements below also mean this script predates py3.
apps = ['blur', 'unsharp', 'harris', 'camera_pipe', 'non_local_means', \
    'interpolate', 'local_laplacian', 'lens_blur', 'max_filter', 'bilateral_grid', 'hist',\
    'conv_layer', 'vgg', 'mat_mul']
# ``prog`` accumulates the generated R program: one prolog + plot per app.
prog = "plots <- list()" + '\n'
plot_num = 0
arrange_str = ""
for app in apps:
    print '\n\n\n===== {0} ====='.format(app)
    plot_num = plot_num + 1
    app_name_norm = app.replace(' ', '_').lower()
    fname = 'fig1-{0}.pdf'.format(app_name_norm)
    # Select the cpu/gpu rows for this app.
    reldata = res[((res.threads == 'cpu') | (res.threads == 'gpu')) & (res.app == app)]
    # Re-normalize throughput within the selected subset.
    reldata.throughput_norm = reldata.throughput_norm / max(reldata.throughput_norm)
    assert(max(reldata.throughput_norm) == 1.0)
    # Write the subset to a temp CSV that the generated R code will read.
    (csvfp,csvfile) = mkstemp(suffix='.csv')
    reldata.to_csv(csvfile)
    prog += prolog.format(csvfile=csvfile) + '\n'
    arrange_str += "p{0},".format(plot_num)
    prog += "p{0} <- {1} + t".format(plot_num, plot(app)) + '\n'
# Assemble all subplots into a single 7-column PDF and run the R program.
prog += "pdf('fig8.pdf', width = 7, height = 1.5)" + '\n'
prog += "grid.arrange(" + arrange_str + "ncol = 7, clip=TRUE)" + '\n'
prog += "dev.off()" + '\n'
print prog
execute_r(prog, True)
| 33.537415 | 117 | 0.582353 |
from pygg import *
import pandas
from sqlalchemy import create_engine
from tempfile import mkstemp
import sys, os
resfname='fig8.csv'
res = pandas.read_csv(resfname)
"""
t = theme(axis.line=element_blank(),
axis.text.x=element_blank(),
axis.text.y=element_blank(),
axis.ticks=element_blank(),
axis.title.x=element_blank(),
axis.title.y=element_blank(),
legend.position="none",
panel.background=element_blank(),
panel.border=element_blank(),
panel.grid.major=element_blank(),
panel.grid.minor=element_blank(),
plot.background=element_blank())
"""
prolog = """
library(ggplot2)
require(grid)
require(gridExtra)
data = read.csv('{csvfile}',sep=',')
data$version <- factor(data$version, levels=c('naive','ref','auto'))
data$threads <- factor(data$threads)
data$app <- factor(data$app)
t = theme(
axis.title.x=element_blank(),
axis.title.y=element_blank(),
axis.line = element_line(colour = "grey20", size = 0.15),
axis.text.x = element_text(colour="grey20",size=2, face="plain"),
axis.text.y = element_text(colour="grey20",size=2, face="plain"),
axis.ticks=element_blank(),
panel.grid.major=element_blank(),
panel.background=element_blank(),
panel.grid.minor=element_blank(),
panel.border=element_blank(),
axis.ticks.margin = unit(1,'pt'),
axis.ticks.length = unit(0,'pt'),
panel.margin=unit(0,'pt'),
plot.title = element_text(size=2.5),
plot.margin= unit(c(0, 0, 0, 0), "lines"),
plot.background=element_blank(),
legend.position="none"
)
"""
printable_name = {
'blur': 'BLUR',
'unsharp': 'UNSHARP',
'harris': 'HARRIS',
'camera_pipe': 'CAMERA',
'non_local_means': 'NLMEANS',
'max_filter': 'MAXFILTER',
'interpolate': 'MSCALE_INTERP',
'local_laplacian': 'LOCAL_LAPLACIAN',
'lens_blur': 'LENS_BLUR',
'bilateral_grid': 'BILATERAL',
'hist': 'HIST_EQ',
'conv_layer': 'CONVLAYER',
'vgg': 'VGG',
'mat_mul': 'MATMUL'
}
def plot(app):
    """Build the two-bar (ARM vs GPU) chart for one app and return its R source."""
    subset_expr = ("subset(data, (data$app == '{0}') & "
                   "(data$threads == 'cpu' | data$threads == 'gpu'))".format(app))
    chart = ggplot(subset_expr, aes(x='threads', y='throughput_norm')) + ylim(0, 1)
    chart += geom_bar(aes(fill='version'), width='0.5', stat="'identity'",
                      position="position_dodge(width=0.6)")
    # Bar colours in factor-level order (naive / ref / auto).
    chart += scale_fill_manual('values=c("#b3b3b3","#f5c46c","#F95738")')
    chart += ggtitle("'{0}'".format(printable_name[app]))
    chart += scale_x_discrete('expand=c(0, 0.5), labels=c("ARM", "GPU")')
    chart += scale_y_continuous('expand=c(0, 0), breaks=c(0, 0.5, 1), labels = c("0", "0.5", "1")')
    chart += coord_fixed(ratio=1.25)
    return str(chart)
, (threads == 1 | threads == 4))
# data$version <- factor(data$version, levels=c('naive','auto','ref'))
# data$threads <- factor(data$threads)
# """.format(app))
sys.exit()
apps = ['blur', 'unsharp', 'harris', 'camera_pipe', 'non_local_means', \
'interpolate', 'local_laplacian', 'lens_blur', 'max_filter', 'bilateral_grid', 'hist',\
'conv_layer', 'vgg', 'mat_mul']
prog = "plots <- list()" + '\n'
plot_num = 0
arrange_str = ""
for app in apps:
print '\n\n\n===== {0} ====='.format(app)
plot_num = plot_num + 1
app_name_norm = app.replace(' ', '_').lower()
fname = 'fig1-{0}.pdf'.format(app_name_norm)
reldata = res[((res.threads == 'cpu') | (res.threads == 'gpu')) & (res.app == app)]
reldata.throughput_norm = reldata.throughput_norm / max(reldata.throughput_norm)
assert(max(reldata.throughput_norm) == 1.0)
(csvfp,csvfile) = mkstemp(suffix='.csv')
reldata.to_csv(csvfile)
prog += prolog.format(csvfile=csvfile) + '\n'
arrange_str += "p{0},".format(plot_num)
prog += "p{0} <- {1} + t".format(plot_num, plot(app)) + '\n'
prog += "pdf('fig8.pdf', width = 7, height = 1.5)" + '\n'
prog += "grid.arrange(" + arrange_str + "ncol = 7, clip=TRUE)" + '\n'
prog += "dev.off()" + '\n'
print prog
execute_r(prog, True)
| false | true |
f72acb68ed93a51226e787125180c68eb7131f4d | 5,030 | py | Python | gdxpds/read_gdx.py | cdgaete/gdx-pandas | 2b9b00a177268227bce189939cdab081e09cb0dc | [
"BSD-3-Clause"
] | null | null | null | gdxpds/read_gdx.py | cdgaete/gdx-pandas | 2b9b00a177268227bce189939cdab081e09cb0dc | [
"BSD-3-Clause"
] | null | null | null | gdxpds/read_gdx.py | cdgaete/gdx-pandas | 2b9b00a177268227bce189939cdab081e09cb0dc | [
"BSD-3-Clause"
] | null | null | null | # [LICENSE]
# Copyright (c) 2018, Alliance for Sustainable Energy.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# 2. Redistributions in binary form must reproduce the
# above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or
# promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# [/LICENSE]
from collections import OrderedDict
import logging
# gdxpds needs to be imported before pandas to try to avoid library conflict on
# Linux that causes a segmentation fault.
from gdxpds.tools import Error
from gdxpds.gdx import GdxFile
logger = logging.getLogger(__name__)
class Translator(object):
    """Loads a GDX file and exposes its symbols as pandas DataFrames."""

    def __init__(self,gdx_file,gams_dir=None,lazy_load=False):
        # GdxFile owns the low-level GDX handle. With lazy_load=True,
        # symbol data is only pulled from disk when first requested.
        self.__gdx = GdxFile(gams_dir=gams_dir,lazy_load=lazy_load)
        self.__gdx.read(gdx_file)
        # Lazily-built cache of {symbol name: DataFrame}.
        self.__dataframes = None

    def __exit__(self, *args):
        # NOTE(review): forwards ``self`` (this Translator) as the first
        # positional argument of GdxFile.__exit__ -- verify this matches
        # GdxFile's context-manager signature.
        self.__gdx.__exit__(self, *args)

    def __del__(self):
        # Explicitly finalize the wrapped GdxFile so the underlying
        # GDX handle is released promptly.
        self.__gdx.__del__()

    @property
    def gams_dir(self):
        """Path to the GAMS installation used to read the file."""
        return self.gdx.gams_dir

    @gams_dir.setter
    def gams_dir(self, value):
        self.gdx.gams_dir = value

    @property
    def gdx_file(self):
        """Path of the currently loaded GDX file."""
        return self.gdx.filename

    @gdx_file.setter
    def gdx_file(self,value):
        # Re-point at a new file: tear down the old GdxFile, build a fresh
        # one with the same settings, and drop the DataFrame cache.
        self.__gdx.__del__()
        self.__gdx = GdxFile(gams_dir=self.gdx.gams_dir,lazy_load=self.gdx.lazy_load)
        self.__gdx.read(value)
        self.__dataframes = None

    @property
    def gdx(self):
        """The underlying GdxFile object."""
        return self.__gdx

    @property
    def dataframes(self):
        """OrderedDict of {symbol name: DataFrame} for every symbol.

        Loads any not-yet-loaded symbols on first access and caches the
        result; each value is a copy of the symbol's DataFrame.
        """
        if self.__dataframes is None:
            self.__dataframes = OrderedDict()
            for symbol in self.__gdx:
                if not symbol.loaded:
                    symbol.load()
                self.__dataframes[symbol.name] = symbol.dataframe.copy()
        return self.__dataframes

    @property
    def symbols(self):
        """List of symbol names available in the GDX file."""
        return [symbol_name for symbol_name in self.gdx]

    def dataframe(self, symbol_name):
        """Return a copy of the DataFrame for a single symbol.

        Raises gdxpds.tools.Error if symbol_name is not in the file.
        """
        if not symbol_name in self.gdx:
            raise Error("No symbol named '{}' in '{}'.".format(symbol_name, self.gdx_file))
        if not self.gdx[symbol_name].loaded:
            self.gdx[symbol_name].load()
        # This was returning { symbol_name: dataframe }, which seems intuitively off.
        return self.gdx[symbol_name].dataframe.copy()
def to_dataframes(gdx_file,gams_dir=None):
    """Convert a GAMS GDX file into pandas DataFrames.

    Parameters:
      - gdx_file (string): path to a GDX file
      - gams_dir (string): optional path to GAMS directory

    Returns a dict of Pandas DataFrames, one item for each symbol in the
    GDX file, keyed with the symbol name.
    """
    translator = Translator(gdx_file, gams_dir=gams_dir)
    return translator.dataframes
def list_symbols(gdx_file,gams_dir=None):
    """Return the list of symbols available in gdx_file.

    Parameters:
      - gdx_file (string): path to a GDX file
      - gams_dir (string): optional path to GAMS directory
    """
    # lazy_load avoids reading any symbol data just to enumerate names.
    return Translator(gdx_file, gams_dir=gams_dir, lazy_load=True).symbols
def to_dataframe(gdx_file,symbol_name,gams_dir=None,old_interface=True):
    """Fetch the pandas.DataFrame for a single symbol in a GDX file.

    Parameters:
      - gdx_file (string): path to a GDX file
      - symbol_name (string): symbol whose pandas.DataFrame is requested
      - gams_dir (string): optional path to GAMS directory
      - old_interface (bool): if True (default), wrap the result as
        {symbol_name: DataFrame}; otherwise return the DataFrame itself
    """
    translator = Translator(gdx_file, gams_dir=gams_dir, lazy_load=True)
    df = translator.dataframe(symbol_name)
    if old_interface:
        return {symbol_name: df}
    return df
| 34.689655 | 91 | 0.706362 |
from collections import OrderedDict
import logging
from gdxpds.tools import Error
from gdxpds.gdx import GdxFile
logger = logging.getLogger(__name__)
class Translator(object):
    """Loads a GDX file and exposes its symbols as pandas DataFrames."""

    def __init__(self,gdx_file,gams_dir=None,lazy_load=False):
        # GdxFile owns the low-level GDX handle. With lazy_load=True,
        # symbol data is only pulled from disk when first requested.
        self.__gdx = GdxFile(gams_dir=gams_dir,lazy_load=lazy_load)
        self.__gdx.read(gdx_file)
        # Lazily-built cache of {symbol name: DataFrame}.
        self.__dataframes = None

    def __exit__(self, *args):
        # NOTE(review): forwards ``self`` (this Translator) as the first
        # positional argument of GdxFile.__exit__ -- verify this matches
        # GdxFile's context-manager signature.
        self.__gdx.__exit__(self, *args)

    def __del__(self):
        # Explicitly finalize the wrapped GdxFile so the underlying
        # GDX handle is released promptly.
        self.__gdx.__del__()

    @property
    def gams_dir(self):
        """Path to the GAMS installation used to read the file."""
        return self.gdx.gams_dir

    @gams_dir.setter
    def gams_dir(self, value):
        self.gdx.gams_dir = value

    @property
    def gdx_file(self):
        """Path of the currently loaded GDX file."""
        return self.gdx.filename

    @gdx_file.setter
    def gdx_file(self,value):
        # Re-point at a new file: tear down the old GdxFile, build a fresh
        # one with the same settings, and drop the DataFrame cache.
        self.__gdx.__del__()
        self.__gdx = GdxFile(gams_dir=self.gdx.gams_dir,lazy_load=self.gdx.lazy_load)
        self.__gdx.read(value)
        self.__dataframes = None

    @property
    def gdx(self):
        """The underlying GdxFile object."""
        return self.__gdx

    @property
    def dataframes(self):
        """OrderedDict of {symbol name: DataFrame} for every symbol.

        Loads any not-yet-loaded symbols on first access and caches the
        result; each value is a copy of the symbol's DataFrame.
        """
        if self.__dataframes is None:
            self.__dataframes = OrderedDict()
            for symbol in self.__gdx:
                if not symbol.loaded:
                    symbol.load()
                self.__dataframes[symbol.name] = symbol.dataframe.copy()
        return self.__dataframes

    @property
    def symbols(self):
        """List of symbol names available in the GDX file."""
        return [symbol_name for symbol_name in self.gdx]

    def dataframe(self, symbol_name):
        """Return a copy of the DataFrame for a single symbol.

        Raises gdxpds.tools.Error if symbol_name is not in the file.
        """
        if not symbol_name in self.gdx:
            raise Error("No symbol named '{}' in '{}'.".format(symbol_name, self.gdx_file))
        if not self.gdx[symbol_name].loaded:
            self.gdx[symbol_name].load()
        return self.gdx[symbol_name].dataframe.copy()
def to_dataframes(gdx_file,gams_dir=None):
    """Convert a GAMS GDX file into a dict of pandas DataFrames keyed by
    symbol name (one entry per symbol in the file).
    """
    translator = Translator(gdx_file, gams_dir=gams_dir)
    return translator.dataframes
def list_symbols(gdx_file,gams_dir=None):
    """Return the list of symbols available in gdx_file."""
    # lazy_load avoids reading any symbol data just to enumerate names.
    return Translator(gdx_file, gams_dir=gams_dir, lazy_load=True).symbols
def to_dataframe(gdx_file, symbol_name, gams_dir=None, old_interface=True):
    """Load a single symbol from *gdx_file* as a pandas.DataFrame.

    - gdx_file (string): path to a GDX file
    - symbol_name (string): name of the symbol to extract
    - gams_dir (string): optional path to GAMS directory
    - old_interface (bool): if True (default), wrap the result in a
      one-entry dict keyed by symbol_name; otherwise return the DataFrame.
    """
    frame = Translator(gdx_file, gams_dir=gams_dir, lazy_load=True).dataframe(symbol_name)
    if old_interface:
        return {symbol_name: frame}
    return frame
f72acbaa7eb80d299ab01ae2d3c86752036d4dac | 24,244 | py | Python | test/api/table/test_table.py | rizwanniazigroupdocs/aspose-words-cloud-python | b943384a1e3c0710cc84df74119e6edf7356037e | [
"MIT"
] | null | null | null | test/api/table/test_table.py | rizwanniazigroupdocs/aspose-words-cloud-python | b943384a1e3c0710cc84df74119e6edf7356037e | [
"MIT"
] | null | null | null | test/api/table/test_table.py | rizwanniazigroupdocs/aspose-words-cloud-python | b943384a1e3c0710cc84df74119e6edf7356037e | [
"MIT"
] | null | null | null | # -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="test_table.py">
# Copyright (c) 2020 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import os
import dateutil.parser
import asposewordscloud.models.requests
from test.base_test_context import BaseTestContext
#
# Example of how to work with a table.
#
class TestTable(BaseTestContext):
    """Integration tests for the table-related endpoints of the Aspose.Words
    Cloud API: tables, rows, cells, their formats/properties, and rendering.

    Each test uploads a local fixture document to remote storage, issues one
    API request against it, and asserts on the response.
    """

    #
    # Test for getting tables.
    #
    def test_get_tables(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTables.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, node_path='', folder=remoteDataFolder)

        result = self.words_api.get_tables(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.tables, 'Validate GetTables response')
        self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTables response')
        self.assertEqual(5, len(result.tables.table_link_list))
        self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)

    #
    # Test for getting tables without node path.
    #
    def test_get_tables_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTablesWithoutNodePath.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, folder=remoteDataFolder)

        result = self.words_api.get_tables(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.tables, 'Validate GetTablesWithoutNodePath response')
        self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTablesWithoutNodePath response')
        self.assertEqual(5, len(result.tables.table_link_list))
        self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)

    #
    # Test for getting table.
    #
    def test_get_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTable.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)

        result = self.words_api.get_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate GetTable response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate GetTable response')
        self.assertEqual(1, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTable response')
        self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))

    #
    # Test for getting table without node path.
    #
    def test_get_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableWithoutNodePath.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)

        result = self.words_api.get_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate GetTableWithoutNodePath response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate GetTableWithoutNodePath response')
        self.assertEqual(1, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTableWithoutNodePath response')
        self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))

    #
    # Test for deleting table.
    #
    def test_delete_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTable.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)

        self.words_api.delete_table(request)

    #
    # Test for deleting table without node path.
    #
    def test_delete_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTableWithoutNodePath.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)

        self.words_api.delete_table(request)

    #
    # Test for adding table.
    #
    def test_insert_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTable.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
        request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, node_path='', folder=remoteDataFolder)

        result = self.words_api.insert_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate InsertTable response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTable response')
        self.assertEqual(4, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTable response')
        self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))

    #
    # Test for adding table without node path.
    #
    def test_insert_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTableWithoutNodePath.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
        request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, folder=remoteDataFolder)

        result = self.words_api.insert_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate InsertTableWithoutNodePath response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTableWithoutNodePath response')
        self.assertEqual(4, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTableWithoutNodePath response')
        self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))

    #
    # Test for getting document properties.
    #
    def test_get_table_properties(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableProperties.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)

        result = self.words_api.get_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate GetTableProperties response')
        self.assertEqual('Table Grid', result.properties.style_name)

    #
    # Test for getting document properties without node path.
    #
    def test_get_table_properties_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTablePropertiesWithoutNodePath.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, folder=remoteDataFolder)

        result = self.words_api.get_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate GetTablePropertiesWithoutNodePath response')
        self.assertEqual('Table Grid', result.properties.style_name)

    #
    # Test for updating table properties.
    #
    def test_update_table_properties(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTableProperties.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        # bottom_padding is a float (1.0) for consistency with the sibling
        # test below and with the assertEqual(1.0, ...) check.
        requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1.0, cell_spacing=2.0, style_options='ColumnBands')
        request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, node_path='', folder=remoteDataFolder)

        result = self.words_api.update_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate UpdateTableProperties response')
        self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTableProperties response')
        self.assertTrue(result.properties.bidi, 'Validate UpdateTableProperties response')
        self.assertEqual(1.0, result.properties.bottom_padding)
        self.assertEqual(2.0, result.properties.cell_spacing)

    #
    # Test for updating table properties without node path.
    #
    def test_update_table_properties_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTablePropertiesWithoutNodePath.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1.0, cell_spacing=2.0, style_options='ColumnBands')
        request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, folder=remoteDataFolder)

        result = self.words_api.update_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate UpdateTablePropertiesWithoutNodePath response')
        self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTablePropertiesWithoutNodePath response')
        self.assertTrue(result.properties.bidi, 'Validate UpdateTablePropertiesWithoutNodePath response')
        self.assertEqual(1.0, result.properties.bottom_padding)
        self.assertEqual(2.0, result.properties.cell_spacing)

    #
    # Test for getting table row.
    #
    def test_get_table_row(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableRow.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)

        result = self.words_api.get_table_row(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row, 'Validate GetTableRow response')
        self.assertIsNotNone(result.row.table_cell_list, 'Validate GetTableRow response')
        self.assertEqual(2, len(result.row.table_cell_list))

    #
    # Test for deleting table row.
    #
    def test_delete_table_row(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTableRow.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.DeleteTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)

        self.words_api.delete_table_row(request)

    #
    # Test for adding row.
    #
    def test_insert_table_row(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTableRow.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        requestRow = asposewordscloud.TableRowInsert(columns_count=5)
        request = asposewordscloud.models.requests.InsertTableRowRequest(name=remoteFileName, row=requestRow, table_path='sections/0/tables/2', folder=remoteDataFolder)

        result = self.words_api.insert_table_row(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row, 'Validate InsertTableRow response')
        self.assertIsNotNone(result.row.table_cell_list, 'Validate InsertTableRow response')
        self.assertEqual(5, len(result.row.table_cell_list))

    #
    # Test for getting row format.
    #
    def test_get_table_row_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableRowFormat.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTableRowFormatRequest(name=remoteFileName, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)

        result = self.words_api.get_table_row_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row_format, 'Validate GetTableRowFormat response')
        self.assertTrue(result.row_format.allow_break_across_pages, 'Validate GetTableRowFormat response')

    #
    # Test updating row format.
    #
    def test_update_table_row_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTableRowFormat.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        requestFormat = asposewordscloud.TableRowFormat(allow_break_across_pages=True, heading_format=True, height=10.0, height_rule='Exactly')
        request = asposewordscloud.models.requests.UpdateTableRowFormatRequest(name=remoteFileName, format=requestFormat, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)

        result = self.words_api.update_table_row_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row_format, 'Validate UpdateTableRowFormat response')
        self.assertTrue(result.row_format.allow_break_across_pages, 'Validate UpdateTableRowFormat response')
        self.assertTrue(result.row_format.heading_format, 'Validate UpdateTableRowFormat response')
        self.assertEqual(10.0, result.row_format.height)

    #
    # Test for getting table cell.
    #
    def test_get_table_cell(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableCell.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)

        result = self.words_api.get_table_cell(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell, 'Validate GetTableCell response')
        self.assertEqual('0.0.5.0.0', result.cell.node_id)

    #
    # Test for deleting cell.
    #
    def test_delete_table_cell(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTableCell.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.DeleteTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)

        self.words_api.delete_table_cell(request)

    #
    # Test for adding cell.
    #
    def test_insert_table_cell(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTableCell.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        requestCell = asposewordscloud.TableCellInsert()
        request = asposewordscloud.models.requests.InsertTableCellRequest(name=remoteFileName, cell=requestCell, table_row_path='sections/0/tables/2/rows/0', folder=remoteDataFolder)

        result = self.words_api.insert_table_cell(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell, 'Validate InsertTableCell response')
        self.assertEqual('0.0.5.0.3', result.cell.node_id)

    #
    # Test for getting cell format.
    #
    def test_get_table_cell_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableCellFormat.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.GetTableCellFormatRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)

        result = self.words_api.get_table_cell_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell_format, 'Validate GetTableCellFormat response')
        self.assertTrue(result.cell_format.wrap_text, 'Validate GetTableCellFormat response')

    #
    # Test for updating cell format.
    #
    def test_update_table_cell_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTableCellFormat.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        requestFormat = asposewordscloud.TableCellFormat(bottom_padding=5.0, fit_text=True, horizontal_merge='First', wrap_text=True)
        request = asposewordscloud.models.requests.UpdateTableCellFormatRequest(name=remoteFileName, format=requestFormat, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)

        result = self.words_api.update_table_cell_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell_format, 'Validate UpdateTableCellFormat response')
        self.assertEqual(5.0, result.cell_format.bottom_padding)
        self.assertTrue(result.cell_format.fit_text, 'Validate UpdateTableCellFormat response')
        self.assertTrue(result.cell_format.wrap_text, 'Validate UpdateTableCellFormat response')

    #
    # Test for table rendering.
    #
    def test_render_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestRenderTable.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, node_path='', folder=remoteDataFolder)

        result = self.words_api.render_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')

    #
    # Test for table rendering without node path.
    #
    def test_render_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestRenderTableWithoutNodePath.docx'

        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))

        request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, folder=remoteDataFolder)

        result = self.words_api.render_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
| 51.803419 | 201 | 0.735068 |
import os
import dateutil.parser
import asposewordscloud.models.requests
from test.base_test_context import BaseTestContext
class TestTable(BaseTestContext):
    def test_get_tables(self):
        """Test for getting tables: expects 5 table links in the fixture document."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTables.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, node_path='', folder=remoteDataFolder)
        result = self.words_api.get_tables(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.tables, 'Validate GetTables response')
        self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTables response')
        self.assertEqual(5, len(result.tables.table_link_list))
        self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)
    def test_get_tables_without_node_path(self):
        """Test for getting tables without an explicit node path."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTablesWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, folder=remoteDataFolder)
        result = self.words_api.get_tables(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.tables, 'Validate GetTablesWithoutNodePath response')
        self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTablesWithoutNodePath response')
        self.assertEqual(5, len(result.tables.table_link_list))
        self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)
    def test_get_table(self):
        """Test for getting a single table (index 1): expects 1 row with 2 cells."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTable.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
        result = self.words_api.get_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate GetTable response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate GetTable response')
        self.assertEqual(1, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTable response')
        self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))
    def test_get_table_without_node_path(self):
        """Test for getting a single table without an explicit node path."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
        result = self.words_api.get_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate GetTableWithoutNodePath response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate GetTableWithoutNodePath response')
        self.assertEqual(1, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTableWithoutNodePath response')
        self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))
    def test_delete_table(self):
        """Test for deleting a table; success is the call not raising an ApiException."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTable.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
        self.words_api.delete_table(request)
    def test_delete_table_without_node_path(self):
        """Test for deleting a table without an explicit node path."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTableWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
        self.words_api.delete_table(request)
    def test_insert_table(self):
        """Test for adding a 5x4 table; asserts the returned table's dimensions."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTable.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
        request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, node_path='', folder=remoteDataFolder)
        result = self.words_api.insert_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate InsertTable response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTable response')
        self.assertEqual(4, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTable response')
        self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))
    def test_insert_table_without_node_path(self):
        """Test for adding a table without an explicit node path."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTableWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
        request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, folder=remoteDataFolder)
        result = self.words_api.insert_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate InsertTableWithoutNodePath response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTableWithoutNodePath response')
        self.assertEqual(4, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTableWithoutNodePath response')
        self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))
    def test_get_table_properties(self):
        """Test for getting table properties; expects the 'Table Grid' style."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableProperties.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
        result = self.words_api.get_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate GetTableProperties response')
        self.assertEqual('Table Grid', result.properties.style_name)
    def test_get_table_properties_without_node_path(self):
        """Test for getting table properties without an explicit node path."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTablePropertiesWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
        result = self.words_api.get_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate GetTablePropertiesWithoutNodePath response')
        self.assertEqual('Table Grid', result.properties.style_name)
    def test_update_table_properties(self):
        """Test for updating table properties and verifying them in the response."""
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTableProperties.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        # NOTE(review): bottom_padding=1 is an int here but 1.0 in the sibling
        # *_without_node_path test and in the assertEqual(1.0, ...) below —
        # consider normalizing to 1.0.
        requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1, cell_spacing=2.0, style_options='ColumnBands')
        request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, node_path='', folder=remoteDataFolder)
        result = self.words_api.update_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate UpdateTableProperties response')
        self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTableProperties response')
        self.assertTrue(result.properties.bidi, 'Validate UpdateTableProperties response')
        self.assertEqual(1.0, result.properties.bottom_padding)
        self.assertEqual(2.0, result.properties.cell_spacing)
def test_update_table_properties_without_node_path(self):
    """Test updating table properties when no node path is supplied."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestUpdateTablePropertiesWithoutNodePath.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1.0, cell_spacing=2.0, style_options='ColumnBands')
    request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, folder=remoteDataFolder)
    result = self.words_api.update_table_properties(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.properties, 'Validate UpdateTablePropertiesWithoutNodePath response')
    self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTablePropertiesWithoutNodePath response')
    self.assertTrue(result.properties.bidi, 'Validate UpdateTablePropertiesWithoutNodePath response')
    self.assertEqual(1.0, result.properties.bottom_padding)
    self.assertEqual(2.0, result.properties.cell_spacing)
def test_get_table_row(self):
    """Test getting a table row by table path and row index."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestGetTableRow.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.GetTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)
    result = self.words_api.get_table_row(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.row, 'Validate GetTableRow response')
    self.assertIsNotNone(result.row.table_cell_list, 'Validate GetTableRow response')
    self.assertEqual(2, len(result.row.table_cell_list))
def test_delete_table_row(self):
    """Test deleting a table row (passes when no exception is raised)."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestDeleteTableRow.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.DeleteTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)
    self.words_api.delete_table_row(request)
def test_insert_table_row(self):
    """Test inserting a table row with a given column count."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestInsertTableRow.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    requestRow = asposewordscloud.TableRowInsert(columns_count=5)
    request = asposewordscloud.models.requests.InsertTableRowRequest(name=remoteFileName, row=requestRow, table_path='sections/0/tables/2', folder=remoteDataFolder)
    result = self.words_api.insert_table_row(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.row, 'Validate InsertTableRow response')
    self.assertIsNotNone(result.row.table_cell_list, 'Validate InsertTableRow response')
    self.assertEqual(5, len(result.row.table_cell_list))
def test_get_table_row_format(self):
    """Test reading the formatting of a table row."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestGetTableRowFormat.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.GetTableRowFormatRequest(name=remoteFileName, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)
    result = self.words_api.get_table_row_format(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.row_format, 'Validate GetTableRowFormat response')
    self.assertTrue(result.row_format.allow_break_across_pages, 'Validate GetTableRowFormat response')
def test_update_table_row_format(self):
    """Test updating the formatting of a table row."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestUpdateTableRowFormat.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    requestFormat = asposewordscloud.TableRowFormat(allow_break_across_pages=True, heading_format=True, height=10.0, height_rule='Exactly')
    request = asposewordscloud.models.requests.UpdateTableRowFormatRequest(name=remoteFileName, format=requestFormat, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)
    result = self.words_api.update_table_row_format(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.row_format, 'Validate UpdateTableRowFormat response')
    self.assertTrue(result.row_format.allow_break_across_pages, 'Validate UpdateTableRowFormat response')
    self.assertTrue(result.row_format.heading_format, 'Validate UpdateTableRowFormat response')
    self.assertEqual(10.0, result.row_format.height)
def test_get_table_cell(self):
    """Test getting a table cell by row path and cell index."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestGetTableCell.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.GetTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
    result = self.words_api.get_table_cell(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.cell, 'Validate GetTableCell response')
    self.assertEqual('0.0.5.0.0', result.cell.node_id)
def test_delete_table_cell(self):
    """Test deleting a table cell (passes when no exception is raised)."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestDeleteTableCell.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.DeleteTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
    self.words_api.delete_table_cell(request)
def test_insert_table_cell(self):
    """Test inserting a table cell into an existing row."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestInsertTableCell.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    requestCell = asposewordscloud.TableCellInsert()
    request = asposewordscloud.models.requests.InsertTableCellRequest(name=remoteFileName, cell=requestCell, table_row_path='sections/0/tables/2/rows/0', folder=remoteDataFolder)
    result = self.words_api.insert_table_cell(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.cell, 'Validate InsertTableCell response')
    self.assertEqual('0.0.5.0.3', result.cell.node_id)
def test_get_table_cell_format(self):
    """Test reading the formatting of a table cell."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestGetTableCellFormat.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.GetTableCellFormatRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
    result = self.words_api.get_table_cell_format(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.cell_format, 'Validate GetTableCellFormat response')
    self.assertTrue(result.cell_format.wrap_text, 'Validate GetTableCellFormat response')
def test_update_table_cell_format(self):
    """Test updating the formatting of a table cell."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestUpdateTableCellFormat.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    requestFormat = asposewordscloud.TableCellFormat(bottom_padding=5.0, fit_text=True, horizontal_merge='First', wrap_text=True)
    request = asposewordscloud.models.requests.UpdateTableCellFormatRequest(name=remoteFileName, format=requestFormat, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
    result = self.words_api.update_table_cell_format(request)
    self.assertIsNotNone(result, 'Error has occurred.')
    self.assertIsNotNone(result.cell_format, 'Validate UpdateTableCellFormat response')
    self.assertEqual(5.0, result.cell_format.bottom_padding)
    self.assertTrue(result.cell_format.fit_text, 'Validate UpdateTableCellFormat response')
    self.assertTrue(result.cell_format.wrap_text, 'Validate UpdateTableCellFormat response')
def test_render_table(self):
    """Test rendering a table to PNG with an explicit (empty) node path."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestRenderTable.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, node_path='', folder=remoteDataFolder)
    result = self.words_api.render_table(request)
    self.assertIsNotNone(result, 'Error has occurred.')
def test_render_table_without_node_path(self):
    """Test rendering a table to PNG when no node path is supplied."""
    remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
    localFile = 'DocumentElements/Tables/TablesGet.docx'
    remoteFileName = 'TestRenderTableWithoutNodePath.docx'
    # Close the local fixture after upload instead of leaking the open() handle.
    with open(os.path.join(self.local_test_folder, localFile), 'rb') as local_stream:
        self.upload_file(remoteDataFolder + '/' + remoteFileName, local_stream)
    request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, folder=remoteDataFolder)
    result = self.words_api.render_table(request)
    self.assertIsNotNone(result, 'Error has occurred.')
| true | true |
f72acc530ce86db9d84ee3320a91420735f171b5 | 1,307 | py | Python | setup.py | jamenor/pichetprofile | 6633ea6eaa7473af9e10f34f6a19428c2db92465 | [
"MIT"
] | null | null | null | setup.py | jamenor/pichetprofile | 6633ea6eaa7473af9e10f34f6a19428c2db92465 | [
"MIT"
] | null | null | null | setup.py | jamenor/pichetprofile | 6633ea6eaa7473af9e10f34f6a19428c2db92465 | [
"MIT"
] | null | null | null | import io
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
HERE = dirname(abspath(__file__))
LOAD_TEXT = lambda name: io.open(join(HERE, name), encoding='UTF-8').read()
DESCRIPTION = '\n\n'.join(LOAD_TEXT(_) for _ in [
'README.rst'
])
setup(
name = 'pichetprofile',
packages = ['pichetprofile'],
version = '0.0.1',
license='MIT',
description = 'Pichet Profile by Jame normal',
long_description=DESCRIPTION,
author = 'Jame normal',
author_email = 'pichet.mt53@gmail.com',
url = 'https://github.com/jamenor/pichetprofile',
download_url = 'https://github.com/jamenor/pichetprofile/archive/v0.0.1.zip',
keywords = ['OOP', 'School', 'jamenor'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Education',
'Topic :: Software Development :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
) | 35.324324 | 82 | 0.613619 | import io
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
HERE = dirname(abspath(__file__))
LOAD_TEXT = lambda name: io.open(join(HERE, name), encoding='UTF-8').read()
DESCRIPTION = '\n\n'.join(LOAD_TEXT(_) for _ in [
'README.rst'
])
setup(
name = 'pichetprofile',
packages = ['pichetprofile'],
version = '0.0.1',
license='MIT',
description = 'Pichet Profile by Jame normal',
long_description=DESCRIPTION,
author = 'Jame normal',
author_email = 'pichet.mt53@gmail.com',
url = 'https://github.com/jamenor/pichetprofile',
download_url = 'https://github.com/jamenor/pichetprofile/archive/v0.0.1.zip',
keywords = ['OOP', 'School', 'jamenor'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Education',
'Topic :: Software Development :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
) | false | true |
f72accd8900bf752d4868f03ba6ce4c1c4210e08 | 7,851 | py | Python | kaggle/ghouls-goblins-and-ghosts-boo/script_3.py | josepablocam/janus-public | 4713092b27d02386bdb408213d8edc0dc5859eec | [
"MIT"
] | null | null | null | kaggle/ghouls-goblins-and-ghosts-boo/script_3.py | josepablocam/janus-public | 4713092b27d02386bdb408213d8edc0dc5859eec | [
"MIT"
] | null | null | null | kaggle/ghouls-goblins-and-ghosts-boo/script_3.py | josepablocam/janus-public | 4713092b27d02386bdb408213d8edc0dc5859eec | [
"MIT"
] | null | null | null | #Libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('whitegrid')
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.calibration import CalibratedClassifierCV
import xgboost as xgb
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression
from sklearn import svm
from sklearn.ensemble import VotingClassifier
from sklearn.naive_bayes import GaussianNB
# --- Data loading and exploratory plots (Kaggle ghouls/goblins/ghosts) ---
# Reads the competition CSVs and produces quick summary output and charts.
train = pd.read_csv('../input/train.csv')
test = pd.read_csv('../input/test.csv')
train.info()
train.describe(include='all')
train.head()
# Mean of each numeric feature per creature type, as four bar charts.
plt.subplot(1,4,1)
train.groupby('type').mean()['rotting_flesh'].plot(kind='bar',figsize=(7,4), color='r')
plt.subplot(1,4,2)
train.groupby('type').mean()['bone_length'].plot(kind='bar',figsize=(7,4), color='g')
plt.subplot(1,4,3)
train.groupby('type').mean()['hair_length'].plot(kind='bar',figsize=(7,4), color='y')
plt.subplot(1,4,4)
train.groupby('type').mean()['has_soul'].plot(kind='bar',figsize=(7,4), color='teal')
# NOTE(review): factorplot and its `size` argument are from older seaborn
# releases — confirm the pinned seaborn version before upgrading.
sns.factorplot("type", col="color", col_wrap=4, data=train, kind="count", size=2.4, aspect=.8)
#The graphs look much better with higher figsize.
fig, ax = plt.subplots(2, 2, figsize = (16, 12))
sns.pointplot(x="color", y="rotting_flesh", hue="type", data=train, ax = ax[0, 0])
sns.pointplot(x="color", y="bone_length", hue="type", data=train, ax = ax[0, 1])
sns.pointplot(x="color", y="hair_length", hue="type", data=train, ax = ax[1, 0])
sns.pointplot(x="color", y="has_soul", hue="type", data=train, ax = ax[1, 1])
sns.pairplot(train, hue='type')
# --- Feature engineering ---
# Interaction features: products of the strongest raw predictors.
train['hair_soul'] = train['hair_length'] * train['has_soul']
train['hair_bone'] = train['hair_length'] * train['bone_length']
test['hair_soul'] = test['hair_length'] * test['has_soul']
test['hair_bone'] = test['hair_length'] * test['bone_length']
train['hair_soul_bone'] = train['hair_length'] * train['has_soul'] * train['bone_length']
test['hair_soul_bone'] = test['hair_length'] * test['has_soul'] * test['bone_length']
#test_id will be used later, so save it
test_id = test['id']
train.drop(['id'], axis=1, inplace=True)
test.drop(['id'], axis=1, inplace=True)
#Deal with 'color' column
# One-hot encode the categorical 'color' column on both frames, prefixing
# the new columns with "color#".
col = 'color'
dummies = pd.get_dummies(train[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
train.drop(col, axis=1, inplace=True)
train = train.join(dummies)
dummies = pd.get_dummies(test[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
test.drop(col, axis=1, inplace=True)
test = test.join(dummies)
# Features / integer-encoded target used by all models below.
X_train = train.drop('type', axis=1)
le = LabelEncoder()
Y_train = le.fit_transform(train.type.values)
X_test = test
# --- Feature selection: rank features by random-forest importance and
# keep the top 7 for all subsequent models. ---
clf = RandomForestClassifier(n_estimators=200)
clf = clf.fit(X_train, Y_train)
indices = np.argsort(clf.feature_importances_)[::-1]
# Print the feature ranking
print('Feature ranking:')
# NOTE(review): the indentation of this loop body appears to have been lost
# in this copy of the file — restore it before running.
for f in range(X_train.shape[1]):
print('%d. feature %d %s (%f)' % (f + 1, indices[f], X_train.columns[indices[f]],
                                      clf.feature_importances_[indices[f]]))
best_features=X_train.columns[indices[0:7]]
X = X_train[best_features]
Xt = X_test[best_features]
#Splitting data for validation
Xtrain, Xtest, ytrain, ytest = train_test_split(X, Y_train, test_size=0.20, random_state=36)
# --- Random-forest tuning in two passes: first the ensemble-level
# parameters, then the tree-shape parameters, each via 5-fold grid search. ---
forest = RandomForestClassifier(max_depth = 100,
                                min_samples_split =7,
                                min_weight_fraction_leaf = 0.0,
                                max_leaf_nodes = 60)
parameter_grid = {'n_estimators' : [10, 20, 100, 150],
                  'criterion' : ['gini', 'entropy'],
                  'max_features' : ['auto', 'sqrt', 'log2', None]
                 }
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
# Second pass: fix the winners from pass one, tune tree-shape parameters.
forest = RandomForestClassifier(n_estimators = 150,
criterion = 'entropy',
max_features = 'auto')
parameter_grid = {
'max_depth' : [None, 5, 20, 100],
'min_samples_split' : [2, 5, 7],
'min_weight_fraction_leaf' : [0.0, 0.1],
'max_leaf_nodes' : [40, 60, 80],
}
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
#Optimal parameters
clf = RandomForestClassifier(n_estimators=150, n_jobs=-1, criterion = 'entropy', max_features = 'auto',
min_samples_split=7, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=20)
#Calibration improves probability predictions
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
calibrated_clf.fit(Xtrain, ytrain)
y_val = calibrated_clf.predict_proba(Xtest)
# Accuracy = fraction of rows whose argmax class matches the true label.
print("Validation accuracy: ", sum(pd.DataFrame(y_val, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
# --- Two more candidate models: linear SVC and tuned logistic regression. ---
svc = svm.SVC(kernel='linear')
svc.fit(Xtrain, ytrain)
y_val_s = svc.predict(Xtest)
print("Validation accuracy: ", sum(le.inverse_transform(y_val_s)
== le.inverse_transform(ytest))/len(ytest))
#The last model is logistic regression
logreg = LogisticRegression()
parameter_grid = {'solver' : ['newton-cg', 'lbfgs'],
'multi_class' : ['ovr', 'multinomial'],
'C' : [0.005, 0.01, 1, 10, 100, 1000],
'tol': [0.0001, 0.001, 0.005]
}
grid_search = GridSearchCV(logreg, param_grid=parameter_grid, cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
# Refit logistic regression with the grid-search winners and validate.
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
log_reg.fit(Xtrain, ytrain)
y_val_l = log_reg.predict_proba(Xtest)
print("Validation accuracy: ", sum(pd.DataFrame(y_val_l, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
# --- Final ensemble: hard- and soft-voting over logistic regression,
# a calibrated random forest, and Gaussian naive Bayes; the hard-voting
# predictions are written out as the submission. ---
clf = RandomForestClassifier(n_estimators=20, n_jobs=-1, criterion = 'gini', max_features = 'sqrt',
min_samples_split=2, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=100)
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
gnb = GaussianNB()
# Default-parameter copies used by the hard-voting ensemble.
calibrated_clf1 = CalibratedClassifierCV(RandomForestClassifier())
log_reg1 = LogisticRegression()
gnb1 = GaussianNB()
Vclf1 = VotingClassifier(estimators=[('LR', log_reg1), ('CRF', calibrated_clf1),
('GNB', gnb1)], voting='hard')
Vclf = VotingClassifier(estimators=[('LR', log_reg), ('CRF', calibrated_clf),
('GNB', gnb)], voting='soft', weights=[1,1,1])
# Fit both ensembles on the full training data and predict the test set.
hard_predict = le.inverse_transform(Vclf1.fit(X, Y_train).predict(Xt))
soft_predict = le.inverse_transform(Vclf.fit(X, Y_train).predict(Xt))
#Let's see the differences:
for i in range(len(hard_predict)):
if hard_predict[i] != soft_predict[i]:
print(i, hard_predict[i], soft_predict[i])
submission = pd.DataFrame({'id':test_id, 'type':hard_predict})
submission.to_csv('GGG_submission.csv', index=False)
| 47.011976 | 104 | 0.672271 |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('whitegrid')
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.calibration import CalibratedClassifierCV
import xgboost as xgb
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression
from sklearn import svm
from sklearn.ensemble import VotingClassifier
from sklearn.naive_bayes import GaussianNB
train = pd.read_csv('../input/train.csv')
test = pd.read_csv('../input/test.csv')
train.info()
train.describe(include='all')
train.head()
plt.subplot(1,4,1)
train.groupby('type').mean()['rotting_flesh'].plot(kind='bar',figsize=(7,4), color='r')
plt.subplot(1,4,2)
train.groupby('type').mean()['bone_length'].plot(kind='bar',figsize=(7,4), color='g')
plt.subplot(1,4,3)
train.groupby('type').mean()['hair_length'].plot(kind='bar',figsize=(7,4), color='y')
plt.subplot(1,4,4)
train.groupby('type').mean()['has_soul'].plot(kind='bar',figsize=(7,4), color='teal')
sns.factorplot("type", col="color", col_wrap=4, data=train, kind="count", size=2.4, aspect=.8)
fig, ax = plt.subplots(2, 2, figsize = (16, 12))
sns.pointplot(x="color", y="rotting_flesh", hue="type", data=train, ax = ax[0, 0])
sns.pointplot(x="color", y="bone_length", hue="type", data=train, ax = ax[0, 1])
sns.pointplot(x="color", y="hair_length", hue="type", data=train, ax = ax[1, 0])
sns.pointplot(x="color", y="has_soul", hue="type", data=train, ax = ax[1, 1])
sns.pairplot(train, hue='type')
train['hair_soul'] = train['hair_length'] * train['has_soul']
train['hair_bone'] = train['hair_length'] * train['bone_length']
test['hair_soul'] = test['hair_length'] * test['has_soul']
test['hair_bone'] = test['hair_length'] * test['bone_length']
train['hair_soul_bone'] = train['hair_length'] * train['has_soul'] * train['bone_length']
test['hair_soul_bone'] = test['hair_length'] * test['has_soul'] * test['bone_length']
test_id = test['id']
train.drop(['id'], axis=1, inplace=True)
test.drop(['id'], axis=1, inplace=True)
col = 'color'
dummies = pd.get_dummies(train[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
train.drop(col, axis=1, inplace=True)
train = train.join(dummies)
dummies = pd.get_dummies(test[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
test.drop(col, axis=1, inplace=True)
test = test.join(dummies)
X_train = train.drop('type', axis=1)
le = LabelEncoder()
Y_train = le.fit_transform(train.type.values)
X_test = test
clf = RandomForestClassifier(n_estimators=200)
clf = clf.fit(X_train, Y_train)
indices = np.argsort(clf.feature_importances_)[::-1]
print('Feature ranking:')
for f in range(X_train.shape[1]):
print('%d. feature %d %s (%f)' % (f + 1, indices[f], X_train.columns[indices[f]],
clf.feature_importances_[indices[f]]))
best_features=X_train.columns[indices[0:7]]
X = X_train[best_features]
Xt = X_test[best_features]
Xtrain, Xtest, ytrain, ytest = train_test_split(X, Y_train, test_size=0.20, random_state=36)
forest = RandomForestClassifier(max_depth = 100,
min_samples_split =7,
min_weight_fraction_leaf = 0.0,
max_leaf_nodes = 60)
parameter_grid = {'n_estimators' : [10, 20, 100, 150],
'criterion' : ['gini', 'entropy'],
'max_features' : ['auto', 'sqrt', 'log2', None]
}
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
forest = RandomForestClassifier(n_estimators = 150,
criterion = 'entropy',
max_features = 'auto')
parameter_grid = {
'max_depth' : [None, 5, 20, 100],
'min_samples_split' : [2, 5, 7],
'min_weight_fraction_leaf' : [0.0, 0.1],
'max_leaf_nodes' : [40, 60, 80],
}
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
clf = RandomForestClassifier(n_estimators=150, n_jobs=-1, criterion = 'entropy', max_features = 'auto',
min_samples_split=7, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=20)
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
calibrated_clf.fit(Xtrain, ytrain)
y_val = calibrated_clf.predict_proba(Xtest)
print("Validation accuracy: ", sum(pd.DataFrame(y_val, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
svc = svm.SVC(kernel='linear')
svc.fit(Xtrain, ytrain)
y_val_s = svc.predict(Xtest)
print("Validation accuracy: ", sum(le.inverse_transform(y_val_s)
== le.inverse_transform(ytest))/len(ytest))
logreg = LogisticRegression()
parameter_grid = {'solver' : ['newton-cg', 'lbfgs'],
'multi_class' : ['ovr', 'multinomial'],
'C' : [0.005, 0.01, 1, 10, 100, 1000],
'tol': [0.0001, 0.001, 0.005]
}
grid_search = GridSearchCV(logreg, param_grid=parameter_grid, cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
log_reg.fit(Xtrain, ytrain)
y_val_l = log_reg.predict_proba(Xtest)
print("Validation accuracy: ", sum(pd.DataFrame(y_val_l, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
clf = RandomForestClassifier(n_estimators=20, n_jobs=-1, criterion = 'gini', max_features = 'sqrt',
min_samples_split=2, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=100)
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
gnb = GaussianNB()
calibrated_clf1 = CalibratedClassifierCV(RandomForestClassifier())
log_reg1 = LogisticRegression()
gnb1 = GaussianNB()
Vclf1 = VotingClassifier(estimators=[('LR', log_reg1), ('CRF', calibrated_clf1),
('GNB', gnb1)], voting='hard')
Vclf = VotingClassifier(estimators=[('LR', log_reg), ('CRF', calibrated_clf),
('GNB', gnb)], voting='soft', weights=[1,1,1])
hard_predict = le.inverse_transform(Vclf1.fit(X, Y_train).predict(Xt))
soft_predict = le.inverse_transform(Vclf.fit(X, Y_train).predict(Xt))
for i in range(len(hard_predict)):
if hard_predict[i] != soft_predict[i]:
print(i, hard_predict[i], soft_predict[i])
submission = pd.DataFrame({'id':test_id, 'type':hard_predict})
submission.to_csv('GGG_submission.csv', index=False)
| true | true |
f72acedfe31ef0d6425a9d5e280c234bf012eb1c | 2,456 | py | Python | example.py | macky168/gaopt | bf2785325d3cb4489513f47ed06f745a059262f8 | [
"MIT"
] | null | null | null | example.py | macky168/gaopt | bf2785325d3cb4489513f47ed06f745a059262f8 | [
"MIT"
] | null | null | null | example.py | macky168/gaopt | bf2785325d3cb4489513f47ed06f745a059262f8 | [
"MIT"
] | null | null | null | import gaopt
from gaopt import search_space
import pandas as pd
import numpy as np
import lightgbm as lgb
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from sklearn.datasets import load_diabetes
# Genetic-algorithm search space: maps each LightGBM hyperparameter to a
# gaopt sampling domain.  lambda_l1/lambda_l2 are sampled as base-10
# exponents (the objective applies 10**v before training).
params_range={
'lambda_l1': search_space.discrete_int(-8, 2),
'lambda_l2': search_space.discrete_int(-8, 2),
'num_leaves': search_space.discrete(2, 100, 4),
'feature_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_freq': search_space.discrete_int(0,1),
'min_child_samples': search_space.discrete_int(1,30),
}
# NOTE(review): these two module globals appear unused in this file —
# confirm nothing imports them before removing.
cal_time_lst = []
date_start = None
def objective1(params):
    """GA fitness function.

    Trains a LightGBM regressor on the sklearn diabetes dataset using the
    hyperparameters carried by *params* (a gaopt individual with attribute
    access) and returns the R^2 score on a held-out test split.
    """
    diabetes = load_diabetes()
    X = diabetes.data
    y = diabetes.target
    # 70/30 train/test split, then a further 70/30 train/valid split.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
    X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size=0.3, random_state=0)
    lgb_train = lgb.Dataset(data=X_train, label=y_train)
    lgb_valid = lgb.Dataset(data=X_valid, label=y_valid)
    # Named lgb_params here to avoid shadowing the function argument.
    lgb_params = {
        'lambda_l1': 10**params.lambda_l1,   # exponent -> actual L1 weight
        'lambda_l2': 10**params.lambda_l2,   # exponent -> actual L2 weight
        'num_leaves': params.num_leaves,
        'feature_fraction': params.feature_fraction,
        'bagging_fraction': params.bagging_fraction,
        'bagging_freq': params.bagging_freq,
        'min_child_samples': params.min_child_samples,
        'objective': 'regression',
        'metric': 'rmse',
        "verbosity": -1,
        "seed": 0
    }
    model = lgb.train(lgb_params,
                      train_set=lgb_train,
                      valid_sets=lgb_valid,
                      verbose_eval=False
                      )
    y_pred_lgb = model.predict(X_test)
    return r2_score(y_test, y_pred_lgb)
def main():
    """Configure and run the GA hyperparameter search, then print the best result."""
    mutation_rate = 0.10
    crossover_rate = 0.7
    pop_size = 30
    n_generations = 50
    optimizer = gaopt.GAOpt(params_range, objective=objective1,
                            generation=n_generations, population=pop_size,
                            p_m=mutation_rate, p_c=crossover_rate, elitism=True,
                            history=2, verbose=2, maximizing=True)
    (best_params, best_fitness,
     best_fitness_lst, worst_fitness_lst, mean_fitness_lst,
     median_fitness_lst, sd_fitness_lst, search_history_lst) = optimizer.fit()
    print("best params: ", best_params)
    print("best fitness: ", best_fitness)


if __name__ == '__main__':
    main()
| 31.487179 | 157 | 0.664088 | import gaopt
from gaopt import search_space
import pandas as pd
import numpy as np
import lightgbm as lgb
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from sklearn.datasets import load_diabetes
params_range={
'lambda_l1': search_space.discrete_int(-8, 2),
'lambda_l2': search_space.discrete_int(-8, 2),
'num_leaves': search_space.discrete(2, 100, 4),
'feature_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_freq': search_space.discrete_int(0,1),
'min_child_samples': search_space.discrete_int(1,30),
}
cal_time_lst = []
date_start = None
def objective1(params):
diabetes = load_diabetes()
X = diabetes.data
y = diabetes.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.3, random_state = 0)
X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size = 0.3, random_state = 0)
lgb_train = lgb.Dataset(data=X_train, label=y_train)
lgb_valid = lgb.Dataset(data=X_valid, label=y_valid)
params ={
'lambda_l1': 10**params.lambda_l1,
'lambda_l2': 10**params.lambda_l2,
'num_leaves': params.num_leaves,
'feature_fraction': params.feature_fraction,
'bagging_fraction': params.bagging_fraction,
'bagging_freq': params.bagging_freq,
'min_child_samples': params.min_child_samples,
'objective': 'regression',
'metric': 'rmse',
"verbosity": -1,
"seed": 0
}
model = lgb.train(params,
train_set=lgb_train,
valid_sets=lgb_valid,
verbose_eval=False
)
y_pred_lgb = model.predict(X_test)
fitness = r2_score(y_test, y_pred_lgb)
return fitness
def main():
p_m = 0.10
p_c = 0.7
population = 30
generation = 50
instance = gaopt.GAOpt(params_range, objective=objective1, generation=generation, population=population,
p_m=p_m, p_c=p_c, elitism=True,
history=2, verbose=2, maximizing=True)
best_params, best_fitness, best_fitness_lst, worst_fitness_lst, mean_fitness_lst, median_fitness_lst, sd_fitness_lst, search_history_lst = instance.fit()
print("best params: ", best_params)
print("best fitness: ", best_fitness)
if __name__ == '__main__':
main()
| true | true |
f72acf6685fa304f560b7aba21b3cc59df08af86 | 1,407 | py | Python | plotly/validators/contour/colorbar/_tickfont.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | 1 | 2018-07-16T01:51:47.000Z | 2018-07-16T01:51:47.000Z | plotly/validators/contour/colorbar/_tickfont.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | null | null | null | plotly/validators/contour/colorbar/_tickfont.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | 1 | 2019-02-18T04:12:56.000Z | 2019-02-18T04:12:56.000Z | import _plotly_utils.basevalidators
class TickfontValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Auto-generated validator for the ``contour.colorbar.tickfont`` property.

    Delegates to :class:`CompoundValidator` with the compound data class name
    (``Tickfont``) and the user-facing property documentation string.
    """

    def __init__(
        self, plotly_name='tickfont', parent_name='contour.colorbar', **kwargs
    ):
        # NOTE(review): ``data_docs`` below is rendered verbatim in user-facing
        # help text; this file is code-generated, so any wording fixes belong
        # in the generator, not here.
        super(TickfontValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str='Tickfont',
            data_docs="""
            color

            family
                HTML font family - the typeface that will be
                applied by the web browser. The web browser
                will only be able to apply a font if it is
                available on the system which it operates.
                Provide multiple font families, separated by
                commas, to indicate the preference in which to
                apply fonts if they aren't available on the
                system. The plotly service (at https://plot.ly
                or on-premise) generates images on a server,
                where only a select number of fonts are
                installed and supported. These include *Arial*,
                *Balto*, *Courier New*, *Droid Sans*,, *Droid
                Serif*, *Droid Sans Mono*, *Gravitas One*, *Old
                Standard TT*, *Open Sans*, *Overpass*, *PT Sans
                Narrow*, *Raleway*, *Times New Roman*.
            size

""",
            **kwargs
        )
| 39.083333 | 78 | 0.570007 | import _plotly_utils.basevalidators
class TickfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name='tickfont', parent_name='contour.colorbar', **kwargs
):
super(TickfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str='Tickfont',
data_docs="""
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include *Arial*,
*Balto*, *Courier New*, *Droid Sans*,, *Droid
Serif*, *Droid Sans Mono*, *Gravitas One*, *Old
Standard TT*, *Open Sans*, *Overpass*, *PT Sans
Narrow*, *Raleway*, *Times New Roman*.
size
""",
**kwargs
)
| true | true |
f72acf916cc7270f998cfd07db89c1ac93ca5b18 | 1,812 | py | Python | src/scripts/extract_syscall.py | Manouchehri/Triton-docker | ce49ce9ba49965a5e7f814f2b46e50cc74b704de | [
"BSD-3-Clause"
] | 1 | 2020-11-15T15:21:12.000Z | 2020-11-15T15:21:12.000Z | src/scripts/extract_syscall.py | Manouchehri/Triton-docker | ce49ce9ba49965a5e7f814f2b46e50cc74b704de | [
"BSD-3-Clause"
] | null | null | null | src/scripts/extract_syscall.py | Manouchehri/Triton-docker | ce49ce9ba49965a5e7f814f2b46e50cc74b704de | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/env python
#
# This script is used to generate the files src/utils/syscalls{32,64}.cpp.
# As the list of syscalls depends of your Kernel version. We must
# generate the list at the compile time.
#
from __future__ import print_function
import argparse
import sys
import re
import platform
HEADER = """
/*! \\file */
#if defined(__unix__) || defined(__APPLE__)
#include <syscalls.hpp>
namespace triton {
namespace os {
namespace unix {
"""
FOOTER = """
}; /* unix namespace */
}; /* os namespace */
}; /* triton namespace */
#endif
"""
def main(argv=None):
    """Print the C++ syscall-name table for ``src/utils/syscalls{32,64}.cpp``.

    Parses ``#define __NR_<name> <id>`` (Linux) or ``#define SYS_<name> <id>``
    (macOS) lines from the header given on the command line and emits a C
    array mapping syscall id -> upper-cased name, padding numbering gaps with
    ``"UNDEF"`` entries so the array index equals the syscall id.

    :param argv: optional argument list (defaults to ``sys.argv[1:]``);
                 exposed so the function is callable from tests.
    Exits with status 0 (producing no output) on unsupported platforms.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("file", help="this file must contains the syscalls definitions", type=str)
    parser.add_argument("arch", help="syscall architecture - 32 or 64", type=str)
    args = parser.parse_args(argv)

    # Pick the syscall-number prefix used by the host kernel's headers.
    system = platform.system()
    if system == 'Linux':
        regex = re.compile(r"#define\s+(__NR_)(\w+)\s+(\d+)")
    elif system == 'Darwin':
        regex = re.compile(r"#define\s+(SYS_)(\w+)\s+(\d+)")
    else:
        sys.exit(0)

    with open(args.file) as hfile:
        print(HEADER)
        print(" const char* syscallmap%s[] = {" % args.arch)
        counter = 0
        for match in regex.finditer(hfile.read()):
            prefix = match.group(1)
            name = match.group(2)
            sysid = int(match.group(3))
            # Pad gaps in the syscall numbering so array index == syscall id.
            while counter < sysid:
                print(' "UNDEF", // undefined')
                counter += 1
            print(' "%s", // %s%s' % (name.upper(), prefix, name))
            counter += 1
        print(" };")
        print()
        print(" const unsigned int NB_SYSCALL%s = %d;" % (args.arch, counter))
    print(FOOTER)


if __name__ == "__main__":
    main()
| 25.885714 | 98 | 0.570088 |
from __future__ import print_function
import argparse
import sys
import re
import platform
HEADER = """
/*! \\file */
#if defined(__unix__) || defined(__APPLE__)
#include <syscalls.hpp>
namespace triton {
namespace os {
namespace unix {
"""
FOOTER = """
}; /* unix namespace */
}; /* os namespace */
}; /* triton namespace */
#endif
"""
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("file", help="this file must contains the syscalls definitions", type=str)
parser.add_argument("arch", help="syscall architecture - 32 or 64", type=str)
args = parser.parse_args()
if platform.system() == 'Linux':
regex = re.compile(r"#define\s+(__NR_)(\w+)\s+(\d+)")
elif platform.system() == 'Darwin':
regex = re.compile(r"#define\s+(SYS_)(\w+)\s+(\d+)")
else:
sys.exit(0)
with open(args.file) as hfile:
print(HEADER)
print(" const char* syscallmap%s[] = {" % args.arch)
counter = 0
for match in regex.finditer(hfile.read()):
prefix = str(match.groups()[0])
name = str(match.groups()[1])
sysid = int(match.groups()[2])
if counter != sysid:
for i in range(sysid - counter):
print(' "UNDEF", // undefined')
counter += 1
print(' "%s", // %s%s' % (name.upper(), prefix, name))
counter += 1
print(" };")
print()
print(" const unsigned int NB_SYSCALL%s = %d;" % (args.arch, counter))
print(FOOTER)
| true | true |
f72ad055c9ca2d52827b7e4aa011c2370f6292dc | 15,695 | py | Python | electrum_ltc/tests/test_lnpeer.py | SynchrotronCoinDev/electrum-ltc | 178589f30ce57ca84e4d8bc7587f39522e9d17b3 | [
"MIT"
] | null | null | null | electrum_ltc/tests/test_lnpeer.py | SynchrotronCoinDev/electrum-ltc | 178589f30ce57ca84e4d8bc7587f39522e9d17b3 | [
"MIT"
] | null | null | null | electrum_ltc/tests/test_lnpeer.py | SynchrotronCoinDev/electrum-ltc | 178589f30ce57ca84e4d8bc7587f39522e9d17b3 | [
"MIT"
] | null | null | null | import asyncio
import tempfile
from decimal import Decimal
import os
from contextlib import contextmanager
from collections import defaultdict
import logging
import concurrent
from concurrent import futures
import unittest
from aiorpcx import TaskGroup
from electrum_ltc import constants
from electrum_ltc.network import Network
from electrum_ltc.ecc import ECPrivkey
from electrum_ltc import simple_config, lnutil
from electrum_ltc.lnaddr import lnencode, LnAddr, lndecode
from electrum_ltc.bitcoin import COIN, sha256
from electrum_ltc.util import bh2u, create_and_start_event_loop
from electrum_ltc.lnpeer import Peer
from electrum_ltc.lnutil import LNPeerAddr, Keypair, privkey_to_pubkey
from electrum_ltc.lnutil import LightningPeerConnectionClosed, RemoteMisbehaving
from electrum_ltc.lnutil import PaymentFailure, LnLocalFeatures, HTLCOwner
from electrum_ltc.lnchannel import channel_states, peer_states, Channel
from electrum_ltc.lnrouter import LNPathFinder
from electrum_ltc.channel_db import ChannelDB
from electrum_ltc.lnworker import LNWallet, NoPathFound
from electrum_ltc.lnmsg import encode_msg, decode_msg
from electrum_ltc.logging import console_stderr_handler, Logger
from electrum_ltc.lnworker import PaymentInfo, RECEIVED, PR_UNPAID
from .test_lnchannel import create_test_channels
from .test_bitcoin import needs_test_with_all_chacha20_implementations
from . import ElectrumTestCase
def keypair():
    """Generate a fresh random node keypair for a test peer."""
    secret = ECPrivkey.generate_random_key().get_secret_bytes()
    return Keypair(pubkey=privkey_to_pubkey(secret), privkey=secret)
@contextmanager
def noop_lock():
    """Yield immediately without acquiring anything; lock stand-in for tests."""
    yield None
class MockNetwork:
    """Minimal stand-in for electrum's ``Network``, backed by an in-memory
    transaction queue instead of real broadcasting."""

    # Reuse the real Network callback machinery unchanged.
    register_callback = Network.register_callback
    unregister_callback = Network.unregister_callback
    trigger_callback = Network.trigger_callback

    def __init__(self, tx_queue):
        self.callbacks = defaultdict(list)
        self.lnwatcher = None
        self.interface = None
        self.tx_queue = tx_queue
        self.asyncio_loop = asyncio.get_event_loop()
        # Throw-away config living in a temporary user dir.
        config_dir = tempfile.mkdtemp(prefix="electrum-lnpeer-test-")
        self.config = simple_config.SimpleConfig({}, read_user_dir_function=lambda: config_dir)
        self.channel_db = ChannelDB(self)
        self.channel_db.data_loaded.set()
        self.path_finder = LNPathFinder(self.channel_db)

    @property
    def callback_lock(self):
        return noop_lock()

    def get_local_height(self):
        return 0

    async def broadcast_transaction(self, tx):
        if self.tx_queue:
            await self.tx_queue.put(tx)

    async def try_broadcasting(self, tx, name):
        # NOTE(review): the coroutine below is created but never awaited, so
        # nothing is actually enqueued here — preserved exactly as in the
        # original; confirm whether an ``await`` was intended.
        self.broadcast_transaction(tx)
class MockWallet:
    """Wallet stub: labelling and persistence are no-ops, and it never
    reports itself as a lightning backup."""

    def set_label(self, x, y):
        """Ignore label assignments."""
        return None

    def save_db(self):
        """Ignore persistence requests."""
        return None

    def is_lightning_backup(self):
        """A mock wallet is never a restored-from-backup wallet."""
        return False
class MockLNWallet(Logger):
    """Minimal LNWallet stand-in wrapping a single channel.

    Holds just enough state for ``Peer`` to operate, and borrows the real
    ``LNWallet`` payment-bookkeeping methods (see the aliases at the bottom
    of the class) so payments behave like production code.
    """

    def __init__(self, remote_keypair, local_keypair, chan: 'Channel', tx_queue):
        Logger.__init__(self)
        self.remote_keypair = remote_keypair
        self.node_keypair = local_keypair
        self.network = MockNetwork(tx_queue)
        self.channels = {chan.channel_id: chan}
        self.payments = {}
        self.logs = defaultdict(list)
        self.wallet = MockWallet()
        # Advertise only data-loss-protect (optional) as our local features.
        self.localfeatures = LnLocalFeatures(0)
        self.localfeatures |= LnLocalFeatures.OPTION_DATA_LOSS_PROTECT_OPT
        self.pending_payments = defaultdict(asyncio.Future)
        # Wire the channel back to this (mock) lnworker.
        chan.lnworker = self
        chan.node_id = remote_keypair.pubkey
        # used in tests: clearing this event delays HTLC settlement so tests
        # can observe in-flight HTLCs (see test_close).
        self.enable_htlc_settle = asyncio.Event()
        self.enable_htlc_settle.set()

    def get_invoice_status(self, key):
        pass

    @property
    def lock(self):
        # No real concurrency in these tests, so locking is a no-op.
        return noop_lock()

    @property
    def peers(self):
        # Exactly one peer, set externally by the test (see prepare_peers).
        return {self.remote_keypair.pubkey: self.peer}

    def channels_for_peer(self, pubkey):
        return self.channels

    def get_channel_by_short_id(self, short_channel_id):
        with self.lock:
            for chan in self.channels.values():
                if chan.short_channel_id == short_channel_id:
                    return chan

    def save_channel(self, chan):
        print("Ignoring channel save")

    # Shared class-level state used by the borrowed LNWallet methods below.
    is_routing = set()
    preimages = {}
    # Borrow the real LNWallet implementations so payment bookkeeping and
    # routing behave exactly like production code.
    get_payment_info = LNWallet.get_payment_info
    save_payment_info = LNWallet.save_payment_info
    set_invoice_status = LNWallet.set_invoice_status
    set_payment_status = LNWallet.set_payment_status
    get_payment_status = LNWallet.get_payment_status
    await_payment = LNWallet.await_payment
    payment_received = LNWallet.payment_received
    payment_sent = LNWallet.payment_sent
    payment_failed = LNWallet.payment_failed
    save_preimage = LNWallet.save_preimage
    get_preimage = LNWallet.get_preimage
    _create_route_from_invoice = LNWallet._create_route_from_invoice
    _check_invoice = staticmethod(LNWallet._check_invoice)
    _pay_to_route = LNWallet._pay_to_route
    _pay = LNWallet._pay
    force_close_channel = LNWallet.force_close_channel
    try_force_closing = LNWallet.try_force_closing
    get_first_timestamp = lambda self: 0
class MockTransport:
    """In-memory transport: raw messages are exchanged through an asyncio
    queue instead of a socket."""

    def __init__(self, name):
        self._name = name
        self.queue = asyncio.Queue()

    def name(self):
        return self._name

    async def read_messages(self):
        # Block on the queue forever, yielding each raw message as it arrives.
        while True:
            message = await self.queue.get()
            yield message
class NoFeaturesTransport(MockTransport):
    """Transport that answers an ``init`` message with an init advertising no
    features at all.

    Used for testing that we require DATA_LOSS_PROTECT from the remote peer.
    """

    def send_bytes(self, data):
        message = decode_msg(data)
        print(message)
        if message[0] != 'init':
            return
        # Reply with zeroed local/global feature bitfields.
        reply = encode_msg('init', lflen=1, gflen=1, localfeatures=b"\x00", globalfeatures=b"\x00")
        self.queue.put_nowait(reply)
class PutIntoOthersQueueTransport(MockTransport):
    """Transport wired to a partner transport: everything sent here lands in
    the partner's receive queue (see :func:`transport_pair`)."""

    def __init__(self, name):
        super().__init__(name)
        # Partner transport; assigned after construction by transport_pair.
        self.other_mock_transport = None

    def send_bytes(self, data):
        partner = self.other_mock_transport
        partner.queue.put_nowait(data)
def transport_pair(name1, name2):
    """Create two transports cross-wired so each one sends into the other's
    receive queue, simulating a bidirectional connection."""
    first = PutIntoOthersQueueTransport(name1)
    second = PutIntoOthersQueueTransport(name2)
    first.other_mock_transport = second
    second.other_mock_transport = first
    return first, second
class TestPeer(ElectrumTestCase):
    """Integration-style tests driving two mock ``Peer`` instances connected
    through in-memory transports.

    Each test spawns the peers' message loops and HTLC switches with
    ``asyncio.gather`` and cancels the gather from inside the test coroutine
    once the assertions have run, so every test ends by expecting a
    ``CancelledError``.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        console_stderr_handler.setLevel(logging.DEBUG)

    def setUp(self):
        super().setUp()
        # Dedicated event loop running in a background thread; ``run`` below
        # submits coroutines to it.
        self.asyncio_loop, self._stop_loop, self._loop_thread = create_and_start_event_loop()

    def tearDown(self):
        super().tearDown()
        self.asyncio_loop.call_soon_threadsafe(self._stop_loop.set_result, 1)
        self._loop_thread.join(timeout=1)

    def prepare_peers(self, alice_channel, bob_channel):
        """Wire up two peers (with wallets, keys and transports) around the
        given pre-built channel pair and return all the pieces."""
        k1, k2 = keypair(), keypair()
        t1, t2 = transport_pair(alice_channel.name, bob_channel.name)
        q1, q2 = asyncio.Queue(), asyncio.Queue()
        w1 = MockLNWallet(k1, k2, alice_channel, tx_queue=q1)
        w2 = MockLNWallet(k2, k1, bob_channel, tx_queue=q2)
        p1 = Peer(w1, k1.pubkey, t1)
        p2 = Peer(w2, k2.pubkey, t2)
        w1.peer = p1
        w2.peer = p2
        # mark_open won't work if state is already OPEN.
        # so set it to FUNDED
        alice_channel._state = channel_states.FUNDED
        bob_channel._state = channel_states.FUNDED
        # this populates the channel graph:
        p1.mark_open(alice_channel)
        p2.mark_open(bob_channel)
        return p1, p2, w1, w2, q1, q2

    @staticmethod
    def prepare_invoice(
            w2,  # receiver
            *,
            amount_sat=100_000,
    ):
        """Register a preimage/payment-info pair on the receiving wallet and
        return a bech32-encoded invoice for it."""
        amount_btc = amount_sat/Decimal(COIN)
        payment_preimage = os.urandom(32)
        RHASH = sha256(payment_preimage)
        info = PaymentInfo(RHASH, amount_sat, RECEIVED, PR_UNPAID)
        w2.save_preimage(RHASH, payment_preimage)
        w2.save_payment_info(info)
        lnaddr = LnAddr(
            RHASH,
            amount_btc,
            tags=[('c', lnutil.MIN_FINAL_CLTV_EXPIRY_FOR_INVOICE),
                  ('d', 'coffee')
                  ])
        return lnencode(lnaddr, w2.node_keypair.privkey)

    def test_reestablish(self):
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
        for chan in (alice_channel, bob_channel):
            chan.peer_state = peer_states.DISCONNECTED
        async def reestablish():
            await asyncio.gather(
                p1.reestablish_channel(alice_channel),
                p2.reestablish_channel(bob_channel))
            self.assertEqual(alice_channel.peer_state, peer_states.GOOD)
            self.assertEqual(bob_channel.peer_state, peer_states.GOOD)
            gath.cancel()
        # NOTE(review): ``p1.htlc_switch()`` is started twice here and
        # ``p2.htlc_switch()`` is never started — looks like a typo (the
        # other tests pass p1 and p2); confirm before changing.
        gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p1.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())

    @needs_test_with_all_chacha20_implementations
    def test_reestablish_with_old_state(self):
        alice_channel, bob_channel = create_test_channels()
        alice_channel_0, bob_channel_0 = create_test_channels()  # these are identical
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
        pay_req = self.prepare_invoice(w2)
        async def pay():
            result = await w1._pay(pay_req)
            self.assertEqual(result, True)
            gath.cancel()
        gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())
        # Reconnect using alice's *stale* pre-payment channel state; bob must
        # detect this and force-close while alice's channel goes BAD.
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel_0, bob_channel)
        for chan in (alice_channel_0, bob_channel):
            chan.peer_state = peer_states.DISCONNECTED
        async def reestablish():
            await asyncio.gather(
                p1.reestablish_channel(alice_channel_0),
                p2.reestablish_channel(bob_channel))
            self.assertEqual(alice_channel_0.peer_state, peer_states.BAD)
            self.assertEqual(bob_channel._state, channel_states.FORCE_CLOSING)
            # wait so that pending messages are processed
            #await asyncio.sleep(1)
            gath.cancel()
        gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())

    @needs_test_with_all_chacha20_implementations
    def test_payment(self):
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
        pay_req = self.prepare_invoice(w2)
        async def pay():
            result = await w1._pay(pay_req)
            self.assertTrue(result)
            gath.cancel()
        gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())

    #@unittest.skip("too expensive")
    #@needs_test_with_all_chacha20_implementations
    def test_payments_stresstest(self):
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
        alice_init_balance_msat = alice_channel.balance(HTLCOwner.LOCAL)
        bob_init_balance_msat = bob_channel.balance(HTLCOwner.LOCAL)
        num_payments = 50
        #pay_reqs1 = [self.prepare_invoice(w1, amount_sat=1) for i in range(num_payments)]
        pay_reqs2 = [self.prepare_invoice(w2, amount_sat=1) for i in range(num_payments)]
        # Cap the number of concurrently in-flight HTLCs.
        max_htlcs_in_flight = asyncio.Semaphore(5)
        async def single_payment(pay_req):
            async with max_htlcs_in_flight:
                await w1._pay(pay_req)
        async def many_payments():
            async with TaskGroup() as group:
                for pay_req in pay_reqs2:
                    await group.spawn(single_payment(pay_req))
            gath.cancel()
        gath = asyncio.gather(many_payments(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())
        # Each of the 50 payments moved exactly 1000 msat from alice to bob.
        self.assertEqual(alice_init_balance_msat - num_payments * 1000, alice_channel.balance(HTLCOwner.LOCAL))
        self.assertEqual(alice_init_balance_msat - num_payments * 1000, bob_channel.balance(HTLCOwner.REMOTE))
        self.assertEqual(bob_init_balance_msat + num_payments * 1000, bob_channel.balance(HTLCOwner.LOCAL))
        self.assertEqual(bob_init_balance_msat + num_payments * 1000, alice_channel.balance(HTLCOwner.REMOTE))

    @needs_test_with_all_chacha20_implementations
    def test_close(self):
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
        w1.network.config.set_key('dynamic_fees', False)
        w2.network.config.set_key('dynamic_fees', False)
        w1.network.config.set_key('fee_per_kb', 5000)
        w2.network.config.set_key('fee_per_kb', 1000)
        # Delay bob's HTLC settlement so the channel is closed while an HTLC
        # is still in flight (settle is re-enabled by set_settle below).
        w2.enable_htlc_settle.clear()
        pay_req = self.prepare_invoice(w2)
        lnaddr = lndecode(pay_req, expected_hrp=constants.net.SEGWIT_HRP)
        async def pay():
            await asyncio.wait_for(p1.initialized, 1)
            await asyncio.wait_for(p2.initialized, 1)
            # alice sends htlc
            route = w1._create_route_from_invoice(decoded_invoice=lnaddr)
            htlc = p1.pay(route, alice_channel, int(lnaddr.amount * COIN * 1000), lnaddr.paymenthash, lnaddr.get_min_final_cltv_expiry())
            # alice closes
            await p1.close_channel(alice_channel.channel_id)
            gath.cancel()
        async def set_settle():
            await asyncio.sleep(0.1)
            w2.enable_htlc_settle.set()
        gath = asyncio.gather(pay(), set_settle(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())

    def test_channel_usage_after_closing(self):
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, q1, q2 = self.prepare_peers(alice_channel, bob_channel)
        pay_req = self.prepare_invoice(w2)
        addr = w1._check_invoice(pay_req)
        route = w1._create_route_from_invoice(decoded_invoice=addr)
        run(w1.force_close_channel(alice_channel.channel_id))
        # check if a tx (commitment transaction) was broadcasted:
        assert q1.qsize() == 1
        with self.assertRaises(NoPathFound) as e:
            w1._create_route_from_invoice(decoded_invoice=addr)
        peer = w1.peers[route[0].node_id]
        # AssertionError is ok since we shouldn't use old routes, and the
        # route finding should fail when channel is closed
        async def f():
            await asyncio.gather(w1._pay_to_route(route, addr), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        with self.assertRaises(PaymentFailure):
            run(f())
def run(coro):
    """Submit *coro* to the (background) test event loop and block until it
    produces a result, which is returned."""
    loop = asyncio.get_event_loop()
    future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
    return future.result()
| 39.633838 | 139 | 0.680663 | import asyncio
import tempfile
from decimal import Decimal
import os
from contextlib import contextmanager
from collections import defaultdict
import logging
import concurrent
from concurrent import futures
import unittest
from aiorpcx import TaskGroup
from electrum_ltc import constants
from electrum_ltc.network import Network
from electrum_ltc.ecc import ECPrivkey
from electrum_ltc import simple_config, lnutil
from electrum_ltc.lnaddr import lnencode, LnAddr, lndecode
from electrum_ltc.bitcoin import COIN, sha256
from electrum_ltc.util import bh2u, create_and_start_event_loop
from electrum_ltc.lnpeer import Peer
from electrum_ltc.lnutil import LNPeerAddr, Keypair, privkey_to_pubkey
from electrum_ltc.lnutil import LightningPeerConnectionClosed, RemoteMisbehaving
from electrum_ltc.lnutil import PaymentFailure, LnLocalFeatures, HTLCOwner
from electrum_ltc.lnchannel import channel_states, peer_states, Channel
from electrum_ltc.lnrouter import LNPathFinder
from electrum_ltc.channel_db import ChannelDB
from electrum_ltc.lnworker import LNWallet, NoPathFound
from electrum_ltc.lnmsg import encode_msg, decode_msg
from electrum_ltc.logging import console_stderr_handler, Logger
from electrum_ltc.lnworker import PaymentInfo, RECEIVED, PR_UNPAID
from .test_lnchannel import create_test_channels
from .test_bitcoin import needs_test_with_all_chacha20_implementations
from . import ElectrumTestCase
def keypair():
priv = ECPrivkey.generate_random_key().get_secret_bytes()
k1 = Keypair(
pubkey=privkey_to_pubkey(priv),
privkey=priv)
return k1
@contextmanager
def noop_lock():
yield
class MockNetwork:
def __init__(self, tx_queue):
self.callbacks = defaultdict(list)
self.lnwatcher = None
self.interface = None
user_config = {}
user_dir = tempfile.mkdtemp(prefix="electrum-lnpeer-test-")
self.config = simple_config.SimpleConfig(user_config, read_user_dir_function=lambda: user_dir)
self.asyncio_loop = asyncio.get_event_loop()
self.channel_db = ChannelDB(self)
self.channel_db.data_loaded.set()
self.path_finder = LNPathFinder(self.channel_db)
self.tx_queue = tx_queue
@property
def callback_lock(self):
return noop_lock()
register_callback = Network.register_callback
unregister_callback = Network.unregister_callback
trigger_callback = Network.trigger_callback
def get_local_height(self):
return 0
async def broadcast_transaction(self, tx):
if self.tx_queue:
await self.tx_queue.put(tx)
async def try_broadcasting(self, tx, name):
self.broadcast_transaction(tx)
class MockWallet:
def set_label(self, x, y):
pass
def save_db(self):
pass
def is_lightning_backup(self):
return False
class MockLNWallet(Logger):
def __init__(self, remote_keypair, local_keypair, chan: 'Channel', tx_queue):
Logger.__init__(self)
self.remote_keypair = remote_keypair
self.node_keypair = local_keypair
self.network = MockNetwork(tx_queue)
self.channels = {chan.channel_id: chan}
self.payments = {}
self.logs = defaultdict(list)
self.wallet = MockWallet()
self.localfeatures = LnLocalFeatures(0)
self.localfeatures |= LnLocalFeatures.OPTION_DATA_LOSS_PROTECT_OPT
self.pending_payments = defaultdict(asyncio.Future)
chan.lnworker = self
chan.node_id = remote_keypair.pubkey
self.enable_htlc_settle = asyncio.Event()
self.enable_htlc_settle.set()
def get_invoice_status(self, key):
pass
@property
def lock(self):
return noop_lock()
@property
def peers(self):
return {self.remote_keypair.pubkey: self.peer}
def channels_for_peer(self, pubkey):
return self.channels
def get_channel_by_short_id(self, short_channel_id):
with self.lock:
for chan in self.channels.values():
if chan.short_channel_id == short_channel_id:
return chan
def save_channel(self, chan):
print("Ignoring channel save")
is_routing = set()
preimages = {}
get_payment_info = LNWallet.get_payment_info
save_payment_info = LNWallet.save_payment_info
set_invoice_status = LNWallet.set_invoice_status
set_payment_status = LNWallet.set_payment_status
get_payment_status = LNWallet.get_payment_status
await_payment = LNWallet.await_payment
payment_received = LNWallet.payment_received
payment_sent = LNWallet.payment_sent
payment_failed = LNWallet.payment_failed
save_preimage = LNWallet.save_preimage
get_preimage = LNWallet.get_preimage
_create_route_from_invoice = LNWallet._create_route_from_invoice
_check_invoice = staticmethod(LNWallet._check_invoice)
_pay_to_route = LNWallet._pay_to_route
_pay = LNWallet._pay
force_close_channel = LNWallet.force_close_channel
try_force_closing = LNWallet.try_force_closing
get_first_timestamp = lambda self: 0
class MockTransport:
def __init__(self, name):
self.queue = asyncio.Queue()
self._name = name
def name(self):
return self._name
async def read_messages(self):
while True:
yield await self.queue.get()
class NoFeaturesTransport(MockTransport):
def send_bytes(self, data):
decoded = decode_msg(data)
print(decoded)
if decoded[0] == 'init':
self.queue.put_nowait(encode_msg('init', lflen=1, gflen=1, localfeatures=b"\x00", globalfeatures=b"\x00"))
class PutIntoOthersQueueTransport(MockTransport):
def __init__(self, name):
super().__init__(name)
self.other_mock_transport = None
def send_bytes(self, data):
self.other_mock_transport.queue.put_nowait(data)
def transport_pair(name1, name2):
t1 = PutIntoOthersQueueTransport(name1)
t2 = PutIntoOthersQueueTransport(name2)
t1.other_mock_transport = t2
t2.other_mock_transport = t1
return t1, t2
class TestPeer(ElectrumTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
console_stderr_handler.setLevel(logging.DEBUG)
def setUp(self):
super().setUp()
self.asyncio_loop, self._stop_loop, self._loop_thread = create_and_start_event_loop()
def tearDown(self):
super().tearDown()
self.asyncio_loop.call_soon_threadsafe(self._stop_loop.set_result, 1)
self._loop_thread.join(timeout=1)
def prepare_peers(self, alice_channel, bob_channel):
k1, k2 = keypair(), keypair()
t1, t2 = transport_pair(alice_channel.name, bob_channel.name)
q1, q2 = asyncio.Queue(), asyncio.Queue()
w1 = MockLNWallet(k1, k2, alice_channel, tx_queue=q1)
w2 = MockLNWallet(k2, k1, bob_channel, tx_queue=q2)
p1 = Peer(w1, k1.pubkey, t1)
p2 = Peer(w2, k2.pubkey, t2)
w1.peer = p1
w2.peer = p2
# so set it to FUNDED
alice_channel._state = channel_states.FUNDED
bob_channel._state = channel_states.FUNDED
# this populates the channel graph:
p1.mark_open(alice_channel)
p2.mark_open(bob_channel)
return p1, p2, w1, w2, q1, q2
@staticmethod
def prepare_invoice(
w2, # receiver
*,
amount_sat=100_000,
):
amount_btc = amount_sat/Decimal(COIN)
payment_preimage = os.urandom(32)
RHASH = sha256(payment_preimage)
info = PaymentInfo(RHASH, amount_sat, RECEIVED, PR_UNPAID)
w2.save_preimage(RHASH, payment_preimage)
w2.save_payment_info(info)
lnaddr = LnAddr(
RHASH,
amount_btc,
tags=[('c', lnutil.MIN_FINAL_CLTV_EXPIRY_FOR_INVOICE),
('d', 'coffee')
])
return lnencode(lnaddr, w2.node_keypair.privkey)
def test_reestablish(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
for chan in (alice_channel, bob_channel):
chan.peer_state = peer_states.DISCONNECTED
async def reestablish():
await asyncio.gather(
p1.reestablish_channel(alice_channel),
p2.reestablish_channel(bob_channel))
self.assertEqual(alice_channel.peer_state, peer_states.GOOD)
self.assertEqual(bob_channel.peer_state, peer_states.GOOD)
gath.cancel()
gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p1.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
@needs_test_with_all_chacha20_implementations
def test_reestablish_with_old_state(self):
alice_channel, bob_channel = create_test_channels()
alice_channel_0, bob_channel_0 = create_test_channels() # these are identical
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
async def pay():
result = await w1._pay(pay_req)
self.assertEqual(result, True)
gath.cancel()
gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel_0, bob_channel)
for chan in (alice_channel_0, bob_channel):
chan.peer_state = peer_states.DISCONNECTED
async def reestablish():
await asyncio.gather(
p1.reestablish_channel(alice_channel_0),
p2.reestablish_channel(bob_channel))
self.assertEqual(alice_channel_0.peer_state, peer_states.BAD)
self.assertEqual(bob_channel._state, channel_states.FORCE_CLOSING)
# wait so that pending messages are processed
#await asyncio.sleep(1)
gath.cancel()
gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
@needs_test_with_all_chacha20_implementations
def test_payment(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
async def pay():
result = await w1._pay(pay_req)
self.assertTrue(result)
gath.cancel()
gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
#@unittest.skip("too expensive")
#@needs_test_with_all_chacha20_implementations
def test_payments_stresstest(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
alice_init_balance_msat = alice_channel.balance(HTLCOwner.LOCAL)
bob_init_balance_msat = bob_channel.balance(HTLCOwner.LOCAL)
num_payments = 50
#pay_reqs1 = [self.prepare_invoice(w1, amount_sat=1) for i in range(num_payments)]
pay_reqs2 = [self.prepare_invoice(w2, amount_sat=1) for i in range(num_payments)]
max_htlcs_in_flight = asyncio.Semaphore(5)
async def single_payment(pay_req):
async with max_htlcs_in_flight:
await w1._pay(pay_req)
async def many_payments():
async with TaskGroup() as group:
for pay_req in pay_reqs2:
await group.spawn(single_payment(pay_req))
gath.cancel()
gath = asyncio.gather(many_payments(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
self.assertEqual(alice_init_balance_msat - num_payments * 1000, alice_channel.balance(HTLCOwner.LOCAL))
self.assertEqual(alice_init_balance_msat - num_payments * 1000, bob_channel.balance(HTLCOwner.REMOTE))
self.assertEqual(bob_init_balance_msat + num_payments * 1000, bob_channel.balance(HTLCOwner.LOCAL))
self.assertEqual(bob_init_balance_msat + num_payments * 1000, alice_channel.balance(HTLCOwner.REMOTE))
@needs_test_with_all_chacha20_implementations
def test_close(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
w1.network.config.set_key('dynamic_fees', False)
w2.network.config.set_key('dynamic_fees', False)
w1.network.config.set_key('fee_per_kb', 5000)
w2.network.config.set_key('fee_per_kb', 1000)
w2.enable_htlc_settle.clear()
pay_req = self.prepare_invoice(w2)
lnaddr = lndecode(pay_req, expected_hrp=constants.net.SEGWIT_HRP)
async def pay():
await asyncio.wait_for(p1.initialized, 1)
await asyncio.wait_for(p2.initialized, 1)
# alice sends htlc
route = w1._create_route_from_invoice(decoded_invoice=lnaddr)
htlc = p1.pay(route, alice_channel, int(lnaddr.amount * COIN * 1000), lnaddr.paymenthash, lnaddr.get_min_final_cltv_expiry())
# alice closes
await p1.close_channel(alice_channel.channel_id)
gath.cancel()
async def set_settle():
await asyncio.sleep(0.1)
w2.enable_htlc_settle.set()
gath = asyncio.gather(pay(), set_settle(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
def test_channel_usage_after_closing(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, q1, q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
addr = w1._check_invoice(pay_req)
route = w1._create_route_from_invoice(decoded_invoice=addr)
run(w1.force_close_channel(alice_channel.channel_id))
# check if a tx (commitment transaction) was broadcasted:
assert q1.qsize() == 1
with self.assertRaises(NoPathFound) as e:
w1._create_route_from_invoice(decoded_invoice=addr)
peer = w1.peers[route[0].node_id]
# AssertionError is ok since we shouldn't use old routes, and the
async def f():
await asyncio.gather(w1._pay_to_route(route, addr), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
with self.assertRaises(PaymentFailure):
run(f())
def run(coro):
return asyncio.run_coroutine_threadsafe(coro, loop=asyncio.get_event_loop()).result()
| true | true |
f72ad123a16de5b88d83b7f0efe6887a58556b76 | 1,491 | py | Python | examples/map_view_simple_example.py | TomSchimansky/TkinterMapView | eb84f600e9b6bb8c60d88149e277b3abee704a70 | [
"CC0-1.0"
] | 43 | 2022-01-02T04:23:28.000Z | 2022-03-30T03:04:03.000Z | examples/map_view_simple_example.py | TomSchimansky/TkinterMapView | eb84f600e9b6bb8c60d88149e277b3abee704a70 | [
"CC0-1.0"
] | 6 | 2022-02-24T09:19:35.000Z | 2022-03-24T18:32:22.000Z | examples/map_view_simple_example.py | TomSchimansky/TkinterMapView | eb84f600e9b6bb8c60d88149e277b3abee704a70 | [
"CC0-1.0"
] | 4 | 2022-01-03T16:49:04.000Z | 2022-03-21T09:25:44.000Z | import tkinter
import tkintermapview
# create tkinter window
root_tk = tkinter.Tk()
root_tk.geometry(f"{1000}x{700}")
root_tk.title("map_view_simple_example.py")
# create map widget
map_widget = tkintermapview.TkinterMapView(root_tk, width=1000, height=700, corner_radius=0)
map_widget.pack(fill="both", expand=True)
# set other tile server (standard is OpenStreetMap)
# map_widget.set_tile_server("https://mt0.google.com/vt/lyrs=m&hl=en&x={x}&y={y}&z={z}&s=Ga", max_zoom=22) # google normal
# map_widget.set_tile_server("https://mt0.google.com/vt/lyrs=s&hl=en&x={x}&y={y}&z={z}&s=Ga", max_zoom=22) # google satellite
# set current position and zoom
# map_widget.set_position(52.516268, 13.377695, marker=False) # Berlin, Germany
# map_widget.set_zoom(17)
# set current position with address
# map_widget.set_address("Berlin Germany", marker=False)
def marker_click(marker):
print(f"marker clicked - text: {marker.text} position: {marker.position}")
# set a position marker (also with a custom color and command on click)
marker_2 = map_widget.set_marker(52.516268, 13.377695, text="Brandenburger Tor", command=marker_click)
marker_3 = map_widget.set_marker(52.55, 13.4, text="52.55, 13.4")
# marker_3.set_position(...)
# marker_3.set_text(...)
# marker_3.delete()
# set a path
path_1 = map_widget.set_path([marker_2.position, marker_3.position, (52.568, 13.4), (52.569, 13.35)])
# path_1.add_position(...)
# path_1.remove_position(...)
# path_1.delete()
root_tk.mainloop()
| 36.365854 | 126 | 0.739772 | import tkinter
import tkintermapview
root_tk = tkinter.Tk()
root_tk.geometry(f"{1000}x{700}")
root_tk.title("map_view_simple_example.py")
map_widget = tkintermapview.TkinterMapView(root_tk, width=1000, height=700, corner_radius=0)
map_widget.pack(fill="both", expand=True)
rker clicked - text: {marker.text} position: {marker.position}")
marker_2 = map_widget.set_marker(52.516268, 13.377695, text="Brandenburger Tor", command=marker_click)
marker_3 = map_widget.set_marker(52.55, 13.4, text="52.55, 13.4")
path_1 = map_widget.set_path([marker_2.position, marker_3.position, (52.568, 13.4), (52.569, 13.35)])
root_tk.mainloop()
| true | true |
f72ad1768efdd493f94b24b3d7caadf10628ed7b | 5,376 | py | Python | qtc_scoop.py | keceli/qtc | 334fae9cd0eea493437e95c9aeb5a3088cbac343 | [
"Apache-2.0"
] | null | null | null | qtc_scoop.py | keceli/qtc | 334fae9cd0eea493437e95c9aeb5a3088cbac343 | [
"Apache-2.0"
] | null | null | null | qtc_scoop.py | keceli/qtc | 334fae9cd0eea493437e95c9aeb5a3088cbac343 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import argparse
import subprocess
import iotools as io
import obtools as ob
import qctools as qc
import tctools as tc
try:
_runserial = False
from scoop import futures
from scoop import utils
except:
_runserial = True
print "No scoop, no concurency \n Running in serial mode..."
__updated__ = "2017-05-03"
_mopacexe = 'mopac'
_nwchemexe = 'nwchem'
_gaussianexe = 'mopac'
_messpexe = 'messpf'
_thermpexe = 'thermp'
_pac99exe = 'pac99'
_qcmethod = 'pm3'
_qccode = 'mopac'
_runqc = False
_runthermo = False
def get_args():
"""
Returns args object that contains command line options.
"""
import argparse
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description=
"""
April 18, 2017
Murat Keceli
Performs quantum chemistry calculations to calculate thermochemical parameters.
Writes NASA polynomials in different formats.
Uses different codes for these purposes
""")
parser.add_argument('-n', '--nproc', type=int,
default=multiprocessing.cpu_count(),
help='Number of processors, default is all processors')
parser.add_argument('-i', '--input', type=argparse.FileType('r'), nargs=1,
default='qc_list.txt',
help='List of inchi or smiles for species to be calculated')
parser.add_argument('-m', '--qcmethod', type=str, nargs=1,
default='pm3',
help='Quantum chemistry method to be used')
parser.add_argument('-c', '--qccode', type=str, nargs=1,
default='mopac',
help='Quantum chemistry code to be used')
parser.add_argument('-q', '--runqc', action='store_true',
help='Run quantum chemistry calculation')
parser.add_argument('-t', '--runthermo', action='store_true',
help='Run thermochemistry calculations')
parser.add_argument('--mopacexe', type=str, nargs=1,
default='mopac',
help='Path for mopac executable')
parser.add_argument('--messpf', type=str, nargs=1,
default='messpf',
help='Path for MESS partition function executable')
parser.add_argument('--thermp', type=str, nargs=1,
default='thermp',
help='Path for thermp executable')
parser.add_argument('--pac99', type=str, nargs=1,
default='pac99',
help='Path for pac99 executable')
return parser.parse_args()
def get_chemkin_polynomial(mol, method, zpe, xyz, freqs, deltaH):
"""
A driver to perform all operations to write NASA polynomial in
chemkin format. Assumes quantum chemistry calculation is performed.
"""
inputfile = 'pf.inp'
name = mol.formula
tag = method
inp = tc.get_pf_input(mol, method, zpe, xyz, freqs)
# print 'Running mess partition function'
tc.run_pf()
# print 'Generate thermp input'
tc.write_thermp_input(mol.formula, deltaH)
# print 'Running thermp'
tc.run_thermp()
# print 'Running pac99'
tc.run_pac99(name)
# print 'Converting to chemkin format'
chemkinfile = name + '.ckin'
tc.write_chemkin_file(deltaH, tag, name, chemkinfile)
return
def run(s):
"""
A driver function to run quantum chemistry and thermochemistry calculations based
on command line options:
--qcmethod
--qccode
"""
import qctools as qc
import obtools as ob
import tctools as tc
import iotools as io
mol = ob.get_mol(s)
mult = ob.get_multiplicity(mol)
dirpath = ob.get_unique_path(mol, method=_qcmethod, mult=mult)
groupsfile = 'new.groups'
io.mkdir(dirpath)
cwd = io.pwd()
if _runthermo:
if io.check_file(groupsfile):
io.cp(groupsfile, dirpath)
if not io.check_file(groupsfile, 1):
print 'Could not copy new.groups file to target directory {0}'.format(dirpath)
return -1
else:
print 'new.groups file required in working directory'
return -1
if io.check_dir(dirpath, 1):
io.cd(dirpath)
else:
print 'I/O error, {0} directory not found'.format(dirpath)
return -1
if _runqc:
if _qccode == 'mopac':
outstr = qc.run_mopac(s, mopacexe=_mopacexe, method=_qcmethod, mult=mult)
outfile = outstr.split(' : ')[0]
if _runthermo:
lines = io.read_file(outfile, aslines=True)
xyz = qc.get_mopac_xyz(lines)
freqs = qc.get_mopac_freq(lines)
zpe = qc.get_mopac_zpe(lines)
deltaH = qc.get_mopac_deltaH(lines)
get_chemkin_polynomial(mol, _qcmethod, zpe, xyz, freqs, deltaH)
io.cd(cwd)
return outstr
if __name__ == "__main__":
args = get_args()
print args
global _runqc
_runqc = args.runqc
_runthermo = args.runthermo
_qcmethod = args.qcmethod
_qccode = args.qccode
nproc = args.nproc
mylist = io.read_list('qc_list.txt')
results = pool.map(run, mylist)
print 'Output file : Error code'
for result in results:
print result
| 33.391304 | 94 | 0.607515 |
import argparse
import subprocess
import iotools as io
import obtools as ob
import qctools as qc
import tctools as tc
try:
_runserial = False
from scoop import futures
from scoop import utils
except:
_runserial = True
print "No scoop, no concurency \n Running in serial mode..."
__updated__ = "2017-05-03"
_mopacexe = 'mopac'
_nwchemexe = 'nwchem'
_gaussianexe = 'mopac'
_messpexe = 'messpf'
_thermpexe = 'thermp'
_pac99exe = 'pac99'
_qcmethod = 'pm3'
_qccode = 'mopac'
_runqc = False
_runthermo = False
def get_args():
"""
Returns args object that contains command line options.
"""
import argparse
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description=
"""
April 18, 2017
Murat Keceli
Performs quantum chemistry calculations to calculate thermochemical parameters.
Writes NASA polynomials in different formats.
Uses different codes for these purposes
""")
parser.add_argument('-n', '--nproc', type=int,
default=multiprocessing.cpu_count(),
help='Number of processors, default is all processors')
parser.add_argument('-i', '--input', type=argparse.FileType('r'), nargs=1,
default='qc_list.txt',
help='List of inchi or smiles for species to be calculated')
parser.add_argument('-m', '--qcmethod', type=str, nargs=1,
default='pm3',
help='Quantum chemistry method to be used')
parser.add_argument('-c', '--qccode', type=str, nargs=1,
default='mopac',
help='Quantum chemistry code to be used')
parser.add_argument('-q', '--runqc', action='store_true',
help='Run quantum chemistry calculation')
parser.add_argument('-t', '--runthermo', action='store_true',
help='Run thermochemistry calculations')
parser.add_argument('--mopacexe', type=str, nargs=1,
default='mopac',
help='Path for mopac executable')
parser.add_argument('--messpf', type=str, nargs=1,
default='messpf',
help='Path for MESS partition function executable')
parser.add_argument('--thermp', type=str, nargs=1,
default='thermp',
help='Path for thermp executable')
parser.add_argument('--pac99', type=str, nargs=1,
default='pac99',
help='Path for pac99 executable')
return parser.parse_args()
def get_chemkin_polynomial(mol, method, zpe, xyz, freqs, deltaH):
"""
A driver to perform all operations to write NASA polynomial in
chemkin format. Assumes quantum chemistry calculation is performed.
"""
inputfile = 'pf.inp'
name = mol.formula
tag = method
inp = tc.get_pf_input(mol, method, zpe, xyz, freqs)
tc.run_pf()
tc.write_thermp_input(mol.formula, deltaH)
tc.run_thermp()
tc.run_pac99(name)
chemkinfile = name + '.ckin'
tc.write_chemkin_file(deltaH, tag, name, chemkinfile)
return
def run(s):
"""
A driver function to run quantum chemistry and thermochemistry calculations based
on command line options:
--qcmethod
--qccode
"""
import qctools as qc
import obtools as ob
import tctools as tc
import iotools as io
mol = ob.get_mol(s)
mult = ob.get_multiplicity(mol)
dirpath = ob.get_unique_path(mol, method=_qcmethod, mult=mult)
groupsfile = 'new.groups'
io.mkdir(dirpath)
cwd = io.pwd()
if _runthermo:
if io.check_file(groupsfile):
io.cp(groupsfile, dirpath)
if not io.check_file(groupsfile, 1):
print 'Could not copy new.groups file to target directory {0}'.format(dirpath)
return -1
else:
print 'new.groups file required in working directory'
return -1
if io.check_dir(dirpath, 1):
io.cd(dirpath)
else:
print 'I/O error, {0} directory not found'.format(dirpath)
return -1
if _runqc:
if _qccode == 'mopac':
outstr = qc.run_mopac(s, mopacexe=_mopacexe, method=_qcmethod, mult=mult)
outfile = outstr.split(' : ')[0]
if _runthermo:
lines = io.read_file(outfile, aslines=True)
xyz = qc.get_mopac_xyz(lines)
freqs = qc.get_mopac_freq(lines)
zpe = qc.get_mopac_zpe(lines)
deltaH = qc.get_mopac_deltaH(lines)
get_chemkin_polynomial(mol, _qcmethod, zpe, xyz, freqs, deltaH)
io.cd(cwd)
return outstr
if __name__ == "__main__":
args = get_args()
print args
global _runqc
_runqc = args.runqc
_runthermo = args.runthermo
_qcmethod = args.qcmethod
_qccode = args.qccode
nproc = args.nproc
mylist = io.read_list('qc_list.txt')
results = pool.map(run, mylist)
print 'Output file : Error code'
for result in results:
print result
| false | true |
f72ad17de09166bbcef6aaac4ff6b283c77049fa | 2,206 | py | Python | retrieve_response.py | kit-data-manager/gemma | 0ae4e64f966b389c7e7c5619c8fd09bef78c8c87 | [
"Apache-2.0"
] | null | null | null | retrieve_response.py | kit-data-manager/gemma | 0ae4e64f966b389c7e7c5619c8fd09bef78c8c87 | [
"Apache-2.0"
] | null | null | null | retrieve_response.py | kit-data-manager/gemma | 0ae4e64f966b389c7e7c5619c8fd09bef78c8c87 | [
"Apache-2.0"
] | null | null | null | import http.client
import os
import json
import wget
import mapping_functions
import pprint
import sys
HOST = 'episteme2.scc.kit.edu'
PORT = '8080'
URL = os.path.join('http://' + HOST + ':' + PORT, 'api/v1/dataresources')
output_folder = sys.argv[1]
payload = "{\n \t\"resourceType\": {\n \t\t\"typeGeneral\":\"TEXT\"\n \t}\n}"
headers = {'Content-Type': "application/json", 'cache-control': "no-cache"}
size = 20
page = 0
def http_call(TYPE, host=HOST, port=PORT, endpoint='', search='', query='', payload='', headers={}):
check_http_method(TYPE)
conn = http.client.HTTPConnection(host, port)
if search != '' or query != '':
endpoint = os.path.join(endpoint, search + query)
url = os.path.join(URL, endpoint)
print('URL: ', url)
conn.request(TYPE, url, payload, headers)
res = conn.getresponse()
data = json.loads(res.read().decode('utf-8'))
return data
def check_http_method(method):
assert(isinstance(method, str)), 'method must be a string'
list = ['POST', 'GET', 'PUT', 'PATCH', 'DELETE']
if method not in list:
print("{} not allowed. Use: 'POST', 'GET', 'PUT', 'PATCH', 'DELETE'".format(method))
return
def download_file(file_id, extention='xml'):
endpoint = 'data/manuscript_metadata.' + extention
url = os.path.join(URL, file_id, endpoint)
output_file = file_id + "." + extention
wget.download(url, os.path.join(output_folder, output_file))
while True:
retrieve = 'search?size=' + str(size) + '&page=' + str(page)
data = http_call('POST', search=retrieve, payload=payload, headers=headers)
print('{} results at page {}'.format(len(data), page))
if len(data) == 0:
break
for resourse in data:
manuscript_id = resourse['id']
print("manuscript id: {}".format(manuscript_id))
if resourse['state'] == "REVOKED":
print("Status of resource {} is {}".format(resourse, resourse['state']))
continue
assert(resourse['resourceType']['value'] == 'manuscriptMetadata'), "resourceType is not manuscriptMetadata"
download_file(manuscript_id, 'json')
if len(data) == size:
page += 1
else:
break
| 30.638889 | 115 | 0.629193 | import http.client
import os
import json
import wget
import mapping_functions
import pprint
import sys
HOST = 'episteme2.scc.kit.edu'
PORT = '8080'
URL = os.path.join('http://' + HOST + ':' + PORT, 'api/v1/dataresources')
output_folder = sys.argv[1]
payload = "{\n \t\"resourceType\": {\n \t\t\"typeGeneral\":\"TEXT\"\n \t}\n}"
headers = {'Content-Type': "application/json", 'cache-control': "no-cache"}
size = 20
page = 0
def http_call(TYPE, host=HOST, port=PORT, endpoint='', search='', query='', payload='', headers={}):
check_http_method(TYPE)
conn = http.client.HTTPConnection(host, port)
if search != '' or query != '':
endpoint = os.path.join(endpoint, search + query)
url = os.path.join(URL, endpoint)
print('URL: ', url)
conn.request(TYPE, url, payload, headers)
res = conn.getresponse()
data = json.loads(res.read().decode('utf-8'))
return data
def check_http_method(method):
assert(isinstance(method, str)), 'method must be a string'
list = ['POST', 'GET', 'PUT', 'PATCH', 'DELETE']
if method not in list:
print("{} not allowed. Use: 'POST', 'GET', 'PUT', 'PATCH', 'DELETE'".format(method))
return
def download_file(file_id, extention='xml'):
endpoint = 'data/manuscript_metadata.' + extention
url = os.path.join(URL, file_id, endpoint)
output_file = file_id + "." + extention
wget.download(url, os.path.join(output_folder, output_file))
while True:
retrieve = 'search?size=' + str(size) + '&page=' + str(page)
data = http_call('POST', search=retrieve, payload=payload, headers=headers)
print('{} results at page {}'.format(len(data), page))
if len(data) == 0:
break
for resourse in data:
manuscript_id = resourse['id']
print("manuscript id: {}".format(manuscript_id))
if resourse['state'] == "REVOKED":
print("Status of resource {} is {}".format(resourse, resourse['state']))
continue
assert(resourse['resourceType']['value'] == 'manuscriptMetadata'), "resourceType is not manuscriptMetadata"
download_file(manuscript_id, 'json')
if len(data) == size:
page += 1
else:
break
| true | true |
f72ad2f82bf260bd112b090bded6d3c5ba2e8a43 | 1,180 | py | Python | profiles_api/serializers.py | Atique-7/drf-genesis | a333564d285885c7661e3324d5503488d9ced6ae | [
"MIT"
] | null | null | null | profiles_api/serializers.py | Atique-7/drf-genesis | a333564d285885c7661e3324d5503488d9ced6ae | [
"MIT"
] | null | null | null | profiles_api/serializers.py | Atique-7/drf-genesis | a333564d285885c7661e3324d5503488d9ced6ae | [
"MIT"
] | null | null | null | from rest_framework import serializers
from profiles_api import models
class UserProfileSerializer(serializers.ModelSerializer):
"""serializes a user profile object"""
class Meta:
model = models.UserProfile
fields = ('id', 'name', 'email', 'password')
extra_kwargs = {
'password' : {
'write_only' : True,
'style' : {
'input_type' : 'password'
}
}
}
# We now take over the default create function.
def create(self, validated_data):
"""create and return a new user"""
user = models.UserProfile.objects.create_user(
email = validated_data['email'],
name = validated_data['name'],
password = validated_data['password'] )
return user
class ProfileFeedItemSerializer(serializers.ModelSerializer):
"""serializes profile feed items"""
class Meta:
model = models.ProfileFeedItem
fields = ('id', 'user_profile', 'status_text', 'created_on')
extra_kwargs = {
'user_profile' : {
'read_only' : True
}
} | 29.5 | 68 | 0.561864 | from rest_framework import serializers
from profiles_api import models
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = models.UserProfile
fields = ('id', 'name', 'email', 'password')
extra_kwargs = {
'password' : {
'write_only' : True,
'style' : {
'input_type' : 'password'
}
}
}
def create(self, validated_data):
user = models.UserProfile.objects.create_user(
email = validated_data['email'],
name = validated_data['name'],
password = validated_data['password'] )
return user
class ProfileFeedItemSerializer(serializers.ModelSerializer):
class Meta:
model = models.ProfileFeedItem
fields = ('id', 'user_profile', 'status_text', 'created_on')
extra_kwargs = {
'user_profile' : {
'read_only' : True
}
} | true | true |
f72ad439a6e7cf5dac1b087074d4ee471a260a4b | 52 | py | Python | tests/python/overload1.py | jacereda/py2nim | 56fc2699d31241c60bed726f59efea4bf46be238 | [
"MIT"
] | 10 | 2020-03-10T12:01:01.000Z | 2021-05-23T19:47:06.000Z | tests/python/overload1.py | jacereda/py2nim | 56fc2699d31241c60bed726f59efea4bf46be238 | [
"MIT"
] | null | null | null | tests/python/overload1.py | jacereda/py2nim | 56fc2699d31241c60bed726f59efea4bf46be238 | [
"MIT"
] | 1 | 2020-07-17T11:20:56.000Z | 2020-07-17T11:20:56.000Z | def a(z, b):
print(z + b)
a(0, 0.0)
a('e', '')
| 8.666667 | 16 | 0.365385 | def a(z, b):
print(z + b)
a(0, 0.0)
a('e', '')
| true | true |
f72ad5b39fcaee399cd011abf25e5fda0c0342a6 | 24,914 | py | Python | jina/flow/mixin/async_crud.py | liushuigs/jina | b3550e901b2a340924330b5ba2801603e493c933 | [
"Apache-2.0"
] | null | null | null | jina/flow/mixin/async_crud.py | liushuigs/jina | b3550e901b2a340924330b5ba2801603e493c933 | [
"Apache-2.0"
] | 2 | 2021-02-15T01:40:38.000Z | 2021-02-15T02:00:21.000Z | jina/flow/mixin/async_crud.py | liushuigs/jina | b3550e901b2a340924330b5ba2801603e493c933 | [
"Apache-2.0"
] | null | null | null | import warnings
from typing import Union, Iterable, TextIO, Dict, Optional
import numpy as np
from ...clients.base import InputType, CallbackFnType
from ...enums import DataInputType
from ...helper import deprecated_alias
class AsyncCRUDFlowMixin:
"""The asynchronous version of the Mixin for CRUD in Flow"""
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def train(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do training on the current Flow
:param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
warnings.warn(f'{self.train} is under heavy refactoring', FutureWarning)
async for r in self._get_client(**kwargs).train(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Using numpy ndarray as the index source for the current Flow
:param array: the numpy ndarray data source
:param axis: iterate over that axis
:param size: the maximum number of the sub arrays
:param shuffle: shuffle the the numpy data source beforehand
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).index(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a numpy ndarray as the query source for searching on the current Flow
:param array: the numpy ndarray data source
:param axis: iterate over that axis
:param size: the maximum number of the sub arrays
:param shuffle: shuffle the the numpy data source beforehand
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).search(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param filepath: a text file that each line contains a document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in binary
:param line_format: the format of each line: ``json`` or ``csv``
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).index(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Dict[str, str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).index(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).index(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a set of files as the index source for indexing on the current Flow
:param patterns: The pattern may contain simple shell-style wildcards, e.g. '\*.py', '[\*.zip, \*.gz]'
:param recursive: If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
:param size: the maximum number of the files
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in binary mode
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).index(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a set of files as the query source for searching on the current Flow
:param patterns: The pattern may contain simple shell-style wildcards, e.g. '\*.py', '[\*.zip, \*.gz]'
:param recursive: If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
:param size: the maximum number of the files
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).search(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
async def search_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of files as the query source for searching on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).search(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def search_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).search(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of files as the query source for searching on the current Flow
:param filepath: a text file that each line contains a document
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in binary
:param line_format: the format of each line: ``json`` or ``csv``
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).search(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do indexing on the current Flow
It will start a :py:class:`CLIClient` and call :py:func:`index`.
:param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
async for r in self._get_client(**kwargs).index(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def update(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do updates on the current Flow
It will start a :py:class:`CLIClient` and call :py:func:`index`.
:param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
async for r in self._get_client(**kwargs).update(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def delete(
self,
ids: Iterable[str],
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do deletion on the current Flow
:param ids: An iterable of ids
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
async for r in self._get_client(**kwargs).delete(
ids, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do searching on the current Flow
It will start a :py:class:`CLIClient` and call :py:func:`search`.
:param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
async for r in self._get_client(**kwargs).search(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
| 40.70915 | 120 | 0.603837 | import warnings
from typing import Union, Iterable, TextIO, Dict, Optional
import numpy as np
from ...clients.base import InputType, CallbackFnType
from ...enums import DataInputType
from ...helper import deprecated_alias
class AsyncCRUDFlowMixin:
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def train(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
warnings.warn(f'{self.train} is under heavy refactoring', FutureWarning)
async for r in self._get_client(**kwargs).train(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).index(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).search(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).index(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Dict[str, str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).index(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).index(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).index(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).search(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
async def search_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).search(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def search_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).search(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).search(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).index(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def update(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).update(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def delete(
self,
ids: Iterable[str],
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).delete(
ids, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).search(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
| true | true |
f72ad5bc7ad2d8fb6d61ac7005b04ae01a495d56 | 1,629 | py | Python | packages/tool_util/tests/test_tool_linters.py | lawrence14701/galaxy | 7eb2fcb708e7b63e17800c87613ddfa5497c0654 | [
"CC-BY-3.0"
] | 2 | 2017-03-28T12:11:41.000Z | 2017-04-22T02:58:25.000Z | packages/tool_util/tests/test_tool_linters.py | lawrence14701/galaxy | 7eb2fcb708e7b63e17800c87613ddfa5497c0654 | [
"CC-BY-3.0"
] | 12 | 2020-07-24T23:55:19.000Z | 2021-12-19T11:40:06.000Z | packages/tool_util/tests/test_tool_linters.py | lawrence14701/galaxy | 7eb2fcb708e7b63e17800c87613ddfa5497c0654 | [
"CC-BY-3.0"
] | 1 | 2019-01-16T22:21:54.000Z | 2019-01-16T22:21:54.000Z | import pytest
from galaxy.tool_util.lint import LintContext
from galaxy.tool_util.linters import inputs
from galaxy.util import etree
NO_SECTIONS_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
</tool>
"""
NO_WHEN_IN_CONDITIONAL_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
<inputs>
<conditional name="labels">
<param name="label_select" type="select" label="Points to label">
<option value="none" selected="True">None</option>
</param>
</conditional>
</inputs>
</tool>
"""
TESTS = [
(NO_SECTIONS_XML, inputs.lint_inputs, lambda x: 'Found no input parameters.' in x.warn_messages),
(NO_WHEN_IN_CONDITIONAL_XML, inputs.lint_inputs, lambda x: 'No <when /> block found for select option \'none\' inside conditional \'labels\'' in x.warn_messages),
]
@pytest.mark.parametrize('tool_xml,lint_func,assert_func', TESTS, ids=['Lint no sections', 'lint no when'])
def test_tool_xml(tool_xml, lint_func, assert_func):
lint_ctx = LintContext('all')
tree = etree.ElementTree(element=etree.fromstring(tool_xml))
lint_ctx.lint(name="test_lint", lint_func=lint_func, lint_target=tree)
assert assert_func(lint_ctx)
| 39.731707 | 166 | 0.715163 | import pytest
from galaxy.tool_util.lint import LintContext
from galaxy.tool_util.linters import inputs
from galaxy.util import etree
NO_SECTIONS_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
</tool>
"""
NO_WHEN_IN_CONDITIONAL_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
<inputs>
<conditional name="labels">
<param name="label_select" type="select" label="Points to label">
<option value="none" selected="True">None</option>
</param>
</conditional>
</inputs>
</tool>
"""
TESTS = [
(NO_SECTIONS_XML, inputs.lint_inputs, lambda x: 'Found no input parameters.' in x.warn_messages),
(NO_WHEN_IN_CONDITIONAL_XML, inputs.lint_inputs, lambda x: 'No <when /> block found for select option \'none\' inside conditional \'labels\'' in x.warn_messages),
]
@pytest.mark.parametrize('tool_xml,lint_func,assert_func', TESTS, ids=['Lint no sections', 'lint no when'])
def test_tool_xml(tool_xml, lint_func, assert_func):
lint_ctx = LintContext('all')
tree = etree.ElementTree(element=etree.fromstring(tool_xml))
lint_ctx.lint(name="test_lint", lint_func=lint_func, lint_target=tree)
assert assert_func(lint_ctx)
| true | true |
f72ad5f44335464611bcb3461699a32b7602d505 | 7,802 | py | Python | virtual/lib/python3.6/site-packages/PIL/PsdImagePlugin.py | Ruterana/clone_instagram | a068587ef1d1a93ec8d1c08086bf11c0fb274b83 | [
"MIT"
] | 99 | 2019-10-09T16:14:46.000Z | 2022-03-17T02:23:47.000Z | virtual/lib/python3.6/site-packages/PIL/PsdImagePlugin.py | Ruterana/clone_instagram | a068587ef1d1a93ec8d1c08086bf11c0fb274b83 | [
"MIT"
] | 123 | 2019-09-10T14:48:01.000Z | 2019-11-28T21:24:06.000Z | virtual/lib/python3.6/site-packages/PIL/PsdImagePlugin.py | Ruterana/clone_instagram | a068587ef1d1a93ec8d1c08086bf11c0fb274b83 | [
"MIT"
] | 98 | 2019-10-17T14:48:28.000Z | 2022-01-21T03:33:38.000Z | #
# The Python Imaging Library
# $Id$
#
# Adobe PSD 2.5/3.0 file handling
#
# History:
# 1995-09-01 fl Created
# 1997-01-03 fl Read most PSD images
# 1997-01-18 fl Fixed P and CMYK support
# 2001-10-21 fl Added seek/tell support (for layers)
#
# Copyright (c) 1997-2001 by Secret Labs AB.
# Copyright (c) 1995-2001 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
# __version__ is deprecated and will be removed in a future version. Use
# PIL.__version__ instead.
__version__ = "0.4"
import io
from . import Image, ImageFile, ImagePalette
from ._binary import i8, i16be as i16, i32be as i32
MODES = {
# (photoshop mode, bits) -> (pil mode, required channels)
(0, 1): ("1", 1),
(0, 8): ("L", 1),
(1, 8): ("L", 1),
(2, 8): ("P", 1),
(3, 8): ("RGB", 3),
(4, 8): ("CMYK", 4),
(7, 8): ("L", 1), # FIXME: multilayer
(8, 8): ("L", 1), # duotone
(9, 8): ("LAB", 3),
}
# --------------------------------------------------------------------.
# read PSD images
def _accept(prefix):
return prefix[:4] == b"8BPS"
##
# Image plugin for Photoshop images.
class PsdImageFile(ImageFile.ImageFile):
format = "PSD"
format_description = "Adobe Photoshop"
_close_exclusive_fp_after_loading = False
def _open(self):
read = self.fp.read
#
# header
s = read(26)
if s[:4] != b"8BPS" or i16(s[4:]) != 1:
raise SyntaxError("not a PSD file")
psd_bits = i16(s[22:])
psd_channels = i16(s[12:])
psd_mode = i16(s[24:])
mode, channels = MODES[(psd_mode, psd_bits)]
if channels > psd_channels:
raise IOError("not enough channels")
self.mode = mode
self._size = i32(s[18:]), i32(s[14:])
#
# color mode data
size = i32(read(4))
if size:
data = read(size)
if mode == "P" and size == 768:
self.palette = ImagePalette.raw("RGB;L", data)
#
# image resources
self.resources = []
size = i32(read(4))
if size:
# load resources
end = self.fp.tell() + size
while self.fp.tell() < end:
read(4) # signature
id = i16(read(2))
name = read(i8(read(1)))
if not (len(name) & 1):
read(1) # padding
data = read(i32(read(4)))
if len(data) & 1:
read(1) # padding
self.resources.append((id, name, data))
if id == 1039: # ICC profile
self.info["icc_profile"] = data
#
# layer and mask information
self.layers = []
size = i32(read(4))
if size:
end = self.fp.tell() + size
size = i32(read(4))
if size:
self.layers = _layerinfo(self.fp)
self.fp.seek(end)
#
# image descriptor
self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels)
# keep the file open
self.__fp = self.fp
self.frame = 1
self._min_frame = 1
@property
def n_frames(self):
return len(self.layers)
@property
def is_animated(self):
return len(self.layers) > 1
def seek(self, layer):
if not self._seek_check(layer):
return
# seek to given layer (1..max)
try:
name, mode, bbox, tile = self.layers[layer - 1]
self.mode = mode
self.tile = tile
self.frame = layer
self.fp = self.__fp
return name, bbox
except IndexError:
raise EOFError("no such layer")
def tell(self):
# return layer number (0=image, 1..max=layers)
return self.frame
def load_prepare(self):
# create image memory if necessary
if not self.im or self.im.mode != self.mode or self.im.size != self.size:
self.im = Image.core.fill(self.mode, self.size, 0)
# create palette (optional)
if self.mode == "P":
Image.Image.load(self)
def _close__fp(self):
try:
if self.__fp != self.fp:
self.__fp.close()
except AttributeError:
pass
finally:
self.__fp = None
def _layerinfo(file):
# read layerinfo block
layers = []
read = file.read
for i in range(abs(i16(read(2)))):
# bounding box
y0 = i32(read(4))
x0 = i32(read(4))
y1 = i32(read(4))
x1 = i32(read(4))
# image info
info = []
mode = []
types = list(range(i16(read(2))))
if len(types) > 4:
continue
for i in types:
type = i16(read(2))
if type == 65535:
m = "A"
else:
m = "RGBA"[type]
mode.append(m)
size = i32(read(4))
info.append((m, size))
# figure out the image mode
mode.sort()
if mode == ["R"]:
mode = "L"
elif mode == ["B", "G", "R"]:
mode = "RGB"
elif mode == ["A", "B", "G", "R"]:
mode = "RGBA"
else:
mode = None # unknown
# skip over blend flags and extra information
read(12) # filler
name = ""
size = i32(read(4)) # length of the extra data field
combined = 0
if size:
data_end = file.tell() + size
length = i32(read(4))
if length:
file.seek(length - 16, io.SEEK_CUR)
combined += length + 4
length = i32(read(4))
if length:
file.seek(length, io.SEEK_CUR)
combined += length + 4
length = i8(read(1))
if length:
# Don't know the proper encoding,
# Latin-1 should be a good guess
name = read(length).decode("latin-1", "replace")
combined += length + 1
file.seek(data_end)
layers.append((name, mode, (x0, y0, x1, y1)))
# get tiles
i = 0
for name, mode, bbox in layers:
tile = []
for m in mode:
t = _maketile(file, m, bbox, 1)
if t:
tile.extend(t)
layers[i] = name, mode, bbox, tile
i += 1
return layers
def _maketile(file, mode, bbox, channels):
tile = None
read = file.read
compression = i16(read(2))
xsize = bbox[2] - bbox[0]
ysize = bbox[3] - bbox[1]
offset = file.tell()
if compression == 0:
#
# raw compression
tile = []
for channel in range(channels):
layer = mode[channel]
if mode == "CMYK":
layer += ";I"
tile.append(("raw", bbox, offset, layer))
offset = offset + xsize * ysize
elif compression == 1:
#
# packbits compression
i = 0
tile = []
bytecount = read(channels * ysize * 2)
offset = file.tell()
for channel in range(channels):
layer = mode[channel]
if mode == "CMYK":
layer += ";I"
tile.append(("packbits", bbox, offset, layer))
for y in range(ysize):
offset = offset + i16(bytecount[i : i + 2])
i += 2
file.seek(offset)
if offset & 1:
read(1) # padding
return tile
# --------------------------------------------------------------------
# registry
Image.register_open(PsdImageFile.format, PsdImageFile, _accept)
Image.register_extension(PsdImageFile.format, ".psd")
| 24.38125 | 81 | 0.481671 |
__version__ = "0.4"
import io
from . import Image, ImageFile, ImagePalette
from ._binary import i8, i16be as i16, i32be as i32
MODES = {
(0, 1): ("1", 1),
(0, 8): ("L", 1),
(1, 8): ("L", 1),
(2, 8): ("P", 1),
(3, 8): ("RGB", 3),
(4, 8): ("CMYK", 4),
(7, 8): ("L", 1),
(8, 8): ("L", 1),
(9, 8): ("LAB", 3),
}
def _accept(prefix):
return prefix[:4] == b"8BPS"
class PsdImageFile(ImageFile.ImageFile):
format = "PSD"
format_description = "Adobe Photoshop"
_close_exclusive_fp_after_loading = False
def _open(self):
read = self.fp.read
s = read(26)
if s[:4] != b"8BPS" or i16(s[4:]) != 1:
raise SyntaxError("not a PSD file")
psd_bits = i16(s[22:])
psd_channels = i16(s[12:])
psd_mode = i16(s[24:])
mode, channels = MODES[(psd_mode, psd_bits)]
if channels > psd_channels:
raise IOError("not enough channels")
self.mode = mode
self._size = i32(s[18:]), i32(s[14:])
size = i32(read(4))
if size:
data = read(size)
if mode == "P" and size == 768:
self.palette = ImagePalette.raw("RGB;L", data)
self.resources = []
size = i32(read(4))
if size:
end = self.fp.tell() + size
while self.fp.tell() < end:
read(4)
id = i16(read(2))
name = read(i8(read(1)))
if not (len(name) & 1):
read(1)
data = read(i32(read(4)))
if len(data) & 1:
read(1)
self.resources.append((id, name, data))
if id == 1039:
self.info["icc_profile"] = data
self.layers = []
size = i32(read(4))
if size:
end = self.fp.tell() + size
size = i32(read(4))
if size:
self.layers = _layerinfo(self.fp)
self.fp.seek(end)
self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels)
self.__fp = self.fp
self.frame = 1
self._min_frame = 1
@property
def n_frames(self):
return len(self.layers)
@property
def is_animated(self):
return len(self.layers) > 1
def seek(self, layer):
if not self._seek_check(layer):
return
try:
name, mode, bbox, tile = self.layers[layer - 1]
self.mode = mode
self.tile = tile
self.frame = layer
self.fp = self.__fp
return name, bbox
except IndexError:
raise EOFError("no such layer")
def tell(self):
return self.frame
def load_prepare(self):
if not self.im or self.im.mode != self.mode or self.im.size != self.size:
self.im = Image.core.fill(self.mode, self.size, 0)
if self.mode == "P":
Image.Image.load(self)
def _close__fp(self):
try:
if self.__fp != self.fp:
self.__fp.close()
except AttributeError:
pass
finally:
self.__fp = None
def _layerinfo(file):
layers = []
read = file.read
for i in range(abs(i16(read(2)))):
y0 = i32(read(4))
x0 = i32(read(4))
y1 = i32(read(4))
x1 = i32(read(4))
info = []
mode = []
types = list(range(i16(read(2))))
if len(types) > 4:
continue
for i in types:
type = i16(read(2))
if type == 65535:
m = "A"
else:
m = "RGBA"[type]
mode.append(m)
size = i32(read(4))
info.append((m, size))
mode.sort()
if mode == ["R"]:
mode = "L"
elif mode == ["B", "G", "R"]:
mode = "RGB"
elif mode == ["A", "B", "G", "R"]:
mode = "RGBA"
else:
mode = None
read(12)
name = ""
size = i32(read(4))
combined = 0
if size:
data_end = file.tell() + size
length = i32(read(4))
if length:
file.seek(length - 16, io.SEEK_CUR)
combined += length + 4
length = i32(read(4))
if length:
file.seek(length, io.SEEK_CUR)
combined += length + 4
length = i8(read(1))
if length:
# Latin-1 should be a good guess
name = read(length).decode("latin-1", "replace")
combined += length + 1
file.seek(data_end)
layers.append((name, mode, (x0, y0, x1, y1)))
# get tiles
i = 0
for name, mode, bbox in layers:
tile = []
for m in mode:
t = _maketile(file, m, bbox, 1)
if t:
tile.extend(t)
layers[i] = name, mode, bbox, tile
i += 1
return layers
def _maketile(file, mode, bbox, channels):
tile = None
read = file.read
compression = i16(read(2))
xsize = bbox[2] - bbox[0]
ysize = bbox[3] - bbox[1]
offset = file.tell()
if compression == 0:
#
# raw compression
tile = []
for channel in range(channels):
layer = mode[channel]
if mode == "CMYK":
layer += ";I"
tile.append(("raw", bbox, offset, layer))
offset = offset + xsize * ysize
elif compression == 1:
#
# packbits compression
i = 0
tile = []
bytecount = read(channels * ysize * 2)
offset = file.tell()
for channel in range(channels):
layer = mode[channel]
if mode == "CMYK":
layer += ";I"
tile.append(("packbits", bbox, offset, layer))
for y in range(ysize):
offset = offset + i16(bytecount[i : i + 2])
i += 2
file.seek(offset)
if offset & 1:
read(1) # padding
return tile
# --------------------------------------------------------------------
# registry
Image.register_open(PsdImageFile.format, PsdImageFile, _accept)
Image.register_extension(PsdImageFile.format, ".psd")
| true | true |
f72ad8ba1938d20c873989d306f99b76c1ee53bf | 11,515 | py | Python | qiskit/tools/jupyter/backend_overview.py | t-imamichi/qiskit-core | 8d2eeeac44f97af1e10514cdae4157e5923ff2e5 | [
"Apache-2.0"
] | 92 | 2018-06-05T11:18:38.000Z | 2018-07-01T23:50:44.000Z | qiskit/tools/jupyter/backend_overview.py | t-imamichi/qiskit-core | 8d2eeeac44f97af1e10514cdae4157e5923ff2e5 | [
"Apache-2.0"
] | 107 | 2018-06-05T08:41:19.000Z | 2018-07-02T12:10:53.000Z | qiskit/tools/jupyter/backend_overview.py | t-imamichi/qiskit-core | 8d2eeeac44f97af1e10514cdae4157e5923ff2e5 | [
"Apache-2.0"
] | 39 | 2018-06-05T09:55:56.000Z | 2018-07-02T08:47:35.000Z | # This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""A module for monitoring backends."""
import time
import threading
import types
from IPython.display import display
from IPython.core.magic import line_magic, Magics, magics_class
from IPython.core import magic_arguments
import matplotlib.pyplot as plt
import ipywidgets as widgets
from qiskit.tools.monitor.overview import get_unique_backends
from qiskit.visualization.gate_map import plot_gate_map
@magics_class
class BackendOverview(Magics):
    """A class of status magic functions."""
    @line_magic
    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        "-i", "--interval", type=float, default=60, help="Interval for status check."
    )
    def qiskit_backend_overview(self, line=""):
        """A Jupyter magic function to monitor backends.

        Builds a dashboard with a title banner, a column of row labels,
        and one ``backend_widget`` column per unique hardware backend,
        then starts a background thread that refreshes the status cells
        every ``--interval`` seconds (see ``update_backend_info``).
        """
        args = magic_arguments.parse_argstring(self.qiskit_backend_overview, line)
        unique_hardware_backends = get_unique_backends()
        # Inline-styled HTML banner shown above the grid.
        _value = "<h2 style ='color:#ffffff; background-color:#000000;"
        _value += "padding-top: 1%; padding-bottom: 1%;padding-left: 1%;"
        _value += "margin-top: 0px'>Backend Overview</h2>"
        backend_title = widgets.HTML(value=_value, layout=widgets.Layout(margin="0px 0px 0px 0px"))
        build_back_widgets = [backend_widget(b) for b in unique_hardware_backends]
        _backends = []
        # Sort backends by operational or not
        # (operational backends are prepended so they render first; the
        # same ordering is mirrored in _backends for the update thread).
        oper_ord_backends = []
        for n, back in enumerate(unique_hardware_backends):
            if back.status().operational:
                oper_ord_backends = [build_back_widgets[n]] + oper_ord_backends
                _backends = [back] + _backends
            else:
                oper_ord_backends = oper_ord_backends + [build_back_widgets[n]]
                _backends = _backends + [back]
        # Row labels; the order matches the child order inside each
        # backend_widget column, with margins hand-tuned to align rows.
        qubit_label = widgets.Label(value="Num. Qubits")
        qv_label = widgets.Label(value="Quantum Vol.")
        pend_label = widgets.Label(
            value="Pending Jobs", layout=widgets.Layout(margin="5px 0px 0px 0px")
        )
        least_label = widgets.Label(
            value="Least Busy", layout=widgets.Layout(margin="10px 0px 0px 0px")
        )
        oper_label = widgets.Label(
            value="Operational", layout=widgets.Layout(margin="5px 0px 0px 0px")
        )
        t12_label = widgets.Label(
            value="Avg. T1 / T2", layout=widgets.Layout(margin="10px 0px 0px 0px")
        )
        cx_label = widgets.Label(
            value="Avg. CX Err.", layout=widgets.Layout(margin="8px 0px 0px 0px")
        )
        meas_label = widgets.Label(
            value="Avg. Meas. Err.", layout=widgets.Layout(margin="8px 0px 0px 0px")
        )
        labels_widget = widgets.VBox(
            [
                qubit_label,
                qv_label,
                pend_label,
                oper_label,
                least_label,
                t12_label,
                cx_label,
                meas_label,
            ],
            layout=widgets.Layout(margin="295px 0px 0px 0px", min_width="100px"),
        )
        # One fixed-width (250px) grid column per backend; the GridBox
        # subclass owns the refresh thread so it can stop it on disposal.
        backend_grid = GridBox_with_thread(
            children=oper_ord_backends,
            layout=widgets.Layout(
                grid_template_columns="250px " * len(unique_hardware_backends),
                grid_template_rows="auto",
                grid_gap="0px 25px",
            ),
        )
        backend_grid._backends = _backends  # pylint: disable=attribute-defined-outside-init
        # Bind update_backend_info to the grid as a method and poll it on
        # a background thread at the requested interval.
        backend_grid._update = types.MethodType(  # pylint: disable=attribute-defined-outside-init
            update_backend_info, backend_grid
        )
        backend_grid._thread = threading.Thread(  # pylint: disable=attribute-defined-outside-init
            target=backend_grid._update, args=(args.interval,)
        )
        backend_grid._thread.start()
        back_box = widgets.HBox([labels_widget, backend_grid])
        back_monitor = widgets.VBox([backend_title, back_box])
        display(back_monitor)
class GridBox_with_thread(widgets.GridBox):  # pylint: disable=invalid-name
    """A GridBox that stops its attached polling thread when disposed."""
    def __del__(self):
        """Signal the monitor thread (if any) to stop, wait for it, then
        close the widget. Thread errors are swallowed: disposal must not
        raise."""
        thread = getattr(self, "_thread", None)
        if thread is not None:
            try:
                thread.do_run = False
                thread.join()
            except Exception:  # pylint: disable=broad-except
                pass
        self.close()
def backend_widget(backend):
    """Creates a backend widget.

    Builds one dashboard column for ``backend``: name, coupling-map plot,
    qubit count, quantum volume, pending-jobs bar, operational and
    least-busy placeholder cells, and averaged T1/T2, CX-error, and
    readout-error figures computed from the backend's reported properties.
    """
    config = backend.configuration().to_dict()
    props = backend.properties().to_dict()
    name = widgets.HTML(value=f"<h4>{backend.name()}</h4>", layout=widgets.Layout())
    num_qubits = config["n_qubits"]
    # Quantum volume may be missing or falsy in the configuration; show
    # "-" in that case.
    qv_val = "-"
    if "quantum_volume" in config.keys():
        if config["quantum_volume"]:
            qv_val = config["quantum_volume"]
    qubit_count = widgets.HTML(
        value=f"<h5><b>{num_qubits}</b></h5>",
        layout=widgets.Layout(justify_content="center"),
    )
    qv_value = widgets.HTML(
        value=f"<h5>{qv_val}</h5>",
        layout=widgets.Layout(justify_content="center"),
    )
    # Fixed 250x250 output area for the coupling-map figure.
    cmap = widgets.Output(
        layout=widgets.Layout(
            min_width="250px",
            max_width="250px",
            max_height="250px",
            min_height="250px",
            justify_content="center",
            align_items="center",
            margin="0px 0px 0px 0px",
        )
    )
    with cmap:
        _cmap_fig = plot_gate_map(backend, plot_directed=False, label_qubits=False)
        if _cmap_fig is not None:
            display(_cmap_fig)
            # Prevents plot from showing up twice.
            plt.close(_cmap_fig)
    pending = generate_jobs_pending_widget()
    # Empty placeholders; update_backend_info fills these in later.
    is_oper = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))
    least_busy = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))
    # Per-qubit averages; assumes entry 0 of each qubit's property list is
    # T1 and entry 1 is T2 (matches the avg_t1/avg_t2 naming) — TODO confirm
    # against the BackendProperties schema.
    t1_units = props["qubits"][0][0]["unit"]
    avg_t1 = round(sum(q[0]["value"] for q in props["qubits"]) / num_qubits, 1)
    avg_t2 = round(sum(q[1]["value"] for q in props["qubits"]) / num_qubits, 1)
    t12_widget = widgets.HTML(
        value=f"<h5>{avg_t1} / {avg_t2} {t1_units}</h5>",
        layout=widgets.Layout(),
    )
    # Average two-qubit (cx) gate error, skipping gates reported with
    # error exactly 1.0 (effectively disabled).
    avg_cx_err = "NA"
    if config["coupling_map"]:
        sum_cx_err = 0
        num_cx = 0
        for gate in props["gates"]:
            if gate["gate"] == "cx":
                for param in gate["parameters"]:
                    if param["name"] == "gate_error":
                        # Value == 1.0 means gate effectively off
                        if param["value"] != 1.0:
                            sum_cx_err += param["value"]
                            num_cx += 1
        if num_cx > 0:
            avg_cx_err = round(sum_cx_err / num_cx, 4)
    cx_widget = widgets.HTML(value=f"<h5>{avg_cx_err}</h5>", layout=widgets.Layout())
    # Average readout (measurement) error over all qubits.
    avg_meas_err = 0
    for qub in props["qubits"]:
        for item in qub:
            if item["name"] == "readout_error":
                avg_meas_err += item["value"]
    avg_meas_err = round(avg_meas_err / num_qubits, 4)
    meas_widget = widgets.HTML(value=f"<h5>{avg_meas_err}</h5>", layout=widgets.Layout())
    # Child order is load-bearing: update_backend_info indexes children[2]
    # (qubit count), children[4] (pending bar), children[5] (operational)
    # and children[6] (least busy).
    out = widgets.VBox(
        [
            name,
            cmap,
            qubit_count,
            qv_value,
            pending,
            is_oper,
            least_busy,
            t12_widget,
            cx_widget,
            meas_widget,
        ],
        layout=widgets.Layout(display="inline-flex", flex_flow="column", align_items="center"),
    )
    # Liveness flag; update_backend_info clears it when status() fails.
    out._is_alive = True
    return out
def update_backend_info(self, interval=60):
    """Update the monitor widgets in place.

    Runs on the thread started by ``qiskit_backend_overview`` and bound
    to the grid via ``types.MethodType``; polls every backend's status
    each ``interval`` seconds and rewrites the widget cells.  The loop
    exits when the thread's ``do_run`` flag is cleared (see
    ``GridBox_with_thread.__del__``) or when every backend widget has
    been marked dead.

    Args:
        interval (float): seconds between status refreshes.
    """
    my_thread = threading.current_thread()
    current_interval = 0
    started = False
    all_dead = False
    stati = [None] * len(self._backends)
    while getattr(my_thread, "do_run", True) and not all_dead:
        if current_interval == interval or started is False:
            for ind, back in enumerate(self._backends):
                # children[2] is the qubit-count HTML cell; its leading
                # markup doubles as a liveness indicator (red == dead).
                _value = self.children[ind].children[2].value
                _head = _value.split("<b>")[0]
                try:
                    stati[ind] = back.status()
                except Exception:  # pylint: disable=broad-except
                    self.children[ind].children[2].value = _value.replace(
                        _head, "<h5 style='color:#ff5c49'>"
                    )
                    self.children[ind]._is_alive = False
                else:
                    self.children[ind]._is_alive = True
                    self.children[ind].children[2].value = _value.replace(_head, "<h5>")
            # Only rank backends whose status is known: a backend whose
            # status() call has failed on every pass so far still has
            # stati[ind] is None; previously such entries crashed the
            # thread with AttributeError on .pending_jobs.
            idx = [ind for ind, status in enumerate(stati) if status is not None]
            pending = {ind: stati[ind].pending_jobs for ind in idx}
            # Least-busy *operational* backend; sorted() is stable, so
            # ties break by index, matching the old zip-sort behaviour.
            # least_pending_idx stays None when nothing is operational
            # (previously an UnboundLocalError).
            least_pending_idx = None
            for ind in sorted(idx, key=pending.get):
                if stati[ind].operational:
                    least_pending_idx = ind
                    break
            for var in idx:
                if var == least_pending_idx:
                    self.children[var].children[6].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[6].value = "<h5 style='color:#dc267f'>False</h5>"
                # Grow the progress-bar maximum so the bar never saturates.
                self.children[var].children[4].children[1].max = max(
                    self.children[var].children[4].children[1].max, pending[var] + 10
                )
                self.children[var].children[4].children[1].value = pending[var]
                if stati[var].operational:
                    self.children[var].children[5].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[5].value = "<h5 style='color:#dc267f'>False</h5>"
            started = True
            current_interval = 0
        time.sleep(1)
        all_dead = not any(wid._is_alive for wid in self.children)
        current_interval += 1
def generate_jobs_pending_widget():
    """Build the pending-jobs progress-bar row.

    Returns an HBox of [current-value label, IntProgress bar, max label];
    the two labels track the bar's ``value`` and ``max`` traits through
    observers so they stay in sync as update_backend_info writes the bar.
    """
    progress = widgets.IntProgress(
        value=0,
        min=0,
        max=50,
        description="",
        orientation="horizontal",
        layout=widgets.Layout(max_width="180px"),
    )
    progress.style.bar_color = "#71cddd"
    current_label = widgets.Label(
        value=str(progress.value), layout=widgets.Layout(min_width="auto")
    )
    max_label = widgets.Label(value=str(progress.max), layout=widgets.Layout(min_width="auto"))
    def _sync_max(change):
        max_label.value = str(change["new"])
    def _sync_value(change):
        current_label.value = str(change["new"])
    progress.observe(_sync_max, names="max")
    progress.observe(_sync_value, names="value")
    return widgets.HBox(
        [current_label, progress, max_label],
        layout=widgets.Layout(max_width="250px", min_width="250px", justify_content="center"),
    )
import time
import threading
import types
from IPython.display import display
from IPython.core.magic import line_magic, Magics, magics_class
from IPython.core import magic_arguments
import matplotlib.pyplot as plt
import ipywidgets as widgets
from qiskit.tools.monitor.overview import get_unique_backends
from qiskit.visualization.gate_map import plot_gate_map
@magics_class
class BackendOverview(Magics):
@line_magic
@magic_arguments.magic_arguments()
@magic_arguments.argument(
"-i", "--interval", type=float, default=60, help="Interval for status check."
)
def qiskit_backend_overview(self, line=""):
args = magic_arguments.parse_argstring(self.qiskit_backend_overview, line)
unique_hardware_backends = get_unique_backends()
_value = "<h2 style ='color:#ffffff; background-color:#000000;"
_value += "padding-top: 1%; padding-bottom: 1%;padding-left: 1%;"
_value += "margin-top: 0px'>Backend Overview</h2>"
backend_title = widgets.HTML(value=_value, layout=widgets.Layout(margin="0px 0px 0px 0px"))
build_back_widgets = [backend_widget(b) for b in unique_hardware_backends]
_backends = []
oper_ord_backends = []
for n, back in enumerate(unique_hardware_backends):
if back.status().operational:
oper_ord_backends = [build_back_widgets[n]] + oper_ord_backends
_backends = [back] + _backends
else:
oper_ord_backends = oper_ord_backends + [build_back_widgets[n]]
_backends = _backends + [back]
qubit_label = widgets.Label(value="Num. Qubits")
qv_label = widgets.Label(value="Quantum Vol.")
pend_label = widgets.Label(
value="Pending Jobs", layout=widgets.Layout(margin="5px 0px 0px 0px")
)
least_label = widgets.Label(
value="Least Busy", layout=widgets.Layout(margin="10px 0px 0px 0px")
)
oper_label = widgets.Label(
value="Operational", layout=widgets.Layout(margin="5px 0px 0px 0px")
)
t12_label = widgets.Label(
value="Avg. T1 / T2", layout=widgets.Layout(margin="10px 0px 0px 0px")
)
cx_label = widgets.Label(
value="Avg. CX Err.", layout=widgets.Layout(margin="8px 0px 0px 0px")
)
meas_label = widgets.Label(
value="Avg. Meas. Err.", layout=widgets.Layout(margin="8px 0px 0px 0px")
)
labels_widget = widgets.VBox(
[
qubit_label,
qv_label,
pend_label,
oper_label,
least_label,
t12_label,
cx_label,
meas_label,
],
layout=widgets.Layout(margin="295px 0px 0px 0px", min_width="100px"),
)
backend_grid = GridBox_with_thread(
children=oper_ord_backends,
layout=widgets.Layout(
grid_template_columns="250px " * len(unique_hardware_backends),
grid_template_rows="auto",
grid_gap="0px 25px",
),
)
backend_grid._backends = _backends
backend_grid._update = types.MethodType(
update_backend_info, backend_grid
)
backend_grid._thread = threading.Thread(
target=backend_grid._update, args=(args.interval,)
)
backend_grid._thread.start()
back_box = widgets.HBox([labels_widget, backend_grid])
back_monitor = widgets.VBox([backend_title, back_box])
display(back_monitor)
class GridBox_with_thread(widgets.GridBox):
def __del__(self):
if hasattr(self, "_thread"):
try:
self._thread.do_run = False
self._thread.join()
except Exception:
pass
self.close()
def backend_widget(backend):
config = backend.configuration().to_dict()
props = backend.properties().to_dict()
name = widgets.HTML(value=f"<h4>{backend.name()}</h4>", layout=widgets.Layout())
num_qubits = config["n_qubits"]
qv_val = "-"
if "quantum_volume" in config.keys():
if config["quantum_volume"]:
qv_val = config["quantum_volume"]
qubit_count = widgets.HTML(
value=f"<h5><b>{num_qubits}</b></h5>",
layout=widgets.Layout(justify_content="center"),
)
qv_value = widgets.HTML(
value=f"<h5>{qv_val}</h5>",
layout=widgets.Layout(justify_content="center"),
)
cmap = widgets.Output(
layout=widgets.Layout(
min_width="250px",
max_width="250px",
max_height="250px",
min_height="250px",
justify_content="center",
align_items="center",
margin="0px 0px 0px 0px",
)
)
with cmap:
_cmap_fig = plot_gate_map(backend, plot_directed=False, label_qubits=False)
if _cmap_fig is not None:
display(_cmap_fig)
plt.close(_cmap_fig)
pending = generate_jobs_pending_widget()
is_oper = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))
least_busy = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))
t1_units = props["qubits"][0][0]["unit"]
avg_t1 = round(sum(q[0]["value"] for q in props["qubits"]) / num_qubits, 1)
avg_t2 = round(sum(q[1]["value"] for q in props["qubits"]) / num_qubits, 1)
t12_widget = widgets.HTML(
value=f"<h5>{avg_t1} / {avg_t2} {t1_units}</h5>",
layout=widgets.Layout(),
)
avg_cx_err = "NA"
if config["coupling_map"]:
sum_cx_err = 0
num_cx = 0
for gate in props["gates"]:
if gate["gate"] == "cx":
for param in gate["parameters"]:
if param["name"] == "gate_error":
if param["value"] != 1.0:
sum_cx_err += param["value"]
num_cx += 1
if num_cx > 0:
avg_cx_err = round(sum_cx_err / num_cx, 4)
cx_widget = widgets.HTML(value=f"<h5>{avg_cx_err}</h5>", layout=widgets.Layout())
avg_meas_err = 0
for qub in props["qubits"]:
for item in qub:
if item["name"] == "readout_error":
avg_meas_err += item["value"]
avg_meas_err = round(avg_meas_err / num_qubits, 4)
meas_widget = widgets.HTML(value=f"<h5>{avg_meas_err}</h5>", layout=widgets.Layout())
out = widgets.VBox(
[
name,
cmap,
qubit_count,
qv_value,
pending,
is_oper,
least_busy,
t12_widget,
cx_widget,
meas_widget,
],
layout=widgets.Layout(display="inline-flex", flex_flow="column", align_items="center"),
)
out._is_alive = True
return out
def update_backend_info(self, interval=60):
my_thread = threading.current_thread()
current_interval = 0
started = False
all_dead = False
stati = [None] * len(self._backends)
while getattr(my_thread, "do_run", True) and not all_dead:
if current_interval == interval or started is False:
for ind, back in enumerate(self._backends):
_value = self.children[ind].children[2].value
_head = _value.split("<b>")[0]
try:
_status = back.status()
stati[ind] = _status
except Exception:
self.children[ind].children[2].value = _value.replace(
_head, "<h5 style='color:#ff5c49'>"
)
self.children[ind]._is_alive = False
else:
self.children[ind]._is_alive = True
self.children[ind].children[2].value = _value.replace(_head, "<h5>")
idx = list(range(len(self._backends)))
pending = [s.pending_jobs for s in stati]
_, least_idx = zip(*sorted(zip(pending, idx)))
for ind in least_idx:
if stati[ind].operational:
least_pending_idx = ind
break
for var in idx:
if var == least_pending_idx:
self.children[var].children[6].value = "<h5 style='color:#34bc6e'>True</h5>"
else:
self.children[var].children[6].value = "<h5 style='color:#dc267f'>False</h5>"
self.children[var].children[4].children[1].max = max(
self.children[var].children[4].children[1].max, pending[var] + 10
)
self.children[var].children[4].children[1].value = pending[var]
if stati[var].operational:
self.children[var].children[5].value = "<h5 style='color:#34bc6e'>True</h5>"
else:
self.children[var].children[5].value = "<h5 style='color:#dc267f'>False</h5>"
started = True
current_interval = 0
time.sleep(1)
all_dead = not any(wid._is_alive for wid in self.children)
current_interval += 1
def generate_jobs_pending_widget():
pbar = widgets.IntProgress(
value=0,
min=0,
max=50,
description="",
orientation="horizontal",
layout=widgets.Layout(max_width="180px"),
)
pbar.style.bar_color = "#71cddd"
pbar_current = widgets.Label(value=str(pbar.value), layout=widgets.Layout(min_width="auto"))
pbar_max = widgets.Label(value=str(pbar.max), layout=widgets.Layout(min_width="auto"))
def _on_max_change(change):
pbar_max.value = str(change["new"])
def _on_val_change(change):
pbar_current.value = str(change["new"])
pbar.observe(_on_max_change, names="max")
pbar.observe(_on_val_change, names="value")
jobs_widget = widgets.HBox(
[pbar_current, pbar, pbar_max],
layout=widgets.Layout(max_width="250px", min_width="250px", justify_content="center"),
)
return jobs_widget
| true | true |
f72ada2ce523c5d4764bb97fbbec0c1d62c192e2 | 897 | py | Python | idaes/generic_models/unit_models/column_models/__init__.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 112 | 2019-02-11T23:16:36.000Z | 2022-03-23T20:59:57.000Z | idaes/generic_models/unit_models/column_models/__init__.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 621 | 2019-03-01T14:44:12.000Z | 2022-03-31T19:49:25.000Z | idaes/generic_models/unit_models/column_models/__init__.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 154 | 2019-02-01T23:46:33.000Z | 2022-03-23T15:07:10.000Z | #################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
from .condenser import Condenser
from .reboiler import Reboiler
from .tray import Tray
from .tray_column import TrayColumn
| 52.764706 | 81 | 0.654404 | true | true | |
f72adb7883b52f3f1c6bf8306f57b1dd0008ab29 | 868 | py | Python | enviorment/colors.py | JLMadsen/TetrisAI | c6f2ef47a57e60b1ec73666406931ca46c9d1233 | [
"MIT"
] | 1 | 2020-11-23T22:11:33.000Z | 2020-11-23T22:11:33.000Z | enviorment/colors.py | JLMadsen/TetrisAI | c6f2ef47a57e60b1ec73666406931ca46c9d1233 | [
"MIT"
] | 1 | 2021-07-13T15:31:00.000Z | 2021-07-13T15:31:00.000Z | enviorment/colors.py | JLMadsen/TetrisAI | c6f2ef47a57e60b1ec73666406931ca46c9d1233 | [
"MIT"
] | 1 | 2021-02-02T14:11:57.000Z | 2021-02-02T14:11:57.000Z | class Color:
    """RGB color constants (R, G, B tuples in 0-255)."""
    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0 )
    GRAY = (100, 100, 100)
    RED = (220, 20, 60 )
    GREEN = (50, 205, 50 )
    YELLOW = (255, 255, 0 )
    PURPLE = (218, 112, 214)
    # NOTE(review): YELLOW and PURPLE are absent from ALL — confirm
    # whether that is intentional.
    ALL = [WHITE, BLACK, GRAY, RED, GREEN]
# ANSI escape codes and helpers, used only for coloring text printed to the terminal
class bcolors:
    """ANSI escape sequences for terminal text styling."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'

def _wrap(code, msg):
    """Wrap *msg* in the ANSI *code* and reset formatting afterwards."""
    return code + msg + bcolors.ENDC

def green(msg):
    """Return *msg* wrapped in the green escape sequence."""
    return _wrap(bcolors.OKGREEN, msg)

def header(msg):
    """Return *msg* wrapped in the header (magenta) escape sequence."""
    return _wrap(bcolors.HEADER, msg)

def fail(msg):
    """Return *msg* wrapped in the red (failure) escape sequence."""
    return _wrap(bcolors.FAIL, msg)

def cyan(msg):
    """Return *msg* wrapped in the cyan escape sequence."""
    return _wrap(bcolors.OKCYAN, msg)

def warning(msg):
    """Return *msg* wrapped in the yellow (warning) escape sequence."""
    return _wrap(bcolors.WARNING, msg)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0 )
GRAY = (100, 100, 100)
RED = (220, 20, 60 )
GREEN = (50, 205, 50 )
YELLOW = (255, 255, 0 )
PURPLE = (218, 112, 214)
ALL = [WHITE, BLACK, GRAY, RED, GREEN]
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKCYAN = '\033[96m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(msg):
return bcolors.OKGREEN+msg+bcolors.ENDC
def header(msg):
return bcolors.HEADER+msg+bcolors.ENDC
def fail(msg):
return bcolors.FAIL+msg+bcolors.ENDC
def cyan(msg):
return bcolors.OKCYAN+msg+bcolors.ENDC
def warning(msg):
return bcolors.WARNING+msg+bcolors.ENDC | true | true |
f72adc47d855b9bd8cfb880f4445828ea9fe2109 | 9,267 | py | Python | pysot/datasets/dataset_template.py | wattanapong/DFA | c05851beca2f8739f80531eb4de2f61639715cab | [
"Apache-2.0"
] | null | null | null | pysot/datasets/dataset_template.py | wattanapong/DFA | c05851beca2f8739f80531eb4de2f61639715cab | [
"Apache-2.0"
] | null | null | null | pysot/datasets/dataset_template.py | wattanapong/DFA | c05851beca2f8739f80531eb4de2f61639715cab | [
"Apache-2.0"
] | null | null | null | # Copyright (c) SenseTime. All Rights Reserved.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import logging
import sys
import os
import cv2
import numpy as np
from torch.utils.data import Dataset
from pysot.utils.bbox import center2corner, Center
from pysot.datasets.anchor_target import AnchorTarget
from pysot.datasets.augmentation import Augmentation
from pysot.core.config import cfg
logger = logging.getLogger("global")
# setting opencv
# Presumably disables OpenCL in OpenCV on Python 3 (a common workaround
# for cv2 instability in data-loader worker processes) — confirm intent.
pyv = sys.version[0]  # major-version character; kept for compatibility
# sys.version[0] is already a single character, so the original
# `pyv[0] == '3'` only worked by accident; compare version_info instead.
if sys.version_info[0] == 3:
    cv2.ocl.setUseOpenCL(False)
class SubDataset(object):
    """One source dataset described by a JSON annotation file.

    Loads the annotation metadata, drops degenerate boxes / empty tracks /
    empty videos, and exposes the sampling helpers used by ``TrkDataset``.

    Args:
        name: dataset name, used for logging.
        root: cropped-image root directory, relative to ``../../`` of this file.
        anno: annotation JSON path, relative to ``../../`` of this file.
        frame_range: max frame distance when sampling pairs (stored but
            unused in this template variant).
        num_use: number of video indices drawn per epoch; -1 means "all".
        start_idx: global index offset of this sub-dataset.
    """

    def __init__(self, name, root, anno, frame_range, num_use, start_idx):
        cur_path = os.path.dirname(os.path.realpath(__file__))
        self.name = name
        self.root = os.path.join(cur_path, '../../', root)
        self.anno = os.path.join(cur_path, '../../', anno)
        self.frame_range = frame_range
        self.num_use = num_use
        self.start_idx = start_idx
        logger.info("loading " + name)
        with open(self.anno, 'r') as f:
            meta_data = json.load(f)
            meta_data = self._filter_zero(meta_data)
        for video in list(meta_data.keys()):
            # Iterate over a snapshot of the track keys: empty tracks are
            # deleted inside the loop, and deleting from the dict being
            # iterated raises RuntimeError in Python 3.
            for track in list(meta_data[video].keys()):
                frames = meta_data[video][track]
                # Frame keys are digit strings like "000123"; non-digit
                # keys are ignored.
                frames = list(map(int,
                              filter(lambda x: x.isdigit(), frames.keys())))
                frames.sort()
                meta_data[video][track]['frames'] = frames
                if len(frames) <= 0:
                    logger.warning("{}/{} has no frames".format(video, track))
                    del meta_data[video][track]
        for video in list(meta_data.keys()):
            if len(meta_data[video]) <= 0:
                logger.warning("{} has no tracks".format(video))
                del meta_data[video]
        self.labels = meta_data
        self.num = len(self.labels)
        # -1 is the sentinel for "use every video once per epoch".
        self.num_use = self.num if self.num_use == -1 else self.num_use
        self.videos = list(meta_data.keys())
        logger.info("{} loaded".format(self.name))
        # Crop file naming scheme: <frame>.<track>.<x|z>.jpg
        self.path_format = '{}.{}.{}.jpg'
        self.pick = self.shuffle()

    def _filter_zero(self, meta_data):
        """Drop boxes with non-positive width/height, then empty tracks and videos."""
        meta_data_new = {}
        for video, tracks in meta_data.items():
            new_tracks = {}
            for trk, frames in tracks.items():
                new_frames = {}
                for frm, bbox in frames.items():
                    # bbox is either [x1, y1, x2, y2], [w, h], or a dict
                    # (dicts are kept untouched).
                    if not isinstance(bbox, dict):
                        if len(bbox) == 4:
                            x1, y1, x2, y2 = bbox
                            w, h = x2 - x1, y2 - y1
                        else:
                            w, h = bbox
                        if w <= 0 or h <= 0:
                            continue
                    new_frames[frm] = bbox
                if len(new_frames) > 0:
                    new_tracks[trk] = new_frames
            if len(new_tracks) > 0:
                meta_data_new[video] = new_tracks
        return meta_data_new

    def log(self):
        """Log a one-line summary of this sub-dataset's sampling setup."""
        logger.info("{} start-index {} select [{}/{}] path_format {}".format(
            self.name, self.start_idx, self.num_use,
            self.num, self.path_format))

    def shuffle(self):
        """Return ``num_use`` global video indices, shuffled, repeating as needed."""
        lists = list(range(self.start_idx, self.start_idx + self.num))
        pick = []
        while len(pick) < self.num_use:
            np.random.shuffle(lists)
            pick += lists
        return pick[:self.num_use]

    def get_image_anno(self, video, track, frame):
        """Return (crop image path, bbox annotation) for one frame of a track."""
        frame = "{:06d}".format(frame)
        image_path = os.path.join(self.root, video,
                                  self.path_format.format(frame, track, 'x'))
        image_anno = self.labels[video][track][frame]
        return image_path, image_anno

    # track is a tracked object within a video; video is one subfolder of the
    # dataset root, e.g. ILSVRC2015_train_00004000 under ILSVRC2015_VID_train_000{0-3}
    def get_positive_pair(self, index):
        """Sample a random template frame from a random track of video ``index``."""
        video_name = self.videos[index]
        video = self.labels[video_name]
        track = np.random.choice(list(video.keys()))
        track_info = video[track]
        frames = track_info['frames']
        template_frame = np.random.randint(0, len(frames))
        template_frame = frames[template_frame]
        return self.get_image_anno(video_name, track, template_frame)

    def get_random_target(self, index=-1):
        """Sample a random frame from a random track of video ``index``
        (a random video when ``index`` is -1)."""
        if index == -1:
            index = np.random.randint(0, self.num)
        video_name = self.videos[index]
        video = self.labels[video_name]
        track = np.random.choice(list(video.keys()))
        track_info = video[track]
        frames = track_info['frames']
        frame = np.random.choice(frames)
        return self.get_image_anno(video_name, track, frame)

    def __len__(self):
        return self.num
class TrkDataset(Dataset):
    """Template-only training dataset aggregating several ``SubDataset``s.

    Each ``__getitem__`` returns an augmented exemplar crop and its
    synthesized center bbox; cfg drives the sub-dataset list, the
    augmentation parameters, and the epoch length.
    """

    def __init__(self,):
        super(TrkDataset, self).__init__()
        # Sanity-check that the configured search/exemplar/stride sizes
        # produce the configured correlation output size.
        desired_size = (cfg.TRAIN.SEARCH_SIZE - cfg.TRAIN.EXEMPLAR_SIZE) / \
            cfg.ANCHOR.STRIDE + 1 + cfg.TRAIN.BASE_SIZE
        if desired_size != cfg.TRAIN.OUTPUT_SIZE:
            # ValueError (an Exception subclass) keeps existing
            # `except Exception` callers working.
            raise ValueError('size not match!')
        # create anchor target
        self.anchor_target = AnchorTarget()
        # create sub datasets; `start` accumulates the global index offset
        # so every video across all sub-datasets has a unique index.
        self.all_dataset = []
        start = 0
        self.num = 0
        for name in cfg.DATASET.NAMES:
            subdata_cfg = getattr(cfg.DATASET, name)
            sub_dataset = SubDataset(
                    name,
                    subdata_cfg.ROOT,
                    subdata_cfg.ANNO,
                    subdata_cfg.FRAME_RANGE,
                    subdata_cfg.NUM_USE,
                    start
                )
            start += sub_dataset.num
            self.num += sub_dataset.num_use
            sub_dataset.log()
            self.all_dataset.append(sub_dataset)
        # data augmentation for the exemplar (template) and search crops
        self.template_aug = Augmentation(
                cfg.DATASET.TEMPLATE.SHIFT,
                cfg.DATASET.TEMPLATE.SCALE,
                cfg.DATASET.TEMPLATE.BLUR,
                cfg.DATASET.TEMPLATE.FLIP,
                cfg.DATASET.TEMPLATE.COLOR
            )
        self.search_aug = Augmentation(
                cfg.DATASET.SEARCH.SHIFT,
                cfg.DATASET.SEARCH.SCALE,
                cfg.DATASET.SEARCH.BLUR,
                cfg.DATASET.SEARCH.FLIP,
                cfg.DATASET.SEARCH.COLOR
            )
        # Dataset length: VIDEOS_PER_EPOCH overrides the natural size when
        # positive, and the whole schedule is unrolled over all epochs.
        videos_per_epoch = cfg.DATASET.VIDEOS_PER_EPOCH
        self.num = videos_per_epoch if videos_per_epoch > 0 else self.num
        self.num *= cfg.TRAIN.EPOCH
        self.pick = self.shuffle()

    def shuffle(self):
        """Build the global sampling order: concatenate every sub-dataset's
        picks, shuffle, and repeat until ``self.num`` indices are collected."""
        pick = []
        m = 0
        while m < self.num:
            p = []
            for sub_dataset in self.all_dataset:
                sub_p = sub_dataset.pick
                p += sub_p
            np.random.shuffle(p)
            pick += p
            m = len(pick)
        logger.info("shuffle done!")
        logger.info("dataset length {}".format(self.num))
        return pick[:self.num]

    def _find_dataset(self, index):
        """Map a global video index to (sub_dataset, local_index).

        Returns None (implicitly) when ``index`` is beyond the last
        sub-dataset's range.
        """
        for dataset in self.all_dataset:
            if dataset.start_idx + dataset.num > index:
                return dataset, index - dataset.start_idx

    def _get_bbox(self, image, shape):
        """Synthesize the exemplar bbox at the image center.

        ``shape`` is either [x1, y1, x2, y2] or (w, h).  The box is padded
        by half its perimeter (context_amount = 0.5) and rescaled so the
        padded region maps onto EXEMPLAR_SIZE; crops are assumed to be
        pre-centered on the target.
        """
        imh, imw = image.shape[:2]
        if len(shape) == 4:
            w, h = shape[2]-shape[0], shape[3]-shape[1]
        else:
            w, h = shape
        context_amount = 0.5
        exemplar_size = cfg.TRAIN.EXEMPLAR_SIZE
        wc_z = w + context_amount * (w+h)
        hc_z = h + context_amount * (w+h)
        s_z = np.sqrt(wc_z * hc_z)
        scale_z = exemplar_size / s_z
        w = w*scale_z
        h = h*scale_z
        cx, cy = imw//2, imh//2
        bbox = center2corner(Center(cx, cy, w, h))
        return bbox

    def __len__(self):
        return self.num

    def __getitem__(self, index):
        index = self.pick[index]
        dataset, index = self._find_dataset(index)
        gray = cfg.DATASET.GRAY and cfg.DATASET.GRAY > np.random.random()
        neg = cfg.DATASET.NEG and cfg.DATASET.NEG > np.random.random()
        if neg:
            # Deliberate debug trap: negative (distractor) sampling was
            # removed from this template variant, so hitting this branch
            # drops into pdb instead of silently mis-training.
            print('please check this suspension due to it was removed negative function (distractor)')
            import pdb
            pdb.set_trace()
            template = dataset.get_random_target(index)
            # Kept for parity with the original sampling (consumes RNG).
            search = np.random.choice(self.all_dataset).get_random_target()
        else:
            template = dataset.get_positive_pair(index)
        if not os.path.exists(template[0]):
            print(template[0])
        # get image
        template_image = cv2.imread(template[0])
        # get bounding box (synthesized at the crop center)
        template_box = self._get_bbox(template_image, template[1])
        # augmentation
        template, _ = self.template_aug(template_image,
                                        template_box,
                                        cfg.TRAIN.EXEMPLAR_SIZE,
                                        gray=gray)
        # HWC uint8 -> CHW float32 for the network.
        template = template.transpose((2, 0, 1)).astype(np.float32)
        return {
            'template': template,
            'gt': template_box
        }
| 34.449814 | 107 | 0.555627 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import logging
import sys
import os
import cv2
import numpy as np
from torch.utils.data import Dataset
from pysot.utils.bbox import center2corner, Center
from pysot.datasets.anchor_target import AnchorTarget
from pysot.datasets.augmentation import Augmentation
from pysot.core.config import cfg
logger = logging.getLogger("global")
pyv = sys.version[0]
if pyv[0] == '3':
cv2.ocl.setUseOpenCL(False)
class SubDataset(object):
def __init__(self, name, root, anno, frame_range, num_use, start_idx):
cur_path = os.path.dirname(os.path.realpath(__file__))
self.name = name
self.root = os.path.join(cur_path, '../../', root)
self.anno = os.path.join(cur_path, '../../', anno)
self.frame_range = frame_range
self.num_use = num_use
self.start_idx = start_idx
logger.info("loading " + name)
with open(self.anno, 'r') as f:
meta_data = json.load(f)
meta_data = self._filter_zero(meta_data)
for video in list(meta_data.keys()):
for track in meta_data[video]:
frames = meta_data[video][track]
frames = list(map(int,
filter(lambda x: x.isdigit(), frames.keys())))
frames.sort()
meta_data[video][track]['frames'] = frames
if len(frames) <= 0:
logger.warning("{}/{} has no frames".format(video, track))
del meta_data[video][track]
for video in list(meta_data.keys()):
if len(meta_data[video]) <= 0:
logger.warning("{} has no tracks".format(video))
del meta_data[video]
self.labels = meta_data
self.num = len(self.labels)
self.num_use = self.num if self.num_use == -1 else self.num_use
self.videos = list(meta_data.keys())
logger.info("{} loaded".format(self.name))
self.path_format = '{}.{}.{}.jpg'
self.pick = self.shuffle()
def _filter_zero(self, meta_data):
meta_data_new = {}
for video, tracks in meta_data.items():
new_tracks = {}
for trk, frames in tracks.items():
new_frames = {}
for frm, bbox in frames.items():
if not isinstance(bbox, dict):
if len(bbox) == 4:
x1, y1, x2, y2 = bbox
w, h = x2 - x1, y2 - y1
else:
w, h = bbox
if w <= 0 or h <= 0:
continue
new_frames[frm] = bbox
if len(new_frames) > 0:
new_tracks[trk] = new_frames
if len(new_tracks) > 0:
meta_data_new[video] = new_tracks
return meta_data_new
def log(self):
logger.info("{} start-index {} select [{}/{}] path_format {}".format(
self.name, self.start_idx, self.num_use,
self.num, self.path_format))
def shuffle(self):
lists = list(range(self.start_idx, self.start_idx + self.num))
pick = []
while len(pick) < self.num_use:
np.random.shuffle(lists)
pick += lists
return pick[:self.num_use]
def get_image_anno(self, video, track, frame):
frame = "{:06d}".format(frame)
image_path = os.path.join(self.root, video,
self.path_format.format(frame, track, 'x'))
image_anno = self.labels[video][track][frame]
return image_path, image_anno
def get_positive_pair(self, index):
video_name = self.videos[index]
video = self.labels[video_name]
track = np.random.choice(list(video.keys()))
track_info = video[track]
frames = track_info['frames']
template_frame = np.random.randint(0, len(frames))
template_frame = frames[template_frame]
return self.get_image_anno(video_name, track, template_frame)
def get_random_target(self, index=-1):
if index == -1:
index = np.random.randint(0, self.num)
video_name = self.videos[index]
video = self.labels[video_name]
track = np.random.choice(list(video.keys()))
track_info = video[track]
frames = track_info['frames']
frame = np.random.choice(frames)
return self.get_image_anno(video_name, track, frame)
def __len__(self):
return self.num
class TrkDataset(Dataset):
    """Training dataset that merges several SubDatasets and serves augmented
    template crops for tracker training.

    NOTE(review): __getitem__ currently returns only the template tensor and
    its ground-truth box; the negative/search sampling branch appears to have
    been intentionally disabled (it drops into pdb if taken).
    """

    def __init__(self,):
        super(TrkDataset, self).__init__()

        # The score-map size implied by the crop sizes and anchor stride must
        # agree with the configured output size; fail fast otherwise.
        desired_size = (cfg.TRAIN.SEARCH_SIZE - cfg.TRAIN.EXEMPLAR_SIZE) / \
            cfg.ANCHOR.STRIDE + 1 + cfg.TRAIN.BASE_SIZE
        if desired_size != cfg.TRAIN.OUTPUT_SIZE:
            raise Exception('size not match!')

        self.anchor_target = AnchorTarget()

        # Build one SubDataset per configured dataset name. `start` offsets
        # global sample indices into each sub-dataset's index space.
        self.all_dataset = []
        start = 0
        self.num = 0
        for name in cfg.DATASET.NAMES:
            subdata_cfg = getattr(cfg.DATASET, name)
            sub_dataset = SubDataset(
                    name,
                    subdata_cfg.ROOT,
                    subdata_cfg.ANNO,
                    subdata_cfg.FRAME_RANGE,
                    subdata_cfg.NUM_USE,
                    start
                )
            start += sub_dataset.num
            self.num += sub_dataset.num_use
            sub_dataset.log()
            self.all_dataset.append(sub_dataset)

        # Augmentation pipelines for template and search crops.
        self.template_aug = Augmentation(
                cfg.DATASET.TEMPLATE.SHIFT,
                cfg.DATASET.TEMPLATE.SCALE,
                cfg.DATASET.TEMPLATE.BLUR,
                cfg.DATASET.TEMPLATE.FLIP,
                cfg.DATASET.TEMPLATE.COLOR
            )
        self.search_aug = Augmentation(
                cfg.DATASET.SEARCH.SHIFT,
                cfg.DATASET.SEARCH.SCALE,
                cfg.DATASET.SEARCH.BLUR,
                cfg.DATASET.SEARCH.FLIP,
                cfg.DATASET.SEARCH.COLOR
            )

        # Epoch length: cap per-epoch video count if configured, then scale
        # by the number of epochs (the whole schedule is one index space).
        videos_per_epoch = cfg.DATASET.VIDEOS_PER_EPOCH
        self.num = videos_per_epoch if videos_per_epoch > 0 else self.num
        self.num *= cfg.TRAIN.EPOCH
        self.pick = self.shuffle()

    def shuffle(self):
        # Repeatedly concatenate and shuffle every sub-dataset's pick list
        # until at least self.num indices are accumulated, then truncate.
        pick = []
        m = 0
        while m < self.num:
            p = []
            for sub_dataset in self.all_dataset:
                sub_p = sub_dataset.pick
                p += sub_p
            np.random.shuffle(p)
            pick += p
            m = len(pick)
        logger.info("shuffle done!")
        logger.info("dataset length {}".format(self.num))
        return pick[:self.num]

    def _find_dataset(self, index):
        # Map a global sample index to (sub_dataset, local index).
        for dataset in self.all_dataset:
            if dataset.start_idx + dataset.num > index:
                return dataset, index - dataset.start_idx

    def _get_bbox(self, image, shape):
        """Convert an annotation `shape` (4-corner box or (w, h) pair) into
        an exemplar-scaled corner box centered in `image`."""
        imh, imw = image.shape[:2]
        if len(shape) == 4:
            w, h = shape[2]-shape[0], shape[3]-shape[1]
        else:
            w, h = shape
        # Context padding identical to the SiamFC/SiamRPN crop convention.
        context_amount = 0.5
        exemplar_size = cfg.TRAIN.EXEMPLAR_SIZE
        wc_z = w + context_amount * (w+h)
        hc_z = h + context_amount * (w+h)
        s_z = np.sqrt(wc_z * hc_z)
        scale_z = exemplar_size / s_z
        w = w*scale_z
        h = h*scale_z
        cx, cy = imw//2, imh//2
        bbox = center2corner(Center(cx, cy, w, h))
        return bbox

    def __len__(self):
        return self.num

    def __getitem__(self, index):
        """Return one training sample: an augmented template crop in CHW
        float32 layout together with its ground-truth corner box."""
        index = self.pick[index]
        dataset, index = self._find_dataset(index)

        gray = cfg.DATASET.GRAY and cfg.DATASET.GRAY > np.random.random()
        neg = cfg.DATASET.NEG and cfg.DATASET.NEG > np.random.random()

        if neg:
            # NOTE(review): negative (distractor) sampling was removed; this
            # branch deliberately halts in the debugger if it is ever taken.
            print('please check this suspension due to it was removed negative function (distractor)')
            import pdb
            pdb.set_trace()
            template = dataset.get_random_target(index)
            search = np.random.choice(self.all_dataset).get_random_target()
        else:
            template = dataset.get_positive_pair(index)

        if not os.path.exists(template[0]):
            print(template[0])
        template_image = cv2.imread(template[0])
        template_box = self._get_bbox(template_image, template[1])
        template, _ = self.template_aug(template_image,
                                        template_box,
                                        cfg.TRAIN.EXEMPLAR_SIZE,
                                        gray=gray)
        # HWC uint8 -> CHW float32 for the network.
        template = template.transpose((2, 0, 1)).astype(np.float32)
        return {
                'template': template,
                'gt': template_box
               }
| true | true |
f72addc1225c0aa169e2bb36069de6d370480522 | 12,614 | py | Python | src/gluonnlp/data/utils.py | yifeim/gluon-nlp | ea30d3399d87404b731d513535af9a31a5672799 | [
"Apache-2.0"
] | null | null | null | src/gluonnlp/data/utils.py | yifeim/gluon-nlp | ea30d3399d87404b731d513535af9a31a5672799 | [
"Apache-2.0"
] | 2 | 2019-02-13T09:10:26.000Z | 2019-02-20T02:59:43.000Z | src/gluonnlp/data/utils.py | yifeim/gluon-nlp | ea30d3399d87404b731d513535af9a31a5672799 | [
"Apache-2.0"
] | 1 | 2019-02-13T03:07:06.000Z | 2019-02-13T03:07:06.000Z | # coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utility classes and functions. They help organize and keep statistics of datasets."""
from __future__ import absolute_import
from __future__ import print_function
__all__ = [
'Counter', 'count_tokens', 'concat_sequence', 'slice_sequence', 'train_valid_split',
'line_splitter', 'whitespace_splitter', 'Splitter'
]
import collections
import io
import os
import tarfile
import zipfile

import numpy as np

from mxnet.gluon.data import SimpleDataset
from mxnet.gluon.utils import _get_repo_url, download, check_sha1

from .. import _constants as C
class Counter(collections.Counter):  # pylint: disable=abstract-method
    """A token-frequency counter (thin extension of collections.Counter)."""

    def discard(self, min_freq, unknown_token):
        """Fold all tokens rarer than `min_freq` into `unknown_token`.

        Parameters
        ----------
        min_freq: int
            Tokens whose count is strictly below this threshold are removed
            and their counts accumulated under `unknown_token`.
        unknown_token: str
            The representation used for the discarded tokens.

        Returns
        -------
        Counter
            A new Counter instance; `self` is left unmodified.

        Examples
        --------
        >>> a = gluonnlp.data.Counter({'a': 10, 'b': 1, 'c': 1})
        >>> a.discard(3, '<unk>')
        Counter({'a': 10, '<unk>': 2})
        """
        kept = Counter({})
        discarded_total = 0
        for token, count in self.items():
            if count >= min_freq:
                kept[token] = count
            else:
                discarded_total += count
        kept[unknown_token] = kept.get(unknown_token, 0) + discarded_total
        return kept
class DefaultLookupDict(dict):
    """A dict whose failed lookups fall back to a fixed default value."""

    def __init__(self, default, d=None):
        # A truthy `d` seeds the dictionary contents; otherwise start empty.
        super(DefaultLookupDict, self).__init__(d if d else {})
        self._default = default

    def __getitem__(self, k):
        # Missing keys yield the default instead of raising KeyError.
        return super(DefaultLookupDict, self).get(k, self._default)
def count_tokens(tokens, to_lower=False, counter=None):
    r"""Count occurrences of each token in `tokens`.

    Parameters
    ----------
    tokens : list of str
        The source tokens to be counted.
    to_lower : bool, default False
        Whether to lower-case each token before counting.
    counter : Counter or None, default None
        An existing Counter to update in place. When None, a fresh Counter
        is created and returned instead.

    Returns
    -------
    Counter
        `counter` updated with the token counts, or a new Counter when
        `counter` is None.

    Examples
    --------
    >>> import re
    >>> source_str = ' Life is great ! \n life is good . \n'
    >>> source_str_tokens = filter(None, re.split(' |\n', source_str))
    >>> gluonnlp.data.count_tokens(source_str_tokens)
    Counter({'is': 2, 'Life': 1, 'great': 1, '!': 1, 'life': 1, 'good': 1, '.': 1})
    """
    source = (t.lower() for t in tokens) if to_lower else tokens
    if counter is None:
        return Counter(source)
    counter.update(source)
    return counter
def concat_sequence(sequences):
    """Flatten sequences of tokens into a single list.

    Falsy tokens (e.g. empty strings, None) are dropped.

    Parameters
    ----------
    sequences : list of list of object
        Sequences of tokens, each of which is an iterable of tokens.

    Returns
    -------
    Flattened list of tokens.
    """
    flattened = []
    for seq in sequences:
        flattened.extend(tok for tok in seq if tok)
    return flattened
def slice_sequence(sequence, length, pad_last=False, pad_val=C.PAD_TOKEN, overlap=0):
    """Chop a flat token list into fixed-length, possibly overlapping slices.

    Parameters
    ----------
    sequence : list of object
        A flat list of tokens.
    length : int
        The length of each produced slice.
    pad_last : bool, default False
        When True, the sequence is right-padded with `pad_val` so that the
        final (otherwise short) slice is kept; when False it is dropped.
    pad_val : object, default C.PAD_TOKEN
        Padding value used when `pad_last` is True. Should have the same
        type as the tokens.
    overlap : int, default 0
        Number of trailing items of each slice that are repeated at the
        start of the next one.

    Returns
    -------
    List of lists of tokens, each inner list of length `length`.

    Raises
    ------
    ValueError
        If `length` is not strictly greater than `overlap`.
    """
    if length <= overlap:
        raise ValueError('length needs to be larger than overlap')

    step = length - overlap
    padded = sequence
    if pad_last:
        padded = sequence + [pad_val] * _slice_pad_length(len(sequence),
                                                          length, overlap)
    num_samples = (len(padded) - length) // step + 1
    return [padded[i * step:i * step + length] for i in range(num_samples)]
def _slice_pad_length(num_items, length, overlap=0):
    """Number of pad items needed so fixed-length slicing drops no data.

    Parameters
    ----------
    num_items : int
        Number of items in the dataset before slicing.
    length : int
        The length of each slice.
    overlap : int, default 0
        Number of items shared between consecutive slices.

    Returns
    -------
    int
        Length of the required padding.

    Raises
    ------
    ValueError
        If `length` is not strictly greater than `overlap`.
    """
    if length <= overlap:
        raise ValueError('length needs to be larger than overlap')

    step = length - overlap
    leftover = (num_items - length) % step
    return (step - leftover) if leftover else 0
# SHA-1 checksums of the downloadable pre-built vocabulary files, keyed by
# dataset/model name. Used both to verify cached downloads and to derive the
# short hash embedded in the cached file name (see short_hash below).
_vocab_sha1 = {'wikitext-2': 'be36dc5238c2e7d69720881647ab72eb506d0131',
               'gbw': 'ebb1a287ca14d8fa6f167c3a779e5e7ed63ac69f',
               'WMT2014_src': '230ebb817b1d86950d71e2e765f192a4e4f34415',
               'WMT2014_tgt': '230ebb817b1d86950d71e2e765f192a4e4f34415',
               'book_corpus_wiki_en_cased': '2d62af22535ed51f35cc8e2abb607723c89c2636',
               'book_corpus_wiki_en_uncased': 'a66073971aa0b1a262453fe51342e57166a8abcf',
               'wiki_multilingual_cased': '71bb9e248dc75dce9227d3c8c16fde3993588b9e',
               'wiki_cn': 'a1e06f8e39ae51ab8a92b8458e6a658b8b1f72bf',
               'wiki_multilingual': '2b2514cc539047b9179e9d98a4e68c36db05c97a'}

# Template for the vocab download URL on the model repository.
_url_format = '{repo_url}gluon/dataset/vocab/{file_name}.zip'
def train_valid_split(dataset, valid_ratio=0.05):
    """Randomly partition `dataset` into training and validation sets.

    Parameters
    ----------
    dataset : list
        A list of samples.
    valid_ratio : float, default 0.05
        Fraction of samples assigned to the validation set; must lie in
        [0, 1]. The validation size is rounded up.

    Returns
    -------
    train : SimpleDataset
    valid : SimpleDataset

    Raises
    ------
    ValueError
        If `valid_ratio` is outside [0, 1].
    """
    if not 0.0 <= valid_ratio <= 1.0:
        raise ValueError('valid_ratio should be in [0, 1]')

    total = len(dataset)
    n_valid = np.ceil(total * valid_ratio).astype('int')
    order = np.arange(total)
    np.random.shuffle(order)
    valid = SimpleDataset([dataset[i] for i in order[:n_valid]])
    train = SimpleDataset([dataset[i] for i in order[n_valid:]])
    return train, valid
def short_hash(name):
    """Return the first 8 hex characters of the known vocab SHA-1 for `name`.

    Raises
    ------
    ValueError
        If no pre-built vocabulary is registered under `name`.
    """
    try:
        return _vocab_sha1[name][:8]
    except KeyError:
        raise ValueError('Vocabulary for {name} is not available.'.format(name=name))
def _load_pretrained_vocab(name, root=os.path.join('~', '.mxnet', 'models'), cls=None):
    """Load the accompanying vocabulary object for pre-trained model.

    Parameters
    ----------
    name : str
        Name of the vocabulary, usually the name of the dataset.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    cls : nlp.Vocab or nlp.vocab.BERTVocab, default nlp.Vocab

    Returns
    -------
    Vocab or nlp.bert.BERTVocab
        Loaded vocabulary object for the pre-trained model.
    """
    file_name = '{name}-{short_hash}'.format(name=name,
                                             short_hash=short_hash(name))
    root = os.path.expanduser(root)
    file_path = os.path.join(root, file_name+'.vocab')
    sha1_hash = _vocab_sha1[name]
    # Serve from the local cache when present and checksum-valid; otherwise
    # fall through to a (re-)download.
    if os.path.exists(file_path):
        if check_sha1(file_path, sha1_hash):
            return _load_vocab_file(file_path, cls)
        else:
            print('Detected mismatch in the content of model vocab file. Downloading again.')
    else:
        print('Vocab file is not found. Downloading.')
    if not os.path.exists(root):
        os.makedirs(root)
    # Vocab files are published as zip archives: download into the cache
    # directory, unpack, then discard the archive.
    zip_file_path = os.path.join(root, file_name+'.zip')
    repo_url = _get_repo_url()
    if repo_url[-1] != '/':
        repo_url = repo_url + '/'
    download(_url_format.format(repo_url=repo_url, file_name=file_name),
             path=zip_file_path,
             overwrite=True)
    with zipfile.ZipFile(zip_file_path) as zf:
        zf.extractall(root)
    os.remove(zip_file_path)
    # Re-verify after download; a mismatch here indicates a corrupted
    # transfer or stale upstream file.
    if check_sha1(file_path, sha1_hash):
        return _load_vocab_file(file_path, cls)
    else:
        raise ValueError('Downloaded file has different hash. Please try again.')
def _load_vocab_file(file_path, cls):
    """Deserialize a vocabulary object from a JSON vocab file.

    Parameters
    ----------
    file_path : str
        Path to the JSON-serialized vocabulary file.
    cls : type or None
        Class providing a ``from_json`` factory. Defaults to
        :class:`~gluonnlp.Vocab` when None.

    Returns
    -------
    The deserialized vocabulary instance.
    """
    # Vocab files are JSON written as UTF-8; decode explicitly so loading
    # does not depend on the platform's locale encoding. io.open is used
    # (rather than the encoding= kwarg of open) for Python 2 compatibility.
    with io.open(file_path, 'r', encoding='utf-8') as f:
        if cls is None:
            from ..vocab import Vocab
            cls = Vocab
        return cls.from_json(f.read())
def _get_home_dir():
    """Resolve the directory used to cache datasets/models/embeddings.

    Honors the MXNET_HOME environment variable, defaulting to ~/.mxnet,
    and expands the leading '~' to the user's home directory.
    """
    raw = os.environ.get('MXNET_HOME', os.path.join('~', '.mxnet'))
    return os.path.expanduser(raw)
def _extract_archive(file, target_dir):
    """Extract an archive file into a directory.

    Parameters
    ----------
    file : str
        Absolute path of the archive file. Recognized suffixes are
        .gz / .tar / .tgz (tar family) and .zip.
    target_dir : str
        Target directory the archive is uncompressed into.

    Raises
    ------
    Exception
        If the file suffix is not a recognized archive type.
    """
    if file.endswith(('.gz', '.tar', '.tgz')):
        archive = tarfile.open(file, 'r')
    elif file.endswith('.zip'):
        archive = zipfile.ZipFile(file, 'r')
    else:
        raise Exception('Unrecognized file type: ' + file)
    # Close the archive even when extraction fails, so the file handle is
    # never leaked on error.
    try:
        archive.extractall(path=target_dir)
    finally:
        archive.close()
def line_splitter(s):
    """Split a string into its lines (line-end characters removed).

    Parameters
    ----------
    s : str
        The string to be split.

    Returns
    --------
    List[str]
        The lines of `s`, as produced by str.splitlines.
    """
    lines = s.splitlines()
    return lines
def whitespace_splitter(s):
    """Split a string on runs of whitespace (space, tab, newline, etc.).

    Parameters
    ----------
    s : str
        The string to be split.

    Returns
    --------
    List[str]
        The whitespace-delimited pieces of `s`, as produced by str.split.
    """
    pieces = s.split()
    return pieces
class Splitter(object):
    """A callable that splits strings on a fixed separator.

    Parameters
    ----------
    separator : str or None
        Separator passed through to ``str.split``. None (the default)
        splits on runs of whitespace.
    """

    def __init__(self, separator=None):
        self._separator = separator

    def __call__(self, s):
        """Split `s` on the configured separator.

        Parameters
        ----------
        s : str
            The string to be split.

        Returns
        --------
        List[str]
            The resulting pieces.
        """
        pieces = s.split(self._separator)
        return pieces
| 30.616505 | 98 | 0.641589 |
from __future__ import absolute_import
from __future__ import print_function
__all__ = [
'Counter', 'count_tokens', 'concat_sequence', 'slice_sequence', 'train_valid_split',
'line_splitter', 'whitespace_splitter', 'Splitter'
]
import os
import collections
import zipfile
import tarfile
import numpy as np
from mxnet.gluon.data import SimpleDataset
from mxnet.gluon.utils import _get_repo_url, download, check_sha1
from .. import _constants as C
class Counter(collections.Counter):
    """Counter class for keeping token frequencies."""

    def discard(self, min_freq, unknown_token):
        """Return a new Counter in which tokens with count < `min_freq` are
        removed and their counts accumulated under `unknown_token`."""
        freq = 0
        ret = Counter({})
        for token, count in self.items():
            if count < min_freq:
                freq += count
            else:
                ret[token] = count
        # `unknown_token` absorbs all discarded counts (entry is created
        # even when freq is 0).
        ret[unknown_token] = ret.get(unknown_token, 0) + freq
        return ret
class DefaultLookupDict(dict):
    """Dictionary with a fixed fallback value for missing keys."""

    def __init__(self, default, d=None):
        # A truthy `d` seeds the dictionary contents.
        if d:
            super(DefaultLookupDict, self).__init__(d)
        else:
            super(DefaultLookupDict, self).__init__()
        self._default = default

    def __getitem__(self, k):
        # Missing keys yield the default instead of raising KeyError.
        return self.get(k, self._default)
def count_tokens(tokens, to_lower=False, counter=None):
    """Count tokens, optionally lower-casing them first.

    Updates and returns `counter` when one is given; otherwise returns a
    new Counter built from `tokens`.
    """
    if to_lower:
        tokens = [t.lower() for t in tokens]
    if counter is None:
        return Counter(tokens)
    else:
        counter.update(tokens)
        return counter
def concat_sequence(sequences):
    """Flatten nested token sequences into one list, dropping falsy tokens."""
    return [token for seq in sequences for token in seq if token]
def slice_sequence(sequence, length, pad_last=False, pad_val=C.PAD_TOKEN, overlap=0):
    """Slice a flat token list into length-`length` windows that overlap by
    `overlap`; optionally pad with `pad_val` so no trailing data is dropped.
    Raises ValueError unless length > overlap."""
    if length <= overlap:
        raise ValueError('length needs to be larger than overlap')
    if pad_last:
        pad_len = _slice_pad_length(len(sequence), length, overlap)
        sequence = sequence + [pad_val] * pad_len
    num_samples = (len(sequence)-length) // (length-overlap) + 1
    # Window i covers [i*(length-overlap), i*(length-overlap) + length).
    return [sequence[i*(length-overlap):((i+1)*length-i*overlap)] for i in range(num_samples)]
def _slice_pad_length(num_items, length, overlap=0):
    """Padding length needed so overlapped slicing of `num_items` items into
    length-`length` windows drops no data. Raises ValueError unless
    length > overlap."""
    if length <= overlap:
        raise ValueError('length needs to be larger than overlap')

    step = length-overlap
    span = num_items-length
    residual = span % step
    if residual:
        return step - residual
    else:
        return 0
_vocab_sha1 = {'wikitext-2': 'be36dc5238c2e7d69720881647ab72eb506d0131',
'gbw': 'ebb1a287ca14d8fa6f167c3a779e5e7ed63ac69f',
'WMT2014_src': '230ebb817b1d86950d71e2e765f192a4e4f34415',
'WMT2014_tgt': '230ebb817b1d86950d71e2e765f192a4e4f34415',
'book_corpus_wiki_en_cased': '2d62af22535ed51f35cc8e2abb607723c89c2636',
'book_corpus_wiki_en_uncased': 'a66073971aa0b1a262453fe51342e57166a8abcf',
'wiki_multilingual_cased': '71bb9e248dc75dce9227d3c8c16fde3993588b9e',
'wiki_cn': 'a1e06f8e39ae51ab8a92b8458e6a658b8b1f72bf',
'wiki_multilingual': '2b2514cc539047b9179e9d98a4e68c36db05c97a'}
_url_format = '{repo_url}gluon/dataset/vocab/{file_name}.zip'
def train_valid_split(dataset, valid_ratio=0.05):
    """Randomly split `dataset` into (train, valid) SimpleDatasets, with a
    ceil(len * valid_ratio)-sized validation set. valid_ratio must be in
    [0, 1]."""
    if not 0.0 <= valid_ratio <= 1.0:
        raise ValueError('valid_ratio should be in [0, 1]')

    num_train = len(dataset)
    num_valid = np.ceil(num_train * valid_ratio).astype('int')
    indices = np.arange(num_train)
    np.random.shuffle(indices)
    # First num_valid shuffled indices go to validation, the rest to train.
    valid = SimpleDataset([dataset[indices[i]] for i in range(num_valid)])
    train = SimpleDataset([dataset[indices[i + num_valid]] for i in range(num_train - num_valid)])
    return train, valid
def short_hash(name):
    """First 8 hex chars of the registered vocab SHA-1 for `name`; raises
    ValueError for unknown names."""
    if name not in _vocab_sha1:
        raise ValueError('Vocabulary for {name} is not available.'.format(name=name))
    return _vocab_sha1[name][:8]
def _load_pretrained_vocab(name, root=os.path.join('~', '.mxnet', 'models'), cls=None):
    """Load (downloading and caching if needed) the vocabulary accompanying
    the pre-trained model `name`, deserialized via `cls` (default Vocab)."""
    file_name = '{name}-{short_hash}'.format(name=name,
                                             short_hash=short_hash(name))
    root = os.path.expanduser(root)
    file_path = os.path.join(root, file_name+'.vocab')
    sha1_hash = _vocab_sha1[name]
    # Use the cached copy when it exists and its checksum matches.
    if os.path.exists(file_path):
        if check_sha1(file_path, sha1_hash):
            return _load_vocab_file(file_path, cls)
        else:
            print('Detected mismatch in the content of model vocab file. Downloading again.')
    else:
        print('Vocab file is not found. Downloading.')

    if not os.path.exists(root):
        os.makedirs(root)

    # Download the published zip archive, unpack it, and remove the archive.
    zip_file_path = os.path.join(root, file_name+'.zip')
    repo_url = _get_repo_url()
    if repo_url[-1] != '/':
        repo_url = repo_url + '/'
    download(_url_format.format(repo_url=repo_url, file_name=file_name),
             path=zip_file_path,
             overwrite=True)
    with zipfile.ZipFile(zip_file_path) as zf:
        zf.extractall(root)
    os.remove(zip_file_path)

    # Re-verify after download; mismatch means a corrupted transfer.
    if check_sha1(file_path, sha1_hash):
        return _load_vocab_file(file_path, cls)
    else:
        raise ValueError('Downloaded file has different hash. Please try again.')
def _load_vocab_file(file_path, cls):
    """Deserialize a vocabulary from the JSON file at `file_path` using
    `cls.from_json` (default: gluonnlp Vocab)."""
    with open(file_path, 'r') as f:
        if cls is None:
            # Imported lazily to avoid a circular import at module load time.
            from ..vocab import Vocab
            cls = Vocab
        return cls.from_json(f.read())
def _get_home_dir():
    """Get home directory for storing datasets/models/pre-trained word
    embeddings; honors MXNET_HOME, defaults to ~/.mxnet."""
    _home_dir = os.environ.get('MXNET_HOME', os.path.join('~', '.mxnet'))
    # expand ~ to actual path
    _home_dir = os.path.expanduser(_home_dir)
    return _home_dir
def _extract_archive(file, target_dir):
    """Extract a .gz/.tar/.tgz or .zip archive at `file` into `target_dir`;
    raises on unrecognized suffixes."""
    if file.endswith('.gz') or file.endswith('.tar') or file.endswith('.tgz'):
        archive = tarfile.open(file, 'r')
    elif file.endswith('.zip'):
        archive = zipfile.ZipFile(file, 'r')
    else:
        raise Exception('Unrecognized file type: ' + file)
    archive.extractall(path=target_dir)
    archive.close()
def line_splitter(s):
    """Split a string at newlines (str.splitlines semantics)."""
    return s.splitlines()
def whitespace_splitter(s):
    """Split a string at runs of whitespace (str.split semantics)."""
    return s.split()
class Splitter(object):
    """Callable that splits strings on a fixed separator (None = whitespace)."""

    def __init__(self, separator=None):
        self._separator = separator

    def __call__(self, s):
        """Split `s` on the configured separator."""
        return s.split(self._separator)
| true | true |
f72addc1825c766c27b5ea9433ca8b1b439ac3e5 | 33,419 | py | Python | cirq/ops/common_gates.py | philiptmassey/Cirq | b8b457c2fc484d76bf8a82a73f6ecc11756229a6 | [
"Apache-2.0"
] | null | null | null | cirq/ops/common_gates.py | philiptmassey/Cirq | b8b457c2fc484d76bf8a82a73f6ecc11756229a6 | [
"Apache-2.0"
] | null | null | null | cirq/ops/common_gates.py | philiptmassey/Cirq | b8b457c2fc484d76bf8a82a73f6ecc11756229a6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Quantum gates that are commonly used in the literature.
This module creates Gate instances for the following gates:
X,Y,Z: Pauli gates.
H,S: Clifford gates.
T: A non-Clifford gate.
CZ: Controlled phase gate.
CNOT: Controlled not gate.
SWAP: the swap gate.
ISWAP: a swap gate with a phase on the swapped subspace.
Each of these are implemented as EigenGates, which means that they can be
raised to a power (i.e. cirq.H**0.5). See the definition in EigenGate.
In addition MeasurementGate is defined and convenience methods for
measurements are provided
measure
measure_each
"""
from typing import (
Any, Callable, cast, Iterable, List, Optional, Tuple, Union,
)
import numpy as np
from cirq import linalg, protocols, value
from cirq.ops import gate_features, eigen_gate, raw_types, gate_operation
from cirq.type_workarounds import NotImplementedType
# Note: avoiding 'from/as' because it creates a circular dependency in python 2.
import cirq.ops.phased_x_gate
class XPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the X axis of the Bloch sphere.

    The unitary matrix of ``XPowGate(exponent=t)`` is:

        [[g·c, -i·g·s],
        [-i·g·s, g·c]]

    where:

        c = cos(π·t/2)
        s = sin(π·t/2)
        g = exp(i·π·t/2).

    Note in particular that this gate has a global phase factor of
    e^{i·π·t/2} vs the traditionally defined rotation matrices
    about the Pauli X axis. See `cirq.Rx` for rotations without the global
    phase. The global phase factor can be adjusted by using the `global_shift`
    parameter when initializing.

    `cirq.X`, the Pauli X gate, is an instance of this gate at exponent=1.
    """

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Only the exact Pauli-X case (exponent == 1) has a cheap in-place
        # specialization (a subspace swap); other exponents fall back to the
        # generic eigen-decomposition path by returning None.
        if self._exponent != 1:
            return None
        zero = args.subspace_index(0)
        one = args.subspace_index(1)
        args.available_buffer[zero] = args.target_tensor[one]
        args.available_buffer[one] = args.target_tensor[zero]
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.available_buffer *= p
        return args.available_buffer

    def _eigen_components(self):
        # Projectors onto the +1 and -1 eigenspaces of Pauli X.
        return [
            (0, np.array([[0.5, 0.5], [0.5, 0.5]])),
            (1, np.array([[0.5, -0.5], [-0.5, 0.5]])),
        ]

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 is exactly the Rx convention, so render as Rx.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Rx',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))

        return protocols.CircuitDiagramInfo(
            wire_symbols=('X',),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('x {0};\n', qubits[0])
        else:
            return args.format('rx({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])

    def _phase_by_(self, phase_turns, qubit_index):
        """See `cirq.SupportsPhase`."""
        return cirq.ops.phased_x_gate.PhasedXPowGate(
            exponent=self._exponent,
            phase_exponent=phase_turns * 2)

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'X'
        return 'X**{!r}'.format(self._exponent)

    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Rx(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.X'
            return '(cirq.X**{!r})'.format(self._exponent)
        return (
            'cirq.XPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class YPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the Y axis of the Bloch sphere.

    The unitary matrix of ``YPowGate(exponent=t)`` is:

        [[g·c, g·s],
        [-g·s, g·c]]

    where:

        c = cos(π·t/2)
        s = sin(π·t/2)
        g = exp(i·π·t/2).

    Note in particular that this gate has a global phase factor of
    e^{i·π·t/2} vs the traditionally defined rotation matrices
    about the Pauli Y axis. See `cirq.Ry` for rotations without the global
    phase. The global phase factor can be adjusted by using the `global_shift`
    parameter when initializing.

    `cirq.Y`, the Pauli Y gate, is an instance of this gate at exponent=1.
    """

    def _eigen_components(self):
        # Projectors onto the +1 and -1 eigenspaces of Pauli Y.
        return [
            (0, np.array([[0.5, -0.5j], [0.5j, 0.5]])),
            (1, np.array([[0.5, 0.5j], [-0.5j, 0.5]])),
        ]

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 is exactly the Ry convention, so render as Ry.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Ry',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))

        return protocols.CircuitDiagramInfo(
            wire_symbols=('Y',),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('y {0};\n', qubits[0])
        else:
            return args.format('ry({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])

    def _phase_by_(self, phase_turns, qubit_index):
        """See `cirq.SupportsPhase`."""
        # Y is X phased by a quarter turn, hence the extra 0.5 offset.
        return cirq.ops.phased_x_gate.PhasedXPowGate(
            exponent=self._exponent,
            phase_exponent=0.5 + phase_turns * 2)

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'Y'
        return 'Y**{!r}'.format(self._exponent)

    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Ry(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.Y'
            return '(cirq.Y**{!r})'.format(self._exponent)
        return (
            'cirq.YPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class ZPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the Z axis of the Bloch sphere.

    The unitary matrix of ``ZPowGate(exponent=t)`` is:

        [[1, 0],
        [0, g]]

    where:

        g = exp(i·π·t).

    Note in particular that this gate has a global phase factor of
    e^{i·π·t/2} vs the traditionally defined rotation matrices
    about the Pauli Z axis. See `cirq.Rz` for rotations without the global
    phase. The global phase factor can be adjusted by using the `global_shift`
    parameter when initializing.

    `cirq.Z`, the Pauli Z gate, is an instance of this gate at exponent=1.
    """

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # The gate is diagonal, so it can be applied in place by phasing the
        # |1> subspace — but only when the exponent is a concrete number.
        if protocols.is_parameterized(self):
            return None

        one = args.subspace_index(1)
        c = 1j**(self._exponent * 2)
        args.target_tensor[one] *= c
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor

    def _eigen_components(self):
        # Projectors onto |0> (eigenvalue phase 0) and |1> (phase 1).
        return [
            (0, np.diag([1, 0])),
            (1, np.diag([0, 1])),
        ]

    def _phase_by_(self, phase_turns: float, qubit_index: int):
        # Z-axis rotations are invariant under phasing (conjugation by
        # Z rotations), so the gate is returned unchanged.
        return self

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 is exactly the Rz convention, so render as Rz.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Rz',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))

        # Prefer the conventional T/S names for eighth/quarter turns.
        e = self._diagram_exponent(args)
        if e in [-0.25, 0.25]:
            return protocols.CircuitDiagramInfo(
                wire_symbols=('T',),
                exponent=cast(float, e) * 4)

        if e in [-0.5, 0.5]:
            return protocols.CircuitDiagramInfo(
                wire_symbols=('S',),
                exponent=cast(float, e) * 2)

        return protocols.CircuitDiagramInfo(
            wire_symbols=('Z',),
            exponent=e)

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('z {0};\n', qubits[0])
        else:
            return args.format('rz({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])

    def __str__(self) -> str:
        if self._exponent == 0.25:
            return 'T'
        if self._exponent == -0.25:
            return 'T**-1'
        if self._exponent == 0.5:
            return 'S'
        if self._exponent == -0.5:
            return 'S**-1'
        if self._exponent == 1:
            return 'Z'
        return 'Z**{}'.format(self._exponent)

    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Rz(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 0.25:
                return 'cirq.T'
            if self._exponent == -0.25:
                return '(cirq.T**-1)'
            if self._exponent == 0.5:
                return 'cirq.S'
            if self._exponent == -0.5:
                return '(cirq.S**-1)'
            if self._exponent == 1:
                return 'cirq.Z'
            return '(cirq.Z**{!r})'.format(self._exponent)
        return (
            'cirq.ZPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
@value.value_equality
class MeasurementGate(raw_types.Gate):
    """A gate that measures qubits in the computational basis.

    The measurement gate contains a key that is used to identify results
    of measurements.
    """

    def __init__(self,
                 key: str = '',
                 invert_mask: Tuple[bool, ...] = ()) -> None:
        """
        Args:
            key: The string key of the measurement.
            invert_mask: A list of values indicating whether the corresponding
                qubits should be flipped. The list's length must not be longer
                than the number of qubits, but it is permitted to be shorter.
                Qubits with indices past the end of the mask are not flipped.
        """
        self.key = key
        self.invert_mask = invert_mask or ()

    @staticmethod
    def is_measurement(op: Union[raw_types.Gate, raw_types.Operation]) -> bool:
        # Accepts either a bare gate or an operation wrapping one.
        if isinstance(op, MeasurementGate):
            return True
        if (isinstance(op, gate_operation.GateOperation) and
                isinstance(op.gate, MeasurementGate)):
            return True
        return False

    def with_bits_flipped(self, *bit_positions: int) -> 'MeasurementGate':
        """Toggles whether or not the measurement inverts various outputs."""
        # Extend the mask to cover the highest toggled position, then flip.
        old_mask = self.invert_mask or ()
        n = max(len(old_mask) - 1, *bit_positions) + 1
        new_mask = [k < len(old_mask) and old_mask[k] for k in range(n)]
        for b in bit_positions:
            new_mask[b] = not new_mask[b]
        return MeasurementGate(key=self.key, invert_mask=tuple(new_mask))

    def validate_args(self, qubits):
        # The invert mask may be shorter than the qubit list, never longer.
        if (self.invert_mask is not None and
                len(self.invert_mask) > len(qubits)):
            raise ValueError('len(invert_mask) > len(qubits)')

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        n = (max(1, len(self.invert_mask))
             if args.known_qubit_count is None
             else args.known_qubit_count)
        symbols = ['M'] * n

        # Show which output bits are negated.
        if self.invert_mask:
            for i, b in enumerate(self.invert_mask):
                if b:
                    symbols[i] = '!M'

        # Mention the measurement key.
        if (not args.known_qubits or
                self.key != _default_measurement_key(args.known_qubits)):
            symbols[0] += "('{}')".format(self.key)

        return protocols.CircuitDiagramInfo(tuple(symbols))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        # Right-pad the mask with False so it covers every measured qubit.
        invert_mask = self.invert_mask
        if len(invert_mask) < len(qubits):
            invert_mask = (invert_mask
                           + (False,) * (len(qubits) - len(invert_mask)))
        lines = []
        for i, (qubit, inv) in enumerate(zip(qubits, invert_mask)):
            if inv:
                lines.append(args.format(
                    'x {0}; // Invert the following measurement\n', qubit))
            lines.append(args.format('measure {0} -> {1:meas}[{2}];\n',
                                     qubit, self.key, i))
        return ''.join(lines)

    def __repr__(self):
        return 'cirq.MeasurementGate({}, {})'.format(repr(self.key),
                                                     repr(self.invert_mask))

    def _value_equality_values_(self):
        return self.key, self.invert_mask
def _default_measurement_key(qubits: Iterable[raw_types.QubitId]) -> str:
    """Comma-join the string forms of `qubits` to form a measurement key."""
    return ','.join(map(str, qubits))
def measure(*qubits: raw_types.QubitId,
            key: Optional[str] = None,
            invert_mask: Tuple[bool, ...] = ()
            ) -> gate_operation.GateOperation:
    """Returns a single MeasurementGate applied to all the given qubits.

    The qubits are measured in the computational basis.

    Args:
        *qubits: The qubits that the measurement gate should measure.
        key: The string key of the measurement. If this is None, it defaults
            to a comma-separated list of the target qubits' str values.
        invert_mask: A list of Truthy or Falsey values indicating whether
            the corresponding qubits should be flipped. None indicates no
            inverting should be done.

    Returns:
        An operation targeting the given qubits with a measurement.

    Raises:
        ValueError if the qubits are not instances of QubitId.
    """
    for qubit in qubits:
        if isinstance(qubit, np.ndarray):
            # A state vector was passed by mistake; point the user at the
            # right function. (Message grammar fixed.)
            raise ValueError(
                'measure() was called with a numpy ndarray. Perhaps you '
                'meant to call measure_state_vector on a numpy array?'
            )
        elif not isinstance(qubit, raw_types.QubitId):
            raise ValueError(
                'measure() was called with type different than QubitId.')

    if key is None:
        key = _default_measurement_key(qubits)
    return MeasurementGate(key, invert_mask).on(*qubits)
def measure_each(*qubits: raw_types.QubitId,
                 key_func: Callable[[raw_types.QubitId], str] = str
                 ) -> List[gate_operation.GateOperation]:
    """Returns a list of operations individually measuring the given qubits.

    The qubits are measured in the computational basis.

    Args:
        *qubits: The qubits to measure.
        key_func: Determines the key of the measurements of each qubit. Takes
            the qubit and returns the key for that qubit. Defaults to str.

    Returns:
        A list of operations individually measuring the given qubits.
    """
    ops = []
    for q in qubits:
        ops.append(MeasurementGate(key_func(q)).on(q))
    return ops
class HPowGate(eigen_gate.EigenGate, gate_features.SingleQubitGate):
    """A Gate that performs a rotation around the X+Z axis of the Bloch sphere.
    The unitary matrix of ``HPowGate(exponent=t)`` is:
    [[g·(c-i·s/sqrt(2)), -i·g·s/sqrt(2)],
    [-i·g·s/sqrt(2), g·(c+i·s/sqrt(2))]]
    where
    c = cos(π·t/2)
    s = sin(π·t/2)
    g = exp(i·π·t/2).
    Note in particular that for `t=1`, this gives the Hadamard matrix.
    `cirq.H`, the Hadamard gate, is an instance of this gate at `exponent=1`.
    """
    def _eigen_components(self):
        # Projectors onto the +1 and -1 eigenvectors of H (eigenphases 0, 1).
        s = np.sqrt(2)
        component0 = np.array([
            [3 + 2 * s, 1 + s],
            [1 + s, 1]
        ]) / (4 + 2 * s)
        component1 = np.array([
            [3 - 2 * s, 1 - s],
            [1 - s, 1]
        ]) / (4 - 2 * s)
        return [(0, component0), (1, component1)]
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast in-place Hadamard; only valid at exponent == 1.
        if self._exponent != 1:
            return None
        zero = args.subspace_index(0)
        one = args.subspace_index(1)
        # The next three in-place lines compute, up to the sqrt(2) factor
        # applied below: new|0> = (|0>+|1>)/2, new|1> = (|0>-|1>)/2.
        # Statement order matters: each line reuses the previous result.
        args.target_tensor[one] -= args.target_tensor[zero]
        args.target_tensor[one] *= -0.5
        args.target_tensor[zero] -= args.target_tensor[one]
        # Fold in the EigenGate global phase together with the sqrt(2).
        p = 1j**(2 * self._exponent * self._global_shift)
        args.target_tensor *= np.sqrt(2) * p
        return args.target_tensor
    def _decompose_(self, qubits):
        q = qubits[0]
        if self._exponent == 1:
            # H == Y**0.5 followed by a (phase-shifted) X.
            yield cirq.Y(q)**0.5
            yield cirq.XPowGate(global_shift=-0.25).on(q)
            return
        # General power: conjugate an X rotation by Y**±0.25.
        yield Y(q)**0.25
        yield X(q)**self._exponent
        yield Y(q)**-0.25
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(('H',))
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('h {0};\n', qubits[0])
        else:
            # Emit the Y/X/Y conjugation from _decompose_ as rotations.
            return args.format('ry({0:half_turns}) {3};\n'
                               'rx({1:half_turns}) {3};\n'
                               'ry({2:half_turns}) {3};\n',
                               0.25, self._exponent, -0.25, qubits[0])
    def __str__(self):
        if self._exponent == 1:
            return 'H'
        return 'H^{}'.format(self._exponent)
    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.H'
            return '(cirq.H**{!r})'.format(self._exponent)
        return (
            'cirq.HPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class CZPowGate(eigen_gate.EigenGate,
                gate_features.TwoQubitGate,
                gate_features.InterchangeableQubitsGate):
    """Two-qubit gate that multiplies the |11⟩ amplitude by a phase.

    The unitary of `CZPowGate(exponent=t)` is diagonal:

        [[1, 0, 0, 0],
         [0, 1, 0, 0],
         [0, 0, 1, 0],
         [0, 0, 0, g]]    with g = exp(i·π·t/2).

    `cirq.CZ`, the controlled Z gate, is this gate at `exponent=1`.
    """

    def _eigen_components(self):
        # Projector onto everything but |11>, and onto |11> itself.
        rest_projector = np.diag([1, 1, 1, 0])
        one_one_projector = np.diag([0, 0, 0, 1])
        return [(0, rest_projector), (1, one_one_projector)]

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Union[np.ndarray, NotImplementedType]:
        if protocols.is_parameterized(self):
            return NotImplemented
        # Diagonal gate: phase only the |11> amplitudes, in place.
        one_one = linalg.slice_for_qubits_equal_to(args.axes, 0b11)
        args.target_tensor[one_one] *= 1j**(2 * self._exponent)
        global_phase = 1j**(2 * self._exponent * self._global_shift)
        if global_phase != 1:
            args.target_tensor *= global_phase
        return args.target_tensor

    def _phase_by_(self, phase_turns, qubit_index):
        # Diagonal gates commute with Z rotations, so phasing is a no-op.
        return self

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('@', '@'),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            # Fractional powers have no direct QASM 2.0 equivalent.
            return None
        args.validate_version('2.0')
        return args.format('cz {0},{1};\n', qubits[0], qubits[1])

    def __str__(self) -> str:
        return 'CZ' if self._exponent == 1 else 'CZ**{!r}'.format(
            self._exponent)

    def __repr__(self) -> str:
        if self._global_shift != 0:
            return ('cirq.CZPowGate(exponent={!r}, '
                    'global_shift={!r})').format(
                        self._exponent, self._global_shift)
        if self._exponent == 1:
            return 'cirq.CZ'
        return '(cirq.CZ**{!r})'.format(self._exponent)
def _rads_func_symbol(func_name: str,
                      args: protocols.CircuitDiagramInfoArgs,
                      half_turns: Any) -> str:
    """Diagram label like 'Rx(0.5π)' for a rotation given in half turns."""
    unit = 'π' if args.use_unicode_characters else 'pi'
    # ±1 half turn is printed as a bare (possibly negated) unit symbol.
    if half_turns == 1:
        coefficient = ''
    elif half_turns == -1:
        coefficient = '-'
    else:
        coefficient = half_turns
    return '{}({}{})'.format(func_name, coefficient, unit)
class CNotPowGate(eigen_gate.EigenGate, gate_features.TwoQubitGate):
    """A gate that applies a controlled power of an X gate.
    When applying CNOT (controlled-not) to qubits, you can either use
    positional arguments CNOT(q1, q2), where q2 is toggled when q1 is on,
    or named arguments CNOT(control=q1, target=q2).
    (Mixing the two is not permitted.)
    The unitary matrix of `CNotPowGate(exponent=t)` is:
    [[1, 0, 0, 0],
    [0, 1, 0, 0],
    [0, 0, g·c, -i·g·s],
    [0, 0, -i·g·s, g·c]]
    where:
    c = cos(π·t/2)
    s = sin(π·t/2)
    g = exp(i·π·t/2).
    `cirq.CNOT`, the controlled NOT gate, is an instance of this gate at
    `exponent=1`.
    """
    def _decompose_(self, qubits):
        # CNOT**t == Y(t)^-0.5 · CZ(c,t)**t · Y(t)^0.5 (X = H·Z·H idea).
        c, t = qubits
        yield Y(t)**-0.5
        yield CZ(c, t)**self._exponent
        yield Y(t)**0.5
    def _eigen_components(self):
        # Eigenphase-0 and eigenphase-1 projectors of the CNOT unitary.
        return [
            (0, np.array([[1, 0, 0, 0],
                          [0, 1, 0, 0],
                          [0, 0, 0.5, 0.5],
                          [0, 0, 0.5, 0.5]])),
            (1, np.array([[0, 0, 0, 0],
                          [0, 0, 0, 0],
                          [0, 0, 0.5, -0.5],
                          [0, 0, -0.5, 0.5]])),
        ]
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('@', 'X'),
            exponent=self._diagram_exponent(args))
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path at exponent == 1: exchange, via the scratch buffer, the
        # two basis-state amplitude slices that CNOT permutes (indices
        # interpreted per args.axes ordering via subspace_index).
        if self._exponent != 1:
            return None
        oo = args.subspace_index(0b11)
        zo = args.subspace_index(0b01)
        args.available_buffer[oo] = args.target_tensor[oo]
        args.target_tensor[oo] = args.target_tensor[zo]
        args.target_tensor[zo] = args.available_buffer[oo]
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            return None  # Don't have an equivalent gate in QASM
        args.validate_version('2.0')
        return args.format('cx {0},{1};\n', qubits[0], qubits[1])
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'CNOT'
        return 'CNOT**{!r}'.format(self._exponent)
    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.CNOT'
            return '(cirq.CNOT**{!r})'.format(self._exponent)
        return (
            'cirq.CNotPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
    def on(self, *args: raw_types.QubitId,
           **kwargs: raw_types.QubitId) -> gate_operation.GateOperation:
        # Accept either CNOT(control, target) or
        # CNOT(control=..., target=...), but never a mixture of the two.
        if not kwargs:
            return super().on(*args)
        if not args and set(kwargs.keys()) == {'control', 'target'}:
            return super().on(kwargs['control'], kwargs['target'])
        raise ValueError(
            "Expected two positional argument or else 'target' AND 'control' "
            "keyword arguments. But got args={!r}, kwargs={!r}.".format(
                args, kwargs))
class SwapPowGate(eigen_gate.EigenGate,
                  gate_features.TwoQubitGate,
                  gate_features.InterchangeableQubitsGate):
    """The two-qubit SWAP gate, possibly raised to a fractional power.

    ``SwapPowGate(exponent=t)`` acts on the computational basis as

        [[1, 0, 0, 0],
         [0, g·c, -i·g·s, 0],
         [0, -i·g·s, g·c, 0],
         [0, 0, 0, 1]]

    with c = cos(π·t/2), s = sin(π·t/2) and g = exp(i·π·t/2).
    `cirq.SWAP` is this gate at exponent=1.
    """

    def _decompose_(self, qubits):
        """See base class."""
        first, second = qubits
        # SWAP**t factors as CNOT · (reversed CNOT)**t · CNOT.
        yield CNOT(first, second)
        yield CNOT(second, first) ** self._exponent
        yield CNOT(first, second)

    def _eigen_components(self):
        # Symmetric (eigenphase 0) and antisymmetric (eigenphase 1) parts.
        symmetric = np.array([[1, 0, 0, 0],
                              [0, 0.5, 0.5, 0],
                              [0, 0.5, 0.5, 0],
                              [0, 0, 0, 1]])
        antisymmetric = np.array([[0, 0, 0, 0],
                                  [0, 0.5, -0.5, 0],
                                  [0, -0.5, 0.5, 0],
                                  [0, 0, 0, 0]])
        return [(0, symmetric), (1, antisymmetric)]

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        if self._exponent != 1:
            return None
        # Exchange the |01> and |10> amplitudes in place via the buffer.
        index_01 = args.subspace_index(0b01)
        index_10 = args.subspace_index(0b10)
        args.available_buffer[index_01] = args.target_tensor[index_01]
        args.target_tensor[index_01] = args.target_tensor[index_10]
        args.target_tensor[index_10] = args.available_buffer[index_01]
        phase = 1j**(2 * self._exponent * self._global_shift)
        if phase != 1:
            args.target_tensor *= phase
        return args.target_tensor

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        symbol = '×' if args.use_unicode_characters else 'swap'
        return protocols.CircuitDiagramInfo(
            wire_symbols=(symbol, symbol),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            # Fractional powers of SWAP have no QASM 2.0 equivalent.
            return None
        args.validate_version('2.0')
        return args.format('swap {0},{1};\n', qubits[0], qubits[1])

    def __str__(self) -> str:
        return 'SWAP' if self._exponent == 1 else 'SWAP**{!r}'.format(
            self._exponent)

    def __repr__(self):
        if self._global_shift != 0:
            return ('cirq.SwapPowGate(exponent={!r}, '
                    'global_shift={!r})').format(
                        self._exponent, self._global_shift)
        if self._exponent == 1:
            return 'cirq.SWAP'
        return '(cirq.SWAP**{!r})'.format(self._exponent)
class ISwapPowGate(eigen_gate.EigenGate,
                   gate_features.InterchangeableQubitsGate,
                   gate_features.TwoQubitGate):
    """Rotates the |01⟩-vs-|10⟩ subspace of two qubits around its Bloch X-axis.
    When exponent=1, swaps the two qubits and phases |01⟩ and |10⟩ by i. More
    generally, this gate's matrix is defined as follows:
    ISWAP**t ≡ exp(+i π t (X⊗X + Y⊗Y) / 4)
    which is given by the matrix:
    [[1, 0, 0, 0],
    [0, c, i·s, 0],
    [0, i·s, c, 0],
    [0, 0, 0, 1]]
    where:
    c = cos(π·t/2)
    s = sin(π·t/2)
    `cirq.ISWAP`, the swap gate that applies i to the |01> and |10> states,
    is an instance of this gate at exponent=1.
    """
    def _eigen_components(self):
        # |00>/|11> are untouched (phase 0); the |01>±|10> combinations pick
        # up eigenphases ±0.5.
        return [
            (0, np.diag([1, 0, 0, 1])),
            (+0.5, np.array([[0, 0, 0, 0],
                             [0, 0.5, 0.5, 0],
                             [0, 0.5, 0.5, 0],
                             [0, 0, 0, 0]])),
            (-0.5, np.array([[0, 0, 0, 0],
                             [0, 0.5, -0.5, 0],
                             [0, -0.5, 0.5, 0],
                             [0, 0, 0, 0]])),
        ]
    def _decompose_(self, qubits):
        # Express ISWAP**t via CNOTs, Hadamards and S**±t rotations.
        a, b = qubits
        yield CNOT(a, b)
        yield H(a)
        yield CNOT(b, a)
        yield S(a)**self._exponent
        yield CNOT(b, a)
        yield S(a)**-self._exponent
        yield H(a)
        yield CNOT(a, b)
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path at exponent == 1: swap the |01>/|10> amplitudes through
        # the scratch buffer, then multiply both by i.
        if self._exponent != 1:
            return None
        zo = args.subspace_index(0b01)
        oz = args.subspace_index(0b10)
        args.available_buffer[zo] = args.target_tensor[zo]
        args.target_tensor[zo] = args.target_tensor[oz]
        args.target_tensor[oz] = args.available_buffer[zo]
        args.target_tensor[zo] *= 1j
        args.target_tensor[oz] *= 1j
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('iSwap', 'iSwap'),
            exponent=self._diagram_exponent(args))
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'ISWAP'
        return 'ISWAP**{!r}'.format(self._exponent)
    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.ISWAP'
            return '(cirq.ISWAP**{!r})'.format(self._exponent)
        return (
            'cirq.ISwapPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
def Rx(rads: float) -> XPowGate:
    """Returns a gate with the matrix e^{-i X rads / 2}."""
    half_turns = rads / np.pi
    # global_shift=-0.5 strips the EigenGate phase so the matrix is exactly
    # the rotation exponential.
    return XPowGate(exponent=half_turns, global_shift=-0.5)
def Ry(rads: float) -> YPowGate:
    """Returns a gate with the matrix e^{-i Y rads / 2}."""
    half_turns = rads / np.pi
    return YPowGate(exponent=half_turns, global_shift=-0.5)
def Rz(rads: float) -> ZPowGate:
    """Returns a gate with the matrix e^{-i Z rads / 2}."""
    half_turns = rads / np.pi
    return ZPowGate(exponent=half_turns, global_shift=-0.5)
X = XPowGate()
"""The Pauli X gate.
Matrix:
[[0, 1],
[1, 0]]
"""
#: The Pauli Y gate.
#:
#: Matrix:
#:
#: [[0, -i],
#: [i, 0]]
Y = YPowGate()
# The Pauli Z gate.
#
# Matrix:
#
# [[1, 0],
# [0, -1]]
Z = ZPowGate()
# The Hadamard gate.
#
# Matrix:
#
# [[s, s],
# [s, -s]]
# where s = sqrt(0.5).
H = HPowGate()
# The Clifford S gate.
#
# Matrix:
#
# [[1, 0],
# [0, i]]
S = Z**0.5
# The T gate.
#
# Matrix:
#
# [[1, 0]
# [0, exp(i pi / 4)]]
T = Z**0.25
# The controlled Z gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 1, 0, 0],
# [0, 0, 1, 0],
# [0, 0, 0, -1]]
CZ = CZPowGate()
# The controlled NOT gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 1, 0, 0],
# [0, 0, 0, 1],
# [0, 0, 1, 0]]
CNOT = CNotPowGate()
# The swap gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 0, 1, 0],
# [0, 1, 0, 0],
# [0, 0, 0, 1]]
SWAP = SwapPowGate()
# The iswap gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 0, i, 0],
# [0, i, 0, 0],
# [0, 0, 0, 1]]
ISWAP = ISwapPowGate()
| 32.071977 | 80 | 0.552739 |
from typing import (
Any, Callable, cast, Iterable, List, Optional, Tuple, Union,
)
import numpy as np
from cirq import linalg, protocols, value
from cirq.ops import gate_features, eigen_gate, raw_types, gate_operation
from cirq.type_workarounds import NotImplementedType
import cirq.ops.phased_x_gate
class XPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the X axis of the Bloch sphere.

    `cirq.X`, the Pauli X gate, is an instance of this gate at exponent=1.
    """
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path at exponent == 1: X just exchanges the |0> and |1>
        # amplitudes, written into the scratch buffer.
        if self._exponent != 1:
            return None
        zero = args.subspace_index(0)
        one = args.subspace_index(1)
        args.available_buffer[zero] = args.target_tensor[one]
        args.available_buffer[one] = args.target_tensor[zero]
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.available_buffer *= p
        return args.available_buffer
    def _eigen_components(self):
        # Projectors onto the |+> (phase 0) and |-> (phase 1) eigenvectors.
        return [
            (0, np.array([[0.5, 0.5], [0.5, 0.5]])),
            (1, np.array([[0.5, -0.5], [-0.5, 0.5]])),
        ]
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 is the Rx(rads) convention; label accordingly.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Rx',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))
        return protocols.CircuitDiagramInfo(
            wire_symbols=('X',),
            exponent=self._diagram_exponent(args))
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('x {0};\n', qubits[0])
        else:
            return args.format('rx({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])
    def _phase_by_(self, phase_turns, qubit_index):
        # Conjugating X**t by a Z rotation yields a phased-X gate.
        return cirq.ops.phased_x_gate.PhasedXPowGate(
            exponent=self._exponent,
            phase_exponent=phase_turns * 2)
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'X'
        return 'X**{!r}'.format(self._exponent)
    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Rx(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.X'
            return '(cirq.X**{!r})'.format(self._exponent)
        return (
            'cirq.XPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class YPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the Y axis of the Bloch sphere.

    `cirq.Y`, the Pauli Y gate, is an instance of this gate at exponent=1.
    """
    def _eigen_components(self):
        # Projectors onto the two Y eigenvectors (eigenphases 0 and 1).
        return [
            (0, np.array([[0.5, -0.5j], [0.5j, 0.5]])),
            (1, np.array([[0.5, 0.5j], [-0.5j, 0.5]])),
        ]
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 is the Ry(rads) convention; label accordingly.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Ry',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))
        return protocols.CircuitDiagramInfo(
            wire_symbols=('Y',),
            exponent=self._diagram_exponent(args))
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('y {0};\n', qubits[0])
        else:
            return args.format('ry({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])
    def _phase_by_(self, phase_turns, qubit_index):
        # Y**t is a phased X**t with a 0.5 phase offset; shift it further.
        return cirq.ops.phased_x_gate.PhasedXPowGate(
            exponent=self._exponent,
            phase_exponent=0.5 + phase_turns * 2)
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'Y'
        return 'Y**{!r}'.format(self._exponent)
    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Ry(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.Y'
            return '(cirq.Y**{!r})'.format(self._exponent)
        return (
            'cirq.YPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class ZPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the Z axis of the Bloch sphere.

    `cirq.Z`, the Pauli Z gate, is an instance of this gate at exponent=1;
    `cirq.S` and `cirq.T` are its 0.5 and 0.25 powers.
    """
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Diagonal gate: phase the |1> amplitudes in place.
        if protocols.is_parameterized(self):
            return None
        one = args.subspace_index(1)
        c = 1j**(self._exponent * 2)
        args.target_tensor[one] *= c
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor
    def _eigen_components(self):
        # |0> has eigenphase 0, |1> has eigenphase 1.
        return [
            (0, np.diag([1, 0])),
            (1, np.diag([0, 1])),
        ]
    def _phase_by_(self, phase_turns: float, qubit_index: int):
        # Z rotations commute with each other, so phasing is a no-op.
        return self
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 is the Rz(rads) convention; label accordingly.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Rz',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))
        # Prefer the conventional T and S symbols for quarter/half powers.
        e = self._diagram_exponent(args)
        if e in [-0.25, 0.25]:
            return protocols.CircuitDiagramInfo(
                wire_symbols=('T',),
                exponent=cast(float, e) * 4)
        if e in [-0.5, 0.5]:
            return protocols.CircuitDiagramInfo(
                wire_symbols=('S',),
                exponent=cast(float, e) * 2)
        return protocols.CircuitDiagramInfo(
            wire_symbols=('Z',),
            exponent=e)
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('z {0};\n', qubits[0])
        else:
            return args.format('rz({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])
    def __str__(self) -> str:
        if self._exponent == 0.25:
            return 'T'
        if self._exponent == -0.25:
            return 'T**-1'
        if self._exponent == 0.5:
            return 'S'
        if self._exponent == -0.5:
            return 'S**-1'
        if self._exponent == 1:
            return 'Z'
        return 'Z**{}'.format(self._exponent)
    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Rz(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 0.25:
                return 'cirq.T'
            if self._exponent == -0.25:
                return '(cirq.T**-1)'
            if self._exponent == 0.5:
                return 'cirq.S'
            if self._exponent == -0.5:
                return '(cirq.S**-1)'
            if self._exponent == 1:
                return 'cirq.Z'
            return '(cirq.Z**{!r})'.format(self._exponent)
        return (
            'cirq.ZPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
@value.value_equality
class MeasurementGate(raw_types.Gate):
    """A gate that measures qubits in the computational basis.

    The measurement result is stored under `key`; qubits flagged in
    `invert_mask` have their reported result bit flipped.
    """
    def __init__(self,
                 key: str = '',
                 invert_mask: Tuple[bool, ...] = ()) -> None:
        self.key = key
        # Normalize None/falsey masks to the empty tuple.
        self.invert_mask = invert_mask or ()
    @staticmethod
    def is_measurement(op: Union[raw_types.Gate, raw_types.Operation]) -> bool:
        """Returns True if the gate or operation is a MeasurementGate."""
        if isinstance(op, MeasurementGate):
            return True
        if (isinstance(op, gate_operation.GateOperation) and
                isinstance(op.gate, MeasurementGate)):
            return True
        return False
    def with_bits_flipped(self, *bit_positions: int) -> 'MeasurementGate':
        """Returns a copy whose invert mask is toggled at the given bits."""
        old_mask = self.invert_mask or ()
        # Grow the mask just far enough to cover the highest flipped bit.
        n = max(len(old_mask) - 1, *bit_positions) + 1
        new_mask = [k < len(old_mask) and old_mask[k] for k in range(n)]
        for b in bit_positions:
            new_mask[b] = not new_mask[b]
        return MeasurementGate(key=self.key, invert_mask=tuple(new_mask))
    def validate_args(self, qubits):
        # The mask may be shorter than the qubit list but never longer.
        if (self.invert_mask is not None and
                len(self.invert_mask) > len(qubits)):
            raise ValueError('len(invert_mask) > len(qubits)')
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        n = (max(1, len(self.invert_mask))
             if args.known_qubit_count is None
             else args.known_qubit_count)
        symbols = ['M'] * n
        # Inverted bits are drawn as '!M'.
        if self.invert_mask:
            for i, b in enumerate(self.invert_mask):
                if b:
                    symbols[i] = '!M'
        # Only show the key when it differs from the default derived one.
        if (not args.known_qubits or
                self.key != _default_measurement_key(args.known_qubits)):
            symbols[0] += "('{}')".format(self.key)
        return protocols.CircuitDiagramInfo(tuple(symbols))
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        invert_mask = self.invert_mask
        # Pad the mask with False so it lines up with the qubits.
        if len(invert_mask) < len(qubits):
            invert_mask = (invert_mask
                           + (False,) * (len(qubits) - len(invert_mask)))
        lines = []
        for i, (qubit, inv) in enumerate(zip(qubits, invert_mask)):
            if inv:
                # QASM has no inverted measurement; flip before measuring.
                lines.append(args.format(
                    'x {0}; // Invert the following measurement\n', qubit))
            lines.append(args.format('measure {0} -> {1:meas}[{2}];\n',
                                     qubit, self.key, i))
        return ''.join(lines)
    def __repr__(self):
        return 'cirq.MeasurementGate({}, {})'.format(repr(self.key),
                                                     repr(self.invert_mask))
    def _value_equality_values_(self):
        # Fields used by @value.value_equality for __eq__/__hash__.
        return self.key, self.invert_mask
def _default_measurement_key(qubits: Iterable[raw_types.QubitId]) -> str:
    """Comma-separated str() of the qubits; used when no key is supplied."""
    return ','.join(str(q) for q in qubits)
def measure(*qubits: raw_types.QubitId,
            key: Optional[str] = None,
            invert_mask: Tuple[bool, ...] = ()
            ) -> gate_operation.GateOperation:
    """Returns a single MeasurementGate applied to all the given qubits.

    Args:
        *qubits: The qubits to measure (computational basis).
        key: Measurement key; defaults to the comma-joined qubit names.
        invert_mask: Truthy entries flip the corresponding result bit.
    Raises:
        ValueError: if any argument is not a QubitId.
    """
    for qubit in qubits:
        if isinstance(qubit, np.ndarray):
            # Common user mistake: passing a state vector instead of qubits.
            raise ValueError(
                'measure() was called a numpy ndarray. Perhaps you meant '
                'to call measure_state_vector on numpy array?'
            )
        elif not isinstance(qubit, raw_types.QubitId):
            raise ValueError(
                'measure() was called with type different than QubitId.')
    if key is None:
        key = _default_measurement_key(qubits)
    return MeasurementGate(key, invert_mask).on(*qubits)
def measure_each(*qubits: raw_types.QubitId,
                 key_func: Callable[[raw_types.QubitId], str] = str
                 ) -> List[gate_operation.GateOperation]:
    """Returns a list of operations individually measuring the given qubits.

    `key_func` maps each qubit to its measurement key (defaults to str).
    """
    return [MeasurementGate(key_func(q)).on(q) for q in qubits]
class HPowGate(eigen_gate.EigenGate, gate_features.SingleQubitGate):
    """A rotation around the X+Z axis of the Bloch sphere.

    `cirq.H`, the Hadamard gate, is an instance of this gate at exponent=1.
    """
    def _eigen_components(self):
        # Projectors onto the two H eigenvectors (eigenphases 0 and 1).
        s = np.sqrt(2)
        component0 = np.array([
            [3 + 2 * s, 1 + s],
            [1 + s, 1]
        ]) / (4 + 2 * s)
        component1 = np.array([
            [3 - 2 * s, 1 - s],
            [1 - s, 1]
        ]) / (4 - 2 * s)
        return [(0, component0), (1, component1)]
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast in-place Hadamard; only valid at exponent == 1. The three
        # in-place updates are order-dependent.
        if self._exponent != 1:
            return None
        zero = args.subspace_index(0)
        one = args.subspace_index(1)
        args.target_tensor[one] -= args.target_tensor[zero]
        args.target_tensor[one] *= -0.5
        args.target_tensor[zero] -= args.target_tensor[one]
        p = 1j**(2 * self._exponent * self._global_shift)
        args.target_tensor *= np.sqrt(2) * p
        return args.target_tensor
    def _decompose_(self, qubits):
        q = qubits[0]
        if self._exponent == 1:
            # H == Y**0.5 followed by a phase-shifted X.
            yield cirq.Y(q)**0.5
            yield cirq.XPowGate(global_shift=-0.25).on(q)
            return
        # General power: conjugate an X rotation by Y**±0.25.
        yield Y(q)**0.25
        yield X(q)**self._exponent
        yield Y(q)**-0.25
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(('H',))
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('h {0};\n', qubits[0])
        else:
            # Emit the Y/X/Y conjugation from _decompose_ as rotations.
            return args.format('ry({0:half_turns}) {3};\n'
                               'rx({1:half_turns}) {3};\n'
                               'ry({2:half_turns}) {3};\n',
                               0.25, self._exponent, -0.25, qubits[0])
    def __str__(self):
        if self._exponent == 1:
            return 'H'
        return 'H^{}'.format(self._exponent)
    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.H'
            return '(cirq.H**{!r})'.format(self._exponent)
        return (
            'cirq.HPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class CZPowGate(eigen_gate.EigenGate,
                gate_features.TwoQubitGate,
                gate_features.InterchangeableQubitsGate):
    """A gate that applies a phase to the |11⟩ state of two qubits.

    `cirq.CZ`, the controlled Z gate, is this gate at exponent=1.
    """
    def _eigen_components(self):
        # Projector onto everything but |11>, and onto |11> itself.
        return [
            (0, np.diag([1, 1, 1, 0])),
            (1, np.diag([0, 0, 0, 1])),
        ]
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Union[np.ndarray, NotImplementedType]:
        if protocols.is_parameterized(self):
            return NotImplemented
        # Diagonal gate: phase only the |11> amplitudes, in place.
        c = 1j**(2 * self._exponent)
        one_one = linalg.slice_for_qubits_equal_to(args.axes, 0b11)
        args.target_tensor[one_one] *= c
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor
    def _phase_by_(self, phase_turns, qubit_index):
        # Diagonal gates commute with Z rotations, so phasing is a no-op.
        return self
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('@', '@'),
            exponent=self._diagram_exponent(args))
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            return None  # No equivalent gate for fractional powers in QASM.
        args.validate_version('2.0')
        return args.format('cz {0},{1};\n', qubits[0], qubits[1])
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'CZ'
        return 'CZ**{!r}'.format(self._exponent)
    def __repr__(self) -> str:
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.CZ'
            return '(cirq.CZ**{!r})'.format(self._exponent)
        return (
            'cirq.CZPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
def _rads_func_symbol(func_name: str,
                      args: protocols.CircuitDiagramInfoArgs,
                      half_turns: Any) -> str:
    """Diagram label like 'Rx(0.5π)' for a rotation given in half turns."""
    unit = 'π' if args.use_unicode_characters else 'pi'
    # ±1 half turn is printed as a bare (possibly negated) unit symbol.
    if half_turns == 1:
        return '{}({})'.format(func_name, unit)
    if half_turns == -1:
        return '{}(-{})'.format(func_name, unit)
    return '{}({}{})'.format(func_name, half_turns, unit)
class CNotPowGate(eigen_gate.EigenGate, gate_features.TwoQubitGate):
    """A gate that applies a controlled power of an X gate.

    `cirq.CNOT` is this gate at exponent=1. Accepts either positional
    CNOT(control, target) or keyword CNOT(control=..., target=...) qubits.
    """
    def _decompose_(self, qubits):
        # CNOT**t == Y(t)^-0.5 · CZ(c,t)**t · Y(t)^0.5.
        c, t = qubits
        yield Y(t)**-0.5
        yield CZ(c, t)**self._exponent
        yield Y(t)**0.5
    def _eigen_components(self):
        # Eigenphase-0 and eigenphase-1 projectors of the CNOT unitary.
        return [
            (0, np.array([[1, 0, 0, 0],
                          [0, 1, 0, 0],
                          [0, 0, 0.5, 0.5],
                          [0, 0, 0.5, 0.5]])),
            (1, np.array([[0, 0, 0, 0],
                          [0, 0, 0, 0],
                          [0, 0, 0.5, -0.5],
                          [0, 0, -0.5, 0.5]])),
        ]
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('@', 'X'),
            exponent=self._diagram_exponent(args))
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path at exponent == 1: exchange, via the scratch buffer, the
        # two amplitude slices the CNOT permutes.
        if self._exponent != 1:
            return None
        oo = args.subspace_index(0b11)
        zo = args.subspace_index(0b01)
        args.available_buffer[oo] = args.target_tensor[oo]
        args.target_tensor[oo] = args.target_tensor[zo]
        args.target_tensor[zo] = args.available_buffer[oo]
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            return None  # Don't have an equivalent gate in QASM
        args.validate_version('2.0')
        return args.format('cx {0},{1};\n', qubits[0], qubits[1])
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'CNOT'
        return 'CNOT**{!r}'.format(self._exponent)
    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.CNOT'
            return '(cirq.CNOT**{!r})'.format(self._exponent)
        return (
            'cirq.CNotPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
    def on(self, *args: raw_types.QubitId,
           **kwargs: raw_types.QubitId) -> gate_operation.GateOperation:
        # Accept positional or keyword qubits, but never a mixture.
        if not kwargs:
            return super().on(*args)
        if not args and set(kwargs.keys()) == {'control', 'target'}:
            return super().on(kwargs['control'], kwargs['target'])
        raise ValueError(
            "Expected two positional argument or else 'target' AND 'control' "
            "keyword arguments. But got args={!r}, kwargs={!r}.".format(
                args, kwargs))
class SwapPowGate(eigen_gate.EigenGate,
                  gate_features.TwoQubitGate,
                  gate_features.InterchangeableQubitsGate):
    """The SWAP gate, possibly raised to a power. Exchanges qubits.

    `cirq.SWAP` is this gate at exponent=1.
    """
    def _decompose_(self, qubits):
        """See base class. SWAP**t == CNOT · (reversed CNOT)**t · CNOT."""
        a, b = qubits
        yield CNOT(a, b)
        yield CNOT(b, a) ** self._exponent
        yield CNOT(a, b)
    def _eigen_components(self):
        # Symmetric (eigenphase 0) and antisymmetric (eigenphase 1) parts.
        return [
            (0, np.array([[1, 0, 0, 0],
                          [0, 0.5, 0.5, 0],
                          [0, 0.5, 0.5, 0],
                          [0, 0, 0, 1]])),
            (1, np.array([[0, 0, 0, 0],
                          [0, 0.5, -0.5, 0],
                          [0, -0.5, 0.5, 0],
                          [0, 0, 0, 0]])),
        ]
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path at exponent == 1: exchange the |01> and |10> amplitudes
        # in place via the scratch buffer.
        if self._exponent != 1:
            return None
        zo = args.subspace_index(0b01)
        oz = args.subspace_index(0b10)
        args.available_buffer[zo] = args.target_tensor[zo]
        args.target_tensor[zo] = args.target_tensor[oz]
        args.target_tensor[oz] = args.available_buffer[zo]
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        if not args.use_unicode_characters:
            return protocols.CircuitDiagramInfo(
                wire_symbols=('swap', 'swap'),
                exponent=self._diagram_exponent(args))
        return protocols.CircuitDiagramInfo(
            wire_symbols=('×', '×'),
            exponent=self._diagram_exponent(args))
    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            return None  # No equivalent gate for fractional powers in QASM.
        args.validate_version('2.0')
        return args.format('swap {0},{1};\n', qubits[0], qubits[1])
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'SWAP'
        return 'SWAP**{!r}'.format(self._exponent)
    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.SWAP'
            return '(cirq.SWAP**{!r})'.format(self._exponent)
        return (
            'cirq.SwapPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class ISwapPowGate(eigen_gate.EigenGate,
                   gate_features.InterchangeableQubitsGate,
                   gate_features.TwoQubitGate):
    """Rotates the |01⟩-vs-|10⟩ subspace of two qubits around its Bloch X-axis.

    `cirq.ISWAP` (swap plus an i phase on |01⟩/|10⟩) is this gate at
    exponent=1.
    """
    def _eigen_components(self):
        # |00>/|11> are untouched (phase 0); the |01>±|10> combinations pick
        # up eigenphases ±0.5.
        return [
            (0, np.diag([1, 0, 0, 1])),
            (+0.5, np.array([[0, 0, 0, 0],
                             [0, 0.5, 0.5, 0],
                             [0, 0.5, 0.5, 0],
                             [0, 0, 0, 0]])),
            (-0.5, np.array([[0, 0, 0, 0],
                             [0, 0.5, -0.5, 0],
                             [0, -0.5, 0.5, 0],
                             [0, 0, 0, 0]])),
        ]
    def _decompose_(self, qubits):
        # Express ISWAP**t via CNOTs, Hadamards and S**±t rotations.
        a, b = qubits
        yield CNOT(a, b)
        yield H(a)
        yield CNOT(b, a)
        yield S(a)**self._exponent
        yield CNOT(b, a)
        yield S(a)**-self._exponent
        yield H(a)
        yield CNOT(a, b)
    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path at exponent == 1: swap |01>/|10> amplitudes through the
        # scratch buffer, then multiply both by i.
        if self._exponent != 1:
            return None
        zo = args.subspace_index(0b01)
        oz = args.subspace_index(0b10)
        args.available_buffer[zo] = args.target_tensor[zo]
        args.target_tensor[zo] = args.target_tensor[oz]
        args.target_tensor[oz] = args.available_buffer[zo]
        args.target_tensor[zo] *= 1j
        args.target_tensor[oz] *= 1j
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor
    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('iSwap', 'iSwap'),
            exponent=self._diagram_exponent(args))
    def __str__(self) -> str:
        if self._exponent == 1:
            return 'ISWAP'
        return 'ISWAP**{!r}'.format(self._exponent)
    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.ISWAP'
            return '(cirq.ISWAP**{!r})'.format(self._exponent)
        return (
            'cirq.ISwapPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
def Rx(rads: float) -> XPowGate:
    """Returns a gate with the matrix e^{-i X rads / 2}."""
    return XPowGate(exponent=rads / np.pi, global_shift=-0.5)
def Ry(rads: float) -> YPowGate:
    """Returns a gate with the matrix e^{-i Y rads / 2}."""
    return YPowGate(exponent=rads / np.pi, global_shift=-0.5)
def Rz(rads: float) -> ZPowGate:
    """Returns a gate with the matrix e^{-i Z rads / 2}."""
    return ZPowGate(exponent=rads / np.pi, global_shift=-0.5)
#: The Pauli X gate.
#:
#: Matrix:
#:
#:     [[0, 1],
#:     [1, 0]]
X = XPowGate()
#: The Pauli Y gate.
#:
#: Matrix:
#:
#:     [[0, -i],
#:     [i, 0]]
Y = YPowGate()
# The Pauli Z gate.
#
# Matrix:
#
#     [[1, 0],
#     [0, -1]]
Z = ZPowGate()
# The Hadamard gate.
#
# Matrix:
#
#     [[s, s],
#     [s, -s]]
# where s = sqrt(0.5).
H = HPowGate()
# The Clifford S gate.
#
# Matrix:
#
#     [[1, 0],
#     [0, i]]
S = Z**0.5
# The T gate.
#
# Matrix:
#
#     [[1, 0]
#     [0, exp(i pi / 4)]]
T = Z**0.25
# The controlled Z gate.
#
# Matrix:
#
#     [[1, 0, 0, 0],
#     [0, 1, 0, 0],
#     [0, 0, 1, 0],
#     [0, 0, 0, -1]]
CZ = CZPowGate()
# The controlled NOT gate.
#
# Matrix:
#
#     [[1, 0, 0, 0],
#     [0, 1, 0, 0],
#     [0, 0, 0, 1],
#     [0, 0, 1, 0]]
CNOT = CNotPowGate()
# The swap gate.
#
# Matrix:
#
#     [[1, 0, 0, 0],
#     [0, 0, 1, 0],
#     [0, 1, 0, 0],
#     [0, 0, 0, 1]]
SWAP = SwapPowGate()
# The iswap gate.
#
# Matrix:
#
#     [[1, 0, 0, 0],
#     [0, 0, i, 0],
#     [0, i, 0, 0],
#     [0, 0, 0, 1]]
ISWAP = ISwapPowGate()
| true | true |
f72adde5fd070ac204654007f643a021dddeff3a | 4,967 | py | Python | sensirion_shdlc_sensorbridge/commands/firmware_update.py | Sensirion/python-shdlc-sensorbridge | c441c17d89697ecf0f7b61955f54c3da195e30e6 | [
"BSD-3-Clause"
] | null | null | null | sensirion_shdlc_sensorbridge/commands/firmware_update.py | Sensirion/python-shdlc-sensorbridge | c441c17d89697ecf0f7b61955f54c3da195e30e6 | [
"BSD-3-Clause"
] | 1 | 2021-03-28T22:15:29.000Z | 2021-11-03T09:06:14.000Z | sensirion_shdlc_sensorbridge/commands/firmware_update.py | Sensirion/python-shdlc-sensorbridge | c441c17d89697ecf0f7b61955f54c3da195e30e6 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# (c) Copyright 2020 Sensirion AG, Switzerland
##############################################################################
##############################################################################
# _____ _ _ _______ _____ ____ _ _
# / ____| /\ | | | |__ __|_ _/ __ \| \ | |
# | | / \ | | | | | | | || | | | \| |
# | | / /\ \| | | | | | | || | | | . ` |
# | |____ / ____ \ |__| | | | _| || |__| | |\ |
# \_____/_/ \_\____/ |_| |_____\____/|_| \_|
#
# THIS FILE IS AUTOMATICALLY GENERATED AND MUST NOT BE EDITED MANUALLY!
#
# Generator: sensirion-shdlc-interface-generator 0.5.1
# Product: Sensor Bridge
# Version: 0.1.0
#
##############################################################################
##############################################################################
# flake8: noqa
from __future__ import absolute_import, division, print_function
from sensirion_shdlc_driver.command import ShdlcCommand
from struct import pack, unpack
import logging
log = logging.getLogger(__name__)
class SensorBridgeCmdFirmwareUpdateBase(ShdlcCommand):
    """
    SHDLC command 0xF3: "Firmware Update".

    Base class shared by all firmware-update sub-commands; subclasses
    select the concrete sub-command through the payload they pass on.
    """
    def __init__(self, *args, **kwargs):
        # All firmware-update sub-commands use the fixed command ID 0xF3;
        # everything else is forwarded unchanged to ShdlcCommand.
        super(SensorBridgeCmdFirmwareUpdateBase, self).__init__(
            0xF3, *args, **kwargs)
class SensorBridgeCmdEnterBootloader(SensorBridgeCmdFirmwareUpdateBase):
    def __init__(self):
        """
        Enter Bootloader Command
        Command to enter into the bootloader mode. The device will reboot into
        bootloader mode and wait until the new Firmware is received (start
        update command expected). Even after a power reset, the device returns
        into bootloader mode. The response frame is sent before the reset.
        .. note:: After the response frame is received, the device will not
                  accept new commands until fully booted (wait at least 1 s).
        """
        super(SensorBridgeCmdEnterBootloader, self).__init__(
            data=[],  # this sub-command carries no payload
            max_response_time=0.5,
            post_processing_time=1.0,  # give the device time to reboot
            min_response_length=0,
            max_response_length=0
        )
class SensorBridgeCmdStartUpdate(SensorBridgeCmdFirmwareUpdateBase):
    def __init__(self):
        """
        Start Update Command
        Command to start the firmware update. The devices flash will be erased
        (except bootloader) and the internal pointers resetted. The device is
        then ready to receive the new firmware with the update data command.
        .. note:: Only supported when in bootloader mode.
        """
        # Payload: single byte 0x01 = "start update" sub-command code.
        super(SensorBridgeCmdStartUpdate, self).__init__(
            data=b"".join([bytes(bytearray([0x01]))]),
            max_response_time=0.5,
            post_processing_time=0.0,
            min_response_length=0,
            max_response_length=0
        )
class SensorBridgeCmdUpdateData(SensorBridgeCmdFirmwareUpdateBase):
    def __init__(self, data):
        """
        Update Data Command
        Command to send the new firmware data as hex code in binary format.
        .. note:: Only supported when in bootloader mode after receiving the
                  start update command. Send even number of bytes except for
                  the last frame.
        :param bytes data:
            Firmware hex data in binary format.
        """
        # Payload layout: sub-command code 0x02 followed by the raw chunk.
        super(SensorBridgeCmdUpdateData, self).__init__(
            data=b"".join([bytes(bytearray([0x02])),
                           bytes(bytearray(data))]),
            max_response_time=0.5,
            post_processing_time=0.0,
            min_response_length=0,
            max_response_length=0
        )
class SensorBridgeCmdStopUpdate(SensorBridgeCmdFirmwareUpdateBase):
    def __init__(self, checksum):
        """
        Stop Update Command
        After all update data frames are sent, the stop update marks the end of
        the update sequence. The checksum is sent to the device and
        verification is done. The device state represents the success of the
        update sequence. If successfully, the device writes the signature and
        reboots into the application.
        .. note:: The checksum is calculated the same way as the SHDLC
                  checksum. First sum all firmware update data bytes and then
                  take the LSB of the result and invert it. This will be the
                  checksum.
        :param int checksum:
            Checksum of the firmware data.
        """
        # Payload layout: sub-command code 0x03 followed by the checksum
        # packed as a single unsigned byte (big-endian ">B").
        super(SensorBridgeCmdStopUpdate, self).__init__(
            data=b"".join([bytes(bytearray([0x03])),
                           pack(">B", checksum)]),
            max_response_time=1.0,
            post_processing_time=0.0,
            min_response_length=0,
            max_response_length=0
        )
| 35.733813 | 79 | 0.562915 | true | true | |
f72adf1f6af0532364f442d4ae606bac033e4b53 | 584 | py | Python | tutorials/migrations/0031_auto_20210211_1605.py | ericrobskyhuntley/vialab.mit.edu | 1318d03b8eeb106c1662052e1caa53290e206ae7 | [
"MIT"
] | null | null | null | tutorials/migrations/0031_auto_20210211_1605.py | ericrobskyhuntley/vialab.mit.edu | 1318d03b8eeb106c1662052e1caa53290e206ae7 | [
"MIT"
] | null | null | null | tutorials/migrations/0031_auto_20210211_1605.py | ericrobskyhuntley/vialab.mit.edu | 1318d03b8eeb106c1662052e1caa53290e206ae7 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.4 on 2021-02-11 21:05
from django.db import migrations
import martor.models
class Migration(migrations.Migration):
    # Auto-generated by Django (see header comment). Alters the ``desc``
    # field on ``software`` and on ``historicalsoftware`` (presumably the
    # django-simple-history shadow model -- confirm) to a MartorField
    # (markdown editor field) with max_length=400.
    dependencies = [
        ('tutorials', '0030_auto_20200408_1257'),
    ]
    operations = [
        migrations.AlterField(
            model_name='historicalsoftware',
            name='desc',
            field=martor.models.MartorField(max_length=400),
        ),
        migrations.AlterField(
            model_name='software',
            name='desc',
            field=martor.models.MartorField(max_length=400),
        ),
    ]
| 23.36 | 60 | 0.601027 |
from django.db import migrations
import martor.models
class Migration(migrations.Migration):
dependencies = [
('tutorials', '0030_auto_20200408_1257'),
]
operations = [
migrations.AlterField(
model_name='historicalsoftware',
name='desc',
field=martor.models.MartorField(max_length=400),
),
migrations.AlterField(
model_name='software',
name='desc',
field=martor.models.MartorField(max_length=400),
),
]
| true | true |
f72adf93c081da254d9748d047115a98b9ef3ffc | 6,631 | py | Python | feature_export.py | TAMU-CPT/blast-db-download | 53261f08d1f9193c4f538fa90983a465502190a9 | [
"BSD-3-Clause"
] | null | null | null | feature_export.py | TAMU-CPT/blast-db-download | 53261f08d1f9193c4f538fa90983a465502190a9 | [
"BSD-3-Clause"
] | 3 | 2017-09-15T18:58:21.000Z | 2020-03-24T19:11:16.000Z | feature_export.py | TAMU-CPT/blast-db-download | 53261f08d1f9193c4f538fa90983a465502190a9 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
import sys
import argparse
import logging
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.SeqFeature import SeqFeature, FeatureLocation
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def get_id(feature=None, parent_prefix=None):
    """
    Build an identifier string for a feature.

    Preference order: the ``locus_tag`` qualifier, then ``gene``, then
    ``product`` (first value of whichever is present); if none exists the
    fallback is a ``<start>_<end>_<strand>`` location string. When
    ``parent_prefix`` is given it is prepended with a ``|`` separator.
    """
    prefix = '' if parent_prefix is None else parent_prefix + '|'
    qualifiers = feature.qualifiers
    for qualifier_key in ('locus_tag', 'gene', 'product'):
        if qualifier_key in qualifiers:
            return prefix + qualifiers[qualifier_key][0]
    location = feature.location
    return prefix + '%s_%s_%s' % (location.start, location.end,
                                  location.strand)
def ensure_location_in_bounds(start=0, end=0, parent_length=0):
    """
    Shift ``start`` and ``end`` into ``[0, parent_length]`` in 3-base steps.

    Moving in steps of three keeps the reading frame intact (prevents
    frameshift errors). Each coordinate is adjusted independently: first
    raised out of negative territory, then lowered below ``parent_length``.
    """
    def _clamp_in_frame(coord):
        # Raise below-zero coordinates, then lower those past the parent.
        while coord < 0:
            coord += 3
        while coord > parent_length:
            coord -= 3
        return coord

    return (_clamp_in_frame(start), _clamp_in_frame(end))
def extract_features(genbank_file=None, tag='CDS', translate=False,
                     n_bases_upstream=0, n_bases_downstream=0,
                     strip_stops=False, translation_table_id=11, informative=False):
    """
    Yield selected features of a GenBank file as single-record lists.

    NOTE: this file is Python 2 (``except Exception, e`` syntax below).

    For every feature whose ``type`` is in ``tag``, the feature sequence is
    extracted (optionally extended upstream/downstream in a strand-aware
    way), optionally translated with Biopython, and yielded as a one-element
    list containing a SeqRecord whose id is ``gb|<record>|lcl|<feature id>``.
    """
    for record in SeqIO.parse(genbank_file, "genbank"):
        for feature in record.features:
            if feature.type in tag:
                # Find new feature boundaries
                start = int(feature.location.start)
                end = int(feature.location.end)
                strand = feature.location.strand
                if n_bases_downstream != 0:
                    # If we want extra on the end we cannot listen to
                    # stop_stripping requests
                    # Downstream is past the 3' end, so it depends on strand.
                    if strand > 0:
                        end += n_bases_downstream
                    else:
                        start -= n_bases_downstream
                # n_bases_upstream
                if strand > 0:
                    start -= n_bases_upstream
                else:
                    end += n_bases_upstream
                # Assemble the pieces to extract: optional upstream flank,
                # the feature itself, optional downstream flank.
                __seqs = []
                # Upstream addition
                if n_bases_upstream > 0:
                    __seqs.append(SeqFeature(FeatureLocation(start,
                                                             int(feature.location.start),
                                                             strand=strand),
                                             type='domain'))
                __seqs.append(feature)
                # Downstream addition
                if n_bases_downstream > 0:
                    __seqs.append(SeqFeature(FeatureLocation(int(feature.location.end),
                                                             end,
                                                             strand=strand),
                                             type='domain'))
                if translate:
                    extracted_seqs = []
                    for x in __seqs:
                        try:
                            # Strict CDS translation first; on failure, warn
                            # and retry leniently (cds=False).
                            y = x.extract(record.seq).translate(table=translation_table_id, cds=True)
                            extracted_seqs.append(y)
                        except Exception, bdct:
                            log.warn("WARN %s %s %s", record.name, get_id(x), bdct)
                            try:
                                y = x.extract(record.seq).translate(table=translation_table_id, cds=False)
                                extracted_seqs.append(y)
                            except Exception, bcdt2:
                                # Feature is silently dropped from output
                                # after logging the error.
                                log.warn("ERROR %s %s %s", record.name, get_id(x), bcdt2)
                else:
                    extracted_seqs = [x.extract(record.seq) for x in __seqs]
                # Defline carries the (possibly extended) coordinates; the
                # informative flavour prepends the product qualifier(s).
                if informative:
                    defline = ' %s [start=%s,end=%s]' % (','.join(feature.qualifiers.get('product', [])), start, end)
                else:
                    defline = ' [start=%s,end=%s]' % (start, end)
                extracted_seq = ''.join(map(str, extracted_seqs))
                if strip_stops:
                    extracted_seq = extracted_seq.replace('*', '')
                yield [
                    SeqRecord(
                        Seq(extracted_seq.strip()),
                        id='gb|%s|lcl|%s' % (record.name, get_id(feature)),
                        description=defline
                    )
                ]
if __name__ == '__main__':
    # Grab all of the filters from our plugin loader
    # Full list of selectable GenBank feature types ("all" plus the
    # standard feature keys).
    gbk_tags = ["all", "-10_signal", "-35_signal", "3'UTR", "5'UTR",
                "CAAT_signal", "CDS", "C_region", "D-loop", "D_segment",
                "GC_signal", "J_segment", "LTR", "N_region", "RBS", "STS",
                "S_region", "TATA_signal", "V_region", "V_segment",
                "assembly_gap", "attenuator", "enhancer", "exon", "gap",
                "gene", "iDNA", "intron", "mRNA", "mat_peptide", "misc_RNA",
                "misc_binding", "misc_difference", "misc_feature",
                "misc_recomb", "misc_signal", "misc_structure",
                "mobile_element", "modified_base", "ncRNA", "old_sequence",
                "operon", "oriT", "polyA_signal", "polyA_site",
                "precursor_RNA", "prim_transcript", "primer_bind", "promoter",
                "protein_bind", "rRNA", "rep_origin", "repeat_region",
                "sig_peptide", "source", "stem_loop", "tRNA", "terminator",
                "tmRNA", "transit_peptide", "unsure", "variation"]
    parser = argparse.ArgumentParser(description='Export a subset of features from a Genbank file', epilog="")
    # NOTE: ``type=file`` is the Python 2 builtin; argparse opens the path.
    parser.add_argument('genbank_file', type=file, help='Genbank file')
    parser.add_argument('tag', nargs='+', type=str, choices=gbk_tags, help='tags to export')
    parser.add_argument('--translate', action='store_true', help='Translate sequence')
    # NOTE(review): no ``type=int`` here, so a CLI-supplied value stays a
    # string while the default is the int 11 -- confirm downstream accepts
    # both.
    parser.add_argument('--translation_table_id', help='Translation table ID', default=11)
    parser.add_argument('--n_bases_upstream', type=int, help='Add N bases upstream to exported features', default=0)
    parser.add_argument('--n_bases_downstream', type=int, help='Add N bases downstream to exported features', default=0)
    parser.add_argument('--strip_stops', action='store_true', help='Remove stop codons')
    parser.add_argument('--informative', action='store_true', help='More informative deflines')
    # Argument names match extract_features() parameters, so the parsed
    # namespace can be splatted straight into the call.
    args = vars(parser.parse_args())
    for seq in extract_features(**args):
        SeqIO.write(seq, sys.stdout, 'fasta')
| 44.503356 | 120 | 0.527975 |
import sys
import argparse
import logging
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.SeqFeature import SeqFeature, FeatureLocation
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def get_id(feature=None, parent_prefix=None):
result = ""
if parent_prefix is not None:
result += parent_prefix + '|'
if 'locus_tag' in feature.qualifiers:
result += feature.qualifiers['locus_tag'][0]
elif 'gene' in feature.qualifiers:
result += feature.qualifiers['gene'][0]
elif 'product' in feature.qualifiers:
result += feature.qualifiers['product'][0]
else:
result += '%s_%s_%s' % (feature.location.start, feature.location.end,
feature.location.strand)
return result
def ensure_location_in_bounds(start=0, end=0, parent_length=0):
while start < 0:
start += 3
while end < 0:
end += 3
while start > parent_length:
start -= 3
while end > parent_length:
end -= 3
return (start, end)
def extract_features(genbank_file=None, tag='CDS', translate=False,
n_bases_upstream=0, n_bases_downstream=0,
strip_stops=False, translation_table_id=11, informative=False):
for record in SeqIO.parse(genbank_file, "genbank"):
for feature in record.features:
if feature.type in tag:
start = int(feature.location.start)
end = int(feature.location.end)
strand = feature.location.strand
if n_bases_downstream != 0:
if strand > 0:
end += n_bases_downstream
else:
start -= n_bases_downstream
if strand > 0:
start -= n_bases_upstream
else:
end += n_bases_upstream
__seqs = []
if n_bases_upstream > 0:
__seqs.append(SeqFeature(FeatureLocation(start,
int(feature.location.start),
strand=strand),
type='domain'))
__seqs.append(feature)
if n_bases_downstream > 0:
__seqs.append(SeqFeature(FeatureLocation(int(feature.location.end),
end,
strand=strand),
type='domain'))
if translate:
extracted_seqs = []
for x in __seqs:
try:
y = x.extract(record.seq).translate(table=translation_table_id, cds=True)
extracted_seqs.append(y)
except Exception, bdct:
log.warn("WARN %s %s %s", record.name, get_id(x), bdct)
try:
y = x.extract(record.seq).translate(table=translation_table_id, cds=False)
extracted_seqs.append(y)
except Exception, bcdt2:
log.warn("ERROR %s %s %s", record.name, get_id(x), bcdt2)
else:
extracted_seqs = [x.extract(record.seq) for x in __seqs]
if informative:
defline = ' %s [start=%s,end=%s]' % (','.join(feature.qualifiers.get('product', [])), start, end)
else:
defline = ' [start=%s,end=%s]' % (start, end)
extracted_seq = ''.join(map(str, extracted_seqs))
if strip_stops:
extracted_seq = extracted_seq.replace('*', '')
yield [
SeqRecord(
Seq(extracted_seq.strip()),
id='gb|%s|lcl|%s' % (record.name, get_id(feature)),
description=defline
)
]
if __name__ == '__main__':
gbk_tags = ["all", "-10_signal", "-35_signal", "3'UTR", "5'UTR",
"CAAT_signal", "CDS", "C_region", "D-loop", "D_segment",
"GC_signal", "J_segment", "LTR", "N_region", "RBS", "STS",
"S_region", "TATA_signal", "V_region", "V_segment",
"assembly_gap", "attenuator", "enhancer", "exon", "gap",
"gene", "iDNA", "intron", "mRNA", "mat_peptide", "misc_RNA",
"misc_binding", "misc_difference", "misc_feature",
"misc_recomb", "misc_signal", "misc_structure",
"mobile_element", "modified_base", "ncRNA", "old_sequence",
"operon", "oriT", "polyA_signal", "polyA_site",
"precursor_RNA", "prim_transcript", "primer_bind", "promoter",
"protein_bind", "rRNA", "rep_origin", "repeat_region",
"sig_peptide", "source", "stem_loop", "tRNA", "terminator",
"tmRNA", "transit_peptide", "unsure", "variation"]
parser = argparse.ArgumentParser(description='Export a subset of features from a Genbank file', epilog="")
parser.add_argument('genbank_file', type=file, help='Genbank file')
parser.add_argument('tag', nargs='+', type=str, choices=gbk_tags, help='tags to export')
parser.add_argument('--translate', action='store_true', help='Translate sequence')
parser.add_argument('--translation_table_id', help='Translation table ID', default=11)
parser.add_argument('--n_bases_upstream', type=int, help='Add N bases upstream to exported features', default=0)
parser.add_argument('--n_bases_downstream', type=int, help='Add N bases downstream to exported features', default=0)
parser.add_argument('--strip_stops', action='store_true', help='Remove stop codons')
parser.add_argument('--informative', action='store_true', help='More informative deflines')
args = vars(parser.parse_args())
for seq in extract_features(**args):
SeqIO.write(seq, sys.stdout, 'fasta')
| false | true |
f72adfbd0b4913e9c0e119e52b6aa8237cc00b2a | 2,757 | py | Python | tools/count_opsize.py | VDIGPKU/OPANAS | 873ff09a65d3253ce8351e54880a642517f7e8b5 | [
"Apache-2.0"
] | 39 | 2021-03-31T21:15:48.000Z | 2022-03-30T03:34:14.000Z | tools/count_opsize.py | VDIGPKU/OPANAS | 873ff09a65d3253ce8351e54880a642517f7e8b5 | [
"Apache-2.0"
] | 8 | 2021-04-06T07:58:03.000Z | 2022-01-11T17:10:51.000Z | tools/count_opsize.py | VDIGPKU/OPANAS | 873ff09a65d3253ce8351e54880a642517f7e8b5 | [
"Apache-2.0"
] | 4 | 2021-04-06T03:28:56.000Z | 2022-03-06T19:57:50.000Z | import argparse
import os
import warnings
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.cnn import fuse_conv_bn
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint
from mmcv.runner import (HOOKS, DistSamplerSeedHook, EpochBasedRunner,
OptimizerHook, build_optimizer)
from mmdet.apis import multi_gpu_test_search, single_gpu_test_search
from mmdet.core import wrap_fp16_model
from mmdet.datasets import (build_dataloader, build_dataset,
replace_ImageToTensor)
from mmdet.models import build_detector
import numpy as np
from torch.autograd import Variable
import collections
import sys
import time
import copy
from mmdet.core import encode_mask_results, tensor2imgs
import logging
sys.setrecursionlimit(10000)
import argparse
import torch.distributed as dist
import functools
import random
import os
from mmdet.models.necks.spos_opsc import OPS
PRIMITIVES = ['TDM_dcn', 'BUM_dcn', 'PCONV_dcn', 'FSM_dcn']
def countop(paths, channel):
    """
    Sum the ``size`` and ``fp`` attributes over a sequence of ops.

    Each entry of ``paths`` names an op in the ``OPS`` registry; the op is
    instantiated with ``channel`` input and output channels and its
    ``size`` and ``fp`` attributes (presumably parameter count and FLOPs
    -- confirm against the op implementations) are accumulated.

    :param paths: iterable of op names (keys into ``OPS``).
    :param channel: channel count used as both input and output width.
    :return: tuple ``(total_size, total_fp)``.
    """
    total_size, total_fp = 0, 0
    for op_name in paths:
        op_instance = OPS[op_name](channel, channel, True, True)
        total_size += op_instance.size
        total_fp += op_instance.fp
    return total_size, total_fp
def parse_args():
    """
    Parse the command line: a single positional ``log`` argument.

    NOTE(review): ``default`` on a plain positional argument is inert --
    argparse still requires the argument unless ``nargs='?'`` is used.
    Kept as-is to preserve the existing CLI behavior.
    """
    arg_parser = argparse.ArgumentParser(description='Train a detector')
    arg_parser.add_argument(
        'log',
        help='train log file path',
        default='./work_dirs/faster_rcnn_r50_sposfpn3_uniform_dcn_p4st12_c64_256_1x_coco/epoch_12_ea_prun_0_20210104_075032.log')
    return arg_parser.parse_args()
def main():
    """
    Extract candidate architectures from a search log and write a summary.

    For every log line containing both '[' and 'AP', the tuple between the
    first '(' and ')' is parsed as op indices plus a trailing channel
    count; ``countop`` sums the ops' ``size``/``fp`` attributes, and one
    summary line per candidate (ops, channel, mAP substring, size, fp) is
    written to a ``.txt`` file next to the log. Parsing stops at the
    'top 50 result' marker.
    """
    args = parse_args()
    print(args)
    name = args.log
    print(os.getcwd())
    print(name)
    # Summary file: same path as the log, with a .txt extension.
    op_name = os.path.splitext(name)[0] + '.txt'
    print(op_name)
    # Context managers so both files are closed (and the summary flushed)
    # even if a malformed line raises; the original left them open.
    with open(name, 'r') as f, open(op_name, 'w') as wf:
        for line in f:
            if '[' in line and 'AP' in line:
                st = line.index('(')
                ed = line.index(')')
                paths = str(line[st + 1:ed])
                paths = paths.split(', ')
                op_paths = [int(i) for i in paths]
                # Last element encodes the channel count; the rest index
                # into PRIMITIVES.
                channel = op_paths[-1]
                cand = [PRIMITIVES[i] for i in op_paths[:-1]]
                opsize, fp = countop(cand, channel)
                ap = line.index('AP')
                # Renamed from `map` to avoid shadowing the builtin.
                map_str = line[ap + 3:ap + 15]
                wf.write(str(cand) + ' ' + str(channel) + ' ' + map_str + ' ' + str(opsize) + ' ' + str(fp) + '\n')
                print(cand, channel, map_str, opsize, fp)
            if 'top 50 result' in line:
                break
if __name__ == '__main__':
main() | 31.689655 | 163 | 0.660863 | import argparse
import os
import warnings
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.cnn import fuse_conv_bn
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint
from mmcv.runner import (HOOKS, DistSamplerSeedHook, EpochBasedRunner,
OptimizerHook, build_optimizer)
from mmdet.apis import multi_gpu_test_search, single_gpu_test_search
from mmdet.core import wrap_fp16_model
from mmdet.datasets import (build_dataloader, build_dataset,
replace_ImageToTensor)
from mmdet.models import build_detector
import numpy as np
from torch.autograd import Variable
import collections
import sys
import time
import copy
from mmdet.core import encode_mask_results, tensor2imgs
import logging
sys.setrecursionlimit(10000)
import argparse
import torch.distributed as dist
import functools
import random
import os
from mmdet.models.necks.spos_opsc import OPS
PRIMITIVES = ['TDM_dcn', 'BUM_dcn', 'PCONV_dcn', 'FSM_dcn']
def countop(paths, channel):
opsize = 0
fp = 0
for path in paths:
op = OPS[path](channel, channel, True, True)
opsize += op.size
fp += op.fp
return opsize, fp
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('log',
help='train log file path',
default='./work_dirs/faster_rcnn_r50_sposfpn3_uniform_dcn_p4st12_c64_256_1x_coco/epoch_12_ea_prun_0_20210104_075032.log')
args = parser.parse_args()
return args
def main():
args = parse_args()
print(args)
name = args.log
print(os.getcwd())
print(name)
op_name = os.path.splitext(name)[0] + '.txt'
print(op_name)
f = open(name, 'r')
wf = open(op_name,'w')
for line in f:
if '[' in line and 'AP' in line:
st = line.index('(')
ed = line.index(')')
paths = str(line[st+1:ed])
paths = paths.split(', ')
op_paths = [int(i) for i in paths]
channel = op_paths[-1]
cand = [PRIMITIVES[i] for i in op_paths[:-1]]
opsize, fp = countop(cand, channel)
ap = line.index('AP')
map = line[ap+3:ap+15]
wf.write(str(cand) + ' ' + str(channel) + ' ' + map + ' ' + str(opsize) + ' ' + str(fp) + '\n')
print(cand, channel, map, opsize, fp)
if 'top 50 result' in line:
break
if __name__ == '__main__':
main() | true | true |
f72ae0a27f7cd75894571c6fa943dd5463f7ef49 | 15,394 | py | Python | tests/test_rtc_parse_aec.py | fyntex/lib-cl-sii-python | b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34 | [
"MIT"
] | 8 | 2020-03-07T19:58:40.000Z | 2021-12-15T13:47:40.000Z | tests/test_rtc_parse_aec.py | fyntex/lib-cl-sii-python | b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34 | [
"MIT"
] | 141 | 2020-01-17T22:47:35.000Z | 2022-03-31T18:29:47.000Z | tests/test_rtc_parse_aec.py | fyntex/lib-cl-sii-python | b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34 | [
"MIT"
] | 3 | 2020-03-07T20:30:02.000Z | 2021-03-22T03:14:26.000Z | from __future__ import annotations
import unittest
from datetime import date, datetime
from cl_sii.dte.data_models import DteDataL1, DteXmlData
from cl_sii.dte.constants import TipoDteEnum
from cl_sii.dte.parse import DTE_XMLNS
from cl_sii.libs import encoding_utils
from cl_sii.libs import tz_utils
from cl_sii.libs import xml_utils
from cl_sii.rut import Rut
from cl_sii.rtc.data_models_aec import CesionAecXml, AecXml
from cl_sii.rtc.parse_aec import AEC_XML_SCHEMA_OBJ, parse_aec_xml, validate_aec_xml
from .utils import read_test_file_bytes
class AecXmlSchemaTest(unittest.TestCase):
    """
    Tests for AEC XML schema.

    Currently only a skipped placeholder: real assertions for
    'AEC_XML_SCHEMA_OBJ' remain to be implemented.
    """
    @unittest.skip("TODO: Implement for 'AEC_XML_SCHEMA_OBJ'.")
    def test_AEC_XML_SCHEMA_OBJ(self):
        # Placeholder: the schema object must at least be importable/loaded.
        self.assertIsNotNone(AEC_XML_SCHEMA_OBJ)
class AecXmlValidatorTest(unittest.TestCase):
    """
    Tests for :func:`validate_aec_xml`.
    """

    def _set_obj_1(self) -> None:
        # Fixture: first AEC sample document.
        self.aec_1_xml_bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
        )

    def _set_obj_2(self) -> None:
        # Fixture: second AEC sample document.
        self.aec_2_xml_bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
        )

    def _assert_validates(self, aec_xml_bytes: bytes) -> None:
        # Shared body of the "ok" tests: the document must pass schema
        # validation, and its root element must be 'AEC' in the DTE
        # namespace.
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        try:
            validate_aec_xml(xml_doc)
        except xml_utils.XmlSchemaDocValidationError as exc:
            self.fail(f'{exc.__class__.__name__} raised')
        expected_xml_root_tag = '{%s}AEC' % DTE_XMLNS
        self.assertEqual(xml_doc.getroottree().getroot().tag, expected_xml_root_tag)

    def test_validate_aec_xml_ok_1(self) -> None:
        self._set_obj_1()
        self._assert_validates(self.aec_1_xml_bytes)

    def test_validate_aec_xml_ok_2(self) -> None:
        self._set_obj_2()
        self._assert_validates(self.aec_2_xml_bytes)

    @unittest.skip("TODO: Implement for 'validate_aec_xml'.")
    def test_validate_aec_xml_fail(self) -> None:
        self.assertIsNotNone(validate_aec_xml)
class AecXmlParserTest(unittest.TestCase):
    """
    Tests for :func:`parse_aec_xml`.

    Each test loads an AEC sample plus its detached signature values and
    DER certificates, then compares the parsed result against a fully
    spelled-out expected ``AecXml`` object.
    """
    def _set_obj_1(self) -> None:
        # Fixture 1: AEC for DTE 76354771-K / tipo 33 / folio 170 with two
        # "cesiones" (SEQ 2).
        aec_xml_bytes: bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
        )
        # Signature value / certificate of the AEC envelope itself.
        aec_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-signature-value-base64.txt',
            ),
        )
        aec_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-cert.der',
        )
        # Signature value / certificate of the DTE embedded in the AEC.
        aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/DTE--76354771-K--33--170-cert.der',
        )
        aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/DTE--76354771-K--33--170-signature-value-base64.txt',
            ),
        )
        self.aec_1_xml_bytes = aec_xml_bytes
        self.aec_1_signature_value = aec_signature_value
        self.aec_1_cert_der_bytes = aec_cert_der_bytes
        self.aec_1_dte_cert_der_bytes = aec_dte_cert_der_bytes
        self.aec_1_dte_signature_value = aec_dte_signature_value
    def _set_obj_2(self) -> None:
        # Fixture 2: AEC for DTE 76399752-9 / tipo 33 / folio 25568 with a
        # single "cesion" (SEQ 1).
        aec_xml_bytes: bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
        )
        aec_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-signature-value-base64.txt',
            ),
        )
        aec_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-cert.der',
        )
        aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/DTE--76399752-9--33--25568-cert.der',
        )
        aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/DTE--76399752-9--33--25568-signature-value-base64.txt',
            ),
        )
        self.aec_2_xml_bytes = aec_xml_bytes
        self.aec_2_signature_value = aec_signature_value
        self.aec_2_cert_der_bytes = aec_cert_der_bytes
        self.aec_2_dte_cert_der_bytes = aec_dte_cert_der_bytes
        self.aec_2_dte_signature_value = aec_dte_signature_value
    def test_parse_aec_xml_ok_1(self) -> None:
        self._set_obj_1()
        aec_xml_bytes = self.aec_1_xml_bytes
        aec_signature_value = self.aec_1_signature_value
        aec_cert_der_bytes = self.aec_1_cert_der_bytes
        aec_dte_signature_value = self.aec_1_dte_signature_value
        aec_dte_cert_der_bytes = self.aec_1_dte_cert_der_bytes
        # Expected parse result, spelled out field by field. String
        # contents (including accents and any typos) mirror the fixture
        # XML exactly.
        expected_output = AecXml(
            dte=DteXmlData(
                emisor_rut=Rut('76354771-K'),
                tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                folio=170,
                fecha_emision_date=date(2019, 4, 1),
                receptor_rut=Rut('96790240-3'),
                monto_total=2996301,
                emisor_razon_social='INGENIERIA ENACON SPA',
                receptor_razon_social='MINERA LOS PELAMBRES',
                fecha_vencimiento_date=None,
                firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
                    dt=datetime(2019, 4, 1, 1, 36, 40),
                    tz=DteXmlData.DATETIME_FIELDS_TZ,
                ),
                signature_value=aec_dte_signature_value,
                signature_x509_cert_der=aec_dte_cert_der_bytes,
                emisor_giro='Ingenieria y Construccion',
                emisor_email='ENACONLTDA@GMAIL.COM',
                receptor_email=None,
            ),
            cedente_rut=Rut('76389992-6'),
            cesionario_rut=Rut('76598556-0'),
            fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
                dt=datetime(2019, 4, 5, 12, 57, 32),
                tz=AecXml.DATETIME_FIELDS_TZ,
            ),
            signature_value=aec_signature_value,
            signature_x509_cert_der=aec_cert_der_bytes,
            cesiones=[
                # First cesion (seq=1): original issuer assigns the DTE.
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76354771-K'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=170,
                        fecha_emision_date=date(2019, 4, 1),
                        receptor_rut=Rut('96790240-3'),
                        monto_total=2996301,
                    ),
                    seq=1,
                    cedente_rut=Rut('76354771-K'),
                    cesionario_rut=Rut('76389992-6'),
                    monto_cesion=2996301,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 1, 10, 22, 2),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 5, 1),
                    cedente_razon_social='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIMITADA',
                    cedente_direccion='MERCED 753 16 ARBOLEDA DE QUIILOTA',
                    cedente_email='enaconltda@gmail.com',
                    cedente_persona_autorizada_rut=Rut('76354771-K'),
                    cedente_persona_autorizada_nombre='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIM',
                    cesionario_razon_social='ST CAPITAL S.A.',
                    cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
                    dte_deudor_email=None,
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA '
                        'LIMITADA, RUT 76354771-K ha puesto a disposición del cesionario ST '
                        'CAPITAL S.A., RUT 76389992-6, el o los documentos donde constan los '
                        'recibos de las mercaderías entregadas o servicios prestados, entregados '
                        'por parte del deudor de la factura MINERA LOS PELAMBRES, RUT 96790240-3, '
                        'deacuerdo a lo establecido en la Ley N°19.983.'
                    ),
                ),
                # Second cesion (seq=2): re-assignment to the final holder.
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76354771-K'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=170,
                        fecha_emision_date=date(2019, 4, 1),
                        receptor_rut=Rut('96790240-3'),
                        monto_total=2996301,
                    ),
                    seq=2,
                    cedente_rut=Rut('76389992-6'),
                    cesionario_rut=Rut('76598556-0'),
                    monto_cesion=2996301,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 5, 12, 57, 32),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 5, 1),
                    cedente_razon_social='ST CAPITAL S.A.',
                    cedente_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cedente_email='APrat@Financiaenlinea.com',
                    cesionario_razon_social='Fondo de Inversión Privado Deuda y Facturas',
                    cesionario_direccion='Arrayan 2750 Oficina 703 Providencia',
                    cesionario_email='solicitudes@stcapital.cl',
                    cedente_persona_autorizada_rut=Rut('16360379-9'),
                    cedente_persona_autorizada_nombre='ANDRES PRATS VIAL',
                    dte_deudor_email=None,
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que ST CAPITAL S.A., RUT 76389992-6 ha puesto '
                        'a disposicion del cesionario Fondo de Inversión Privado Deuda y Facturas, '
                        'RUT 76598556-0, el documento validamente emitido al deudor MINERA LOS '
                        'PELAMBRES, RUT 96790240-3.'
                    ),
                ),
            ],
            contacto_nombre='ST Capital Servicios Financieros',
            contacto_telefono=None,
            contacto_email='APrat@Financiaenlinea.com',
        )
        # Parse the fixture and compare against the expected value.
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        aec_xml = parse_aec_xml(xml_doc)
        self.assertEqual(aec_xml, expected_output)
    def test_parse_aec_xml_ok_2(self) -> None:
        self._set_obj_2()
        aec_xml_bytes = self.aec_2_xml_bytes
        aec_signature_value = self.aec_2_signature_value
        aec_cert_der_bytes = self.aec_2_cert_der_bytes
        aec_dte_signature_value = self.aec_2_dte_signature_value
        aec_dte_cert_der_bytes = self.aec_2_dte_cert_der_bytes
        # Expected parse result for the single-cesion fixture.
        expected_output = AecXml(
            dte=DteXmlData(
                emisor_rut=Rut('76399752-9'),
                tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                folio=25568,
                fecha_emision_date=date(2019, 3, 29),
                receptor_rut=Rut('96874030-K'),
                monto_total=230992,
                emisor_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
                receptor_razon_social='EMPRESAS LA POLAR S.A.',
                fecha_vencimiento_date=None,
                firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
                    dt=datetime(2019, 3, 28, 13, 59, 52),
                    tz=DteXmlData.DATETIME_FIELDS_TZ,
                ),
                signature_value=aec_dte_signature_value,
                signature_x509_cert_der=aec_dte_cert_der_bytes,
                emisor_giro='COMERCIALIZACION DE PRODUCTOS PARA EL HOGAR',
                emisor_email='ANGEL.PEZO@APCASESORIAS.CL',
                receptor_email=None,
            ),
            cedente_rut=Rut('76399752-9'),
            cesionario_rut=Rut('76389992-6'),
            fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
                dt=datetime(2019, 4, 4, 9, 9, 52),
                tz=AecXml.DATETIME_FIELDS_TZ,
            ),
            signature_value=aec_signature_value,
            signature_x509_cert_der=aec_cert_der_bytes,
            cesiones=[
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76399752-9'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=25568,
                        fecha_emision_date=date(2019, 3, 29),
                        receptor_rut=Rut('96874030-K'),
                        monto_total=230992,
                    ),
                    seq=1,
                    cedente_rut=Rut('76399752-9'),
                    cesionario_rut=Rut('76389992-6'),
                    monto_cesion=230992,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 4, 9, 9, 52),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 4, 28),
                    cedente_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
                    cedente_direccion='LOS CIPRESES 2834',
                    cedente_email='camilo.perez@innovamobel.cl',
                    cedente_persona_autorizada_rut=Rut('76399752-9'),
                    cedente_persona_autorizada_nombre='COMERCIALIZADORA INNOVA MOBEL SPA',
                    cesionario_razon_social='ST CAPITAL S.A.',
                    cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
                    dte_deudor_email=None,
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que COMERCIALIZADORA INNOVA MOBEL SPA, RUT '
                        '76399752-9 ha puesto a disposición del cesionario ST CAPITAL S.A., RUT '
                        '76389992-6, el o los documentos donde constan los recibos de las '
                        'mercaderías entregadas o servicios prestados, entregados por parte del '
                        'deudor de la factura EMPRESAS LA POLAR S.A., RUT 96874030-K, deacuerdo a '
                        'lo establecido en la Ley N°19.983.'
                    ),
                ),
            ],
            contacto_nombre=None,
            contacto_telefono=None,
            contacto_email='fynpal-app-notif-st-capital@fynpal.com',
        )
        # Parse the fixture and compare against the expected value.
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        aec_xml = parse_aec_xml(xml_doc)
        self.assertEqual(aec_xml, expected_output)
| 44.235632 | 100 | 0.59023 | from __future__ import annotations
import unittest
from datetime import date, datetime
from cl_sii.dte.data_models import DteDataL1, DteXmlData
from cl_sii.dte.constants import TipoDteEnum
from cl_sii.dte.parse import DTE_XMLNS
from cl_sii.libs import encoding_utils
from cl_sii.libs import tz_utils
from cl_sii.libs import xml_utils
from cl_sii.rut import Rut
from cl_sii.rtc.data_models_aec import CesionAecXml, AecXml
from cl_sii.rtc.parse_aec import AEC_XML_SCHEMA_OBJ, parse_aec_xml, validate_aec_xml
from .utils import read_test_file_bytes
class AecXmlSchemaTest(unittest.TestCase):
@unittest.skip("TODO: Implement for 'AEC_XML_SCHEMA_OBJ'.")
def test_AEC_XML_SCHEMA_OBJ(self):
self.assertIsNotNone(AEC_XML_SCHEMA_OBJ)
class AecXmlValidatorTest(unittest.TestCase):
def _set_obj_1(self) -> None:
aec_xml_bytes: bytes = read_test_file_bytes(
'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
)
self.aec_1_xml_bytes = aec_xml_bytes
def _set_obj_2(self) -> None:
aec_xml_bytes: bytes = read_test_file_bytes(
'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
)
self.aec_2_xml_bytes = aec_xml_bytes
def test_validate_aec_xml_ok_1(self) -> None:
self._set_obj_1()
aec_xml_bytes = self.aec_1_xml_bytes
xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
try:
validate_aec_xml(xml_doc)
except xml_utils.XmlSchemaDocValidationError as exc:
self.fail(f'{exc.__class__.__name__} raised')
expected_xml_root_tag = '{%s}AEC' % DTE_XMLNS
self.assertEqual(xml_doc.getroottree().getroot().tag, expected_xml_root_tag)
def test_validate_aec_xml_ok_2(self) -> None:
self._set_obj_2()
aec_xml_bytes = self.aec_2_xml_bytes
xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
try:
validate_aec_xml(xml_doc)
except xml_utils.XmlSchemaDocValidationError as exc:
self.fail(f'{exc.__class__.__name__} raised')
expected_xml_root_tag = '{%s}AEC' % DTE_XMLNS
self.assertEqual(xml_doc.getroottree().getroot().tag, expected_xml_root_tag)
@unittest.skip("TODO: Implement for 'validate_aec_xml'.")
def test_validate_aec_xml_fail(self) -> None:
self.assertIsNotNone(validate_aec_xml)
class AecXmlParserTest(unittest.TestCase):
def _set_obj_1(self) -> None:
aec_xml_bytes: bytes = read_test_file_bytes(
'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
)
aec_signature_value: bytes = encoding_utils.decode_base64_strict(
read_test_file_bytes(
'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-signature-value-base64.txt',
),
)
aec_cert_der_bytes: bytes = read_test_file_bytes(
'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-cert.der',
)
aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
'test_data/sii-crypto/DTE--76354771-K--33--170-cert.der',
)
aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
read_test_file_bytes(
'test_data/sii-crypto/DTE--76354771-K--33--170-signature-value-base64.txt',
),
)
self.aec_1_xml_bytes = aec_xml_bytes
self.aec_1_signature_value = aec_signature_value
self.aec_1_cert_der_bytes = aec_cert_der_bytes
self.aec_1_dte_cert_der_bytes = aec_dte_cert_der_bytes
self.aec_1_dte_signature_value = aec_dte_signature_value
def _set_obj_2(self) -> None:
aec_xml_bytes: bytes = read_test_file_bytes(
'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
)
aec_signature_value: bytes = encoding_utils.decode_base64_strict(
read_test_file_bytes(
'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-signature-value-base64.txt',
),
)
aec_cert_der_bytes: bytes = read_test_file_bytes(
'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-cert.der',
)
aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
'test_data/sii-crypto/DTE--76399752-9--33--25568-cert.der',
)
aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
read_test_file_bytes(
'test_data/sii-crypto/DTE--76399752-9--33--25568-signature-value-base64.txt',
),
)
self.aec_2_xml_bytes = aec_xml_bytes
self.aec_2_signature_value = aec_signature_value
self.aec_2_cert_der_bytes = aec_cert_der_bytes
self.aec_2_dte_cert_der_bytes = aec_dte_cert_der_bytes
self.aec_2_dte_signature_value = aec_dte_signature_value
def test_parse_aec_xml_ok_1(self) -> None:
self._set_obj_1()
aec_xml_bytes = self.aec_1_xml_bytes
aec_signature_value = self.aec_1_signature_value
aec_cert_der_bytes = self.aec_1_cert_der_bytes
aec_dte_signature_value = self.aec_1_dte_signature_value
aec_dte_cert_der_bytes = self.aec_1_dte_cert_der_bytes
expected_output = AecXml(
dte=DteXmlData(
emisor_rut=Rut('76354771-K'),
tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
folio=170,
fecha_emision_date=date(2019, 4, 1),
receptor_rut=Rut('96790240-3'),
monto_total=2996301,
emisor_razon_social='INGENIERIA ENACON SPA',
receptor_razon_social='MINERA LOS PELAMBRES',
fecha_vencimiento_date=None,
firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
dt=datetime(2019, 4, 1, 1, 36, 40),
tz=DteXmlData.DATETIME_FIELDS_TZ,
),
signature_value=aec_dte_signature_value,
signature_x509_cert_der=aec_dte_cert_der_bytes,
emisor_giro='Ingenieria y Construccion',
emisor_email='ENACONLTDA@GMAIL.COM',
receptor_email=None,
),
cedente_rut=Rut('76389992-6'),
cesionario_rut=Rut('76598556-0'),
fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
dt=datetime(2019, 4, 5, 12, 57, 32),
tz=AecXml.DATETIME_FIELDS_TZ,
),
signature_value=aec_signature_value,
signature_x509_cert_der=aec_cert_der_bytes,
cesiones=[
CesionAecXml(
dte=DteDataL1(
emisor_rut=Rut('76354771-K'),
tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
folio=170,
fecha_emision_date=date(2019, 4, 1),
receptor_rut=Rut('96790240-3'),
monto_total=2996301,
),
seq=1,
cedente_rut=Rut('76354771-K'),
cesionario_rut=Rut('76389992-6'),
monto_cesion=2996301,
fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
dt=datetime(2019, 4, 1, 10, 22, 2),
tz=CesionAecXml.DATETIME_FIELDS_TZ,
),
fecha_ultimo_vencimiento=date(2019, 5, 1),
cedente_razon_social='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIMITADA',
cedente_direccion='MERCED 753 16 ARBOLEDA DE QUIILOTA',
cedente_email='enaconltda@gmail.com',
cedente_persona_autorizada_rut=Rut('76354771-K'),
cedente_persona_autorizada_nombre='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIM',
cesionario_razon_social='ST CAPITAL S.A.',
cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
dte_deudor_email=None,
cedente_declaracion_jurada=(
'Se declara bajo juramento que SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA '
'LIMITADA, RUT 76354771-K ha puesto a disposición del cesionario ST '
'CAPITAL S.A., RUT 76389992-6, el o los documentos donde constan los '
'recibos de las mercaderías entregadas o servicios prestados, entregados '
'por parte del deudor de la factura MINERA LOS PELAMBRES, RUT 96790240-3, '
'deacuerdo a lo establecido en la Ley N°19.983.'
),
),
CesionAecXml(
dte=DteDataL1(
emisor_rut=Rut('76354771-K'),
tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
folio=170,
fecha_emision_date=date(2019, 4, 1),
receptor_rut=Rut('96790240-3'),
monto_total=2996301,
),
seq=2,
cedente_rut=Rut('76389992-6'),
cesionario_rut=Rut('76598556-0'),
monto_cesion=2996301,
fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
dt=datetime(2019, 4, 5, 12, 57, 32),
tz=CesionAecXml.DATETIME_FIELDS_TZ,
),
fecha_ultimo_vencimiento=date(2019, 5, 1),
cedente_razon_social='ST CAPITAL S.A.',
cedente_direccion='Isidora Goyenechea 2939 Oficina 602',
cedente_email='APrat@Financiaenlinea.com',
cesionario_razon_social='Fondo de Inversión Privado Deuda y Facturas',
cesionario_direccion='Arrayan 2750 Oficina 703 Providencia',
cesionario_email='solicitudes@stcapital.cl',
cedente_persona_autorizada_rut=Rut('16360379-9'),
cedente_persona_autorizada_nombre='ANDRES PRATS VIAL',
dte_deudor_email=None,
cedente_declaracion_jurada=(
'Se declara bajo juramento que ST CAPITAL S.A., RUT 76389992-6 ha puesto '
'a disposicion del cesionario Fondo de Inversión Privado Deuda y Facturas, '
'RUT 76598556-0, el documento validamente emitido al deudor MINERA LOS '
'PELAMBRES, RUT 96790240-3.'
),
),
],
contacto_nombre='ST Capital Servicios Financieros',
contacto_telefono=None,
contacto_email='APrat@Financiaenlinea.com',
)
xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
aec_xml = parse_aec_xml(xml_doc)
self.assertEqual(aec_xml, expected_output)
def test_parse_aec_xml_ok_2(self) -> None:
self._set_obj_2()
aec_xml_bytes = self.aec_2_xml_bytes
aec_signature_value = self.aec_2_signature_value
aec_cert_der_bytes = self.aec_2_cert_der_bytes
aec_dte_signature_value = self.aec_2_dte_signature_value
aec_dte_cert_der_bytes = self.aec_2_dte_cert_der_bytes
expected_output = AecXml(
dte=DteXmlData(
emisor_rut=Rut('76399752-9'),
tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
folio=25568,
fecha_emision_date=date(2019, 3, 29),
receptor_rut=Rut('96874030-K'),
monto_total=230992,
emisor_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
receptor_razon_social='EMPRESAS LA POLAR S.A.',
fecha_vencimiento_date=None,
firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
dt=datetime(2019, 3, 28, 13, 59, 52),
tz=DteXmlData.DATETIME_FIELDS_TZ,
),
signature_value=aec_dte_signature_value,
signature_x509_cert_der=aec_dte_cert_der_bytes,
emisor_giro='COMERCIALIZACION DE PRODUCTOS PARA EL HOGAR',
emisor_email='ANGEL.PEZO@APCASESORIAS.CL',
receptor_email=None,
),
cedente_rut=Rut('76399752-9'),
cesionario_rut=Rut('76389992-6'),
fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
dt=datetime(2019, 4, 4, 9, 9, 52),
tz=AecXml.DATETIME_FIELDS_TZ,
),
signature_value=aec_signature_value,
signature_x509_cert_der=aec_cert_der_bytes,
cesiones=[
CesionAecXml(
dte=DteDataL1(
emisor_rut=Rut('76399752-9'),
tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
folio=25568,
fecha_emision_date=date(2019, 3, 29),
receptor_rut=Rut('96874030-K'),
monto_total=230992,
),
seq=1,
cedente_rut=Rut('76399752-9'),
cesionario_rut=Rut('76389992-6'),
monto_cesion=230992,
fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
dt=datetime(2019, 4, 4, 9, 9, 52),
tz=CesionAecXml.DATETIME_FIELDS_TZ,
),
fecha_ultimo_vencimiento=date(2019, 4, 28),
cedente_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
cedente_direccion='LOS CIPRESES 2834',
cedente_email='camilo.perez@innovamobel.cl',
cedente_persona_autorizada_rut=Rut('76399752-9'),
cedente_persona_autorizada_nombre='COMERCIALIZADORA INNOVA MOBEL SPA',
cesionario_razon_social='ST CAPITAL S.A.',
cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
dte_deudor_email=None,
cedente_declaracion_jurada=(
'Se declara bajo juramento que COMERCIALIZADORA INNOVA MOBEL SPA, RUT '
'76399752-9 ha puesto a disposición del cesionario ST CAPITAL S.A., RUT '
'76389992-6, el o los documentos donde constan los recibos de las '
'mercaderías entregadas o servicios prestados, entregados por parte del '
'deudor de la factura EMPRESAS LA POLAR S.A., RUT 96874030-K, deacuerdo a '
'lo establecido en la Ley N°19.983.'
),
),
],
contacto_nombre=None,
contacto_telefono=None,
contacto_email='fynpal-app-notif-st-capital@fynpal.com',
)
xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
aec_xml = parse_aec_xml(xml_doc)
self.assertEqual(aec_xml, expected_output)
| true | true |
f72ae0f6c794d479f6cdc796193f0e7a465e9821 | 15,152 | py | Python | Perception-Project/project_template.py | renowator/Udacity_Robotics_Projects | 3dc1f1ebff3c33d6bbb031653398ace5beb7f809 | [
"MIT"
] | null | null | null | Perception-Project/project_template.py | renowator/Udacity_Robotics_Projects | 3dc1f1ebff3c33d6bbb031653398ace5beb7f809 | [
"MIT"
] | null | null | null | Perception-Project/project_template.py | renowator/Udacity_Robotics_Projects | 3dc1f1ebff3c33d6bbb031653398ace5beb7f809 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Import modules
import matplotlib.colors
import matplotlib.pyplot as plt
import numpy as np
import sklearn
from sklearn.preprocessing import LabelEncoder
import pickle
from sensor_stick.srv import GetNormals
from sensor_stick.features import compute_color_histograms
from sensor_stick.features import compute_normal_histograms
from visualization_msgs.msg import Marker
from sensor_stick.marker_tools import *
from sensor_stick.msg import DetectedObjectsArray
from sensor_stick.msg import DetectedObject
from sensor_stick.pcl_helper import *
import rospy
import tf
from geometry_msgs.msg import Pose
from std_msgs.msg import Float64
from std_msgs.msg import Int32
from std_msgs.msg import String
from pr2_robot.srv import *
from rospy_message_converter import message_converter
import yaml
# Helper function to get surface normals
def get_normals(cloud):
    """Query the /feature_extractor/get_normals service for surface normals.

    :param cloud: ROS PointCloud2 message to compute normals for.
    :return: the ``cluster`` field of the service response (normals cloud).
    """
    normals_service = rospy.ServiceProxy('/feature_extractor/get_normals', GetNormals)
    response = normals_service(cloud)
    return response.cluster
#Helper function to convert RGB to HSV
def rgb_to_hsv(rgb_list):
    """Convert an [R, G, B] triple of 0-255 values to HSV, each channel in [0, 1].

    :param rgb_list: sequence whose first three entries are R, G, B in 0-255.
    :return: numpy array [h, s, v] as produced by matplotlib.colors.rgb_to_hsv.
    """
    # matplotlib expects channels normalized to [0, 1].
    normalized = [channel / 255.0 for channel in rgb_list[:3]]
    return matplotlib.colors.rgb_to_hsv([[normalized]])[0][0]
# Shared histogram parameters for the feature extractors below.
bins_range=(0, 256)  # histogram value range; matches 0-255 color channel values
nbins = 32  # number of histogram bins per channel
#Helper function to compute color histograms
def compute_color_histograms(cloud, using_hsv=False):
    """Build a normalized 3-channel color histogram feature for a point cloud.

    :param cloud: ROS PointCloud2 whose points carry a packed RGB float field.
    :param using_hsv: when True, convert each color to HSV (scaled to 0-255)
        before binning; otherwise bin the raw RGB values.
    :return: 1-D float64 array of 3 * nbins bins, normalized to sum to 1.
    """
    # Collect one color triple per (non-NaN) point.
    colors = []
    for point in pc2.read_points(cloud, skip_nans=True):
        rgb = float_to_rgb(point[3])
        colors.append(rgb_to_hsv(rgb) * 255 if using_hsv else rgb)

    # Split the per-point colors into the three channels.
    channel_1 = [color[0] for color in colors]
    channel_2 = [color[1] for color in colors]
    channel_3 = [color[2] for color in colors]

    # Bin each channel independently over the shared module-level range.
    per_channel_hists = [
        np.histogram(channel, bins=nbins, range=bins_range)[0]
        for channel in (channel_1, channel_2, channel_3)
    ]

    hist_features = np.concatenate(per_channel_hists).astype(np.float64)
    # Normalize so the feature does not scale with cluster size.
    return hist_features / np.sum(hist_features)
#Helper function to compute normal histograms
def compute_normal_histograms(normal_cloud):
    """Build a normalized histogram feature from surface-normal components.

    :param normal_cloud: ROS PointCloud2 with normal_x/normal_y/normal_z fields.
    :return: 1-D float64 array of 3 * nbins bins, normalized to sum to 1.
    """
    norm_x_vals = []
    norm_y_vals = []
    norm_z_vals = []

    for norm_component in pc2.read_points(normal_cloud,
                                          field_names = ('normal_x', 'normal_y', 'normal_z'),
                                          skip_nans=True):
        norm_x_vals.append(norm_component[0])
        norm_y_vals.append(norm_component[1])
        norm_z_vals.append(norm_component[2])

    # BUGFIX: unit surface-normal components lie in [-1, 1].  The previous
    # shared color range (0, 256) made np.histogram drop every negative value
    # and collapse the remainder into a single bin, so this feature carried
    # no information.  (The pickled model must be retrained with this fix.)
    normal_range = (-1.0, 1.0)
    x_hist = np.histogram(norm_x_vals, bins=nbins, range=normal_range)
    y_hist = np.histogram(norm_y_vals, bins=nbins, range=normal_range)
    z_hist = np.histogram(norm_z_vals, bins=nbins, range=normal_range)

    hist_features = np.concatenate((x_hist[0], y_hist[0], z_hist[0])).astype(np.float64)
    # Normalize so the feature does not scale with the number of points.
    normed_features = hist_features / np.sum(hist_features)
    return normed_features
# Helper function to create a yaml friendly dictionary from ROS messages
def make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose):
    """Assemble one pick-place request as a YAML-friendly dictionary.

    :param test_scene_num: std_msgs/Int32 holding the scene number.
    :param arm_name: std_msgs/String, 'left' or 'right'.
    :param object_name: std_msgs/String with the pick-list object name.
    :param pick_pose: geometry_msgs/Pose at the object's centroid.
    :param place_pose: geometry_msgs/Pose of the target dropbox.
    :return: dict of plain Python values ready for yaml.dump().
    """
    # (Removed a stray debug print of the argument types.)
    yaml_dict = {}
    yaml_dict["test_scene_num"] = test_scene_num.data
    yaml_dict["arm_name"] = arm_name.data
    yaml_dict["object_name"] = object_name.data
    # Pose messages are nested, so convert them to plain dictionaries.
    yaml_dict["pick_pose"] = message_converter.convert_ros_message_to_dictionary(pick_pose)
    yaml_dict["place_pose"] = message_converter.convert_ros_message_to_dictionary(place_pose)
    return yaml_dict
# Helper function to output to yaml file
def send_to_yaml(yaml_filename, dict_list):
    """Write the list of pick-place request dicts to a YAML file.

    :param yaml_filename: path of the output file (overwritten if present).
    :param dict_list: list of dicts produced by make_yaml_dict().
    """
    data_dict = {"object_list": dict_list}
    # 'w' truncates any previous output; the old 'w+' (read+write) mode was
    # unnecessary since the file is only written.  Debug print removed.
    with open(yaml_filename, 'w') as outfile:
        yaml.dump(data_dict, outfile, default_flow_style=False)
# Callback function for your Point Cloud Subscriber
def pcl_callback(pcl_msg):
    """Full perception pipeline for one incoming camera point cloud.

    Downsamples, denoises and crops the scene, separates the table plane
    from the objects on it, clusters the object points, classifies each
    cluster with the pre-trained SVM, publishes the intermediate clouds and
    RViz labels, and finally hands the detections to pr2_mover().

    :param pcl_msg: sensor_msgs/PointCloud2 from the /pr2/world/points topic.
    """
    # Convert ROS msg to PCL data.
    pcl_data=ros_to_pcl(pcl_msg)
    # Voxel grid downsampling to reduce the point count.
    vox = pcl_data.make_voxel_grid_filter()
    # Leaf (voxel) size in meters; 8 mm keeps enough detail for classification.
    LEAF_SIZE = 0.008
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()
    # Statistical outlier removal to discard sensor noise.
    cloud_filter = cloud_filtered.make_statistical_outlier_filter()
    # Number of neighboring points to analyze for any given point.
    cloud_filter.set_mean_k(50)
    # Threshold scale factor: points beyond mean + x*std_dev count as outliers.
    x = 1.0
    cloud_filter.set_std_dev_mul_thresh(x)
    cloud_filtered = cloud_filter.filter()
    # Pass-through crop along z keeps only the table-top height band.
    passthrough1 = cloud_filtered.make_passthrough_filter()
    filter_axis1 = 'z'
    passthrough1.set_filter_field_name(filter_axis1)
    axis_min1 = 0.6
    axis_max1 = 1.1
    passthrough1.set_filter_limits(axis_min1, axis_max1)
    cloud_p1_filtered = passthrough1.filter()
    # Second crop along y trims points beyond the table edges.
    passthrough2 = cloud_p1_filtered.make_passthrough_filter()
    filter_axis2 = 'y'
    passthrough2.set_filter_field_name(filter_axis2)
    axis_min2 = -0.55
    axis_max2 = 0.55
    passthrough2.set_filter_limits(axis_min2, axis_max2)
    cloud_p_filtered = passthrough2.filter()
    # RANSAC plane segmentation: the dominant plane is the table surface.
    seg = cloud_p_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance (m) for a point to be considered fitting the plane model.
    max_distance = 0.03
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()
    # Inliers are the table; outliers are the objects sitting on it.
    extracted_inliers = cloud_p_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_p_filtered.extract(inliers, negative=True)
    # Euclidean clustering on the spatial-only (XYZ) object cloud.
    white_cloud = XYZRGB_to_XYZ(extracted_outliers) # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Tolerance/size bounds tuned so each object forms exactly one cluster.
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(3000)
    # Search the k-d tree for clusters.
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()
    # Build a cloud that colors each cluster differently for visualization.
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])
    # New cloud containing all clusters, each with a unique color.
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)
    # Convert PCL data back to ROS messages and publish for RViz.
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(extracted_outliers)
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    pcl_cluster_cloud_pub.publish(ros_cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    # Classify the clusters with the SVM loaded in __main__.
    detected_objects_labels = []
    detected_objects = []
    labeled_features =[]
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for this cluster.
        pcl_cluster = extracted_outliers.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Feature vector = HSV color histogram + surface-normal histogram.
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists)).astype(np.float64)
        # Predict the label (scaler/clf/encoder come from the pickled model).
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a text label into RViz slightly above the cluster.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))
        # Record the detection for pr2_mover().
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)
    # Publish the list of detected objects.
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    detected_objects_pub.publish(detected_objects)
    # Hand the detections to the pick-and-place routine; swallow only the
    # ROS shutdown interrupt so the callback can exit cleanly.
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
# function to load parameters and request PickPlace service
def pr2_mover(detected):
    """Match detections against the pick list and write the YAML output.

    For every entry in the /object_list ROS parameter, looks up the detected
    cluster with the same label, uses its centroid as the pick pose, picks
    the dropbox (and arm) matching the object's group, and appends the
    request dict to the output YAML file.

    :param detected: list of DetectedObject messages from pcl_callback().
    """
    # Reusable ROS message containers for the request fields.
    test_scene_num = Int32()
    object_name = String()
    arm_name = String()
    pick_pose = Pose()
    place_pose = Pose()
    dict_list = []
    yaml_filename = 'output_3.yaml' #Change for different worlds
    test_scene_num.data = 3 #Change for different worlds
    labels = []
    centroids = []
    # Pick list and dropbox locations come from the ROS parameter server.
    object_list_param = rospy.get_param('/object_list')
    dropbox_param = rospy.get_param('/dropbox')
    # Cache each detection's label and cluster centroid (x, y, z).
    for obj in detected:
        labels.append(obj.label)
        points_arr = ros_to_pcl(obj.cloud).to_array()
        centroids.append(np.mean(points_arr, axis=0)[:3])
    # Walk the pick list in order.
    for i in range(0, len(object_list_param)):
        object_name.data = object_list_param[i]['name']
        object_group = object_list_param[i]['group']
        # Pick pose = centroid of the detection with a matching label.
        # NOTE(review): if an object was not detected, pick_pose silently
        # keeps the previous iteration's values — confirm this is intended.
        for j in range(0,len(labels)):
            if object_name.data == labels[j]:
                # np.asscalar converts numpy scalars to native Python floats.
                pick_pose.position.x = np.asscalar(centroids[j][0])
                pick_pose.position.y = np.asscalar(centroids[j][1])
                pick_pose.position.z = np.asscalar(centroids[j][2])
        # Place pose = position of the dropbox serving this object's group.
        for j in range(0, len(dropbox_param)):
            if object_group == dropbox_param[j]['group']:
                place_pose.position.x = dropbox_param[j]['position'][0]
                place_pose.position.y = dropbox_param[j]['position'][1]
                place_pose.position.z = dropbox_param[j]['position'][2]
        # Green bin is served by the right arm, red bin by the left.
        if object_group =='green':
            arm_name.data = 'right'
        elif object_group == 'red':
            arm_name.data = 'left'
        # NOTE(review): debug output left in; consider removing.
        print "Test_num:",type(test_scene_num),"Arm_name:", type(arm_name),"Ob_name:", type(object_name),"Pick_pose:", type(pick_pose),"Place_pose:", type(place_pose)
        yaml_dict = make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose)
        dict_list.append(yaml_dict)
    # Wait for the 'pick_place_routine' service to come up.
    rospy.wait_for_service('pick_place_routine')
    # Actual pick-place service call kept disabled; only YAML output is made.
    #try:
        #pick_place_routine = rospy.ServiceProxy('pick_place_routine', PickPlace)
        #resp = pick_place_routine(test_scene_num, object_name, arm_name, pick_pose, place_pose)
        #print ("Response: ",resp.success)
    #except rospy.ServiceException, e:
        #print "Service call failed: %s"%e
    # Write all accumulated requests to the output YAML file.
    send_to_yaml(yaml_filename, dict_list)
if __name__ == '__main__':
    # ROS node initialization.
    rospy.init_node('clustering', anonymous=True)
    # Subscribe to the simulated RGB-D camera's point cloud.
    pcl_sub = rospy.Subscriber("/pr2/world/points", pc2.PointCloud2, pcl_callback, queue_size=1)
    # Publishers used by pcl_callback() for detections and debug clouds.
    detected_objects_pub = rospy.Publisher("/detected_objects", DetectedObjectsArray, queue_size=1)
    object_markers_pub = rospy.Publisher("/object_markers", Marker, queue_size=1)
    pcl_objects_pub = rospy.Publisher("/pcl_objects", PointCloud2, queue_size=1)
    pcl_table_pub = rospy.Publisher("/pcl_table", PointCloud2, queue_size=1)
    pcl_cluster_cloud_pub = rospy.Publisher("/pcl_clusters", PointCloud2, queue_size=1)
    # Initialize the color list used by get_color_list() for cluster colors.
    get_color_list.color_list = []
    # Load the trained SVM classifier, label encoder classes and scaler.
    # NOTE(review): the file handle is never closed and pickle is unsafe on
    # untrusted data — acceptable for a local training artifact, but confirm
    # 'model.sav' is trusted.
    model = pickle.load(open('model.sav', 'rb'))
    clf = model['classifier']
    encoder = LabelEncoder()
    encoder.classes_ = model['classes']
    scaler = model['scaler']
    # Spin until the node is shut down.
    while not rospy.is_shutdown():
        rospy.spin()
| 38.262626 | 159 | 0.714625 |
import matplotlib.colors
import matplotlib.pyplot as plt
import numpy as np
import sklearn
from sklearn.preprocessing import LabelEncoder
import pickle
from sensor_stick.srv import GetNormals
from sensor_stick.features import compute_color_histograms
from sensor_stick.features import compute_normal_histograms
from visualization_msgs.msg import Marker
from sensor_stick.marker_tools import *
from sensor_stick.msg import DetectedObjectsArray
from sensor_stick.msg import DetectedObject
from sensor_stick.pcl_helper import *
import rospy
import tf
from geometry_msgs.msg import Pose
from std_msgs.msg import Float64
from std_msgs.msg import Int32
from std_msgs.msg import String
from pr2_robot.srv import *
from rospy_message_converter import message_converter
import yaml
def get_normals(cloud):
get_normals_prox = rospy.ServiceProxy('/feature_extractor/get_normals', GetNormals)
return get_normals_prox(cloud).cluster
def rgb_to_hsv(rgb_list):
rgb_normalized = [1.0*rgb_list[0]/255, 1.0*rgb_list[1]/255, 1.0*rgb_list[2]/255]
hsv_normalized = matplotlib.colors.rgb_to_hsv([[rgb_normalized]])[0][0]
return hsv_normalized
bins_range=(0, 256)
nbins = 32
def compute_color_histograms(cloud, using_hsv=False):
point_colors_list = []
for point in pc2.read_points(cloud, skip_nans=True):
rgb_list = float_to_rgb(point[3])
if using_hsv:
point_colors_list.append(rgb_to_hsv(rgb_list) * 255)
else:
point_colors_list.append(rgb_list)
channel_1_vals = []
channel_2_vals = []
channel_3_vals = []
for color in point_colors_list:
channel_1_vals.append(color[0])
channel_2_vals.append(color[1])
channel_3_vals.append(color[2])
h_hist = np.histogram(channel_1_vals, bins=nbins, range=bins_range)
s_hist = np.histogram(channel_2_vals, bins=nbins, range=bins_range)
v_hist = np.histogram(channel_3_vals, bins=nbins, range=bins_range)
hist_features = np.concatenate((h_hist[0], s_hist[0], v_hist[0])).astype(np.float64)
normed_features = hist_features / np.sum(hist_features)
return normed_features
def compute_normal_histograms(normal_cloud):
norm_x_vals = []
norm_y_vals = []
norm_z_vals = []
for norm_component in pc2.read_points(normal_cloud,
field_names = ('normal_x', 'normal_y', 'normal_z'),
skip_nans=True):
norm_x_vals.append(norm_component[0])
norm_y_vals.append(norm_component[1])
norm_z_vals.append(norm_component[2])
x_hist = np.histogram(norm_x_vals, bins=nbins, range =bins_range)
y_hist = np.histogram(norm_y_vals, bins=nbins, range =bins_range)
z_hist = np.histogram(norm_z_vals, bins=nbins, range =bins_range)
hist_features = np.concatenate((x_hist[0], y_hist[0], z_hist[0])).astype(np.float64)
normed_features = hist_features/ np.sum(hist_features)
return normed_features
def make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose):
yaml_dict = {}
yaml_dict["test_scene_num"] = test_scene_num.data
yaml_dict["arm_name"] = arm_name.data
yaml_dict["object_name"] = object_name.data
yaml_dict["pick_pose"] = message_converter.convert_ros_message_to_dictionary(pick_pose)
yaml_dict["place_pose"] = message_converter.convert_ros_message_to_dictionary(place_pose)
print type(yaml_dict["arm_name"]), type(yaml_dict["pick_pose"])
return yaml_dict
def send_to_yaml(yaml_filename, dict_list):
data_dict = {"object_list": dict_list}
with open(yaml_filename, 'w+') as outfile:
yaml.dump(data_dict, outfile, default_flow_style=False)
print "done yaml"
def pcl_callback(pcl_msg):
pcl_data=ros_to_pcl(pcl_msg)
vox = pcl_data.make_voxel_grid_filter()
LEAF_SIZE = 0.008
vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
cloud_filtered = vox.filter()
cloud_filter = cloud_filtered.make_statistical_outlier_filter()
cloud_filter.set_mean_k(50)
x = 1.0
cloud_filter.set_std_dev_mul_thresh(x)
cloud_filtered = cloud_filter.filter()
passthrough1 = cloud_filtered.make_passthrough_filter()
filter_axis1 = 'z'
passthrough1.set_filter_field_name(filter_axis1)
axis_min1 = 0.6
axis_max1 = 1.1
passthrough1.set_filter_limits(axis_min1, axis_max1)
cloud_p1_filtered = passthrough1.filter()
passthrough2 = cloud_p1_filtered.make_passthrough_filter()
filter_axis2 = 'y'
passthrough2.set_filter_field_name(filter_axis2)
axis_min2 = -0.55
axis_max2 = 0.55
passthrough2.set_filter_limits(axis_min2, axis_max2)
cloud_p_filtered = passthrough2.filter()
seg = cloud_p_filtered.make_segmenter()
seg.set_model_type(pcl.SACMODEL_PLANE)
seg.set_method_type(pcl.SAC_RANSAC)
max_distance = 0.03
seg.set_distance_threshold(max_distance)
inliers, coefficients = seg.segment()
extracted_inliers = cloud_p_filtered.extract(inliers, negative=False)
extracted_outliers = cloud_p_filtered.extract(inliers, negative=True)
white_cloud = XYZRGB_to_XYZ(extracted_outliers)
tree = white_cloud.make_kdtree()
ec = white_cloud.make_EuclideanClusterExtraction()
ec.set_ClusterTolerance(0.01)
ec.set_MinClusterSize(50)
ec.set_MaxClusterSize(3000)
ec.set_SearchMethod(tree)
cluster_indices = ec.Extract()
cluster_color = get_color_list(len(cluster_indices))
color_cluster_point_list = []
for j, indices in enumerate(cluster_indices):
for i, indice in enumerate(indices):
color_cluster_point_list.append([white_cloud[indice][0],
white_cloud[indice][1],
white_cloud[indice][2],
rgb_to_float(cluster_color[j])])
cluster_cloud = pcl.PointCloud_PointXYZRGB()
cluster_cloud.from_list(color_cluster_point_list)
ros_cluster_cloud = pcl_to_ros(cluster_cloud)
ros_cloud_objects = pcl_to_ros(extracted_outliers)
ros_cloud_table = pcl_to_ros(extracted_inliers)
pcl_cluster_cloud_pub.publish(ros_cluster_cloud)
pcl_objects_pub.publish(ros_cloud_objects)
pcl_table_pub.publish(ros_cloud_table)
detected_objects_labels = []
detected_objects = []
labeled_features =[]
for index, pts_list in enumerate(cluster_indices):
pcl_cluster = extracted_outliers.extract(pts_list)
ros_cluster = pcl_to_ros(pcl_cluster)
chists = compute_color_histograms(ros_cluster, using_hsv=True)
normals = get_normals(ros_cluster)
nhists = compute_normal_histograms(normals)
feature = np.concatenate((chists, nhists)).astype(np.float64)
prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
label = encoder.inverse_transform(prediction)[0]
detected_objects_labels.append(label)
label_pos = list(white_cloud[pts_list[0]])
label_pos[2] += .4
object_markers_pub.publish(make_label(label,label_pos, index))
do = DetectedObject()
do.label = label
do.cloud = ros_cluster
detected_objects.append(do)
rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
detected_objects_pub.publish(detected_objects)
try:
pr2_mover(detected_objects)
except rospy.ROSInterruptException:
pass
def pr2_mover(detected):
    """Build pick-and-place requests for every object on the pick list.
    Matches each entry of the '/object_list' ROS parameter against the
    detected-object labels, fills in the pick pose (object centroid),
    the place pose (group's dropbox) and the arm to use, then writes
    the collected requests to an output yaml file.
    Args:
        detected: iterable of detected objects, each exposing `.label`
            and a ROS point cloud in `.cloud`.
    """
    # Reusable ROS message objects, mutated on every loop iteration.
    # make_yaml_dict() is presumed to copy their *values* out each time
    # -- TODO confirm, otherwise all appended entries would alias the
    # same message instances.
    test_scene_num = Int32()
    object_name = String()
    arm_name = String()
    pick_pose = Pose()
    place_pose = Pose()
    dict_list = []
    yaml_filename = 'output_3.yaml'
    test_scene_num.data = 3
    labels = []
    centroids = []
    # Pick list and dropbox locations come from the parameter server.
    object_list_param = rospy.get_param('/object_list')
    dropbox_param = rospy.get_param('/dropbox')
    # Compute one (x, y, z) centroid per detected object.
    for obj in detected:
        labels.append(obj.label)
        points_arr = ros_to_pcl(obj.cloud).to_array()
        centroids.append(np.mean(points_arr, axis=0)[:3])
    for i in range(0, len(object_list_param)):
        object_name.data = object_list_param[i]['name']
        object_group = object_list_param[i]['group']
        # Pick pose: centroid of the matching detection. np.asscalar
        # converts numpy floats to native Python floats for yaml output.
        for j in range(0,len(labels)):
            if object_name.data == labels[j]:
                pick_pose.position.x = np.asscalar(centroids[j][0])
                pick_pose.position.y = np.asscalar(centroids[j][1])
                pick_pose.position.z = np.asscalar(centroids[j][2])
        # Place pose: position of the dropbox assigned to this group.
        for j in range(0, len(dropbox_param)):
            if object_group == dropbox_param[j]['group']:
                place_pose.position.x = dropbox_param[j]['position'][0]
                place_pose.position.y = dropbox_param[j]['position'][1]
                place_pose.position.z = dropbox_param[j]['position'][2]
        # Objects in the 'green' group go to the right arm, 'red' left.
        if object_group =='green':
            arm_name.data = 'right'
        elif object_group == 'red':
            arm_name.data = 'left'
        # NOTE(review): if a pick-list object was not detected,
        # pick_pose silently keeps the previous iteration's value and a
        # request is still emitted for it -- confirm this is intended.
        print "Test_num:",type(test_scene_num),"Arm_name:", type(arm_name),"Ob_name:", type(object_name),"Pick_pose:", type(pick_pose),"Place_pose:", type(place_pose)
        yaml_dict = make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose)
        dict_list.append(yaml_dict)
    # Wait for 'pick_place_routine' service to come up.
    rospy.wait_for_service('pick_place_routine')
    # Actual service invocation is intentionally left disabled; only the
    # yaml file is produced.
    #try:
        #pick_place_routine = rospy.ServiceProxy('pick_place_routine', PickPlace)
    # TODO: Insert your message variables to be sent as a service request
        #resp = pick_place_routine(test_scene_num, object_name, arm_name, pick_pose, place_pose)
        #print ("Response: ",resp.success)
    #except rospy.ServiceException, e:
        #print "Service call failed: %s"%e
    # Persist the accumulated pick-place requests.
    send_to_yaml(yaml_filename, dict_list)
if __name__ == '__main__':
    # ROS node initialization.
    rospy.init_node('clustering', anonymous=True)
    # Create the publishers and load the trained classifier *before*
    # registering the subscriber: rospy delivers messages on a background
    # thread, so a point cloud arriving early would invoke pcl_callback()
    # while the globals it relies on (publishers, clf, encoder, scaler,
    # color list) are still undefined and raise NameError.
    detected_objects_pub = rospy.Publisher("/detected_objects", DetectedObjectsArray, queue_size=1)
    object_markers_pub = rospy.Publisher("/object_markers", Marker, queue_size=1)
    pcl_objects_pub = rospy.Publisher("/pcl_objects", PointCloud2, queue_size=1)
    pcl_table_pub = rospy.Publisher("/pcl_table", PointCloud2, queue_size=1)
    pcl_cluster_cloud_pub = rospy.Publisher("/pcl_clusters", PointCloud2, queue_size=1)
    # Initialize the color list used for cluster visualization.
    get_color_list.color_list = []
    # Load the trained SVM model from disk; the context manager closes
    # the file handle instead of leaking it.
    with open('model.sav', 'rb') as model_file:
        model = pickle.load(model_file)
    clf = model['classifier']
    encoder = LabelEncoder()
    encoder.classes_ = model['classes']
    scaler = model['scaler']
    # Subscribe last, once every global the callback touches exists.
    pcl_sub = rospy.Subscriber("/pr2/world/points", pc2.PointCloud2, pcl_callback, queue_size=1)
    # rospy.spin() blocks until shutdown; the loop is kept from the
    # original structure as a guard against early returns.
    while not rospy.is_shutdown():
        rospy.spin()
| false | true |
f72ae161a0eb4e5d0974932d1ca4ef7364cf371f | 152 | py | Python | aiocloudflare/api/zones/dns_records/import_/import_.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 2 | 2021-09-14T13:20:55.000Z | 2022-02-24T14:18:24.000Z | aiocloudflare/api/zones/dns_records/import_/import_.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 46 | 2021-09-08T08:39:45.000Z | 2022-03-29T12:31:05.000Z | aiocloudflare/api/zones/dns_records/import_/import_.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 1 | 2021-12-30T23:02:23.000Z | 2021-12-30T23:02:23.000Z | from aiocloudflare.commons.auth import Auth
class Import_(Auth):
    """Endpoint binding for Cloudflare's DNS-record import API.
    The ``_endpointN`` fragments spell out the URL path
    ``zones/{zone_id}/dns_records/import``; they are presumably
    assembled by the :class:`Auth` base class -- confirm against
    ``aiocloudflare.commons.auth``.
    """
    # URL path fragments consumed by the Auth base class; the trailing
    # None marks that there is no third segment.
    _endpoint1 = "zones"
    _endpoint2 = "dns_records/import"
    _endpoint3 = None
| 19 | 43 | 0.723684 | from aiocloudflare.commons.auth import Auth
class Import_(Auth):
_endpoint1 = "zones"
_endpoint2 = "dns_records/import"
_endpoint3 = None
| true | true |
f72ae291978b1bc7fcf2a7bbfa465ce316156938 | 596 | py | Python | ROSpractice/src/topics_quiz/src/topics_quiz_node.py | kasiv008/Robotics | 302b3336005acd81202ebbbb0c52a4b2692fa9c7 | [
"MIT"
] | 1 | 2021-07-19T10:15:08.000Z | 2021-07-19T10:15:08.000Z | ROSpractice/src/topics_quiz/src/topics_quiz_node.py | kasiv008/Robotics | 302b3336005acd81202ebbbb0c52a4b2692fa9c7 | [
"MIT"
] | null | null | null | ROSpractice/src/topics_quiz/src/topics_quiz_node.py | kasiv008/Robotics | 302b3336005acd81202ebbbb0c52a4b2692fa9c7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
def callback(msg):
    """Laser-scan handler: steer the robot away from frontal obstacles.
    Samples three beams -- index 719 (one extreme of the scan), 360
    (middle, straight ahead) and 0 (other extreme) -- and publishes a
    Twist on /cmd_vel through the module-level `pub`/`move` globals.
    """
    L,M,R = msg.ranges[719],msg.ranges[360],msg.ranges[0]
    move.linear.x = .2
    if M < 1.2:
        # Obstacle closer than 1.2 m dead ahead: crawl and turn.
        move.linear.x = .05
        move.angular.z = .1
    elif L > 30 and R > 30 and M > 30:
        # Fully open space in all three sampled directions: go straight.
        move.linear.x = .2
        move.angular.z = 0
    # NOTE(review): when 1.2 <= M <= 30 (and not all beams open),
    # move.angular.z keeps whatever value the previous callback set,
    # since `move` is a shared Twist -- confirm this is intended.
    pub.publish(move)
# Node entry point: wire up the obstacle-avoidance loop.
rospy.init_node('topics_quiz_node')
# Create the publisher and the shared Twist *before* registering the
# subscriber: rospy invokes callback() on a background thread, and an
# early scan would otherwise raise NameError on the still-undefined
# globals `pub` and `move`. The previously created rospy.Rate(2) was
# never used and has been dropped.
pub = rospy.Publisher('/cmd_vel', Twist)
move = Twist()
sub = rospy.Subscriber('/kobuki/laser/scan',
                       LaserScan, callback)
# Block until shutdown; all work happens in the subscriber callback.
rospy.spin()
| 24.833333 | 57 | 0.642617 |
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
def callback(msg):
L,M,R = msg.ranges[719],msg.ranges[360],msg.ranges[0]
move.linear.x = .2
if M < 1.2:
move.linear.x = .05
move.angular.z = .1
elif L > 30 and R > 30 and M > 30:
move.linear.x = .2
move.angular.z = 0
pub.publish(move)
rospy.init_node('topics_quiz_node')
pub = rospy.Publisher('/cmd_vel',Twist)
sub = rospy.Subscriber('/kobuki/laser/scan',
LaserScan,callback)
rate = rospy.Rate(2)
move = Twist()
rospy.spin()
| true | true |
f72ae3fa136caa90b5e27aab7455fdec4407560e | 2,016 | py | Python | alipay/aop/api/domain/KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel(object):
    """Request model for the produce-QR-code batch-query API.

    Holds three optional request fields (``batch_id``, ``page_size``,
    ``produce_order_id``) and converts between this object and the
    plain-dict wire form used by the SDK.
    """

    def __init__(self):
        self._batch_id = None
        self._page_size = None
        self._produce_order_id = None

    @property
    def batch_id(self):
        """Request field ``batch_id``."""
        return self._batch_id

    @batch_id.setter
    def batch_id(self, value):
        self._batch_id = value

    @property
    def page_size(self):
        """Request field ``page_size``."""
        return self._page_size

    @page_size.setter
    def page_size(self, value):
        self._page_size = value

    @property
    def produce_order_id(self):
        """Request field ``produce_order_id``."""
        return self._produce_order_id

    @produce_order_id.setter
    def produce_order_id(self, value):
        self._produce_order_id = value

    def to_alipay_dict(self):
        """Serialize the truthy fields into a plain dict.

        Nested objects exposing ``to_alipay_dict`` are serialized
        recursively; fields left falsy (e.g. None) are omitted, per the
        SDK convention.
        """
        params = dict()
        for field in ('batch_id', 'page_size', 'produce_order_id'):
            value = getattr(self, field)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                params[field] = value.to_alipay_dict()
            else:
                params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a wire dict; returns None for empty input."""
        if not d:
            return None
        o = KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel()
        for field in ('batch_id', 'page_size', 'produce_order_id'):
            if field in d:
                setattr(o, field, d[field])
        return o
| 28.394366 | 83 | 0.613095 |
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel(object):
def __init__(self):
self._batch_id = None
self._page_size = None
self._produce_order_id = None
@property
def batch_id(self):
return self._batch_id
@batch_id.setter
def batch_id(self, value):
self._batch_id = value
@property
def page_size(self):
return self._page_size
@page_size.setter
def page_size(self, value):
self._page_size = value
@property
def produce_order_id(self):
return self._produce_order_id
@produce_order_id.setter
def produce_order_id(self, value):
self._produce_order_id = value
def to_alipay_dict(self):
params = dict()
if self.batch_id:
if hasattr(self.batch_id, 'to_alipay_dict'):
params['batch_id'] = self.batch_id.to_alipay_dict()
else:
params['batch_id'] = self.batch_id
if self.page_size:
if hasattr(self.page_size, 'to_alipay_dict'):
params['page_size'] = self.page_size.to_alipay_dict()
else:
params['page_size'] = self.page_size
if self.produce_order_id:
if hasattr(self.produce_order_id, 'to_alipay_dict'):
params['produce_order_id'] = self.produce_order_id.to_alipay_dict()
else:
params['produce_order_id'] = self.produce_order_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel()
if 'batch_id' in d:
o.batch_id = d['batch_id']
if 'page_size' in d:
o.page_size = d['page_size']
if 'produce_order_id' in d:
o.produce_order_id = d['produce_order_id']
return o
| true | true |
f72ae4fe9cb98106976c818db916bbe6b063c51a | 2,243 | py | Python | sdk/python/tests/unit/test_feature_views.py | kevjumba/feast | 44d53fda71b5a82d9fb6e044b01d97080c2d018c | [
"Apache-2.0"
] | 810 | 2018-12-25T15:16:11.000Z | 2020-05-14T09:49:40.000Z | sdk/python/tests/unit/test_feature_views.py | kevjumba/feast | 44d53fda71b5a82d9fb6e044b01d97080c2d018c | [
"Apache-2.0"
] | 701 | 2018-12-21T05:18:43.000Z | 2020-05-16T01:30:21.000Z | sdk/python/tests/unit/test_feature_views.py | kevjumba/feast | 44d53fda71b5a82d9fb6e044b01d97080c2d018c | [
"Apache-2.0"
] | 155 | 2018-12-22T11:05:04.000Z | 2020-05-14T07:33:41.000Z | from datetime import timedelta
import pytest
from feast import PushSource
from feast.batch_feature_view import BatchFeatureView
from feast.data_format import AvroFormat
from feast.data_source import KafkaSource
from feast.infra.offline_stores.file_source import FileSource
from feast.stream_feature_view import StreamFeatureView
def test_create_batch_feature_view():
    """BatchFeatureView accepts a batch source and rejects missing/stream sources."""
    ttl = timedelta(days=30)

    # A file-backed batch source is accepted.
    BatchFeatureView(
        name="test batch feature view",
        entities=[],
        ttl=ttl,
        source=FileSource(path="some path"),
    )

    # Omitting the source entirely must be rejected.
    with pytest.raises(ValueError):
        BatchFeatureView(name="test batch feature view", entities=[], ttl=ttl)

    # A streaming (Kafka) source must be rejected as well.
    kafka_source = KafkaSource(
        name="kafka",
        timestamp_field="",
        bootstrap_servers="",
        message_format=AvroFormat(""),
        topic="topic",
        batch_source=FileSource(path="some path"),
    )
    with pytest.raises(ValueError):
        BatchFeatureView(
            name="test batch feature view",
            entities=[],
            ttl=ttl,
            source=kafka_source,
        )
def test_create_stream_feature_view():
    """StreamFeatureView accepts stream/push sources and rejects the rest."""
    ttl = timedelta(days=30)

    # A Kafka stream source is accepted.
    kafka_source = KafkaSource(
        name="kafka",
        timestamp_field="",
        bootstrap_servers="",
        message_format=AvroFormat(""),
        topic="topic",
        batch_source=FileSource(path="some path"),
    )
    StreamFeatureView(
        name="test kafka stream feature view",
        entities=[],
        ttl=ttl,
        source=kafka_source,
    )

    # A push source is accepted too.
    push_source = PushSource(
        name="push source", batch_source=FileSource(path="some path")
    )
    StreamFeatureView(
        name="test push source feature view",
        entities=[],
        ttl=ttl,
        source=push_source,
    )

    # No source at all must be rejected.
    with pytest.raises(ValueError):
        StreamFeatureView(name="test batch feature view", entities=[], ttl=ttl)

    # A plain batch source must be rejected.
    with pytest.raises(ValueError):
        StreamFeatureView(
            name="test batch feature view",
            entities=[],
            ttl=ttl,
            source=FileSource(path="some path"),
        )
| 27.353659 | 79 | 0.628622 | from datetime import timedelta
import pytest
from feast import PushSource
from feast.batch_feature_view import BatchFeatureView
from feast.data_format import AvroFormat
from feast.data_source import KafkaSource
from feast.infra.offline_stores.file_source import FileSource
from feast.stream_feature_view import StreamFeatureView
def test_create_batch_feature_view():
batch_source = FileSource(path="some path")
BatchFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=batch_source,
)
with pytest.raises(ValueError):
BatchFeatureView(
name="test batch feature view", entities=[], ttl=timedelta(days=30)
)
stream_source = KafkaSource(
name="kafka",
timestamp_field="",
bootstrap_servers="",
message_format=AvroFormat(""),
topic="topic",
batch_source=FileSource(path="some path"),
)
with pytest.raises(ValueError):
BatchFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=stream_source,
)
def test_create_stream_feature_view():
stream_source = KafkaSource(
name="kafka",
timestamp_field="",
bootstrap_servers="",
message_format=AvroFormat(""),
topic="topic",
batch_source=FileSource(path="some path"),
)
StreamFeatureView(
name="test kafka stream feature view",
entities=[],
ttl=timedelta(days=30),
source=stream_source,
)
push_source = PushSource(
name="push source", batch_source=FileSource(path="some path")
)
StreamFeatureView(
name="test push source feature view",
entities=[],
ttl=timedelta(days=30),
source=push_source,
)
with pytest.raises(ValueError):
StreamFeatureView(
name="test batch feature view", entities=[], ttl=timedelta(days=30)
)
with pytest.raises(ValueError):
StreamFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=FileSource(path="some path"),
)
| true | true |
f72ae59ecb83441e8b44b0616951c153ac6dd839 | 8,726 | py | Python | lambda/py/mutagen/_file.py | frivas/alexa-mixed-polly | bf0fde9005a66f3d6f0193799eacef934d166de7 | [
"W3C"
] | 2 | 2019-07-29T15:45:31.000Z | 2019-11-17T23:33:58.000Z | lambda/py/mutagen/_file.py | frivas/alexa-mixed-polly | bf0fde9005a66f3d6f0193799eacef934d166de7 | [
"W3C"
] | null | null | null | lambda/py/mutagen/_file.py | frivas/alexa-mixed-polly | bf0fde9005a66f3d6f0193799eacef934d166de7 | [
"W3C"
] | 1 | 2019-01-06T15:18:58.000Z | 2019-01-06T15:18:58.000Z | # -*- coding: utf-8 -*-
# Copyright (C) 2005 Michael Urman
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import warnings
from mutagen._util import DictMixin, loadfile
from mutagen._compat import izip
class FileType(DictMixin):
    """FileType(filething, **kwargs)
    Args:
        filething (filething): A filename or a file-like object
    Subclasses might take further options via keyword arguments.
    An abstract object wrapping tags and audio stream information.
    Each file format has different potential tags and stream
    information.
    FileTypes implement an interface very similar to Metadata; the
    dict interface, save, load, and delete calls on a FileType call
    the appropriate methods on its tag data.
    Attributes:
        info (`StreamInfo`): contains length, bitrate, sample rate
        tags (`Tags`): metadata tags, if any, otherwise `None`
    """
    __module__ = "mutagen"
    # Class-level defaults; concrete subclasses populate these in load().
    info = None
    tags = None
    filename = None
    # Generic fallback MIME type; the `mime` property merges every
    # _mimes list found along the MRO, most-derived first.
    _mimes = ["application/octet-stream"]
    def __init__(self, *args, **kwargs):
        # Constructing without a filename is deprecated but tolerated;
        # otherwise all arguments are forwarded to load().
        if not args and not kwargs:
            warnings.warn("FileType constructor requires a filename",
                          DeprecationWarning)
        else:
            self.load(*args, **kwargs)
    @loadfile()
    def load(self, filething, *args, **kwargs):
        # Subclasses parse the file here and fill in info/tags.
        raise NotImplementedError
    def __getitem__(self, key):
        """Look up a metadata tag key.
        If the file has no tags at all, a KeyError is raised.
        """
        if self.tags is None:
            raise KeyError(key)
        else:
            return self.tags[key]
    def __setitem__(self, key, value):
        """Set a metadata tag.
        If the file has no tags, an appropriate format is added (but
        not written until save is called).
        """
        if self.tags is None:
            self.add_tags()
        self.tags[key] = value
    def __delitem__(self, key):
        """Delete a metadata tag key.
        If the file has no tags at all, a KeyError is raised.
        """
        if self.tags is None:
            raise KeyError(key)
        else:
            del(self.tags[key])
    def keys(self):
        """Return a list of keys in the metadata tag.
        If the file has no tags at all, an empty list is returned.
        """
        if self.tags is None:
            return []
        else:
            return self.tags.keys()
    @loadfile(writable=True)
    def delete(self, filething=None):
        """delete(filething=None)
        Remove tags from a file.
        In cases where the tagging format is independent of the file type
        (for example `mutagen.id3.ID3`) all traces of the tagging format will
        be removed.
        In cases where the tag is part of the file type, all tags and
        padding will be removed.
        The tags attribute will be cleared as well if there is one.
        Does nothing if the file has no tags.
        Raises:
            mutagen.MutagenError: if deleting wasn't possible
        """
        if self.tags is not None:
            return self.tags.delete(filething)
    @loadfile(writable=True)
    def save(self, filething=None, **kwargs):
        """save(filething=None, **kwargs)
        Save metadata tags.
        Raises:
            MutagenError: if saving wasn't possible
        """
        if self.tags is not None:
            return self.tags.save(filething, **kwargs)
    def pprint(self):
        """
        Returns:
            text: stream information and comment key=value pairs.
        """
        stream = "%s (%s)" % (self.info.pprint(), self.mime[0])
        try:
            tags = self.tags.pprint()
        except AttributeError:
            # self.tags is None: show the stream information only.
            return stream
        else:
            return stream + ((tags and "\n" + tags) or "")
    def add_tags(self):
        """Adds new tags to the file.
        Raises:
            mutagen.MutagenError:
                if tags already exist or adding is not possible.
        """
        raise NotImplementedError
    @property
    def mime(self):
        """A list of mime types (:class:`mutagen.text`)"""
        # Walk the MRO so subclass-specific MIME types come before the
        # generic fallback, de-duplicating while preserving order.
        mimes = []
        for Kind in type(self).__mro__:
            for mime in getattr(Kind, '_mimes', []):
                if mime not in mimes:
                    mimes.append(mime)
        return mimes
    @staticmethod
    def score(filename, fileobj, header):
        """Returns a score for how likely the file can be parsed by this type.
        Args:
            filename (fspath): a file path
            fileobj (fileobj): a file object open in rb mode. Position is
                undefined
            header (bytes): data of undefined length, starts with the start of
                the file.
        Returns:
            int: negative if definitely not a matching type, otherwise a score,
                the bigger the more certain that the file can be loaded.
        """
        raise NotImplementedError
class StreamInfo(object):
    """Abstract stream information object.
    Provides attributes for length, bitrate, sample rate etc.
    See the implementations for details.
    """
    __module__ = "mutagen"
    def pprint(self):
        """
        Returns:
            text: Print stream information
        """
        # Implemented by each format's concrete StreamInfo subclass.
        raise NotImplementedError
@loadfile(method=False)
def File(filething, options=None, easy=False):
    """File(filething, options=None, easy=False)
    Guess the type of the file and try to open it.
    The file type is decided by several things, such as the first 128
    bytes (which usually contains a file type identifier), the
    filename extension, and the presence of existing tags.
    If no appropriate type could be found, None is returned.
    Args:
        filething (filething)
        options: Sequence of :class:`FileType` implementations,
            defaults to all included ones.
        easy (bool): If the easy wrappers should be returned if available.
            For example :class:`EasyMP3 <mp3.EasyMP3>` instead of
            :class:`MP3 <mp3.MP3>`.
    Returns:
        FileType: A FileType instance for the detected type or `None` in case
        the type couldn't be determined.
    Raises:
        MutagenError: in case the detected type fails to load the file.
    """
    if options is None:
        # Import the concrete formats lazily, only when the caller did
        # not restrict the candidate set; `easy` swaps in the Easy*
        # wrapper classes where one exists.
        from mutagen.asf import ASF
        from mutagen.apev2 import APEv2File
        from mutagen.flac import FLAC
        if easy:
            from mutagen.easyid3 import EasyID3FileType as ID3FileType
        else:
            from mutagen.id3 import ID3FileType
        if easy:
            from mutagen.mp3 import EasyMP3 as MP3
        else:
            from mutagen.mp3 import MP3
        from mutagen.oggflac import OggFLAC
        from mutagen.oggspeex import OggSpeex
        from mutagen.oggtheora import OggTheora
        from mutagen.oggvorbis import OggVorbis
        from mutagen.oggopus import OggOpus
        if easy:
            from mutagen.trueaudio import EasyTrueAudio as TrueAudio
        else:
            from mutagen.trueaudio import TrueAudio
        from mutagen.wavpack import WavPack
        if easy:
            from mutagen.easymp4 import EasyMP4 as MP4
        else:
            from mutagen.mp4 import MP4
        from mutagen.musepack import Musepack
        from mutagen.monkeysaudio import MonkeysAudio
        from mutagen.optimfrog import OptimFROG
        from mutagen.aiff import AIFF
        from mutagen.aac import AAC
        from mutagen.smf import SMF
        from mutagen.dsf import DSF
        options = [MP3, TrueAudio, OggTheora, OggSpeex, OggVorbis, OggFLAC,
                   FLAC, AIFF, APEv2File, MP4, ID3FileType, WavPack,
                   Musepack, MonkeysAudio, OptimFROG, ASF, OggOpus, AAC,
                   SMF, DSF]
    if not options:
        return None
    fileobj = filething.fileobj
    # Read the first 128 bytes once so every candidate's score() can
    # sniff the header; fall back to empty bytes on unreadable input.
    try:
        header = fileobj.read(128)
    except IOError:
        header = b""
    # Sort by name after score. Otherwise import order affects
    # Kind sort order, which affects treatment of things with
    # equals scores.
    results = [(Kind.score(filething.name, fileobj, header), Kind.__name__)
               for Kind in options]
    results = list(izip(results, options))
    results.sort()
    (score, name), Kind = results[-1]
    if score > 0:
        # Rewind so the winning type parses from the start; some file
        # objects may not support seeking.
        try:
            fileobj.seek(0, 0)
        except IOError:
            pass
        return Kind(fileobj, filename=filething.filename)
    else:
        return None
| 28.990033 | 79 | 0.607609 |
import warnings
from mutagen._util import DictMixin, loadfile
from mutagen._compat import izip
class FileType(DictMixin):
__module__ = "mutagen"
info = None
tags = None
filename = None
_mimes = ["application/octet-stream"]
def __init__(self, *args, **kwargs):
if not args and not kwargs:
warnings.warn("FileType constructor requires a filename",
DeprecationWarning)
else:
self.load(*args, **kwargs)
@loadfile()
def load(self, filething, *args, **kwargs):
raise NotImplementedError
def __getitem__(self, key):
if self.tags is None:
raise KeyError(key)
else:
return self.tags[key]
def __setitem__(self, key, value):
if self.tags is None:
self.add_tags()
self.tags[key] = value
def __delitem__(self, key):
if self.tags is None:
raise KeyError(key)
else:
del(self.tags[key])
def keys(self):
if self.tags is None:
return []
else:
return self.tags.keys()
@loadfile(writable=True)
def delete(self, filething=None):
if self.tags is not None:
return self.tags.delete(filething)
@loadfile(writable=True)
def save(self, filething=None, **kwargs):
if self.tags is not None:
return self.tags.save(filething, **kwargs)
def pprint(self):
stream = "%s (%s)" % (self.info.pprint(), self.mime[0])
try:
tags = self.tags.pprint()
except AttributeError:
return stream
else:
return stream + ((tags and "\n" + tags) or "")
def add_tags(self):
raise NotImplementedError
@property
def mime(self):
mimes = []
for Kind in type(self).__mro__:
for mime in getattr(Kind, '_mimes', []):
if mime not in mimes:
mimes.append(mime)
return mimes
@staticmethod
def score(filename, fileobj, header):
raise NotImplementedError
class StreamInfo(object):
__module__ = "mutagen"
def pprint(self):
raise NotImplementedError
@loadfile(method=False)
def File(filething, options=None, easy=False):
if options is None:
from mutagen.asf import ASF
from mutagen.apev2 import APEv2File
from mutagen.flac import FLAC
if easy:
from mutagen.easyid3 import EasyID3FileType as ID3FileType
else:
from mutagen.id3 import ID3FileType
if easy:
from mutagen.mp3 import EasyMP3 as MP3
else:
from mutagen.mp3 import MP3
from mutagen.oggflac import OggFLAC
from mutagen.oggspeex import OggSpeex
from mutagen.oggtheora import OggTheora
from mutagen.oggvorbis import OggVorbis
from mutagen.oggopus import OggOpus
if easy:
from mutagen.trueaudio import EasyTrueAudio as TrueAudio
else:
from mutagen.trueaudio import TrueAudio
from mutagen.wavpack import WavPack
if easy:
from mutagen.easymp4 import EasyMP4 as MP4
else:
from mutagen.mp4 import MP4
from mutagen.musepack import Musepack
from mutagen.monkeysaudio import MonkeysAudio
from mutagen.optimfrog import OptimFROG
from mutagen.aiff import AIFF
from mutagen.aac import AAC
from mutagen.smf import SMF
from mutagen.dsf import DSF
options = [MP3, TrueAudio, OggTheora, OggSpeex, OggVorbis, OggFLAC,
FLAC, AIFF, APEv2File, MP4, ID3FileType, WavPack,
Musepack, MonkeysAudio, OptimFROG, ASF, OggOpus, AAC,
SMF, DSF]
if not options:
return None
fileobj = filething.fileobj
try:
header = fileobj.read(128)
except IOError:
header = b""
results = [(Kind.score(filething.name, fileobj, header), Kind.__name__)
for Kind in options]
results = list(izip(results, options))
results.sort()
(score, name), Kind = results[-1]
if score > 0:
try:
fileobj.seek(0, 0)
except IOError:
pass
return Kind(fileobj, filename=filething.filename)
else:
return None
| true | true |
f72ae5ad21d0d2e7c0cc825a649cff1858a27800 | 5,781 | py | Python | src/coolbeans/extort/ib.py | runarp/coolbeans | 128a7f2e45690d2d22b05608e555c44334f46859 | [
"MIT"
] | 5 | 2020-05-17T04:48:25.000Z | 2022-01-27T09:36:45.000Z | src/coolbeans/extort/ib.py | runarp/coolbeans | 128a7f2e45690d2d22b05608e555c44334f46859 | [
"MIT"
] | 1 | 2020-05-17T06:21:52.000Z | 2020-05-22T13:49:33.000Z | src/coolbeans/extort/ib.py | runarp/coolbeans | 128a7f2e45690d2d22b05608e555c44334f46859 | [
"MIT"
] | 1 | 2021-01-28T03:00:27.000Z | 2021-01-28T03:00:27.000Z | """Example Extorter, useful as a starting point"""
import typing
import logging
import dataclasses
import datetime
# 3rdparty
import slugify
# We use ibflex
from ibflex import parser, FlexStatement, CashAction
from coolbeans.extort.base import ExtortionProtocol
from coolbeans.tools.seeds import Trade, Transfer, Expense, Income, EventDetail
logger = logging.getLogger(__name__)
def trade_key(trade):
    """Build a stable identifier for an IB trade.

    The key is ``[OPEN|CLOSE:]YYYY-MM-DD:<ibOrderID>``; the open/close
    prefix is included only when the trade carries an indicator.
    """
    indicator = trade.openCloseIndicator
    prefix = f"{indicator.name}:" if indicator else ""
    day = trade.tradeDate.strftime('%Y-%m-%d')
    return f"{prefix}{day}:{trade.ibOrderID}"
def clean_symbol(symbol: str) -> str:
    """Normalise *symbol* into an upper-case, underscore-separated slug.

    A leading digit is prefixed with ``X`` so the result never starts
    with a number.
    """
    slug = slugify.slugify(symbol, separator='_')
    prefix = "X" if slug[0].isdigit() else ""
    return (prefix + slug).upper()
class Extorter(ExtortionProtocol):
    """Extracts cash transactions and trades from an IB Flex statement."""
    # None presumably tells the framework not to open the file itself:
    # ibflex's parser takes the file name/handle directly -- confirm
    # against ExtortionProtocol.
    FILE_OPEN_MODE = None
    ib_account_id = ""
    def extort(self, stream: typing.Union[typing.IO[typing.AnyStr], str]):
        """Yield every cash record and trade in the Flex document as dicts."""
        for statement in parser.parse(stream).FlexStatements:
            for record in self.extract_cash(statement):
                yield dataclasses.asdict(record)
            for trade in self.extract_trades(statement):
                yield dataclasses.asdict(trade)
    @staticmethod
    def extract_cash(statement: FlexStatement):
        """
        Args:
            statement: The Statement to extract entries from
        Returns:
            iterator of DataClass instances for these records
        """
        for record in statement.CashTransactions:
            date = record.dateTime
            if record.type in (
                CashAction.DEPOSITWITHDRAW,
            ):
                # Deposits/withdrawals become Transfer seeds; direction
                # is inferred from the sign of the amount.
                # NOTE(review): event_detail here is the enum *name*
                # (a string), while Expense below receives the enum
                # member itself -- confirm which form downstream wants.
                yield Transfer(
                    id=record.transactionID,
                    date=date,
                    amount=record.amount,
                    currency=record.currency,
                    subaccount=record.accountId,
                    narration=record.description,
                    event_detail=EventDetail.TRANSFER_DEPOSIT.name if record.amount > 0 else EventDetail.TRANSFER_WITHDRAWAL.name,
                    meta={
                        'type': record.type.value,
                        'rate': record.fxRateToBase
                    }
                )
            elif record.amount < 0:
                # Negative cash flow: classify the expense by action
                # type, defaulting to generic fees.
                event_detail = EventDetail.EXPENSE_FEES
                if record.type in (CashAction.BONDINTPAID, CashAction.BROKERINTPAID):
                    event_detail = EventDetail.EXPENSE_INTEREST
                if record.type == CashAction.WHTAX:
                    event_detail = EventDetail.EXPENSE_TAX
                yield Expense(
                    id=record.transactionID,
                    date=date,
                    amount=record.amount,
                    event_detail=event_detail,
                    currency=record.currency,
                    subaccount=record.accountId,
                    narration=record.description,
                    meta={
                        'type': record.type.value,
                        'rate': record.fxRateToBase
                    }
                )
            else:
                # Any remaining positive cash flow is treated as income.
                # NOTE(review): no event_detail is set here -- confirm
                # that Income's default is acceptable.
                yield Income(
                    id=record.transactionID,
                    date=date,
                    amount=record.amount,
                    currency=record.currency,
                    subaccount=record.accountId,
                    narration=record.description,
                    meta={
                        'type': record.type.value,
                        'rate': record.fxRateToBase
                    }
                )
    @staticmethod
    def extract_trades(statement: FlexStatement):
        """Pull Trades from a FlexStatement, merging partial fills.
        Fills sharing the same trade_key() (open/close flag, trade date
        and IB order id) are combined into a single Trade seed.
        """
        by_order: typing.Dict[str, Trade] = {}
        for trade in statement.Trades:
            key = trade_key(trade)
            assert key.strip(), f"Invalid Key {len(key)}"
            if not trade.openCloseIndicator:
                # This isn't a trade at all.
                continue
            if key in by_order:
                # Additional fill for an order we have already seen:
                # fold quantity/price/fees into the existing seed.
                combined = by_order[key]
                combined.add_trade(
                    quantity=trade.quantity * trade.multiplier,
                    price=trade.tradePrice,
                    fees=trade.ibCommission
                )
            else:
                seed = Trade(
                    id=key,
                    date=trade.tradeDate,
                    price=trade.tradePrice,
                    currency=trade.currency,
                    quantity=trade.quantity * trade.multiplier,
                    commodity=clean_symbol(trade.symbol),
                    fees=trade.ibCommission,
                    fees_currency=trade.ibCommissionCurrency,
                    subaccount=trade.accountId,
                    event_detail=EventDetail.TRADE_OPEN if trade.openCloseIndicator.name == 'OPEN' else EventDetail.TRADE_CLOSE,
                    meta={
                        'exchange': trade.exchange,
                        'symbol': trade.symbol,
                    }
                )
                by_order[key] = seed
        for trade in by_order.values():
            yield trade
        # if trade.securityID is None and "." in trade.symbol:
        #     # FOREX Trade, not really a valid Symbol at all
        #     # TODO: Better check than blank securityID
        #     # Usually [currency].[commodity]. For example GBP.JPY
        #     # In that case trade.currency is JPY, so we just need to parse out the GBP part
        #     safe_symbol, _ = trade.symbol.split('.')
        # else:
        #     safe_symbol = self.clean_symbol(trade.symbol)
| 33.034286 | 130 | 0.530877 |
import typing
import logging
import dataclasses
import datetime
import slugify
from ibflex import parser, FlexStatement, CashAction
from coolbeans.extort.base import ExtortionProtocol
from coolbeans.tools.seeds import Trade, Transfer, Expense, Income, EventDetail
logger = logging.getLogger(__name__)
def trade_key(trade):
if trade.openCloseIndicator:
o = trade.openCloseIndicator.name + ':'
else:
o = ''
return f"{o}{trade.tradeDate.strftime('%Y-%m-%d')}:{trade.ibOrderID}"
def clean_symbol(symbol: str) -> str:
symbol = slugify.slugify(symbol, separator='_')
if symbol[0].isdigit():
symbol = "X" + symbol
symbol = symbol.upper()
return symbol
class Extorter(ExtortionProtocol):
FILE_OPEN_MODE = None
ib_account_id = ""
def extort(self, stream: typing.Union[typing.IO[typing.AnyStr], str]):
for statement in parser.parse(stream).FlexStatements:
for record in self.extract_cash(statement):
yield dataclasses.asdict(record)
for trade in self.extract_trades(statement):
yield dataclasses.asdict(trade)
@staticmethod
def extract_cash(statement: FlexStatement):
for record in statement.CashTransactions:
date = record.dateTime
if record.type in (
CashAction.DEPOSITWITHDRAW,
):
yield Transfer(
id=record.transactionID,
date=date,
amount=record.amount,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
event_detail=EventDetail.TRANSFER_DEPOSIT.name if record.amount > 0 else EventDetail.TRANSFER_WITHDRAWAL.name,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
elif record.amount < 0:
event_detail = EventDetail.EXPENSE_FEES
if record.type in (CashAction.BONDINTPAID, CashAction.BROKERINTPAID):
event_detail = EventDetail.EXPENSE_INTEREST
if record.type == CashAction.WHTAX:
event_detail = EventDetail.EXPENSE_TAX
yield Expense(
id=record.transactionID,
date=date,
amount=record.amount,
event_detail=event_detail,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
else:
yield Income(
id=record.transactionID,
date=date,
amount=record.amount,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
@staticmethod
def extract_trades(statement: FlexStatement):
by_order: typing.Dict[str, Trade] = {}
for trade in statement.Trades:
key = trade_key(trade)
assert key.strip(), f"Invalid Key {len(key)}"
if not trade.openCloseIndicator:
continue
if key in by_order:
combined = by_order[key]
combined.add_trade(
quantity=trade.quantity * trade.multiplier,
price=trade.tradePrice,
fees=trade.ibCommission
)
else:
seed = Trade(
id=key,
date=trade.tradeDate,
price=trade.tradePrice,
currency=trade.currency,
quantity=trade.quantity * trade.multiplier,
commodity=clean_symbol(trade.symbol),
fees=trade.ibCommission,
fees_currency=trade.ibCommissionCurrency,
subaccount=trade.accountId,
event_detail=EventDetail.TRADE_OPEN if trade.openCloseIndicator.name == 'OPEN' else EventDetail.TRADE_CLOSE,
meta={
'exchange': trade.exchange,
'symbol': trade.symbol,
}
)
by_order[key] = seed
for trade in by_order.values():
yield trade
# if trade.securityID is None and "." in trade.symbol:
# # FOREX Trade, not really a valid Symbol at all
# # TODO: Better check than blank securityID
# # Usually [currency].[commodity]. For example GBP.JPY
# # In that case trade.currency is JPY, so we just need to parse out the GBP part
# safe_symbol, _ = trade.symbol.split('.')
# else:
# safe_symbol = self.clean_symbol(trade.symbol)
| true | true |
f72ae5e176716f5b8b5bebf5ecd595df75c371dc | 1,555 | py | Python | example/run_SolveOneAgent_online.py | zehuilu/DrMaMP-Distributed-Real-time-Multi-agent-Mission-Planning-Algorithm | 894875ebddf7d1f6bbf7a47ce82f05d7be2bafdc | [
"Apache-2.0"
] | 4 | 2022-02-22T05:12:18.000Z | 2022-03-29T01:56:37.000Z | example/run_SolveOneAgent_online.py | zehuilu/DrMaMP-Distributed-Real-time-Multi-agent-Mission-Planning-Algorithm | 894875ebddf7d1f6bbf7a47ce82f05d7be2bafdc | [
"Apache-2.0"
] | null | null | null | example/run_SolveOneAgent_online.py | zehuilu/DrMaMP-Distributed-Real-time-Multi-agent-Mission-Planning-Algorithm | 894875ebddf7d1f6bbf7a47ce82f05d7be2bafdc | [
"Apache-2.0"
] | 3 | 2022-02-23T03:14:56.000Z | 2022-03-14T12:22:05.000Z | #!/usr/bin/env python3
import asyncio
import random
import matplotlib.pyplot as plt
import pathmagic
with pathmagic.context():
from Simulator import Simulator
from MissionPlanner import MissionPlanner
if __name__ == "__main__":
# define the world
map_width_meter = 25.0
map_height_meter = 25.0
map_resolution = 2
value_non_obs = 0 # the cell is empty
value_obs = 255 # the cell is blocked
# create a simulator
MySimulator = Simulator(map_width_meter, map_height_meter, map_resolution, value_non_obs, value_obs)
# number of obstacles
num_obs = 250
# [width, length] size of each obstacle [meter]
size_obs = [1, 1]
# generate random obstacles
MySimulator.generate_random_obs(num_obs, size_obs)
# randomly generate agents and targets
num_agents = 1
num_targets = 8
agents_position, targets_position = MySimulator.generate_agents_and_targets(num_agents, num_targets)
# average agent velocity in cells
agent_velocity_ave = [random.randint(4,8) for i in range(num_agents)]
# planning and visualization frequency in Hz
planning_frequency = 5
# initialize a planner
MyPlanner = MissionPlanner(MySimulator)
# run the planner online
asyncio.run(MyPlanner.run_planner({"agents_position": agents_position,
"targets_position": targets_position,
"agent_velocity_ave": agent_velocity_ave,
"planning_frequency": planning_frequency}))
| 34.555556 | 104 | 0.686817 |
import asyncio
import random
import matplotlib.pyplot as plt
import pathmagic
with pathmagic.context():
from Simulator import Simulator
from MissionPlanner import MissionPlanner
if __name__ == "__main__":
map_width_meter = 25.0
map_height_meter = 25.0
map_resolution = 2
value_non_obs = 0
value_obs = 255
MySimulator = Simulator(map_width_meter, map_height_meter, map_resolution, value_non_obs, value_obs)
num_obs = 250
size_obs = [1, 1]
MySimulator.generate_random_obs(num_obs, size_obs)
num_agents = 1
num_targets = 8
agents_position, targets_position = MySimulator.generate_agents_and_targets(num_agents, num_targets)
agent_velocity_ave = [random.randint(4,8) for i in range(num_agents)]
planning_frequency = 5
MyPlanner = MissionPlanner(MySimulator)
asyncio.run(MyPlanner.run_planner({"agents_position": agents_position,
"targets_position": targets_position,
"agent_velocity_ave": agent_velocity_ave,
"planning_frequency": planning_frequency}))
| true | true |
f72ae622b3e7a87cfbd8de23dda483349b388bb1 | 26,682 | py | Python | test/functional/tests/io_class/test_io_classification.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | 2 | 2021-08-13T14:44:45.000Z | 2022-01-10T07:41:40.000Z | test/functional/tests/io_class/test_io_classification.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | null | null | null | test/functional/tests/io_class/test_io_classification.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | null | null | null | #
# Copyright(c) 2019-2020 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
import random
from itertools import permutations
import pytest
from api.cas.ioclass_config import IoClass
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools import fs_utils
from test_tools.dd import Dd
from test_tools.disk_utils import Filesystem
from test_tools.fio.fio import Fio
from test_tools.fio.fio_param import ReadWrite, IoEngine
from test_utils.filesystem.file import File
from test_utils.os_utils import sync, Udev
from .io_class_common import *
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_lba():
"""Write data to random lba and check if it is cached according to range
defined in ioclass rule"""
cache, core = prepare()
ioclass_id = 1
min_cached_lba = 56
max_cached_lba = 200
iterations = 100
dd_size = Size(1, Unit.Blocks512)
dd_count = 1
# Prepare ioclass config
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule=f"lba:ge:{min_cached_lba}&lba:le:{max_cached_lba}&done",
ioclass_config_path=ioclass_config_path,
)
# Prepare cache for test
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
cache.flush_cache()
# Check if lbas from defined range are cached
dirty_count = 0
# '8' step is set to prevent writing cache line more than once
TestRun.LOGGER.info(f"Writing to one sector in each cache line from range.")
for lba in range(min_cached_lba, max_cached_lba, 8):
dd = (
Dd()
.input("/dev/zero")
.output(f"{core.system_path}")
.count(dd_count)
.block_size(dd_size)
.seek(lba)
)
dd.run()
sync()
dirty_count += 1
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != dirty_count:
TestRun.LOGGER.error(f"LBA {lba} not cached")
cache.flush_cache()
# Check if lba outside of defined range are not cached
TestRun.LOGGER.info(f"Writing to random sectors outside of cached range.")
for i in range(iterations):
rand_lba = random.randrange(2000)
if min_cached_lba <= rand_lba <= max_cached_lba:
continue
dd = (
Dd()
.input("/dev/zero")
.output(f"{core.system_path}")
.count(dd_count)
.block_size(dd_size)
.seek(rand_lba)
)
dd.run()
sync()
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != 0:
TestRun.LOGGER.error(f"Inappropriately cached lba: {rand_lba}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_request_size():
cache, core = prepare()
ioclass_id = 1
iterations = 100
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule=f"request_size:ge:8192&request_size:le:16384&done",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
Udev.disable()
# Check if requests with appropriate size are cached
TestRun.LOGGER.info(
f"Check if requests with size within defined range are cached"
)
cached_req_sizes = [Size(2, Unit.Blocks4096), Size(4, Unit.Blocks4096)]
for i in range(iterations):
cache.flush_cache()
req_size = random.choice(cached_req_sizes)
dd = (
Dd()
.input("/dev/zero")
.output(core.system_path)
.count(1)
.block_size(req_size)
.oflag("direct")
)
dd.run()
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != req_size.value / Unit.Blocks4096.value:
TestRun.fail("Incorrect number of dirty blocks!")
cache.flush_cache()
# Check if requests with inappropriate size are not cached
TestRun.LOGGER.info(
f"Check if requests with size outside defined range are not cached"
)
not_cached_req_sizes = [
Size(1, Unit.Blocks4096),
Size(8, Unit.Blocks4096),
Size(16, Unit.Blocks4096),
]
for i in range(iterations):
req_size = random.choice(not_cached_req_sizes)
dd = (
Dd()
.input("/dev/zero")
.output(core.system_path)
.count(1)
.block_size(req_size)
.oflag("direct")
)
dd.run()
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != 0:
TestRun.fail("Dirty data present!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", list(Filesystem) + [False])
def test_ioclass_direct(filesystem):
"""
Perform buffered/direct IO to/from files or raw block device.
Data from buffered IO should be cached.
Data from buffered IO should not be cached and if performed to/from already cached data
should cause reclassification to unclassified IO class.
"""
cache, core = prepare()
Udev.disable()
ioclass_id = 1
io_size = Size(random.randint(1000, 2000), Unit.Blocks4096)
# direct IO class
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule="direct",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
fio = (
Fio().create_command()
.io_engine(IoEngine.libaio)
.size(io_size)
.offset(io_size)
.read_write(ReadWrite.write)
.target(f"{mountpoint}/tmp_file" if filesystem else core.system_path)
)
if filesystem:
TestRun.LOGGER.info(
f"Preparing {filesystem.name} filesystem and mounting {core.system_path} at"
f" {mountpoint}"
)
core.create_filesystem(filesystem)
core.mount(mountpoint)
sync()
else:
TestRun.LOGGER.info("Testing on raw exported object")
base_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
TestRun.LOGGER.info(f"Buffered writes to {'file' if filesystem else 'device'}")
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy:
TestRun.fail("Buffered writes were cached!\n"
f"Expected: {base_occupancy}, actual: {new_occupancy}")
TestRun.LOGGER.info(f"Direct writes to {'file' if filesystem else 'device'}")
fio.direct()
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy + io_size:
TestRun.fail("Wrong number of direct writes was cached!\n"
f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
TestRun.LOGGER.info(f"Buffered reads from {'file' if filesystem else 'device'}")
fio.remove_param("readwrite").remove_param("direct")
fio.read_write(ReadWrite.read)
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy:
TestRun.fail("Buffered reads did not cause reclassification!"
f"Expected occupancy: {base_occupancy}, actual: {new_occupancy}")
TestRun.LOGGER.info(f"Direct reads from {'file' if filesystem else 'device'}")
fio.direct()
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy + io_size:
TestRun.fail("Wrong number of direct reads was cached!\n"
f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_metadata(filesystem):
"""
Perform operations on files that cause metadata update.
Determine if every such operation results in increased writes to cached metadata.
Exact values may not be tested as each file system has different metadata structure.
"""
cache, core = prepare()
Udev.disable()
ioclass_id = random.randint(1, ioclass_config.MAX_IO_CLASS_ID)
# metadata IO class
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule="metadata&done",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
f"and mounting {core.system_path} at {mountpoint}")
core.create_filesystem(filesystem)
core.mount(mountpoint)
sync()
requests_to_metadata_before = cache.get_io_class_statistics(
io_class_id=ioclass_id).request_stats.write
TestRun.LOGGER.info("Creating 20 test files")
files = []
for i in range(1, 21):
file_path = f"{mountpoint}/test_file_{i}"
dd = (
Dd()
.input("/dev/urandom")
.output(file_path)
.count(random.randint(5, 50))
.block_size(Size(1, Unit.MebiByte))
.oflag("sync")
)
dd.run()
files.append(File(file_path))
TestRun.LOGGER.info("Checking requests to metadata")
requests_to_metadata_after = cache.get_io_class_statistics(
io_class_id=ioclass_id).request_stats.write
if requests_to_metadata_after == requests_to_metadata_before:
TestRun.fail("No requests to metadata while creating files!")
requests_to_metadata_before = requests_to_metadata_after
TestRun.LOGGER.info("Renaming all test files")
for file in files:
file.move(f"{file.full_path}_renamed")
sync()
TestRun.LOGGER.info("Checking requests to metadata")
requests_to_metadata_after = cache.get_io_class_statistics(
io_class_id=ioclass_id).request_stats.write
if requests_to_metadata_after == requests_to_metadata_before:
TestRun.fail("No requests to metadata while renaming files!")
requests_to_metadata_before = requests_to_metadata_after
test_dir_path = f"{mountpoint}/test_dir"
TestRun.LOGGER.info(f"Creating directory {test_dir_path}")
fs_utils.create_directory(path=test_dir_path)
TestRun.LOGGER.info(f"Moving test files into {test_dir_path}")
for file in files:
file.move(test_dir_path)
sync()
TestRun.LOGGER.info("Checking requests to metadata")
requests_to_metadata_after = cache.get_io_class_statistics(
io_class_id=ioclass_id).request_stats.write
if requests_to_metadata_after == requests_to_metadata_before:
TestRun.fail("No requests to metadata while moving files!")
TestRun.LOGGER.info(f"Removing {test_dir_path}")
fs_utils.remove(path=test_dir_path, force=True, recursive=True)
TestRun.LOGGER.info("Checking requests to metadata")
requests_to_metadata_after = cache.get_io_class_statistics(
io_class_id=ioclass_id).request_stats.write
if requests_to_metadata_after == requests_to_metadata_before:
TestRun.fail("No requests to metadata while deleting directory with files!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_id_as_condition(filesystem):
"""
Load config in which IO class ids are used as conditions in other IO class definitions.
Check if performed IO is properly classified.
"""
cache, core = prepare()
Udev.disable()
base_dir_path = f"{mountpoint}/base_dir"
ioclass_file_size = Size(random.randint(25, 50), Unit.MebiByte)
ioclass_file_size_bytes = int(ioclass_file_size.get_value(Unit.Byte))
# directory condition
ioclass_config.add_ioclass(
ioclass_id=1,
eviction_priority=1,
allocation=True,
rule=f"directory:{base_dir_path}",
ioclass_config_path=ioclass_config_path,
)
# file size condition
ioclass_config.add_ioclass(
ioclass_id=2,
eviction_priority=1,
allocation=True,
rule=f"file_size:eq:{ioclass_file_size_bytes}",
ioclass_config_path=ioclass_config_path,
)
# direct condition
ioclass_config.add_ioclass(
ioclass_id=3,
eviction_priority=1,
allocation=True,
rule="direct",
ioclass_config_path=ioclass_config_path,
)
# IO class 1 OR 2 condition
ioclass_config.add_ioclass(
ioclass_id=4,
eviction_priority=1,
allocation=True,
rule="io_class:1|io_class:2",
ioclass_config_path=ioclass_config_path,
)
# IO class 4 AND file size condition (same as IO class 2)
ioclass_config.add_ioclass(
ioclass_id=5,
eviction_priority=1,
allocation=True,
rule=f"io_class:4&file_size:eq:{ioclass_file_size_bytes}",
ioclass_config_path=ioclass_config_path,
)
# IO class 3 condition
ioclass_config.add_ioclass(
ioclass_id=6,
eviction_priority=1,
allocation=True,
rule="io_class:3",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
f"and mounting {core.system_path} at {mountpoint}")
core.create_filesystem(filesystem)
core.mount(mountpoint)
fs_utils.create_directory(base_dir_path)
sync()
# IO fulfilling IO class 1 condition (and not IO class 2)
# Should be classified as IO class 4
base_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
non_ioclass_file_size = Size(random.randrange(1, 25), Unit.MebiByte)
(Fio().create_command()
.io_engine(IoEngine.libaio)
.size(non_ioclass_file_size)
.read_write(ReadWrite.write)
.target(f"{base_dir_path}/test_file_1")
.run())
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
if new_occupancy != base_occupancy + non_ioclass_file_size:
TestRun.fail("Writes were not properly cached!\n"
f"Expected: {base_occupancy + non_ioclass_file_size}, actual: {new_occupancy}")
# IO fulfilling IO class 2 condition (and not IO class 1)
# Should be classified as IO class 5
base_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
(Fio().create_command()
.io_engine(IoEngine.libaio)
.size(ioclass_file_size)
.read_write(ReadWrite.write)
.target(f"{mountpoint}/test_file_2")
.run())
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
if new_occupancy != base_occupancy + ioclass_file_size:
TestRun.fail("Writes were not properly cached!\n"
f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
# IO fulfilling IO class 1 and 2 conditions
# Should be classified as IO class 5
base_occupancy = new_occupancy
(Fio().create_command()
.io_engine(IoEngine.libaio)
.size(ioclass_file_size)
.read_write(ReadWrite.write)
.target(f"{base_dir_path}/test_file_3")
.run())
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
if new_occupancy != base_occupancy + ioclass_file_size:
TestRun.fail("Writes were not properly cached!\n"
f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
# Same IO but direct
# Should be classified as IO class 6
base_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
(Fio().create_command()
.io_engine(IoEngine.libaio)
.size(ioclass_file_size)
.read_write(ReadWrite.write)
.target(f"{base_dir_path}/test_file_3")
.direct()
.run())
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
if new_occupancy != base_occupancy + ioclass_file_size:
TestRun.fail("Writes were not properly cached!\n"
f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_or(filesystem):
"""
Load config with IO class combining 5 contradicting conditions connected by OR operator.
Check if every IO fulfilling one condition is classified properly.
"""
cache, core = prepare()
Udev.disable()
# directories OR condition
ioclass_config.add_ioclass(
ioclass_id=1,
eviction_priority=1,
allocation=True,
rule=f"directory:{mountpoint}/dir1|directory:{mountpoint}/dir2|directory:"
f"{mountpoint}/dir3|directory:{mountpoint}/dir4|directory:{mountpoint}/dir5",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
f"and mounting {core.system_path} at {mountpoint}")
core.create_filesystem(filesystem)
core.mount(mountpoint)
for i in range(1, 6):
fs_utils.create_directory(f"{mountpoint}/dir{i}")
sync()
# Perform IO fulfilling each condition and check if occupancy raises
for i in range(1, 6):
file_size = Size(random.randint(25, 50), Unit.MebiByte)
base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
(Fio().create_command()
.io_engine(IoEngine.libaio)
.size(file_size)
.read_write(ReadWrite.write)
.target(f"{mountpoint}/dir{i}/test_file")
.run())
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
if new_occupancy != base_occupancy + file_size:
TestRun.fail("Occupancy has not increased correctly!\n"
f"Expected: {base_occupancy + file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_and(filesystem):
"""
Load config with IO class combining 5 conditions contradicting at least one other condition
connected by AND operator.
Check if every IO fulfilling one of the conditions is not classified.
"""
cache, core = prepare()
Udev.disable()
file_size = Size(random.randint(25, 50), Unit.MebiByte)
file_size_bytes = int(file_size.get_value(Unit.Byte))
# directories OR condition
ioclass_config.add_ioclass(
ioclass_id=1,
eviction_priority=1,
allocation=True,
rule=f"file_size:gt:{file_size_bytes}&file_size:lt:{file_size_bytes}&"
f"file_size:ge:{file_size_bytes}&file_size:le:{file_size_bytes}&"
f"file_size:eq:{file_size_bytes}",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
f"and mounting {core.system_path} at {mountpoint}")
core.create_filesystem(filesystem)
core.mount(mountpoint)
sync()
base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
# Perform IO
for size in [file_size, file_size + Size(1, Unit.MebiByte), file_size - Size(1, Unit.MebiByte)]:
(Fio().create_command()
.io_engine(IoEngine.libaio)
.size(size)
.read_write(ReadWrite.write)
.target(f"{mountpoint}/test_file")
.run())
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
if new_occupancy != base_occupancy:
TestRun.fail("Unexpected occupancy increase!\n"
f"Expected: {base_occupancy}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_effective_ioclass(filesystem):
"""
title: Effective IO class with multiple non-exclusive conditions
description: |
Test CAS ability to properly classify IO fulfilling multiple conditions based on
IO class ids and presence of '&done' annotation in IO class rules
pass_criteria:
- In every iteration first IO is classified to the last in order IO class
- In every iteration second IO is classified to the IO class with '&done' annotation
"""
with TestRun.LOGGER.step(f"Test prepare"):
cache, core = prepare()
Udev.disable()
file_size = Size(10, Unit.Blocks4096)
file_size_bytes = int(file_size.get_value(Unit.Byte))
test_dir = f"{mountpoint}/test"
rules = ["direct", # rule contradicting other rules
f"directory:{test_dir}",
f"file_size:le:{2 * file_size_bytes}",
f"file_size:ge:{file_size_bytes // 2}"]
with TestRun.LOGGER.step(f"Preparing {filesystem.name} filesystem "
f"and mounting {core.system_path} at {mountpoint}"):
core.create_filesystem(filesystem)
core.mount(mountpoint)
fs_utils.create_directory(test_dir)
sync()
for i, permutation in TestRun.iteration(enumerate(permutations(range(1, 5)), start=1)):
with TestRun.LOGGER.step("Load IO classes in order specified by permutation"):
load_io_classes_in_permutation_order(rules, permutation, cache)
io_class_id = 3 if rules[permutation.index(4)] == "direct" else 4
with TestRun.LOGGER.step("Perform IO fulfilling the non-contradicting conditions"):
base_occupancy = cache.get_io_class_statistics(
io_class_id=io_class_id).usage_stats.occupancy
fio = (Fio().create_command()
.io_engine(IoEngine.libaio)
.size(file_size)
.read_write(ReadWrite.write)
.target(f"{test_dir}/test_file{i}"))
fio.run()
sync()
with TestRun.LOGGER.step("Check if IO was properly classified "
"(to the last non-contradicting IO class)"):
new_occupancy = cache.get_io_class_statistics(
io_class_id=io_class_id).usage_stats.occupancy
if new_occupancy != base_occupancy + file_size:
TestRun.LOGGER.error("Wrong IO classification!\n"
f"Expected: {base_occupancy + file_size}, "
f"actual: {new_occupancy}")
with TestRun.LOGGER.step("Add '&done' to the second in order non-contradicting condition"):
io_class_id = add_done_to_second_non_exclusive_condition(rules, permutation, cache)
with TestRun.LOGGER.step("Repeat IO"):
base_occupancy = cache.get_io_class_statistics(
io_class_id=io_class_id).usage_stats.occupancy
fio.run()
sync()
with TestRun.LOGGER.step("Check if IO was properly classified "
"(to the IO class with '&done' annotation)"):
new_occupancy = cache.get_io_class_statistics(
io_class_id=io_class_id).usage_stats.occupancy
if new_occupancy != base_occupancy + file_size:
TestRun.LOGGER.error("Wrong IO classification!\n"
f"Expected: {base_occupancy + file_size}, "
f"actual: {new_occupancy}")
def load_io_classes_in_permutation_order(rules, permutation, cache):
    """Recreate the IO class config assigning each rule the class id given by
    *permutation*, then load the config into *cache*."""
    ioclass_config.remove_ioclass_config(ioclass_config_path=ioclass_config_path)
    ioclass_config.create_ioclass_config(
        add_default_rule=False, ioclass_config_path=ioclass_config_path
    )
    # Keep unclassified workload in pass-through mode so only the tested
    # IO classes contribute to cache statistics.
    io_class_list = [IoClass.default(allocation=False)]
    io_class_list += [IoClass(class_id=class_id, rule=rule)
                      for class_id, rule in zip(permutation, rules)]
    IoClass.save_list_to_config_file(io_class_list,
                                     add_default_rule=False,
                                     ioclass_config_path=ioclass_config_path)
    casadm.load_io_classes(cache.cache_id, file=ioclass_config_path)
def add_done_to_second_non_exclusive_condition(rules, permutation, cache):
    """Append '&done' to the second non-"direct" rule (scanning in class-id
    order), reload the IO class config and return that rule's class id."""
    non_exclusive_seen = 0
    class_id = 0
    # Walk class ids upward until the second rule other than "direct" is found.
    while non_exclusive_seen < 2:
        class_id += 1
        rule = rules[permutation.index(class_id)]
        if rule != "direct":
            non_exclusive_seen += 1
    fs_utils.replace_first_pattern_occurrence(ioclass_config_path,
                                              rule, f"{rule}&done")
    sync()
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    return class_id
| 39.296024 | 100 | 0.670227 |
import random
from itertools import permutations
import pytest
from api.cas.ioclass_config import IoClass
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools import fs_utils
from test_tools.dd import Dd
from test_tools.disk_utils import Filesystem
from test_tools.fio.fio import Fio
from test_tools.fio.fio_param import ReadWrite, IoEngine
from test_utils.filesystem.file import File
from test_utils.os_utils import sync, Udev
from .io_class_common import *
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_lba():
cache, core = prepare()
ioclass_id = 1
min_cached_lba = 56
max_cached_lba = 200
iterations = 100
dd_size = Size(1, Unit.Blocks512)
dd_count = 1
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule=f"lba:ge:{min_cached_lba}&lba:le:{max_cached_lba}&done",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
cache.flush_cache()
dirty_count = 0
TestRun.LOGGER.info(f"Writing to one sector in each cache line from range.")
for lba in range(min_cached_lba, max_cached_lba, 8):
dd = (
Dd()
.input("/dev/zero")
.output(f"{core.system_path}")
.count(dd_count)
.block_size(dd_size)
.seek(lba)
)
dd.run()
sync()
dirty_count += 1
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != dirty_count:
TestRun.LOGGER.error(f"LBA {lba} not cached")
cache.flush_cache()
TestRun.LOGGER.info(f"Writing to random sectors outside of cached range.")
for i in range(iterations):
rand_lba = random.randrange(2000)
if min_cached_lba <= rand_lba <= max_cached_lba:
continue
dd = (
Dd()
.input("/dev/zero")
.output(f"{core.system_path}")
.count(dd_count)
.block_size(dd_size)
.seek(rand_lba)
)
dd.run()
sync()
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != 0:
TestRun.LOGGER.error(f"Inappropriately cached lba: {rand_lba}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_request_size():
cache, core = prepare()
ioclass_id = 1
iterations = 100
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule=f"request_size:ge:8192&request_size:le:16384&done",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
Udev.disable()
TestRun.LOGGER.info(
f"Check if requests with size within defined range are cached"
)
cached_req_sizes = [Size(2, Unit.Blocks4096), Size(4, Unit.Blocks4096)]
for i in range(iterations):
cache.flush_cache()
req_size = random.choice(cached_req_sizes)
dd = (
Dd()
.input("/dev/zero")
.output(core.system_path)
.count(1)
.block_size(req_size)
.oflag("direct")
)
dd.run()
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != req_size.value / Unit.Blocks4096.value:
TestRun.fail("Incorrect number of dirty blocks!")
cache.flush_cache()
TestRun.LOGGER.info(
f"Check if requests with size outside defined range are not cached"
)
not_cached_req_sizes = [
Size(1, Unit.Blocks4096),
Size(8, Unit.Blocks4096),
Size(16, Unit.Blocks4096),
]
for i in range(iterations):
req_size = random.choice(not_cached_req_sizes)
dd = (
Dd()
.input("/dev/zero")
.output(core.system_path)
.count(1)
.block_size(req_size)
.oflag("direct")
)
dd.run()
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != 0:
TestRun.fail("Dirty data present!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", list(Filesystem) + [False])
def test_ioclass_direct(filesystem):
cache, core = prepare()
Udev.disable()
ioclass_id = 1
io_size = Size(random.randint(1000, 2000), Unit.Blocks4096)
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule="direct",
ioclass_config_path=ioclass_config_path,
)
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
fio = (
Fio().create_command()
.io_engine(IoEngine.libaio)
.size(io_size)
.offset(io_size)
.read_write(ReadWrite.write)
.target(f"{mountpoint}/tmp_file" if filesystem else core.system_path)
)
if filesystem:
TestRun.LOGGER.info(
f"Preparing {filesystem.name} filesystem and mounting {core.system_path} at"
f" {mountpoint}"
)
core.create_filesystem(filesystem)
core.mount(mountpoint)
sync()
else:
TestRun.LOGGER.info("Testing on raw exported object")
base_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
TestRun.LOGGER.info(f"Buffered writes to {'file' if filesystem else 'device'}")
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy:
TestRun.fail("Buffered writes were cached!\n"
f"Expected: {base_occupancy}, actual: {new_occupancy}")
TestRun.LOGGER.info(f"Direct writes to {'file' if filesystem else 'device'}")
fio.direct()
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy + io_size:
TestRun.fail("Wrong number of direct writes was cached!\n"
f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
TestRun.LOGGER.info(f"Buffered reads from {'file' if filesystem else 'device'}")
fio.remove_param("readwrite").remove_param("direct")
fio.read_write(ReadWrite.read)
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy:
TestRun.fail("Buffered reads did not cause reclassification!"
f"Expected occupancy: {base_occupancy}, actual: {new_occupancy}")
TestRun.LOGGER.info(f"Direct reads from {'file' if filesystem else 'device'}")
fio.direct()
fio.run()
sync()
new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
if new_occupancy != base_occupancy + io_size:
TestRun.fail("Wrong number of direct reads was cached!\n"
f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_metadata(filesystem):
    """Verify that filesystem metadata IO is classified into the metadata IO class.

    Creates, renames, moves and finally deletes a set of files and after each
    step checks that the write-request counter of the `metadata&done` IO class
    has grown relative to the previous step.
    """
    cache, core = prepare()
    Udev.disable()
    ioclass_id = random.randint(1, ioclass_config.MAX_IO_CLASS_ID)
    # Any class id will do; what matters is the `metadata` classification rule.
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule="metadata&done",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    sync()
    # Baseline: metadata-class write requests before any file operations.
    requests_to_metadata_before = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    TestRun.LOGGER.info("Creating 20 test files")
    files = []
    for i in range(1, 21):
        file_path = f"{mountpoint}/test_file_{i}"
        dd = (
            Dd()
            .input("/dev/urandom")
            .output(file_path)
            .count(random.randint(5, 50))
            .block_size(Size(1, Unit.MebiByte))
            .oflag("sync")
        )
        dd.run()
        files.append(File(file_path))
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while creating files!")
    requests_to_metadata_before = requests_to_metadata_after
    TestRun.LOGGER.info("Renaming all test files")
    for file in files:
        file.move(f"{file.full_path}_renamed")
    sync()
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while renaming files!")
    requests_to_metadata_before = requests_to_metadata_after
    test_dir_path = f"{mountpoint}/test_dir"
    TestRun.LOGGER.info(f"Creating directory {test_dir_path}")
    fs_utils.create_directory(path=test_dir_path)
    TestRun.LOGGER.info(f"Moving test files into {test_dir_path}")
    for file in files:
        file.move(test_dir_path)
    sync()
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while moving files!")
    # BUG FIX: refresh the baseline here as well. Previously the deletion
    # check below compared against the pre-move counter, so it could pass
    # even if deleting the directory generated no metadata requests at all
    # (the move step's requests alone satisfied the inequality).
    requests_to_metadata_before = requests_to_metadata_after
    TestRun.LOGGER.info(f"Removing {test_dir_path}")
    fs_utils.remove(path=test_dir_path, force=True, recursive=True)
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while deleting directory with files!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_id_as_condition(filesystem):
    """Verify classification rules that reference other IO classes by id.

    Classes 1-3 match by directory, exact file size and direct IO
    respectively; classes 4-6 are defined in terms of classes 1-3 via
    `io_class:<id>` conditions. Writes matching a referenced class must be
    accounted to the compound class that references it.
    """
    cache, core = prepare()
    Udev.disable()
    base_dir_path = f"{mountpoint}/base_dir"
    ioclass_file_size = Size(random.randint(25, 50), Unit.MebiByte)
    ioclass_file_size_bytes = int(ioclass_file_size.get_value(Unit.Byte))
    # Class 1: any IO under base_dir_path.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"directory:{base_dir_path}",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 2: files of exactly ioclass_file_size.
    ioclass_config.add_ioclass(
        ioclass_id=2,
        eviction_priority=1,
        allocation=True,
        rule=f"file_size:eq:{ioclass_file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 3: direct (unbuffered) IO.
    ioclass_config.add_ioclass(
        ioclass_id=3,
        eviction_priority=1,
        allocation=True,
        rule="direct",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 4: anything classified as class 1 or class 2.
    ioclass_config.add_ioclass(
        ioclass_id=4,
        eviction_priority=1,
        allocation=True,
        rule="io_class:1|io_class:2",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 5: class 4 hits that additionally have the exact file size.
    ioclass_config.add_ioclass(
        ioclass_id=5,
        eviction_priority=1,
        allocation=True,
        rule=f"io_class:4&file_size:eq:{ioclass_file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 6: alias of class 3 (direct IO).
    ioclass_config.add_ioclass(
        ioclass_id=6,
        eviction_priority=1,
        allocation=True,
        rule="io_class:3",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    fs_utils.create_directory(base_dir_path)
    sync()
    # A directory hit (class 1, wrong size) must be accounted to class 4.
    base_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
    non_ioclass_file_size = Size(random.randrange(1, 25), Unit.MebiByte)
    (Fio().create_command()
     .io_engine(IoEngine.libaio)
     .size(non_ioclass_file_size)
     .read_write(ReadWrite.write)
     .target(f"{base_dir_path}/test_file_1")
     .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
    if new_occupancy != base_occupancy + non_ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + non_ioclass_file_size}, actual: {new_occupancy}")
    # An exact-size hit (class 2 -> class 4) must be accounted to class 5.
    base_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    (Fio().create_command()
     .io_engine(IoEngine.libaio)
     .size(ioclass_file_size)
     .read_write(ReadWrite.write)
     .target(f"{mountpoint}/test_file_2")
     .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
    # Directory + exact size (classes 1 and 2) must still land in class 5.
    base_occupancy = new_occupancy
    (Fio().create_command()
     .io_engine(IoEngine.libaio)
     .size(ioclass_file_size)
     .read_write(ReadWrite.write)
     .target(f"{base_dir_path}/test_file_3")
     .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
    # Direct IO (class 3) must be accounted to class 6.
    base_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
    (Fio().create_command()
     .io_engine(IoEngine.libaio)
     .size(ioclass_file_size)
     .read_write(ReadWrite.write)
     .target(f"{base_dir_path}/test_file_3")
     .direct()
     .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_or(filesystem):
    """Verify the `|` (OR) operator in IO class rules.

    One IO class matches any of five directories; a write into each
    directory in turn must increase that class's occupancy by the file size.
    """
    cache, core = prepare()
    Udev.disable()
    # Single class whose rule OR-combines five directory conditions.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"directory:{mountpoint}/dir1|directory:{mountpoint}/dir2|directory:"
             f"{mountpoint}/dir3|directory:{mountpoint}/dir4|directory:{mountpoint}/dir5",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    for i in range(1, 6):
        fs_utils.create_directory(f"{mountpoint}/dir{i}")
    sync()
    # Each alternative of the OR rule must classify IO into class 1.
    for i in range(1, 6):
        file_size = Size(random.randint(25, 50), Unit.MebiByte)
        base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        (Fio().create_command()
         .io_engine(IoEngine.libaio)
         .size(file_size)
         .read_write(ReadWrite.write)
         .target(f"{mountpoint}/dir{i}/test_file")
         .run())
        sync()
        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        if new_occupancy != base_occupancy + file_size:
            TestRun.fail("Occupancy has not increased correctly!\n"
                         f"Expected: {base_occupancy + file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_and(filesystem):
    """Verify the `&` (AND) operator in IO class rules.

    The rule AND-combines contradictory size conditions (`gt` and `lt` of
    the same value can never both hold), so no write — regardless of its
    size — may increase the class occupancy.
    """
    cache, core = prepare()
    Udev.disable()
    file_size = Size(random.randint(25, 50), Unit.MebiByte)
    file_size_bytes = int(file_size.get_value(Unit.Byte))
    # Deliberately unsatisfiable conjunction of file_size conditions.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"file_size:gt:{file_size_bytes}&file_size:lt:{file_size_bytes}&"
             f"file_size:ge:{file_size_bytes}&file_size:le:{file_size_bytes}&"
             f"file_size:eq:{file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    sync()
    base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
    # Try the exact size plus one above and one below; none may be cached.
    for size in [file_size, file_size + Size(1, Unit.MebiByte), file_size - Size(1, Unit.MebiByte)]:
        (Fio().create_command()
         .io_engine(IoEngine.libaio)
         .size(size)
         .read_write(ReadWrite.write)
         .target(f"{mountpoint}/test_file")
         .run())
        sync()
        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        if new_occupancy != base_occupancy:
            TestRun.fail("Unexpected occupancy increase!\n"
                         f"Expected: {base_occupancy}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_effective_ioclass(filesystem):
    """Check which IO class wins when several non-contradicting rules match.

    Four rules (one contradicting — `direct` — and three satisfiable by the
    same buffered write) are loaded under every permutation of ids 1-4.
    Without `&done` the highest-id non-contradicting class must win; after
    `&done` is appended to the second matching rule, that class must win.
    """
    with TestRun.LOGGER.step(f"Test prepare"):
        cache, core = prepare()
        Udev.disable()
        file_size = Size(10, Unit.Blocks4096)
        file_size_bytes = int(file_size.get_value(Unit.Byte))
        test_dir = f"{mountpoint}/test"
        # Rule 0 ("direct") contradicts the buffered IO issued below; the
        # remaining three are all satisfied by it.
        rules = ["direct",
                 f"directory:{test_dir}",
                 f"file_size:le:{2 * file_size_bytes}",
                 f"file_size:ge:{file_size_bytes // 2}"]
    with TestRun.LOGGER.step(f"Preparing {filesystem.name} filesystem "
                             f"and mounting {core.system_path} at {mountpoint}"):
        core.create_filesystem(filesystem)
        core.mount(mountpoint)
        fs_utils.create_directory(test_dir)
        sync()
    for i, permutation in TestRun.iteration(enumerate(permutations(range(1, 5)), start=1)):
        with TestRun.LOGGER.step("Load IO classes in order specified by permutation"):
            load_io_classes_in_permutation_order(rules, permutation, cache)
            # If the highest id (4) landed on "direct", the effective class
            # is the next highest id (3); otherwise it is 4 itself.
            io_class_id = 3 if rules[permutation.index(4)] == "direct" else 4
        with TestRun.LOGGER.step("Perform IO fulfilling the non-contradicting conditions"):
            base_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            fio = (Fio().create_command()
                   .io_engine(IoEngine.libaio)
                   .size(file_size)
                   .read_write(ReadWrite.write)
                   .target(f"{test_dir}/test_file{i}"))
            fio.run()
            sync()
        with TestRun.LOGGER.step("Check if IO was properly classified "
                                 "(to the last non-contradicting IO class)"):
            new_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            if new_occupancy != base_occupancy + file_size:
                TestRun.LOGGER.error("Wrong IO classification!\n"
                                     f"Expected: {base_occupancy + file_size}, "
                                     f"actual: {new_occupancy}")
        with TestRun.LOGGER.step("Add '&done' to the second in order non-contradicting condition"):
            io_class_id = add_done_to_second_non_exclusive_condition(rules, permutation, cache)
        with TestRun.LOGGER.step("Repeat IO"):
            base_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            fio.run()
            sync()
        with TestRun.LOGGER.step("Check if IO was properly classified "
                                 "(to the IO class with '&done' annotation)"):
            new_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            if new_occupancy != base_occupancy + file_size:
                TestRun.LOGGER.error("Wrong IO classification!\n"
                                     f"Expected: {base_occupancy + file_size}, "
                                     f"actual: {new_occupancy}")
def load_io_classes_in_permutation_order(rules, permutation, cache):
    """Recreate the IO class config with `rules` assigned class ids according
    to `permutation`, then load the config into `cache`."""
    # Start from a fresh, empty config file.
    ioclass_config.remove_ioclass_config(ioclass_config_path=ioclass_config_path)
    ioclass_config.create_ioclass_config(
        add_default_rule=False, ioclass_config_path=ioclass_config_path
    )
    # The pass-through default class plus one class per rule, with ids taken
    # from the permutation.
    io_classes = [IoClass.default(allocation=False)]
    io_classes.extend(
        IoClass(class_id=class_id, rule=rule)
        for class_id, rule in zip(permutation, rules)
    )
    IoClass.save_list_to_config_file(io_classes,
                                     add_default_rule=False,
                                     ioclass_config_path=ioclass_config_path)
    casadm.load_io_classes(cache.cache_id, file=ioclass_config_path)
def add_done_to_second_non_exclusive_condition(rules, permutation, cache):
    """Append '&done' to the second (by class id) non-"direct" rule.

    Walks class ids upward through `permutation`, counting rules that do not
    contradict buffered IO (i.e. are not "direct"), edits the config file in
    place, reloads it into `cache` and returns the chosen class id.
    """
    non_exclusive_conditions = 0
    second_class_id = 1
    while True:
        # Find which rule was assigned this class id by the permutation.
        idx = permutation.index(second_class_id)
        if rules[idx] != "direct":
            non_exclusive_conditions += 1
        if non_exclusive_conditions == 2:
            break
        second_class_id += 1
    # `idx` still points at the second non-exclusive rule after the loop.
    fs_utils.replace_first_pattern_occurrence(ioclass_config_path,
                                              rules[idx], f"{rules[idx]}&done")
    sync()
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    return second_class_id
| true | true |
f72ae69658ad2a7325bb00e82f738fd441ad6552 | 1,684 | py | Python | flask_controller/controller.py | AlexFence/FlaskController | efbd51a6970d407128f79876e8724b75fe6ec156 | [
"MIT"
] | 3 | 2020-10-19T08:18:51.000Z | 2022-02-06T04:29:38.000Z | flask_controller/controller.py | TypicalFence/FlaskController | efbd51a6970d407128f79876e8724b75fe6ec156 | [
"MIT"
] | null | null | null | flask_controller/controller.py | TypicalFence/FlaskController | efbd51a6970d407128f79876e8724b75fe6ec156 | [
"MIT"
] | null | null | null | from abc import ABC
def route(rule, **options):
    """Mark a FlaskController method (or class) with a URL rule.

    Behaves like flask's ``@app.route``: the rule and its keyword options
    are stored on the decorated object and consumed later by
    ``FlaskController.register``. Applied to a class, the rule acts as the
    base path for all routes of that class.

    Args:
        rule (str): the path of the newly defined route.
        options: forwarded verbatim to Flask's ``add_url_rule``.
    """
    def mark(view):
        # Stash the routing information on the object itself.
        view._route = (rule, options)
        return view
    return mark
class FlaskController(ABC):
    """Baseclass for the Controller classes.

    Extend this class and use it in conjunction with the route decorator
    to define routes for your flask app.
    Use the register method to add your defined routes to a flask app.
    """

    def __init__(self):
        super(FlaskController, self).__init__()

    def register(self, app):
        """Adds the routes of a Controller to a Flask instance.

        Args:
            app (Flask)
        """
        routes = []
        for member in dir(self):
            # BUG FIX: the original used `member is not "__class__"`, an
            # identity comparison against a str literal that only worked by
            # accident of CPython string interning (SyntaxWarning since 3.8).
            # Compare with `!=` instead. `__class__` is skipped because a
            # class-level @route only contributes the base path.
            if member != "__class__" and hasattr(getattr(self, member), "_route"):
                routes.append(member)
        self._register_routes(routes, app)

    def _register_routes(self, routes, app):
        # Register each decorated method; the endpoint name combines the
        # method name with the full rule to keep endpoints unique.
        for name in routes:
            view = getattr(self, name)
            rule, options = view._route
            full_rule = self._generate_route(rule)
            app.add_url_rule(full_rule, name + full_rule, view, **options)

    def _generate_route(self, route):
        # Prefix with the class-level base route (set by @route on the
        # class), if present.
        base_route = self._route[0] if hasattr(self, "_route") else ""
        return base_route + route
| 28.066667 | 78 | 0.604513 | from abc import ABC
def route(rule, **options):
    """Record *rule* and *options* on the decorated view function.

    Mirrors the signature of flask's @app.route; FlaskController.register
    later reads the stored ``_route`` tuple to perform the actual
    registration. Applied to a class, the rule acts as a base path.
    """
    def decorator(f):
        # Stash the routing information on the function object itself.
        f._route = (rule, options)
        return f
    return decorator
class FlaskController(ABC):
    """Baseclass for the Controller classes.

    Extend this class and use it in conjunction with the route decorator
    to define routes for your flask app; call register() to attach them
    to a Flask application.
    """

    def __init__(self):
        super(FlaskController, self).__init__()

    def register(self, app):
        """Adds the routes of a Controller to a Flask instance.

        Args:
            app (Flask)
        """
        routes = []
        for member in dir(self):
            # BUG FIX: `member is not "__class__"` compared a str by
            # identity (SyntaxWarning since Python 3.8) and only worked via
            # interning; use `!=`. `__class__` is skipped because a
            # class-level @route only contributes the base path.
            if member != "__class__" and hasattr(getattr(self, member), "_route"):
                routes.append(member)
        self._register_routes(routes, app)

    def _register_routes(self, routes, app):
        # Register each decorated method; the endpoint name combines the
        # method name with the full rule to keep endpoints unique.
        for name in routes:
            view = getattr(self, name)
            rule, options = view._route
            full_rule = self._generate_route(rule)
            app.add_url_rule(full_rule, name + full_rule, view, **options)

    def _generate_route(self, route):
        # Prefix with the class-level base route (set by @route on the
        # class), if present.
        base_route = self._route[0] if hasattr(self, "_route") else ""
        return base_route + route
| true | true |
f72ae6adfbd9100ea1e159819c5e0ed61df33f44 | 24,028 | py | Python | windows_packages_gpu/torch/testing/_internal/jit_metaprogramming_utils.py | codeproject/DeepStack | d96368a3db1bc0266cb500ba3701d130834da0e6 | [
"Apache-2.0"
] | 353 | 2020-12-10T10:47:17.000Z | 2022-03-31T23:08:29.000Z | windows_packages_gpu/torch/testing/_internal/jit_metaprogramming_utils.py | codeproject/DeepStack | d96368a3db1bc0266cb500ba3701d130834da0e6 | [
"Apache-2.0"
] | 80 | 2020-12-10T09:54:22.000Z | 2022-03-30T22:08:45.000Z | windows_packages_gpu/torch/testing/_internal/jit_metaprogramming_utils.py | codeproject/DeepStack | d96368a3db1bc0266cb500ba3701d130834da0e6 | [
"Apache-2.0"
] | 63 | 2020-12-10T17:10:34.000Z | 2022-03-28T16:27:07.000Z | # Torch
from torch.jit.annotations import BroadcastingList2, BroadcastingList3 # noqa: F401
from torch.testing._internal.common_methods_invocations import non_differentiable, create_input, \
unpack_variables
import torch.nn.functional as F
import torch
import torch.cuda
import torch.jit
import torch.jit._logging
import torch.jit.frontend
from torch.testing._internal.common_nn import module_tests, new_module_tests
from copy import deepcopy
import math # noqa: F401
# Testing utils
from torch._six import inf
# The generated test inputs assume double precision as the default scalar type.
torch.set_default_dtype(torch.double)
# Canonical tensor dimension sizes used throughout the test tables below:
# L(arge), M(edium) and S(mall).
L = 20
M = 10
S = 5
# NB: JIT script tests for all nn functional interfaces, script mode does
# not support in_place operations yet, so no inplace operation tests added.
# removed all the deprecated functions
#
# (
# method name,
# input size/constructing fn,
# args (tuple represents shape of a tensor arg),
# test variant name(will be used at test name suffix,
# 'inplace' skips grad tests), // optional
# (True, nonfusible_nodes, fusible_nodes) for autodiff // optional
# fn to determine if test should be skipped, // optional
# fn mapping output to part that should be gradcheck'ed, // optional
# kwargs for function, // optional
# )
# Table of nn.functional test cases; each tuple follows the format described
# in the comment block above (method name, self size, args, variant, ...).
nn_functional_tests = [
    ('conv1d', (S, S, S), ((S, S, S),)),
    ('conv2d', (S, S, S, S), ((S, S, S, S),)),
    ('conv3d', (S, S, S, S, S), ((S, S, S, S, S),)),
    ('conv_transpose1d', (S, S, S), ((S, S, S),)),
    ('conv_transpose2d', (S, S, S, S), ((S, S, S, S),)),
    ('conv_transpose3d', (S, S, S, S, S), ((S, S, S, S, S),)),
    ('conv_tbc', (S, S, S), ((S, S, S), (S,), 2)),
    ('avg_pool1d', (S, S, S), (3,)),
    ('avg_pool2d', (S, S, S, S), (3,), '', (True,)),
    ('avg_pool3d', (S, S, S, S, S), (3,)),
    ('fractional_max_pool2d', (S, S, S, S), (3, [2, 3],)),
    ('max_pool1d', (S, S, S), (2, 1)),
    ('max_pool1d', (S, S, S), (2, 1, 1, 1, False, True), 'with_indices'),
    ('max_pool2d', (S, S, S, S), (2, 1), '', (True, 'aten::max_pool2d_with_indices')),
    ('max_pool2d', (S, S, S, S), (2, 1, 1, 1, False, True), 'with_indices', (True, 'aten::max_pool2d_with_indices')),
    ('max_pool3d', (S, S, S, S, S), (2, 1)),
    ('max_unpool1d', torch.tensor([[[2., 4]]]), (torch.tensor([[[1, 3]]]), 2, 2, 0)),
    ('max_unpool2d', torch.tensor([[[[2., 4]]]]), (torch.tensor([[[[1, 3]]]]), 2, 2, 0)),
    ('max_unpool3d', torch.tensor([[[[[2., 4]]]]]), (torch.tensor([[[[[1, 3]]]]]), 2, 2, 0)),
    ('lp_pool1d', (S, S, S), (2., 3, 2,)),
    ('lp_pool2d', (S, S, S, S), (2., 3, 2,)),
    ('adaptive_max_pool1d', (S, S, S), (5,)),
    ('adaptive_max_pool2d', (S, S, S, S), ([5, 7],)),
    ('adaptive_max_pool3d', (S, S, S, S, S), ([3, 2, 2],)),
    ('adaptive_avg_pool1d', (S, S, S), (5,), '', (True,)),
    ('adaptive_avg_pool2d', (S, S, S, S), ([5, 7],), '', (True,)),
    ('adaptive_avg_pool3d', (S, S, S, S, S), ([3, 2, 2],), '', (True,)),
    ('dropout', (S, S, S), (0.5,), '', (True,
                                        ['aten::bernoulli_',
                                         'aten::empty_like', 'aten::mul', 'aten::div'])),
    ('alpha_dropout', (S, S, S), (0.5,)),
    ('dropout2d', (S, S, S), (0.5,)),
    ('dropout3d', (S, S, S), (0.5,)),
    ('feature_alpha_dropout', (S, S, S), (0.5,)),
    ('threshold', (S, S, S), (0.1, 2.), '', (True,)),
    ('threshold', (S, S, S), (0.1, 2., True), 'inplace'),
    ('relu', (S, S, S), (), '', (True,)),
    ('relu', (S, S, S), (), 'inplace'),
    ('glu', (S - 1, S - 1, S - 1), (),),
    ('hardtanh', (S, S, S), (-0.5, 0.5),),
    ('hardtanh', (S, S, S), (-0.5, 0.5, True), 'inplace'),
    ('relu6', (S, S, S), (),),
    ('relu6', (S, S, S), (True), 'inplace'),
    ('elu', (S, S, S), (0.9,),),
    ('elu', (S, S, S), (0.9, True), 'inplace'),
    ('selu', (S, S, S), (),),
    ('selu', (S, S, S), (True), 'inplace'),
    ('celu', (S, S, S), (0.9,),),
    ('celu', (S, S, S), (0.9, True), 'inplace'),
    ('leaky_relu', (S, S, S), (0.02,),),
    ('leaky_relu', (S, S, S), (0.02,), 'inplace'),
    ('rrelu', (S, S), (0.1, 0.3, False),),
    ('rrelu', (S, S), (0.1, 0.3, False, True), 'inplace'),
    ('hardshrink', (S, S, S), (0.4,),),
    ('tanhshrink', (S, S, S), (),),
    ('softsign', (S, S, S), (),),
    ('softplus', (S, S, S), (),),
    ('softmin', (S, S, S), (0,),),
    ('softmax', (S, S, S), (0,), '', (True,)),
    ('softmax', (S, S, S), (0, 3, torch.double), 'with_all_args', (True,)),
    ('tanh', (S, S, S), (), '', (True,)),
    ('sigmoid', (S, S, S), (), '', (True,)),
    ('log_softmax', (S, S, S), (0,), '', (True,)),
    ('linear', (S, S), ((M, S),), '', (True, ['aten::t', 'aten::matmul'])),
    ('linear', (S, S), ((M, S), (M,)), 'addmm', (True, ['aten::add', 'aten::mm'])),
    ('bilinear', (S, S, S), ((S, S, M), torch.zeros(M, S, M),),),
    ('embedding', torch.tensor([[1, 2, 4, 5], [4, 3, 2, 5]]), (torch.rand(6, 3), ), '', (True,)),
    ('embedding_bag', torch.tensor([1, 2, 4, 2]), (torch.rand(5, 3), torch.tensor([0, 4]),),),
    ('batch_norm', (S, S), (non_differentiable(torch.randn(S)), non_differentiable(torch.ones(S)), ),
        '', (False, 'aten::_batch_norm_impl_index')),
    ('instance_norm', (S, S, S), (non_differentiable(torch.zeros(S)), non_differentiable(torch.ones(S))),),
    ('layer_norm', (S, S, S, S), ([5],), '',
        (False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
    ('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),), 'with_only_weight',
        (False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
    ('layer_norm', (S, S, S, S), ([5], None, non_differentiable(torch.rand(S)),), 'with_only_bias',
        (False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
    ('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),
                                  non_differentiable(torch.rand(S))), 'with_weight_and_bias',
        (False, ['aten::contiguous', 'aten::_batch_norm_impl_index', 'aten::addcmul'])),
    ('group_norm', (S, S, S), (1, torch.rand(5),),),
    ('local_response_norm', (S, S, S), (2, ),),
    ('nll_loss', F.log_softmax(torch.randn(3, 5), dim=0), (torch.tensor([1, 0, 4]),), '', (True, 'aten::nll_loss_forward')),
    ('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2),),),
    ('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2), True, True), 'full'),
    ('kl_div', F.log_softmax(torch.randn(S, 10), 1), (F.softmax(torch.randn(S, 10), 1),),),
    ('cross_entropy', (3, S), (torch.randint(S, (3,), dtype=torch.int64),),),
    ('binary_cross_entropy_with_logits', (3,), (torch.empty(3).random_(2), ),),
    ('smooth_l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
    ('l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
    ('mse_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
    ('smooth_l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
    ('l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
    ('mse_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
    ('margin_ranking_loss', (3, S), ((3, S), (S,)),),
    ('hinge_embedding_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
    ('soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
    ('multilabel_soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
    ('cosine_embedding_loss', (S, S), ((S, S), non_differentiable(torch.rand(S,))),),
    ('pixel_shuffle', (1, 9, 4, 4), (3,),),
    ('affine_grid', (S, 2, 3), (torch.Size([S, 1, 7, 7]),),),
    ('pad', (3, 3, 4, 2), ([1, 1],),),
    ('pairwise_distance', (S, S), ((S, S),),),
    ('pdist', (S, S), (),),
    ('cosine_similarity', (S, S), ((S, S),),),
    ('triplet_margin_loss', (S, S), ((S, S), (S, S)),),
    ('normalize', (S, S, S), (),),
    ('unfold', (S, S, S, S), ([2, 3]),),
    ('fold', (1, 3 * 2 * 2, 12), ([4, 5], [2, 2]),),
    ('grid_sample', (S, S, S, S), (non_differentiable(torch.rand(S, S, S, 2)),),),
    ('gumbel_softmax', (S, S), (2.,), '', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
    ('gumbel_softmax', (S, S), (2., True,), 'hard', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
    ('multilabel_margin_loss', torch.tensor([[0.2, -0.2, 0.07]]), (torch.tensor([[0, 0, 1]]),),),
    ('multi_margin_loss', (S, S), (non_differentiable(torch.randint(S, (S, ), dtype=torch.int64)),
                                   1, 1., non_differentiable(torch.randn(S))),),
    ('binary_cross_entropy', torch.randn(3, 2).sigmoid(), (non_differentiable(torch.rand(3, 2)),
                                                           non_differentiable(torch.randn(3, 2))),),
    ('binary_cross_entropy', torch.randn(3, 2).sigmoid(),
        (non_differentiable(torch.rand(3, 2)),
         non_differentiable(torch.randn(3, 2)), None, None, 'mean'), 'size_average'),
    ('ctc_loss', torch.rand(S, S, S).log_softmax(2).detach().requires_grad_(),
     (torch.randint(1, S, (S, S), dtype=torch.long), torch.full((S,), S, dtype=torch.long),
      torch.randint(1, S, (S,), dtype=torch.long))),
    ('upsample', torch.randn(S, S, M, M), (None, 2.), 'with_scale'),
    ('upsample', torch.randn(S, S, M, M), (4,), 'with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'nearest_4d'),
    ('interpolate', torch.randn(S, S, M, M), (None, 2.), 'nearest_4d_with_scale'),
    ('interpolate', torch.randn(S, S, M, M), (4,), 'nearest_4d_with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'area_4d'),
    ('interpolate', torch.randn(S, S, M, M), (None, 2.), 'area_4d_with_scale'),
    ('interpolate', torch.randn(S, S, M, M), (4,), 'area_4d_with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bilinear_4d'),
    ('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bilinear_4d_with_scale'),
    ('interpolate', torch.randn(S, S, M, M), (4,), 'bilinear_4d_with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bicubic_4d'),
    ('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bicubic_4d_with_scale'),
    ('interpolate', torch.randn(S, S, M, M), (4,), 'bicubic_4d_with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'nearest_3d'),
    ('interpolate', torch.randn(S, M, M), (None, 2.), 'nearest_3d_with_scale'),
    ('interpolate', torch.randn(S, M, M), (4,), 'nearest_3d_with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'area_3d'),
    ('interpolate', torch.randn(S, M, M), (None, 2.), 'area_3d_with_scale'),
    ('interpolate', torch.randn(S, M, M), (4,), 'area_3d_with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'linear_3d'),
    ('interpolate', torch.randn(S, M, M), (None, 2.), 'linear_3d_with_scale'),
    ('interpolate', torch.randn(S, M, M), (4,), 'linear_3d_with_size'),
    ('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'nearest_5d_with_scale'),
    ('interpolate', torch.randn(S, M, M, M, M), (4,), 'nearest_5d_with_size'),
    ('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'area_5d'),
    ('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'area_5d_with_scale'),
    ('interpolate', torch.randn(S, M, M, M, M), (4,), 'area_5d_with_size'),
    ('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'trilinear_5d'),
    ('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'trilinear_5d_with_scale'),
    ('interpolate', torch.randn(S, M, M, M, M), (4,), 'trilinear_5d_with_size'),
    ('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2, None, 'nearest', None, False),
        'nearest_4d_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, S, M, M), (4, None, 'nearest', None, False),
        'nearest_4d_with_size_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, S, M, M), (None, 2., 'bilinear', None, False),
        'bilinear_4d_with_scale_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, S, M, M), (4, None, 'bilinear', None, False),
        'bilinear_4d_with_size_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, S, M, M), (None, 2., 'bicubic', None, False),
        'bicubic_4d_with_scale_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, S, M, M), (4, None, 'bicubic', None, False),
        'bicubic_4d_with_size_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M), (None, 2., 'nearest', None, False),
        'nearest_3d_with_scale_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M), (4, None, 'nearest', None, False),
        'nearest_3d_with_size_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M), (None, 2., 'linear', None, False),
        'linear_3d_with_scale_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M), (4, None, 'linear', None, False),
        'linear_3d_with_size_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'nearest', None, False),
        'nearest_5d_with_scale_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M, M, M), (4, None, 'nearest', None, False),
        'nearest_5d_with_size_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'trilinear', None, False),
        'trilinear_5d_with_scale_not_recompute_scale_factor'),
    ('interpolate', torch.randn(S, M, M, M, M), (4, None, 'trilinear', None, False),
        'trilinear_5d_with_size_not_recompute_scale_factor'),
]
script_template = '''
def the_method({}):
return {}
'''
def get_call(method_name, func_type, args, kwargs):
    """Build the Python source text of a call to `method_name`.

    Args:
        method_name: name of the op/function/method being called.
        func_type: one of 'functional' (torch.X), 'method' (self.X) or
            'nn_functional' (torch.nn.functional.X).
        args: list of source-level argument strings; for 'method' calls the
            first entry is the receiver and is removed from the argument list.
        kwargs: mapping of keyword-argument name to source-level value.

    Returns:
        The call expression as a string.

    Raises:
        TypeError: if `func_type` is not one of the supported kinds.
    """
    kwargs_str = ', '.join([k + '=' + str(v) for k, v in kwargs.items()])
    self_arg = args[0]
    if func_type == 'method':
        args = args[1:]

    argument_str = ', '.join(args)
    # Separate positional and keyword parts only when both are present.
    argument_str += ', ' if len(args) and len(kwargs) else ''
    argument_str += kwargs_str

    if func_type == 'functional':
        call = 'torch.{}({})'.format(method_name, argument_str)
    elif func_type == 'method':
        call = '{}.{}({})'.format(self_arg, method_name, argument_str)
    elif func_type == 'nn_functional':
        call = 'torch.nn.functional.{}({})'.format(method_name, argument_str)
    else:
        # BUG FIX: the original executed `raise 'Unsupported function type'`,
        # which in Python 3 is itself a TypeError ("exceptions must derive
        # from BaseException") and loses the intended message.
        raise TypeError('Unsupported function type: {}'.format(func_type))

    return call
def get_constant(x):
    """Map float infinities to their scriptable source spelling.

    TorchScript source cannot contain a bare `inf` literal, so infinities
    are rewritten to `math.inf` / `-math.inf`; every other value is
    returned unchanged.
    """
    for value, literal in ((inf, 'math.inf'), (-inf, '-math.inf')):
        if x == value:
            return literal
    return x
def get_script_args(args):
    """Split example arguments into script formals, tensors and actuals.

    Tensors become formal parameters ('i0', 'i1', ...) of the generated
    script function and are returned separately as runtime inputs; strings
    are inlined as quoted literals and everything else as a (possibly
    rewritten) constant.
    """
    tensors = [arg for arg in args if isinstance(arg, torch.Tensor)]
    formals = ['i{}'.format(idx) for idx in range(len(tensors))]
    actuals = []
    tensor_pos = 0
    for arg in args:
        if isinstance(arg, torch.Tensor):
            actuals.append(formals[tensor_pos])
            tensor_pos += 1
        elif isinstance(arg, str):
            actuals.append("'{}'".format(arg))
        else:
            actuals.append(str(get_constant(arg)))
    return (formals, tensors, actuals)
# create a script function from (name, func_type, output_process_fn),
# and returns the compiled function and example inputs
def gen_script_fn_and_args(method_name, func_type, *args, **kwargs):
    """Compile a one-off TorchScript function performing the given call.

    Builds the source from `script_template`, compiles it and returns the
    compiled function together with the tensor example inputs extracted
    from `args`.
    """
    formals, tensors, actuals = get_script_args(args)
    call = get_call(method_name, func_type, actuals, kwargs)
    script = script_template.format(', '.join(formals), call)
    CU = torch.jit.CompilationUnit(script)
    return CU.the_method, tensors
# create a script function from (name, func_type, output_process_fn),
# returns a function takes in (args, kwargs) and runs the compiled function and
# then applies the post process fn to the outputs
def create_script_fn(self, method_name, func_type, output_process_fn):
    """Return a closure that scripts and runs `method_name` on its inputs.

    `self` is the test case (used for its assertExportImport helper). The
    closure compiles the call, checks that the graph round-trips through
    export/import, runs it, applies `output_process_fn` to the result and
    stores the executed graph on `script_fn.last_graph` for inspection.
    """
    def script_fn(*args, **kwargs):
        fn, tensors = gen_script_fn_and_args(method_name, func_type, *args, **kwargs)
        self.assertExportImport(fn.graph, tensors)
        # Post-process the raw outputs before returning them to the caller.
        output = output_process_fn(fn(*tensors))
        script_fn.last_graph = fn.graph_for(*tensors)
        return output
    return script_fn
# make a new function where all non-tensor arguments in 'args' have been partially
# applied, and all tensor arguments remain.
# used to trace functions when some arguments are not tensors
def partial_apply_nontensors(fn, args, **kwargs):
    """Partially apply all non-tensor arguments of `fn`.

    Returns a function taking only the tensor arguments (in their original
    relative order) plus the list of those tensors, so the result can be
    traced even though `fn` mixes tensor and non-tensor parameters.
    """
    tensor_mask = [isinstance(arg, torch.Tensor) for arg in args]

    def bound_fn(*tensor_inputs):
        supplied = iter(tensor_inputs)
        full_args = [next(supplied) if is_tensor else arg
                     for is_tensor, arg in zip(tensor_mask, args)]
        return fn(*full_args, **kwargs)

    tensor_args = [arg for arg, is_tensor in zip(args, tensor_mask) if is_tensor]
    return bound_fn, tensor_args
# create a trace function from input fn
def create_traced_fn(self, fn):
    """Return a closure that traces `fn` and runs the traced version.

    Non-tensor arguments are partially applied before tracing, since
    torch.jit.trace only takes tensor example inputs. The export/import
    round trip is asserted via the test case `self`, and the executed
    graph is stored on `traced_fn.last_graph` for inspection.
    """
    def traced_fn(*inputs, **kwargs):
        fn_tensors, inputs_tensors = partial_apply_nontensors(fn, inputs, **kwargs)
        # `check_trace` is set to False because check_trace is run with @no_grad
        # Also, `check_against_reference` already does all the checks
        # against python function
        traced = torch.jit.trace(fn_tensors, inputs_tensors, check_trace=False)
        self.assertExportImport(traced.graph, inputs_tensors)
        output = traced(*inputs_tensors)
        traced_fn.last_graph = traced.graph_for(*inputs_tensors)
        return output
    return traced_fn
# Test names known to fail when run through TorchScript scripting; the
# script-based test generators skip any name found in this set.
EXCLUDE_SCRIPT = {
    'test_norm_fro_default',
    'test_norm_fro_cpu',
    'test_norm_nuc',
    'test_norm_fro',
    'test_norm_nuc_batched',
    # aten op has additional cudnn argument
    'test_nn_unfold',
    # flaky test - TODO fix
    'test_nn_ctc_loss',
    # unknown builtin op
    'test_nn_fold',
    # jit doesn't support sparse tensors.
    'test_to_sparse'
}
# generates a script function and set of example inputs
# from a specified test in the format of nn_functional_tests
def get_nn_functional_compiled_fn_and_inputs(name, self_size, args, variant_name='', *extra_args):
    """Compile an ``nn.functional`` op into a script function with inputs.

    ``name``/``self_size``/``args`` follow the ``nn_functional_tests`` row
    format; ``variant_name`` and ``*extra_args`` are accepted for signature
    compatibility with the test-row unpacking but do not affect compilation.

    Returns ``(script_fn, inputs)`` from :func:`gen_script_fn_and_args`.
    """
    # NOTE(review): the original body also built `test_name`, `no_grad`,
    # `kwargs`, and deepcopied tensor mirrors that were never read; that
    # dead code has been removed — behavior is unchanged.
    self_variable = create_input((self_size,))[0][0]
    args_variable, kwargs_variable = create_input(args)
    f_args_variable = (self_variable,) + args_variable
    # Disable emit hooks so compilation is not intercepted by test harness
    # instrumentation while building the script function.
    with torch.jit._disable_emit_hooks():
        script_fn, inputs = gen_script_fn_and_args(name, "nn_functional", *f_args_variable)
    return script_fn, inputs
# additional modules test
# TODO: delete this list once we make all nn_tests work
# Extra module specs (same schema as common_nn's module_tests) that are
# exercised only by the JIT tests; 'extra_args' are forwarded to forward(),
# and entries marked 'slowTest' are skipped in quick test runs.
additional_module_tests = [
    {
        'module_name': 'Bilinear',
        'constructor_args': (S, S, M),
        'input_size': (S, S),
        'extra_args': ((S, S),)
    },
    {
        'module_name': 'RNNCell',
        'constructor_args': (S, S),
        'input_size': (S, S),
    },
    {
        'module_name': 'LSTMCell',
        'constructor_args': (S, S),
        'input_size': (S, S),
    },
    {
        'module_name': 'GRUCell',
        'constructor_args': (S, S),
        'input_size': (S, S),
    },
    {
        'module_name': 'MultiheadAttention',
        'constructor_args': (128, 8),
        'input_size': (10, 8, 128),
        'extra_args': (torch.randn(10, 8, 128), torch.randn(10, 8, 128)),
        'slowTest': True
    },
    {
        'module_name': 'Transformer',
        'constructor_args': (1, 1, 1, 1, 2),
        'input_size': (3, 1, 1),
        'extra_args': (torch.randn(1, 1, 1),),
        'slowTest': True
    }
]
# Module tests that cannot currently be scripted; compared against
# get_nn_mod_test_name() output before attempting compilation.
EXCLUDE_SCRIPT_MODULES = {
    'test_nn_AdaptiveAvgPool2d_tuple_none',
    'test_nn_AdaptiveAvgPool3d_tuple_none',
    'test_nn_AdaptiveMaxPool2d_tuple_none',
    'test_nn_AdaptiveMaxPool3d_tuple_none',
    # Doesn't use future division, so this is not supported
    'test_nn_CrossMapLRN2d',
}
script_method_template = '''
def forward({}):
return {}
'''
def create_script_module(self, nn_module, constructor_args, *args, **kwargs):
    """Return a factory that wraps ``nn_module`` in a ScriptModule.

    The returned ``script_module(*args, **kwargs)`` builds a ScriptModule
    whose ``forward`` simply forwards to ``self.submodule`` (an instance of
    ``nn_module`` constructed with ``constructor_args``), optionally checks
    export/import when ``self`` (a test case) is provided, and returns the
    module.  Pass ``is_constant=True`` to register the submodule as a
    TorchScript constant.
    """
    def script_module(*args, **kwargs):
        # Tensors in `args` become formal parameters of the generated forward.
        formals, tensors, actuals = get_script_args(args)
        method_args = ', '.join(['self'] + actuals)
        call_args_str = ', '.join(actuals)
        call = "self.submodule({})".format(call_args_str)
        script = script_method_template.format(method_args, call)
        submodule_constants = []
        if kwargs.get('is_constant'):
            submodule_constants = ['submodule']
        # Create module to use the script method
        class TheModule(torch.jit.ScriptModule):
            __constants__ = submodule_constants

            def __init__(self):
                super(TheModule, self).__init__()
                self.submodule = nn_module(*constructor_args)

        def make_module(script):
            module = TheModule()
            # check __repr__
            str(module)
            # Attach the generated forward() source to the module.
            module.define(script)
            return module

        module = make_module(script)
        if self:
            # Only run the export/import round trip and a forward call when a
            # test-case object was supplied; callers may pass self=None.
            self.assertExportImportModule(module, tensors)
            module(*args)
            create_script_module.last_graph = module.graph
        return module
    return script_module
def get_nn_module_name_from_kwargs(**kwargs):
    """Resolve the module name from a test-spec dict.

    Precedence: explicit ``module_name``, then ``fullname``, then the
    ``constructor`` callable's ``__name__``; ``None`` when no key matches.
    """
    for key in ('module_name', 'fullname'):
        if key in kwargs:
            return kwargs[key]
    if 'constructor' in kwargs:
        return kwargs['constructor'].__name__
    return None
def get_nn_mod_test_name(**kwargs):
    """Build the ``test_nn_<name>[_<desc>]`` identifier for a module spec."""
    base = get_nn_module_name_from_kwargs(**kwargs)
    if 'desc' in kwargs:
        base = "{}_{}".format(base, kwargs['desc'])
    return 'test_nn_{}'.format(base)
def get_nn_module_class_from_kwargs(**kwargs):
    """Return the nn module class name, stripping any ``_variant`` suffix.

    E.g. ``'Conv2d_dilated'`` -> ``'Conv2d'``; a name with no underscore is
    returned unchanged.
    """
    name = get_nn_module_name_from_kwargs(**kwargs)
    index = name.find("_")
    if index == -1:
        return name
    # Reuse the already-computed index instead of re-running name.find("_")
    # inside the slice (the original evaluated it twice).
    return name[:index]
def try_get_nn_module_compiled_mod_and_inputs(*args, **kwargs):
    """Compile one nn-module test spec; return ``(module, inputs)`` or None.

    ``kwargs`` follows the common_nn test-spec schema (``module_name`` /
    ``fullname`` / ``constructor``, ``constructor_args[_fn]``,
    ``input_size``/``input_fn``, optional ``extra_args``/``target_*``).
    Returns None for specs that cannot be scripted (eval variants,
    excluded modules, functional wrappers).
    """
    name = get_nn_module_name_from_kwargs(**kwargs)
    if 'desc' in kwargs and 'eval' in kwargs['desc']:
        # eval() is not supported, so skip these tests
        return
    # NOTE(review): the next three lines compute test_name but the result is
    # immediately overwritten by get_nn_mod_test_name() below — dead code
    # kept as-is; candidate for removal.
    test_name = name
    if 'desc' in kwargs:
        test_name = "{}_{}".format(test_name, kwargs['desc'])
    test_name = get_nn_mod_test_name(**kwargs)
    if test_name in EXCLUDE_SCRIPT_MODULES:
        return
    if 'constructor' in kwargs:
        nn_module = kwargs['constructor']
    else:
        nn_module = getattr(torch.nn, name)
    # Functional wrappers are not scriptable modules; skip them.
    if "FunctionalModule" in str(nn_module):
        return
    if 'constructor_args_fn' in kwargs:
        constructor_args = kwargs['constructor_args_fn']()
    else:
        constructor_args = kwargs.get('constructor_args', ())
    # Set up inputs from tuple of sizes or constructor fn
    if 'input_fn' in kwargs:
        input = kwargs['input_fn']()
    else:
        input = (kwargs['input_size'],)
    # Extra parameters to forward()
    if 'extra_args' in kwargs:
        input = input + kwargs['extra_args']
    if 'target_size' in kwargs:
        input = input + (kwargs['target_size'],)
    elif 'target_fn' in kwargs:
        if torch.is_tensor(input):
            input = (input,)
        input = input + (kwargs['target_fn'](),)
    args_variable, kwargs_variable = create_input(input)
    f_args_variable = deepcopy(unpack_variables(args_variable))
    # Deep-copy the inputs so the compiled forward cannot mutate what the
    # caller compares against.
    out_var = deepcopy(f_args_variable)
    args, mod = f_args_variable, create_script_module(None, nn_module, constructor_args, *f_args_variable)(*f_args_variable)
    return mod, out_var
def get_all_nn_module_tests():
    """Return every nn-module test spec: the shared common_nn lists plus the
    JIT-specific additions in ``additional_module_tests``."""
    return module_tests + new_module_tests + additional_module_tests
| 44.91215 | 125 | 0.579657 |
from torch.jit.annotations import BroadcastingList2, BroadcastingList3
from torch.testing._internal.common_methods_invocations import non_differentiable, create_input, \
unpack_variables
import torch.nn.functional as F
import torch
import torch.cuda
import torch.jit
import torch.jit._logging
import torch.jit.frontend
from torch.testing._internal.common_nn import module_tests, new_module_tests
from copy import deepcopy
import math
from torch._six import inf
torch.set_default_dtype(torch.double)
L = 20
M = 10
S = 5
# kwargs for function, // optional
# )
nn_functional_tests = [
('conv1d', (S, S, S), ((S, S, S),)),
('conv2d', (S, S, S, S), ((S, S, S, S),)),
('conv3d', (S, S, S, S, S), ((S, S, S, S, S),)),
('conv_transpose1d', (S, S, S), ((S, S, S),)),
('conv_transpose2d', (S, S, S, S), ((S, S, S, S),)),
('conv_transpose3d', (S, S, S, S, S), ((S, S, S, S, S),)),
('conv_tbc', (S, S, S), ((S, S, S), (S,), 2)),
('avg_pool1d', (S, S, S), (3,)),
('avg_pool2d', (S, S, S, S), (3,), '', (True,)),
('avg_pool3d', (S, S, S, S, S), (3,)),
('fractional_max_pool2d', (S, S, S, S), (3, [2, 3],)),
('max_pool1d', (S, S, S), (2, 1)),
('max_pool1d', (S, S, S), (2, 1, 1, 1, False, True), 'with_indices'),
('max_pool2d', (S, S, S, S), (2, 1), '', (True, 'aten::max_pool2d_with_indices')),
('max_pool2d', (S, S, S, S), (2, 1, 1, 1, False, True), 'with_indices', (True, 'aten::max_pool2d_with_indices')),
('max_pool3d', (S, S, S, S, S), (2, 1)),
('max_unpool1d', torch.tensor([[[2., 4]]]), (torch.tensor([[[1, 3]]]), 2, 2, 0)),
('max_unpool2d', torch.tensor([[[[2., 4]]]]), (torch.tensor([[[[1, 3]]]]), 2, 2, 0)),
('max_unpool3d', torch.tensor([[[[[2., 4]]]]]), (torch.tensor([[[[[1, 3]]]]]), 2, 2, 0)),
('lp_pool1d', (S, S, S), (2., 3, 2,)),
('lp_pool2d', (S, S, S, S), (2., 3, 2,)),
('adaptive_max_pool1d', (S, S, S), (5,)),
('adaptive_max_pool2d', (S, S, S, S), ([5, 7],)),
('adaptive_max_pool3d', (S, S, S, S, S), ([3, 2, 2],)),
('adaptive_avg_pool1d', (S, S, S), (5,), '', (True,)),
('adaptive_avg_pool2d', (S, S, S, S), ([5, 7],), '', (True,)),
('adaptive_avg_pool3d', (S, S, S, S, S), ([3, 2, 2],), '', (True,)),
('dropout', (S, S, S), (0.5,), '', (True,
['aten::bernoulli_',
'aten::empty_like', 'aten::mul', 'aten::div'])),
('alpha_dropout', (S, S, S), (0.5,)),
('dropout2d', (S, S, S), (0.5,)),
('dropout3d', (S, S, S), (0.5,)),
('feature_alpha_dropout', (S, S, S), (0.5,)),
('threshold', (S, S, S), (0.1, 2.), '', (True,)),
('threshold', (S, S, S), (0.1, 2., True), 'inplace'),
('relu', (S, S, S), (), '', (True,)),
('relu', (S, S, S), (), 'inplace'),
('glu', (S - 1, S - 1, S - 1), (),),
('hardtanh', (S, S, S), (-0.5, 0.5),),
('hardtanh', (S, S, S), (-0.5, 0.5, True), 'inplace'),
('relu6', (S, S, S), (),),
('relu6', (S, S, S), (True), 'inplace'),
('elu', (S, S, S), (0.9,),),
('elu', (S, S, S), (0.9, True), 'inplace'),
('selu', (S, S, S), (),),
('selu', (S, S, S), (True), 'inplace'),
('celu', (S, S, S), (0.9,),),
('celu', (S, S, S), (0.9, True), 'inplace'),
('leaky_relu', (S, S, S), (0.02,),),
('leaky_relu', (S, S, S), (0.02,), 'inplace'),
('rrelu', (S, S), (0.1, 0.3, False),),
('rrelu', (S, S), (0.1, 0.3, False, True), 'inplace'),
('hardshrink', (S, S, S), (0.4,),),
('tanhshrink', (S, S, S), (),),
('softsign', (S, S, S), (),),
('softplus', (S, S, S), (),),
('softmin', (S, S, S), (0,),),
('softmax', (S, S, S), (0,), '', (True,)),
('softmax', (S, S, S), (0, 3, torch.double), 'with_all_args', (True,)),
('tanh', (S, S, S), (), '', (True,)),
('sigmoid', (S, S, S), (), '', (True,)),
('log_softmax', (S, S, S), (0,), '', (True,)),
('linear', (S, S), ((M, S),), '', (True, ['aten::t', 'aten::matmul'])),
('linear', (S, S), ((M, S), (M,)), 'addmm', (True, ['aten::add', 'aten::mm'])),
('bilinear', (S, S, S), ((S, S, M), torch.zeros(M, S, M),),),
('embedding', torch.tensor([[1, 2, 4, 5], [4, 3, 2, 5]]), (torch.rand(6, 3), ), '', (True,)),
('embedding_bag', torch.tensor([1, 2, 4, 2]), (torch.rand(5, 3), torch.tensor([0, 4]),),),
('batch_norm', (S, S), (non_differentiable(torch.randn(S)), non_differentiable(torch.ones(S)), ),
'', (False, 'aten::_batch_norm_impl_index')),
('instance_norm', (S, S, S), (non_differentiable(torch.zeros(S)), non_differentiable(torch.ones(S))),),
('layer_norm', (S, S, S, S), ([5],), '',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),), 'with_only_weight',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], None, non_differentiable(torch.rand(S)),), 'with_only_bias',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),
non_differentiable(torch.rand(S))), 'with_weight_and_bias',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index', 'aten::addcmul'])),
('group_norm', (S, S, S), (1, torch.rand(5),),),
('local_response_norm', (S, S, S), (2, ),),
('nll_loss', F.log_softmax(torch.randn(3, 5), dim=0), (torch.tensor([1, 0, 4]),), '', (True, 'aten::nll_loss_forward')),
('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2),),),
('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2), True, True), 'full'),
('kl_div', F.log_softmax(torch.randn(S, 10), 1), (F.softmax(torch.randn(S, 10), 1),),),
('cross_entropy', (3, S), (torch.randint(S, (3,), dtype=torch.int64),),),
('binary_cross_entropy_with_logits', (3,), (torch.empty(3).random_(2), ),),
('smooth_l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('mse_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('smooth_l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('mse_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('margin_ranking_loss', (3, S), ((3, S), (S,)),),
('hinge_embedding_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('multilabel_soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('cosine_embedding_loss', (S, S), ((S, S), non_differentiable(torch.rand(S,))),),
('pixel_shuffle', (1, 9, 4, 4), (3,),),
('affine_grid', (S, 2, 3), (torch.Size([S, 1, 7, 7]),),),
('pad', (3, 3, 4, 2), ([1, 1],),),
('pairwise_distance', (S, S), ((S, S),),),
('pdist', (S, S), (),),
('cosine_similarity', (S, S), ((S, S),),),
('triplet_margin_loss', (S, S), ((S, S), (S, S)),),
('normalize', (S, S, S), (),),
('unfold', (S, S, S, S), ([2, 3]),),
('fold', (1, 3 * 2 * 2, 12), ([4, 5], [2, 2]),),
('grid_sample', (S, S, S, S), (non_differentiable(torch.rand(S, S, S, 2)),),),
('gumbel_softmax', (S, S), (2.,), '', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
('gumbel_softmax', (S, S), (2., True,), 'hard', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
('multilabel_margin_loss', torch.tensor([[0.2, -0.2, 0.07]]), (torch.tensor([[0, 0, 1]]),),),
('multi_margin_loss', (S, S), (non_differentiable(torch.randint(S, (S, ), dtype=torch.int64)),
1, 1., non_differentiable(torch.randn(S))),),
('binary_cross_entropy', torch.randn(3, 2).sigmoid(), (non_differentiable(torch.rand(3, 2)),
non_differentiable(torch.randn(3, 2))),),
('binary_cross_entropy', torch.randn(3, 2).sigmoid(),
(non_differentiable(torch.rand(3, 2)),
non_differentiable(torch.randn(3, 2)), None, None, 'mean'), 'size_average'),
('ctc_loss', torch.rand(S, S, S).log_softmax(2).detach().requires_grad_(),
(torch.randint(1, S, (S, S), dtype=torch.long), torch.full((S,), S, dtype=torch.long),
torch.randint(1, S, (S,), dtype=torch.long))),
('upsample', torch.randn(S, S, M, M), (None, 2.), 'with_scale'),
('upsample', torch.randn(S, S, M, M), (4,), 'with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'nearest_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'nearest_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'nearest_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'area_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'area_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'area_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bilinear_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bilinear_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'bilinear_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bicubic_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bicubic_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'bicubic_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'nearest_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'nearest_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'nearest_3d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'area_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'area_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'area_3d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'linear_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'linear_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'linear_3d_with_size'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'nearest_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'nearest_5d_with_size'),
('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'area_5d'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'area_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'area_5d_with_size'),
('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'trilinear_5d'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'trilinear_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'trilinear_5d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2, None, 'nearest', None, False),
'nearest_4d_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'nearest', None, False),
'nearest_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (None, 2., 'bilinear', None, False),
'bilinear_4d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'bilinear', None, False),
'bilinear_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (None, 2., 'bicubic', None, False),
'bicubic_4d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'bicubic', None, False),
'bicubic_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (None, 2., 'nearest', None, False),
'nearest_3d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (4, None, 'nearest', None, False),
'nearest_3d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (None, 2., 'linear', None, False),
'linear_3d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (4, None, 'linear', None, False),
'linear_3d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'nearest', None, False),
'nearest_5d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (4, None, 'nearest', None, False),
'nearest_5d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'trilinear', None, False),
'trilinear_5d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (4, None, 'trilinear', None, False),
'trilinear_5d_with_size_not_recompute_scale_factor'),
]
script_template = '''
def the_method({}):
return {}
'''
def get_call(method_name, func_type, args, kwargs):
kwargs_str = ', '.join([k + '=' + str(v) for k, v in kwargs.items()])
self_arg = args[0]
if(func_type == 'method'):
args = args[1:]
argument_str = ', '.join(args)
argument_str += ', ' if len(args) and len(kwargs) else ''
argument_str += kwargs_str
if func_type == 'functional':
call = 'torch.{}({})'.format(method_name, argument_str)
elif func_type == 'method':
call = '{}.{}({})'.format(self_arg, method_name, argument_str)
elif func_type == 'nn_functional':
call = 'torch.nn.functional.{}({})'.format(method_name, argument_str)
else:
raise 'Unsupported function type'
return call
def get_constant(x):
if x == inf:
return 'math.inf'
if x == -inf:
return '-math.inf'
return x
def get_script_args(args):
formals = []
tensors = []
actuals = []
for arg in args:
if isinstance(arg, torch.Tensor):
name = 'i{}'.format(len(formals))
formals.append(name)
actuals.append(name)
tensors.append(arg)
elif isinstance(arg, str):
actuals.append("'{}'".format(arg))
else:
actuals.append(str(get_constant(arg)))
return (formals, tensors, actuals)
# create a script function from (name, func_type, output_process_fn),
# and returns the compiled function and example inputs
def gen_script_fn_and_args(method_name, func_type, *args, **kwargs):
formals, tensors, actuals = get_script_args(args)
call = get_call(method_name, func_type, actuals, kwargs)
script = script_template.format(', '.join(formals), call)
CU = torch.jit.CompilationUnit(script)
return CU.the_method, tensors
# create a script function from (name, func_type, output_process_fn),
# returns a function takes in (args, kwargs) and runs the compiled function and
# then applies the post process fn to the outputs
def create_script_fn(self, method_name, func_type, output_process_fn):
def script_fn(*args, **kwargs):
fn, tensors = gen_script_fn_and_args(method_name, func_type, *args, **kwargs)
self.assertExportImport(fn.graph, tensors)
output = output_process_fn(fn(*tensors))
script_fn.last_graph = fn.graph_for(*tensors)
return output
return script_fn
# make a new function where all non-tensor arguments in 'args' have been partially
# applied, and all tensor arguments remain.
# used to trace functions when some arguments are not tensors
def partial_apply_nontensors(fn, args, **kwargs):
source = ['t' if isinstance(arg, torch.Tensor) else 's' for arg in args]
def new_fn(*tensors_):
tensors = iter(tensors_)
return fn(*(args[i] if s == 's' else next(tensors) for i, s in enumerate(source)), **kwargs)
return new_fn, [arg for arg in args if isinstance(arg, torch.Tensor)]
# create a trace function from input fn
def create_traced_fn(self, fn):
def traced_fn(*inputs, **kwargs):
fn_tensors, inputs_tensors = partial_apply_nontensors(fn, inputs, **kwargs)
# `check_trace` is set to False because check_trace is run with @no_grad
# Also, `check_against_reference` already does all the checks
# against python function
traced = torch.jit.trace(fn_tensors, inputs_tensors, check_trace=False)
self.assertExportImport(traced.graph, inputs_tensors)
output = traced(*inputs_tensors)
traced_fn.last_graph = traced.graph_for(*inputs_tensors)
return output
return traced_fn
# known to be failing in script
EXCLUDE_SCRIPT = {
'test_norm_fro_default',
'test_norm_fro_cpu',
'test_norm_nuc',
'test_norm_fro',
'test_norm_nuc_batched',
# aten op has additional cudnn argument
'test_nn_unfold',
# flaky test - TODO fix
'test_nn_ctc_loss',
# unknown builtin op
'test_nn_fold',
# jit doesn't support sparse tensors.
'test_to_sparse'
}
def get_nn_functional_compiled_fn_and_inputs(name, self_size, args, variant_name='', *extra_args):
test_name = 'test_nn_' + name
if variant_name != '':
test_name = test_name + '_' + variant_name
no_grad = variant_name == 'inplace'
self_variable = create_input((self_size,))[0][0]
kwargs = None
args_variable, kwargs_variable = create_input(args)
self_tensor = deepcopy(self_variable.data)
args_tensor = deepcopy(unpack_variables(args_variable))
f_args_variable = (self_variable,) + args_variable
f_args_tensor = (self_tensor,) + args_tensor
with torch.jit._disable_emit_hooks():
script_fn, inputs = gen_script_fn_and_args(name, "nn_functional", *f_args_variable)
return script_fn, inputs
additional_module_tests = [
{
'module_name': 'Bilinear',
'constructor_args': (S, S, M),
'input_size': (S, S),
'extra_args': ((S, S),)
},
{
'module_name': 'RNNCell',
'constructor_args': (S, S),
'input_size': (S, S),
},
{
'module_name': 'LSTMCell',
'constructor_args': (S, S),
'input_size': (S, S),
},
{
'module_name': 'GRUCell',
'constructor_args': (S, S),
'input_size': (S, S),
},
{
'module_name': 'MultiheadAttention',
'constructor_args': (128, 8),
'input_size': (10, 8, 128),
'extra_args': (torch.randn(10, 8, 128), torch.randn(10, 8, 128)),
'slowTest': True
},
{
'module_name': 'Transformer',
'constructor_args': (1, 1, 1, 1, 2),
'input_size': (3, 1, 1),
'extra_args': (torch.randn(1, 1, 1),),
'slowTest': True
}
]
EXCLUDE_SCRIPT_MODULES = {
'test_nn_AdaptiveAvgPool2d_tuple_none',
'test_nn_AdaptiveAvgPool3d_tuple_none',
'test_nn_AdaptiveMaxPool2d_tuple_none',
'test_nn_AdaptiveMaxPool3d_tuple_none',
'test_nn_CrossMapLRN2d',
}
script_method_template = '''
def forward({}):
return {}
'''
def create_script_module(self, nn_module, constructor_args, *args, **kwargs):
def script_module(*args, **kwargs):
formals, tensors, actuals = get_script_args(args)
method_args = ', '.join(['self'] + actuals)
call_args_str = ', '.join(actuals)
call = "self.submodule({})".format(call_args_str)
script = script_method_template.format(method_args, call)
submodule_constants = []
if kwargs.get('is_constant'):
submodule_constants = ['submodule']
# Create module to use the script method
class TheModule(torch.jit.ScriptModule):
__constants__ = submodule_constants
def __init__(self):
super(TheModule, self).__init__()
self.submodule = nn_module(*constructor_args)
def make_module(script):
module = TheModule()
# check __repr__
str(module)
module.define(script)
return module
module = make_module(script)
if self:
self.assertExportImportModule(module, tensors)
module(*args)
create_script_module.last_graph = module.graph
return module
return script_module
def get_nn_module_name_from_kwargs(**kwargs):
if 'module_name' in kwargs:
return kwargs['module_name']
elif 'fullname' in kwargs:
return kwargs['fullname']
elif 'constructor' in kwargs:
return kwargs['constructor'].__name__
def get_nn_mod_test_name(**kwargs):
name = get_nn_module_name_from_kwargs(**kwargs)
test_name = name
if 'desc' in kwargs:
test_name = "{}_{}".format(test_name, kwargs['desc'])
return 'test_nn_{}'.format(test_name)
def get_nn_module_class_from_kwargs(**kwargs):
name = get_nn_module_name_from_kwargs(**kwargs)
index = name.find("_")
if index == -1:
return name
else:
return name[0:name.find("_")]
def try_get_nn_module_compiled_mod_and_inputs(*args, **kwargs):
name = get_nn_module_name_from_kwargs(**kwargs)
if 'desc' in kwargs and 'eval' in kwargs['desc']:
# eval() is not supported, so skip these tests
return
test_name = name
if 'desc' in kwargs:
test_name = "{}_{}".format(test_name, kwargs['desc'])
test_name = get_nn_mod_test_name(**kwargs)
if test_name in EXCLUDE_SCRIPT_MODULES:
return
if 'constructor' in kwargs:
nn_module = kwargs['constructor']
else:
nn_module = getattr(torch.nn, name)
if "FunctionalModule" in str(nn_module):
return
if 'constructor_args_fn' in kwargs:
constructor_args = kwargs['constructor_args_fn']()
else:
constructor_args = kwargs.get('constructor_args', ())
# Set up inputs from tuple of sizes or constructor fn
if 'input_fn' in kwargs:
input = kwargs['input_fn']()
else:
input = (kwargs['input_size'],)
# Extra parameters to forward()
if 'extra_args' in kwargs:
input = input + kwargs['extra_args']
if 'target_size' in kwargs:
input = input + (kwargs['target_size'],)
elif 'target_fn' in kwargs:
if torch.is_tensor(input):
input = (input,)
input = input + (kwargs['target_fn'](),)
args_variable, kwargs_variable = create_input(input)
f_args_variable = deepcopy(unpack_variables(args_variable))
out_var = deepcopy(f_args_variable)
args, mod = f_args_variable, create_script_module(None, nn_module, constructor_args, *f_args_variable)(*f_args_variable)
return mod, out_var
def get_all_nn_module_tests():
return module_tests + new_module_tests + additional_module_tests
| true | true |
f72ae77f6af21241e139bcfcb73ffd4cb6993215 | 566 | py | Python | setup.py | galperins4/python-client | c8b6ea1f33801254eb560429b2c775d10fe60273 | [
"MIT"
] | 1 | 2018-06-15T11:19:23.000Z | 2018-06-15T11:19:23.000Z | setup.py | galperins4/mirror-python-client | c8b6ea1f33801254eb560429b2c775d10fe60273 | [
"MIT"
] | null | null | null | setup.py | galperins4/mirror-python-client | c8b6ea1f33801254eb560429b2c775d10fe60273 | [
"MIT"
] | null | null | null | import sys
import setuptools
# Runtime dependencies installed with the package.
requires = [
    'requests>=2.19.1',
    'backoff>=1.6.0',
    'flatten_dict>=0.3.0'
]
# No test-only or optional (extras) dependencies are declared yet.
tests_require = []
extras_require = {}
setuptools.setup(
    name='hedera-python-client',
    description='Python API client for Hedera Hashgraph.',
    version='0.0.1',
    author='TBD',
    author_email='TBD',
    url='https://github.com/galperins4/hedera-python-client',
    # Ship every package except the test tree.
    packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
    install_requires=requires,
    extras_require=extras_require,
    tests_require=tests_require,
)
| 20.214286 | 68 | 0.676678 | import sys
import setuptools
requires = [
'requests>=2.19.1',
'backoff>=1.6.0',
'flatten_dict>=0.3.0'
]
tests_require = []
extras_require = {}
setuptools.setup(
name='hedera-python-client',
description='Python API client for Hedera Hashgraph.',
version='0.0.1',
author='TBD',
author_email='TBD',
url='https://github.com/galperins4/hedera-python-client',
packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
install_requires=requires,
extras_require=extras_require,
tests_require=tests_require,
)
| true | true |
f72ae7e848291c51786e5d2a992f0c9c85761179 | 7,832 | py | Python | plugins/modules/oci_object_storage_replication_policy_facts.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_object_storage_replication_policy_facts.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_object_storage_replication_policy_facts.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_object_storage_replication_policy_facts
short_description: Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
- List the replication policies associated with a bucket.
- If I(replication_id) is specified, the details of a single ReplicationPolicy will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
namespace_name:
description:
- The Object Storage namespace used for the request.
type: str
required: true
bucket_name:
description:
- "The name of the bucket. Avoid entering confidential information.
Example: `my-new-bucket1`"
type: str
required: true
replication_id:
description:
- The ID of the replication policy.
- Required to get a specific replication_policy.
type: str
aliases: ["id"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_name_option ]
"""
EXAMPLES = """
- name: Get a specific replication_policy
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
replication_id: "ocid1.replication.oc1..xxxxxxEXAMPLExxxxxx"
- name: List replication_policies
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
"""
RETURN = """
replication_policies:
description:
- List of ReplicationPolicy resources
returned: on success
type: complex
contains:
id:
description:
- The id of the replication policy.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
name:
description:
- The name of the policy.
returned: on success
type: str
sample: name_example
destination_region_name:
description:
- "The destination region to replicate to, for example \\"us-ashburn-1\\"."
returned: on success
type: str
sample: destination_region_name_example
destination_bucket_name:
description:
- The bucket to replicate to in the destination region. Replication policy creation does not automatically
create a destination bucket. Create the destination bucket before creating the policy.
returned: on success
type: str
sample: destination_bucket_name_example
time_created:
description:
- The date when the replication policy was created as per L(RFC 3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_last_sync:
description:
- Changes made to the source bucket before this time has been replicated.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
status:
description:
- The replication status of the policy. If the status is CLIENT_ERROR, once the user fixes the issue
described in the status message, the status will become ACTIVE.
returned: on success
type: str
sample: ACTIVE
status_message:
description:
- A human-readable description of the status.
returned: on success
type: str
sample: status_message_example
sample: [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"name": "name_example",
"destination_region_name": "destination_region_name_example",
"destination_bucket_name": "destination_bucket_name_example",
"time_created": "2013-10-20T19:20:30+01:00",
"time_last_sync": "2013-10-20T19:20:30+01:00",
"status": "ACTIVE",
"status_message": "status_message_example"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.object_storage import ObjectStorageClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class ReplicationPolicyFactsHelperGen(OCIResourceFactsHelperBase):
    """Generated facts helper for ReplicationPolicy. Supported operations: get, list."""

    def get_required_params_for_get(self):
        # Module parameters that must be present to fetch a single policy.
        return ["namespace_name", "bucket_name", "replication_id"]

    def get_required_params_for_list(self):
        # Module parameters that must be present to list policies.
        return ["namespace_name", "bucket_name"]

    def get_resource(self):
        """Fetch one ReplicationPolicy identified by the module parameters."""
        params = self.module.params
        return oci_common_utils.call_with_backoff(
            self.client.get_replication_policy,
            namespace_name=params.get("namespace_name"),
            bucket_name=params.get("bucket_name"),
            replication_id=params.get("replication_id"),
        )

    def list_resources(self):
        """List ReplicationPolicy resources, forwarding any optional filters that were set."""
        # Only pass through optional filters the user actually supplied.
        optional_kwargs = {
            param: self.module.params[param]
            for param in ("name",)
            if self.module.params.get(param) is not None
        }
        return oci_common_utils.list_all_resources(
            self.client.list_replication_policies,
            namespace_name=self.module.params.get("namespace_name"),
            bucket_name=self.module.params.get("bucket_name"),
            **optional_kwargs
        )
ReplicationPolicyFactsHelperCustom = get_custom_class(
"ReplicationPolicyFactsHelperCustom"
)
class ResourceFactsHelper(
    ReplicationPolicyFactsHelperCustom, ReplicationPolicyFactsHelperGen
):
    """Concrete facts helper: customer-specific overrides (if any) take
    precedence over the generated implementation via the MRO."""
    pass
def main():
    """Ansible entry point: build the arg spec, run the facts helper, exit with results."""
    module_args = oci_common_utils.get_common_arg_spec()
    module_args.update(
        {
            "namespace_name": {"type": "str", "required": True},
            "bucket_name": {"type": "str", "required": True},
            "replication_id": {"type": "str", "aliases": ["id"]},
            "name": {"type": "str"},
        }
    )
    module = AnsibleModule(argument_spec=module_args)

    # The OCI SDK is an optional import guarded at module load time.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    facts_helper = ResourceFactsHelper(
        module=module,
        resource_type="replication_policy",
        service_client_class=ObjectStorageClient,
        namespace="object_storage",
    )

    # Dispatch on whether the parameters identify a single resource or a listing.
    if facts_helper.is_get():
        result = [facts_helper.get()]
    elif facts_helper.is_list():
        result = facts_helper.list()
    else:
        facts_helper.fail()
        result = []  # fail() reports the error via module.fail_json

    module.exit_json(replication_policies=result)
if __name__ == "__main__":
main()
| 32.633333 | 122 | 0.655388 |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_object_storage_replication_policy_facts
short_description: Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
- List the replication policies associated with a bucket.
- If I(replication_id) is specified, the details of a single ReplicationPolicy will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
namespace_name:
description:
- The Object Storage namespace used for the request.
type: str
required: true
bucket_name:
description:
- "The name of the bucket. Avoid entering confidential information.
Example: `my-new-bucket1`"
type: str
required: true
replication_id:
description:
- The ID of the replication policy.
- Required to get a specific replication_policy.
type: str
aliases: ["id"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_name_option ]
"""
EXAMPLES = """
- name: Get a specific replication_policy
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
replication_id: "ocid1.replication.oc1..xxxxxxEXAMPLExxxxxx"
- name: List replication_policies
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
"""
RETURN = """
replication_policies:
description:
- List of ReplicationPolicy resources
returned: on success
type: complex
contains:
id:
description:
- The id of the replication policy.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
name:
description:
- The name of the policy.
returned: on success
type: str
sample: name_example
destination_region_name:
description:
- "The destination region to replicate to, for example \\"us-ashburn-1\\"."
returned: on success
type: str
sample: destination_region_name_example
destination_bucket_name:
description:
- The bucket to replicate to in the destination region. Replication policy creation does not automatically
create a destination bucket. Create the destination bucket before creating the policy.
returned: on success
type: str
sample: destination_bucket_name_example
time_created:
description:
- The date when the replication policy was created as per L(RFC 3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_last_sync:
description:
- Changes made to the source bucket before this time has been replicated.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
status:
description:
- The replication status of the policy. If the status is CLIENT_ERROR, once the user fixes the issue
described in the status message, the status will become ACTIVE.
returned: on success
type: str
sample: ACTIVE
status_message:
description:
- A human-readable description of the status.
returned: on success
type: str
sample: status_message_example
sample: [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"name": "name_example",
"destination_region_name": "destination_region_name_example",
"destination_bucket_name": "destination_bucket_name_example",
"time_created": "2013-10-20T19:20:30+01:00",
"time_last_sync": "2013-10-20T19:20:30+01:00",
"status": "ACTIVE",
"status_message": "status_message_example"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.object_storage import ObjectStorageClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class ReplicationPolicyFactsHelperGen(OCIResourceFactsHelperBase):
    """Generated facts helper for ReplicationPolicy. Supported operations: get, list."""
    def get_required_params_for_get(self):
        """Module parameters required to fetch a single replication policy."""
        return [
            "namespace_name",
            "bucket_name",
            "replication_id",
        ]
    def get_required_params_for_list(self):
        """Module parameters required to list replication policies."""
        return [
            "namespace_name",
            "bucket_name",
        ]
    def get_resource(self):
        """Fetch one ReplicationPolicy identified by the module parameters."""
        return oci_common_utils.call_with_backoff(
            self.client.get_replication_policy,
            namespace_name=self.module.params.get("namespace_name"),
            bucket_name=self.module.params.get("bucket_name"),
            replication_id=self.module.params.get("replication_id"),
        )
    def list_resources(self):
        """List ReplicationPolicy resources, forwarding optional filters that were set."""
        optional_list_method_params = [
            "name",
        ]
        # Only forward optional filters the user actually supplied.
        optional_kwargs = dict(
            (param, self.module.params[param])
            for param in optional_list_method_params
            if self.module.params.get(param) is not None
        )
        return oci_common_utils.list_all_resources(
            self.client.list_replication_policies,
            namespace_name=self.module.params.get("namespace_name"),
            bucket_name=self.module.params.get("bucket_name"),
            **optional_kwargs
        )
ReplicationPolicyFactsHelperCustom = get_custom_class(
"ReplicationPolicyFactsHelperCustom"
)
class ResourceFactsHelper(
    ReplicationPolicyFactsHelperCustom, ReplicationPolicyFactsHelperGen
):
    """Concrete facts helper: customer overrides take precedence via the MRO."""
    pass
def main():
    """Ansible entry point: build the arg spec, run the facts helper, exit with results."""
    module_args = oci_common_utils.get_common_arg_spec()
    module_args.update(
        dict(
            namespace_name=dict(type="str", required=True),
            bucket_name=dict(type="str", required=True),
            replication_id=dict(aliases=["id"], type="str"),
            name=dict(type="str"),
        )
    )
    module = AnsibleModule(argument_spec=module_args)
    # The OCI SDK is an optional import guarded at module load time.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_facts_helper = ResourceFactsHelper(
        module=module,
        resource_type="replication_policy",
        service_client_class=ObjectStorageClient,
        namespace="object_storage",
    )
    result = []
    # Dispatch on whether the parameters identify a single resource or a listing.
    if resource_facts_helper.is_get():
        result = [resource_facts_helper.get()]
    elif resource_facts_helper.is_list():
        result = resource_facts_helper.list()
    else:
        resource_facts_helper.fail()
    module.exit_json(replication_policies=result)
module.exit_json(replication_policies=result)
if __name__ == "__main__":
main()
| true | true |
f72ae8822be3a2b344c2b3ee4a5a5f5d65da61a6 | 3,218 | py | Python | NTP_Bot/msg_interpreter.py | PEI-I1/Nos_Tech_Problems | cf8b0b51285a912988a96cc96438f81c75fa45b7 | [
"MIT"
] | null | null | null | NTP_Bot/msg_interpreter.py | PEI-I1/Nos_Tech_Problems | cf8b0b51285a912988a96cc96438f81c75fa45b7 | [
"MIT"
] | 14 | 2020-06-05T20:19:18.000Z | 2021-09-22T18:18:23.000Z | NTP_Bot/msg_interpreter.py | PEI-I1/Nos_Tech_Problems | cf8b0b51285a912988a96cc96438f81c75fa45b7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import tensorflow_hub as hub
import numpy as np
import tensorflow_text
import json, re, os
from threading import Thread
from keywords import keywords
embeddings = {}
embed = None
def loadModelData():
    """Load the TensorFlow sentence encoder and pre-encode every known
    problem-feature value into the module-level ``embeddings`` cache."""
    global embed, embeddings
    embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder-multilingual-large/2")
    with open(os.getcwd() + '/input_options.json') as json_file:
        data = json.load(json_file)
    # Pre-compute embeddings for the symptom and all three typification levels.
    for feature_type in ('Sintoma', 'Tipificacao_Nivel_1',
                         'Tipificacao_Nivel_2', 'Tipificacao_Nivel_3'):
        embedProblemData(data, feature_type, embeddings)
def embedProblemData(data, feature_type, embeddings):
    """Encode every value of *feature_type* and cache the vectors.

    :param data: parsed input_options.json mapping feature types to value lists
    :param feature_type: key in *data* whose values should be embedded
    :param embeddings: dict to populate, mapping raw feature value -> embedding
    """
    raw_features = list(data[feature_type])
    # The encoder is fed lower-cased text; the cache keys keep the raw casing.
    proc_features = [x.lower() for x in raw_features]
    feature_embeddings = embed(proc_features)["outputs"]
    # zip pairs each raw value with its embedding row (replaces index loop).
    for raw_value, vector in zip(raw_features, feature_embeddings):
        embeddings[raw_value] = vector
def replaceWithKeywords(line, keywords):
    """Return *line* plus one keyword-substituted variant per matching pattern.

    :param line: string to scan for expressions
    :param keywords: mapping of keyword -> iterable of regex patterns
    :return: list whose first element is the original line, followed by one
        copy per pattern that matched, with the pattern replaced by its keyword
    """
    variants = [line]
    for keyword, patterns in keywords.items():
        for pattern in patterns:
            if re.search(pattern, line):
                variants.append(re.sub(pattern, keyword, line))
    return variants
def getFeatureSuggestion(line, keywords, ss_vals, ss_embeddings, category):
    ''' Calculates the feature from category that is semantically closest to
    the one described in line.

    :param line: user-provided problem description
    :param keywords: keyword substitution patterns (see keywords.py)
    :param ss_vals: candidate feature values (the search space)
    :param ss_embeddings: pre-computed embeddings for ss_vals, same order
    :param category: typification level; >0 enables category-specific keywords
    :return: (closest candidate from ss_vals, its similarity score)
    '''
    ll = line.lower()
    # Build keyword-normalised variants of the sentence.
    line_versions = replaceWithKeywords(ll, keywords['common'])
    if category>0:
        line_versions.extend(replaceWithKeywords(ll, keywords['tip_'+str(category)]))
    # Embed each variant and compute inner-product similarity to each candidate.
    sentence_embeddings = [embed(line_version)["outputs"] for line_version in line_versions]
    similarity_matrices = [list(np.inner(sent_emb, ss_embeddings)[0])
                           for sent_emb in sentence_embeddings]
    # Keep whichever variant yields the single highest similarity anywhere.
    max_values = [max(similarity_matrice) for similarity_matrice in similarity_matrices]
    max_abs = max(max_values)
    similarity_matrix = similarity_matrices[max_values.index(max_abs)]
    sugestao = ss_vals[similarity_matrix.index(max_abs)]
    return sugestao, max_abs
def extractProblemData(prob_desc, search_space, category):
    ''' Extracts the string in the search space that is semantically
    closest to the problem description.

    :param prob_desc: problem description
    :param search_space: search space of the possible strings
    :param category: search space category (symptom or typification level)
    :return: closest string that belongs to search_space and its confidence
    '''
    # Embeddings were pre-computed by loadModelData(); look them up in order.
    ss_embeddings = [embeddings[ss_val] for ss_val in search_space]
    return getFeatureSuggestion(prob_desc, keywords, search_space, ss_embeddings, category)
| 37.858824 | 112 | 0.720945 |
import tensorflow_hub as hub
import numpy as np
import tensorflow_text
import json, re, os
from threading import Thread
from keywords import keywords
embeddings = {}
embed = None
def loadModelData():
    """Load the TF sentence encoder and pre-encode all known feature values
    into the module-level ``embed`` / ``embeddings`` globals."""
    global embed
    global embeddings
    embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder-multilingual-large/2")
    feature_types = ['Sintoma', 'Tipificacao_Nivel_1', 'Tipificacao_Nivel_2', 'Tipificacao_Nivel_3']
    with open(os.getcwd() + '/input_options.json') as json_file:
        data = json.load(json_file)
        for typ in feature_types:
            embedProblemData(data, typ, embeddings)
def embedProblemData(data, feature_type, embeddings):
    """Encode every value of *feature_type* and store raw value -> vector in *embeddings*."""
    raw_features = [x for x in data[feature_type]]
    # Encoder input is lower-cased; cache keys keep the raw casing.
    proc_features = [x.lower() for x in raw_features]
    feature_embeddings = embed(proc_features)["outputs"]
    for i in range(0, len(raw_features)):
        embeddings[raw_features[i]] = feature_embeddings[i]
def replaceWithKeywords(line, keywords):
    """Return *line* plus one keyword-substituted variant per matching regex pattern."""
    keyworded_versions = [line]
    for keyword, matches in keywords.items():
        keyworded_versions.extend([re.sub(match, keyword, line) for match in matches if re.search(match, line)])
    return keyworded_versions
def getFeatureSuggestion(line, keywords, ss_vals, ss_embeddings, category):
    """Return the ss_vals entry with the highest inner-product similarity to
    *line* (over all keyword-normalised variants), plus that score."""
    ll = line.lower()
    line_versions = replaceWithKeywords(ll, keywords['common'])
    if category>0:
        line_versions.extend(replaceWithKeywords(ll, keywords['tip_'+str(category)]))
    # Embed each variant and score it against every candidate.
    sentence_embeddings = [embed(line_version)["outputs"] for line_version in line_versions]
    similarity_matrices = [list(np.inner(sent_emb, ss_embeddings)[0])
                           for sent_emb in sentence_embeddings]
    # Keep whichever variant yields the single highest similarity anywhere.
    max_values = [max(similarity_matrice) for similarity_matrice in similarity_matrices]
    max_abs = max(max_values)
    similarity_matrix = similarity_matrices[max_values.index(max_abs)]
    sugestao = ss_vals[similarity_matrix.index(max_abs)]
    return sugestao, max_abs
def extractProblemData(prob_desc, search_space, category):
    """Return the search_space string semantically closest to prob_desc and its confidence."""
    ss_embeddings = [embeddings[ss_val] for ss_val in search_space]
    return getFeatureSuggestion(prob_desc, keywords, search_space, ss_embeddings, category)
| true | true |
f72ae89046ac8b319ed71a62b07e68d530306531 | 3,901 | py | Python | powerwatch/analysis/old_analysis_scripts/average_time_pw_uplug.py | nklugman/PlugWatch | 4fbd2506a6808542fc5246e87d3c382761da1eaf | [
"MIT"
] | null | null | null | powerwatch/analysis/old_analysis_scripts/average_time_pw_uplug.py | nklugman/PlugWatch | 4fbd2506a6808542fc5246e87d3c382761da1eaf | [
"MIT"
] | null | null | null | powerwatch/analysis/old_analysis_scripts/average_time_pw_uplug.py | nklugman/PlugWatch | 4fbd2506a6808542fc5246e87d3c382761da1eaf | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, window, asc, desc, lead, lag, udf, hour
from pyspark.sql.functions import month, year, lit, when, collect_list, struct, mean, stddev, stddev_pop
import pyspark.sql.functions as F
from pyspark.sql.window import Window
from pyspark.sql.types import FloatType, IntegerType, DateType, TimestampType
from pyspark import SparkConf
import yaml
import datetime
import os
from math import isnan
conf = SparkConf()
conf.set("spark.jars", os.getenv("HOME") + "/.ivy2/jars/org.postgresql_postgresql-42.1.1.jar")
conf.set("spark.executor.extrajavaoptions", "-Xmx15000m")
conf.set("spark.executor.memory", "15g")
conf.set("spark.driver.memory", "15g")
conf.set("spark.storage.memoryFraction", "0")
spark = SparkSession.builder \
.config(conf=conf) \
.master("local[4]") \
.appName("SAIDI Calculator") \
.getOrCreate()
config = open('config.yaml')
config = yaml.load(config)
#connect to the database
pw_df = spark.read.jdbc("jdbc:postgresql://timescale.lab11.eecs.umich.edu/powerwatch", "pw_dedupe",
properties={"user": config['user'], "password": config['password'],"driver":"org.postgresql.Driver"})
#read the data that we care about
pw_df = pw_df.select(pw_df['core_id'],pw_df['time'],pw_df['is_powered'],pw_df['product_id'],pw_df['millis'],pw_df['last_unplug_millis'],pw_df['last_plug_millis'])
pw_df = pw_df.filter("product_id = 7008 OR product_id= 7009")
#now we need to created a window function that looks at the leading lagging edge of is powered and detects transitions
#then we can filter out all data that is not a transition
def detectTransition(value1, value2):
    """Flag a power-state transition between consecutive samples.

    :param value1: is_powered value of the current row
    :param value2: is_powered value of the previous row (lagged)
    :return: 1 when the two samples differ, otherwise 0
    """
    return 0 if value1 == value2 else 1
udfDetectTransition = udf(detectTransition, IntegerType())
w = Window.partitionBy("core_id").orderBy(asc("time"))
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("transition", udfDetectTransition("is_powered",is_powered_lag))
#filter out all transitions
pw_df = pw_df.filter("transition != 0")
#now count each outage (really restoration)
def countOutage(value1, value2, value3):
    """Flag a power-restoration edge.

    :param value1: is_powered of the current row
    :param value2: is_powered of the next row (lead)
    :param value3: is_powered of the previous row (lag)
    :return: 1 for the pattern unpowered-now / powered-next / powered-before, else 0
    """
    # Explicit == comparisons kept on purpose: Spark UDF inputs may be None,
    # which must not count as a restoration.
    is_restoration = (value1 == False and value2 == True and value3 == True)
    return 1 if is_restoration else 0
udfCountTransition = udf(countOutage, IntegerType())
is_powered_lead = lead("is_powered",1).over(w)
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("outage", udfCountTransition("is_powered", is_powered_lead, is_powered_lag))
#now find all the exact outage and restore times using millis
def timeCorrect(time, millis, unplugMillis):
    """Shift *time* back by the gap between the device millis counter and the
    last-unplug millis counter to recover the actual outage start.

    Falls back to the raw timestamp when either counter is missing or NaN,
    when unplugMillis is 0, or when unplugMillis is ahead of millis.
    """
    if unplugMillis == 0 or millis is None or unplugMillis is None:
        return time
    if isnan(millis) or isnan(unplugMillis):
        return time
    if unplugMillis > millis:
        return time
    return time - datetime.timedelta(microseconds=(int(millis) - int(unplugMillis)) * 1000)
udftimeCorrect = udf(timeCorrect, TimestampType())
pw_df = pw_df.withColumn("outage_time", udftimeCorrect("time","millis","last_unplug_millis"))
#now filter out everything that is not an outage. We should have a time and end_time for every outage
pw_df = pw_df.filter("outage != 0")
w = Window.orderBy(asc("outage_time")).rowsBetween(-1,1)
pw_df = pw_df.withColumn("outage_window_list",collect_list(F.struct("outage_time","core_id")).over(w))
def filterOutage(time, imei, timeList):
    """Seconds until the next outage recorded by a *different* device.

    :param time: outage timestamp of this device
    :param imei: core_id of this device (its own events are ignored)
    :param timeList: neighbouring (outage_time, core_id) pairs from the window
    :return: smallest positive gap in seconds, or None when no later event exists
    """
    gaps = []
    for entry in timeList:
        if imei == entry[1]:
            continue
        delta = (entry[0] - time).total_seconds()
        if delta > 0:
            gaps.append(delta)
    return min(gaps) if gaps else None
udfFilterTransition = udf(filterOutage, FloatType())
pw_df = pw_df.withColumn("seconds_until_next_unplug", udfFilterTransition("outage_time","core_id","outage_window_list"))
print(pw_df.stat.approxQuantile("seconds_until_next_unplug", [x*0.01 for x in range(0,100)], 0.0))
| 40.216495 | 162 | 0.722892 |
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, window, asc, desc, lead, lag, udf, hour
from pyspark.sql.functions import month, year, lit, when, collect_list, struct, mean, stddev, stddev_pop
import pyspark.sql.functions as F
from pyspark.sql.window import Window
from pyspark.sql.types import FloatType, IntegerType, DateType, TimestampType
from pyspark import SparkConf
import yaml
import datetime
import os
from math import isnan
conf = SparkConf()
conf.set("spark.jars", os.getenv("HOME") + "/.ivy2/jars/org.postgresql_postgresql-42.1.1.jar")
conf.set("spark.executor.extrajavaoptions", "-Xmx15000m")
conf.set("spark.executor.memory", "15g")
conf.set("spark.driver.memory", "15g")
conf.set("spark.storage.memoryFraction", "0")
spark = SparkSession.builder \
.config(conf=conf) \
.master("local[4]") \
.appName("SAIDI Calculator") \
.getOrCreate()
config = open('config.yaml')
config = yaml.load(config)
pw_df = spark.read.jdbc("jdbc:postgresql://timescale.lab11.eecs.umich.edu/powerwatch", "pw_dedupe",
properties={"user": config['user'], "password": config['password'],"driver":"org.postgresql.Driver"})
pw_df = pw_df.select(pw_df['core_id'],pw_df['time'],pw_df['is_powered'],pw_df['product_id'],pw_df['millis'],pw_df['last_unplug_millis'],pw_df['last_plug_millis'])
pw_df = pw_df.filter("product_id = 7008 OR product_id= 7009")
def detectTransition(value1, value2):
    """Return 1 when the current and lagged is_powered samples differ, else 0."""
    if(value1 == value2):
        return 0
    else:
        return 1
udfDetectTransition = udf(detectTransition, IntegerType())
w = Window.partitionBy("core_id").orderBy(asc("time"))
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("transition", udfDetectTransition("is_powered",is_powered_lag))
pw_df = pw_df.filter("transition != 0")
def countOutage(value1, value2, value3):
    """Return 1 for a restoration edge: current sample unpowered, lead and lag powered."""
    # Explicit == keeps None inputs from counting as a restoration.
    if(value1 == False and value2 == True and value3 == True):
        return 1
    else:
        return 0
udfCountTransition = udf(countOutage, IntegerType())
is_powered_lead = lead("is_powered",1).over(w)
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("outage", udfCountTransition("is_powered", is_powered_lead, is_powered_lag))
def timeCorrect(time, millis, unplugMillis):
    """Shift *time* back by (millis - unplugMillis) ms to recover the outage start;
    fall back to the raw timestamp for missing/NaN/zero or out-of-order counters."""
    if(unplugMillis == 0 or millis == None or unplugMillis == None or isnan(millis) or isnan(unplugMillis)):
        return time
    elif unplugMillis > millis:
        return time
    else:
        return time - datetime.timedelta(microseconds = (int(millis)-int(unplugMillis))*1000)
udftimeCorrect = udf(timeCorrect, TimestampType())
pw_df = pw_df.withColumn("outage_time", udftimeCorrect("time","millis","last_unplug_millis"))
pw_df = pw_df.filter("outage != 0")
w = Window.orderBy(asc("outage_time")).rowsBetween(-1,1)
pw_df = pw_df.withColumn("outage_window_list",collect_list(F.struct("outage_time","core_id")).over(w))
def filterOutage(time, imei, timeList):
    """Return seconds until the next outage seen by a *different* device, or None.

    timeList holds (outage_time, core_id) pairs from the surrounding window;
    entries from the same core_id and non-positive gaps are ignored.
    """
    times = []
    for i in timeList:
        if imei != i[1]:
            t = (i[0] - time).total_seconds()
            if(t > 0):
                times.append(t)
    if len(times) > 0:
        return min(times)
    return None
udfFilterTransition = udf(filterOutage, FloatType())
pw_df = pw_df.withColumn("seconds_until_next_unplug", udfFilterTransition("outage_time","core_id","outage_window_list"))
print(pw_df.stat.approxQuantile("seconds_until_next_unplug", [x*0.01 for x in range(0,100)], 0.0))
| true | true |
f72ae8f83fbcedd3eb02039ff2317a6935549fc8 | 5,975 | py | Python | lightlab/equipment/visa_bases/driver_base.py | CharLee674/rvisa_lightlab | b43e36f3436b60c8c5f3088b4cb0896c5360aa4a | [
"MIT"
] | null | null | null | lightlab/equipment/visa_bases/driver_base.py | CharLee674/rvisa_lightlab | b43e36f3436b60c8c5f3088b4cb0896c5360aa4a | [
"MIT"
] | null | null | null | lightlab/equipment/visa_bases/driver_base.py | CharLee674/rvisa_lightlab | b43e36f3436b60c8c5f3088b4cb0896c5360aa4a | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from contextlib import contextmanager
import socket
import time
from lightlab import visalogger as logger
from rvisa.util import from_ascii_block
class InstrumentSessionBase(ABC):
    ''' Base class for Instrument sessions, to be inherited and specialized
    by VISAObject and PrologixGPIBObject'''
    @abstractmethod
    def spoll(self):
        ''' Serial poll: concrete sessions query the instrument status byte. '''
        pass
    @abstractmethod
    def LLO(self):
        ''' Local lockout: disable the instrument's front-panel controls. '''
        pass
    @abstractmethod
    def LOC(self):
        ''' Go to local: return front-panel control to the instrument. '''
        pass
    @abstractmethod
    def open(self):
        ''' Open the underlying communication session. '''
        pass
    @abstractmethod
    def close(self):
        ''' Close the underlying communication session. '''
        pass
    @abstractmethod
    def write(self):
        ''' Write a message to the instrument. '''
        pass
    @abstractmethod
    def query(self):
        ''' Write a message and read back the instrument's reply. '''
        pass
    @abstractmethod
    def wait(self):
        ''' Block for an implementation-defined settling period. '''
        pass
    @abstractmethod
    def clear(self):
        ''' Clear the device's communication state. '''
        pass
    @abstractmethod
    def query_raw_binary(self):
        ''' Query and return the raw, undecoded binary response. '''
        pass
    def query_ascii_values(self, message, converter='f', separator=',',
                           container=list):
        ''' Query the instrument and parse the reply as ASCII values.

        Taken from pyvisa; the response block is decoded by
        :func:`rvisa.util.from_ascii_block`.

        :param message: command string to send
        :param converter: per-element converter (format code or callable)
        :param separator: element separator in the response
        :param container: container type for the parsed values
        '''
        block = self.query(message)
        return from_ascii_block(block, converter, separator, container)
    def instrID(self):
        r"""Returns the \*IDN? string"""
        return self.query('*IDN?')
    @property
    @abstractmethod
    def timeout(self):
        ''' Communication timeout; units are implementation-defined. '''
        pass
    @timeout.setter
    @abstractmethod
    def timeout(self, newTimeout):
        pass
CR = '\r'
LF = '\n'
class TCPSocketConnection(object):
    ''' Opens a TCP socket connection, much like netcat.
    Usage:
        s = TCPSocketConnection('socket-server.school.edu', 1111)
        s.connect() # connects to socket and leaves it open
        s.send('command') # sends the command through the socket
        r = s.recv(1000) # receives a message of up to 1000 bytes
        s.disconnect() # shuts down connection
    '''
    port = None  #: socket server's port number
    _socket = None  # open socket object, or None when disconnected
    _termination = None  # terminator appended to every outgoing message
    def __init__(self, ip_address, port, timeout=2, termination=LF):
        """
        Args:
            ip_address (str): hostname or ip address of the socket server
            port (int): socket server's port number
            timeout (float): timeout in seconds for establishing socket
                connection to socket server, default 2.
            termination (str): terminator appended to every outgoing
                message, default linefeed.
        """
        self.timeout = timeout
        self.port = port
        self.ip_address = ip_address
        self._termination = termination
    def _send(self, socket, value):
        # Append the terminator and transmit. socket.sendall returns None,
        # so `sent` is always None; kept for interface symmetry with _recv.
        encoded_value = (('%s' % value) + self._termination).encode('ascii')
        sent = socket.sendall(encoded_value)
        return sent
    def _recv(self, socket, msg_length=2048):
        # Note: a single recv() may return fewer than msg_length bytes.
        received_value = socket.recv(msg_length)
        return received_value.decode('ascii')
    def connect(self):
        ''' Connects to the socket and leaves the connection open.
        If already connected, does nothing.
        Returns:
            socket object.
        '''
        if self._socket is None:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
            try:
                logger.debug("Attempting new connection (timeout = %s)", str(self.timeout))
                init_time = time.time()
                s.settimeout(self.timeout)
                s.connect((self.ip_address, self.port))
            except socket.error:
                # avoiding shutdown to prevent sending any data to remote socket
                # https://stackoverflow.com/questions/13109899/does-socket-become-unusable-after-connect-fails
                # s.shutdown(socket.SHUT_WR)
                s.close()
                del s
                logger.error('Cannot connect to resource.')
                raise
            else:
                final_time = time.time()
                elapsed_time_ms = 1e3 * (final_time - init_time)
                logger.debug("Connected. Time elapsed: %s msec", '{:.2f}'.format(elapsed_time_ms))
                self._socket = s
                return self._socket
        else:
            # Already connected; reuse the open socket.
            return self._socket
    def disconnect(self):
        ''' If connected, disconnects and kills the socket.'''
        if self._socket is not None:
            # Half-close the write side first, then release the descriptor.
            self._socket.shutdown(socket.SHUT_WR)
            self._socket.close()
            self._socket = None
    @contextmanager
    def connected(self):
        ''' Context manager for ensuring that the socket is connected while
        sending and receiving commands to remote socket.
        This is safe to use everywhere, even if the socket is previously connected.
        It can also be nested.
        This is useful to bundle multiple commands that you desire to be
        executed together in a single socket connection, for example:
        .. code-block:: python
            def query(self, query_msg, msg_length=2048):
                with self.connected():
                    self._send(self._socket, query_msg)
                    recv = self._recv(self._socket, msg_length)
                return recv
        '''
        # Only tear the connection down if *this* level opened it, so
        # nested connected() blocks keep the socket alive.
        previously_connected = (self._socket is not None)
        self.connect()
        try:
            yield self
        finally:
            if not previously_connected:
                self.disconnect()
    def startup(self):
        # Hook for subclasses; no generic startup sequence exists here.
        raise NotImplementedError
    def send(self, value):
        ''' Sends an ASCII string to the socket server. Auto-connects if necessary.
        Args:
            value (str): value to be sent
        '''
        with self.connected():
            sent = self._send(self._socket, value)
        return sent
    def recv(self, msg_length=2048):
        ''' Receives an ASCII string from the socket server. Auto-connects if necessary.
        Args:
            msg_length (int): maximum message length.
        '''
        with self.connected():
            recv = self._recv(self._socket, msg_length)
        return recv
    def query(self, query_msg, msg_length=2048):
        # Subclasses define the send+receive pairing appropriate to their protocol.
        raise NotImplementedError
| 29.146341 | 110 | 0.594979 | from abc import ABC, abstractmethod
from contextlib import contextmanager
import socket
import time
from lightlab import visalogger as logger
from rvisa.util import from_ascii_block
class InstrumentSessionBase(ABC):
    """Abstract base for instrument communication sessions."""
    @abstractmethod
    def spoll(self):
        pass
    @abstractmethod
    def LLO(self):
        pass
    @abstractmethod
    def LOC(self):
        pass
    @abstractmethod
    def open(self):
        pass
    @abstractmethod
    def close(self):
        pass
    @abstractmethod
    def write(self):
        pass
    @abstractmethod
    def query(self):
        pass
    @abstractmethod
    def wait(self):
        pass
    @abstractmethod
    def clear(self):
        pass
    @abstractmethod
    def query_raw_binary(self):
        pass
    def query_ascii_values(self, message, converter='f', separator=',',
                           container=list):
        """Query the instrument and parse the reply as ASCII values (pyvisa-style)."""
        block = self.query(message)
        return from_ascii_block(block, converter, separator, container)
    def instrID(self):
        r"""Return the instrument's \*IDN? identification string."""
        return self.query('*IDN?')
    @property
    @abstractmethod
    def timeout(self):
        pass
    @timeout.setter
    @abstractmethod
    def timeout(self, newTimeout):
        pass
CR = '\r'
LF = '\n'
class TCPSocketConnection(object):
    """Netcat-style TCP client: connect, send/recv ASCII strings, disconnect."""
    port = None  # socket server's port number
    _socket = None  # open socket object, or None when disconnected
    _termination = None  # terminator appended to every outgoing message
    def __init__(self, ip_address, port, timeout=2, termination=LF):
        """Store connection parameters; no socket is opened yet."""
        self.timeout = timeout
        self.port = port
        self.ip_address = ip_address
        self._termination = termination
    def _send(self, socket, value):
        """Append the terminator, encode as ASCII and transmit (sendall returns None)."""
        encoded_value = (('%s' % value) + self._termination).encode('ascii')
        sent = socket.sendall(encoded_value)
        return sent
    def _recv(self, socket, msg_length=2048):
        """Receive up to msg_length bytes and decode as ASCII."""
        received_value = socket.recv(msg_length)
        return received_value.decode('ascii')
    def connect(self):
        """Open the socket if not already open; return the open socket object."""
        if self._socket is None:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
            try:
                logger.debug("Attempting new connection (timeout = %s)", str(self.timeout))
                init_time = time.time()
                s.settimeout(self.timeout)
                s.connect((self.ip_address, self.port))
            except socket.error:
                # avoiding shutdown to prevent sending any data to remote socket
                # https://stackoverflow.com/questions/13109899/does-socket-become-unusable-after-connect-fails
                # s.shutdown(socket.SHUT_WR)
                s.close()
                del s
                logger.error('Cannot connect to resource.')
                raise
            else:
                final_time = time.time()
                elapsed_time_ms = 1e3 * (final_time - init_time)
                logger.debug("Connected. Time elapsed: %s msec", '{:.2f}'.format(elapsed_time_ms))
                self._socket = s
                return self._socket
        else:
            return self._socket
    def disconnect(self):
        """Half-close the write side, close the socket and forget it."""
        if self._socket is not None:
            self._socket.shutdown(socket.SHUT_WR)
            self._socket.close()
            self._socket = None
    @contextmanager
    def connected(self):
        """Context manager keeping the socket open for the enclosed commands.

        Nest-safe: only the level that actually opened the connection closes it.
        """
        previously_connected = (self._socket is not None)
        self.connect()
        try:
            yield self
        finally:
            if not previously_connected:
                self.disconnect()
    def startup(self):
        # Hook for subclasses; no generic startup sequence exists here.
        raise NotImplementedError
    def send(self, value):
        """Send an ASCII string to the server, auto-connecting if necessary."""
        with self.connected():
            sent = self._send(self._socket, value)
        return sent
    def recv(self, msg_length=2048):
        """Receive up to msg_length bytes as ASCII, auto-connecting if necessary."""
        with self.connected():
            recv = self._recv(self._socket, msg_length)
        return recv
    def query(self, query_msg, msg_length=2048):
        # Subclasses define the send+receive pairing for their protocol.
        raise NotImplementedError
| true | true |
f72ae943e83fcbed48d9e3f084fe924867622c96 | 2,382 | py | Python | simple_ado/user.py | Bhaskers-Blu-Org2/simple_ado | bbfb1cd5d513cce0f606188e803db3dcf667cb75 | [
"MIT"
] | null | null | null | simple_ado/user.py | Bhaskers-Blu-Org2/simple_ado | bbfb1cd5d513cce0f606188e803db3dcf667cb75 | [
"MIT"
] | null | null | null | simple_ado/user.py | Bhaskers-Blu-Org2/simple_ado | bbfb1cd5d513cce0f606188e803db3dcf667cb75 | [
"MIT"
] | 1 | 2020-07-30T13:18:16.000Z | 2020-07-30T13:18:16.000Z | #!/usr/bin/env python3
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""ADO user API wrapper."""
import logging
from typing import cast
from simple_ado.base_client import ADOBaseClient
from simple_ado.context import ADOContext
from simple_ado.exceptions import ADOException
from simple_ado.http_client import ADOHTTPClient
from simple_ado.types import TeamFoundationId
class ADOUserClient(ADOBaseClient):
    """Wrapper class around the ADO user APIs.

    :param context: The context information for the client
    :param http_client: The HTTP client to use for the client
    :param log: The logger to use
    """

    def __init__(
        self, context: ADOContext, http_client: ADOHTTPClient, log: logging.Logger
    ) -> None:
        super().__init__(context, http_client, log.getChild("user"))

    def get_team_foundation_id(self, identity: str) -> TeamFoundationId:
        """Fetch the unique Team Foundation GUID for a given identity.

        :param str identity: The identity to fetch for (should be email for users and display name for groups)

        :returns: The team foundation ID

        :raises ADOException: If the identity cannot be resolved from the response
        """

        request_url = self.http_client.api_endpoint(is_default_collection=False, is_project=False)
        request_url += "/IdentityPicker/Identities?api-version=5.1-preview.1"

        body = {
            "query": identity,
            "identityTypes": ["user", "group"],
            "operationScopes": ["ims"],
            "properties": ["DisplayName", "Mail"],
            "filterByAncestorEntityIds": [],
            "filterByEntityIds": [],
        }

        response = self.http_client.post(request_url, json_data=body)
        response_data = self.http_client.decode_response(response)

        try:
            result = response_data["results"][0]["identities"][0]
        except (KeyError, IndexError, TypeError) as exc:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt/
            # SystemExit and masked programming errors. Narrow to the lookup
            # failures this line can actually produce and chain the cause.
            raise ADOException("Could not resolve identity: " + identity) from exc

        # Match users by email and groups by display name (case-insensitive).
        if result["entityType"] == "User" and identity.lower() == result["mail"].lower():
            return cast(TeamFoundationId, str(result["localId"]))

        if result["entityType"] == "Group" and identity.lower() == result["displayName"].lower():
            return cast(TeamFoundationId, str(result["localId"]))

        raise ADOException("Could not resolve identity: " + identity)
| 35.552239 | 110 | 0.670025 |
import logging
from typing import cast
from simple_ado.base_client import ADOBaseClient
from simple_ado.context import ADOContext
from simple_ado.exceptions import ADOException
from simple_ado.http_client import ADOHTTPClient
from simple_ado.types import TeamFoundationId
class ADOUserClient(ADOBaseClient):
def __init__(
self, context: ADOContext, http_client: ADOHTTPClient, log: logging.Logger
) -> None:
super().__init__(context, http_client, log.getChild("user"))
def get_team_foundation_id(self, identity: str) -> TeamFoundationId:
request_url = self.http_client.api_endpoint(is_default_collection=False, is_project=False)
request_url += "/IdentityPicker/Identities?api-version=5.1-preview.1"
body = {
"query": identity,
"identityTypes": ["user", "group"],
"operationScopes": ["ims"],
"properties": ["DisplayName", "Mail"],
"filterByAncestorEntityIds": [],
"filterByEntityIds": [],
}
response = self.http_client.post(request_url, json_data=body)
response_data = self.http_client.decode_response(response)
try:
result = response_data["results"][0]["identities"][0]
except:
raise ADOException("Could not resolve identity: " + identity)
if result["entityType"] == "User" and identity.lower() == result["mail"].lower():
return cast(TeamFoundationId, str(result["localId"]))
if result["entityType"] == "Group" and identity.lower() == result["displayName"].lower():
return cast(TeamFoundationId, str(result["localId"]))
raise ADOException("Could not resolve identity: " + identity)
| true | true |
f72aea0d6cc0cce475a487b99abf5840a183729c | 152 | py | Python | controller/apps.py | skyrred/Gestion | c38c4d1fa229f5b0e0ef2667ff98864a28dc3241 | [
"Apache-2.0"
] | 1 | 2021-11-15T14:55:36.000Z | 2021-11-15T14:55:36.000Z | controller/apps.py | skyrred/Gestion | c38c4d1fa229f5b0e0ef2667ff98864a28dc3241 | [
"Apache-2.0"
] | null | null | null | controller/apps.py | skyrred/Gestion | c38c4d1fa229f5b0e0ef2667ff98864a28dc3241 | [
"Apache-2.0"
] | null | null | null | from django.apps import AppConfig
class ControllerConfig(AppConfig):
    """Django application configuration for the ``controller`` app."""

    # Use 64-bit integer primary keys for models that do not declare one.
    default_auto_field = 'django.db.models.BigAutoField'
    # Dotted module path Django uses to register this app.
    name = 'controller'
| 21.714286 | 56 | 0.769737 | from django.apps import AppConfig
class ControllerConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'controller'
| true | true |
f72aeafc60f1c50f2b50e3c33dc739dfa7cb4e8a | 1,675 | py | Python | opening2d.py | Nobu575/AppItk | 91de313115b753a6fb1ae67f53d4979580ef768b | [
"MIT"
] | null | null | null | opening2d.py | Nobu575/AppItk | 91de313115b753a6fb1ae67f53d4979580ef768b | [
"MIT"
] | null | null | null | opening2d.py | Nobu575/AppItk | 91de313115b753a6fb1ae67f53d4979580ef768b | [
"MIT"
] | null | null | null | import numpy as np
import itk
import matplotlib.pyplot as plt
# Input file name
input_filename = './jenga_g_150.png'
# Set dimension
Dimension = 2
# Read input image
itk_image = itk.imread(input_filename)
# Setting for input image (Grayscale)
InputPixelType = itk.UC
InputImageType = itk.Image[InputPixelType, Dimension]
# Loading
reader = itk.ImageFileReader[InputImageType].New()
reader.SetFileName(input_filename)
# Apply a filter: Thresholding
thresholdFilter = itk.BinaryThresholdImageFilter[InputImageType,InputImageType].New()
thresholdFilter.SetInput(reader.GetOutput())
thresholdFilter.SetUpperThreshold(200)
thresholdFilter.SetOutsideValue(1)
thresholdFilter.SetInsideValue(0)
StructuringElementType = itk.FlatStructuringElement[Dimension]
structuringElement = StructuringElementType.Ball(3)
# Apply Opening (erosion and dilation)
erodeFilter = itk.BinaryErodeImageFilter[InputImageType,InputImageType,StructuringElementType].New()
erodeFilter.SetInput(thresholdFilter.GetOutput())
erodeFilter.SetKernel(structuringElement)
erodeFilter.SetForegroundValue(1)
dilateFilter = itk.BinaryDilateImageFilter[InputImageType,InputImageType,StructuringElementType].New()
dilateFilter.SetInput(erodeFilter.GetOutput())
dilateFilter.SetKernel(structuringElement)
dilateFilter.SetForegroundValue(1)
dilateFilter.Update()
# Plot the input and output images.
plt.figure(figsize=(12, 4), dpi=50)
plt.subplot(1,3,1),plt.title("original"),plt.imshow(itk_image, cmap="gray")
plt.subplot(1,3,2),plt.title("threshold"),plt.imshow(thresholdFilter.GetOutput())
plt.subplot(1,3,3),plt.title("output"),plt.imshow(dilateFilter.GetOutput())
plt.savefig("./img/jenga_opening2d.png") | 33.5 | 102 | 0.819104 | import numpy as np
import itk
import matplotlib.pyplot as plt
input_filename = './jenga_g_150.png'
Dimension = 2
itk_image = itk.imread(input_filename)
InputPixelType = itk.UC
InputImageType = itk.Image[InputPixelType, Dimension]
reader = itk.ImageFileReader[InputImageType].New()
reader.SetFileName(input_filename)
thresholdFilter = itk.BinaryThresholdImageFilter[InputImageType,InputImageType].New()
thresholdFilter.SetInput(reader.GetOutput())
thresholdFilter.SetUpperThreshold(200)
thresholdFilter.SetOutsideValue(1)
thresholdFilter.SetInsideValue(0)
StructuringElementType = itk.FlatStructuringElement[Dimension]
structuringElement = StructuringElementType.Ball(3)
erodeFilter = itk.BinaryErodeImageFilter[InputImageType,InputImageType,StructuringElementType].New()
erodeFilter.SetInput(thresholdFilter.GetOutput())
erodeFilter.SetKernel(structuringElement)
erodeFilter.SetForegroundValue(1)
dilateFilter = itk.BinaryDilateImageFilter[InputImageType,InputImageType,StructuringElementType].New()
dilateFilter.SetInput(erodeFilter.GetOutput())
dilateFilter.SetKernel(structuringElement)
dilateFilter.SetForegroundValue(1)
dilateFilter.Update()
plt.figure(figsize=(12, 4), dpi=50)
plt.subplot(1,3,1),plt.title("original"),plt.imshow(itk_image, cmap="gray")
plt.subplot(1,3,2),plt.title("threshold"),plt.imshow(thresholdFilter.GetOutput())
plt.subplot(1,3,3),plt.title("output"),plt.imshow(dilateFilter.GetOutput())
plt.savefig("./img/jenga_opening2d.png") | true | true |
f72aed1738f6ccb62f4bf6aeaaf1bcc63b40247b | 2,587 | py | Python | update.py | boost/bucket-antivirus-function | 6eb93406e28f81a4c612f0dec29670451e0c5589 | [
"Apache-2.0"
] | null | null | null | update.py | boost/bucket-antivirus-function | 6eb93406e28f81a4c612f0dec29670451e0c5589 | [
"Apache-2.0"
] | null | null | null | update.py | boost/bucket-antivirus-function | 6eb93406e28f81a4c612f0dec29670451e0c5589 | [
"Apache-2.0"
] | 1 | 2020-07-16T12:47:24.000Z | 2020-07-16T12:47:24.000Z | # -*- coding: utf-8 -*-
# Upside Travel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import boto3
import clamav
from common import AV_DEFINITION_PATH
from common import AV_DEFINITION_S3_BUCKET
from common import AV_DEFINITION_S3_PREFIX
from common import CLAMAVLIB_PATH
from common import get_timestamp
import shutil
def lambda_handler(event, context):
    """Refresh the ClamAV definition files and upload them to S3.

    Wipes the local definition directory, asks freshclam for current
    definitions, then uploads the result to the configured S3 bucket/prefix.

    :param event: Lambda event payload (unused).
    :param context: Lambda runtime context (unused).
    """
    s3_client = boto3.client("s3")
    print("Script starting at %s\n" % (get_timestamp()))
    # Start from an empty definition directory so stale files never linger.
    for root, dirs, files in os.walk(AV_DEFINITION_PATH):
        for f in files:
            os.unlink(os.path.join(root, f))
        for d in dirs:
            shutil.rmtree(os.path.join(root, d))
    # Record/inspect what is already in S3. The per-file download step was
    # intentionally disabled here: freshclam below fetches definitions itself.
    clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )
    print("Skipping clamav definition download %s\n" % (get_timestamp()))
    clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    # If main.cvd gets updated (very rare), we need to force freshclam to
    # re-download the compressed version to keep file sizes down.
    # The existence of main.cud is the trigger that this has happened.
    if os.path.exists(os.path.join(AV_DEFINITION_PATH, "main.cud")):
        for stale_name in ("main.cud", "main.cvd"):
            stale_path = os.path.join(AV_DEFINITION_PATH, stale_name)
            if os.path.exists(stale_path):
                os.remove(stale_path)
        clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    clamav.upload_defs_to_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH
    )
    print("Script finished at %s\n" % get_timestamp())
import os
import boto3
import clamav
from common import AV_DEFINITION_PATH
from common import AV_DEFINITION_S3_BUCKET
from common import AV_DEFINITION_S3_PREFIX
from common import CLAMAVLIB_PATH
from common import get_timestamp
import shutil
def lambda_handler(event, context):
s3 = boto3.resource("s3")
s3_client = boto3.client("s3")
print("Script starting at %s\n" % (get_timestamp()))
for root, dirs, files in os.walk(AV_DEFINITION_PATH):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
to_download = clamav.update_defs_from_s3(
s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
)
print("Skipping clamav definition download %s\n" % (get_timestamp()))
clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
if os.path.exists(os.path.join(AV_DEFINITION_PATH, "main.cud")):
os.remove(os.path.join(AV_DEFINITION_PATH, "main.cud"))
if os.path.exists(os.path.join(AV_DEFINITION_PATH, "main.cvd")):
os.remove(os.path.join(AV_DEFINITION_PATH, "main.cvd"))
clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
clamav.upload_defs_to_s3(
s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH
)
print("Script finished at %s\n" % get_timestamp()) | true | true |
f72aeddbd79707ad743350eba5e76f34ba47af5c | 15,728 | py | Python | ssd.py | tristanmooo/ssd_keras | e4be1dae086e91a81b020787f94560836379dc68 | [
"MIT"
] | null | null | null | ssd.py | tristanmooo/ssd_keras | e4be1dae086e91a81b020787f94560836379dc68 | [
"MIT"
] | null | null | null | ssd.py | tristanmooo/ssd_keras | e4be1dae086e91a81b020787f94560836379dc68 | [
"MIT"
] | null | null | null | """Keras implementation of SSD."""
import keras.backend as K
from keras.layers import Activation
from keras.layers import AtrousConvolution2D
from keras.layers import Convolution2D
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import GlobalAveragePooling2D
from keras.layers import Input
from keras.layers import MaxPooling2D
from keras.layers import merge
from keras.layers import Reshape
from keras.layers import ZeroPadding2D
from keras.models import Model
from ssd_layers import Normalize
from ssd_layers import PriorBox
def SSD300(input_shape, num_classes=21):
    """SSD300 architecture.

    # Arguments
        input_shape: Shape of the input image,
            expected to be either (300, 300, 3) or (3, 300, 300)(not tested).
        num_classes: Number of classes including background.

    # Returns
        A Keras `Model` mapping the input image to concatenated
        (loc, conf, priorbox) predictions.

    # References
        https://arxiv.org/abs/1512.02325
    """
    net = {}
    # Fixed: the original had a redundant double assignment
    # (`input_tensor = input_tensor = Input(...)`).
    input_tensor = Input(shape=input_shape)
    img_size = (input_shape[1], input_shape[0])
    net['input'] = input_tensor
    # Block 1: VGG-16 style convolution block (two 3x3 convs + max-pool).
    net['conv1_1'] = Convolution2D(64, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv1_1')(net['input'])
    net['conv1_2'] = Convolution2D(64, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv1_2')(net['conv1_1'])
    net['pool1'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool1')(net['conv1_2'])
    # Block 2
    net['conv2_1'] = Convolution2D(128, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv2_1')(net['pool1'])
    net['conv2_2'] = Convolution2D(128, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv2_2')(net['conv2_1'])
    net['pool2'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool2')(net['conv2_2'])
    # Block 3
    net['conv3_1'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_1')(net['pool2'])
    net['conv3_2'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_2')(net['conv3_1'])
    net['conv3_3'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_3')(net['conv3_2'])
    net['pool3'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool3')(net['conv3_3'])
    # Block 4 (conv4_3 is the first source layer for detection heads below).
    net['conv4_1'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_1')(net['pool3'])
    net['conv4_2'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_2')(net['conv4_1'])
    net['conv4_3'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_3')(net['conv4_2'])
    net['pool4'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool4')(net['conv4_3'])
    # Block 5 (note: 3x3 pool with stride 1 keeps the spatial size).
    net['conv5_1'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_1')(net['pool4'])
    net['conv5_2'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_2')(net['conv5_1'])
    net['conv5_3'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_3')(net['conv5_2'])
    net['pool5'] = MaxPooling2D((3, 3), strides=(1, 1), border_mode='same',
                                name='pool5')(net['conv5_3'])
    # FC6: dilated (atrous) 3x3 convolution replacing VGG's fc6 layer.
    net['fc6'] = AtrousConvolution2D(1024, 3, 3, atrous_rate=(6, 6),
                                     activation='relu', border_mode='same',
                                     name='fc6')(net['pool5'])
    # x = Dropout(0.5, name='drop6')(x)
    # FC7: 1x1 convolution replacing VGG's fc7 layer.
    net['fc7'] = Convolution2D(1024, 1, 1, activation='relu',
                               border_mode='same', name='fc7')(net['fc6'])
    # x = Dropout(0.5, name='drop7')(x)
    # Block 6: extra SSD feature layer (stride-2 conv halves the spatial size).
    net['conv6_1'] = Convolution2D(256, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv6_1')(net['fc7'])
    net['conv6_2'] = Convolution2D(512, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='same',
                                   name='conv6_2')(net['conv6_1'])
    # Block 7
    net['conv7_1'] = Convolution2D(128, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv7_1')(net['conv6_2'])
    net['conv7_2'] = ZeroPadding2D()(net['conv7_1'])
    net['conv7_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='valid',
                                   name='conv7_2')(net['conv7_2'])
    # Block 8
    net['conv8_1'] = Convolution2D(128, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv8_1')(net['conv7_2'])
    net['conv8_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='same',
                                   name='conv8_2')(net['conv8_1'])
    # Last pool: global average collapses conv8_2 to one vector per image.
    net['pool6'] = GlobalAveragePooling2D(name='pool6')(net['conv8_2'])
    # Prediction head on conv4_3. The custom Normalize layer L2-normalizes the
    # features and rescales them (initial scale 20); the original comment here
    # wrongly described Keras BatchNormalization.
    net['conv4_3_norm'] = Normalize(20, name='conv4_3_norm')(net['conv4_3'])
    num_priors = 3
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv4_3_norm_mbox_loc')(net['conv4_3_norm'])
    net['conv4_3_norm_mbox_loc'] = x
    flatten = Flatten(name='conv4_3_norm_mbox_loc_flat')
    net['conv4_3_norm_mbox_loc_flat'] = flatten(net['conv4_3_norm_mbox_loc'])
    name = 'conv4_3_norm_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv4_3_norm'])
    net['conv4_3_norm_mbox_conf'] = x
    flatten = Flatten(name='conv4_3_norm_mbox_conf_flat')
    net['conv4_3_norm_mbox_conf_flat'] = flatten(net['conv4_3_norm_mbox_conf'])
    priorbox = PriorBox(img_size, 30.0, aspect_ratios=[2],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv4_3_norm_mbox_priorbox')
    net['conv4_3_norm_mbox_priorbox'] = priorbox(net['conv4_3_norm'])
    # Prediction head on fc7.
    num_priors = 6
    net['fc7_mbox_loc'] = Convolution2D(num_priors * 4, 3, 3,
                                        border_mode='same',
                                        name='fc7_mbox_loc')(net['fc7'])
    flatten = Flatten(name='fc7_mbox_loc_flat')
    net['fc7_mbox_loc_flat'] = flatten(net['fc7_mbox_loc'])
    name = 'fc7_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    net['fc7_mbox_conf'] = Convolution2D(num_priors * num_classes, 3, 3,
                                         border_mode='same',
                                         name=name)(net['fc7'])
    flatten = Flatten(name='fc7_mbox_conf_flat')
    net['fc7_mbox_conf_flat'] = flatten(net['fc7_mbox_conf'])
    priorbox = PriorBox(img_size, 60.0, max_size=114.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='fc7_mbox_priorbox')
    net['fc7_mbox_priorbox'] = priorbox(net['fc7'])
    # Prediction head on conv6_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv6_2_mbox_loc')(net['conv6_2'])
    net['conv6_2_mbox_loc'] = x
    flatten = Flatten(name='conv6_2_mbox_loc_flat')
    net['conv6_2_mbox_loc_flat'] = flatten(net['conv6_2_mbox_loc'])
    name = 'conv6_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv6_2'])
    net['conv6_2_mbox_conf'] = x
    flatten = Flatten(name='conv6_2_mbox_conf_flat')
    net['conv6_2_mbox_conf_flat'] = flatten(net['conv6_2_mbox_conf'])
    priorbox = PriorBox(img_size, 114.0, max_size=168.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv6_2_mbox_priorbox')
    net['conv6_2_mbox_priorbox'] = priorbox(net['conv6_2'])
    # Prediction head on conv7_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv7_2_mbox_loc')(net['conv7_2'])
    net['conv7_2_mbox_loc'] = x
    flatten = Flatten(name='conv7_2_mbox_loc_flat')
    net['conv7_2_mbox_loc_flat'] = flatten(net['conv7_2_mbox_loc'])
    name = 'conv7_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv7_2'])
    net['conv7_2_mbox_conf'] = x
    flatten = Flatten(name='conv7_2_mbox_conf_flat')
    net['conv7_2_mbox_conf_flat'] = flatten(net['conv7_2_mbox_conf'])
    priorbox = PriorBox(img_size, 168.0, max_size=222.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv7_2_mbox_priorbox')
    net['conv7_2_mbox_priorbox'] = priorbox(net['conv7_2'])
    # Prediction head on conv8_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv8_2_mbox_loc')(net['conv8_2'])
    net['conv8_2_mbox_loc'] = x
    flatten = Flatten(name='conv8_2_mbox_loc_flat')
    net['conv8_2_mbox_loc_flat'] = flatten(net['conv8_2_mbox_loc'])
    name = 'conv8_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv8_2'])
    net['conv8_2_mbox_conf'] = x
    flatten = Flatten(name='conv8_2_mbox_conf_flat')
    net['conv8_2_mbox_conf_flat'] = flatten(net['conv8_2_mbox_conf'])
    priorbox = PriorBox(img_size, 222.0, max_size=276.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv8_2_mbox_priorbox')
    net['conv8_2_mbox_priorbox'] = priorbox(net['conv8_2'])
    # Prediction head on pool6 (a vector, so Dense layers and a reshape for
    # the prior box layer instead of convolutions).
    num_priors = 6
    x = Dense(num_priors * 4, name='pool6_mbox_loc_flat')(net['pool6'])
    net['pool6_mbox_loc_flat'] = x
    name = 'pool6_mbox_conf_flat'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Dense(num_priors * num_classes, name=name)(net['pool6'])
    net['pool6_mbox_conf_flat'] = x
    priorbox = PriorBox(img_size, 276.0, max_size=330.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='pool6_mbox_priorbox')
    # Channel axis position depends on the backend image dim ordering.
    if K.image_dim_ordering() == 'tf':
        target_shape = (1, 1, 256)
    else:
        target_shape = (256, 1, 1)
    net['pool6_reshaped'] = Reshape(target_shape,
                                    name='pool6_reshaped')(net['pool6'])
    net['pool6_mbox_priorbox'] = priorbox(net['pool6_reshaped'])
    # Gather all predictions from the six source layers.
    net['mbox_loc'] = merge([net['conv4_3_norm_mbox_loc_flat'],
                             net['fc7_mbox_loc_flat'],
                             net['conv6_2_mbox_loc_flat'],
                             net['conv7_2_mbox_loc_flat'],
                             net['conv8_2_mbox_loc_flat'],
                             net['pool6_mbox_loc_flat']],
                            mode='concat', concat_axis=1, name='mbox_loc')
    net['mbox_conf'] = merge([net['conv4_3_norm_mbox_conf_flat'],
                              net['fc7_mbox_conf_flat'],
                              net['conv6_2_mbox_conf_flat'],
                              net['conv7_2_mbox_conf_flat'],
                              net['conv8_2_mbox_conf_flat'],
                              net['pool6_mbox_conf_flat']],
                             mode='concat', concat_axis=1, name='mbox_conf')
    net['mbox_priorbox'] = merge([net['conv4_3_norm_mbox_priorbox'],
                                  net['fc7_mbox_priorbox'],
                                  net['conv6_2_mbox_priorbox'],
                                  net['conv7_2_mbox_priorbox'],
                                  net['conv8_2_mbox_priorbox'],
                                  net['pool6_mbox_priorbox']],
                                 mode='concat', concat_axis=1,
                                 name='mbox_priorbox')
    # Each box has 4 location offsets, so the total box count is len/4.
    if hasattr(net['mbox_loc'], '_keras_shape'):
        num_boxes = net['mbox_loc']._keras_shape[-1] // 4
    elif hasattr(net['mbox_loc'], 'int_shape'):
        num_boxes = K.int_shape(net['mbox_loc'])[-1] // 4
    net['mbox_loc'] = Reshape((num_boxes, 4),
                              name='mbox_loc_final')(net['mbox_loc'])
    net['mbox_conf'] = Reshape((num_boxes, num_classes),
                               name='mbox_conf_logits')(net['mbox_conf'])
    net['mbox_conf'] = Activation('softmax',
                                  name='mbox_conf_final')(net['mbox_conf'])
    net['predictions'] = merge([net['mbox_loc'],
                                net['mbox_conf'],
                                net['mbox_priorbox']],
                               mode='concat', concat_axis=2,
                               name='predictions')
    model = Model(net['input'], net['predictions'])
    return model
| 51.398693 | 127 | 0.532363 |
import keras.backend as K
from keras.layers import Activation
from keras.layers import AtrousConvolution2D
from keras.layers import Convolution2D
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import GlobalAveragePooling2D
from keras.layers import Input
from keras.layers import MaxPooling2D
from keras.layers import merge
from keras.layers import Reshape
from keras.layers import ZeroPadding2D
from keras.models import Model
from ssd_layers import Normalize
from ssd_layers import PriorBox
def SSD300(input_shape, num_classes=21):
net = {}
input_tensor = input_tensor = Input(shape=input_shape)
img_size = (input_shape[1], input_shape[0])
net['input'] = input_tensor
net['conv1_1'] = Convolution2D(64, 3, 3,
activation='relu',
border_mode='same',
name='conv1_1')(net['input'])
net['conv1_2'] = Convolution2D(64, 3, 3,
activation='relu',
border_mode='same',
name='conv1_2')(net['conv1_1'])
net['pool1'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
name='pool1')(net['conv1_2'])
net['conv2_1'] = Convolution2D(128, 3, 3,
activation='relu',
border_mode='same',
name='conv2_1')(net['pool1'])
net['conv2_2'] = Convolution2D(128, 3, 3,
activation='relu',
border_mode='same',
name='conv2_2')(net['conv2_1'])
net['pool2'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
name='pool2')(net['conv2_2'])
net['conv3_1'] = Convolution2D(256, 3, 3,
activation='relu',
border_mode='same',
name='conv3_1')(net['pool2'])
net['conv3_2'] = Convolution2D(256, 3, 3,
activation='relu',
border_mode='same',
name='conv3_2')(net['conv3_1'])
net['conv3_3'] = Convolution2D(256, 3, 3,
activation='relu',
border_mode='same',
name='conv3_3')(net['conv3_2'])
net['pool3'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
name='pool3')(net['conv3_3'])
net['conv4_1'] = Convolution2D(512, 3, 3,
activation='relu',
border_mode='same',
name='conv4_1')(net['pool3'])
net['conv4_2'] = Convolution2D(512, 3, 3,
activation='relu',
border_mode='same',
name='conv4_2')(net['conv4_1'])
net['conv4_3'] = Convolution2D(512, 3, 3,
activation='relu',
border_mode='same',
name='conv4_3')(net['conv4_2'])
net['pool4'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
name='pool4')(net['conv4_3'])
net['conv5_1'] = Convolution2D(512, 3, 3,
activation='relu',
border_mode='same',
name='conv5_1')(net['pool4'])
net['conv5_2'] = Convolution2D(512, 3, 3,
activation='relu',
border_mode='same',
name='conv5_2')(net['conv5_1'])
net['conv5_3'] = Convolution2D(512, 3, 3,
activation='relu',
border_mode='same',
name='conv5_3')(net['conv5_2'])
net['pool5'] = MaxPooling2D((3, 3), strides=(1, 1), border_mode='same',
name='pool5')(net['conv5_3'])
net['fc6'] = AtrousConvolution2D(1024, 3, 3, atrous_rate=(6, 6),
activation='relu', border_mode='same',
name='fc6')(net['pool5'])
net['fc7'] = Convolution2D(1024, 1, 1, activation='relu',
border_mode='same', name='fc7')(net['fc6'])
net['conv6_1'] = Convolution2D(256, 1, 1, activation='relu',
border_mode='same',
name='conv6_1')(net['fc7'])
net['conv6_2'] = Convolution2D(512, 3, 3, subsample=(2, 2),
activation='relu', border_mode='same',
name='conv6_2')(net['conv6_1'])
net['conv7_1'] = Convolution2D(128, 1, 1, activation='relu',
border_mode='same',
name='conv7_1')(net['conv6_2'])
net['conv7_2'] = ZeroPadding2D()(net['conv7_1'])
net['conv7_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
activation='relu', border_mode='valid',
name='conv7_2')(net['conv7_2'])
net['conv8_1'] = Convolution2D(128, 1, 1, activation='relu',
border_mode='same',
name='conv8_1')(net['conv7_2'])
net['conv8_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
activation='relu', border_mode='same',
name='conv8_2')(net['conv8_1'])
net['pool6'] = GlobalAveragePooling2D(name='pool6')(net['conv8_2'])
net['conv4_3_norm'] = Normalize(20, name='conv4_3_norm')(net['conv4_3'])
num_priors = 3
x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
name='conv4_3_norm_mbox_loc')(net['conv4_3_norm'])
net['conv4_3_norm_mbox_loc'] = x
flatten = Flatten(name='conv4_3_norm_mbox_loc_flat')
net['conv4_3_norm_mbox_loc_flat'] = flatten(net['conv4_3_norm_mbox_loc'])
name = 'conv4_3_norm_mbox_conf'
if num_classes != 21:
name += '_{}'.format(num_classes)
x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
name=name)(net['conv4_3_norm'])
net['conv4_3_norm_mbox_conf'] = x
flatten = Flatten(name='conv4_3_norm_mbox_conf_flat')
net['conv4_3_norm_mbox_conf_flat'] = flatten(net['conv4_3_norm_mbox_conf'])
priorbox = PriorBox(img_size, 30.0, aspect_ratios=[2],
variances=[0.1, 0.1, 0.2, 0.2],
name='conv4_3_norm_mbox_priorbox')
net['conv4_3_norm_mbox_priorbox'] = priorbox(net['conv4_3_norm'])
num_priors = 6
net['fc7_mbox_loc'] = Convolution2D(num_priors * 4, 3, 3,
border_mode='same',
name='fc7_mbox_loc')(net['fc7'])
flatten = Flatten(name='fc7_mbox_loc_flat')
net['fc7_mbox_loc_flat'] = flatten(net['fc7_mbox_loc'])
name = 'fc7_mbox_conf'
if num_classes != 21:
name += '_{}'.format(num_classes)
net['fc7_mbox_conf'] = Convolution2D(num_priors * num_classes, 3, 3,
border_mode='same',
name=name)(net['fc7'])
flatten = Flatten(name='fc7_mbox_conf_flat')
net['fc7_mbox_conf_flat'] = flatten(net['fc7_mbox_conf'])
priorbox = PriorBox(img_size, 60.0, max_size=114.0, aspect_ratios=[2, 3],
variances=[0.1, 0.1, 0.2, 0.2],
name='fc7_mbox_priorbox')
net['fc7_mbox_priorbox'] = priorbox(net['fc7'])
num_priors = 6
x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
name='conv6_2_mbox_loc')(net['conv6_2'])
net['conv6_2_mbox_loc'] = x
flatten = Flatten(name='conv6_2_mbox_loc_flat')
net['conv6_2_mbox_loc_flat'] = flatten(net['conv6_2_mbox_loc'])
name = 'conv6_2_mbox_conf'
if num_classes != 21:
name += '_{}'.format(num_classes)
x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
name=name)(net['conv6_2'])
net['conv6_2_mbox_conf'] = x
flatten = Flatten(name='conv6_2_mbox_conf_flat')
net['conv6_2_mbox_conf_flat'] = flatten(net['conv6_2_mbox_conf'])
priorbox = PriorBox(img_size, 114.0, max_size=168.0, aspect_ratios=[2, 3],
variances=[0.1, 0.1, 0.2, 0.2],
name='conv6_2_mbox_priorbox')
net['conv6_2_mbox_priorbox'] = priorbox(net['conv6_2'])
num_priors = 6
x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
name='conv7_2_mbox_loc')(net['conv7_2'])
net['conv7_2_mbox_loc'] = x
flatten = Flatten(name='conv7_2_mbox_loc_flat')
net['conv7_2_mbox_loc_flat'] = flatten(net['conv7_2_mbox_loc'])
name = 'conv7_2_mbox_conf'
if num_classes != 21:
name += '_{}'.format(num_classes)
x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
name=name)(net['conv7_2'])
net['conv7_2_mbox_conf'] = x
flatten = Flatten(name='conv7_2_mbox_conf_flat')
net['conv7_2_mbox_conf_flat'] = flatten(net['conv7_2_mbox_conf'])
priorbox = PriorBox(img_size, 168.0, max_size=222.0, aspect_ratios=[2, 3],
variances=[0.1, 0.1, 0.2, 0.2],
name='conv7_2_mbox_priorbox')
net['conv7_2_mbox_priorbox'] = priorbox(net['conv7_2'])
num_priors = 6
x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
name='conv8_2_mbox_loc')(net['conv8_2'])
net['conv8_2_mbox_loc'] = x
flatten = Flatten(name='conv8_2_mbox_loc_flat')
net['conv8_2_mbox_loc_flat'] = flatten(net['conv8_2_mbox_loc'])
name = 'conv8_2_mbox_conf'
if num_classes != 21:
name += '_{}'.format(num_classes)
x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
name=name)(net['conv8_2'])
net['conv8_2_mbox_conf'] = x
flatten = Flatten(name='conv8_2_mbox_conf_flat')
net['conv8_2_mbox_conf_flat'] = flatten(net['conv8_2_mbox_conf'])
priorbox = PriorBox(img_size, 222.0, max_size=276.0, aspect_ratios=[2, 3],
variances=[0.1, 0.1, 0.2, 0.2],
name='conv8_2_mbox_priorbox')
net['conv8_2_mbox_priorbox'] = priorbox(net['conv8_2'])
num_priors = 6
x = Dense(num_priors * 4, name='pool6_mbox_loc_flat')(net['pool6'])
net['pool6_mbox_loc_flat'] = x
name = 'pool6_mbox_conf_flat'
if num_classes != 21:
name += '_{}'.format(num_classes)
x = Dense(num_priors * num_classes, name=name)(net['pool6'])
net['pool6_mbox_conf_flat'] = x
priorbox = PriorBox(img_size, 276.0, max_size=330.0, aspect_ratios=[2, 3],
variances=[0.1, 0.1, 0.2, 0.2],
name='pool6_mbox_priorbox')
if K.image_dim_ordering() == 'tf':
target_shape = (1, 1, 256)
else:
target_shape = (256, 1, 1)
net['pool6_reshaped'] = Reshape(target_shape,
name='pool6_reshaped')(net['pool6'])
net['pool6_mbox_priorbox'] = priorbox(net['pool6_reshaped'])
net['mbox_loc'] = merge([net['conv4_3_norm_mbox_loc_flat'],
net['fc7_mbox_loc_flat'],
net['conv6_2_mbox_loc_flat'],
net['conv7_2_mbox_loc_flat'],
net['conv8_2_mbox_loc_flat'],
net['pool6_mbox_loc_flat']],
mode='concat', concat_axis=1, name='mbox_loc')
net['mbox_conf'] = merge([net['conv4_3_norm_mbox_conf_flat'],
net['fc7_mbox_conf_flat'],
net['conv6_2_mbox_conf_flat'],
net['conv7_2_mbox_conf_flat'],
net['conv8_2_mbox_conf_flat'],
net['pool6_mbox_conf_flat']],
mode='concat', concat_axis=1, name='mbox_conf')
net['mbox_priorbox'] = merge([net['conv4_3_norm_mbox_priorbox'],
net['fc7_mbox_priorbox'],
net['conv6_2_mbox_priorbox'],
net['conv7_2_mbox_priorbox'],
net['conv8_2_mbox_priorbox'],
net['pool6_mbox_priorbox']],
mode='concat', concat_axis=1,
name='mbox_priorbox')
if hasattr(net['mbox_loc'], '_keras_shape'):
num_boxes = net['mbox_loc']._keras_shape[-1] // 4
elif hasattr(net['mbox_loc'], 'int_shape'):
num_boxes = K.int_shape(net['mbox_loc'])[-1] // 4
net['mbox_loc'] = Reshape((num_boxes, 4),
name='mbox_loc_final')(net['mbox_loc'])
net['mbox_conf'] = Reshape((num_boxes, num_classes),
name='mbox_conf_logits')(net['mbox_conf'])
net['mbox_conf'] = Activation('softmax',
name='mbox_conf_final')(net['mbox_conf'])
net['predictions'] = merge([net['mbox_loc'],
net['mbox_conf'],
net['mbox_priorbox']],
mode='concat', concat_axis=2,
name='predictions')
model = Model(net['input'], net['predictions'])
return model
| true | true |
f72aedf20d5a4dd130832d24767e5a8c5c2c559a | 850 | py | Python | test/record/parser/test_response_whois_nic_ve_property_nameservers_missing.py | huyphan/pyyawhois | 77fb2f73a9c67989f1d41d98f37037406a69d136 | [
"MIT"
] | null | null | null | test/record/parser/test_response_whois_nic_ve_property_nameservers_missing.py | huyphan/pyyawhois | 77fb2f73a9c67989f1d41d98f37037406a69d136 | [
"MIT"
] | null | null | null | test/record/parser/test_response_whois_nic_ve_property_nameservers_missing.py | huyphan/pyyawhois | 77fb2f73a9c67989f1d41d98f37037406a69d136 | [
"MIT"
] | null | null | null |
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.nic.ve/property_nameservers_missing
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicVePropertyNameserversMissing(object):
    """Check that a whois.nic.ve response with no nameserver lines parses to an empty list.

    NOTE: this file is autogenerated from the fixture (see the header comment);
    regenerate with scripts/generate_tests.py instead of editing by hand.
    """
    def setUp(self):
        # Build a Record from the raw fixture response for host whois.nic.ve.
        fixture_path = "spec/fixtures/responses/whois.nic.ve/property_nameservers_missing.txt"
        host = "whois.nic.ve"
        part = yawhois.record.Part(open(fixture_path, "r").read(), host)
        self.record = yawhois.record.Record(None, [part])
    def test_nameservers(self):
        # nameservers must be an actual list (not None) and empty for this fixture.
        eq_(self.record.nameservers.__class__.__name__, 'list')
        eq_(self.record.nameservers, [])
| 31.481481 | 94 | 0.711765 |
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicVePropertyNameserversMissing(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.nic.ve/property_nameservers_missing.txt"
host = "whois.nic.ve"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(self.record.nameservers, [])
| true | true |
f72aee673e41aaa5710037678b883636f5df28d7 | 7,947 | py | Python | src/python/pants/backend/python/lint/pylint/rules.py | danxmoran/pants | 7fafd7d789747c9e6a266847a0ccce92c3fa0754 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/python/lint/pylint/rules.py | danxmoran/pants | 7fafd7d789747c9e6a266847a0ccce92c3fa0754 | [
"Apache-2.0"
] | 22 | 2022-01-27T09:59:50.000Z | 2022-03-30T07:06:49.000Z | src/python/pants/backend/python/lint/pylint/rules.py | danxmoran/pants | 7fafd7d789747c9e6a266847a0ccce92c3fa0754 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from typing import Tuple
from pants.backend.python.lint.pylint.subsystem import (
Pylint,
PylintFieldSet,
PylintFirstPartyPlugins,
)
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.util_rules import partition, pex_from_targets
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import (
Pex,
PexRequest,
VenvPex,
VenvPexProcess,
VenvPexRequest,
)
from pants.backend.python.util_rules.pex_from_targets import RequirementsPexRequest
from pants.backend.python.util_rules.python_sources import (
PythonSourceFiles,
PythonSourceFilesRequest,
)
from pants.core.goals.lint import REPORT_DIR, LintResult, LintResults, LintTargetsRequest
from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.collection import Collection
from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix
from pants.engine.process import FallibleProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import CoarsenedTargets, Target
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
from pants.util.ordered_set import FrozenOrderedSet
from pants.util.strutil import pluralize
@dataclass(frozen=True)
class PylintPartition:
    """One batch of Pylint roots sharing a single resolve and interpreter constraints."""

    root_field_sets: FrozenOrderedSet[PylintFieldSet]
    closure: FrozenOrderedSet[Target]
    resolve_description: str | None
    interpreter_constraints: InterpreterConstraints

    def description(self) -> str:
        """Human-readable label: '<resolve>, <sorted ICs>' or just the sorted ICs."""
        constraints_repr = str(
            sorted(str(constraint) for constraint in self.interpreter_constraints)
        )
        if self.resolve_description:
            return f"{self.resolve_description}, {constraints_repr}"
        return constraints_repr
class PylintPartitions(Collection[PylintPartition]):
    """All Pylint partitions for one request (built per resolve/interpreter-constraints pair)."""
    pass
class PylintRequest(LintTargetsRequest):
    """Lint request for Pylint; matching targets are selected via PylintFieldSet."""
    field_set_type = PylintFieldSet
    name = Pylint.options_scope
def generate_argv(source_files: SourceFiles, pylint: Pylint) -> Tuple[str, ...]:
    """Assemble the Pylint command line: config flag, jobs placeholder, user args, files.

    The literal "{pants_concurrency}" is a placeholder substituted later by the
    process runner, so it is deliberately NOT an f-string.
    """
    config_flags = [] if pylint.config is None else [f"--rcfile={pylint.config}"]
    return (
        *config_flags,
        "--jobs={pants_concurrency}",
        *pylint.args,
        *source_files.files,
    )
@rule(level=LogLevel.DEBUG)
async def pylint_lint_partition(
    partition: PylintPartition, pylint: Pylint, first_party_plugins: PylintFirstPartyPlugins
) -> LintResult:
    """Run Pylint once over a single partition (one resolve + interpreter-constraints batch).

    Builds three PEXes (third-party requirements, the Pylint tool, and a runner
    combining both), prepares sources and config, then executes Pylint and turns
    the process result into a LintResult.
    """
    requirements_pex_get = Get(
        Pex,
        RequirementsPexRequest(
            (fs.address for fs in partition.root_field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    pylint_pex_get = Get(
        Pex,
        PexRequest,
        pylint.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )
    prepare_python_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    field_set_sources_get = Get(
        SourceFiles, SourceFilesRequest(fs.source for fs in partition.root_field_sets)
    )
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    # Resolve all of the independent requests concurrently.
    (
        pylint_pex,
        requirements_pex,
        prepared_python_sources,
        field_set_sources,
        report_directory,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
        report_directory_digest_get,
    )
    # The runner PEX layers the tool PEX and the requirements PEX into one venv.
    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            VenvPexRequest(
                PexRequest(
                    output_filename="pylint_runner.pex",
                    interpreter_constraints=partition.interpreter_constraints,
                    main=pylint.main,
                    internal_only=True,
                    pex_path=[pylint_pex, requirements_pex],
                ),
                # TODO(John Sirois): Remove this (change to the default of symlinks) when we can
                # upgrade to a version of Pylint with https://github.com/PyCQA/pylint/issues/1470
                # resolved.
                site_packages_copies=True,
            ),
        ),
        Get(
            ConfigFiles, ConfigFilesRequest, pylint.config_request(field_set_sources.snapshot.dirs)
        ),
    )
    # First-party plugins are made importable by appending their prefix to PYTHONPATH.
    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                config_files.snapshot.digest,
                first_party_plugins.sources_digest,
                prepared_python_sources.source_files.snapshot.digest,
                report_directory,
            )
        ),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            output_directories=(REPORT_DIR,),
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            concurrency_available=len(partition.root_field_sets),
            description=f"Run Pylint on {pluralize(len(partition.root_field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the report dir prefix so the report digest is rooted at its contents.
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=partition.description(),
        report=report,
    )
@rule(desc="Determine if necessary to partition Pylint input", level=LogLevel.DEBUG)
async def pylint_determine_partitions(
    request: PylintRequest, python_setup: PythonSetup, first_party_plugins: PylintFirstPartyPlugins
) -> PylintPartitions:
    """Split the requested field sets into one partition per (resolve, interpreter constraints).

    First-party plugin interpreter constraints are merged into every partition so
    that the plugins can always be loaded alongside the user code.
    """
    resolve_and_interpreter_constraints_to_coarsened_targets = (
        await partition._by_interpreter_constraints_and_resolve(request.field_sets, python_setup)
    )
    first_party_ics = InterpreterConstraints.create_from_compatibility_fields(
        first_party_plugins.interpreter_constraints_fields, python_setup
    )
    # Sorting gives deterministic partition ordering; the resolve name is only
    # surfaced in descriptions when more than one resolve is configured.
    return PylintPartitions(
        PylintPartition(
            FrozenOrderedSet(roots),
            FrozenOrderedSet(CoarsenedTargets(root_cts).closure()),
            resolve if len(python_setup.resolves) > 1 else None,
            InterpreterConstraints.merge((interpreter_constraints, first_party_ics)),
        )
        for (resolve, interpreter_constraints), (roots, root_cts) in sorted(
            resolve_and_interpreter_constraints_to_coarsened_targets.items()
        )
    )
@rule(desc="Lint using Pylint", level=LogLevel.DEBUG)
async def pylint_lint(request: PylintRequest, pylint: Pylint) -> LintResults:
    """Top-level Pylint rule: partition the request, lint each partition concurrently."""
    # Respect `--pylint-skip` without doing any work.
    if pylint.skip:
        return LintResults([], linter_name=request.name)
    partitions = await Get(PylintPartitions, PylintRequest, request)
    partitioned_results = await MultiGet(
        Get(LintResult, PylintPartition, partition) for partition in partitions
    )
    return LintResults(partitioned_results, linter_name=request.name)
def rules():
    """Return this module's rule registrations plus the pex-from-targets rules it relies on."""
    registrations = list(collect_rules())
    registrations.append(UnionRule(LintTargetsRequest, PylintRequest))
    registrations.extend(pex_from_targets.rules())
    return registrations
| 35.959276 | 100 | 0.707059 |
from __future__ import annotations
from dataclasses import dataclass
from typing import Tuple
from pants.backend.python.lint.pylint.subsystem import (
Pylint,
PylintFieldSet,
PylintFirstPartyPlugins,
)
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.util_rules import partition, pex_from_targets
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import (
Pex,
PexRequest,
VenvPex,
VenvPexProcess,
VenvPexRequest,
)
from pants.backend.python.util_rules.pex_from_targets import RequirementsPexRequest
from pants.backend.python.util_rules.python_sources import (
PythonSourceFiles,
PythonSourceFilesRequest,
)
from pants.core.goals.lint import REPORT_DIR, LintResult, LintResults, LintTargetsRequest
from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.collection import Collection
from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix
from pants.engine.process import FallibleProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import CoarsenedTargets, Target
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
from pants.util.ordered_set import FrozenOrderedSet
from pants.util.strutil import pluralize
@dataclass(frozen=True)
class PylintPartition:
root_field_sets: FrozenOrderedSet[PylintFieldSet]
closure: FrozenOrderedSet[Target]
resolve_description: str | None
interpreter_constraints: InterpreterConstraints
def description(self) -> str:
ics = str(sorted(str(c) for c in self.interpreter_constraints))
return f"{self.resolve_description}, {ics}" if self.resolve_description else ics
class PylintPartitions(Collection[PylintPartition]):
pass
class PylintRequest(LintTargetsRequest):
field_set_type = PylintFieldSet
name = Pylint.options_scope
def generate_argv(source_files: SourceFiles, pylint: Pylint) -> Tuple[str, ...]:
args = []
if pylint.config is not None:
args.append(f"--rcfile={pylint.config}")
args.append("--jobs={pants_concurrency}")
args.extend(pylint.args)
args.extend(source_files.files)
return tuple(args)
@rule(level=LogLevel.DEBUG)
async def pylint_lint_partition(
partition: PylintPartition, pylint: Pylint, first_party_plugins: PylintFirstPartyPlugins
) -> LintResult:
requirements_pex_get = Get(
Pex,
RequirementsPexRequest(
(fs.address for fs in partition.root_field_sets),
hardcoded_interpreter_constraints=partition.interpreter_constraints,
),
)
pylint_pex_get = Get(
Pex,
PexRequest,
pylint.to_pex_request(
interpreter_constraints=partition.interpreter_constraints,
extra_requirements=first_party_plugins.requirement_strings,
),
)
prepare_python_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
field_set_sources_get = Get(
SourceFiles, SourceFilesRequest(fs.source for fs in partition.root_field_sets)
)
report_directory_digest_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
(
pylint_pex,
requirements_pex,
prepared_python_sources,
field_set_sources,
report_directory,
) = await MultiGet(
pylint_pex_get,
requirements_pex_get,
prepare_python_sources_get,
field_set_sources_get,
report_directory_digest_get,
)
pylint_runner_pex, config_files = await MultiGet(
Get(
VenvPex,
VenvPexRequest(
PexRequest(
output_filename="pylint_runner.pex",
interpreter_constraints=partition.interpreter_constraints,
main=pylint.main,
internal_only=True,
pex_path=[pylint_pex, requirements_pex],
),
site_packages_copies=True,
),
),
Get(
ConfigFiles, ConfigFilesRequest, pylint.config_request(field_set_sources.snapshot.dirs)
),
)
pythonpath = list(prepared_python_sources.source_roots)
if first_party_plugins:
pythonpath.append(first_party_plugins.PREFIX)
input_digest = await Get(
Digest,
MergeDigests(
(
config_files.snapshot.digest,
first_party_plugins.sources_digest,
prepared_python_sources.source_files.snapshot.digest,
report_directory,
)
),
)
result = await Get(
FallibleProcessResult,
VenvPexProcess(
pylint_runner_pex,
argv=generate_argv(field_set_sources, pylint),
input_digest=input_digest,
output_directories=(REPORT_DIR,),
extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
concurrency_available=len(partition.root_field_sets),
description=f"Run Pylint on {pluralize(len(partition.root_field_sets), 'file')}.",
level=LogLevel.DEBUG,
),
)
report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
return LintResult.from_fallible_process_result(
result,
partition_description=partition.description(),
report=report,
)
@rule(desc="Determine if necessary to partition Pylint input", level=LogLevel.DEBUG)
async def pylint_determine_partitions(
request: PylintRequest, python_setup: PythonSetup, first_party_plugins: PylintFirstPartyPlugins
) -> PylintPartitions:
resolve_and_interpreter_constraints_to_coarsened_targets = (
await partition._by_interpreter_constraints_and_resolve(request.field_sets, python_setup)
)
first_party_ics = InterpreterConstraints.create_from_compatibility_fields(
first_party_plugins.interpreter_constraints_fields, python_setup
)
return PylintPartitions(
PylintPartition(
FrozenOrderedSet(roots),
FrozenOrderedSet(CoarsenedTargets(root_cts).closure()),
resolve if len(python_setup.resolves) > 1 else None,
InterpreterConstraints.merge((interpreter_constraints, first_party_ics)),
)
for (resolve, interpreter_constraints), (roots, root_cts) in sorted(
resolve_and_interpreter_constraints_to_coarsened_targets.items()
)
)
@rule(desc="Lint using Pylint", level=LogLevel.DEBUG)
async def pylint_lint(request: PylintRequest, pylint: Pylint) -> LintResults:
if pylint.skip:
return LintResults([], linter_name=request.name)
partitions = await Get(PylintPartitions, PylintRequest, request)
partitioned_results = await MultiGet(
Get(LintResult, PylintPartition, partition) for partition in partitions
)
return LintResults(partitioned_results, linter_name=request.name)
def rules():
return [
*collect_rules(),
UnionRule(LintTargetsRequest, PylintRequest),
*pex_from_targets.rules(),
]
| true | true |
f72aef2c46434bd7fee98942b7dd5f4091b26225 | 9,102 | py | Python | homeassistant/components/philips_js/media_player.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 6 | 2020-07-18T16:33:25.000Z | 2021-09-26T09:52:04.000Z | homeassistant/components/philips_js/media_player.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 47 | 2020-07-23T07:14:33.000Z | 2022-03-31T06:01:46.000Z | homeassistant/components/philips_js/media_player.py | klauern/home-assistant-core | c18ba6aec0627e6afb6442c678edb5ff2bb17db6 | [
"Apache-2.0"
] | 5 | 2020-03-29T00:29:13.000Z | 2021-09-06T20:58:40.000Z | """Media Player component to integrate TVs exposing the Joint Space API."""
from datetime import timedelta
import logging
from haphilipsjs import PhilipsTV
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_API_VERSION,
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import call_later, track_time_interval
from homeassistant.helpers.script import Script
_LOGGER = logging.getLogger(__name__)
# Base feature bitmask advertised to Home Assistant; SUPPORT_TURN_ON is added
# dynamically when a turn_on_action script is configured (see __init__ below).
SUPPORT_PHILIPS_JS = (
    SUPPORT_TURN_OFF
    | SUPPORT_VOLUME_STEP
    | SUPPORT_VOLUME_SET
    | SUPPORT_VOLUME_MUTE
    | SUPPORT_SELECT_SOURCE
    | SUPPORT_NEXT_TRACK
    | SUPPORT_PREVIOUS_TRACK
    | SUPPORT_PLAY_MEDIA
)
CONF_ON_ACTION = "turn_on_action"
DEFAULT_NAME = "Philips TV"
DEFAULT_API_VERSION = "1"
# Seconds between periodic forced refreshes scheduled by _update_soon.
DEFAULT_SCAN_INTERVAL = 30
# Delay (seconds) before refreshing state after a command; turn-on gets a
# longer grace period because the TV takes a while to boot.
DELAY_ACTION_DEFAULT = 2.0
DELAY_ACTION_ON = 10.0
# Source-list entries are rendered as "<prefix>: <name>" and parsed back in
# select_source, so inputs and channels can share one dropdown.
PREFIX_SEPARATOR = ": "
PREFIX_SOURCE = "Input"
PREFIX_CHANNEL = "Channel"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): cv.string,
        vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
    }
)
def _inverted(data):
return {v: k for k, v in data.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Philips TV platform from a YAML platform config entry."""
    turn_on_action = config.get(CONF_ON_ACTION)
    if turn_on_action:
        on_script = Script(hass, turn_on_action)
    else:
        on_script = None
    tv_api = PhilipsTV(config.get(CONF_HOST), config.get(CONF_API_VERSION))
    entity = PhilipsTVMediaPlayer(tv_api, config.get(CONF_NAME), on_script)
    add_entities([entity])
class PhilipsTVMediaPlayer(MediaPlayerEntity):
    """Representation of a Philips TV exposing the JointSpace API."""
    def __init__(self, tv, name, on_script):
        """Initialize the Philips TV."""
        self._tv = tv
        self._name = name
        # id -> display-name maps, refreshed from the TV on every update().
        self._sources = {}
        self._channels = {}
        self._on_script = on_script
        self._supports = SUPPORT_PHILIPS_JS
        if self._on_script:
            # Turn-on is only possible via the user-provided script.
            self._supports |= SUPPORT_TURN_ON
        # Cancel-callback returned by track_time_interval; calling it stops polling.
        self._update_task = None
    def _update_soon(self, delay):
        """Reschedule update task."""
        # Cancel any previously scheduled polling loop before restarting it.
        if self._update_task:
            self._update_task()
            self._update_task = None
        # Push current (possibly stale) state immediately without polling the TV.
        self.schedule_update_ha_state(force_refresh=False)
        def update_forced(event_time):
            self.schedule_update_ha_state(force_refresh=True)
        def update_and_restart(event_time):
            # One forced refresh after `delay`, then periodic refreshes.
            update_forced(event_time)
            self._update_task = track_time_interval(
                self.hass, update_forced, timedelta(seconds=DEFAULT_SCAN_INTERVAL)
            )
        call_later(self.hass, delay, update_and_restart)
    async def async_added_to_hass(self):
        """Start running updates once we are added to hass."""
        await self.hass.async_add_executor_job(self._update_soon, 0)
    @property
    def name(self):
        """Return the device name."""
        return self._name
    @property
    def should_poll(self):
        """Device should be polled."""
        # Polling is self-managed via _update_soon/track_time_interval.
        return False
    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return self._supports
    @property
    def state(self):
        """Get the device state. An exception means OFF state."""
        if self._tv.on:
            return STATE_ON
        return STATE_OFF
    @property
    def source(self):
        """Return the current input source."""
        # Sources and channels share one dropdown, disambiguated by prefix.
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            name = self._channels.get(self._tv.channel_id)
            prefix = PREFIX_CHANNEL
        else:
            name = self._sources.get(self._tv.source_id)
            prefix = PREFIX_SOURCE
        if name is None:
            return None
        return prefix + PREFIX_SEPARATOR + name
    @property
    def source_list(self):
        """List of available input sources."""
        complete = []
        for source in self._sources.values():
            complete.append(PREFIX_SOURCE + PREFIX_SEPARATOR + source)
        for channel in self._channels.values():
            complete.append(PREFIX_CHANNEL + PREFIX_SEPARATOR + channel)
        return complete
    def select_source(self, source):
        """Set the input source."""
        # Split "Input: HDMI 1" / "Channel: BBC" back into (prefix, name).
        data = source.split(PREFIX_SEPARATOR, 1)
        if data[0] == PREFIX_SOURCE:
            source_id = _inverted(self._sources).get(data[1])
            if source_id:
                self._tv.setSource(source_id)
        elif data[0] == PREFIX_CHANNEL:
            channel_id = _inverted(self._channels).get(data[1])
            if channel_id:
                self._tv.setChannel(channel_id)
        self._update_soon(DELAY_ACTION_DEFAULT)
    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._tv.volume
    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return self._tv.muted
    def turn_on(self):
        """Turn on the device."""
        if self._on_script:
            self._on_script.run()
            # Longer delay: the TV needs time to boot before it answers.
            self._update_soon(DELAY_ACTION_ON)
    def turn_off(self):
        """Turn off the device."""
        self._tv.sendKey("Standby")
        # Optimistically mark the TV off so the UI reacts immediately.
        self._tv.on = False
        self._update_soon(DELAY_ACTION_DEFAULT)
    def volume_up(self):
        """Send volume up command."""
        self._tv.sendKey("VolumeUp")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def volume_down(self):
        """Send volume down command."""
        self._tv.sendKey("VolumeDown")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def mute_volume(self, mute):
        """Send mute command."""
        # None keeps the current volume while toggling mute.
        self._tv.setVolume(None, mute)
        self._update_soon(DELAY_ACTION_DEFAULT)
    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._tv.setVolume(volume, self._tv.muted)
        self._update_soon(DELAY_ACTION_DEFAULT)
    def media_previous_track(self):
        """Send rewind command."""
        self._tv.sendKey("Previous")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def media_next_track(self):
        """Send fast forward command."""
        self._tv.sendKey("Next")
        self._update_soon(DELAY_ACTION_DEFAULT)
    @property
    def media_channel(self):
        """Get current channel if it's a channel."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return None
    @property
    def media_title(self):
        """Title of current playing media."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return self._sources.get(self._tv.source_id)
    @property
    def media_content_type(self):
        """Return content type of playing media."""
        # "tv"/"11" appear to identify the TV tuner input on different API
        # versions — TODO confirm against the JointSpace API docs.
        if self._tv.source_id == "tv" or self._tv.source_id == "11":
            return MEDIA_TYPE_CHANNEL
        if self._tv.source_id is None and self._tv.channels:
            return MEDIA_TYPE_CHANNEL
        return None
    @property
    def media_content_id(self):
        """Content type of current playing media."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return None
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return {"channel_list": list(self._channels.values())}
    def play_media(self, media_type, media_id, **kwargs):
        """Play a piece of media."""
        _LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
        if media_type == MEDIA_TYPE_CHANNEL:
            # media_id is the channel *name*; map it back to the TV's channel id.
            channel_id = _inverted(self._channels).get(media_id)
            if channel_id:
                self._tv.setChannel(channel_id)
                self._update_soon(DELAY_ACTION_DEFAULT)
            else:
                _LOGGER.error("Unable to find channel <%s>", media_id)
        else:
            _LOGGER.error("Unsupported media type <%s>", media_type)
    def update(self):
        """Get the latest data and update device state."""
        self._tv.update()
        # Fall back to "Source <id>" for inputs the TV reports without a name.
        self._sources = {
            srcid: source["name"] or f"Source {srcid}"
            for srcid, source in (self._tv.sources or {}).items()
        }
        self._channels = {
            chid: channel["name"] for chid, channel in (self._tv.channels or {}).items()
        }
| 30.854237 | 88 | 0.64777 | from datetime import timedelta
import logging
from haphilipsjs import PhilipsTV
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_API_VERSION,
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import call_later, track_time_interval
from homeassistant.helpers.script import Script
_LOGGER = logging.getLogger(__name__)
SUPPORT_PHILIPS_JS = (
SUPPORT_TURN_OFF
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY_MEDIA
)
CONF_ON_ACTION = "turn_on_action"
DEFAULT_NAME = "Philips TV"
DEFAULT_API_VERSION = "1"
DEFAULT_SCAN_INTERVAL = 30
DELAY_ACTION_DEFAULT = 2.0
DELAY_ACTION_ON = 10.0
PREFIX_SEPARATOR = ": "
PREFIX_SOURCE = "Input"
PREFIX_CHANNEL = "Channel"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): cv.string,
vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
}
)
def _inverted(data):
return {v: k for k, v in data.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
api_version = config.get(CONF_API_VERSION)
turn_on_action = config.get(CONF_ON_ACTION)
tvapi = PhilipsTV(host, api_version)
on_script = Script(hass, turn_on_action) if turn_on_action else None
add_entities([PhilipsTVMediaPlayer(tvapi, name, on_script)])
class PhilipsTVMediaPlayer(MediaPlayerEntity):
def __init__(self, tv, name, on_script):
self._tv = tv
self._name = name
self._sources = {}
self._channels = {}
self._on_script = on_script
self._supports = SUPPORT_PHILIPS_JS
if self._on_script:
self._supports |= SUPPORT_TURN_ON
self._update_task = None
def _update_soon(self, delay):
if self._update_task:
self._update_task()
self._update_task = None
self.schedule_update_ha_state(force_refresh=False)
def update_forced(event_time):
self.schedule_update_ha_state(force_refresh=True)
def update_and_restart(event_time):
update_forced(event_time)
self._update_task = track_time_interval(
self.hass, update_forced, timedelta(seconds=DEFAULT_SCAN_INTERVAL)
)
call_later(self.hass, delay, update_and_restart)
async def async_added_to_hass(self):
await self.hass.async_add_executor_job(self._update_soon, 0)
@property
def name(self):
return self._name
@property
def should_poll(self):
return False
@property
def supported_features(self):
return self._supports
@property
def state(self):
if self._tv.on:
return STATE_ON
return STATE_OFF
@property
def source(self):
if self.media_content_type == MEDIA_TYPE_CHANNEL:
name = self._channels.get(self._tv.channel_id)
prefix = PREFIX_CHANNEL
else:
name = self._sources.get(self._tv.source_id)
prefix = PREFIX_SOURCE
if name is None:
return None
return prefix + PREFIX_SEPARATOR + name
@property
def source_list(self):
complete = []
for source in self._sources.values():
complete.append(PREFIX_SOURCE + PREFIX_SEPARATOR + source)
for channel in self._channels.values():
complete.append(PREFIX_CHANNEL + PREFIX_SEPARATOR + channel)
return complete
def select_source(self, source):
data = source.split(PREFIX_SEPARATOR, 1)
if data[0] == PREFIX_SOURCE:
source_id = _inverted(self._sources).get(data[1])
if source_id:
self._tv.setSource(source_id)
elif data[0] == PREFIX_CHANNEL:
channel_id = _inverted(self._channels).get(data[1])
if channel_id:
self._tv.setChannel(channel_id)
self._update_soon(DELAY_ACTION_DEFAULT)
@property
def volume_level(self):
return self._tv.volume
@property
def is_volume_muted(self):
return self._tv.muted
def turn_on(self):
if self._on_script:
self._on_script.run()
self._update_soon(DELAY_ACTION_ON)
def turn_off(self):
self._tv.sendKey("Standby")
self._tv.on = False
self._update_soon(DELAY_ACTION_DEFAULT)
def volume_up(self):
self._tv.sendKey("VolumeUp")
self._update_soon(DELAY_ACTION_DEFAULT)
def volume_down(self):
self._tv.sendKey("VolumeDown")
self._update_soon(DELAY_ACTION_DEFAULT)
def mute_volume(self, mute):
self._tv.setVolume(None, mute)
self._update_soon(DELAY_ACTION_DEFAULT)
def set_volume_level(self, volume):
self._tv.setVolume(volume, self._tv.muted)
self._update_soon(DELAY_ACTION_DEFAULT)
def media_previous_track(self):
self._tv.sendKey("Previous")
self._update_soon(DELAY_ACTION_DEFAULT)
def media_next_track(self):
self._tv.sendKey("Next")
self._update_soon(DELAY_ACTION_DEFAULT)
@property
def media_channel(self):
if self.media_content_type == MEDIA_TYPE_CHANNEL:
return self._channels.get(self._tv.channel_id)
return None
@property
def media_title(self):
if self.media_content_type == MEDIA_TYPE_CHANNEL:
return self._channels.get(self._tv.channel_id)
return self._sources.get(self._tv.source_id)
@property
def media_content_type(self):
if self._tv.source_id == "tv" or self._tv.source_id == "11":
return MEDIA_TYPE_CHANNEL
if self._tv.source_id is None and self._tv.channels:
return MEDIA_TYPE_CHANNEL
return None
@property
def media_content_id(self):
if self.media_content_type == MEDIA_TYPE_CHANNEL:
return self._channels.get(self._tv.channel_id)
return None
@property
def device_state_attributes(self):
return {"channel_list": list(self._channels.values())}
def play_media(self, media_type, media_id, **kwargs):
_LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
if media_type == MEDIA_TYPE_CHANNEL:
channel_id = _inverted(self._channels).get(media_id)
if channel_id:
self._tv.setChannel(channel_id)
self._update_soon(DELAY_ACTION_DEFAULT)
else:
_LOGGER.error("Unable to find channel <%s>", media_id)
else:
_LOGGER.error("Unsupported media type <%s>", media_type)
def update(self):
self._tv.update()
self._sources = {
srcid: source["name"] or f"Source {srcid}"
for srcid, source in (self._tv.sources or {}).items()
}
self._channels = {
chid: channel["name"] for chid, channel in (self._tv.channels or {}).items()
}
| true | true |
f72aef7afd8a21811ad53f8b289714ccd5098693 | 8,333 | py | Python | genie/assay.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | null | null | null | genie/assay.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | null | null | null | genie/assay.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | 1 | 2022-01-20T16:33:19.000Z | 2022-01-20T16:33:19.000Z | import os
import logging
import subprocess
import yaml
import pandas as pd
from .example_filetype_format import FileTypeFormat
from . import process_functions
logger = logging.getLogger(__name__)
class Assayinfo(FileTypeFormat):
'''
Assay information file type
'''
_fileType = "assayinfo"
_process_kwargs = ["newPath", "databaseSynId"]
def _validateFilename(self, filepath_list):
assert os.path.basename(filepath_list[0]) == "assay_information.yaml"
    def process_steps(self, assay_info_df, newPath, databaseSynId):
        """Process assay information and upsert it into the Synapse database table.

        Args:
            assay_info_df: Assay information dataframe (from _get_dataframe)
            newPath: Path to write the processed tab-delimited file to
            databaseSynId: Synapse id of the assay information database table

        Returns:
            str: Path of the written processed file (newPath)
        """
        # databaseSynId = kwargs['databaseSynId']
        # Must pass in a list
        process_assay_info_df = self._process(assay_info_df)
        # Columns pushed to the table; this center's rows are replaced wholesale
        # (filterByColumn="CENTER" with toDelete=True).
        col = ['SEQ_ASSAY_ID', 'is_paired_end', 'library_selection',
               'library_strategy', 'platform', 'read_length',
               'instrument_model', 'gene_padding', 'number_of_genes',
               'variant_classifications', 'CENTER']
        process_functions.updateData(
            self.syn,
            databaseSynId,
            process_assay_info_df,
            self.center,
            col=col,
            filterByColumn="CENTER",
            toDelete=True)
        process_assay_info_df.to_csv(newPath, sep="\t", index=False)
        return(newPath)
def _process(self, df):
'''
Processing function for Assay information
- Standardizes SEQ_ASSAY_ID
- Default 10 for gene_padding
- Fills in variant_classifications
Args:
df: Assay information dataframe
Returns:
dataframe: Processed dataframe
'''
seq_assay_ids = [
assay.upper().replace('_', '-') for assay in df['SEQ_ASSAY_ID']]
df['SEQ_ASSAY_ID'] = seq_assay_ids
if process_functions.checkColExist(df, "gene_padding"):
df['gene_padding'] = df['gene_padding'].fillna(10)
df['gene_padding'] = df['gene_padding'].astype(int)
else:
df['gene_padding'] = 10
if not process_functions.checkColExist(df, "variant_classifications"):
df['variant_classifications'] = pd.np.nan
df['CENTER'] = self.center
return(df)
def _get_dataframe(self, filepath_list):
'''
Takes in yaml file, returns dataframe
'''
filepath = filepath_list[0]
try:
with open(filepath, 'r') as yamlfile:
# https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
# Must add this because yaml load deprecation
panel_info_dict = yaml.load(yamlfile, Loader=yaml.FullLoader)
except Exception:
raise ValueError(
"assay_information.yaml: Can't read in your file. "
"Please make sure the file is a correctly formatted yaml")
assay_info_df = pd.DataFrame(panel_info_dict)
assay_info_df = assay_info_df.transpose()
assay_info_df['SEQ_ASSAY_ID'] = assay_info_df.index
assay_info_df.reset_index(drop=True, inplace=True)
return(assay_info_df)
def _validate(self, assay_info_df):
    """
    Validates the values of the assay information file.

    Args:
        assay_info_df: assay information dataframe

    Returns:
        tuple: (total_error, warning) strings
    """
    errors = []
    warnings_found = []

    # SEQ_ASSAY_ID must exist and every id must carry the center prefix.
    if process_functions.checkColExist(assay_info_df, "SEQ_ASSAY_ID"):
        unique_assays = assay_info_df.SEQ_ASSAY_ID.unique()
        if not all(assay.startswith(self.center)
                   for assay in unique_assays):
            errors.append(
                "Assay_information.yaml: Please make sure your all your"
                " SEQ_ASSAY_IDs start with your center abbreviation.\n")
    else:
        errors.append(
            "Assay_information.yaml: Must have SEQ_ASSAY_ID column.\n")

    # Allowed values for most columns come from the GDC read_group
    # data dictionary.
    read_group_headers = process_functions.get_gdc_data_dictionary(
        "read_group")['properties']

    # None is an accepted instrument_model (column may be blank).
    instrument_model = read_group_headers['instrument_model']['enum']
    instrument_model.append(None)

    variant_classes = [
        'Splice_Site', 'Nonsense_Mutation', 'Frame_Shift_Del',
        'Frame_Shift_Ins', 'Nonstop_Mutation', 'Translation_Start_Site',
        'In_Frame_Ins', 'In_Frame_Del', 'Missense_Mutation',
        'Intron', 'Splice_Region', 'Silent', 'RNA', "5'UTR", "3'UTR",
        'IGR', "5'Flank", "3'Flank", None]

    # (column name, allowed values, extra keyword args), in the same
    # order the individual checks originally ran.
    value_checks = [
        ('is_paired_end', [True, False], {'required': True}),
        ('library_selection',
         read_group_headers['library_selection']['enum'],
         {'required': True}),
        ('library_strategy',
         read_group_headers['library_strategy']['enum'],
         {'required': True}),
        ('platform',
         read_group_headers['platform']['enum'],
         {'required': True}),
        ('instrument_model', instrument_model, {'required': True}),
        ('variant_classifications', variant_classes,
         {'na_allowed': True}),
    ]
    for column_name, allowed_values, extra_kwargs in value_checks:
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            column_name,
            allowed_values,
            filename="Assay_information.yaml",
            **extra_kwargs)
        warnings_found.append(warn)
        errors.append(error)

    # if not process_functions.checkColExist(
    #         assay_info_df, "target_capture_kit"):
    #     errors.append("Assay_information.yaml: "
    #                   "Must have target_capture_kit column.\n")

    # read_length: required column; values must be integers or null.
    if process_functions.checkColExist(assay_info_df, "read_length"):
        read_lengths_ok = all(
            process_functions.checkInt(value)
            for value in assay_info_df["read_length"]
            if value is not None and not pd.isnull(value))
        if not read_lengths_ok:
            errors.append(
                "Assay_information.yaml: "
                "Please double check your read_length. "
                "It must be an integer or null.\n")
    else:
        errors.append(
            "Assay_information.yaml: "
            "Must have read_length column.\n")

    # number_of_genes: required column; every value must be an integer.
    if process_functions.checkColExist(assay_info_df, "number_of_genes"):
        if not all(process_functions.checkInt(value)
                   for value in assay_info_df["number_of_genes"]):
            errors.append(
                "Assay_information.yaml: "
                "Please double check your number_of_genes. "
                "It must be an integer.\n")
    else:
        errors.append(
            "Assay_information.yaml: "
            "Must have number_of_genes column.\n")

    # gene_padding: optional; integer-or-blank when present.
    if process_functions.checkColExist(assay_info_df, "gene_padding"):
        paddings_ok = all(
            process_functions.checkInt(value)
            for value in assay_info_df["gene_padding"]
            if value is not None and not pd.isnull(value))
        if not paddings_ok:
            errors.append(
                "Assay_information.yaml: "
                "Please double check your gene_padding. "
                "It must be an integer or blank.\n")
    else:
        warnings_found.append(
            "Assay_information.yaml: "
            "gene_padding is by default 10 if not specified.\n")

    return "".join(errors), "".join(warnings_found)
| 36.388646 | 89 | 0.582503 | import os
import logging
import subprocess
import yaml
import pandas as pd
from .example_filetype_format import FileTypeFormat
from . import process_functions
logger = logging.getLogger(__name__)
class Assayinfo(FileTypeFormat):
    """Assay information file format (assay_information.yaml).

    Validates a center's assay information yaml, processes it and syncs
    it to the assay information database table.
    """

    _fileType = "assayinfo"

    # Keyword arguments the processing pipeline passes to process_steps.
    _process_kwargs = ["newPath", "databaseSynId"]

    def _validateFilename(self, filepath_list):
        """The uploaded file must be named assay_information.yaml."""
        assert os.path.basename(filepath_list[0]) == "assay_information.yaml"

    def process_steps(self, assay_info_df, newPath, databaseSynId):
        """Process the assay information and sync it to the database.

        Args:
            assay_info_df: assay information dataframe
            newPath: path the processed file is written to
            databaseSynId: Synapse id of the assay information table

        Returns:
            str: newPath
        """
        process_assay_info_df = self._process(assay_info_df)
        col = ['SEQ_ASSAY_ID', 'is_paired_end', 'library_selection',
               'library_strategy', 'platform', 'read_length',
               'instrument_model', 'gene_padding', 'number_of_genes',
               'variant_classifications', 'CENTER']
        # Replace this center's rows in the table with the processed data.
        process_functions.updateData(
            self.syn,
            databaseSynId,
            process_assay_info_df,
            self.center,
            col=col,
            filterByColumn="CENTER",
            toDelete=True)
        process_assay_info_df.to_csv(newPath, sep="\t", index=False)
        return newPath

    def _process(self, df):
        """Processing function for assay information.

        - Standardizes SEQ_ASSAY_ID (upper case, '_' replaced by '-')
        - Defaults gene_padding to 10
        - Adds missing variant_classifications and the CENTER column

        Args:
            df: assay information dataframe

        Returns:
            dataframe: processed dataframe
        """
        df['SEQ_ASSAY_ID'] = [
            assay.upper().replace('_', '-') for assay in df['SEQ_ASSAY_ID']]
        if process_functions.checkColExist(df, "gene_padding"):
            df['gene_padding'] = df['gene_padding'].fillna(10)
            df['gene_padding'] = df['gene_padding'].astype(int)
        else:
            df['gene_padding'] = 10
        if not process_functions.checkColExist(df, "variant_classifications"):
            # BUG FIX: `pd.np` was deprecated in pandas 1.0 and removed in
            # 2.0; `pd.np.nan` was simply a float NaN, so use one directly.
            df['variant_classifications'] = float("nan")
        df['CENTER'] = self.center
        return df

    def _get_dataframe(self, filepath_list):
        """Read the assay information yaml file into a dataframe.

        Args:
            filepath_list: list whose first element is the yaml path

        Returns:
            dataframe with one row per SEQ_ASSAY_ID (the yaml keys)

        Raises:
            ValueError: when the file cannot be parsed as yaml
        """
        filepath = filepath_list[0]
        try:
            with open(filepath, 'r') as yamlfile:
                # yaml.load without an explicit Loader is deprecated;
                # FullLoader avoids constructing arbitrary objects.
                panel_info_dict = yaml.load(yamlfile, Loader=yaml.FullLoader)
        except Exception:
            raise ValueError(
                "assay_information.yaml: Can't read in your file. "
                "Please make sure the file is a correctly formatted yaml")
        assay_info_df = pd.DataFrame(panel_info_dict)
        assay_info_df = assay_info_df.transpose()
        # The yaml's top-level keys are the assay identifiers.
        assay_info_df['SEQ_ASSAY_ID'] = assay_info_df.index
        assay_info_df.reset_index(drop=True, inplace=True)
        return assay_info_df

    def _validate(self, assay_info_df):
        """Validate the values of the assay information file.

        Args:
            assay_info_df: assay information dataframe

        Returns:
            tuple: (total_error, warning) strings
        """
        total_error = ""
        warning = ""
        # SEQ_ASSAY_ID must exist and carry the center prefix.
        if process_functions.checkColExist(assay_info_df, "SEQ_ASSAY_ID"):
            all_seq_assays = assay_info_df.SEQ_ASSAY_ID.unique()
            if not all([assay.startswith(self.center)
                        for assay in all_seq_assays]):
                total_error += \
                    "Assay_information.yaml: Please make sure your all your" +\
                    " SEQ_ASSAY_IDs start with your center abbreviation.\n"
        else:
            total_error += \
                "Assay_information.yaml: Must have SEQ_ASSAY_ID column.\n"
        # Allowed values come from the GDC read_group data dictionary.
        read_group_dict = process_functions.get_gdc_data_dictionary(
            "read_group")
        read_group_headers = read_group_dict['properties']
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'is_paired_end',
            [True, False],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df, 'library_selection',
            read_group_headers['library_selection']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'library_strategy',
            read_group_headers['library_strategy']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'platform',
            read_group_headers['platform']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        # None is an accepted instrument_model (column may be blank).
        instrument_model = read_group_headers['instrument_model']['enum']
        instrument_model.append(None)
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'instrument_model',
            instrument_model,
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        variant_classes = \
            ['Splice_Site', 'Nonsense_Mutation', 'Frame_Shift_Del',
             'Frame_Shift_Ins', 'Nonstop_Mutation', 'Translation_Start_Site',
             'In_Frame_Ins', 'In_Frame_Del', 'Missense_Mutation',
             'Intron', 'Splice_Region', 'Silent', 'RNA', "5'UTR", "3'UTR",
             'IGR', "5'Flank", "3'Flank", None]
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'variant_classifications',
            variant_classes,
            filename="Assay_information.yaml",
            na_allowed=True)
        warning += warn
        total_error += error
        # read_length: required column; integer-or-null values.
        if process_functions.checkColExist(assay_info_df, "read_length"):
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["read_length"]
                        if i is not None and not pd.isnull(i)]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your read_length. "
                     "It must be an integer or null.\n")
        else:
            total_error += \
                ("Assay_information.yaml: "
                 "Must have read_length column.\n")
        # number_of_genes: required column; every value must be an integer.
        if process_functions.checkColExist(assay_info_df, "number_of_genes"):
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["number_of_genes"]]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your number_of_genes. "
                     "It must be an integer.\n")
        else:
            total_error += \
                ("Assay_information.yaml: "
                 "Must have number_of_genes column.\n")
        # gene_padding: optional; integer-or-blank when present.
        if process_functions.checkColExist(assay_info_df, "gene_padding"):
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["gene_padding"]
                        if i is not None and not pd.isnull(i)]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your gene_padding. "
                     "It must be an integer or blank.\n")
        else:
            warning += \
                ("Assay_information.yaml: "
                 "gene_padding is by default 10 if not specified.\n")
        return total_error, warning
| true | true |
f72af06f509cb3b16be313e070fe087431a96b9c | 1,550 | py | Python | dlfairness/other/get_weight/alm.py | lin-tan/fairness-variance | 7f6aee23160707ffe78f429e5d960022ea1c9fe4 | [
"BSD-3-Clause"
] | null | null | null | dlfairness/other/get_weight/alm.py | lin-tan/fairness-variance | 7f6aee23160707ffe78f429e5d960022ea1c9fe4 | [
"BSD-3-Clause"
] | null | null | null | dlfairness/other/get_weight/alm.py | lin-tan/fairness-variance | 7f6aee23160707ffe78f429e5d960022ea1c9fe4 | [
"BSD-3-Clause"
] | null | null | null | import argparse
import pandas as pd
import json
import pickle
import numpy as np
from pathlib import Path
from scipy.special import softmax
import shutil
# Collect trained checkpoints for the CelebA fairness experiments and copy
# them into a flat <output_dir>/<technique>/run_XX.pth layout.
parser = argparse.ArgumentParser()
parser.add_argument('--config', type=str)
parser.add_argument('--raw_result_dir', type=str)
parser.add_argument('--output_dir', type=str)
args = parser.parse_args()
# The config file is a JSON list of experiment descriptions.
with open(args.config, 'r') as f:
    config_json = json.load(f)
for config in config_json:
    # NOTE(review): class_bias_result is never used inside this loop.
    class_bias_result = []
    # 16 repeated training runs per configuration.
    for no_try in range(16):
        # Only CelebA runs of these three training techniques are collected.
        # NOTE(review): this condition does not depend on no_try and could be
        # hoisted out of the inner loop.
        if (config['dataset'] != 'CelebA') or (not config['training_type'] in ['no-constraints', 'l2-penalty', 'fair-alm']):
            continue
        # Raw results live under <network>_<training_type>_<dataset>_<seed>/<run>.
        exp_result_path = Path(
            args.raw_result_dir,
            "{0}_{1}_{2}_{3}/{4}".format(config['network'],
                                         config['training_type'],
                                         config['dataset'],
                                         config['random_seed'],
                                         str(no_try)))
        p = Path(exp_result_path, 'checkpoint')
        # Epoch-80 checkpoint of this run.
        ckpt_path = Path(p, 'ckpt_80.t7')
        # Map the training technique to its short output directory name.
        if config['training_type'] == 'no-constraints':
            tech = 'A-Base'
        elif config['training_type'] == 'l2-penalty':
            tech = 'A-L2'
        elif config['training_type'] == 'fair-alm':
            tech = 'A-ALM'
        copy_path = Path(args.output_dir, tech, 'run_' + str(no_try).zfill(2) + '.pth')
        copy_path.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(ckpt_path, copy_path) | 35.227273 | 124 | 0.570323 | import argparse
import pandas as pd
import json
import pickle
import numpy as np
from pathlib import Path
from scipy.special import softmax
import shutil
# Gather epoch-80 checkpoints of the matching CelebA fairness runs and copy
# them into <output_dir>/<technique>/run_XX.pth.
parser = argparse.ArgumentParser()
parser.add_argument('--config', type=str)
parser.add_argument('--raw_result_dir', type=str)
parser.add_argument('--output_dir', type=str)
args = parser.parse_args()
with open(args.config, 'r') as f:
    config_json = json.load(f)
for config in config_json:
    class_bias_result = []
    for no_try in range(16):
        # Skip everything except CelebA runs of the three tracked techniques.
        if (config['dataset'] != 'CelebA') or (not config['training_type'] in ['no-constraints', 'l2-penalty', 'fair-alm']):
            continue
        exp_result_path = Path(
            args.raw_result_dir,
            "{0}_{1}_{2}_{3}/{4}".format(config['network'],
                                         config['training_type'],
                                         config['dataset'],
                                         config['random_seed'],
                                         str(no_try)))
        p = Path(exp_result_path, 'checkpoint')
        ckpt_path = Path(p, 'ckpt_80.t7')
        # Short output name per training technique.
        if config['training_type'] == 'no-constraints':
            tech = 'A-Base'
        elif config['training_type'] == 'l2-penalty':
            tech = 'A-L2'
        elif config['training_type'] == 'fair-alm':
            tech = 'A-ALM'
        copy_path = Path(args.output_dir, tech, 'run_' + str(no_try).zfill(2) + '.pth')
        copy_path.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(ckpt_path, copy_path) | true | true |
f72af113f201219d494c2ae51b9d0c0fae085aeb | 925 | py | Python | Codefights/arcade/intro/level-7/33.stringsRearrangement/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codefights/arcade/intro/level-7/33.stringsRearrangement/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codefights/arcade/intro/level-7/33.stringsRearrangement/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python3
def diffOne(a, b):
    """Return True iff strings `a` and `b` differ in exactly one position.

    Assumes the strings have equal length (guaranteed by the puzzle input);
    short-circuits as soon as a second mismatch is found.
    """
    mismatches = 0
    # Idiom fix: iterate the characters pairwise instead of range(len(a)).
    for x, y in zip(a, b):
        if x != y:
            mismatches += 1
            if mismatches == 2:
                return False
    # Explicit "exactly one" instead of the opaque bool(count).
    return mismatches == 1
def func(inputArray, curr):
    """Depth-first search helper for stringsRearrangement.

    Returns True when the strings in `inputArray` can be chained so that
    consecutive strings differ in exactly one position, with the chain
    starting next to `curr`.  The list is permuted in place while
    searching; swaps are undone on backtracking.
    """
    size = len(inputArray)
    if size == 1:
        return diffOne(inputArray[0], curr)
    for idx in range(size):
        # Only strings one edit away from `curr` may sit next to it.
        if not diffOne(inputArray[idx], curr):
            continue
        # Park the candidate at the end and recurse on the remainder.
        inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
        if func(inputArray[:-1], inputArray[-1]):
            return True
        # Backtrack: restore the order before trying the next candidate.
        inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
    return False
def stringsRearrangement(inputArray):
    """Return True iff the strings can be reordered so that every pair of
    consecutive strings differs in exactly one position.

    Tries each element as the chain's endpoint and delegates the rest to
    func().  The input list is permuted in place while searching (and is
    left permuted when the answer is True, matching the original).
    """
    for idx in range(len(inputArray)):
        # Fix inputArray[idx] as the chain's endpoint.
        inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
        if func(inputArray[:-1], inputArray[-1]):
            return True
        # Undo the swap and try the next endpoint.
        inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
    return False
| 30.833333 | 73 | 0.572973 |
def diffOne(a, b):
    """Return True iff `a` and `b` differ in exactly one position.

    Assumes equal-length strings; stops early once a second mismatch
    is found.
    """
    count = 0
    for i in range(len(a)):
        if a[i] != b[i]:
            count += 1
            if count == 2:
                return False
    return bool(count)
def func(inputArray, curr):
    """DFS helper: True if the strings in `inputArray` can be chained after
    `curr` so that neighbours differ in exactly one position.

    Mutates `inputArray` in place while searching; swaps are undone on
    backtracking.
    """
    if len(inputArray) == 1:
        return diffOne(inputArray[0], curr)
    for i in range(len(inputArray)):
        if diffOne(inputArray[i], curr):
            # Try inputArray[i] as the element adjacent to `curr`.
            inputArray[i], inputArray[-1] = inputArray[-1], inputArray[i]
            if func(inputArray[:-1], inputArray[-1]):
                return True
            # Undo the swap before trying the next candidate.
            inputArray[i], inputArray[-1] = inputArray[-1], inputArray[i]
    return False
def stringsRearrangement(inputArray):
    """True iff the strings can be reordered so that every pair of
    consecutive strings differs in exactly one position.

    Tries each element as the sequence endpoint and recurses via func();
    note the list is left permuted when the answer is True.
    """
    for i in range(len(inputArray)):
        inputArray[i], inputArray[-1] = inputArray[-1], inputArray[i]
        if func(inputArray[:-1], inputArray[-1]):
            return True
        inputArray[i], inputArray[-1] = inputArray[-1], inputArray[i]
    return False
| true | true |
f72af114783cb0a76af49c20e78ca72551409642 | 1,378 | py | Python | setup.py | jamesgregson/easy_image_io | 4b5af29f3ccc37e4b10fbdc1e18d508ed04b882d | [
"MIT"
] | 1 | 2017-08-17T11:59:45.000Z | 2017-08-17T11:59:45.000Z | setup.py | jamesgregson/easy_image_io | 4b5af29f3ccc37e4b10fbdc1e18d508ed04b882d | [
"MIT"
] | null | null | null | setup.py | jamesgregson/easy_image_io | 4b5af29f3ccc37e4b10fbdc1e18d508ed04b882d | [
"MIT"
] | null | null | null | from setuptools import setup, Extension
import numpy
import os
import config
def find(name, path):
    """Return the full path of the first file called `name` under `path`.

    Walks `path` top-down; returns '' (empty string) when no such file
    exists.
    """
    for folder, _subdirs, filenames in os.walk(path):
        if name in filenames:
            return os.path.join(folder, name)
    return ''
# Build the Extension configuration for easy_image_io.  TIFF and PNG
# support are optional: each is enabled only when its header file can be
# located under the directory supplied by the local `config` module.
print('locating directories...')
defines = [ ('MAJOR_VERSION',0),('MINOR_VERSION',1) ]
include_dirs = [ numpy.get_include() ]
libraries = []
library_dirs = []
# tiffio.h found -> add the cimg_use_tiff define and link libtiff.
print('checking for tiffio.h...')
if find('tiffio.h', config.tiff_include_dir) != '':
    defines.append( ('cimg_use_tiff',1) )
    include_dirs.append( config.tiff_include_dir )
    libraries.append( 'tiff' )
    library_dirs.append( config.tiff_library_dir )
# png.h found -> add the cimg_use_png define and link libpng.
print('checking for png.h...')
if find('png.h', config.png_include_dir ) != '':
    defines.append( ('cimg_use_png',1) )
    include_dirs.append( config.png_include_dir )
    libraries.append( 'png' )
    library_dirs.append( config.png_library_dir )
# Extra link libraries requested by the local configuration.
for lib in config.libs:
    libraries.append( lib )
print('Setting up extension...')
easy_image_io = Extension('easy_image_io',
                          define_macros=defines,
                          sources=['easy_image_io.cpp'],
                          include_dirs=include_dirs,
                          library_dirs=library_dirs,
                          libraries=libraries )
print('Building extension...')
setup(name='easy_image_io', version='0.1', ext_modules=[ easy_image_io ] )
| 30.622222 | 74 | 0.650218 | from setuptools import setup, Extension
import numpy
import os
import config
def find(name, path):
    """Return the full path of the first file named *name* under *path*.

    Walks the directory tree top-down with os.walk and returns the
    empty string when no match exists, so callers compare against ''.
    """
    for root, dirs, files in os.walk(path):
        if name in files:
            return os.path.join(root, name)
    return '';
# Build script for the easy_image_io C extension: detect optional image
# libraries, then hand everything to setuptools.
print('locating directories...')
defines = [ ('MAJOR_VERSION',0),('MINOR_VERSION',1) ]
include_dirs = [ numpy.get_include() ]
libraries = []
library_dirs = []
# Turn on TIFF support only if the libtiff header is found in the
# directory named by the local config module.
print('checking for tiffio.h...')
if find('tiffio.h', config.tiff_include_dir) != '':
    defines.append( ('cimg_use_tiff',1) )
    include_dirs.append( config.tiff_include_dir )
    libraries.append( 'tiff' )
    library_dirs.append( config.tiff_library_dir )
# Likewise, PNG support is conditional on locating the libpng header.
print('checking for png.h...')
if find('png.h', config.png_include_dir ) != '':
    defines.append( ('cimg_use_png',1) )
    include_dirs.append( config.png_include_dir )
    libraries.append( 'png' )
    library_dirs.append( config.png_library_dir )
# Append any additional link libraries listed in config.
for lib in config.libs:
    libraries.append( lib )
print('Setting up extension...')
# Single compiled module built from easy_image_io.cpp with the macros
# and search paths collected above.
easy_image_io = Extension('easy_image_io',
            define_macros=defines,
            sources=['easy_image_io.cpp'],
            include_dirs=include_dirs,
            library_dirs=library_dirs,
            libraries=libraries )
print('Building extension...')
setup(name='easy_image_io', version='0.1', ext_modules=[ easy_image_io ] )
f72af1e60284b4758cddcb59383f494df80a1a1a | 148,700 | py | Python | all/emojitations/data/hy.py | idleberg/sublime-emojitations | b2b4e8ce2c33ed0f6b8d6db6085e21da4e8d895b | [
"MIT"
] | 6 | 2016-08-31T14:42:36.000Z | 2021-09-05T23:55:47.000Z | all/emojitations/data/hy.py | idleberg/sublime-emojitations | b2b4e8ce2c33ed0f6b8d6db6085e21da4e8d895b | [
"MIT"
] | 1 | 2016-10-20T10:52:06.000Z | 2016-10-20T18:47:19.000Z | all/emojitations/data/hy.py | idleberg/sublime-emojitations | b2b4e8ce2c33ed0f6b8d6db6085e21da4e8d895b | [
"MIT"
] | 5 | 2016-08-31T14:48:11.000Z | 2021-09-05T23:55:33.000Z | from emojitations.emojitypes import EmojiAnnotations
emoji = [
EmojiAnnotations(emoji='😀', codepoints=(128512,), name='ծիծաղող դեմք', slug='ծիծաղող_դեմք', annotations=frozenset({'դեմք', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😁', codepoints=(128513,), name='ծիծաղող դեմք ժպտացող աչքերով', slug='ծիծաղող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😂', codepoints=(128514,), name='դեմք ուրախության արցունքներով', slug='դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'դեմք', 'ծիծաղել', 'արցունք'})),
EmojiAnnotations(emoji='😃', codepoints=(128515,), name='ժպտացող դեմք բաց բերանով', slug='ժպտացող_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😄', codepoints=(128516,), name='ժպտացող դեմք բաց բերանով և ժպտացող աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ժպտացող_աչքերով', annotations=frozenset({'բաց', 'աչք', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😅', codepoints=(128517,), name='ժպտացող դեմք բաց բերանով և սառը քրտինքով', slug='ժպտացող_դեմք_բաց_բերանով_և_սառը_քրտինքով', annotations=frozenset({'բաց', 'սառը', 'դեմք', 'ժպտալ', 'քրտինք'})),
EmojiAnnotations(emoji='😆', codepoints=(128518,), name='ժպտացող դեմք բաց բերանով և ամուր փակած աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ամուր_փակած_աչքերով', annotations=frozenset({'ժպտալ', 'գոհ', 'ծիծաղել', 'դեմք', 'բաց', 'բերան'})),
EmojiAnnotations(emoji='😉', codepoints=(128521,), name='աչքով անող դեմք', slug='աչքով_անող_դեմք', annotations=frozenset({'դեմք', 'աչքով անել'})),
EmojiAnnotations(emoji='😊', codepoints=(128522,), name='ժպտացող դեմք ժպտացող աչքերով', slug='ժպտացող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'շիկնել'})),
EmojiAnnotations(emoji='😋', codepoints=(128523,), name='համեղ ուտելիք վայելող դեմք', slug='համեղ_ուտելիք_վայելող_դեմք', annotations=frozenset({'դեմք', 'վեյելել', 'ժպտալ', 'համեղ', 'նյամ'})),
EmojiAnnotations(emoji='😎', codepoints=(128526,), name='ժպտացող դեմք արևային ակնոցով', slug='ժպտացող_դեմք_արևային_ակնոցով', annotations=frozenset({'աչք', 'ակնոց', 'զիլ', 'ժպտալ', 'պայծառ', 'արևային ակնոց', 'դեմք', 'եղանակ', 'արև'})),
EmojiAnnotations(emoji='😍', codepoints=(128525,), name='ժպտացող դեմք սրտաձև աչքերով', slug='ժպտացող_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'սիրտ', 'ժպտալ', 'սեր'})),
EmojiAnnotations(emoji='😘', codepoints=(128536,), name='համբույր ուղարկող դեմք', slug='համբույր_ուղարկող_դեմք', annotations=frozenset({'դեմք', 'սիրտ', 'համբուրել'})),
EmojiAnnotations(emoji='😗', codepoints=(128535,), name='համբուրող դեմք', slug='համբուրող_դեմք', annotations=frozenset({'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='😙', codepoints=(128537,), name='համբուրող դեմք ժպտացող աչքերով', slug='համբուրող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'համբուրել', 'ժպտալ'})),
EmojiAnnotations(emoji='😚', codepoints=(128538,), name='համբուրող դեմք փակ աչքերով', slug='համբուրող_դեմք_փակ_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'փակ', 'համբուրել'})),
EmojiAnnotations(emoji='☺', codepoints=(9786,), name='ժպտացող դեմք', slug='ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ', 'անկաշկանդ'})),
EmojiAnnotations(emoji='\U0001f642', codepoints=(128578,), name='թեթևակի ժպտացող դեմք', slug='թեթևակի_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ'})),
EmojiAnnotations(emoji='\U0001f917', codepoints=(129303,), name='գրկող դեմք', slug='գրկող_դեմք', annotations=frozenset({'գրկախառնում', 'դեմք', 'գրկախառնվել'})),
EmojiAnnotations(emoji='😇', codepoints=(128519,), name='ժպտացող դեմք լուսապսակով', slug='ժպտացող_դեմք_լուսապսակով', annotations=frozenset({'անմեղ', 'լուսապսակ', 'ժպտալ', 'հրեշտակ', 'դեմք', 'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f914', codepoints=(129300,), name='մտածող դեմք', slug='մտածող_դեմք', annotations=frozenset({'մտածող', 'դեմք'})),
EmojiAnnotations(emoji='😐', codepoints=(128528,), name='չեզոք դեմք', slug='չեզոք_դեմք', annotations=frozenset({'դեմք', 'չեզոք', 'անվրդով'})),
EmojiAnnotations(emoji='😑', codepoints=(128529,), name='անհույզ դեմք', slug='անհույզ_դեմք', annotations=frozenset({'դեմք', 'ոչինչ չարտահայտող', 'անարտահայտիչ', 'առանց էմոցիաների'})),
EmojiAnnotations(emoji='😶', codepoints=(128566,), name='առանց բերանի դեմք', slug='առանց_բերանի_դեմք', annotations=frozenset({'դեմք', 'լուռ', 'բերան', 'հանգիստ'})),
EmojiAnnotations(emoji='\U0001f644', codepoints=(128580,), name='պտտվող աչքերով դեմք', slug='պտտվող_աչքերով_դեմք', annotations=frozenset({'դեմք', 'աչքեր', 'պտտվող'})),
EmojiAnnotations(emoji='😏', codepoints=(128527,), name='կեղծ ժպտացող դեմք', slug='կեղծ_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'կեղծ ժպտալ'})),
EmojiAnnotations(emoji='😣', codepoints=(128547,), name='համառող դեմք', slug='համառող_դեմք', annotations=frozenset({'դեմք', 'համառել'})),
EmojiAnnotations(emoji='😥', codepoints=(128549,), name='հիասթափված; բայց թեթևացած դեմք', slug='հիասթափված;_բայց_թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած', 'հիասթափված'})),
EmojiAnnotations(emoji='😮', codepoints=(128558,), name='բաց բերանով դեմք', slug='բաց_բերանով_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'համակրանք'})),
EmojiAnnotations(emoji='\U0001f910', codepoints=(129296,), name='ճարմանդավոր բերանով դեմք', slug='ճարմանդավոր_բերանով_դեմք', annotations=frozenset({'դեմք', 'բերան', 'ճարմանդ'})),
EmojiAnnotations(emoji='😯', codepoints=(128559,), name='սաստված դեմք', slug='սաստված_դեմք', annotations=frozenset({'զարմացած', 'դեմք', 'սաստված', 'ապշած'})),
EmojiAnnotations(emoji='😪', codepoints=(128554,), name='քնատ դեմք', slug='քնատ_դեմք', annotations=frozenset({'քնել', 'դեմք'})),
EmojiAnnotations(emoji='😫', codepoints=(128555,), name='հոգնած դեմք', slug='հոգնած_դեմք', annotations=frozenset({'դեմք', 'հոգնած'})),
EmojiAnnotations(emoji='😴', codepoints=(128564,), name='քնած դեմք', slug='քնած_դեմք', annotations=frozenset({'քնել', 'դեմք', 'խռռ'})),
EmojiAnnotations(emoji='😌', codepoints=(128524,), name='թեթևացած դեմք', slug='թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած'})),
EmojiAnnotations(emoji='\U0001f913', codepoints=(129299,), name='գերազանցիկի դեմք', slug='գերազանցիկի_դեմք', annotations=frozenset({'դեմք', 'ցնդած', 'հիմար'})),
EmojiAnnotations(emoji='😛', codepoints=(128539,), name='լեզու հանած դեմք', slug='լեզու_հանած_դեմք', annotations=frozenset({'դեմք', 'լեզու'})),
EmojiAnnotations(emoji='😜', codepoints=(128540,), name='լեզու հանած և աչքով անող դեմք', slug='լեզու_հանած_և_աչքով_անող_դեմք', annotations=frozenset({'աչք', 'դեմք', 'կատակել', 'լեզու', 'աչքով անել'})),
EmojiAnnotations(emoji='😝', codepoints=(128541,), name='լեզու հանած և ամուր փակած աչքերով դեմք', slug='լեզու_հանած_և_ամուր_փակած_աչքերով_դեմք', annotations=frozenset({'աչք', 'դեմք', 'սարսափելի', 'համ', 'լեզու'})),
EmojiAnnotations(emoji='☹', codepoints=(9785,), name='խոժոռված դեմք', slug='խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='\U0001f641', codepoints=(128577,), name='թեթևակի խոժոռված դեմք', slug='թեթևակի_խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='😒', codepoints=(128530,), name='անտրամադիր դեմք', slug='անտրամադիր_դեմք', annotations=frozenset({'դեմք', 'անտրամադիր', 'դժբախտ'})),
EmojiAnnotations(emoji='😓', codepoints=(128531,), name='սառը քրտինքով դեմք', slug='սառը_քրտինքով_դեմք', annotations=frozenset({'սառը', 'դեմք', 'քրտինք'})),
EmojiAnnotations(emoji='😔', codepoints=(128532,), name='մտածկոտ դեմք', slug='մտածկոտ_դեմք', annotations=frozenset({'դեմք', 'մռայլված', 'մտածկոտ'})),
EmojiAnnotations(emoji='😕', codepoints=(128533,), name='շփոթված դեմք', slug='շփոթված_դեմք', annotations=frozenset({'դեմք', 'շփոթված'})),
EmojiAnnotations(emoji='😖', codepoints=(128534,), name='ցնցված դեմք', slug='ցնցված_դեմք', annotations=frozenset({'դեմք', 'ցնցված'})),
EmojiAnnotations(emoji='\U0001f643', codepoints=(128579,), name='գլխնիվայր դեմք', slug='գլխնիվայր_դեմք', annotations=frozenset({'դեմք', 'գլխնիվայր'})),
EmojiAnnotations(emoji='😷', codepoints=(128567,), name='բժշկական դիմակով դեմք', slug='բժշկական_դիմակով_դեմք', annotations=frozenset({'հիվանդ', 'բժիշկ', 'սառը', 'դեմք', 'բժշկական', 'դիմակ'})),
EmojiAnnotations(emoji='\U0001f912', codepoints=(129298,), name='ջերմաչափով դեմք', slug='ջերմաչափով_դեմք', annotations=frozenset({'դեմք', 'հիվանդ', 'ջերմաչափ'})),
EmojiAnnotations(emoji='\U0001f915', codepoints=(129301,), name='գլխակապով դեմք', slug='գլխակապով_դեմք', annotations=frozenset({'դեմք', 'վիրակապ', 'վնասվածք'})),
EmojiAnnotations(emoji='\U0001f911', codepoints=(129297,), name='թղթադրամը բերանին դեմք', slug='թղթադրամը_բերանին_դեմք', annotations=frozenset({'դեմք', 'փող', 'բերան'})),
EmojiAnnotations(emoji='😲', codepoints=(128562,), name='ապշահար դեմք', slug='ապշահար_դեմք', annotations=frozenset({'դեմք', 'ցնցված', 'ապշահար', 'ամբողջովին'})),
EmojiAnnotations(emoji='😞', codepoints=(128542,), name='հիասթափված դեմք', slug='հիասթափված_դեմք', annotations=frozenset({'դեմք', 'հիասթափված'})),
EmojiAnnotations(emoji='😟', codepoints=(128543,), name='անհանգստացած դեմք', slug='անհանգստացած_դեմք', annotations=frozenset({'անհանգստացած', 'դեմք'})),
EmojiAnnotations(emoji='😤', codepoints=(128548,), name='քթից գոլորշի հանող դեմք', slug='քթից_գոլորշի_հանող_դեմք', annotations=frozenset({'դեմք', 'հաղթած', 'հաղթանակ'})),
EmojiAnnotations(emoji='😢', codepoints=(128546,), name='արտասվող դեմք', slug='արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😭', codepoints=(128557,), name='բարձրաձայն արտասվող դեմք', slug='բարձրաձայն_արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'հեկեկալ', 'արտասուք'})),
EmojiAnnotations(emoji='😦', codepoints=(128550,), name='բաց բերանով խոժոռված դեմք', slug='բաց_բերանով_խոժոռված_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'խոժոռված'})),
EmojiAnnotations(emoji='😧', codepoints=(128551,), name='վշտահար դեմք', slug='վշտահար_դեմք', annotations=frozenset({'վշտահար', 'դեմք'})),
EmojiAnnotations(emoji='😨', codepoints=(128552,), name='վախեցած դեմք', slug='վախեցած_դեմք', annotations=frozenset({'վախեցած', 'դեմք', 'սարսափած', 'վախ'})),
EmojiAnnotations(emoji='😩', codepoints=(128553,), name='ուժասպառ դեմք', slug='ուժասպառ_դեմք', annotations=frozenset({'դեմք', 'հոգնած', 'ուժասպառ'})),
EmojiAnnotations(emoji='😬', codepoints=(128556,), name='ծամածռվող դեմք', slug='ծամածռվող_դեմք', annotations=frozenset({'դեմք', 'ծամածռություն'})),
EmojiAnnotations(emoji='😰', codepoints=(128560,), name='բաց բերանով և սառը քրտինքով դեմք', slug='բաց_բերանով_և_սառը_քրտինքով_դեմք', annotations=frozenset({'հապշտապ', 'բաց', 'սառը', 'դեմք', 'կապույտ', 'բերան', 'քրտինք'})),
EmojiAnnotations(emoji='😱', codepoints=(128561,), name='վախից գոռացող դեմք', slug='վախից_գոռացող_դեմք', annotations=frozenset({'վախեցած', 'ծամել', 'սարսափած', 'վախ', 'դեմք', 'ճչալ'})),
EmojiAnnotations(emoji='😳', codepoints=(128563,), name='շիկնած դեմք', slug='շիկնած_դեմք', annotations=frozenset({'դեմք', 'հիացած', 'շիկնած'})),
EmojiAnnotations(emoji='😵', codepoints=(128565,), name='գլխապտույտ ունեցող դեմք', slug='գլխապտույտ_ունեցող_դեմք', annotations=frozenset({'դեմք', 'գլխապտույտ'})),
EmojiAnnotations(emoji='😡', codepoints=(128545,), name='դժգոհ դեմք', slug='դժգոհ_դեմք', annotations=frozenset({'բարկացած', 'դեմք', 'զայրույթ', 'կարմիր', 'խենք', 'դժգոհ'})),
EmojiAnnotations(emoji='😠', codepoints=(128544,), name='բարկացած դեմք', slug='բարկացած_դեմք', annotations=frozenset({'դեմք', 'խենք', 'բարկացած'})),
EmojiAnnotations(emoji='😈', codepoints=(128520,), name='ժպտացող դեմք եղջյուրներով', slug='ժպտացող_դեմք_եղջյուրներով', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ժպտալ', 'եղջյուրներ'})),
EmojiAnnotations(emoji='👿', codepoints=(128127,), name='սատանայի ճուտ', slug='սատանայի_ճուտ', annotations=frozenset({'դեմք', 'սատանա', 'հեքիաթ', 'ֆանտազիա', 'դև'})),
EmojiAnnotations(emoji='👹', codepoints=(128121,), name='մարդակեր հսկա', slug='մարդակեր_հսկա', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👺', codepoints=(128122,), name='չար ոգի', slug='չար_ոգի', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='💀', codepoints=(128128,), name='գանգ', slug='գանգ', annotations=frozenset({'դեմք', 'հեքիաթ', 'մարմին', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='☠', codepoints=(9760,), name='գանգ և խաչված ոսկորներ', slug='գանգ_և_խաչված_ոսկորներ', annotations=frozenset({'մարմին', 'ոսկորներ', 'գանգ', 'խաչված', 'դեմք', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='👻', codepoints=(128123,), name='ուրվական', slug='ուրվական', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'արարած', 'հրեշ'})),
EmojiAnnotations(emoji='👽', codepoints=(128125,), name='այլմոլորակային', slug='այլմոլորակային', annotations=frozenset({'տիեզերք', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👾', codepoints=(128126,), name='այլմոլորակային հրեշ', slug='այլմոլորակային_հրեշ', annotations=frozenset({'տիեզերք', 'այլմոլորակային', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='\U0001f916', codepoints=(129302,), name='ռոբոտի դեմք', slug='ռոբոտի_դեմք', annotations=frozenset({'դեմք', 'ռոբոտ', 'հրեշ'})),
EmojiAnnotations(emoji='💩', codepoints=(128169,), name='կեղտի կույտ', slug='կեղտի_կույտ', annotations=frozenset({'գոմաղբ', 'կոմիքս', 'դեմք', 'կեղտ', 'հրեշ', 'կղանք'})),
EmojiAnnotations(emoji='😺', codepoints=(128570,), name='ժպտացող կատվի դեմք բաց բերանով', slug='ժպտացող_կատվի_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'կատու', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😸', codepoints=(128568,), name='ծիծաղող կատվի դեմք ժպտացող աչքերով', slug='ծիծաղող_կատվի_դեմք_ժպտացող_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'ժպտալ', 'կանաչ'})),
EmojiAnnotations(emoji='😹', codepoints=(128569,), name='կատվի դեմք ուրախության արցունքներով', slug='կատվի_դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'կատու', 'դեմք', 'արտասուք'})),
EmojiAnnotations(emoji='😻', codepoints=(128571,), name='ժպտացող կատվի դեմք սրտաձև աչքերով', slug='ժպտացող_կատվի_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'սիրտ', 'ժպտալ', 'սեր', 'դեմք', 'կատու'})),
EmojiAnnotations(emoji='😼', codepoints=(128572,), name='կատվի դեմք ծամածռված ժպիտով', slug='կատվի_դեմք_ծամածռված_ժպիտով', annotations=frozenset({'ծամածռված', 'կատու', 'դեմք', 'հեգնական', 'ժպտալ'})),
EmojiAnnotations(emoji='😽', codepoints=(128573,), name='համբուրող կատվի դեմք փակ աչքերով', slug='համբուրող_կատվի_դեմք_փակ_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='🙀', codepoints=(128576,), name='ուժասպառ կատվի դեմք', slug='ուժասպառ_կատվի_դեմք', annotations=frozenset({'զարմացած', 'կատու', 'դեմք', 'ուժասպառ', 'օհ'})),
EmojiAnnotations(emoji='😿', codepoints=(128575,), name='արտասվող կատվի դեմք', slug='արտասվող_կատվի_դեմք', annotations=frozenset({'արտասվել', 'կատու', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😾', codepoints=(128574,), name='դժգոհ կատվի դեմք', slug='դժգոհ_կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'դժգոհ'})),
EmojiAnnotations(emoji='🙈', codepoints=(128584,), name='ոչինչ չեմ տեսնում', slug='ոչինչ_չեմ_տեսնում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'տեսնել', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙉', codepoints=(128585,), name='ոչինչ չեմ լսում', slug='ոչինչ_չեմ_լսում', annotations=frozenset({'լսել', 'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙊', codepoints=(128586,), name='ոչինչ չեմ ասում', slug='ոչինչ_չեմ_ասում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված', 'խոսալ'})),
EmojiAnnotations(emoji='👧', codepoints=(128103,), name='աղջիկ', slug='աղջիկ', annotations=frozenset({'օրիորդ', 'կենդանակերպ', 'կույս'})),
EmojiAnnotations(emoji='👴', codepoints=(128116,), name='տարեց տղամարդ', slug='տարեց_տղամարդ', annotations=frozenset({'տղամարդ', 'տարեց'})),
EmojiAnnotations(emoji='👵', codepoints=(128117,), name='տարեց կին', slug='տարեց_կին', annotations=frozenset({'տարեց', 'կին'})),
EmojiAnnotations(emoji='👮', codepoints=(128110,), name='ոստիկան', slug='ոստիկան', annotations=frozenset({'սպա', 'ոստիկանություն'})),
EmojiAnnotations(emoji='👲', codepoints=(128114,), name='չինական գլխարկով մարդ', slug='չինական_գլխարկով_մարդ', annotations=frozenset({'գլխարկ', 'մարդ', 'չինական'})),
EmojiAnnotations(emoji='👳', codepoints=(128115,), name='չալմայով մարդ', slug='չալմայով_մարդ', annotations=frozenset({'չալմա', 'մարդ'})),
EmojiAnnotations(emoji='👷', codepoints=(128119,), name='շինարար', slug='շինարար', annotations=frozenset({'գլխարկ', 'շինարարություն', 'աշխատող'})),
EmojiAnnotations(emoji='⛑', codepoints=(9937,), name='սպիտակ խաչով սաղավարտ', slug='սպիտակ_խաչով_սաղավարտ', annotations=frozenset({'գլխարկ', 'դեմք', 'խաչ', 'սաղավարտ', 'օգնություն'})),
EmojiAnnotations(emoji='👸', codepoints=(128120,), name='արքայադուստր', slug='արքայադուստր', annotations=frozenset({'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f575', codepoints=(128373,), name='խուզարկու', slug='խուզարկու', annotations=frozenset({'լրտես'})),
EmojiAnnotations(emoji='🎅', codepoints=(127877,), name='սանտա կլաուս', slug='սանտա_կլաուս', annotations=frozenset({'տոն', 'սանտա', 'հեքիաթ', 'ֆանտազիա', 'սուրբ ծնունդ', 'հայր'})),
EmojiAnnotations(emoji='👼', codepoints=(128124,), name='մանուկ-հրեշտակ', slug='մանուկ_հրեշտակ', annotations=frozenset({'երեխա', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'հրեշտակ'})),
EmojiAnnotations(emoji='💆', codepoints=(128134,), name='դեմքի մերսում', slug='դեմքի_մերսում', annotations=frozenset({'սրահ', 'մերսում'})),
EmojiAnnotations(emoji='💇', codepoints=(128135,), name='սանրվածք', slug='սանրվածք', annotations=frozenset({'վարսավիր', 'գեղեցկություն', 'սրահ'})),
EmojiAnnotations(emoji='👰', codepoints=(128112,), name='քողով հարս', slug='քողով_հարս', annotations=frozenset({'քող', 'հարս', 'հարսանիք'})),
EmojiAnnotations(emoji='🙍', codepoints=(128589,), name='խոժոռված դեմքով անձ', slug='խոժոռված_դեմքով_անձ', annotations=frozenset({'խոժոռված', 'ժեստ'})),
EmojiAnnotations(emoji='🙎', codepoints=(128590,), name='դժգոհ անձ', slug='դժգոհ_անձ', annotations=frozenset({'ժեստ', 'դժգոհ'})),
EmojiAnnotations(emoji='🙅', codepoints=(128581,), name='ոչ ցույց տվող', slug='ոչ_ցույց_տվող', annotations=frozenset({'արգելված', 'ձեռք', 'ժեստ', 'ոչ'})),
EmojiAnnotations(emoji='🙆', codepoints=(128582,), name='ok ցույց տվող', slug='ok_ցույց_տվող', annotations=frozenset({'ձեռք', 'ժեստ', 'ok'})),
EmojiAnnotations(emoji='💁', codepoints=(128129,), name='տեղեկատու բյուրոյի աշխատող', slug='տեղեկատու_բյուրոյի_աշխատող', annotations=frozenset({'հանդուգն', 'ձեռք', 'օգնել', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🙋', codepoints=(128587,), name='ձեռք բարձրացնող ուրախ անձ', slug='ձեռք_բարձրացնող_ուրախ_անձ', annotations=frozenset({'ձեռք', 'երջանիկ', 'ժեստ', 'բարձրացված'})),
EmojiAnnotations(emoji='🙇', codepoints=(128583,), name='խոնարհվող անձ', slug='խոնարհվող_անձ', annotations=frozenset({'ներողություն խնդրել', 'ներողություն', 'ժեստ', 'խոնարհվել'})),
EmojiAnnotations(emoji='🙌', codepoints=(128588,), name='ձեռքերը բարձրացնող անձ', slug='ձեռքերը_բարձրացնող_անձ', annotations=frozenset({'մարմին', 'տոն', 'ժեստ', 'ձեռք', 'ուռա', 'բարձրացված'})),
EmojiAnnotations(emoji='🙏', codepoints=(128591,), name='միացված ձեռքի ափեր', slug='միացված_ձեռքի_ափեր', annotations=frozenset({'աղոթել', 'խնդրել', 'մարմին', 'խնդրում եմ', 'ժեստ', 'խոնարհվել', 'ձեռք', 'շնորհակալություն', 'միացված'})),
EmojiAnnotations(emoji='\U0001f5e3', codepoints=(128483,), name='խոսացող գլուխ', slug='խոսացող_գլուխ', annotations=frozenset({'դեմք', 'գլուխ', 'ուրվագիծ', 'խոսացող', 'խոսալ'})),
EmojiAnnotations(emoji='👤', codepoints=(128100,), name='ուրվագծված կիսանդրի', slug='ուրվագծված_կիսանդրի', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='👥', codepoints=(128101,), name='ուրվագծված կիսանդրիներ', slug='ուրվագծված_կիսանդրիներ', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='🚶', codepoints=(128694,), name='հետիոտն', slug='հետիոտն', annotations=frozenset({'քայլել', 'զբոսանք', 'զբոսնել'})),
EmojiAnnotations(emoji='🏃', codepoints=(127939,), name='վազող', slug='վազող', annotations=frozenset({'մարաթոն', 'վազք'})),
EmojiAnnotations(emoji='👯', codepoints=(128111,), name='պարող կանայք', slug='պարող_կանայք', annotations=frozenset({'ականջ', 'աղջիկ', 'կին', 'պարող', 'ճագար'})),
EmojiAnnotations(emoji='\U0001f574', codepoints=(128372,), name='տեղում ճախրող գործնական կոստյումով մարդ', slug='տեղում_ճախրող_գործնական_կոստյումով_մարդ', annotations=frozenset({'կոստյում', 'բիզնես', 'մարդ'})),
EmojiAnnotations(emoji='💏', codepoints=(128143,), name='համբույր', slug='համբույր', annotations=frozenset({'զույգ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💑', codepoints=(128145,), name='սրտիկով զույգ', slug='սրտիկով_զույգ', annotations=frozenset({'զույգ', 'սիրտ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='👪', codepoints=(128106,), name='ընտանիք', slug='ընտանիք', annotations=frozenset({'երեխա', 'մայր', 'հայր'})),
EmojiAnnotations(emoji='👫', codepoints=(128107,), name='իրար ձեռք բռնած մարդ և կին', slug='իրար_ձեռք_բռնած_մարդ_և_կին', annotations=frozenset({'զույգ', 'ձեռք', 'տղամարդ', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='👬', codepoints=(128108,), name='իրար ձեռք բռնած երկու տղամարդ', slug='իրար_ձեռք_բռնած_երկու_տղամարդ', annotations=frozenset({'երկվորյակ', 'ձեռք', 'տղամարդ', 'բռնել', 'կենդանակերպ', 'զույգ'})),
EmojiAnnotations(emoji='👭', codepoints=(128109,), name='իրար ձեռք բռնած երկու կին', slug='իրար_ձեռք_բռնած_երկու_կին', annotations=frozenset({'զույգ', 'ձեռք', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='\U0001f3fb', codepoints=(127995,), name='մաշկի տիպ-1-2', slug='մաշկի_տիպ_1_2', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fc', codepoints=(127996,), name='մաշկի տիպ-3', slug='մաշկի_տիպ_3', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fd', codepoints=(127997,), name='մաշկի տիպ-4', slug='մաշկի_տիպ_4', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fe', codepoints=(127998,), name='մաշկի տիպ-5', slug='մաշկի_տիպ_5', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3ff', codepoints=(127999,), name='մաշկի տիպ-6', slug='մաշկի_տիպ_6', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='💪', codepoints=(128170,), name='ձգված բիցեպս', slug='ձգված_բիցեպս', annotations=frozenset({'բիցեպս', 'մարմին', 'կոմիքս', 'ձգել', 'մկան'})),
EmojiAnnotations(emoji='👈', codepoints=(128072,), name='դեպի ձախ ուղղված ցուցամատ', slug='դեպի_ձախ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👉', codepoints=(128073,), name='դեպի աջ ուղղված ցուցամատ', slug='դեպի_աջ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='☝', codepoints=(9757,), name='դեպի վեր ուղղված ցուցամատ ձեռքի ափի կողմից', slug='դեպի_վեր_ուղղված_ցուցամատ_ձեռքի_ափի_կողմից', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռքի ափ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='👆', codepoints=(128070,), name='դեպի վեր ուղղված ցուցամատ', slug='դեպի_վեր_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='\U0001f595', codepoints=(128405,), name='մեջտեղի մատ', slug='մեջտեղի_մատ', annotations=frozenset({'ձեռք', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👇', codepoints=(128071,), name='դեպի վար ուղղված ցուցամատ', slug='դեպի_վար_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'վար', 'մատ'})),
EmojiAnnotations(emoji='✌', codepoints=(9996,), name='հաղթական ձեռք', slug='հաղթական_ձեռք', annotations=frozenset({'ձեռք', 'v', 'մարմին', 'հաղթանակ'})),
EmojiAnnotations(emoji='\U0001f596', codepoints=(128406,), name='վուլկանցիների ողջույն', slug='վուլկանցիների_ողջույն', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'վուլկան'})),
EmojiAnnotations(emoji='\U0001f918', codepoints=(129304,), name='եղջյուրների նշան', slug='եղջյուրների_նշան', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'եղջյուրներ'})),
EmojiAnnotations(emoji='\U0001f590', codepoints=(128400,), name='բացված մատներով բարձրացված ձեռք', slug='բացված_մատներով_բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'բացված'})),
EmojiAnnotations(emoji='✋', codepoints=(9995,), name='բարձրացված ձեռք', slug='բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='👌', codepoints=(128076,), name='ok ցույց տվող ձեռք', slug='ok_ցույց_տվող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'ok'})),
EmojiAnnotations(emoji='👍', codepoints=(128077,), name='բութ մատը վեր', slug='բութ_մատը_վեր', annotations=frozenset({'բութ', '+1', 'ձեռք', 'մարմին', 'վեր'})),
EmojiAnnotations(emoji='👎', codepoints=(128078,), name='բութ մատը ներքև', slug='բութ_մատը_ներքև', annotations=frozenset({'-1', 'ներքև', 'ձեռք', 'մարմին', 'բութ մատ'})),
EmojiAnnotations(emoji='✊', codepoints=(9994,), name='բարձրացված բռունցք', slug='բարձրացված_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👊', codepoints=(128074,), name='հանդիպակաց բռունցք', slug='հանդիպակաց_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👋', codepoints=(128075,), name='թափահարող ձեռք', slug='թափահարող_ձեռք', annotations=frozenset({'ձեռք', 'թափահարել', 'թափահարող', 'մարմին'})),
EmojiAnnotations(emoji='👏', codepoints=(128079,), name='ծափահարող ձեռքեր', slug='ծափահարող_ձեռքեր', annotations=frozenset({'ձեռք', 'մարմին', 'ծափահարել'})),
EmojiAnnotations(emoji='👐', codepoints=(128080,), name='բաց ձեռքեր', slug='բաց_ձեռքեր', annotations=frozenset({'բաց', 'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='✍', codepoints=(9997,), name='գրող ձեռք', slug='գրող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'գրել'})),
EmojiAnnotations(emoji='💅', codepoints=(128133,), name='եղունգների լաքապատում', slug='եղունգների_լաքապատում', annotations=frozenset({'խնամք', 'մարմին', 'հղկել', 'մատնահարդարում', 'եղունգ', 'կոսմետիկա'})),
EmojiAnnotations(emoji='👂', codepoints=(128066,), name='ականջ', slug='ականջ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👃', codepoints=(128067,), name='քիթ', slug='քիթ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👣', codepoints=(128099,), name='ոտնահետքեր', slug='ոտնահետքեր', annotations=frozenset({'հագուստ', 'ոտնահետք', 'հետք', 'մարմին'})),
EmojiAnnotations(emoji='👀', codepoints=(128064,), name='աչքեր', slug='աչքեր', annotations=frozenset({'աչք', 'դեմք', 'մարմին'})),
EmojiAnnotations(emoji='\U0001f441', codepoints=(128065,), name='աչք', slug='աչք', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👅', codepoints=(128069,), name='լեզու', slug='լեզու', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👄', codepoints=(128068,), name='բերան', slug='բերան', annotations=frozenset({'շուրթեր', 'մարմին'})),
EmojiAnnotations(emoji='💋', codepoints=(128139,), name='համբույրի հետք', slug='համբույրի_հետք', annotations=frozenset({'սիրտ', 'շուրթեր', 'հետք', 'սիրավեպ', 'համբույր'})),
EmojiAnnotations(emoji='💘', codepoints=(128152,), name='նետահարված սիրտ', slug='նետահարված_սիրտ', annotations=frozenset({'նետ', 'սիրտ', 'սիրավեպ', 'կուպիդոն'})),
EmojiAnnotations(emoji='❤', codepoints=(10084,), name='կարմիր սիրտ', slug='կարմիր_սիրտ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='💓', codepoints=(128147,), name='բաբախող սիրտ', slug='բաբախող_սիրտ', annotations=frozenset({'սիրտ', 'սրտխփոց', 'պուլսացիա', 'բաբախյուն'})),
EmojiAnnotations(emoji='💔', codepoints=(128148,), name='կոտրված սիրտ', slug='կոտրված_սիրտ', annotations=frozenset({'սիրտ', 'կոտրված', 'կոտրել'})),
EmojiAnnotations(emoji='💕', codepoints=(128149,), name='երկու սրտեր', slug='երկու_սրտեր', annotations=frozenset({'սիրտ', 'սեր'})),
EmojiAnnotations(emoji='💖', codepoints=(128150,), name='շողշողացող սիրտ', slug='շողշողացող_սիրտ', annotations=frozenset({'սիրտ', 'կայծ', 'ոգևորված'})),
EmojiAnnotations(emoji='💗', codepoints=(128151,), name='աճող սիրտ', slug='աճող_սիրտ', annotations=frozenset({'նյարդային', 'սիրտ', 'սրտի զարկ', 'աճող', 'ոգևորված'})),
EmojiAnnotations(emoji='💙', codepoints=(128153,), name='կապույտ սիրտ', slug='կապույտ_սիրտ', annotations=frozenset({'կապույտ', 'սիրտ'})),
EmojiAnnotations(emoji='💚', codepoints=(128154,), name='կանաչ սիրտ', slug='կանաչ_սիրտ', annotations=frozenset({'սիրտ', 'կանաչ'})),
EmojiAnnotations(emoji='💛', codepoints=(128155,), name='դեղին սիրտ', slug='դեղին_սիրտ', annotations=frozenset({'սիրտ', 'դեղին'})),
EmojiAnnotations(emoji='💜', codepoints=(128156,), name='մանուշակագույն սիրտ', slug='մանուշակագույն_սիրտ', annotations=frozenset({'սիրտ', 'մանուշակագույն'})),
EmojiAnnotations(emoji='💝', codepoints=(128157,), name='ժապավենով սիրտ', slug='ժապավենով_սիրտ', annotations=frozenset({'սիրտ', 'ժապավեն', 'վալենտին'})),
EmojiAnnotations(emoji='💞', codepoints=(128158,), name='պտտվող սրտեր', slug='պտտվող_սրտեր', annotations=frozenset({'պտտվող', 'սիրտ'})),
EmojiAnnotations(emoji='💟', codepoints=(128159,), name='սրտաձև նախշ', slug='սրտաձև_նախշ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='❣', codepoints=(10083,), name='բացականչական նշանի տեսքով սիրտ', slug='բացականչական_նշանի_տեսքով_սիրտ', annotations=frozenset({'նշան', 'կետադրական', 'սիրտ', 'բացականչություն'})),
EmojiAnnotations(emoji='💌', codepoints=(128140,), name='սիրային նամակ', slug='սիրային_նամակ', annotations=frozenset({'սիրտ', 'նամակ', 'փոստ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='💤', codepoints=(128164,), name='խռռռ', slug='խռռռ', annotations=frozenset({'քնել', 'կոմիքս'})),
EmojiAnnotations(emoji='💢', codepoints=(128162,), name='զայրույթի նշան', slug='զայրույթի_նշան', annotations=frozenset({'զայրացած', 'կոմիքս', 'խենք'})),
EmojiAnnotations(emoji='💣', codepoints=(128163,), name='ռումբ', slug='ռումբ', annotations=frozenset({'կոմիքս'})),
EmojiAnnotations(emoji='💥', codepoints=(128165,), name='բախում', slug='բախում', annotations=frozenset({'բում', 'կոմիքս'})),
EmojiAnnotations(emoji='💦', codepoints=(128166,), name='քրտինքի կաթիլներ', slug='քրտինքի_կաթիլներ', annotations=frozenset({'ցայտող', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='💨', codepoints=(128168,), name='սրընթաց', slug='սրընթաց', annotations=frozenset({'կոմիքս', 'ընթանալ', 'սլանալ'})),
EmojiAnnotations(emoji='💫', codepoints=(128171,), name='գլխապտույտ', slug='գլխապտույտ', annotations=frozenset({'կոմիքս', 'աստղ'})),
EmojiAnnotations(emoji='💬', codepoints=(128172,), name='խոսքի ամպիկ', slug='խոսքի_ամպիկ', annotations=frozenset({'երկխոսություն', 'փուչիկ', 'կոմիքս', 'պղպջակ', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5e8', codepoints=(128488,), name='խոսքի ձախակողմյա ամպիկ', slug='խոսքի_ձախակողմյա_ամպիկ', annotations=frozenset({'երկխոսություն', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5ef', codepoints=(128495,), name='զայրույթի աջակողմյա ամպիկ', slug='զայրույթի_աջակողմյա_ամպիկ', annotations=frozenset({'զայրացած', 'փուչիկ', 'պղպջակ', 'խենք'})),
EmojiAnnotations(emoji='💭', codepoints=(128173,), name='մտքի ամպիկ', slug='մտքի_ամպիկ', annotations=frozenset({'փուչիկ', 'կոմիքս', 'պղպջակ', 'միտք'})),
EmojiAnnotations(emoji='👓', codepoints=(128083,), name='ակնոց', slug='ակնոց', annotations=frozenset({'հագուստ', 'աչք'})),
EmojiAnnotations(emoji='\U0001f576', codepoints=(128374,), name='արևային ակնոց', slug='արևային_ակնոց', annotations=frozenset({'աչք', 'ակնոց', 'մուգ'})),
EmojiAnnotations(emoji='👔', codepoints=(128084,), name='փողկապ', slug='փողկապ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👕', codepoints=(128085,), name='սպորտային վերնաշապիկ', slug='սպորտային_վերնաշապիկ', annotations=frozenset({'հագուստ', 'վերնաշապիկ', 'սպորտային'})),
EmojiAnnotations(emoji='👖', codepoints=(128086,), name='ջինս', slug='ջինս', annotations=frozenset({'հագուստ', 'տաբատ', 'շալվար'})),
EmojiAnnotations(emoji='👗', codepoints=(128087,), name='զգեստ', slug='զգեստ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👘', codepoints=(128088,), name='կիմոնո', slug='կիմոնո', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👙', codepoints=(128089,), name='բիկինի', slug='բիկինի', annotations=frozenset({'հագուստ', 'լողալ'})),
EmojiAnnotations(emoji='👚', codepoints=(128090,), name='կնոջ հագուստ', slug='կնոջ_հագուստ', annotations=frozenset({'հագուստ', 'կին'})),
EmojiAnnotations(emoji='👛', codepoints=(128091,), name='դրամապանակ', slug='դրամապանակ', annotations=frozenset({'հագուստ', 'մետաղադրամ'})),
EmojiAnnotations(emoji='👜', codepoints=(128092,), name='ձեռքի պայուսակ', slug='ձեռքի_պայուսակ', annotations=frozenset({'հագուստ', 'պայուսակ'})),
EmojiAnnotations(emoji='👝', codepoints=(128093,), name='պայուսակ', slug='պայուսակ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='\U0001f6cd', codepoints=(128717,), name='գնումների պայուսակ', slug='գնումների_պայուսակ', annotations=frozenset({'գնումներ', 'պայուսակ', 'հյուրանոց'})),
EmojiAnnotations(emoji='🎒', codepoints=(127890,), name='դպրոցական պայուսակ', slug='դպրոցական_պայուսակ', annotations=frozenset({'դպրոց', 'պայուսակ', 'ուսապարկ'})),
EmojiAnnotations(emoji='👞', codepoints=(128094,), name='տղամարդու կոշիկ', slug='տղամարդու_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'տղամարդ'})),
EmojiAnnotations(emoji='👟', codepoints=(128095,), name='սպորտային կոշիկ', slug='սպորտային_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կեդեր', 'մարզական'})),
EmojiAnnotations(emoji='👠', codepoints=(128096,), name='բարձրակրունկ կոշիկ', slug='բարձրակրունկ_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կրունկ', 'կին'})),
EmojiAnnotations(emoji='👡', codepoints=(128097,), name='կնոջ սանդալ', slug='կնոջ_սանդալ', annotations=frozenset({'հագուստ', 'կոշիկ', 'սանդալ', 'կին'})),
EmojiAnnotations(emoji='👢', codepoints=(128098,), name='կնոջ երկարաճիթք կոշիկ', slug='կնոջ_երկարաճիթք_կոշիկ', annotations=frozenset({'երկարաճիթք կոշիկ', 'հագուստ', 'կոշիկ', 'կին'})),
EmojiAnnotations(emoji='👑', codepoints=(128081,), name='թագ', slug='թագ', annotations=frozenset({'հագուստ', 'արքա', 'թագուհի'})),
EmojiAnnotations(emoji='👒', codepoints=(128082,), name='կնոջ գլխարկ', slug='կնոջ_գլխարկ', annotations=frozenset({'հագուստ', 'գլխարկ', 'կին'})),
EmojiAnnotations(emoji='🎩', codepoints=(127913,), name='ցիլինդր', slug='ցիլինդր', annotations=frozenset({'հագուստ', 'գլխարկ'})),
EmojiAnnotations(emoji='🎓', codepoints=(127891,), name='շրջանավարտի գլխարկ', slug='շրջանավարտի_գլխարկ', annotations=frozenset({'գլխարկ', 'տոն', 'հագուստ', 'ավարտական'})),
EmojiAnnotations(emoji='\U0001f4ff', codepoints=(128255,), name='տերողորմյա', slug='տերողորմյա', annotations=frozenset({'հագուստ', 'վզնոց', 'ուլունքներ', 'աղոթք', 'կրոն'})),
EmojiAnnotations(emoji='💄', codepoints=(128132,), name='շրթներկ', slug='շրթներկ', annotations=frozenset({'կոսմետիա', 'դիմահարդարում'})),
EmojiAnnotations(emoji='💍', codepoints=(128141,), name='մատանի', slug='մատանի', annotations=frozenset({'ադամանդ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💎', codepoints=(128142,), name='թանկարժեք քար', slug='թանկարժեք_քար', annotations=frozenset({'ադամանդ', 'ակն', 'սիրավեպ', 'գոհար'})),
EmojiAnnotations(emoji='🐵', codepoints=(128053,), name='կապիկի դեմք', slug='կապիկի_դեմք', annotations=frozenset({'դեմք', 'կապիկ'})),
EmojiAnnotations(emoji='🐶', codepoints=(128054,), name='շան դեմք', slug='շան_դեմք', annotations=frozenset({'դեմք', 'շուն', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐕', codepoints=(128021,), name='շուն', slug='շուն', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐩', codepoints=(128041,), name='պուդել', slug='պուդել', annotations=frozenset({'շուն'})),
EmojiAnnotations(emoji='🐺', codepoints=(128058,), name='գայլի դեմք', slug='գայլի_դեմք', annotations=frozenset({'դեմք', 'գայլ'})),
EmojiAnnotations(emoji='🐱', codepoints=(128049,), name='կատվի դեմք', slug='կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐈', codepoints=(128008,), name='կատու', slug='կատու', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='\U0001f981', codepoints=(129409,), name='առյուծի դեմք', slug='առյուծի_դեմք', annotations=frozenset({'դեմք', 'առյուծ', 'կենդանակերպ', 'կորյուն'})),
EmojiAnnotations(emoji='🐯', codepoints=(128047,), name='վագրի դեմք', slug='վագրի_դեմք', annotations=frozenset({'դեմք', 'վագր'})),
EmojiAnnotations(emoji='🐴', codepoints=(128052,), name='ձիու դեմք', slug='ձիու_դեմք', annotations=frozenset({'դեմք', 'ձի'})),
EmojiAnnotations(emoji='🐎', codepoints=(128014,), name='ձի', slug='ձի', annotations=frozenset({'մրցավազք', 'մրցավազքային ձի'})),
EmojiAnnotations(emoji='\U0001f984', codepoints=(129412,), name='միաեղջյուրի դեմք', slug='միաեղջյուրի_դեմք', annotations=frozenset({'դեմք', 'միաեղջյուր'})),
EmojiAnnotations(emoji='🐮', codepoints=(128046,), name='կովի դեմք', slug='կովի_դեմք', annotations=frozenset({'դեմք', 'կով'})),
EmojiAnnotations(emoji='🐂', codepoints=(128002,), name='ցուլիկ', slug='ցուլիկ', annotations=frozenset({'կենդանակերպ', 'ցուլ'})),
EmojiAnnotations(emoji='🐃', codepoints=(128003,), name='ջրագոմեշ', slug='ջրագոմեշ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🐷', codepoints=(128055,), name='խոզի դեմք', slug='խոզի_դեմք', annotations=frozenset({'դեմք', 'խոզ'})),
EmojiAnnotations(emoji='🐖', codepoints=(128022,), name='խոզ', slug='խոզ', annotations=frozenset({'էգ խոզ'})),
EmojiAnnotations(emoji='🐗', codepoints=(128023,), name='վարազ', slug='վարազ', annotations=frozenset({'խոզ'})),
EmojiAnnotations(emoji='🐽', codepoints=(128061,), name='խոզի քիթ', slug='խոզի_քիթ', annotations=frozenset({'դեմք', 'քիթ', 'խոզ'})),
EmojiAnnotations(emoji='🐏', codepoints=(128015,), name='արու ոչխար', slug='արու_ոչխար', annotations=frozenset({'ոչխար', 'կենդանակերպ', 'խոյ'})),
EmojiAnnotations(emoji='🐑', codepoints=(128017,), name='ոչխար', slug='ոչխար', annotations=frozenset({'մաքի'})),
EmojiAnnotations(emoji='🐐', codepoints=(128016,), name='այծ', slug='այծ', annotations=frozenset({'այծեղջյուր', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐪', codepoints=(128042,), name='ուղտ', slug='ուղտ', annotations=frozenset({'միասապատ', 'կուզ'})),
EmojiAnnotations(emoji='🐫', codepoints=(128043,), name='երկսապատավոր ուղտ', slug='երկսապատավոր_ուղտ', annotations=frozenset({'ուղտ', 'երկսապատանի', 'կուզ'})),
EmojiAnnotations(emoji='🐭', codepoints=(128045,), name='մկան դեմք', slug='մկան_դեմք', annotations=frozenset({'դեմք', 'մուկ'})),
EmojiAnnotations(emoji='🐹', codepoints=(128057,), name='գերմանամկան դեմք', slug='գերմանամկան_դեմք', annotations=frozenset({'դեմք', 'գերմանամուկ', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐰', codepoints=(128048,), name='ճագարի դեմք', slug='ճագարի_դեմք', annotations=frozenset({'դեմք', 'ընտանի կենդանի', 'նապաստակ', 'ճագար'})),
EmojiAnnotations(emoji='🐇', codepoints=(128007,), name='ճագար', slug='ճագար', annotations=frozenset({'ընտանի կենդանի', 'նապաստակ'})),
EmojiAnnotations(emoji='🐻', codepoints=(128059,), name='արջի դեմք', slug='արջի_դեմք', annotations=frozenset({'դեմք', 'արջ'})),
EmojiAnnotations(emoji='🐨', codepoints=(128040,), name='կոալա', slug='կոալա', annotations=frozenset({'արջ'})),
EmojiAnnotations(emoji='🐼', codepoints=(128060,), name='պանդայի դեմք', slug='պանդայի_դեմք', annotations=frozenset({'դեմք', 'պանդա'})),
EmojiAnnotations(emoji='🐾', codepoints=(128062,), name='թաթերի հետքեր', slug='թաթերի_հետքեր', annotations=frozenset({'ոտքեր', 'հետք', 'թաթ'})),
EmojiAnnotations(emoji='🐓', codepoints=(128019,), name='աքաղաղ', slug='աքաղաղ', annotations=frozenset({'աքաղաք'})),
EmojiAnnotations(emoji='🐣', codepoints=(128035,), name='ձվից դուրս եկող ճուտիկ', slug='ձվից_դուրս_եկող_ճուտիկ', annotations=frozenset({'ձագ', 'ձվից դուրս եկող', 'ճուտիկ'})),
EmojiAnnotations(emoji='🐤', codepoints=(128036,), name='ճուտիկ', slug='ճուտիկ', annotations=frozenset({'ձագ'})),
EmojiAnnotations(emoji='🐥', codepoints=(128037,), name='դեմքով կանգնած ճուտիկ', slug='դեմքով_կանգնած_ճուտիկ', annotations=frozenset({'ձագ', 'ճուտիկ'})),
EmojiAnnotations(emoji='\U0001f54a', codepoints=(128330,), name='աղավնի', slug='աղավնի', annotations=frozenset({'թռչուն', 'թռչել', 'խաղաղություն'})),
EmojiAnnotations(emoji='🐸', codepoints=(128056,), name='գորտի դեմք', slug='գորտի_դեմք', annotations=frozenset({'դեմք', 'գորտ'})),
EmojiAnnotations(emoji='🐍', codepoints=(128013,), name='օձ', slug='օձ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='🐲', codepoints=(128050,), name='վիշապի դեմք', slug='վիշապի_դեմք', annotations=frozenset({'վիշապ', 'դեմք', 'հեքիաթ'})),
EmojiAnnotations(emoji='🐉', codepoints=(128009,), name='վիշապ', slug='վիշապ', annotations=frozenset({'հեքիաթ'})),
EmojiAnnotations(emoji='🐳', codepoints=(128051,), name='ջուր ցայտեցնող կետաձուկ', slug='ջուր_ցայտեցնող_կետաձուկ', annotations=frozenset({'դեմք', 'կետաձուկ', 'ցայտում'})),
EmojiAnnotations(emoji='🐟', codepoints=(128031,), name='ձուկ', slug='ձուկ', annotations=frozenset({'կենդանակերպ', 'ձկներ'})),
EmojiAnnotations(emoji='🐠', codepoints=(128032,), name='արևադարձային ձուկ', slug='արևադարձային_ձուկ', annotations=frozenset({'ձուկ', 'արևադարձային'})),
EmojiAnnotations(emoji='🐡', codepoints=(128033,), name='փքաձուկ', slug='փքաձուկ', annotations=frozenset({'ձուկ'})),
EmojiAnnotations(emoji='🐚', codepoints=(128026,), name='պարուրաձև խխունջախեցի', slug='պարուրաձև_խխունջախեցի', annotations=frozenset({'պարույր', 'խխունջ'})),
EmojiAnnotations(emoji='\U0001f980', codepoints=(129408,), name='կրաբ', slug='կրաբ', annotations=frozenset({'խեցգետին', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐜', codepoints=(128028,), name='մրջյուն', slug='մրջյուն', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐝', codepoints=(128029,), name='մեղու', slug='մեղու', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐞', codepoints=(128030,), name='զատիկ', slug='զատիկ', annotations=frozenset({'միջատ', 'բզեզ'})),
EmojiAnnotations(emoji='\U0001f577', codepoints=(128375,), name='սարդ', slug='սարդ', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='\U0001f578', codepoints=(128376,), name='սարդոստայն', slug='սարդոստայն', annotations=frozenset({'սարդ', 'ոստայն'})),
EmojiAnnotations(emoji='\U0001f982', codepoints=(129410,), name='շագանակագույն կարիճ', slug='շագանակագույն_կարիճ', annotations=frozenset({'կարիճ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='💐', codepoints=(128144,), name='ծաղկեփունջ', slug='ծաղկեփունջ', annotations=frozenset({'ծաղիկ', 'սիրավեպ', 'բույս'})),
EmojiAnnotations(emoji='🌸', codepoints=(127800,), name='բալենու ծաղիկ', slug='բալենու_ծաղիկ', annotations=frozenset({'ծաղիկ', 'բույս', 'բալ'})),
EmojiAnnotations(emoji='💮', codepoints=(128174,), name='սպիտակ ծաղիկ', slug='սպիտակ_ծաղիկ', annotations=frozenset({'ծաղիկ'})),
EmojiAnnotations(emoji='\U0001f3f5', codepoints=(127989,), name='վարդանախշ', slug='վարդանախշ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌹', codepoints=(127801,), name='վարդ', slug='վարդ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌺', codepoints=(127802,), name='հիբիսկուս', slug='հիբիսկուս', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌻', codepoints=(127803,), name='արևածաղիկ', slug='արևածաղիկ', annotations=frozenset({'ծաղիկ', 'արև', 'բույս'})),
EmojiAnnotations(emoji='🌼', codepoints=(127804,), name='ծաղիկ', slug='ծաղիկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌷', codepoints=(127799,), name='կակաչ', slug='կակաչ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌱', codepoints=(127793,), name='ծիլ', slug='ծիլ', annotations=frozenset({'բույս', 'մատղաշ'})),
EmojiAnnotations(emoji='🌲', codepoints=(127794,), name='եղևնի', slug='եղևնի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌳', codepoints=(127795,), name='սաղարթավոր ծառ', slug='սաղարթավոր_ծառ', annotations=frozenset({'սաղարթավոր', 'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌴', codepoints=(127796,), name='արմավենի', slug='արմավենի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌵', codepoints=(127797,), name='կակտուս', slug='կակտուս', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌾', codepoints=(127806,), name='բրնձի հասկեր', slug='բրնձի_հասկեր', annotations=frozenset({'ականջ', 'բույս', 'բրինձ'})),
EmojiAnnotations(emoji='🌿', codepoints=(127807,), name='խոտաբույս', slug='խոտաբույս', annotations=frozenset({'տերև', 'բույս'})),
EmojiAnnotations(emoji='☘', codepoints=(9752,), name='երեքնուկ', slug='երեքնուկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍀', codepoints=(127808,), name='քառատերև երեքնուկ', slug='քառատերև_երեքնուկ', annotations=frozenset({'4', 'չորս', 'տերև', 'բույս', 'երեքնուկ'})),
EmojiAnnotations(emoji='🍁', codepoints=(127809,), name='թխկու տերև', slug='թխկու_տերև', annotations=frozenset({'տերև', 'թխկի', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍂', codepoints=(127810,), name='ընկած տերևներ', slug='ընկած_տերևներ', annotations=frozenset({'տերև', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍃', codepoints=(127811,), name='ճախրող տերևներ', slug='ճախրող_տերևներ', annotations=frozenset({'տերև', 'քամի', 'փչել', 'թրթռալ', 'բույս'})),
EmojiAnnotations(emoji='🍇', codepoints=(127815,), name='խաղող', slug='խաղող', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍈', codepoints=(127816,), name='սեխ', slug='սեխ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍉', codepoints=(127817,), name='ձմերուկ', slug='ձմերուկ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍊', codepoints=(127818,), name='մանդարին', slug='մանդարին', annotations=frozenset({'նարինջ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍋', codepoints=(127819,), name='կիտրոն', slug='կիտրոն', annotations=frozenset({'ցիտրուս', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍌', codepoints=(127820,), name='բանան', slug='բանան', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍍', codepoints=(127821,), name='արքայախնձոր', slug='արքայախնձոր', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍎', codepoints=(127822,), name='կարմիր խնձոր', slug='կարմիր_խնձոր', annotations=frozenset({'կարմիր', 'խնձոր', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍏', codepoints=(127823,), name='կանաչ խնձոր', slug='կանաչ_խնձոր', annotations=frozenset({'խնձոր', 'բույս', 'պտուղ', 'կանաչ'})),
EmojiAnnotations(emoji='🍐', codepoints=(127824,), name='տանձ', slug='տանձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍑', codepoints=(127825,), name='դեղձ', slug='դեղձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍒', codepoints=(127826,), name='բալ', slug='բալ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍓', codepoints=(127827,), name='ելակ', slug='ելակ', annotations=frozenset({'հատապտուղ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍅', codepoints=(127813,), name='լոլիկ', slug='լոլիկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🍆', codepoints=(127814,), name='սմբուկ', slug='սմբուկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🌽', codepoints=(127805,), name='եգիպտացորեն', slug='եգիպտացորեն', annotations=frozenset({'ականջ', 'բույս'})),
EmojiAnnotations(emoji='\U0001f336', codepoints=(127798,), name='կծու պղպեղ', slug='կծու_պղպեղ', annotations=frozenset({'պղպեղ', 'կծու', 'բույս'})),
EmojiAnnotations(emoji='🍄', codepoints=(127812,), name='սունկ', slug='սունկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌰', codepoints=(127792,), name='շագանակ', slug='շագանակ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍞', codepoints=(127838,), name='հաց', slug='հաց', annotations=frozenset({'բոքոն'})),
EmojiAnnotations(emoji='\U0001f9c0', codepoints=(129472,), name='պանրի կտոր', slug='պանրի_կտոր', annotations=frozenset({'պանիր'})),
EmojiAnnotations(emoji='🍖', codepoints=(127830,), name='ոսկորով միս', slug='ոսկորով_միս', annotations=frozenset({'ոսկոր', 'միս'})),
EmojiAnnotations(emoji='🍗', codepoints=(127831,), name='հավի բուդ', slug='հավի_բուդ', annotations=frozenset({'ոսկոր', 'բուդ', 'հավ', 'թռչնամիս'})),
EmojiAnnotations(emoji='🍔', codepoints=(127828,), name='համբուրգեր', slug='համբուրգեր', annotations=frozenset({'բուրգեր'})),
EmojiAnnotations(emoji='🍟', codepoints=(127839,), name='տապակած կարտոֆիլ', slug='տապակած_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'ֆրի'})),
EmojiAnnotations(emoji='🍕', codepoints=(127829,), name='պիցցա', slug='պիցցա', annotations=frozenset({'պանիր', 'կտոր'})),
EmojiAnnotations(emoji='\U0001f32d', codepoints=(127789,), name='հոթդոգ', slug='հոթդոգ', annotations=frozenset({'նրբերշիկ', 'ֆրանկֆուրտեր'})),
EmojiAnnotations(emoji='\U0001f32e', codepoints=(127790,), name='տակո', slug='տակո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='\U0001f32f', codepoints=(127791,), name='բուրիտո', slug='բուրիտո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='🍲', codepoints=(127858,), name='թասով ճաշ', slug='թասով_ճաշ', annotations=frozenset({'թաս', 'ճաշ'})),
EmojiAnnotations(emoji='🍱', codepoints=(127857,), name='բենտո արկղիկ', slug='բենտո_արկղիկ', annotations=frozenset({'բենտո', 'արկղիկ'})),
EmojiAnnotations(emoji='🍘', codepoints=(127832,), name='բրնձի կրեկեր', slug='բրնձի_կրեկեր', annotations=frozenset({'բրինձ', 'կրեկեր'})),
EmojiAnnotations(emoji='🍙', codepoints=(127833,), name='բրնձի գնդիկ', slug='բրնձի_գնդիկ', annotations=frozenset({'գնդիկ', 'ճապոնական', 'բրինձ'})),
EmojiAnnotations(emoji='🍚', codepoints=(127834,), name='եփած բրինձ', slug='եփած_բրինձ', annotations=frozenset({'եփած', 'բրինձ'})),
EmojiAnnotations(emoji='🍛', codepoints=(127835,), name='կարրիով բրինձ', slug='կարրիով_բրինձ', annotations=frozenset({'կարրի', 'բրինձ'})),
EmojiAnnotations(emoji='🍜', codepoints=(127836,), name='տաք ապուր', slug='տաք_ապուր', annotations=frozenset({'թաս', 'տաք', 'լապշա'})),
EmojiAnnotations(emoji='🍝', codepoints=(127837,), name='սպագետի', slug='սպագետի', annotations=frozenset({'մակարոնեղեն'})),
EmojiAnnotations(emoji='🍠', codepoints=(127840,), name='կարմրացրած քաղցր կարտոֆիլ', slug='կարմրացրած_քաղցր_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'կարմրացրած', 'քաղցր'})),
EmojiAnnotations(emoji='🍢', codepoints=(127842,), name='օդեն', slug='օդեն', annotations=frozenset({'ծովամթերք', 'շամփուր', 'քյաբաբ', 'փայտիկ'})),
EmojiAnnotations(emoji='🍤', codepoints=(127844,), name='տապակած ծովախեցգետին', slug='տապակած_ծովախեցգետին', annotations=frozenset({'տապակած', 'ծովախեցգետին'})),
EmojiAnnotations(emoji='🍥', codepoints=(127845,), name='ձկնային տորթ պտտանախշով', slug='ձկնային_տորթ_պտտանախշով', annotations=frozenset({'տորթ', 'խմորեղեն', 'ձուկ', 'պտտանախշ'})),
EmojiAnnotations(emoji='🍡', codepoints=(127841,), name='դանգո', slug='դանգո', annotations=frozenset({'շամփուր', 'փայտիկ', 'քաղցր', 'ճապոնական', 'դեսերտ'})),
EmojiAnnotations(emoji='🍦', codepoints=(127846,), name='լցնովի պաղպաղակ', slug='լցնովի_պաղպաղակ', annotations=frozenset({'քաղցր', 'պաղպաղակ', 'դեսերտ', 'կրեմ', 'լցնովի', 'փափուկ'})),
EmojiAnnotations(emoji='🍧', codepoints=(127847,), name='մանրացված սառույց', slug='մանրացված_սառույց', annotations=frozenset({'սառույց', 'մանրացված', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍨', codepoints=(127848,), name='պաղպաղակ', slug='պաղպաղակ', annotations=frozenset({'կրեմ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍩', codepoints=(127849,), name='դոնաթ', slug='դոնաթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍪', codepoints=(127850,), name='թխվածքաբլիթ', slug='թխվածքաբլիթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🎂', codepoints=(127874,), name='ծննդյան տորթ', slug='ծննդյան_տորթ', annotations=frozenset({'քաղցր', 'տոն', 'դեսերտ', 'տորթ', 'խմորեղեն', 'տարեդարձ'})),
EmojiAnnotations(emoji='🍰', codepoints=(127856,), name='տորթի կտոր', slug='տորթի_կտոր', annotations=frozenset({'կտոր', 'տորթ', 'խմորեղեն', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍫', codepoints=(127851,), name='շոկոլադե սալիկ', slug='շոկոլադե_սալիկ', annotations=frozenset({'սալիկ', 'շոկոլադ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍬', codepoints=(127852,), name='կոնֆետ', slug='կոնֆետ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍭', codepoints=(127853,), name='սառնաշաքար', slug='սառնաշաքար', annotations=frozenset({'կոնֆետ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍮', codepoints=(127854,), name='պուդինգ', slug='պուդինգ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍯', codepoints=(127855,), name='մեղրի կճուճ', slug='մեղրի_կճուճ', annotations=frozenset({'քաղցր', 'մեղր', 'կճուճ'})),
EmojiAnnotations(emoji='🍼', codepoints=(127868,), name='մանկական շիշ', slug='մանկական_շիշ', annotations=frozenset({'մանկական', 'շիշ', 'խմել', 'կաթ'})),
EmojiAnnotations(emoji='☕', codepoints=(9749,), name='տաք ըմպելիք', slug='տաք_ըմպելիք', annotations=frozenset({'սուրճ', 'թեյ', 'խմել', 'տաք', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍵', codepoints=(127861,), name='թեյի բաժակ առանց բռնակի', slug='թեյի_բաժակ_առանց_բռնակի', annotations=frozenset({'թեյի բաժակ', 'բաժակ', 'խմել', 'թեյ', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍶', codepoints=(127862,), name='սակե', slug='սակե', annotations=frozenset({'բար', 'շիշ', 'բաժակ', 'խմել', 'ըմպելիք'})),
EmojiAnnotations(emoji='\U0001f37e', codepoints=(127870,), name='թռչող խցանով շիշ', slug='թռչող_խցանով_շիշ', annotations=frozenset({'բար', 'խցան', 'շիշ', 'խմել', 'դուրս թռչել'})),
EmojiAnnotations(emoji='🍷', codepoints=(127863,), name='գինու բաժակ', slug='գինու_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'գինի', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍸', codepoints=(127864,), name='կոկտեյլի բաժակ', slug='կոկտեյլի_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'կոկտեյլ'})),
EmojiAnnotations(emoji='🍹', codepoints=(127865,), name='արևադարձային ընպելիք', slug='արևադարձային_ընպելիք', annotations=frozenset({'բար', 'խմել', 'արևադարձային'})),
EmojiAnnotations(emoji='🍺', codepoints=(127866,), name='գարեջրի գավաթ', slug='գարեջրի_գավաթ', annotations=frozenset({'բար', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='🍻', codepoints=(127867,), name='զրնգացող գարեջրի գավաթներ', slug='զրնգացող_գարեջրի_գավաթներ', annotations=frozenset({'բար', 'զրնգալ', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='\U0001f37d', codepoints=(127869,), name='դանակ և պատառաքաղ ափսեի հետ', slug='դանակ_և_պատառաքաղ_ափսեի_հետ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ', 'ափսե'})),
EmojiAnnotations(emoji='🍴', codepoints=(127860,), name='դանակ և պատառաքաղ', slug='դանակ_և_պատառաքաղ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ'})),
EmojiAnnotations(emoji='🍳', codepoints=(127859,), name='թավայով ձվածեղ', slug='թավայով_ձվածեղ', annotations=frozenset({'ձու', 'թավա', 'տապակել'})),
EmojiAnnotations(emoji='\U0001f3fa', codepoints=(127994,), name='սափոր', slug='սափոր', annotations=frozenset({'խմել', 'խոհարարություն', 'գործիք', 'կենդանակերպ', 'զենք', 'ջրհոս'})),
EmojiAnnotations(emoji='🌍', codepoints=(127757,), name='եվրոպան և աֆրիկան պատկերող գլոբուս', slug='եվրոպան_և_աֆրիկան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'եվրոպա', 'աֆրիկա'})),
EmojiAnnotations(emoji='🌎', codepoints=(127758,), name='ամերիկաները պատկերող գլոբուս', slug='ամերիկաները_պատկերող_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'ամերիկաներ', 'աշխարհ'})),
EmojiAnnotations(emoji='🌏', codepoints=(127759,), name='ասիան և ավստրալիան պատկերող գլոբուս', slug='ասիան_և_ավստրալիան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'ավստրալիա', 'ասիա'})),
EmojiAnnotations(emoji='🌐', codepoints=(127760,), name='միջօրեականներով գլոբուս', slug='միջօրեականներով_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'միջօրեականներ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f5fa', codepoints=(128506,), name='աշխարհի քարտեզ', slug='աշխարհի_քարտեզ', annotations=frozenset({'քարտեզ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f3d4', codepoints=(127956,), name='ձյունածածկ գագաթով լեռ', slug='ձյունածածկ_գագաթով_լեռ', annotations=frozenset({'սառը', 'ձյուն', 'լեռ'})),
EmojiAnnotations(emoji='🌋', codepoints=(127755,), name='հրաբուխ', slug='հրաբուխ', annotations=frozenset({'եղանակ', 'լեռ', 'ժայթքում'})),
EmojiAnnotations(emoji='🗻', codepoints=(128507,), name='ֆուջի լեռ', slug='ֆուջի_լեռ', annotations=frozenset({'լեռ', 'ֆուջի'})),
EmojiAnnotations(emoji='\U0001f3d6', codepoints=(127958,), name='լողափ հովանոցով', slug='լողափ_հովանոցով', annotations=frozenset({'լողափ', 'հովանոց'})),
EmojiAnnotations(emoji='\U0001f3dd', codepoints=(127965,), name='անմարդաբնակ կղզի', slug='անմարդաբնակ_կղզի', annotations=frozenset({'կղզի', 'անմարդաբնակ'})),
EmojiAnnotations(emoji='\U0001f3de', codepoints=(127966,), name='ազգային պարկ', slug='ազգային_պարկ', annotations=frozenset({'պարկ'})),
EmojiAnnotations(emoji='\U0001f3db', codepoints=(127963,), name='հունահռոմեական շինություն', slug='հունահռոմեական_շինություն', annotations=frozenset({'հունահռոմեական', 'շինություն'})),
EmojiAnnotations(emoji='\U0001f3d7', codepoints=(127959,), name='շենքի կառուցում', slug='շենքի_կառուցում', annotations=frozenset({'շենք', 'շինարարություն'})),
EmojiAnnotations(emoji='\U0001f3d8', codepoints=(127960,), name='տան շինարարություն', slug='տան_շինարարություն', annotations=frozenset({'շենք', 'տուն'})),
EmojiAnnotations(emoji='\U0001f3d9', codepoints=(127961,), name='քաղաքի համայնապատկեր', slug='քաղաքի_համայնապատկեր', annotations=frozenset({'քաղաք', 'շենք'})),
EmojiAnnotations(emoji='\U0001f3da', codepoints=(127962,), name='լքված շինություն', slug='լքված_շինություն', annotations=frozenset({'լքված', 'շենք', 'տուն'})),
EmojiAnnotations(emoji='🏠', codepoints=(127968,), name='բնակելի տուն', slug='բնակելի_տուն', annotations=frozenset({'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='🏡', codepoints=(127969,), name='այգիով տուն', slug='այգիով_տուն', annotations=frozenset({'այգի', 'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='⛪', codepoints=(9962,), name='եկեղեցի', slug='եկեղեցի', annotations=frozenset({'խաչ', 'շենք', 'կրոն', 'քրիստոնեական'})),
EmojiAnnotations(emoji='\U0001f54b', codepoints=(128331,), name='կաաբա', slug='կաաբա', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54c', codepoints=(128332,), name='մզկիթ', slug='մզկիթ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54d', codepoints=(128333,), name='սինագոգ', slug='սինագոգ', annotations=frozenset({'հրեա', 'հրեական', 'տաճար', 'կրոն'})),
EmojiAnnotations(emoji='⛩', codepoints=(9961,), name='սինտոյական տաճար', slug='սինտոյական_տաճար', annotations=frozenset({'տաճար', 'կրոն', 'սինտոյական'})),
EmojiAnnotations(emoji='🏢', codepoints=(127970,), name='գրասենյակային շենք', slug='գրասենյակային_շենք', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏣', codepoints=(127971,), name='ճապոնական փոստատուն', slug='ճապոնական_փոստատուն', annotations=frozenset({'փոստատուն', 'շենք', 'ճապոնական'})),
EmojiAnnotations(emoji='🏤', codepoints=(127972,), name='փոստատուն', slug='փոստատուն', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='🏥', codepoints=(127973,), name='հիվանդանոց', slug='հիվանդանոց', annotations=frozenset({'բժշկություն', 'շենք', 'բժիշկ'})),
EmojiAnnotations(emoji='🏦', codepoints=(127974,), name='բանկ', slug='բանկ', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏨', codepoints=(127976,), name='հյուրանոց', slug='հյուրանոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏩', codepoints=(127977,), name='սիրային հյուրանոց', slug='սիրային_հյուրանոց', annotations=frozenset({'շենք', 'հյուրանոց', 'սեր'})),
EmojiAnnotations(emoji='🏪', codepoints=(127978,), name='շուրջօրյա խանութ', slug='շուրջօրյա_խանութ', annotations=frozenset({'խանութ', 'շենք', 'շուրջօրյա'})),
EmojiAnnotations(emoji='🏫', codepoints=(127979,), name='դպրոց', slug='դպրոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏬', codepoints=(127980,), name='հանրախանութ', slug='հանրախանութ', annotations=frozenset({'խանութ', 'շենք'})),
EmojiAnnotations(emoji='🏭', codepoints=(127981,), name='գործարան', slug='գործարան', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏯', codepoints=(127983,), name='ճապոնական դղյակ', slug='ճապոնական_դղյակ', annotations=frozenset({'շենք', 'ճապոնական', 'դղյակ'})),
EmojiAnnotations(emoji='🏰', codepoints=(127984,), name='դղյակ', slug='դղյակ', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='💒', codepoints=(128146,), name='հարսանիք', slug='հարսանիք', annotations=frozenset({'մատուռ', 'սիրավեպ'})),
EmojiAnnotations(emoji='🗼', codepoints=(128508,), name='տոկիոյի աշտարակը', slug='տոկիոյի_աշտարակը', annotations=frozenset({'աշտարակ', 'տոկյո'})),
EmojiAnnotations(emoji='🗽', codepoints=(128509,), name='ազատության արձանը', slug='ազատության_արձանը', annotations=frozenset({'ազատություն', 'արձան'})),
EmojiAnnotations(emoji='🗾', codepoints=(128510,), name='ճապոնիայի քարտեզը', slug='ճապոնիայի_քարտեզը', annotations=frozenset({'քարտեզ', 'ճապոնիա'})),
EmojiAnnotations(emoji='⛺', codepoints=(9978,), name='վրան', slug='վրան', annotations=frozenset({'ճամբար'})),
EmojiAnnotations(emoji='🌁', codepoints=(127745,), name='մառախլապատ', slug='մառախլապատ', annotations=frozenset({'եղանակ', 'մառախուղ'})),
EmojiAnnotations(emoji='🌃', codepoints=(127747,), name='աստղազարդ գիշեր', slug='աստղազարդ_գիշեր', annotations=frozenset({'եղանակ', 'գիշեր', 'աստղ'})),
EmojiAnnotations(emoji='🌄', codepoints=(127748,), name='արևածագը լեռներում', slug='արևածագը_լեռներում', annotations=frozenset({'արևածագ', 'եղանակ', 'արև', 'լեռ', 'առավոտ'})),
EmojiAnnotations(emoji='🌅', codepoints=(127749,), name='արևածագ', slug='արևածագ', annotations=frozenset({'եղանակ', 'արև', 'առավոտ'})),
EmojiAnnotations(emoji='🌆', codepoints=(127750,), name='քաղաքի համայնապատկեր մթնշաղին', slug='քաղաքի_համայնապատկեր_մթնշաղին', annotations=frozenset({'լանդշաֆտ', 'երեկո', 'շենք', 'մթնշաղ', 'մայրամուտ', 'եղանակ', 'քաղաք', 'արև'})),
EmojiAnnotations(emoji='🌇', codepoints=(127751,), name='մայրամուտ', slug='մայրամուտ', annotations=frozenset({'եղանակ', 'արև', 'շենք', 'մթնշաղ'})),
EmojiAnnotations(emoji='🌉', codepoints=(127753,), name='կամուրջը գիշերով', slug='կամուրջը_գիշերով', annotations=frozenset({'եղանակ', 'գիշեր', 'կամուրջ'})),
EmojiAnnotations(emoji='♨', codepoints=(9832,), name='տաք աղբյուրներ', slug='տաք_աղբյուրներ', annotations=frozenset({'աղբյուրներ', 'տաք', 'հոսք'})),
EmojiAnnotations(emoji='🌌', codepoints=(127756,), name='ծիր կաթին', slug='ծիր_կաթին', annotations=frozenset({'եղանակ', 'տիեզերք'})),
EmojiAnnotations(emoji='🎠', codepoints=(127904,), name='կարուսելի ձի', slug='կարուսելի_ձի', annotations=frozenset({'ձի', 'կարուսել'})),
EmojiAnnotations(emoji='🎡', codepoints=(127905,), name='սատանայի անիվ', slug='սատանայի_անիվ', annotations=frozenset({'զվարճանքների այգի', 'անիվ', 'սատանայի'})),
EmojiAnnotations(emoji='🎢', codepoints=(127906,), name='ամերիկյան բլուրներ', slug='ամերիկյան_բլուրներ', annotations=frozenset({'զվարճանքների այգի', 'բլուրներ', 'ամերիկյան'})),
EmojiAnnotations(emoji='💈', codepoints=(128136,), name='վարսավիրի ձող', slug='վարսավիրի_ձող', annotations=frozenset({'վարսավիր', 'սանրվածք', 'ձող'})),
EmojiAnnotations(emoji='🎪', codepoints=(127914,), name='կրկեսային վրան', slug='կրկեսային_վրան', annotations=frozenset({'վրան', 'կրկես'})),
EmojiAnnotations(emoji='🎭', codepoints=(127917,), name='կատարողական արվեստ', slug='կատարողական_արվեստ', annotations=frozenset({'ներկայացում', 'թատրոն', 'դիմակ', 'արվեստ'})),
EmojiAnnotations(emoji='\U0001f5bc', codepoints=(128444,), name='շրջանակ նկարով', slug='շրջանակ_նկարով', annotations=frozenset({'նկարչություն', 'նկար', 'արվեստ', 'շրջանակ', 'թանգարան'})),
EmojiAnnotations(emoji='🎨', codepoints=(127912,), name='ներկապնակ', slug='ներկապնակ', annotations=frozenset({'նկարչություն', 'արվեստ', 'թանգարան'})),
EmojiAnnotations(emoji='🎰', codepoints=(127920,), name='խաղային ավտոմատ', slug='խաղային_ավտոմատ', annotations=frozenset({'խաղ', 'ավտոմատ'})),
EmojiAnnotations(emoji='🚂', codepoints=(128642,), name='շոգեքարշ', slug='շոգեքարշ', annotations=frozenset({'փոխադրամիջոց', 'գոլորշի', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚃', codepoints=(128643,), name='երկաթուղային վագոն', slug='երկաթուղային_վագոն', annotations=frozenset({'փոխադրամիջոց', 'տրոլեյբուս', 'էլեկտրական', 'երկաթուղի', 'վագոն', 'տրամվայ', 'գնացք'})),
EmojiAnnotations(emoji='🚄', codepoints=(128644,), name='ճեպընթաց գնացք', slug='ճեպընթաց_գնացք', annotations=frozenset({'գնացք', 'փոխադրամիջոց', 'սինկանսեն', 'երկաթուղի', 'արագություն'})),
EmojiAnnotations(emoji='🚅', codepoints=(128645,), name='ճեպընթաց գնացք կլոր քթով', slug='ճեպընթաց_գնացք_կլոր_քթով', annotations=frozenset({'փոխադրամիջոց', 'արագություն', 'կլոր քիթ', 'երկաթուղի', 'սինկանսեն', 'գնացք'})),
EmojiAnnotations(emoji='🚆', codepoints=(128646,), name='գնացք', slug='գնացք', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚇', codepoints=(128647,), name='մետրո', slug='մետրո', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚈', codepoints=(128648,), name='վերգետնյա մետրո', slug='վերգետնյա_մետրո', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚉', codepoints=(128649,), name='կայարան', slug='կայարան', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚊', codepoints=(128650,), name='տրամվայ', slug='տրամվայ', annotations=frozenset({'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚝', codepoints=(128669,), name='մոնոռելս', slug='մոնոռելս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚞', codepoints=(128670,), name='լեռնային երկաթուղի', slug='լեռնային_երկաթուղի', annotations=frozenset({'վագոն', 'լեռ', 'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚋', codepoints=(128651,), name='տրամվայի վագոն', slug='տրամվայի_վագոն', annotations=frozenset({'տրամվայ', 'վագոն', 'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚌', codepoints=(128652,), name='ավտոբուս', slug='ավտոբուս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚍', codepoints=(128653,), name='մոտեցող ավտոբուս', slug='մոտեցող_ավտոբուս', annotations=frozenset({'մոտեցող', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚎', codepoints=(128654,), name='տրոլեյբուս', slug='տրոլեյբուս', annotations=frozenset({'տրամվայ', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚏', codepoints=(128655,), name='ավտոբուսի կանգառ', slug='ավտոբուսի_կանգառ', annotations=frozenset({'ավտոբուս', 'կանգառ'})),
EmojiAnnotations(emoji='🚐', codepoints=(128656,), name='միկրոավտոբուս', slug='միկրոավտոբուս', annotations=frozenset({'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚑', codepoints=(128657,), name='շտապօգնության մեքենա', slug='շտապօգնության_մեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚒', codepoints=(128658,), name='հրշեջ մեքենա', slug='հրշեջ_մեքենա', annotations=frozenset({'քարշակ', 'փոխադրամիջոց', 'հրդեք', 'բեռնատար'})),
EmojiAnnotations(emoji='🚓', codepoints=(128659,), name='ոստիկանական մեքենա', slug='ոստիկանական_մեքենա', annotations=frozenset({'պարեկ', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚔', codepoints=(128660,), name='մոտեցող ոստիկանական մեքենա', slug='մոտեցող_ոստիկանական_մեքենա', annotations=frozenset({'մոտեցող', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚕', codepoints=(128661,), name='տաքսի', slug='տաքսի', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚖', codepoints=(128662,), name='մոտեցող տաքսի', slug='մոտեցող_տաքսի', annotations=frozenset({'մոտեցող', 'տաքսի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚗', codepoints=(128663,), name='ավտոմեքենա', slug='ավտոմեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚘', codepoints=(128664,), name='մոտեցող ավտոմեքենա', slug='մոտեցող_ավտոմեքենա', annotations=frozenset({'մոտեցող', 'մեքենա', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚙', codepoints=(128665,), name='ավտոֆուրգոն', slug='ավտոֆուրգոն', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚚', codepoints=(128666,), name='բեռնատար', slug='բեռնատար', annotations=frozenset({'առաքում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚛', codepoints=(128667,), name='կցորդով բեռնատար', slug='կցորդով_բեռնատար', annotations=frozenset({'փոխադրամիջոց', 'կցորդ', 'բեռնատար'})),
EmojiAnnotations(emoji='🚜', codepoints=(128668,), name='տրակտոր', slug='տրակտոր', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚲', codepoints=(128690,), name='հեծանիվ', slug='հեծանիվ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛽', codepoints=(9981,), name='բենզալցակայանի պոմպ', slug='բենզալցակայանի_պոմպ', annotations=frozenset({'բենզին', 'կայան', 'վառելիք', 'պոմպ', 'բենզալցակայան'})),
EmojiAnnotations(emoji='\U0001f6e3', codepoints=(128739,), name='ավտոմայրուղի', slug='ավտոմայրուղի', annotations=frozenset({'մայրուղի', 'ճանապարհ'})),
EmojiAnnotations(emoji='\U0001f6e4', codepoints=(128740,), name='երկաթուղի', slug='երկաթուղի', annotations=frozenset({'գնացք'})),
EmojiAnnotations(emoji='🚨', codepoints=(128680,), name='ոստիկանական մեքենայի փարոս', slug='ոստիկանական_մեքենայի_փարոս', annotations=frozenset({'լույս', 'ոստիկանություն', 'փարոս', 'պտտվող', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚥', codepoints=(128677,), name='հորիզոնական լուսակիր', slug='հորիզոնական_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚦', codepoints=(128678,), name='ուղղահայաց լուսակիր', slug='ուղղահայաց_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚧', codepoints=(128679,), name='շինարարություն', slug='շինարարություն', annotations=frozenset({'արգելապատնեշ'})),
EmojiAnnotations(emoji='⚓', codepoints=(9875,), name='խարիսխ', slug='խարիսխ', annotations=frozenset({'գործիք', 'նավ'})),
EmojiAnnotations(emoji='⛵', codepoints=(9973,), name='առագաստանավ', slug='առագաստանավ', annotations=frozenset({'նավակ', 'հանգստավայր', 'ծով', 'զբոսանավ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚣', codepoints=(128675,), name='թիանավակ', slug='թիանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚤', codepoints=(128676,), name='արագընթաց մոտորանավակ', slug='արագընթաց_մոտորանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f3', codepoints=(128755,), name='ուղևորատար նավ', slug='ուղևորատար_նավ', annotations=frozenset({'նավ', 'ուղևոր', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛴', codepoints=(9972,), name='լաստանավ', slug='լաստանավ', annotations=frozenset({'նավակ'})),
EmojiAnnotations(emoji='\U0001f6e5', codepoints=(128741,), name='մոտորանավ', slug='մոտորանավ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚢', codepoints=(128674,), name='նավ', slug='նավ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='✈', codepoints=(9992,), name='ինքնաթիռ', slug='ինքնաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6e9', codepoints=(128745,), name='փոքր ինքնաթիռ', slug='փոքր_ինքնաթիռ', annotations=frozenset({'ինքնաթիռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6eb', codepoints=(128747,), name='օդանավի մեկնում', slug='օդանավի_մեկնում', annotations=frozenset({'ինքնաթիռ', 'գրանցում', 'մեկնում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ec', codepoints=(128748,), name='օդանավի ժամանում', slug='օդանավի_ժամանում', annotations=frozenset({'վայրէջք', 'ժամանող', 'օդանավ', 'ժամանում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='💺', codepoints=(128186,), name='նստատեղ', slug='նստատեղ', annotations=frozenset({'բազկաթոռ'})),
EmojiAnnotations(emoji='🚁', codepoints=(128641,), name='ուղղաթիռ', slug='ուղղաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚟', codepoints=(128671,), name='կախովի երկաթուղի', slug='կախովի_երկաթուղի', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'կախովի'})),
EmojiAnnotations(emoji='🚠', codepoints=(128672,), name='լեռնային ճոպանուղի', slug='լեռնային_ճոպանուղի', annotations=frozenset({'գոնդոլա', 'ճոպան', 'լեռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚡', codepoints=(128673,), name='օդային տրամվայ', slug='օդային_տրամվայ', annotations=frozenset({'օդային', 'ճոպան', 'ճոպանուղի', 'գոնդոլա', 'վագոն', 'տրամվայ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚀', codepoints=(128640,), name='հրթիռ', slug='հրթիռ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f0', codepoints=(128752,), name='արբանյակ', slug='արբանյակ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ce', codepoints=(128718,), name='հյուրանոցային զանգ', slug='հյուրանոցային_զանգ', annotations=frozenset({'զանգ', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cc', codepoints=(128716,), name='մահճակալում պառկած մարդ', slug='մահճակալում_պառկած_մարդ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cf', codepoints=(128719,), name='մահճակալ', slug='մահճակալ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cb', codepoints=(128715,), name='բազմոց և լամպ', slug='բազմոց_և_լամպ', annotations=frozenset({'լամպ', 'բազմոց', 'հյուրանոց'})),
EmojiAnnotations(emoji='🚽', codepoints=(128701,), name='զուգարանակոնք', slug='զուգարանակոնք', annotations=frozenset({'զուգարան'})),
EmojiAnnotations(emoji='🚿', codepoints=(128703,), name='լոգարանի ցնցուղ', slug='լոգարանի_ցնցուղ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🛀', codepoints=(128704,), name='լոգանք ընդունող մարդ', slug='լոգանք_ընդունող_մարդ', annotations=frozenset({'լոգարան', 'լոգասենյակ'})),
EmojiAnnotations(emoji='🛁', codepoints=(128705,), name='լոգարան', slug='լոգարան', annotations=frozenset({'լոգասենյակ'})),
EmojiAnnotations(emoji='⌛', codepoints=(8987,), name='ավազի ժամացույց', slug='ավազի_ժամացույց', annotations=frozenset({'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏳', codepoints=(9203,), name='ավազի ժամացույց հոսող ավազով', slug='ավազի_ժամացույց_հոսող_ավազով', annotations=frozenset({'ավազի ժամացույց', 'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏰', codepoints=(9200,), name='զարթուցիչ', slug='զարթուցիչ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏱', codepoints=(9201,), name='վայրկյանաչափ', slug='վայրկյանաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏲', codepoints=(9202,), name='ժամաչափ', slug='ժամաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='\U0001f570', codepoints=(128368,), name='բուխարու ժամացույց', slug='բուխարու_ժամացույց', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='🕛', codepoints=(128347,), name='ժամը տասներկուսը', slug='ժամը_տասներկուսը', annotations=frozenset({'12', 'ժամ', 'տասներկու', '00', 'ժամացույց', '12:00'})),
EmojiAnnotations(emoji='🕧', codepoints=(128359,), name='տասներկուսն անց կես', slug='տասներկուսն_անց_կես', annotations=frozenset({'12', 'տասներկու', '12:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕐', codepoints=(128336,), name='ժամը մեկը', slug='ժամը_մեկը', annotations=frozenset({'ժամ', 'մեկ', '1', '1:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕜', codepoints=(128348,), name='մեկն անց կես', slug='մեկն_անց_կես', annotations=frozenset({'մեկ', '1', '1:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕑', codepoints=(128337,), name='ժամը երկուսը', slug='ժամը_երկուսը', annotations=frozenset({'ժամ', 'երկու', '00', '2:00', 'ժամացույց', '2'})),
EmojiAnnotations(emoji='🕝', codepoints=(128349,), name='երկուսն անց կես', slug='երկուսն_անց_կես', annotations=frozenset({'երկու', '2:30', 'ժամացույց', '2', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕒', codepoints=(128338,), name='ժամը երեքը', slug='ժամը_երեքը', annotations=frozenset({'ժամ', 'երեք', '3', '00', '3:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕞', codepoints=(128350,), name='երեքն անց կես', slug='երեքն_անց_կես', annotations=frozenset({'երեք', '3', '3:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕓', codepoints=(128339,), name='ժամը չորսը', slug='ժամը_չորսը', annotations=frozenset({'4:00', 'ժամ', '00', 'չորս', '4', 'ժամացույց'})),
EmojiAnnotations(emoji='🕟', codepoints=(128351,), name='չորսն անց կես', slug='չորսն_անց_կես', annotations=frozenset({'4:30', 'չորս', '4', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕔', codepoints=(128340,), name='ժամը հինգը', slug='ժամը_հինգը', annotations=frozenset({'ժամ', 'հինգ', '5:00', '5', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕠', codepoints=(128352,), name='հինգն անց կես', slug='հինգն_անց_կես', annotations=frozenset({'5:30', 'հինգ', '5', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕕', codepoints=(128341,), name='ժամը վեցը', slug='ժամը_վեցը', annotations=frozenset({'վեց', '6', 'ժամ', '6:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕡', codepoints=(128353,), name='վեցն անց կես', slug='վեցն_անց_կես', annotations=frozenset({'վեց', '6', '6:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕖', codepoints=(128342,), name='ժամը յոթը', slug='ժամը_յոթը', annotations=frozenset({'ժամ', 'յոթ', '7', '00', '7:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕢', codepoints=(128354,), name='յոթն անց կես', slug='յոթն_անց_կես', annotations=frozenset({'յոթ', '7', '7:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕗', codepoints=(128343,), name='ժամը ութը', slug='ժամը_ութը', annotations=frozenset({'ժամ', '8:00', '00', '8', 'ժամացույց', 'ութ'})),
EmojiAnnotations(emoji='🕣', codepoints=(128355,), name='ութն անց կես', slug='ութն_անց_կես', annotations=frozenset({'8:30', '8', 'ժամացույց', 'ութ', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕘', codepoints=(128344,), name='ժամը ինը', slug='ժամը_ինը', annotations=frozenset({'ժամ', '9:00', 'ինը', '00', '9', 'ժամացույց'})),
EmojiAnnotations(emoji='🕤', codepoints=(128356,), name='ինն անց կես', slug='ինն_անց_կես', annotations=frozenset({'ինը', '9', 'ժամացույց', 'երեսուն', '30', '9:30'})),
EmojiAnnotations(emoji='🕙', codepoints=(128345,), name='ժամը տասը', slug='ժամը_տասը', annotations=frozenset({'10', '10:00', 'ժամ', 'տասը', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕥', codepoints=(128357,), name='տասն անց կես', slug='տասն_անց_կես', annotations=frozenset({'10', 'տասը', '10:30', 'երեսուն', 'ժամացույց', '30'})),
EmojiAnnotations(emoji='🕚', codepoints=(128346,), name='ժամը տասնմեկը', slug='ժամը_տասնմեկը', annotations=frozenset({'11', 'ժամ', '00', '11:00', 'տասնմեկ', 'ժամացույց'})),
EmojiAnnotations(emoji='🕦', codepoints=(128358,), name='տասնմեկն անց կես', slug='տասնմեկն_անց_կես', annotations=frozenset({'11', '11:30', 'տասնմեկ', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🌑', codepoints=(127761,), name='նորալուսին', slug='նորալուսին', annotations=frozenset({'եղանակ', 'մութ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌒', codepoints=(127762,), name='աճող մահիկ', slug='աճող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'մահիկ', 'աճող'})),
EmojiAnnotations(emoji='🌓', codepoints=(127763,), name='լուսինն առաջին քառորդում', slug='լուսինն_առաջին_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌔', codepoints=(127764,), name='աճող ուռուցիկ լուսին', slug='աճող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'եղանակ', 'աճող'})),
EmojiAnnotations(emoji='🌕', codepoints=(127765,), name='լիալուսին', slug='լիալուսին', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌖', codepoints=(127766,), name='նվազող ուռուցիկ լուսին', slug='նվազող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'նվազող', 'եղանակ'})),
EmojiAnnotations(emoji='🌗', codepoints=(127767,), name='լուսինը երկրորդ քառորդում', slug='լուսինը_երկրորդ_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌘', codepoints=(127768,), name='նվազող մահիկ', slug='նվազող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'նվազող', 'մահիկ'})),
EmojiAnnotations(emoji='🌙', codepoints=(127769,), name='մահիկ', slug='մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌚', codepoints=(127770,), name='դեմքով նորալուսին', slug='դեմքով_նորալուսին', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌛', codepoints=(127771,), name='լուսինն առաջին քառորդում դեմքով', slug='լուսինն_առաջին_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='🌜', codepoints=(127772,), name='լուսինը երկրորդ քառորդում դեմքով', slug='լուսինը_երկրորդ_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='\U0001f321', codepoints=(127777,), name='ջերմաչափ', slug='ջերմաչափ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='☀', codepoints=(9728,), name='արև', slug='արև', annotations=frozenset({'եղանակ', 'տիեզերք', 'արևոտ', 'պայծառ', 'ճառագայթներ'})),
EmojiAnnotations(emoji='🌝', codepoints=(127773,), name='դեմքով լիալուսին', slug='դեմքով_լիալուսին', annotations=frozenset({'տիեզերք', 'լուսին', 'լիալուսին', 'պայծառ', 'դեմք', 'եղանակ'})),
EmojiAnnotations(emoji='🌞', codepoints=(127774,), name='դեմքով արև', slug='դեմքով_արև', annotations=frozenset({'եղանակ', 'դեմք', 'տիեզերք', 'արև', 'պայծառ'})),
EmojiAnnotations(emoji='⭐', codepoints=(11088,), name='սպիտակավուն աստղ', slug='սպիտակավուն_աստղ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='🌟', codepoints=(127775,), name='փայլող աստղ', slug='փայլող_աստղ', annotations=frozenset({'փայլող', 'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🌠', codepoints=(127776,), name='ընկնող աստղ', slug='ընկնող_աստղ', annotations=frozenset({'տիեզերք', 'աստղ', 'ընկնող'})),
EmojiAnnotations(emoji='☁', codepoints=(9729,), name='ամպ', slug='ամպ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='⛅', codepoints=(9925,), name='արև ամպի հետևում', slug='արև_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='⛈', codepoints=(9928,), name='կայծակով և անձրևով ամպ', slug='կայծակով_և_անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև', 'ամպրոպ'})),
EmojiAnnotations(emoji='\U0001f324', codepoints=(127780,), name='արև փոքր ամպի հետևում', slug='արև_փոքր_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f325', codepoints=(127781,), name='արև մեծ ամպի հետևում', slug='արև_մեծ_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f326', codepoints=(127782,), name='արև անձրևով ամպի հետևում', slug='արև_անձրևով_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f327', codepoints=(127783,), name='անձրևով ամպ', slug='անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f328', codepoints=(127784,), name='ձյունով ամպ', slug='ձյունով_ամպ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f329', codepoints=(127785,), name='կայծակով ամպ', slug='կայծակով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'կայծակ'})),
EmojiAnnotations(emoji='\U0001f32a', codepoints=(127786,), name='պտտահողմ', slug='պտտահողմ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32b', codepoints=(127787,), name='մառախուղ', slug='մառախուղ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32c', codepoints=(127788,), name='քամու երես', slug='քամու_երես', annotations=frozenset({'եղանակ', 'դեմք', 'քամի', 'փչել', 'ամպ'})),
EmojiAnnotations(emoji='🌀', codepoints=(127744,), name='ցիկլոն', slug='ցիկլոն', annotations=frozenset({'եղանակ', 'պտտվող', 'թայֆուն'})),
EmojiAnnotations(emoji='🌈', codepoints=(127752,), name='ծիածան', slug='ծիածան', annotations=frozenset({'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='🌂', codepoints=(127746,), name='փակ անձրևանոց', slug='փակ_անձրևանոց', annotations=frozenset({'հագուստ', 'անձրևանոց', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☂', codepoints=(9730,), name='անձրևանոց', slug='անձրևանոց', annotations=frozenset({'հագուստ', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☔', codepoints=(9748,), name='անձրևանոց անձրևի կաթիլներով', slug='անձրևանոց_անձրևի_կաթիլներով', annotations=frozenset({'հագուստ', 'կաթիլ', 'անձրևանոց', 'անձրև', 'եղանակ'})),
EmojiAnnotations(emoji='⛱', codepoints=(9969,), name='անձրևանոց գետնի վրա', slug='անձրևանոց_գետնի_վրա', annotations=frozenset({'անձրևանոց', 'եղանակ', 'արև', 'անձրև'})),
EmojiAnnotations(emoji='⚡', codepoints=(9889,), name='բարձր լարում', slug='բարձր_լարում', annotations=frozenset({'վտանգ', 'լարում', 'էլեկտրականություն', 'էլեկտրական', 'կայծակ'})),
EmojiAnnotations(emoji='❄', codepoints=(10052,), name='ձյան փաթիլ', slug='ձյան_փաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='☃', codepoints=(9731,), name='ձնեմարդ', slug='ձնեմարդ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='⛄', codepoints=(9924,), name='ձնեմարդ առանց ձյան', slug='ձնեմարդ_առանց_ձյան', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ձնեմարդ'})),
EmojiAnnotations(emoji='☄', codepoints=(9732,), name='գիսաստղ', slug='գիսաստղ', annotations=frozenset({'տիեզերք'})),
EmojiAnnotations(emoji='🔥', codepoints=(128293,), name='կրակ', slug='կրակ', annotations=frozenset({'գործիք', 'բոց'})),
EmojiAnnotations(emoji='💧', codepoints=(128167,), name='կաթիլ', slug='կաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='🌊', codepoints=(127754,), name='ծովի ալիք', slug='ծովի_ալիք', annotations=frozenset({'ալիք', 'եղանակ', 'օվկիանոս', 'ջուր'})),
EmojiAnnotations(emoji='🎃', codepoints=(127875,), name='ջեքի լապտեր', slug='ջեքի_լապտեր', annotations=frozenset({'լապտեր', 'տոն', 'հելոուին', 'ջեք'})),
EmojiAnnotations(emoji='🎄', codepoints=(127876,), name='տոնածառ', slug='տոնածառ', annotations=frozenset({'սուրբ ծնունդ', 'տոն', 'ծառ'})),
EmojiAnnotations(emoji='🎆', codepoints=(127878,), name='հրավառություն', slug='հրավառություն', annotations=frozenset({'տոնակատարություն'})),
EmojiAnnotations(emoji='🎇', codepoints=(127879,), name='բենգալյան կրակ', slug='բենգալյան_կրակ', annotations=frozenset({'տոնակատարություն', 'կայծ', 'հրավառություն'})),
EmojiAnnotations(emoji='✨', codepoints=(10024,), name='կայծեր', slug='կայծեր', annotations=frozenset({'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🎈', codepoints=(127880,), name='փուչիկ', slug='փուչիկ', annotations=frozenset({'տոն'})),
EmojiAnnotations(emoji='🎉', codepoints=(127881,), name='ճայթուկ', slug='ճայթուկ', annotations=frozenset({'տոն', 'երեկույթ'})),
EmojiAnnotations(emoji='🎊', codepoints=(127882,), name='կոնֆետի', slug='կոնֆետի', annotations=frozenset({'տոն', 'գունդ'})),
EmojiAnnotations(emoji='🎋', codepoints=(127883,), name='տանաբատա', slug='տանաբատա', annotations=frozenset({'դրոշակ', 'տոն', 'ճապոնական', 'ծառ'})),
EmojiAnnotations(emoji='🎌', codepoints=(127884,), name='խաչված դրոշակներ', slug='խաչված_դրոշակներ', annotations=frozenset({'տոն', 'խաչ', 'խաչված', 'ճապոնական'})),
EmojiAnnotations(emoji='🎍', codepoints=(127885,), name='բամբուկից դեկորացիա', slug='բամբուկից_դեկորացիա', annotations=frozenset({'բամբուկ', 'տոն', 'ճապոնական', 'դեկորացիա', 'բույս'})),
EmojiAnnotations(emoji='🎎', codepoints=(127886,), name='ճապոնական տիկնիկներ', slug='ճապոնական_տիկնիկներ', annotations=frozenset({'տոն', 'փառատոն', 'ճապոնական', 'տիկնիկ'})),
EmojiAnnotations(emoji='🎏', codepoints=(127887,), name='կարպերի տեսքով նավադրոշ', slug='կարպերի_տեսքով_նավադրոշ', annotations=frozenset({'տոն', 'նավադրոշ', 'կարպ'})),
EmojiAnnotations(emoji='🎐', codepoints=(127888,), name='քամու զանգակ', slug='քամու_զանգակ', annotations=frozenset({'տոն', 'քամի', 'զանգ'})),
EmojiAnnotations(emoji='🎑', codepoints=(127889,), name='լուսնի ծես', slug='լուսնի_ծես', annotations=frozenset({'տոն', 'լուսին', 'ծես'})),
EmojiAnnotations(emoji='🎀', codepoints=(127872,), name='ժապավեն', slug='ժապավեն', annotations=frozenset({'տոն', 'տոնակատարություն'})),
EmojiAnnotations(emoji='🎁', codepoints=(127873,), name='փաթեթավորված նվեր', slug='փաթեթավորված_նվեր', annotations=frozenset({'տոն', 'փաթեթավորված', 'արկղ', 'նվեր'})),
EmojiAnnotations(emoji='\U0001f396', codepoints=(127894,), name='ռազմական մեդալ', slug='ռազմական_մեդալ', annotations=frozenset({'տոն', 'ռազմական', 'մեդալ'})),
EmojiAnnotations(emoji='\U0001f397', codepoints=(127895,), name='հուշաժապավեն', slug='հուշաժապավեն', annotations=frozenset({'տոն', 'ժապավեն', 'հուշ'})),
EmojiAnnotations(emoji='\U0001f39e', codepoints=(127902,), name='տեսաժապավեն', slug='տեսաժապավեն', annotations=frozenset({'կադր', 'ժապավեն', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='\U0001f39f', codepoints=(127903,), name='մուտքի տոմս', slug='մուտքի_տոմս', annotations=frozenset({'տոմս', 'մուտք'})),
EmojiAnnotations(emoji='🎫', codepoints=(127915,), name='տոմս', slug='տոմս', annotations=frozenset({'մուտք'})),
EmojiAnnotations(emoji='⚽', codepoints=(9917,), name='ֆուտբոլի գնդակ', slug='ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='⚾', codepoints=(9918,), name='բեյսբոլի գնդակ', slug='բեյսբոլի_գնդակ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='🏀', codepoints=(127936,), name='բասկետբոլի գնդակ', slug='բասկետբոլի_գնդակ', annotations=frozenset({'գնդակ', 'բասկետբոլ'})),
EmojiAnnotations(emoji='🏈', codepoints=(127944,), name='ամերիկյան ֆուտբոլի գնդակ', slug='ամերիկյան_ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ', 'ամերիկյան'})),
EmojiAnnotations(emoji='🏉', codepoints=(127945,), name='ռեգբիի գնդակ', slug='ռեգբիի_գնդակ', annotations=frozenset({'ռեգբի', 'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='🎾', codepoints=(127934,), name='թենիսի գնդակ', slug='թենիսի_գնդակ', annotations=frozenset({'գնդակ', 'թենիս', 'մեծ'})),
EmojiAnnotations(emoji='🎱', codepoints=(127921,), name='բիլիարդ', slug='բիլիարդ', annotations=frozenset({'8', 'խաղ', '8 գնդակ', 'գնդակ', 'ութ'})),
EmojiAnnotations(emoji='🎳', codepoints=(127923,), name='բոուլինգ', slug='բոուլինգ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='⛳', codepoints=(9971,), name='գոլֆի դրոշակ', slug='գոլֆի_դրոշակ', annotations=frozenset({'գոլֆ', 'անցք'})),
EmojiAnnotations(emoji='\U0001f3cc', codepoints=(127948,), name='գոլֆ խաղացող', slug='գոլֆ_խաղացող', annotations=frozenset({'գոլֆ', 'գնդակ'})),
EmojiAnnotations(emoji='⛸', codepoints=(9976,), name='չմուշկ', slug='չմուշկ', annotations=frozenset({'սառույց'})),
EmojiAnnotations(emoji='🎣', codepoints=(127907,), name='կարթաձող', slug='կարթաձող', annotations=frozenset({'կարթ', 'ձուկ'})),
EmojiAnnotations(emoji='🎽', codepoints=(127933,), name='պտտվող շապիկ', slug='պտտվող_շապիկ', annotations=frozenset({'շապիկ', 'պտտվող', 'ժապավեն'})),
EmojiAnnotations(emoji='🎿', codepoints=(127935,), name='դահուկներ', slug='դահուկներ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='⛷', codepoints=(9975,), name='դահուկորդ', slug='դահուկորդ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='🏂', codepoints=(127938,), name='սնոուբորդիստ', slug='սնոուբորդիստ', annotations=frozenset({'ձյուն', 'դահուկ', 'սնոուբորդ'})),
EmojiAnnotations(emoji='🏄', codepoints=(127940,), name='սերֆեր', slug='սերֆեր', annotations=frozenset({'սերֆինգ'})),
EmojiAnnotations(emoji='🏇', codepoints=(127943,), name='ձիավազք', slug='ձիավազք', annotations=frozenset({'ձի', 'ժոկեյ', 'մրցարշավային ձի', 'մրցարշավ'})),
EmojiAnnotations(emoji='🏊', codepoints=(127946,), name='լողորդ', slug='լողորդ', annotations=frozenset({'լողալ'})),
EmojiAnnotations(emoji='⛹', codepoints=(9977,), name='գնդակով մարդ', slug='գնդակով_մարդ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3cb', codepoints=(127947,), name='ծանրորդ', slug='ծանրորդ', annotations=frozenset({'ծանրություն'})),
EmojiAnnotations(emoji='🚴', codepoints=(128692,), name='հեծանվորդ', slug='հեծանվորդ', annotations=frozenset({'հեծանիվ'})),
EmojiAnnotations(emoji='🚵', codepoints=(128693,), name='լեռնահեծանվորդ', slug='լեռնահեծանվորդ', annotations=frozenset({'հեծանիվ', 'լեռ', 'հեծանվորդ'})),
EmojiAnnotations(emoji='\U0001f3ce', codepoints=(127950,), name='մրցարշավային մեքենա', slug='մրցարշավային_մեքենա', annotations=frozenset({'մեքենա', 'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3cd', codepoints=(127949,), name='մոտոցիկլետ', slug='մոտոցիկլետ', annotations=frozenset({'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3c5', codepoints=(127941,), name='սպորտային մեդալ', slug='սպորտային_մեդալ', annotations=frozenset({'մեդալ'})),
EmojiAnnotations(emoji='🏆', codepoints=(127942,), name='գավաթ', slug='գավաթ', annotations=frozenset({'մրցանակ'})),
EmojiAnnotations(emoji='\U0001f3cf', codepoints=(127951,), name='կրիկետ', slug='կրիկետ', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d0', codepoints=(127952,), name='վոլեյբոլի գնդակ', slug='վոլեյբոլի_գնդակ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d1', codepoints=(127953,), name='խոտի հոկեյ', slug='խոտի_հոկեյ', annotations=frozenset({'խաղ', 'մական', 'գնդակ', 'դաշտ', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d2', codepoints=(127954,), name='մական և տափօղակ', slug='մական_և_տափօղակ', annotations=frozenset({'սառույց', 'խաղ', 'տափօղակ', 'մական', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d3', codepoints=(127955,), name='սեղանի թենիս', slug='սեղանի_թենիս', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ', 'ռակետ', 'ձեռնաթիակ'})),
EmojiAnnotations(emoji='\U0001f3f8', codepoints=(127992,), name='բադմինտոն', slug='բադմինտոն', annotations=frozenset({'փետրագնդակ', 'խաղ', 'ռակետ', 'ձեռնաթիակ', 'վոլան'})),
EmojiAnnotations(emoji='🎯', codepoints=(127919,), name='դիպուկ հարված', slug='դիպուկ_հարված', annotations=frozenset({'հարվածել', 'խաղ', 'դարթ', 'կենտրոն', 'նշանակետ', 'թիրախ'})),
EmojiAnnotations(emoji='🎮', codepoints=(127918,), name='տեսախաղ', slug='տեսախաղ', annotations=frozenset({'խաղ', 'վահանակ'})),
EmojiAnnotations(emoji='\U0001f579', codepoints=(128377,), name='ջոյսթիք', slug='ջոյսթիք', annotations=frozenset({'խաղ', 'տեսախաղ'})),
EmojiAnnotations(emoji='🎲', codepoints=(127922,), name='զառ', slug='զառ', annotations=frozenset({'խաղ'})),
EmojiAnnotations(emoji='♠', codepoints=(9824,), name='ղառ', slug='ղառ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♥', codepoints=(9829,), name='սիրտ', slug='սիրտ', annotations=frozenset({'թղթախաղ', 'խաղ', 'սրտեր'})),
EmojiAnnotations(emoji='♦', codepoints=(9830,), name='քյափ', slug='քյափ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♣', codepoints=(9827,), name='խաչ', slug='խաչ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='🃏', codepoints=(127183,), name='ջոկեր', slug='ջոկեր', annotations=frozenset({'թղթախաղ', 'խաղ', 'խաղալ'})),
EmojiAnnotations(emoji='🀄', codepoints=(126980,), name='մաջոնգի կարմիր վիշապ', slug='մաջոնգի_կարմիր_վիշապ', annotations=frozenset({'խաղ', 'մաջոնգ', 'կարմիր'})),
EmojiAnnotations(emoji='🎴', codepoints=(127924,), name='ծաղկի խաղաթղթեր', slug='ծաղկի_խաղաթղթեր', annotations=frozenset({'թղթախաղ', 'ծաղիկ', 'խաղ', 'խաղալ', 'ճապոնական'})),
EmojiAnnotations(emoji='🔇', codepoints=(128263,), name='բարձրախոսն անջատված է', slug='բարձրախոսն_անջատված_է', annotations=frozenset({'լուռ', 'բարձրախոս', 'անջատել ձայնը', 'հանգիստ', 'ձայն'})),
EmojiAnnotations(emoji='🔈', codepoints=(128264,), name='բարձրախոս', slug='բարձրախոս', annotations=frozenset({'ձայնի ուժգնություն', 'ձայն'})),
EmojiAnnotations(emoji='🔉', codepoints=(128265,), name='բարձրախոսը միացված է', slug='բարձրախոսը_միացված_է', annotations=frozenset({'ալիք', 'ցածր', 'բարձրախոս', 'ձայն'})),
EmojiAnnotations(emoji='🔊', codepoints=(128266,), name='բարձրախոսի ձայնը բարձր է', slug='բարձրախոսի_ձայնը_բարձր_է', annotations=frozenset({'բարձր', 'բարձրաձայն', 'երեք', 'ձայն', '3', 'բարձրախոս'})),
EmojiAnnotations(emoji='📢', codepoints=(128226,), name='մեծ բարձրախոս', slug='մեծ_բարձրախոս', annotations=frozenset({'բարձրաձայն', 'հասարակական'})),
EmojiAnnotations(emoji='📯', codepoints=(128239,), name='փոստային եղջյուր', slug='փոստային_եղջյուր', annotations=frozenset({'եղջյուր', 'փոստ', 'փոստային'})),
EmojiAnnotations(emoji='🔕', codepoints=(128277,), name='զանգակ շեղ գծիկով', slug='զանգակ_շեղ_գծիկով', annotations=frozenset({'զանգակ', 'հանգիստ', 'ոչ', 'արգելված', 'լուռ', 'անջատել ձայնը'})),
EmojiAnnotations(emoji='🎼', codepoints=(127932,), name='սոլի բանալի', slug='սոլի_բանալի', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎵', codepoints=(127925,), name='նոտա', slug='նոտա', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎶', codepoints=(127926,), name='նոտաներ', slug='նոտաներ', annotations=frozenset({'նոտա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f399', codepoints=(127897,), name='ստուդիայի խոսափող', slug='ստուդիայի_խոսափող', annotations=frozenset({'խոսափող', 'ստուդիա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39a', codepoints=(127898,), name='ձայնի բարձրության սահոց', slug='ձայնի_բարձրության_սահոց', annotations=frozenset({'մակարդակ', 'սահոց', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39b', codepoints=(127899,), name='կառավարման կոճակներ', slug='կառավարման_կոճակներ', annotations=frozenset({'կոճակներ', 'կառավարել', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎤', codepoints=(127908,), name='խոսափող', slug='խոսափող', annotations=frozenset({'կարաոկե'})),
EmojiAnnotations(emoji='🎷', codepoints=(127927,), name='սաքսոֆոն', slug='սաքսոֆոն', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎸', codepoints=(127928,), name='կիթառ', slug='կիթառ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎹', codepoints=(127929,), name='երաժշտական ստեղնաշար', slug='երաժշտական_ստեղնաշար', annotations=frozenset({'գործիք', 'ստեղնաշար', 'դաշնամուր', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎺', codepoints=(127930,), name='շեփոր', slug='շեփոր', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎻', codepoints=(127931,), name='ջութակ', slug='ջութակ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='📱', codepoints=(128241,), name='բջջային հեռախոս', slug='բջջային_հեռախոս', annotations=frozenset({'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📲', codepoints=(128242,), name='բջջային հեռախոս սլաքով', slug='բջջային_հեռախոս_սլաքով', annotations=frozenset({'հեռախոս', 'զանգել', 'սլաք', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📞', codepoints=(128222,), name='հեռախոսի լսափող', slug='հեռախոսի_լսափող', annotations=frozenset({'լսափող', 'հեռախոս'})),
EmojiAnnotations(emoji='📠', codepoints=(128224,), name='ֆաքսի մեքենա', slug='ֆաքսի_մեքենա', annotations=frozenset({'ֆաքս'})),
EmojiAnnotations(emoji='🔌', codepoints=(128268,), name='էլեկտրական խրոց', slug='էլեկտրական_խրոց', annotations=frozenset({'էլեկտրականություն', 'էլեկտրական', 'խրոց'})),
EmojiAnnotations(emoji='💻', codepoints=(128187,), name='նոթբուք', slug='նոթբուք', annotations=frozenset({'համակարգիչ', 'անձնական'})),
EmojiAnnotations(emoji='\U0001f5a8', codepoints=(128424,), name='տպիչ', slug='տպիչ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='⌨', codepoints=(9000,), name='ստեղնաշար', slug='ստեղնաշար', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='\U0001f5b1', codepoints=(128433,), name='համակարգչի մկնիկ', slug='համակարգչի_մկնիկ', annotations=frozenset({'համակարգիչ', 'մկնիկ', 'կոճակ', 'երեք', '3'})),
EmojiAnnotations(emoji='\U0001f5b2', codepoints=(128434,), name='թրեքբոլ', slug='թրեքբոլ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='💽', codepoints=(128189,), name='մինի սկավառակ', slug='մինի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'օպտիկական'})),
EmojiAnnotations(emoji='💾', codepoints=(128190,), name='ֆլոպի սկավառակ', slug='ֆլոպի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'ֆլոպի'})),
EmojiAnnotations(emoji='💿', codepoints=(128191,), name='օպտիկական սկավառակ', slug='օպտիկական_սկավառակ', annotations=frozenset({'օպտիկական', 'dvd', 'համակարգիչ', 'blu-ray', 'cd', 'սկավառակ'})),
EmojiAnnotations(emoji='📀', codepoints=(128192,), name='dvd', slug='dvd', annotations=frozenset({'համակարգիչ', 'cd', 'սկավառակ', 'օպտիկական', 'blu-ray'})),
EmojiAnnotations(emoji='🎥', codepoints=(127909,), name='ժապավենային տեսախցիկ', slug='ժապավենային_տեսախցիկ', annotations=frozenset({'տեսախցիկ', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='🎬', codepoints=(127916,), name='կինոդուբլների համարացույց', slug='կինոդուբլների_համարացույց', annotations=frozenset({'ֆիլմ', 'կինոդուբլ'})),
EmojiAnnotations(emoji='\U0001f4fd', codepoints=(128253,), name='ժապավենային պրոյեկտոր', slug='ժապավենային_պրոյեկտոր', annotations=frozenset({'պրոյեկտոր', 'ժապավեն', 'կինո', 'ֆիլմ', 'վիդեո'})),
EmojiAnnotations(emoji='📺', codepoints=(128250,), name='հեռուստացույց', slug='հեռուստացույց', annotations=frozenset({'tv', 'վիդեո'})),
EmojiAnnotations(emoji='📷', codepoints=(128247,), name='ֆոտոապարատ', slug='ֆոտոապարատ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='\U0001f4f8', codepoints=(128248,), name='ֆոտոապարատ լուսաթարթիչով', slug='ֆոտոապարատ_լուսաթարթիչով', annotations=frozenset({'լուսաթարթիչ', 'ֆոտոապարատ', 'վիդեո'})),
EmojiAnnotations(emoji='📹', codepoints=(128249,), name='տեսախցիկ', slug='տեսախցիկ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='📼', codepoints=(128252,), name='տեսաերիզ', slug='տեսաերիզ', annotations=frozenset({'երիզ', 'vhs', 'վիդեո'})),
EmojiAnnotations(emoji='🔍', codepoints=(128269,), name='ձախ ուղղված խոշորացույց', slug='ձախ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔎', codepoints=(128270,), name='աջ ուղղված խոշորացույց', slug='աջ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔬', codepoints=(128300,), name='մանրադիտակ', slug='մանրադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔭', codepoints=(128301,), name='հեռադիտակ', slug='հեռադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='📡', codepoints=(128225,), name='արբանյակային ալեհավաք', slug='արբանյակային_ալեհավաք', annotations=frozenset({'ափսե', 'արբանյակ', 'ալեհավաք'})),
EmojiAnnotations(emoji='\U0001f56f', codepoints=(128367,), name='մոմ', slug='մոմ', annotations=frozenset({'լույս'})),
EmojiAnnotations(emoji='💡', codepoints=(128161,), name='էլեկտրական լամպ', slug='էլեկտրական_լամպ', annotations=frozenset({'գաղափար', 'լամպ', 'էլեկտրական', 'լույս', 'կոմիքս'})),
EmojiAnnotations(emoji='🔦', codepoints=(128294,), name='գրպանի լապտեր', slug='գրպանի_լապտեր', annotations=frozenset({'գործիք', 'լապտեր', 'էլեկտրական', 'լույս'})),
EmojiAnnotations(emoji='🏮', codepoints=(127982,), name='թղթե կարմիր լապտեր', slug='թղթե_կարմիր_լապտեր', annotations=frozenset({'լապտեր', 'բար', 'լույս', 'կարմիր', 'ճապոնական'})),
EmojiAnnotations(emoji='📔', codepoints=(128212,), name='ձևավոր կազմով տետր', slug='ձևավոր_կազմով_տետր', annotations=frozenset({'գիրք', 'նոթատետր', 'ձևավորված', 'կազմ'})),
EmojiAnnotations(emoji='📕', codepoints=(128213,), name='փակված գիրք', slug='փակված_գիրք', annotations=frozenset({'գիրք', 'փակված'})),
EmojiAnnotations(emoji='📖', codepoints=(128214,), name='բացված գիրք', slug='բացված_գիրք', annotations=frozenset({'գիրք', 'բացված'})),
EmojiAnnotations(emoji='📗', codepoints=(128215,), name='կանաչ գիրք', slug='կանաչ_գիրք', annotations=frozenset({'գիրք', 'կանաչ'})),
EmojiAnnotations(emoji='📘', codepoints=(128216,), name='կապույտ գիրք', slug='կապույտ_գիրք', annotations=frozenset({'գիրք', 'կապույտ'})),
EmojiAnnotations(emoji='📙', codepoints=(128217,), name='նարնջագույն գիրք', slug='նարնջագույն_գիրք', annotations=frozenset({'գիրք', 'նարնջագույն'})),
EmojiAnnotations(emoji='📚', codepoints=(128218,), name='գրքեր', slug='գրքեր', annotations=frozenset({'գիրք'})),
EmojiAnnotations(emoji='📒', codepoints=(128210,), name='հաշվապահական մատյան', slug='հաշվապահական_մատյան', annotations=frozenset({'նոթատետր'})),
EmojiAnnotations(emoji='📃', codepoints=(128195,), name='կլորացած էջ', slug='կլորացած_էջ', annotations=frozenset({'կլորացած', 'էջ', 'փաստաթուղթ'})),
EmojiAnnotations(emoji='📜', codepoints=(128220,), name='գալարաթուղթ', slug='գալարաթուղթ', annotations=frozenset({'թուղթ'})),
EmojiAnnotations(emoji='📄', codepoints=(128196,), name='էջ', slug='էջ', annotations=frozenset({'փաստաթութղ'})),
EmojiAnnotations(emoji='📰', codepoints=(128240,), name='լրագիր', slug='լրագիր', annotations=frozenset({'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='\U0001f5de', codepoints=(128478,), name='կլորացրած լրագիր', slug='կլորացրած_լրագիր', annotations=frozenset({'լրագիր', 'կլորացրած', 'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='📑', codepoints=(128209,), name='էջանիշ ներդիրներ', slug='էջանիշ_ներդիրներ', annotations=frozenset({'նշել', 'էջանիշ', 'ներդիր', 'նշիչ'})),
EmojiAnnotations(emoji='🔖', codepoints=(128278,), name='էջանիշ', slug='էջանիշ', annotations=frozenset({'նշել'})),
EmojiAnnotations(emoji='💰', codepoints=(128176,), name='փողի պարկ', slug='փողի_պարկ', annotations=frozenset({'դոլար', 'փող', 'պարկ'})),
EmojiAnnotations(emoji='💴', codepoints=(128180,), name='իեն թղթադրամ', slug='իեն_թղթադրամ', annotations=frozenset({'բանկ', 'իեն', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💵', codepoints=(128181,), name='դոլար թղթադրամ', slug='դոլար_թղթադրամ', annotations=frozenset({'բանկ', 'դոլար', 'տարադրամ', 'փող', 'թղթադրամ'})),
EmojiAnnotations(emoji='💶', codepoints=(128182,), name='եվրո թղթադրամ', slug='եվրո_թղթադրամ', annotations=frozenset({'բանկ', 'եվրո', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💷', codepoints=(128183,), name='ֆունտ թղթադրամ', slug='ֆունտ_թղթադրամ', annotations=frozenset({'բանկ', 'փող', 'տարադրամ', 'ֆունտ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💸', codepoints=(128184,), name='փող թևերով', slug='փող_թևերով', annotations=frozenset({'թղթադրամ', 'դոլար', 'բանկ', 'փող', 'թռչել', 'թևեր'})),
EmojiAnnotations(emoji='💳', codepoints=(128179,), name='պլաստիկ քարտ', slug='պլաստիկ_քարտ', annotations=frozenset({'բանկ', 'վարկ', 'փող', 'քարտ'})),
EmojiAnnotations(emoji='💹', codepoints=(128185,), name='աճող դիագրամ իենով', slug='աճող_դիագրամ_իենով', annotations=frozenset({'իեն', 'վերև', 'միտում', 'բանկ', 'փող', 'տարրադրամ', 'գրաֆիկ', 'շուկա', 'բարձրանալ', 'դիագրամ', 'աճ'})),
EmojiAnnotations(emoji='✉', codepoints=(9993,), name='ծրար', slug='ծրար', annotations=frozenset({'էլփոտ', 'նամակ'})),
EmojiAnnotations(emoji='📧', codepoints=(128231,), name='էլեկտրոնային նամակ', slug='էլեկտրոնային_նամակ', annotations=frozenset({'փոստ', 'նամակ', 'էլփոստ'})),
EmojiAnnotations(emoji='📨', codepoints=(128232,), name='ստացվող ծրար', slug='ստացվող_ծրար', annotations=frozenset({'փոստ', 'ստանալ', 'ծրար', 'նամակ', 'էլփոստ', 'ստացվող'})),
EmojiAnnotations(emoji='📩', codepoints=(128233,), name='ծրար սլաքով', slug='ծրար_սլաքով', annotations=frozenset({'փոստ', 'ուղարկված', 'ծրար', 'նամակ', 'էլփոստ', 'ուղարկվող', 'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='📤', codepoints=(128228,), name='ելքի արկղ', slug='ելքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'ուղարկված', 'արկղ'})),
EmojiAnnotations(emoji='📥', codepoints=(128229,), name='մուտքի արկղ', slug='մուտքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'արկղ', 'ստանալ'})),
EmojiAnnotations(emoji='📦', codepoints=(128230,), name='ծանրոց', slug='ծանրոց', annotations=frozenset({'արկղ'})),
EmojiAnnotations(emoji='📫', codepoints=(128235,), name='փակ փոստարկղ բարձրացված դրոշակով', slug='փակ_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📪', codepoints=(128234,), name='փակ փոստարկղ իջեցված դրոշակով', slug='փակ_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📬', codepoints=(128236,), name='բաց փոստարկղ բարձրացված դրոշակով', slug='բաց_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📭', codepoints=(128237,), name='բաց փոստարկղ իջեցված դրոշակով', slug='բաց_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📮', codepoints=(128238,), name='փոստատուփ', slug='փոստատուփ', annotations=frozenset({'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='\U0001f5f3', codepoints=(128499,), name='քվեատուփ քվեաթերթիկով', slug='քվեատուփ_քվեաթերթիկով', annotations=frozenset({'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✒', codepoints=(10002,), name='սև գրչածայր', slug='սև_գրչածայր', annotations=frozenset({'գրչածայր', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58b', codepoints=(128395,), name='ինքնահոս գրիչ', slug='ինքնահոս_գրիչ', annotations=frozenset({'ինքնահոս', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58a', codepoints=(128394,), name='գրիչ', slug='գրիչ', annotations=frozenset({'գնդիկավոր գրիչ'})),
EmojiAnnotations(emoji='\U0001f58c', codepoints=(128396,), name='վրձին', slug='վրձին', annotations=frozenset({'ներկել', 'նկարել'})),
EmojiAnnotations(emoji='\U0001f58d', codepoints=(128397,), name='մոմամատիտ', slug='մոմամատիտ', annotations=frozenset({'գունավոր մատիտ'})),
EmojiAnnotations(emoji='📝', codepoints=(128221,), name='հուշաթերթ', slug='հուշաթերթ', annotations=frozenset({'մատիտ'})),
EmojiAnnotations(emoji='📁', codepoints=(128193,), name='թղթապանակ', slug='թղթապանակ', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='📂', codepoints=(128194,), name='բաց թղթապանակ', slug='բաց_թղթապանակ', annotations=frozenset({'բաց', 'թղթապանակ', 'ֆայլ'})),
EmojiAnnotations(emoji='\U0001f5c2', codepoints=(128450,), name='քարտադարանի բաժանարարներ', slug='քարտադարանի_բաժանարարներ', annotations=frozenset({'ինդեքս', 'բաժանարար', 'քարտ'})),
EmojiAnnotations(emoji='📅', codepoints=(128197,), name='օրացույց', slug='օրացույց', annotations=frozenset({'ամսաթիվ'})),
EmojiAnnotations(emoji='📆', codepoints=(128198,), name='պոկովի օրացույց', slug='պոկովի_օրացույց', annotations=frozenset({'օրացույց'})),
EmojiAnnotations(emoji='\U0001f5d2', codepoints=(128466,), name='պարուրավոր նոթատետր', slug='պարուրավոր_նոթատետր', annotations=frozenset({'գրքույկ', 'տետր', 'պարույր'})),
EmojiAnnotations(emoji='\U0001f5d3', codepoints=(128467,), name='պարուրավոր օրացույց', slug='պարուրավոր_օրացույց', annotations=frozenset({'օրացույց', 'գրքույկ', 'պարույր'})),
EmojiAnnotations(emoji='📇', codepoints=(128199,), name='քարտադարան', slug='քարտադարան', annotations=frozenset({'ինդեքս', 'քարտ'})),
EmojiAnnotations(emoji='📈', codepoints=(128200,), name='աճող դիագրամ', slug='աճող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'դիագրամ', 'վեր', 'աճ', 'միտում'})),
EmojiAnnotations(emoji='📉', codepoints=(128201,), name='նվազող դիագրամ', slug='նվազող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'ներքև', 'դիագրամ', 'միտում'})),
EmojiAnnotations(emoji='📊', codepoints=(128202,), name='գոտեձև գծապատկեր', slug='գոտեձև_գծապատկեր', annotations=frozenset({'գոտի', 'գրաֆիկ', 'դիագրամ'})),
EmojiAnnotations(emoji='📍', codepoints=(128205,), name='գնդասեղ', slug='գնդասեղ', annotations=frozenset({'քորոց'})),
EmojiAnnotations(emoji='\U0001f587', codepoints=(128391,), name='միացված սկրեպներ', slug='միացված_սկրեպներ', annotations=frozenset({'միացնել', 'սկրեպ'})),
EmojiAnnotations(emoji='📏', codepoints=(128207,), name='քանոն', slug='քանոն', annotations=frozenset({'ուղղանկյուն'})),
EmojiAnnotations(emoji='📐', codepoints=(128208,), name='եռանկյունի քանոն', slug='եռանկյունի_քանոն', annotations=frozenset({'եռանկյունի', 'քանոն'})),
EmojiAnnotations(emoji='✂', codepoints=(9986,), name='մկրատ', slug='մկրատ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5c3', codepoints=(128451,), name='քարտադարանի արկղ', slug='քարտադարանի_արկղ', annotations=frozenset({'ֆայլ', 'արկղ', 'քարտ'})),
EmojiAnnotations(emoji='\U0001f5c4', codepoints=(128452,), name='պահարան', slug='պահարան', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='🔒', codepoints=(128274,), name='կողպեք', slug='կողպեք', annotations=frozenset({'փակ'})),
EmojiAnnotations(emoji='🔓', codepoints=(128275,), name='բաց կողպեք', slug='բաց_կողպեք', annotations=frozenset({'բաց', 'ապակողպել', 'կողպեք'})),
EmojiAnnotations(emoji='🔏', codepoints=(128271,), name='կողպեք ինքնահոսով', slug='կողպեք_ինքնահոսով', annotations=frozenset({'գրչածայր', 'գաղտնիություն', 'կողպեք', 'թանաք', 'գրիչ'})),
EmojiAnnotations(emoji='🔐', codepoints=(128272,), name='փակ կողպեք բանալիով', slug='փակ_կողպեք_բանալիով', annotations=frozenset({'ապահով', 'փակ', 'բնալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔑', codepoints=(128273,), name='բանալի', slug='բանալի', annotations=frozenset({'գաղտնաբառ', 'կողպեք'})),
EmojiAnnotations(emoji='\U0001f5dd', codepoints=(128477,), name='հին բանալի', slug='հին_բանալի', annotations=frozenset({'հին', 'բանալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔨', codepoints=(128296,), name='մուրճ', slug='մուրճ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='⛏', codepoints=(9935,), name='քլունգ', slug='քլունգ', annotations=frozenset({'գործիք', 'հանք'})),
EmojiAnnotations(emoji='⚒', codepoints=(9874,), name='մուրճեր', slug='մուրճեր', annotations=frozenset({'գործիք', 'մուրճ'})),
EmojiAnnotations(emoji='\U0001f6e0', codepoints=(128736,), name='մուրճ և պտուտակաբանալի', slug='մուրճ_և_պտուտակաբանալի', annotations=frozenset({'գործիք', 'պտուտակաբանալի', 'մուրճ'})),
EmojiAnnotations(emoji='🔧', codepoints=(128295,), name='պտուտակաբանալի', slug='պտուտակաբանալի', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔩', codepoints=(128297,), name='մանեկ ու հեղույս', slug='մանեկ_ու_հեղույս', annotations=frozenset({'մանեկ', 'գործիք', 'հեղույս'})),
EmojiAnnotations(emoji='⚙', codepoints=(9881,), name='ատամնանիվ', slug='ատամնանիվ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5dc', codepoints=(128476,), name='մամլակ', slug='մամլակ', annotations=frozenset({'մամլիչ'})),
EmojiAnnotations(emoji='⚗', codepoints=(9879,), name='թորիչ', slug='թորիչ', annotations=frozenset({'քիմիա', 'գործիք'})),
EmojiAnnotations(emoji='⚖', codepoints=(9878,), name='նժարավոր կշեռք', slug='նժարավոր_կշեռք', annotations=frozenset({'հավասարակշռություն', 'կշեռք', 'գործիք', 'ծանրություն', 'արդարություն', 'կենդանակերպ'})),
EmojiAnnotations(emoji='⛓', codepoints=(9939,), name='շղթաներ', slug='շղթաներ', annotations=frozenset({'շղթա'})),
EmojiAnnotations(emoji='💉', codepoints=(128137,), name='ներարկիչ', slug='ներարկիչ', annotations=frozenset({'գործիք', 'հիվանդ', 'բժշկություն', 'ասեղ', 'բժիշկ'})),
EmojiAnnotations(emoji='💊', codepoints=(128138,), name='դեղահաբ', slug='դեղահաբ', annotations=frozenset({'հիվանդ', 'բժշկություն', 'բժիշկ'})),
EmojiAnnotations(emoji='\U0001f5e1', codepoints=(128481,), name='դաշույն', slug='դաշույն', annotations=frozenset({'զենք', 'դանակ'})),
EmojiAnnotations(emoji='🔪', codepoints=(128298,), name='խոհանոցային դանակ', slug='խոհանոցային_դանակ', annotations=frozenset({'գործիք', 'եփել', 'խոհարար', 'դանակ', 'զենք'})),
EmojiAnnotations(emoji='⚔', codepoints=(9876,), name='խաչված սրեր', slug='խաչված_սրեր', annotations=frozenset({'սրեր', 'խաչված', 'զենք'})),
EmojiAnnotations(emoji='🔫', codepoints=(128299,), name='ատրճանակ', slug='ատրճանակ', annotations=frozenset({'գործիք', 'զենք'})),
EmojiAnnotations(emoji='\U0001f6e1', codepoints=(128737,), name='վահան', slug='վահան', annotations=frozenset({'զենք'})),
EmojiAnnotations(emoji='\U0001f3f9', codepoints=(127993,), name='նետ ու աղեղ', slug='նետ_ու_աղեղ', annotations=frozenset({'գործիք', 'նետ', 'աղեղնավոր', 'զենք', 'աղեղ'})),
EmojiAnnotations(emoji='🏁', codepoints=(127937,), name='վանդակավոր դրոշ', slug='վանդակավոր_դրոշ', annotations=frozenset({'մրցարշավ', 'վանդակավոր'})),
EmojiAnnotations(emoji='\U0001f3f3', codepoints=(127987,), name='ծածանվող սպիտակ դրոշ', slug='ծածանվող_սպիտակ_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='\U0001f3f4', codepoints=(127988,), name='ծածանվող սև դրոշ', slug='ծածանվող_սև_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='🚩', codepoints=(128681,), name='եռանկյունի դրոշ', slug='եռանկյունի_դրոշ', annotations=frozenset({'փոստ'})),
EmojiAnnotations(emoji='⚰', codepoints=(9904,), name='դագաղ', slug='դագաղ', annotations=frozenset({'մահ'})),
EmojiAnnotations(emoji='⚱', codepoints=(9905,), name='աճյունասափոր', slug='աճյունասափոր', annotations=frozenset({'հուղարկավորություն', 'մահ'})),
EmojiAnnotations(emoji='🗿', codepoints=(128511,), name='մոաի', slug='մոաի', annotations=frozenset({'դեմք', 'մոայի', 'արձան'})),
EmojiAnnotations(emoji='\U0001f6e2', codepoints=(128738,), name='նավթի տակառ', slug='նավթի_տակառ', annotations=frozenset({'տակառ', 'նավթ'})),
EmojiAnnotations(emoji='🔮', codepoints=(128302,), name='բյուրեղյա գունդ', slug='բյուրեղյա_գունդ', annotations=frozenset({'բյուրեղ', 'բախտ', 'գործիք', 'հեքիաթ', 'ֆանտազիա', 'գունդ'})),
EmojiAnnotations(emoji='🏧', codepoints=(127975,), name='բանկոմատի նշան', slug='բանկոմատի_նշան', annotations=frozenset({'բանկ', 'գանձապահ', 'atm', 'բանկոմատ'})),
EmojiAnnotations(emoji='🚮', codepoints=(128686,), name='աղբամանի նշան', slug='աղբամանի_նշան', annotations=frozenset({'աղբ', 'աղբարկղ'})),
EmojiAnnotations(emoji='🚰', codepoints=(128688,), name='խմելու ջուր', slug='խմելու_ջուր', annotations=frozenset({'խմելու', 'խմել', 'ջուր'})),
EmojiAnnotations(emoji='♿', codepoints=(9855,), name='անվասայլակ', slug='անվասայլակ', annotations=frozenset({'մատչելիություն'})),
EmojiAnnotations(emoji='🚹', codepoints=(128697,), name='տղամարդկանց զուգարան', slug='տղամարդկանց_զուգարան', annotations=frozenset({'wc', 'տղամարդ', 'զուգարան'})),
EmojiAnnotations(emoji='🚺', codepoints=(128698,), name='կանանց զուգարան', slug='կանանց_զուգարան', annotations=frozenset({'wc', 'կին', 'զուգարան'})),
EmojiAnnotations(emoji='🚻', codepoints=(128699,), name='ընդհանուր զուգարան', slug='ընդհանուր_զուգարան', annotations=frozenset({'wc', 'զուգարան'})),
EmojiAnnotations(emoji='🚼', codepoints=(128700,), name='նորածնի նշան', slug='նորածնի_նշան', annotations=frozenset({'նորածին', 'փոխել'})),
EmojiAnnotations(emoji='🚾', codepoints=(128702,), name='զուգարան', slug='զուգարան', annotations=frozenset({'wc', 'ջուր'})),
EmojiAnnotations(emoji='🛂', codepoints=(128706,), name='անձնագրային ստուգում', slug='անձնագրային_ստուգում', annotations=frozenset({'անձնագիր', 'ստուգում'})),
EmojiAnnotations(emoji='🛄', codepoints=(128708,), name='ուղեբեռի վերաբերյալ բողոք', slug='ուղեբեռի_վերաբերյալ_բողոք', annotations=frozenset({'ուղեբեռ', 'բողոք'})),
EmojiAnnotations(emoji='🛅', codepoints=(128709,), name='ուղեբեռ պահախցում', slug='ուղեբեռ_պահախցում', annotations=frozenset({'ուղեբեռ', 'բեռ', 'պահարան'})),
EmojiAnnotations(emoji='🚸', codepoints=(128696,), name='ճանապարհը հատող երեխաներ', slug='ճանապարհը_հատող_երեխաներ', annotations=frozenset({'երեխա', 'երթևեկություն', 'հատող', 'հետիոտն'})),
EmojiAnnotations(emoji='⛔', codepoints=(9940,), name='մուտք չկա', slug='մուտք_չկա', annotations=frozenset({'ոչ', 'արգելված', 'երթևեկություն', 'մուտք'})),
EmojiAnnotations(emoji='🚫', codepoints=(128683,), name='արգելված է', slug='արգելված_է', annotations=frozenset({'ոչ', 'արգելված', 'մուտք'})),
EmojiAnnotations(emoji='🚳', codepoints=(128691,), name='հեծանիվների մուտքն արգելված է', slug='հեծանիվների_մուտքն_արգելված_է', annotations=frozenset({'փոխադրամիջոց', 'արգելված', 'հեծանիվ', 'ոչ'})),
EmojiAnnotations(emoji='🚭', codepoints=(128685,), name='չծխել', slug='չծխել', annotations=frozenset({'ծխել', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚯', codepoints=(128687,), name='չաղտոտել', slug='չաղտոտել', annotations=frozenset({'աղբ', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚱', codepoints=(128689,), name='խմելու ջուր չէ', slug='խմելու_ջուր_չէ', annotations=frozenset({'արգելված', 'խմելու', 'խմել', 'ջուր', 'ոչ'})),
EmojiAnnotations(emoji='🚷', codepoints=(128695,), name='հետիոտնների մուտքն արգելված է', slug='հետիոտնների_մուտքն_արգելված_է', annotations=frozenset({'հետիոտն', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='⬆', codepoints=(11014,), name='վերև սլաք', slug='վերև_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'հյուսիս'})),
EmojiAnnotations(emoji='↗', codepoints=(8599,), name='վերև աջ սլաք', slug='վերև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևելք'})),
EmojiAnnotations(emoji='➡', codepoints=(10145,), name='աջ սլաք', slug='աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'արևելք'})),
EmojiAnnotations(emoji='↘', codepoints=(8600,), name='ներքև աջ սլաք', slug='ներքև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հարավ-արևելք'})),
EmojiAnnotations(emoji='⬇', codepoints=(11015,), name='ներքև սլաք', slug='ներքև_սլաք', annotations=frozenset({'ներքև', 'ուղղություն', 'սլաք', 'գլխավոր', 'հարավ'})),
EmojiAnnotations(emoji='↙', codepoints=(8601,), name='ներքև ձախ սլաք', slug='ներքև_ձախ_սլաք', annotations=frozenset({'հարավ-արևմուտք', 'ուղղություն', 'սլաք'})),
EmojiAnnotations(emoji='⬅', codepoints=(11013,), name='ձախ սլաք', slug='ձախ_սլաք', annotations=frozenset({'արևմուտք', 'ուղղություն', 'սլաք', 'գլխավոր'})),
EmojiAnnotations(emoji='↖', codepoints=(8598,), name='վերև ձախ սլաք', slug='վերև_ձախ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևմուտք'})),
EmojiAnnotations(emoji='↕', codepoints=(8597,), name='վերև-ներքև սլաք', slug='վերև_ներքև_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↔', codepoints=(8596,), name='աջ-ձախ սլաք', slug='աջ_ձախ_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↩', codepoints=(8617,), name='աջ շրջադարձի սլաք', slug='աջ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↪', codepoints=(8618,), name='ձախ շրջադարձի սլաք', slug='ձախ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤴', codepoints=(10548,), name='ձախից վերև թեքվող սլաք', slug='ձախից_վերև_թեքվող_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤵', codepoints=(10549,), name='ձախից ներքև թեքվող սլաք', slug='ձախից_ներքև_թեքվող_սլաք', annotations=frozenset({'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='🔃', codepoints=(128259,), name='ժամասլաքի ուղղությամբ ուղղահայաց սլաքներ', slug='ժամասլաքի_ուղղությամբ_ուղղահայաց_սլաքներ', annotations=frozenset({'վերաբեռնել', 'ժամասլաքի ուղղությամբ', 'սլաք'})),
EmojiAnnotations(emoji='🔄', codepoints=(128260,), name='ժամասլաքին հակառակ ուղղությամբ սլաքներով կոճակ', slug='ժամասլաքին_հակառակ_ուղղությամբ_սլաքներով_կոճակ', annotations=frozenset({'հակառակ ուղղությամբ', 'սլաք', 'ժամասլաքին հակառակ ուղղությամբ'})),
EmojiAnnotations(emoji='🔙', codepoints=(128281,), name='հետ գրությամբ սլաք', slug='հետ_գրությամբ_սլաք', annotations=frozenset({'հետ', 'սլաք'})),
EmojiAnnotations(emoji='🔚', codepoints=(128282,), name='վերջ գրությամբ սլաք', slug='վերջ_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերջ'})),
EmojiAnnotations(emoji='🔛', codepoints=(128283,), name='միացված է գրությամբ սլաք', slug='միացված_է_գրությամբ_սլաք', annotations=frozenset({'նշան', 'սլաք', 'միացված է'})),
EmojiAnnotations(emoji='🔜', codepoints=(128284,), name='շուտով գրությամբ սլաք', slug='շուտով_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'շուտով'})),
EmojiAnnotations(emoji='🔝', codepoints=(128285,), name='վերև գրությամբ սլաք', slug='վերև_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերև', 'վեր'})),
EmojiAnnotations(emoji='\U0001f6d0', codepoints=(128720,), name='աղոթատեղի', slug='աղոթատեղի', annotations=frozenset({'պաշտամունք', 'կրոն'})),
EmojiAnnotations(emoji='⚛', codepoints=(9883,), name='ատոմի նշան', slug='ատոմի_նշան', annotations=frozenset({'ատոմ', 'աթեիստ'})),
EmojiAnnotations(emoji='\U0001f549', codepoints=(128329,), name='օմ', slug='օմ', annotations=frozenset({'կրոն', 'հնդիկ'})),
EmojiAnnotations(emoji='✡', codepoints=(10017,), name='դավթի աստղ', slug='դավթի_աստղ', annotations=frozenset({'դավիթ', 'հրեա', 'հրեական', 'կրոն', 'աստղ'})),
EmojiAnnotations(emoji='☸', codepoints=(9784,), name='դհարմայի անիվ', slug='դհարմայի_անիվ', annotations=frozenset({'դհարմա', 'անիվ', 'բուդդիստ', 'կրոն'})),
EmojiAnnotations(emoji='☯', codepoints=(9775,), name='ին և յան', slug='ին_և_յան', annotations=frozenset({'յին', 'դաո', 'դաոսիստ', 'կրոն', 'յան'})),
EmojiAnnotations(emoji='✝', codepoints=(10013,), name='լատինական խաչ', slug='լատինական_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☦', codepoints=(9766,), name='ուղղափառ խաչ', slug='ուղղափառ_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☪', codepoints=(9770,), name='աստղ և մահիկ', slug='աստղ_և_մահիկ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='☮', codepoints=(9774,), name='խաղաղության նշան', slug='խաղաղության_նշան', annotations=frozenset({'խաղաղություն'})),
EmojiAnnotations(emoji='\U0001f54e', codepoints=(128334,), name='մենորա', slug='մենորա', annotations=frozenset({'մոմակալ', 'աշտանակ', 'կրոն'})),
EmojiAnnotations(emoji='🔯', codepoints=(128303,), name='կետիկով վեցթևանի աստղ', slug='կետիկով_վեցթևանի_աստղ', annotations=frozenset({'բախտ', 'աստղ'})),
EmojiAnnotations(emoji='♻', codepoints=(9851,), name='վերամշակման նշան', slug='վերամշակման_նշան', annotations=frozenset({'վերամշակել'})),
EmojiAnnotations(emoji='📛', codepoints=(128219,), name='բեյջ', slug='բեյջ', annotations=frozenset({'անուն'})),
EmojiAnnotations(emoji='🔰', codepoints=(128304,), name='սկսնակ լինելու ճապոնական նշան', slug='սկսնակ_լինելու_ճապոնական_նշան', annotations=frozenset({'հեծանակ', 'սկսնակ', 'գործիք', 'դեղին', 'տերև', 'ճապոնական', 'կանաչ'})),
EmojiAnnotations(emoji='🔱', codepoints=(128305,), name='եռաժանի խորհրդանշան', slug='եռաժանի_խորհրդանշան', annotations=frozenset({'գործիք', 'եռաժանի', 'նավ', 'խարիսխ', 'զինանշան'})),
EmojiAnnotations(emoji='⭕', codepoints=(11093,), name='մեծ թավ շրջան', slug='մեծ_թավ_շրջան', annotations=frozenset({'օ', 'շրջան'})),
EmojiAnnotations(emoji='✅', codepoints=(9989,), name='սպիտակ թավ ստուգանշան', slug='սպիտակ_թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='☑', codepoints=(9745,), name='վանդակ ստուգանշանով', slug='վանդակ_ստուգանշանով', annotations=frozenset({'նշել', 'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✔', codepoints=(10004,), name='թավ ստուգանշան', slug='թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='✖', codepoints=(10006,), name='բազմապատկման թավ նշան', slug='բազմապատկման_թավ_նշան', annotations=frozenset({'բազմապատկում', 'x', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❌', codepoints=(10060,), name='խաչի նշան', slug='խաչի_նշան', annotations=frozenset({'բազմապատկում', 'x', 'նշել', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❎', codepoints=(10062,), name='խաչի նշանով կոճակ', slug='խաչի_նշանով_կոճակ', annotations=frozenset({'նշել', 'քառակուսի'})),
EmojiAnnotations(emoji='➕', codepoints=(10133,), name='գումարման թավ նշան', slug='գումարման_թավ_նշան', annotations=frozenset({'պլյուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➖', codepoints=(10134,), name='հանման թավ նշան', slug='հանման_թավ_նշան', annotations=frozenset({'մինուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➗', codepoints=(10135,), name='բաժանման թավ նշան', slug='բաժանման_թավ_նշան', annotations=frozenset({'բաժանում', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➰', codepoints=(10160,), name='ոլորուն հանգույց', slug='ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'հանգույց'})),
EmojiAnnotations(emoji='➿', codepoints=(10175,), name='կրկնակի ոլորուն հանգույց', slug='կրկնակի_ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'կրկնակի', 'հանգույց'})),
EmojiAnnotations(emoji='〽', codepoints=(12349,), name='իորիտեն', slug='իորիտեն', annotations=frozenset({'նշել', 'մաս'})),
EmojiAnnotations(emoji='✳', codepoints=(10035,), name='ութ թևանի աստղանիշ', slug='ութ_թևանի_աստղանիշ', annotations=frozenset({'աստղանիշ'})),
EmojiAnnotations(emoji='✴', codepoints=(10036,), name='աստղիկ', slug='աստղիկ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='💱', codepoints=(128177,), name='տարադրամի փոխանակում', slug='տարադրամի_փոխանակում', annotations=frozenset({'բանկ', 'փոխանակում', 'փող', 'տարադրամ'})),
EmojiAnnotations(emoji='💲', codepoints=(128178,), name='դոլարի թավ նշան', slug='դոլարի_թավ_նշան', annotations=frozenset({'դոլար', 'տարադրամ', 'փող'})),
EmojiAnnotations(emoji='‼', codepoints=(8252,), name='կրկնակի բացականչական նշան', slug='կրկնակի_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='⁉', codepoints=(8265,), name='բացականչական հարցական նշան', slug='բացականչական_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'բացականչություն'})),
EmojiAnnotations(emoji='❓', codepoints=(10067,), name='հարցական նշան', slug='հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց'})),
EmojiAnnotations(emoji='❔', codepoints=(10068,), name='սպիտակ հարցական նշան', slug='սպիտակ_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'ուրվագծված'})),
EmojiAnnotations(emoji='❕', codepoints=(10069,), name='սպիտակ բացականչական նշան', slug='սպիտակ_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'ուրվագծված', 'բացականչություն'})),
EmojiAnnotations(emoji='❗', codepoints=(10071,), name='բացականչական նշան', slug='բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='〰', codepoints=(12336,), name='ալիքաձև գծիկ', slug='ալիքաձև_գծիկ', annotations=frozenset({'ալիքաձև', 'կետադրություն', 'գծիկ'})),
EmojiAnnotations(emoji='™', codepoints=(8482,), name='ապրանքանիշ', slug='ապրանքանիշ', annotations=frozenset({'նշան', 'tm'})),
EmojiAnnotations(emoji='♈', codepoints=(9800,), name='խոյ', slug='խոյ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♉', codepoints=(9801,), name='ցուլ', slug='ցուլ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♊', codepoints=(9802,), name='երկվորյակներ', slug='երկվորյակներ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♋', codepoints=(9803,), name='խեցգետին', slug='խեցգետին', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♌', codepoints=(9804,), name='առյուծ', slug='առյուծ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♍', codepoints=(9805,), name='կույս', slug='կույս', annotations=frozenset({'օրիորդ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♎', codepoints=(9806,), name='կշեռք', slug='կշեռք', annotations=frozenset({'արդարադատություն', 'կենդանակերպ', 'հավասարակշռություն'})),
EmojiAnnotations(emoji='♏', codepoints=(9807,), name='կարիճ', slug='կարիճ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♐', codepoints=(9808,), name='աղեղնավոր', slug='աղեղնավոր', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♑', codepoints=(9809,), name='այծեղջյուր', slug='այծեղջյուր', annotations=frozenset({'այծ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♒', codepoints=(9810,), name='ջրհոս', slug='ջրհոս', annotations=frozenset({'կենդանակերպ', 'կրող', 'ջուր'})),
EmojiAnnotations(emoji='♓', codepoints=(9811,), name='ձկներ', slug='ձկներ', annotations=frozenset({'կենդանակերպ', 'ձուկ'})),
EmojiAnnotations(emoji='⛎', codepoints=(9934,), name='օձակիր', slug='օձակիր', annotations=frozenset({'օձ', 'կենդանակերպ', 'կրող'})),
EmojiAnnotations(emoji='🔀', codepoints=(128256,), name='խառնել կատարումները կոճակ', slug='խառնել_կատարումները_կոճակ', annotations=frozenset({'խաչված', 'սլաք'})),
EmojiAnnotations(emoji='🔁', codepoints=(128257,), name='անընդհատ կրկնել կոճակ', slug='անընդհատ_կրկնել_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'կրկնել'})),
EmojiAnnotations(emoji='🔂', codepoints=(128258,), name='կրկնել մեկ անգամ կոճակ', slug='կրկնել_մեկ_անգամ_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'մեկ անգամ'})),
EmojiAnnotations(emoji='▶', codepoints=(9654,), name='նվագարկել կոճակ', slug='նվագարկել_կոճակ', annotations=frozenset({'նվագարկել', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='⏩', codepoints=(9193,), name='արագ առաջ կոճակ', slug='արագ_առաջ_կոճակ', annotations=frozenset({'արագ', 'սլաք', 'կրկնակի', 'առաջ'})),
EmojiAnnotations(emoji='⏭', codepoints=(9197,), name='հաջորդ կատարումը կոճակ', slug='հաջորդ_կատարումը_կոճակ', annotations=frozenset({'հաջորդ տեսարանը', 'եռանկյուն', 'սլաք', 'հաջորդ կատարումը'})),
EmojiAnnotations(emoji='⏯', codepoints=(9199,), name='նվագարկել կամ դադար կոճակ', slug='նվագարկել_կամ_դադար_կոճակ', annotations=frozenset({'նվագարկել', 'դադար', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='◀', codepoints=(9664,), name='հետադարձել կոճակ', slug='հետադարձել_կոճակ', annotations=frozenset({'ձախ', 'եռանկյուն', 'սլաք', 'հետադարձել'})),
EmojiAnnotations(emoji='⏪', codepoints=(9194,), name='արագ հետադարձել կոճակ', slug='արագ_հետադարձել_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի', 'հետադարձել'})),
EmojiAnnotations(emoji='⏮', codepoints=(9198,), name='վերջին կատարումը կոճակ', slug='վերջին_կատարումը_կոճակ', annotations=frozenset({'նախորդ տեսարանը', 'նախորդ կատարումը', 'սլաք', 'եռանկյուն'})),
EmojiAnnotations(emoji='🔼', codepoints=(128316,), name='վերև կոճակ', slug='վերև_կոճակ', annotations=frozenset({'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏫', codepoints=(9195,), name='արագ վերև կոճակ', slug='արագ_վերև_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='🔽', codepoints=(128317,), name='ներքև կոճակ', slug='ներքև_կոճակ', annotations=frozenset({'ներքև', 'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏬', codepoints=(9196,), name='արագ ներքև կոճակ', slug='արագ_ներքև_կոճակ', annotations=frozenset({'ներքև', 'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='\u23f8', codepoints=(9208,), name='դադար կոճակ', slug='դադար_կոճակ', annotations=frozenset({'գծեր', 'դադար', 'կրկնակի', 'ուղղահայաց'})),
EmojiAnnotations(emoji='\u23f9', codepoints=(9209,), name='ստոպ կոճակ', slug='ստոպ_կոճակ', annotations=frozenset({'ստոպ', 'քառակուսի'})),
EmojiAnnotations(emoji='\u23fa', codepoints=(9210,), name='ձայնագրել կոճակ', slug='ձայնագրել_կոճակ', annotations=frozenset({'ձայնագրել', 'շրջան'})),
EmojiAnnotations(emoji='⏏', codepoints=(9167,), name='դուրս հանել կոճակ', slug='դուրս_հանել_կոճակ', annotations=frozenset({'դուրս հանել'})),
EmojiAnnotations(emoji='🎦', codepoints=(127910,), name='կինոմատոգրաֆիա', slug='կինոմատոգրաֆիա', annotations=frozenset({'տեսախցիկ', 'ժապավեն', 'ֆիլմ'})),
EmojiAnnotations(emoji='🔅', codepoints=(128261,), name='մթեցնել կոճակ', slug='մթեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'թույլ', 'մթեցնել'})),
EmojiAnnotations(emoji='🔆', codepoints=(128262,), name='պայծառեցնել կոճակ', slug='պայծառեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'պայծառ'})),
EmojiAnnotations(emoji='📶', codepoints=(128246,), name='անտենայի գծիկներ', slug='անտենայի_գծիկներ', annotations=frozenset({'գծիկ', 'ազդանշան', 'հեռախոս', 'շարժական', 'անտենա', 'բջջային'})),
EmojiAnnotations(emoji='📵', codepoints=(128245,), name='բջջային հեռախոսներն արգելվում են', slug='բջջային_հեռախոսներն_արգելվում_են', annotations=frozenset({'ոչ', 'արգելված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📳', codepoints=(128243,), name='թրթռազանգի ռեժիմ', slug='թրթռազանգի_ռեժիմ', annotations=frozenset({'ռեժիմ', 'թրթռում', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📴', codepoints=(128244,), name='բջջայինն անջատված է', slug='բջջայինն_անջատված_է', annotations=frozenset({'անջատված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='{#⃣}', codepoints=(123, 35, 8419, 125), name='ստեղն վանդականիշ', slug='ստեղն_վանդականիշ', annotations=frozenset({'ստեղն', 'վանդականիշ', 'ֆունտ'})),
EmojiAnnotations(emoji='{*⃣}', codepoints=(123, 42, 8419, 125), name='ստեղն աստղանիշ', slug='ստեղն_աստղանիշ', annotations=frozenset({'ստեղն', 'աստղանիշ', 'աստղ'})),
EmojiAnnotations(emoji='{0⃣}', codepoints=(123, 48, 8419, 125), name='ստեղն զրո', slug='ստեղն_զրո', annotations=frozenset({'0', 'ստեղն', 'զրո'})),
EmojiAnnotations(emoji='{1⃣}', codepoints=(123, 49, 8419, 125), name='ստեղն մեկ', slug='ստեղն_մեկ', annotations=frozenset({'ստեղն', 'մեկ', '1'})),
EmojiAnnotations(emoji='{2⃣}', codepoints=(123, 50, 8419, 125), name='ստեղն երկու', slug='ստեղն_երկու', annotations=frozenset({'ստեղն', 'երկու', '2'})),
EmojiAnnotations(emoji='{3⃣}', codepoints=(123, 51, 8419, 125), name='ստեղն երեք', slug='ստեղն_երեք', annotations=frozenset({'ստեղն', 'երեք', '3'})),
EmojiAnnotations(emoji='{4⃣}', codepoints=(123, 52, 8419, 125), name='ստեղն չորս', slug='ստեղն_չորս', annotations=frozenset({'4', 'ստեղն', 'չորս'})),
EmojiAnnotations(emoji='{5⃣}', codepoints=(123, 53, 8419, 125), name='ստեղն հինգ', slug='ստեղն_հինգ', annotations=frozenset({'ստեղն', '5', 'հինգ'})),
EmojiAnnotations(emoji='{6⃣}', codepoints=(123, 54, 8419, 125), name='ստեղն վեց', slug='ստեղն_վեց', annotations=frozenset({'ստեղն', 'վեց', '6'})),
EmojiAnnotations(emoji='{7⃣}', codepoints=(123, 55, 8419, 125), name='ստեղն յոթ', slug='ստեղն_յոթ', annotations=frozenset({'7', 'ստեղն', 'յոթ'})),
EmojiAnnotations(emoji='{8⃣}', codepoints=(123, 56, 8419, 125), name='ստեղն ութ', slug='ստեղն_ութ', annotations=frozenset({'8', 'ստեղն', 'ութ'})),
EmojiAnnotations(emoji='{9⃣}', codepoints=(123, 57, 8419, 125), name='ստեղն ինը', slug='ստեղն_ինը', annotations=frozenset({'ստեղն', 'ինը', '9'})),
EmojiAnnotations(emoji='🔟', codepoints=(128287,), name='ստեղն տասը', slug='ստեղն_տասը', annotations=frozenset({'ստեղն', '10', 'տասը'})),
EmojiAnnotations(emoji='💯', codepoints=(128175,), name='հարյուր միավոր', slug='հարյուր_միավոր', annotations=frozenset({'հարյուր', 'միավոր', '100', 'ամբողջ'})),
EmojiAnnotations(emoji='🔞', codepoints=(128286,), name='տասնութից ցածր արգելվում է', slug='տասնութից_ցածր_արգելվում_է', annotations=frozenset({'18', 'ոչ', 'արգելված', 'տարիքային սահմանափակում', 'անչափահաս', 'տասնութ'})),
EmojiAnnotations(emoji='🔠', codepoints=(128288,), name='լատինատառ մեծատառ ներածում', slug='լատինատառ_մեծատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'լատինական', 'մեծատառ'})),
EmojiAnnotations(emoji='🔡', codepoints=(128289,), name='լատինատառ փոքրատառ ներածում', slug='լատինատառ_փոքրատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'abcd', 'լատինական', 'փոքրատառ'})),
EmojiAnnotations(emoji='🔢', codepoints=(128290,), name='թվերի ներածում', slug='թվերի_ներածում', annotations=frozenset({'մուտքագրել', '1234', 'թվեր'})),
EmojiAnnotations(emoji='🔣', codepoints=(128291,), name='նշանների ներածում', slug='նշանների_ներածում', annotations=frozenset({'մուտքագրել'})),
EmojiAnnotations(emoji='🔤', codepoints=(128292,), name='լատինատառ ներածում', slug='լատինատառ_ներածում', annotations=frozenset({'abc', 'այբուբեն', 'տառեր', 'մուտքագրել', 'լատինական'})),
EmojiAnnotations(emoji='🅰', codepoints=(127344,), name='էյ կոճակ', slug='էյ_կոճակ', annotations=frozenset({'a', 'արյուն'})),
EmojiAnnotations(emoji='🆎', codepoints=(127374,), name='էյ-բի կոճակ', slug='էյ_բի_կոճակ', annotations=frozenset({'արյուն', 'ab'})),
EmojiAnnotations(emoji='🅱', codepoints=(127345,), name='բի կոճակ', slug='բի_կոճակ', annotations=frozenset({'b', 'արյուն'})),
EmojiAnnotations(emoji='🆑', codepoints=(127377,), name='սի-էլ', slug='սի_էլ', annotations=frozenset({'cl'})),
EmojiAnnotations(emoji='ℹ', codepoints=(8505,), name='տեղեկատու', slug='տեղեկատու', annotations=frozenset({'i', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🆔', codepoints=(127380,), name='այ-դի', slug='այ_դի', annotations=frozenset({'ինքնություն', 'id'})),
EmojiAnnotations(emoji='Ⓜ', codepoints=(9410,), name='էմ տառը շրջանակի մեջ', slug='էմ_տառը_շրջանակի_մեջ', annotations=frozenset({'m', 'շրջան'})),
EmojiAnnotations(emoji='🆖', codepoints=(127382,), name='էն-ջի', slug='էն_ջի', annotations=frozenset({'ng'})),
EmojiAnnotations(emoji='🅾', codepoints=(127358,), name='օ կոճակ', slug='օ_կոճակ', annotations=frozenset({'o', 'արյուն'})),
EmojiAnnotations(emoji='🆗', codepoints=(127383,), name='օքեյ', slug='օքեյ', annotations=frozenset({'ok'})),
EmojiAnnotations(emoji='🅿', codepoints=(127359,), name='փի կոճակ', slug='փի_կոճակ', annotations=frozenset({'կայանատեղի'})),
EmojiAnnotations(emoji='🆘', codepoints=(127384,), name='սոս', slug='սոս', annotations=frozenset({'օգնել', 'sos'})),
EmojiAnnotations(emoji='🆙', codepoints=(127385,), name='ափ կոճակ', slug='ափ_կոճակ', annotations=frozenset({'նշան', 'up', 'վեր'})),
EmojiAnnotations(emoji='🆚', codepoints=(127386,), name='վի-էս', slug='վի_էս', annotations=frozenset({'ընդդեմ', 'vs'})),
EmojiAnnotations(emoji='🈁', codepoints=(127489,), name='կատականա կոկո', slug='կատականա_կոկո', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈂', codepoints=(127490,), name='կատականա սա', slug='կատականա_սա', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈷', codepoints=(127543,), name='լուսին գաղափարագիր', slug='լուսին_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈶', codepoints=(127542,), name='գոյ գաղափարագիր', slug='գոյ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈯', codepoints=(127535,), name='մատ գաղափարագիր', slug='մատ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉐', codepoints=(127568,), name='առավելություն գաղափարագիր շրջանակի մեջ', slug='առավելություն_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈹', codepoints=(127545,), name='բաժանել գաղափարագիր', slug='բաժանել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈚', codepoints=(127514,), name='ժխտում գաղափարագիր', slug='ժխտում_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈲', codepoints=(127538,), name='արգելել գաղափարագիր', slug='արգելել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉑', codepoints=(127569,), name='ընդունել գաղափարագիր շրջանակի մեջ', slug='ընդունել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈸', codepoints=(127544,), name='կիրառել գաղափարագիր', slug='կիրառել_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈴', codepoints=(127540,), name='միասին գաղափարագիր', slug='միասին_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈳', codepoints=(127539,), name='դատարկ գաղափարագիր', slug='դատարկ_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='㊗', codepoints=(12951,), name='շնորհավորել գաղափարագիր շրջանակի մեջ', slug='շնորհավորել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'շնորհավորանք', 'չինարեն', 'գաղափարագիր', 'չինական'})),
EmojiAnnotations(emoji='㊙', codepoints=(12953,), name='գաղտնի գաղափարագիր շրջանակի մեջ', slug='գաղտնի_գաղափարագիր_շրջանակի__մեջ', annotations=frozenset({'գաղափարագիր', 'չինարեն', 'գաղտնիք', 'չինական'})),
EmojiAnnotations(emoji='🈺', codepoints=(127546,), name='աշխատում է գաղափարագիր', slug='աշխատում_է_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈵', codepoints=(127541,), name='լիություն գաղափարագիր', slug='լիություն_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='▪', codepoints=(9642,), name='սև փոքր քառակուսի', slug='սև_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='▫', codepoints=(9643,), name='սպիտակ փոքր քառակուսի', slug='սպիտակ_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◻', codepoints=(9723,), name='սպիտակ միջին չափի քառակուսի', slug='սպիտակ_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◼', codepoints=(9724,), name='սև միջին չափի քառակուսի', slug='սև_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◽', codepoints=(9725,), name='սպիտակ միջին-փոքր քառակուսի', slug='սպիտակ_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◾', codepoints=(9726,), name='սև միջին-փոքր քառակուսի', slug='սև_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬛', codepoints=(11035,), name='սև մեծ քառակուսի', slug='սև_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬜', codepoints=(11036,), name='սպիտակ մեծ քառակուսի', slug='սպիտակ_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔶', codepoints=(128310,), name='նարնջագույն մեծ շեղանկյուն', slug='նարնջագույն_մեծ_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔷', codepoints=(128311,), name='կապույտ մեծ շեղանկյուն', slug='կապույտ_մեծ_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔸', codepoints=(128312,), name='նարնջագույն փոքր շեղանկյուն', slug='նարնջագույն_փոքր_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔹', codepoints=(128313,), name='կապույտ փոքր շեղանկյուն', slug='կապույտ_փոքր_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔺', codepoints=(128314,), name='կարմիր եռանկյունի ուղղված վերև', slug='կարմիր_եռանկյունի_ուղղված_վերև', annotations=frozenset({'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='🔻', codepoints=(128315,), name='կարմիր եռանկյունի ուղղված ներքև', slug='կարմիր_եռանկյունի_ուղղված_ներքև', annotations=frozenset({'ներքև', 'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='💠', codepoints=(128160,), name='կետով շեղանկյուն', slug='կետով_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'կոմիքս', 'շեղանկյուն', 'ներսում'})),
EmojiAnnotations(emoji='🔘', codepoints=(128280,), name='կետակոճակ', slug='կետակոճակ', annotations=frozenset({'կետ', 'կոճակ', 'երկրաչափական', 'ռադիո'})),
EmojiAnnotations(emoji='🔲', codepoints=(128306,), name='սև քառակուսի կոճակ', slug='սև_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔳', codepoints=(128307,), name='սպիտակ քառակուսի կոճակ', slug='սպիտակ_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'ուրվագծված', 'քառակուսի'})),
EmojiAnnotations(emoji='⚪', codepoints=(9898,), name='սպիտակ շրջանակ', slug='սպիտակ_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='⚫', codepoints=(9899,), name='սև շրջանակ', slug='սև_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='🔴', codepoints=(128308,), name='կարմիր շրջանակ', slug='կարմիր_շրջանակ', annotations=frozenset({'երկրաչափական', 'կարմիր', 'շրջան'})),
EmojiAnnotations(emoji='🔵', codepoints=(128309,), name='կապույտ շրջանակ', slug='կապույտ_շրջանակ', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շրջան'})),] | 154.573805 | 252 | 0.714472 | from emojitations.emojitypes import EmojiAnnotations
emoji = [
EmojiAnnotations(emoji='😀', codepoints=(128512,), name='ծիծաղող դեմք', slug='ծիծաղող_դեմք', annotations=frozenset({'դեմք', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😁', codepoints=(128513,), name='ծիծաղող դեմք ժպտացող աչքերով', slug='ծիծաղող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😂', codepoints=(128514,), name='դեմք ուրախության արցունքներով', slug='դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'դեմք', 'ծիծաղել', 'արցունք'})),
EmojiAnnotations(emoji='😃', codepoints=(128515,), name='ժպտացող դեմք բաց բերանով', slug='ժպտացող_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😄', codepoints=(128516,), name='ժպտացող դեմք բաց բերանով և ժպտացող աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ժպտացող_աչքերով', annotations=frozenset({'բաց', 'աչք', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😅', codepoints=(128517,), name='ժպտացող դեմք բաց բերանով և սառը քրտինքով', slug='ժպտացող_դեմք_բաց_բերանով_և_սառը_քրտինքով', annotations=frozenset({'բաց', 'սառը', 'դեմք', 'ժպտալ', 'քրտինք'})),
EmojiAnnotations(emoji='😆', codepoints=(128518,), name='ժպտացող դեմք բաց բերանով և ամուր փակած աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ամուր_փակած_աչքերով', annotations=frozenset({'ժպտալ', 'գոհ', 'ծիծաղել', 'դեմք', 'բաց', 'բերան'})),
EmojiAnnotations(emoji='😉', codepoints=(128521,), name='աչքով անող դեմք', slug='աչքով_անող_դեմք', annotations=frozenset({'դեմք', 'աչքով անել'})),
EmojiAnnotations(emoji='😊', codepoints=(128522,), name='ժպտացող դեմք ժպտացող աչքերով', slug='ժպտացող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'շիկնել'})),
EmojiAnnotations(emoji='😋', codepoints=(128523,), name='համեղ ուտելիք վայելող դեմք', slug='համեղ_ուտելիք_վայելող_դեմք', annotations=frozenset({'դեմք', 'վեյելել', 'ժպտալ', 'համեղ', 'նյամ'})),
EmojiAnnotations(emoji='😎', codepoints=(128526,), name='ժպտացող դեմք արևային ակնոցով', slug='ժպտացող_դեմք_արևային_ակնոցով', annotations=frozenset({'աչք', 'ակնոց', 'զիլ', 'ժպտալ', 'պայծառ', 'արևային ակնոց', 'դեմք', 'եղանակ', 'արև'})),
EmojiAnnotations(emoji='😍', codepoints=(128525,), name='ժպտացող դեմք սրտաձև աչքերով', slug='ժպտացող_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'սիրտ', 'ժպտալ', 'սեր'})),
EmojiAnnotations(emoji='😘', codepoints=(128536,), name='համբույր ուղարկող դեմք', slug='համբույր_ուղարկող_դեմք', annotations=frozenset({'դեմք', 'սիրտ', 'համբուրել'})),
EmojiAnnotations(emoji='😗', codepoints=(128535,), name='համբուրող դեմք', slug='համբուրող_դեմք', annotations=frozenset({'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='😙', codepoints=(128537,), name='համբուրող դեմք ժպտացող աչքերով', slug='համբուրող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'համբուրել', 'ժպտալ'})),
EmojiAnnotations(emoji='😚', codepoints=(128538,), name='համբուրող դեմք փակ աչքերով', slug='համբուրող_դեմք_փակ_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'փակ', 'համբուրել'})),
EmojiAnnotations(emoji='☺', codepoints=(9786,), name='ժպտացող դեմք', slug='ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ', 'անկաշկանդ'})),
EmojiAnnotations(emoji='\U0001f642', codepoints=(128578,), name='թեթևակի ժպտացող դեմք', slug='թեթևակի_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ'})),
EmojiAnnotations(emoji='\U0001f917', codepoints=(129303,), name='գրկող դեմք', slug='գրկող_դեմք', annotations=frozenset({'գրկախառնում', 'դեմք', 'գրկախառնվել'})),
EmojiAnnotations(emoji='😇', codepoints=(128519,), name='ժպտացող դեմք լուսապսակով', slug='ժպտացող_դեմք_լուսապսակով', annotations=frozenset({'անմեղ', 'լուսապսակ', 'ժպտալ', 'հրեշտակ', 'դեմք', 'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f914', codepoints=(129300,), name='մտածող դեմք', slug='մտածող_դեմք', annotations=frozenset({'մտածող', 'դեմք'})),
EmojiAnnotations(emoji='😐', codepoints=(128528,), name='չեզոք դեմք', slug='չեզոք_դեմք', annotations=frozenset({'դեմք', 'չեզոք', 'անվրդով'})),
EmojiAnnotations(emoji='😑', codepoints=(128529,), name='անհույզ դեմք', slug='անհույզ_դեմք', annotations=frozenset({'դեմք', 'ոչինչ չարտահայտող', 'անարտահայտիչ', 'առանց էմոցիաների'})),
EmojiAnnotations(emoji='😶', codepoints=(128566,), name='առանց բերանի դեմք', slug='առանց_բերանի_դեմք', annotations=frozenset({'դեմք', 'լուռ', 'բերան', 'հանգիստ'})),
EmojiAnnotations(emoji='\U0001f644', codepoints=(128580,), name='պտտվող աչքերով դեմք', slug='պտտվող_աչքերով_դեմք', annotations=frozenset({'դեմք', 'աչքեր', 'պտտվող'})),
EmojiAnnotations(emoji='😏', codepoints=(128527,), name='կեղծ ժպտացող դեմք', slug='կեղծ_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'կեղծ ժպտալ'})),
EmojiAnnotations(emoji='😣', codepoints=(128547,), name='համառող դեմք', slug='համառող_դեմք', annotations=frozenset({'դեմք', 'համառել'})),
EmojiAnnotations(emoji='😥', codepoints=(128549,), name='հիասթափված; բայց թեթևացած դեմք', slug='հիասթափված;_բայց_թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած', 'հիասթափված'})),
EmojiAnnotations(emoji='😮', codepoints=(128558,), name='բաց բերանով դեմք', slug='բաց_բերանով_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'համակրանք'})),
EmojiAnnotations(emoji='\U0001f910', codepoints=(129296,), name='ճարմանդավոր բերանով դեմք', slug='ճարմանդավոր_բերանով_դեմք', annotations=frozenset({'դեմք', 'բերան', 'ճարմանդ'})),
EmojiAnnotations(emoji='😯', codepoints=(128559,), name='սաստված դեմք', slug='սաստված_դեմք', annotations=frozenset({'զարմացած', 'դեմք', 'սաստված', 'ապշած'})),
EmojiAnnotations(emoji='😪', codepoints=(128554,), name='քնատ դեմք', slug='քնատ_դեմք', annotations=frozenset({'քնել', 'դեմք'})),
EmojiAnnotations(emoji='😫', codepoints=(128555,), name='հոգնած դեմք', slug='հոգնած_դեմք', annotations=frozenset({'դեմք', 'հոգնած'})),
EmojiAnnotations(emoji='😴', codepoints=(128564,), name='քնած դեմք', slug='քնած_դեմք', annotations=frozenset({'քնել', 'դեմք', 'խռռ'})),
EmojiAnnotations(emoji='😌', codepoints=(128524,), name='թեթևացած դեմք', slug='թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած'})),
EmojiAnnotations(emoji='\U0001f913', codepoints=(129299,), name='գերազանցիկի դեմք', slug='գերազանցիկի_դեմք', annotations=frozenset({'դեմք', 'ցնդած', 'հիմար'})),
EmojiAnnotations(emoji='😛', codepoints=(128539,), name='լեզու հանած դեմք', slug='լեզու_հանած_դեմք', annotations=frozenset({'դեմք', 'լեզու'})),
EmojiAnnotations(emoji='😜', codepoints=(128540,), name='լեզու հանած և աչքով անող դեմք', slug='լեզու_հանած_և_աչքով_անող_դեմք', annotations=frozenset({'աչք', 'դեմք', 'կատակել', 'լեզու', 'աչքով անել'})),
EmojiAnnotations(emoji='😝', codepoints=(128541,), name='լեզու հանած և ամուր փակած աչքերով դեմք', slug='լեզու_հանած_և_ամուր_փակած_աչքերով_դեմք', annotations=frozenset({'աչք', 'դեմք', 'սարսափելի', 'համ', 'լեզու'})),
EmojiAnnotations(emoji='☹', codepoints=(9785,), name='խոժոռված դեմք', slug='խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='\U0001f641', codepoints=(128577,), name='թեթևակի խոժոռված դեմք', slug='թեթևակի_խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='😒', codepoints=(128530,), name='անտրամադիր դեմք', slug='անտրամադիր_դեմք', annotations=frozenset({'դեմք', 'անտրամադիր', 'դժբախտ'})),
EmojiAnnotations(emoji='😓', codepoints=(128531,), name='սառը քրտինքով դեմք', slug='սառը_քրտինքով_դեմք', annotations=frozenset({'սառը', 'դեմք', 'քրտինք'})),
EmojiAnnotations(emoji='😔', codepoints=(128532,), name='մտածկոտ դեմք', slug='մտածկոտ_դեմք', annotations=frozenset({'դեմք', 'մռայլված', 'մտածկոտ'})),
EmojiAnnotations(emoji='😕', codepoints=(128533,), name='շփոթված դեմք', slug='շփոթված_դեմք', annotations=frozenset({'դեմք', 'շփոթված'})),
EmojiAnnotations(emoji='😖', codepoints=(128534,), name='ցնցված դեմք', slug='ցնցված_դեմք', annotations=frozenset({'դեմք', 'ցնցված'})),
EmojiAnnotations(emoji='\U0001f643', codepoints=(128579,), name='գլխնիվայր դեմք', slug='գլխնիվայր_դեմք', annotations=frozenset({'դեմք', 'գլխնիվայր'})),
EmojiAnnotations(emoji='😷', codepoints=(128567,), name='բժշկական դիմակով դեմք', slug='բժշկական_դիմակով_դեմք', annotations=frozenset({'հիվանդ', 'բժիշկ', 'սառը', 'դեմք', 'բժշկական', 'դիմակ'})),
EmojiAnnotations(emoji='\U0001f912', codepoints=(129298,), name='ջերմաչափով դեմք', slug='ջերմաչափով_դեմք', annotations=frozenset({'դեմք', 'հիվանդ', 'ջերմաչափ'})),
EmojiAnnotations(emoji='\U0001f915', codepoints=(129301,), name='գլխակապով դեմք', slug='գլխակապով_դեմք', annotations=frozenset({'դեմք', 'վիրակապ', 'վնասվածք'})),
EmojiAnnotations(emoji='\U0001f911', codepoints=(129297,), name='թղթադրամը բերանին դեմք', slug='թղթադրամը_բերանին_դեմք', annotations=frozenset({'դեմք', 'փող', 'բերան'})),
EmojiAnnotations(emoji='😲', codepoints=(128562,), name='ապշահար դեմք', slug='ապշահար_դեմք', annotations=frozenset({'դեմք', 'ցնցված', 'ապշահար', 'ամբողջովին'})),
EmojiAnnotations(emoji='😞', codepoints=(128542,), name='հիասթափված դեմք', slug='հիասթափված_դեմք', annotations=frozenset({'դեմք', 'հիասթափված'})),
EmojiAnnotations(emoji='😟', codepoints=(128543,), name='անհանգստացած դեմք', slug='անհանգստացած_դեմք', annotations=frozenset({'անհանգստացած', 'դեմք'})),
EmojiAnnotations(emoji='😤', codepoints=(128548,), name='քթից գոլորշի հանող դեմք', slug='քթից_գոլորշի_հանող_դեմք', annotations=frozenset({'դեմք', 'հաղթած', 'հաղթանակ'})),
EmojiAnnotations(emoji='😢', codepoints=(128546,), name='արտասվող դեմք', slug='արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😭', codepoints=(128557,), name='բարձրաձայն արտասվող դեմք', slug='բարձրաձայն_արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'հեկեկալ', 'արտասուք'})),
EmojiAnnotations(emoji='😦', codepoints=(128550,), name='բաց բերանով խոժոռված դեմք', slug='բաց_բերանով_խոժոռված_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'խոժոռված'})),
EmojiAnnotations(emoji='😧', codepoints=(128551,), name='վշտահար դեմք', slug='վշտահար_դեմք', annotations=frozenset({'վշտահար', 'դեմք'})),
EmojiAnnotations(emoji='😨', codepoints=(128552,), name='վախեցած դեմք', slug='վախեցած_դեմք', annotations=frozenset({'վախեցած', 'դեմք', 'սարսափած', 'վախ'})),
EmojiAnnotations(emoji='😩', codepoints=(128553,), name='ուժասպառ դեմք', slug='ուժասպառ_դեմք', annotations=frozenset({'դեմք', 'հոգնած', 'ուժասպառ'})),
EmojiAnnotations(emoji='😬', codepoints=(128556,), name='ծամածռվող դեմք', slug='ծամածռվող_դեմք', annotations=frozenset({'դեմք', 'ծամածռություն'})),
EmojiAnnotations(emoji='😰', codepoints=(128560,), name='բաց բերանով և սառը քրտինքով դեմք', slug='բաց_բերանով_և_սառը_քրտինքով_դեմք', annotations=frozenset({'հապշտապ', 'բաց', 'սառը', 'դեմք', 'կապույտ', 'բերան', 'քրտինք'})),
EmojiAnnotations(emoji='😱', codepoints=(128561,), name='վախից գոռացող դեմք', slug='վախից_գոռացող_դեմք', annotations=frozenset({'վախեցած', 'ծամել', 'սարսափած', 'վախ', 'դեմք', 'ճչալ'})),
EmojiAnnotations(emoji='😳', codepoints=(128563,), name='շիկնած դեմք', slug='շիկնած_դեմք', annotations=frozenset({'դեմք', 'հիացած', 'շիկնած'})),
EmojiAnnotations(emoji='😵', codepoints=(128565,), name='գլխապտույտ ունեցող դեմք', slug='գլխապտույտ_ունեցող_դեմք', annotations=frozenset({'դեմք', 'գլխապտույտ'})),
EmojiAnnotations(emoji='😡', codepoints=(128545,), name='դժգոհ դեմք', slug='դժգոհ_դեմք', annotations=frozenset({'բարկացած', 'դեմք', 'զայրույթ', 'կարմիր', 'խենք', 'դժգոհ'})),
EmojiAnnotations(emoji='😠', codepoints=(128544,), name='բարկացած դեմք', slug='բարկացած_դեմք', annotations=frozenset({'դեմք', 'խենք', 'բարկացած'})),
EmojiAnnotations(emoji='😈', codepoints=(128520,), name='ժպտացող դեմք եղջյուրներով', slug='ժպտացող_դեմք_եղջյուրներով', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ժպտալ', 'եղջյուրներ'})),
EmojiAnnotations(emoji='👿', codepoints=(128127,), name='սատանայի ճուտ', slug='սատանայի_ճուտ', annotations=frozenset({'դեմք', 'սատանա', 'հեքիաթ', 'ֆանտազիա', 'դև'})),
EmojiAnnotations(emoji='👹', codepoints=(128121,), name='մարդակեր հսկա', slug='մարդակեր_հսկա', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👺', codepoints=(128122,), name='չար ոգի', slug='չար_ոգի', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='💀', codepoints=(128128,), name='գանգ', slug='գանգ', annotations=frozenset({'դեմք', 'հեքիաթ', 'մարմին', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='☠', codepoints=(9760,), name='գանգ և խաչված ոսկորներ', slug='գանգ_և_խաչված_ոսկորներ', annotations=frozenset({'մարմին', 'ոսկորներ', 'գանգ', 'խաչված', 'դեմք', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='👻', codepoints=(128123,), name='ուրվական', slug='ուրվական', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'արարած', 'հրեշ'})),
EmojiAnnotations(emoji='👽', codepoints=(128125,), name='այլմոլորակային', slug='այլմոլորակային', annotations=frozenset({'տիեզերք', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👾', codepoints=(128126,), name='այլմոլորակային հրեշ', slug='այլմոլորակային_հրեշ', annotations=frozenset({'տիեզերք', 'այլմոլորակային', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='\U0001f916', codepoints=(129302,), name='ռոբոտի դեմք', slug='ռոբոտի_դեմք', annotations=frozenset({'դեմք', 'ռոբոտ', 'հրեշ'})),
EmojiAnnotations(emoji='💩', codepoints=(128169,), name='կեղտի կույտ', slug='կեղտի_կույտ', annotations=frozenset({'գոմաղբ', 'կոմիքս', 'դեմք', 'կեղտ', 'հրեշ', 'կղանք'})),
EmojiAnnotations(emoji='😺', codepoints=(128570,), name='ժպտացող կատվի դեմք բաց բերանով', slug='ժպտացող_կատվի_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'կատու', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😸', codepoints=(128568,), name='ծիծաղող կատվի դեմք ժպտացող աչքերով', slug='ծիծաղող_կատվի_դեմք_ժպտացող_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'ժպտալ', 'կանաչ'})),
EmojiAnnotations(emoji='😹', codepoints=(128569,), name='կատվի դեմք ուրախության արցունքներով', slug='կատվի_դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'կատու', 'դեմք', 'արտասուք'})),
EmojiAnnotations(emoji='😻', codepoints=(128571,), name='ժպտացող կատվի դեմք սրտաձև աչքերով', slug='ժպտացող_կատվի_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'սիրտ', 'ժպտալ', 'սեր', 'դեմք', 'կատու'})),
EmojiAnnotations(emoji='😼', codepoints=(128572,), name='կատվի դեմք ծամածռված ժպիտով', slug='կատվի_դեմք_ծամածռված_ժպիտով', annotations=frozenset({'ծամածռված', 'կատու', 'դեմք', 'հեգնական', 'ժպտալ'})),
EmojiAnnotations(emoji='😽', codepoints=(128573,), name='համբուրող կատվի դեմք փակ աչքերով', slug='համբուրող_կատվի_դեմք_փակ_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='🙀', codepoints=(128576,), name='ուժասպառ կատվի դեմք', slug='ուժասպառ_կատվի_դեմք', annotations=frozenset({'զարմացած', 'կատու', 'դեմք', 'ուժասպառ', 'օհ'})),
EmojiAnnotations(emoji='😿', codepoints=(128575,), name='արտասվող կատվի դեմք', slug='արտասվող_կատվի_դեմք', annotations=frozenset({'արտասվել', 'կատու', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😾', codepoints=(128574,), name='դժգոհ կատվի դեմք', slug='դժգոհ_կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'դժգոհ'})),
EmojiAnnotations(emoji='🙈', codepoints=(128584,), name='ոչինչ չեմ տեսնում', slug='ոչինչ_չեմ_տեսնում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'տեսնել', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙉', codepoints=(128585,), name='ոչինչ չեմ լսում', slug='ոչինչ_չեմ_լսում', annotations=frozenset({'լսել', 'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙊', codepoints=(128586,), name='ոչինչ չեմ ասում', slug='ոչինչ_չեմ_ասում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված', 'խոսալ'})),
EmojiAnnotations(emoji='👧', codepoints=(128103,), name='աղջիկ', slug='աղջիկ', annotations=frozenset({'օրիորդ', 'կենդանակերպ', 'կույս'})),
EmojiAnnotations(emoji='👴', codepoints=(128116,), name='տարեց տղամարդ', slug='տարեց_տղամարդ', annotations=frozenset({'տղամարդ', 'տարեց'})),
EmojiAnnotations(emoji='👵', codepoints=(128117,), name='տարեց կին', slug='տարեց_կին', annotations=frozenset({'տարեց', 'կին'})),
EmojiAnnotations(emoji='👮', codepoints=(128110,), name='ոստիկան', slug='ոստիկան', annotations=frozenset({'սպա', 'ոստիկանություն'})),
EmojiAnnotations(emoji='👲', codepoints=(128114,), name='չինական գլխարկով մարդ', slug='չինական_գլխարկով_մարդ', annotations=frozenset({'գլխարկ', 'մարդ', 'չինական'})),
EmojiAnnotations(emoji='👳', codepoints=(128115,), name='չալմայով մարդ', slug='չալմայով_մարդ', annotations=frozenset({'չալմա', 'մարդ'})),
EmojiAnnotations(emoji='👷', codepoints=(128119,), name='շինարար', slug='շինարար', annotations=frozenset({'գլխարկ', 'շինարարություն', 'աշխատող'})),
EmojiAnnotations(emoji='⛑', codepoints=(9937,), name='սպիտակ խաչով սաղավարտ', slug='սպիտակ_խաչով_սաղավարտ', annotations=frozenset({'գլխարկ', 'դեմք', 'խաչ', 'սաղավարտ', 'օգնություն'})),
EmojiAnnotations(emoji='👸', codepoints=(128120,), name='արքայադուստր', slug='արքայադուստր', annotations=frozenset({'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f575', codepoints=(128373,), name='խուզարկու', slug='խուզարկու', annotations=frozenset({'լրտես'})),
EmojiAnnotations(emoji='🎅', codepoints=(127877,), name='սանտա կլաուս', slug='սանտա_կլաուս', annotations=frozenset({'տոն', 'սանտա', 'հեքիաթ', 'ֆանտազիա', 'սուրբ ծնունդ', 'հայր'})),
EmojiAnnotations(emoji='👼', codepoints=(128124,), name='մանուկ-հրեշտակ', slug='մանուկ_հրեշտակ', annotations=frozenset({'երեխա', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'հրեշտակ'})),
EmojiAnnotations(emoji='💆', codepoints=(128134,), name='դեմքի մերսում', slug='դեմքի_մերսում', annotations=frozenset({'սրահ', 'մերսում'})),
EmojiAnnotations(emoji='💇', codepoints=(128135,), name='սանրվածք', slug='սանրվածք', annotations=frozenset({'վարսավիր', 'գեղեցկություն', 'սրահ'})),
EmojiAnnotations(emoji='👰', codepoints=(128112,), name='քողով հարս', slug='քողով_հարս', annotations=frozenset({'քող', 'հարս', 'հարսանիք'})),
EmojiAnnotations(emoji='🙍', codepoints=(128589,), name='խոժոռված դեմքով անձ', slug='խոժոռված_դեմքով_անձ', annotations=frozenset({'խոժոռված', 'ժեստ'})),
EmojiAnnotations(emoji='🙎', codepoints=(128590,), name='դժգոհ անձ', slug='դժգոհ_անձ', annotations=frozenset({'ժեստ', 'դժգոհ'})),
EmojiAnnotations(emoji='🙅', codepoints=(128581,), name='ոչ ցույց տվող', slug='ոչ_ցույց_տվող', annotations=frozenset({'արգելված', 'ձեռք', 'ժեստ', 'ոչ'})),
EmojiAnnotations(emoji='🙆', codepoints=(128582,), name='ok ցույց տվող', slug='ok_ցույց_տվող', annotations=frozenset({'ձեռք', 'ժեստ', 'ok'})),
EmojiAnnotations(emoji='💁', codepoints=(128129,), name='տեղեկատու բյուրոյի աշխատող', slug='տեղեկատու_բյուրոյի_աշխատող', annotations=frozenset({'հանդուգն', 'ձեռք', 'օգնել', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🙋', codepoints=(128587,), name='ձեռք բարձրացնող ուրախ անձ', slug='ձեռք_բարձրացնող_ուրախ_անձ', annotations=frozenset({'ձեռք', 'երջանիկ', 'ժեստ', 'բարձրացված'})),
EmojiAnnotations(emoji='🙇', codepoints=(128583,), name='խոնարհվող անձ', slug='խոնարհվող_անձ', annotations=frozenset({'ներողություն խնդրել', 'ներողություն', 'ժեստ', 'խոնարհվել'})),
EmojiAnnotations(emoji='🙌', codepoints=(128588,), name='ձեռքերը բարձրացնող անձ', slug='ձեռքերը_բարձրացնող_անձ', annotations=frozenset({'մարմին', 'տոն', 'ժեստ', 'ձեռք', 'ուռա', 'բարձրացված'})),
EmojiAnnotations(emoji='🙏', codepoints=(128591,), name='միացված ձեռքի ափեր', slug='միացված_ձեռքի_ափեր', annotations=frozenset({'աղոթել', 'խնդրել', 'մարմին', 'խնդրում եմ', 'ժեստ', 'խոնարհվել', 'ձեռք', 'շնորհակալություն', 'միացված'})),
EmojiAnnotations(emoji='\U0001f5e3', codepoints=(128483,), name='խոսացող գլուխ', slug='խոսացող_գլուխ', annotations=frozenset({'դեմք', 'գլուխ', 'ուրվագիծ', 'խոսացող', 'խոսալ'})),
EmojiAnnotations(emoji='👤', codepoints=(128100,), name='ուրվագծված կիսանդրի', slug='ուրվագծված_կիսանդրի', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='👥', codepoints=(128101,), name='ուրվագծված կիսանդրիներ', slug='ուրվագծված_կիսանդրիներ', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='🚶', codepoints=(128694,), name='հետիոտն', slug='հետիոտն', annotations=frozenset({'քայլել', 'զբոսանք', 'զբոսնել'})),
EmojiAnnotations(emoji='🏃', codepoints=(127939,), name='վազող', slug='վազող', annotations=frozenset({'մարաթոն', 'վազք'})),
EmojiAnnotations(emoji='👯', codepoints=(128111,), name='պարող կանայք', slug='պարող_կանայք', annotations=frozenset({'ականջ', 'աղջիկ', 'կին', 'պարող', 'ճագար'})),
EmojiAnnotations(emoji='\U0001f574', codepoints=(128372,), name='տեղում ճախրող գործնական կոստյումով մարդ', slug='տեղում_ճախրող_գործնական_կոստյումով_մարդ', annotations=frozenset({'կոստյում', 'բիզնես', 'մարդ'})),
EmojiAnnotations(emoji='💏', codepoints=(128143,), name='համբույր', slug='համբույր', annotations=frozenset({'զույգ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💑', codepoints=(128145,), name='սրտիկով զույգ', slug='սրտիկով_զույգ', annotations=frozenset({'զույգ', 'սիրտ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='👪', codepoints=(128106,), name='ընտանիք', slug='ընտանիք', annotations=frozenset({'երեխա', 'մայր', 'հայր'})),
EmojiAnnotations(emoji='👫', codepoints=(128107,), name='իրար ձեռք բռնած մարդ և կին', slug='իրար_ձեռք_բռնած_մարդ_և_կին', annotations=frozenset({'զույգ', 'ձեռք', 'տղամարդ', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='👬', codepoints=(128108,), name='իրար ձեռք բռնած երկու տղամարդ', slug='իրար_ձեռք_բռնած_երկու_տղամարդ', annotations=frozenset({'երկվորյակ', 'ձեռք', 'տղամարդ', 'բռնել', 'կենդանակերպ', 'զույգ'})),
EmojiAnnotations(emoji='👭', codepoints=(128109,), name='իրար ձեռք բռնած երկու կին', slug='իրար_ձեռք_բռնած_երկու_կին', annotations=frozenset({'զույգ', 'ձեռք', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='\U0001f3fb', codepoints=(127995,), name='մաշկի տիպ-1-2', slug='մաշկի_տիպ_1_2', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fc', codepoints=(127996,), name='մաշկի տիպ-3', slug='մաշկի_տիպ_3', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fd', codepoints=(127997,), name='մաշկի տիպ-4', slug='մաշկի_տիպ_4', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fe', codepoints=(127998,), name='մաշկի տիպ-5', slug='մաշկի_տիպ_5', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3ff', codepoints=(127999,), name='մաշկի տիպ-6', slug='մաշկի_տիպ_6', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='💪', codepoints=(128170,), name='ձգված բիցեպս', slug='ձգված_բիցեպս', annotations=frozenset({'բիցեպս', 'մարմին', 'կոմիքս', 'ձգել', 'մկան'})),
EmojiAnnotations(emoji='👈', codepoints=(128072,), name='դեպի ձախ ուղղված ցուցամատ', slug='դեպի_ձախ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👉', codepoints=(128073,), name='դեպի աջ ուղղված ցուցամատ', slug='դեպի_աջ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='☝', codepoints=(9757,), name='դեպի վեր ուղղված ցուցամատ ձեռքի ափի կողմից', slug='դեպի_վեր_ուղղված_ցուցամատ_ձեռքի_ափի_կողմից', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռքի ափ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='👆', codepoints=(128070,), name='դեպի վեր ուղղված ցուցամատ', slug='դեպի_վեր_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='\U0001f595', codepoints=(128405,), name='մեջտեղի մատ', slug='մեջտեղի_մատ', annotations=frozenset({'ձեռք', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👇', codepoints=(128071,), name='դեպի վար ուղղված ցուցամատ', slug='դեպի_վար_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'վար', 'մատ'})),
EmojiAnnotations(emoji='✌', codepoints=(9996,), name='հաղթական ձեռք', slug='հաղթական_ձեռք', annotations=frozenset({'ձեռք', 'v', 'մարմին', 'հաղթանակ'})),
EmojiAnnotations(emoji='\U0001f596', codepoints=(128406,), name='վուլկանցիների ողջույն', slug='վուլկանցիների_ողջույն', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'վուլկան'})),
EmojiAnnotations(emoji='\U0001f918', codepoints=(129304,), name='եղջյուրների նշան', slug='եղջյուրների_նշան', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'եղջյուրներ'})),
EmojiAnnotations(emoji='\U0001f590', codepoints=(128400,), name='բացված մատներով բարձրացված ձեռք', slug='բացված_մատներով_բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'բացված'})),
EmojiAnnotations(emoji='✋', codepoints=(9995,), name='բարձրացված ձեռք', slug='բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='👌', codepoints=(128076,), name='ok ցույց տվող ձեռք', slug='ok_ցույց_տվող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'ok'})),
EmojiAnnotations(emoji='👍', codepoints=(128077,), name='բութ մատը վեր', slug='բութ_մատը_վեր', annotations=frozenset({'բութ', '+1', 'ձեռք', 'մարմին', 'վեր'})),
EmojiAnnotations(emoji='👎', codepoints=(128078,), name='բութ մատը ներքև', slug='բութ_մատը_ներքև', annotations=frozenset({'-1', 'ներքև', 'ձեռք', 'մարմին', 'բութ մատ'})),
EmojiAnnotations(emoji='✊', codepoints=(9994,), name='բարձրացված բռունցք', slug='բարձրացված_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👊', codepoints=(128074,), name='հանդիպակաց բռունցք', slug='հանդիպակաց_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👋', codepoints=(128075,), name='թափահարող ձեռք', slug='թափահարող_ձեռք', annotations=frozenset({'ձեռք', 'թափահարել', 'թափահարող', 'մարմին'})),
EmojiAnnotations(emoji='👏', codepoints=(128079,), name='ծափահարող ձեռքեր', slug='ծափահարող_ձեռքեր', annotations=frozenset({'ձեռք', 'մարմին', 'ծափահարել'})),
EmojiAnnotations(emoji='👐', codepoints=(128080,), name='բաց ձեռքեր', slug='բաց_ձեռքեր', annotations=frozenset({'բաց', 'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='✍', codepoints=(9997,), name='գրող ձեռք', slug='գրող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'գրել'})),
EmojiAnnotations(emoji='💅', codepoints=(128133,), name='եղունգների լաքապատում', slug='եղունգների_լաքապատում', annotations=frozenset({'խնամք', 'մարմին', 'հղկել', 'մատնահարդարում', 'եղունգ', 'կոսմետիկա'})),
EmojiAnnotations(emoji='👂', codepoints=(128066,), name='ականջ', slug='ականջ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👃', codepoints=(128067,), name='քիթ', slug='քիթ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👣', codepoints=(128099,), name='ոտնահետքեր', slug='ոտնահետքեր', annotations=frozenset({'հագուստ', 'ոտնահետք', 'հետք', 'մարմին'})),
EmojiAnnotations(emoji='👀', codepoints=(128064,), name='աչքեր', slug='աչքեր', annotations=frozenset({'աչք', 'դեմք', 'մարմին'})),
EmojiAnnotations(emoji='\U0001f441', codepoints=(128065,), name='աչք', slug='աչք', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👅', codepoints=(128069,), name='լեզու', slug='լեզու', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👄', codepoints=(128068,), name='բերան', slug='բերան', annotations=frozenset({'շուրթեր', 'մարմին'})),
EmojiAnnotations(emoji='💋', codepoints=(128139,), name='համբույրի հետք', slug='համբույրի_հետք', annotations=frozenset({'սիրտ', 'շուրթեր', 'հետք', 'սիրավեպ', 'համբույր'})),
EmojiAnnotations(emoji='💘', codepoints=(128152,), name='նետահարված սիրտ', slug='նետահարված_սիրտ', annotations=frozenset({'նետ', 'սիրտ', 'սիրավեպ', 'կուպիդոն'})),
EmojiAnnotations(emoji='❤', codepoints=(10084,), name='կարմիր սիրտ', slug='կարմիր_սիրտ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='💓', codepoints=(128147,), name='բաբախող սիրտ', slug='բաբախող_սիրտ', annotations=frozenset({'սիրտ', 'սրտխփոց', 'պուլսացիա', 'բաբախյուն'})),
EmojiAnnotations(emoji='💔', codepoints=(128148,), name='կոտրված սիրտ', slug='կոտրված_սիրտ', annotations=frozenset({'սիրտ', 'կոտրված', 'կոտրել'})),
EmojiAnnotations(emoji='💕', codepoints=(128149,), name='երկու սրտեր', slug='երկու_սրտեր', annotations=frozenset({'սիրտ', 'սեր'})),
EmojiAnnotations(emoji='💖', codepoints=(128150,), name='շողշողացող սիրտ', slug='շողշողացող_սիրտ', annotations=frozenset({'սիրտ', 'կայծ', 'ոգևորված'})),
EmojiAnnotations(emoji='💗', codepoints=(128151,), name='աճող սիրտ', slug='աճող_սիրտ', annotations=frozenset({'նյարդային', 'սիրտ', 'սրտի զարկ', 'աճող', 'ոգևորված'})),
EmojiAnnotations(emoji='💙', codepoints=(128153,), name='կապույտ սիրտ', slug='կապույտ_սիրտ', annotations=frozenset({'կապույտ', 'սիրտ'})),
EmojiAnnotations(emoji='💚', codepoints=(128154,), name='կանաչ սիրտ', slug='կանաչ_սիրտ', annotations=frozenset({'սիրտ', 'կանաչ'})),
EmojiAnnotations(emoji='💛', codepoints=(128155,), name='դեղին սիրտ', slug='դեղին_սիրտ', annotations=frozenset({'սիրտ', 'դեղին'})),
EmojiAnnotations(emoji='💜', codepoints=(128156,), name='մանուշակագույն սիրտ', slug='մանուշակագույն_սիրտ', annotations=frozenset({'սիրտ', 'մանուշակագույն'})),
EmojiAnnotations(emoji='💝', codepoints=(128157,), name='ժապավենով սիրտ', slug='ժապավենով_սիրտ', annotations=frozenset({'սիրտ', 'ժապավեն', 'վալենտին'})),
EmojiAnnotations(emoji='💞', codepoints=(128158,), name='պտտվող սրտեր', slug='պտտվող_սրտեր', annotations=frozenset({'պտտվող', 'սիրտ'})),
EmojiAnnotations(emoji='💟', codepoints=(128159,), name='սրտաձև նախշ', slug='սրտաձև_նախշ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='❣', codepoints=(10083,), name='բացականչական նշանի տեսքով սիրտ', slug='բացականչական_նշանի_տեսքով_սիրտ', annotations=frozenset({'նշան', 'կետադրական', 'սիրտ', 'բացականչություն'})),
EmojiAnnotations(emoji='💌', codepoints=(128140,), name='սիրային նամակ', slug='սիրային_նամակ', annotations=frozenset({'սիրտ', 'նամակ', 'փոստ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='💤', codepoints=(128164,), name='խռռռ', slug='խռռռ', annotations=frozenset({'քնել', 'կոմիքս'})),
EmojiAnnotations(emoji='💢', codepoints=(128162,), name='զայրույթի նշան', slug='զայրույթի_նշան', annotations=frozenset({'զայրացած', 'կոմիքս', 'խենք'})),
EmojiAnnotations(emoji='💣', codepoints=(128163,), name='ռումբ', slug='ռումբ', annotations=frozenset({'կոմիքս'})),
EmojiAnnotations(emoji='💥', codepoints=(128165,), name='բախում', slug='բախում', annotations=frozenset({'բում', 'կոմիքս'})),
EmojiAnnotations(emoji='💦', codepoints=(128166,), name='քրտինքի կաթիլներ', slug='քրտինքի_կաթիլներ', annotations=frozenset({'ցայտող', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='💨', codepoints=(128168,), name='սրընթաց', slug='սրընթաց', annotations=frozenset({'կոմիքս', 'ընթանալ', 'սլանալ'})),
EmojiAnnotations(emoji='💫', codepoints=(128171,), name='գլխապտույտ', slug='գլխապտույտ', annotations=frozenset({'կոմիքս', 'աստղ'})),
EmojiAnnotations(emoji='💬', codepoints=(128172,), name='խոսքի ամպիկ', slug='խոսքի_ամպիկ', annotations=frozenset({'երկխոսություն', 'փուչիկ', 'կոմիքս', 'պղպջակ', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5e8', codepoints=(128488,), name='խոսքի ձախակողմյա ամպիկ', slug='խոսքի_ձախակողմյա_ամպիկ', annotations=frozenset({'երկխոսություն', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5ef', codepoints=(128495,), name='զայրույթի աջակողմյա ամպիկ', slug='զայրույթի_աջակողմյա_ամպիկ', annotations=frozenset({'զայրացած', 'փուչիկ', 'պղպջակ', 'խենք'})),
EmojiAnnotations(emoji='💭', codepoints=(128173,), name='մտքի ամպիկ', slug='մտքի_ամպիկ', annotations=frozenset({'փուչիկ', 'կոմիքս', 'պղպջակ', 'միտք'})),
EmojiAnnotations(emoji='👓', codepoints=(128083,), name='ակնոց', slug='ակնոց', annotations=frozenset({'հագուստ', 'աչք'})),
EmojiAnnotations(emoji='\U0001f576', codepoints=(128374,), name='արևային ակնոց', slug='արևային_ակնոց', annotations=frozenset({'աչք', 'ակնոց', 'մուգ'})),
EmojiAnnotations(emoji='👔', codepoints=(128084,), name='փողկապ', slug='փողկապ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👕', codepoints=(128085,), name='սպորտային վերնաշապիկ', slug='սպորտային_վերնաշապիկ', annotations=frozenset({'հագուստ', 'վերնաշապիկ', 'սպորտային'})),
EmojiAnnotations(emoji='👖', codepoints=(128086,), name='ջինս', slug='ջինս', annotations=frozenset({'հագուստ', 'տաբատ', 'շալվար'})),
EmojiAnnotations(emoji='👗', codepoints=(128087,), name='զգեստ', slug='զգեստ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👘', codepoints=(128088,), name='կիմոնո', slug='կիմոնո', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👙', codepoints=(128089,), name='բիկինի', slug='բիկինի', annotations=frozenset({'հագուստ', 'լողալ'})),
EmojiAnnotations(emoji='👚', codepoints=(128090,), name='կնոջ հագուստ', slug='կնոջ_հագուստ', annotations=frozenset({'հագուստ', 'կին'})),
EmojiAnnotations(emoji='👛', codepoints=(128091,), name='դրամապանակ', slug='դրամապանակ', annotations=frozenset({'հագուստ', 'մետաղադրամ'})),
EmojiAnnotations(emoji='👜', codepoints=(128092,), name='ձեռքի պայուսակ', slug='ձեռքի_պայուսակ', annotations=frozenset({'հագուստ', 'պայուսակ'})),
EmojiAnnotations(emoji='👝', codepoints=(128093,), name='պայուսակ', slug='պայուսակ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='\U0001f6cd', codepoints=(128717,), name='գնումների պայուսակ', slug='գնումների_պայուսակ', annotations=frozenset({'գնումներ', 'պայուսակ', 'հյուրանոց'})),
EmojiAnnotations(emoji='🎒', codepoints=(127890,), name='դպրոցական պայուսակ', slug='դպրոցական_պայուսակ', annotations=frozenset({'դպրոց', 'պայուսակ', 'ուսապարկ'})),
EmojiAnnotations(emoji='👞', codepoints=(128094,), name='տղամարդու կոշիկ', slug='տղամարդու_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'տղամարդ'})),
EmojiAnnotations(emoji='👟', codepoints=(128095,), name='սպորտային կոշիկ', slug='սպորտային_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կեդեր', 'մարզական'})),
EmojiAnnotations(emoji='👠', codepoints=(128096,), name='բարձրակրունկ կոշիկ', slug='բարձրակրունկ_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կրունկ', 'կին'})),
EmojiAnnotations(emoji='👡', codepoints=(128097,), name='կնոջ սանդալ', slug='կնոջ_սանդալ', annotations=frozenset({'հագուստ', 'կոշիկ', 'սանդալ', 'կին'})),
EmojiAnnotations(emoji='👢', codepoints=(128098,), name='կնոջ երկարաճիթք կոշիկ', slug='կնոջ_երկարաճիթք_կոշիկ', annotations=frozenset({'երկարաճիթք կոշիկ', 'հագուստ', 'կոշիկ', 'կին'})),
EmojiAnnotations(emoji='👑', codepoints=(128081,), name='թագ', slug='թագ', annotations=frozenset({'հագուստ', 'արքա', 'թագուհի'})),
EmojiAnnotations(emoji='👒', codepoints=(128082,), name='կնոջ գլխարկ', slug='կնոջ_գլխարկ', annotations=frozenset({'հագուստ', 'գլխարկ', 'կին'})),
EmojiAnnotations(emoji='🎩', codepoints=(127913,), name='ցիլինդր', slug='ցիլինդր', annotations=frozenset({'հագուստ', 'գլխարկ'})),
EmojiAnnotations(emoji='🎓', codepoints=(127891,), name='շրջանավարտի գլխարկ', slug='շրջանավարտի_գլխարկ', annotations=frozenset({'գլխարկ', 'տոն', 'հագուստ', 'ավարտական'})),
EmojiAnnotations(emoji='\U0001f4ff', codepoints=(128255,), name='տերողորմյա', slug='տերողորմյա', annotations=frozenset({'հագուստ', 'վզնոց', 'ուլունքներ', 'աղոթք', 'կրոն'})),
EmojiAnnotations(emoji='💄', codepoints=(128132,), name='շրթներկ', slug='շրթներկ', annotations=frozenset({'կոսմետիա', 'դիմահարդարում'})),
EmojiAnnotations(emoji='💍', codepoints=(128141,), name='մատանի', slug='մատանի', annotations=frozenset({'ադամանդ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💎', codepoints=(128142,), name='թանկարժեք քար', slug='թանկարժեք_քար', annotations=frozenset({'ադամանդ', 'ակն', 'սիրավեպ', 'գոհար'})),
EmojiAnnotations(emoji='🐵', codepoints=(128053,), name='կապիկի դեմք', slug='կապիկի_դեմք', annotations=frozenset({'դեմք', 'կապիկ'})),
EmojiAnnotations(emoji='🐶', codepoints=(128054,), name='շան դեմք', slug='շան_դեմք', annotations=frozenset({'դեմք', 'շուն', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐕', codepoints=(128021,), name='շուն', slug='շուն', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐩', codepoints=(128041,), name='պուդել', slug='պուդել', annotations=frozenset({'շուն'})),
EmojiAnnotations(emoji='🐺', codepoints=(128058,), name='գայլի դեմք', slug='գայլի_դեմք', annotations=frozenset({'դեմք', 'գայլ'})),
EmojiAnnotations(emoji='🐱', codepoints=(128049,), name='կատվի դեմք', slug='կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐈', codepoints=(128008,), name='կատու', slug='կատու', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='\U0001f981', codepoints=(129409,), name='առյուծի դեմք', slug='առյուծի_դեմք', annotations=frozenset({'դեմք', 'առյուծ', 'կենդանակերպ', 'կորյուն'})),
EmojiAnnotations(emoji='🐯', codepoints=(128047,), name='վագրի դեմք', slug='վագրի_դեմք', annotations=frozenset({'դեմք', 'վագր'})),
EmojiAnnotations(emoji='🐴', codepoints=(128052,), name='ձիու դեմք', slug='ձիու_դեմք', annotations=frozenset({'դեմք', 'ձի'})),
EmojiAnnotations(emoji='🐎', codepoints=(128014,), name='ձի', slug='ձի', annotations=frozenset({'մրցավազք', 'մրցավազքային ձի'})),
EmojiAnnotations(emoji='\U0001f984', codepoints=(129412,), name='միաեղջյուրի դեմք', slug='միաեղջյուրի_դեմք', annotations=frozenset({'դեմք', 'միաեղջյուր'})),
EmojiAnnotations(emoji='🐮', codepoints=(128046,), name='կովի դեմք', slug='կովի_դեմք', annotations=frozenset({'դեմք', 'կով'})),
EmojiAnnotations(emoji='🐂', codepoints=(128002,), name='ցուլիկ', slug='ցուլիկ', annotations=frozenset({'կենդանակերպ', 'ցուլ'})),
EmojiAnnotations(emoji='🐃', codepoints=(128003,), name='ջրագոմեշ', slug='ջրագոմեշ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🐷', codepoints=(128055,), name='խոզի դեմք', slug='խոզի_դեմք', annotations=frozenset({'դեմք', 'խոզ'})),
EmojiAnnotations(emoji='🐖', codepoints=(128022,), name='խոզ', slug='խոզ', annotations=frozenset({'էգ խոզ'})),
EmojiAnnotations(emoji='🐗', codepoints=(128023,), name='վարազ', slug='վարազ', annotations=frozenset({'խոզ'})),
EmojiAnnotations(emoji='🐽', codepoints=(128061,), name='խոզի քիթ', slug='խոզի_քիթ', annotations=frozenset({'դեմք', 'քիթ', 'խոզ'})),
EmojiAnnotations(emoji='🐏', codepoints=(128015,), name='արու ոչխար', slug='արու_ոչխար', annotations=frozenset({'ոչխար', 'կենդանակերպ', 'խոյ'})),
EmojiAnnotations(emoji='🐑', codepoints=(128017,), name='ոչխար', slug='ոչխար', annotations=frozenset({'մաքի'})),
EmojiAnnotations(emoji='🐐', codepoints=(128016,), name='այծ', slug='այծ', annotations=frozenset({'այծեղջյուր', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐪', codepoints=(128042,), name='ուղտ', slug='ուղտ', annotations=frozenset({'միասապատ', 'կուզ'})),
EmojiAnnotations(emoji='🐫', codepoints=(128043,), name='երկսապատավոր ուղտ', slug='երկսապատավոր_ուղտ', annotations=frozenset({'ուղտ', 'երկսապատանի', 'կուզ'})),
EmojiAnnotations(emoji='🐭', codepoints=(128045,), name='մկան դեմք', slug='մկան_դեմք', annotations=frozenset({'դեմք', 'մուկ'})),
EmojiAnnotations(emoji='🐹', codepoints=(128057,), name='գերմանամկան դեմք', slug='գերմանամկան_դեմք', annotations=frozenset({'դեմք', 'գերմանամուկ', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐰', codepoints=(128048,), name='ճագարի դեմք', slug='ճագարի_դեմք', annotations=frozenset({'դեմք', 'ընտանի կենդանի', 'նապաստակ', 'ճագար'})),
EmojiAnnotations(emoji='🐇', codepoints=(128007,), name='ճագար', slug='ճագար', annotations=frozenset({'ընտանի կենդանի', 'նապաստակ'})),
EmojiAnnotations(emoji='🐻', codepoints=(128059,), name='արջի դեմք', slug='արջի_դեմք', annotations=frozenset({'դեմք', 'արջ'})),
EmojiAnnotations(emoji='🐨', codepoints=(128040,), name='կոալա', slug='կոալա', annotations=frozenset({'արջ'})),
EmojiAnnotations(emoji='🐼', codepoints=(128060,), name='պանդայի դեմք', slug='պանդայի_դեմք', annotations=frozenset({'դեմք', 'պանդա'})),
EmojiAnnotations(emoji='🐾', codepoints=(128062,), name='թաթերի հետքեր', slug='թաթերի_հետքեր', annotations=frozenset({'ոտքեր', 'հետք', 'թաթ'})),
EmojiAnnotations(emoji='🐓', codepoints=(128019,), name='աքաղաղ', slug='աքաղաղ', annotations=frozenset({'աքաղաք'})),
EmojiAnnotations(emoji='🐣', codepoints=(128035,), name='ձվից դուրս եկող ճուտիկ', slug='ձվից_դուրս_եկող_ճուտիկ', annotations=frozenset({'ձագ', 'ձվից դուրս եկող', 'ճուտիկ'})),
EmojiAnnotations(emoji='🐤', codepoints=(128036,), name='ճուտիկ', slug='ճուտիկ', annotations=frozenset({'ձագ'})),
EmojiAnnotations(emoji='🐥', codepoints=(128037,), name='դեմքով կանգնած ճուտիկ', slug='դեմքով_կանգնած_ճուտիկ', annotations=frozenset({'ձագ', 'ճուտիկ'})),
EmojiAnnotations(emoji='\U0001f54a', codepoints=(128330,), name='աղավնի', slug='աղավնի', annotations=frozenset({'թռչուն', 'թռչել', 'խաղաղություն'})),
EmojiAnnotations(emoji='🐸', codepoints=(128056,), name='գորտի դեմք', slug='գորտի_դեմք', annotations=frozenset({'դեմք', 'գորտ'})),
EmojiAnnotations(emoji='🐍', codepoints=(128013,), name='օձ', slug='օձ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='🐲', codepoints=(128050,), name='վիշապի դեմք', slug='վիշապի_դեմք', annotations=frozenset({'վիշապ', 'դեմք', 'հեքիաթ'})),
EmojiAnnotations(emoji='🐉', codepoints=(128009,), name='վիշապ', slug='վիշապ', annotations=frozenset({'հեքիաթ'})),
EmojiAnnotations(emoji='🐳', codepoints=(128051,), name='ջուր ցայտեցնող կետաձուկ', slug='ջուր_ցայտեցնող_կետաձուկ', annotations=frozenset({'դեմք', 'կետաձուկ', 'ցայտում'})),
EmojiAnnotations(emoji='🐟', codepoints=(128031,), name='ձուկ', slug='ձուկ', annotations=frozenset({'կենդանակերպ', 'ձկներ'})),
EmojiAnnotations(emoji='🐠', codepoints=(128032,), name='արևադարձային ձուկ', slug='արևադարձային_ձուկ', annotations=frozenset({'ձուկ', 'արևադարձային'})),
EmojiAnnotations(emoji='🐡', codepoints=(128033,), name='փքաձուկ', slug='փքաձուկ', annotations=frozenset({'ձուկ'})),
EmojiAnnotations(emoji='🐚', codepoints=(128026,), name='պարուրաձև խխունջախեցի', slug='պարուրաձև_խխունջախեցի', annotations=frozenset({'պարույր', 'խխունջ'})),
EmojiAnnotations(emoji='\U0001f980', codepoints=(129408,), name='կրաբ', slug='կրաբ', annotations=frozenset({'խեցգետին', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐜', codepoints=(128028,), name='մրջյուն', slug='մրջյուն', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐝', codepoints=(128029,), name='մեղու', slug='մեղու', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐞', codepoints=(128030,), name='զատիկ', slug='զատիկ', annotations=frozenset({'միջատ', 'բզեզ'})),
EmojiAnnotations(emoji='\U0001f577', codepoints=(128375,), name='սարդ', slug='սարդ', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='\U0001f578', codepoints=(128376,), name='սարդոստայն', slug='սարդոստայն', annotations=frozenset({'սարդ', 'ոստայն'})),
EmojiAnnotations(emoji='\U0001f982', codepoints=(129410,), name='շագանակագույն կարիճ', slug='շագանակագույն_կարիճ', annotations=frozenset({'կարիճ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='💐', codepoints=(128144,), name='ծաղկեփունջ', slug='ծաղկեփունջ', annotations=frozenset({'ծաղիկ', 'սիրավեպ', 'բույս'})),
EmojiAnnotations(emoji='🌸', codepoints=(127800,), name='բալենու ծաղիկ', slug='բալենու_ծաղիկ', annotations=frozenset({'ծաղիկ', 'բույս', 'բալ'})),
EmojiAnnotations(emoji='💮', codepoints=(128174,), name='սպիտակ ծաղիկ', slug='սպիտակ_ծաղիկ', annotations=frozenset({'ծաղիկ'})),
EmojiAnnotations(emoji='\U0001f3f5', codepoints=(127989,), name='վարդանախշ', slug='վարդանախշ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌹', codepoints=(127801,), name='վարդ', slug='վարդ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌺', codepoints=(127802,), name='հիբիսկուս', slug='հիբիսկուս', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌻', codepoints=(127803,), name='արևածաղիկ', slug='արևածաղիկ', annotations=frozenset({'ծաղիկ', 'արև', 'բույս'})),
EmojiAnnotations(emoji='🌼', codepoints=(127804,), name='ծաղիկ', slug='ծաղիկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌷', codepoints=(127799,), name='կակաչ', slug='կակաչ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌱', codepoints=(127793,), name='ծիլ', slug='ծիլ', annotations=frozenset({'բույս', 'մատղաշ'})),
EmojiAnnotations(emoji='🌲', codepoints=(127794,), name='եղևնի', slug='եղևնի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌳', codepoints=(127795,), name='սաղարթավոր ծառ', slug='սաղարթավոր_ծառ', annotations=frozenset({'սաղարթավոր', 'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌴', codepoints=(127796,), name='արմավենի', slug='արմավենի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌵', codepoints=(127797,), name='կակտուս', slug='կակտուս', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌾', codepoints=(127806,), name='բրնձի հասկեր', slug='բրնձի_հասկեր', annotations=frozenset({'ականջ', 'բույս', 'բրինձ'})),
EmojiAnnotations(emoji='🌿', codepoints=(127807,), name='խոտաբույս', slug='խոտաբույս', annotations=frozenset({'տերև', 'բույս'})),
EmojiAnnotations(emoji='☘', codepoints=(9752,), name='երեքնուկ', slug='երեքնուկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍀', codepoints=(127808,), name='քառատերև երեքնուկ', slug='քառատերև_երեքնուկ', annotations=frozenset({'4', 'չորս', 'տերև', 'բույս', 'երեքնուկ'})),
EmojiAnnotations(emoji='🍁', codepoints=(127809,), name='թխկու տերև', slug='թխկու_տերև', annotations=frozenset({'տերև', 'թխկի', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍂', codepoints=(127810,), name='ընկած տերևներ', slug='ընկած_տերևներ', annotations=frozenset({'տերև', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍃', codepoints=(127811,), name='ճախրող տերևներ', slug='ճախրող_տերևներ', annotations=frozenset({'տերև', 'քամի', 'փչել', 'թրթռալ', 'բույս'})),
EmojiAnnotations(emoji='🍇', codepoints=(127815,), name='խաղող', slug='խաղող', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍈', codepoints=(127816,), name='սեխ', slug='սեխ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍉', codepoints=(127817,), name='ձմերուկ', slug='ձմերուկ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍊', codepoints=(127818,), name='մանդարին', slug='մանդարին', annotations=frozenset({'նարինջ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍋', codepoints=(127819,), name='կիտրոն', slug='կիտրոն', annotations=frozenset({'ցիտրուս', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍌', codepoints=(127820,), name='բանան', slug='բանան', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍍', codepoints=(127821,), name='արքայախնձոր', slug='արքայախնձոր', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍎', codepoints=(127822,), name='կարմիր խնձոր', slug='կարմիր_խնձոր', annotations=frozenset({'կարմիր', 'խնձոր', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍏', codepoints=(127823,), name='կանաչ խնձոր', slug='կանաչ_խնձոր', annotations=frozenset({'խնձոր', 'բույս', 'պտուղ', 'կանաչ'})),
EmojiAnnotations(emoji='🍐', codepoints=(127824,), name='տանձ', slug='տանձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍑', codepoints=(127825,), name='դեղձ', slug='դեղձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍒', codepoints=(127826,), name='բալ', slug='բալ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍓', codepoints=(127827,), name='ելակ', slug='ելակ', annotations=frozenset({'հատապտուղ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍅', codepoints=(127813,), name='լոլիկ', slug='լոլիկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🍆', codepoints=(127814,), name='սմբուկ', slug='սմբուկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🌽', codepoints=(127805,), name='եգիպտացորեն', slug='եգիպտացորեն', annotations=frozenset({'ականջ', 'բույս'})),
EmojiAnnotations(emoji='\U0001f336', codepoints=(127798,), name='կծու պղպեղ', slug='կծու_պղպեղ', annotations=frozenset({'պղպեղ', 'կծու', 'բույս'})),
EmojiAnnotations(emoji='🍄', codepoints=(127812,), name='սունկ', slug='սունկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌰', codepoints=(127792,), name='շագանակ', slug='շագանակ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍞', codepoints=(127838,), name='հաց', slug='հաց', annotations=frozenset({'բոքոն'})),
EmojiAnnotations(emoji='\U0001f9c0', codepoints=(129472,), name='պանրի կտոր', slug='պանրի_կտոր', annotations=frozenset({'պանիր'})),
EmojiAnnotations(emoji='🍖', codepoints=(127830,), name='ոսկորով միս', slug='ոսկորով_միս', annotations=frozenset({'ոսկոր', 'միս'})),
EmojiAnnotations(emoji='🍗', codepoints=(127831,), name='հավի բուդ', slug='հավի_բուդ', annotations=frozenset({'ոսկոր', 'բուդ', 'հավ', 'թռչնամիս'})),
EmojiAnnotations(emoji='🍔', codepoints=(127828,), name='համբուրգեր', slug='համբուրգեր', annotations=frozenset({'բուրգեր'})),
EmojiAnnotations(emoji='🍟', codepoints=(127839,), name='տապակած կարտոֆիլ', slug='տապակած_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'ֆրի'})),
EmojiAnnotations(emoji='🍕', codepoints=(127829,), name='պիցցա', slug='պիցցա', annotations=frozenset({'պանիր', 'կտոր'})),
EmojiAnnotations(emoji='\U0001f32d', codepoints=(127789,), name='հոթդոգ', slug='հոթդոգ', annotations=frozenset({'նրբերշիկ', 'ֆրանկֆուրտեր'})),
EmojiAnnotations(emoji='\U0001f32e', codepoints=(127790,), name='տակո', slug='տակո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='\U0001f32f', codepoints=(127791,), name='բուրիտո', slug='բուրիտո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='🍲', codepoints=(127858,), name='թասով ճաշ', slug='թասով_ճաշ', annotations=frozenset({'թաս', 'ճաշ'})),
EmojiAnnotations(emoji='🍱', codepoints=(127857,), name='բենտո արկղիկ', slug='բենտո_արկղիկ', annotations=frozenset({'բենտո', 'արկղիկ'})),
EmojiAnnotations(emoji='🍘', codepoints=(127832,), name='բրնձի կրեկեր', slug='բրնձի_կրեկեր', annotations=frozenset({'բրինձ', 'կրեկեր'})),
EmojiAnnotations(emoji='🍙', codepoints=(127833,), name='բրնձի գնդիկ', slug='բրնձի_գնդիկ', annotations=frozenset({'գնդիկ', 'ճապոնական', 'բրինձ'})),
EmojiAnnotations(emoji='🍚', codepoints=(127834,), name='եփած բրինձ', slug='եփած_բրինձ', annotations=frozenset({'եփած', 'բրինձ'})),
EmojiAnnotations(emoji='🍛', codepoints=(127835,), name='կարրիով բրինձ', slug='կարրիով_բրինձ', annotations=frozenset({'կարրի', 'բրինձ'})),
EmojiAnnotations(emoji='🍜', codepoints=(127836,), name='տաք ապուր', slug='տաք_ապուր', annotations=frozenset({'թաս', 'տաք', 'լապշա'})),
EmojiAnnotations(emoji='🍝', codepoints=(127837,), name='սպագետի', slug='սպագետի', annotations=frozenset({'մակարոնեղեն'})),
EmojiAnnotations(emoji='🍠', codepoints=(127840,), name='կարմրացրած քաղցր կարտոֆիլ', slug='կարմրացրած_քաղցր_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'կարմրացրած', 'քաղցր'})),
EmojiAnnotations(emoji='🍢', codepoints=(127842,), name='օդեն', slug='օդեն', annotations=frozenset({'ծովամթերք', 'շամփուր', 'քյաբաբ', 'փայտիկ'})),
EmojiAnnotations(emoji='🍤', codepoints=(127844,), name='տապակած ծովախեցգետին', slug='տապակած_ծովախեցգետին', annotations=frozenset({'տապակած', 'ծովախեցգետին'})),
EmojiAnnotations(emoji='🍥', codepoints=(127845,), name='ձկնային տորթ պտտանախշով', slug='ձկնային_տորթ_պտտանախշով', annotations=frozenset({'տորթ', 'խմորեղեն', 'ձուկ', 'պտտանախշ'})),
EmojiAnnotations(emoji='🍡', codepoints=(127841,), name='դանգո', slug='դանգո', annotations=frozenset({'շամփուր', 'փայտիկ', 'քաղցր', 'ճապոնական', 'դեսերտ'})),
EmojiAnnotations(emoji='🍦', codepoints=(127846,), name='լցնովի պաղպաղակ', slug='լցնովի_պաղպաղակ', annotations=frozenset({'քաղցր', 'պաղպաղակ', 'դեսերտ', 'կրեմ', 'լցնովի', 'փափուկ'})),
EmojiAnnotations(emoji='🍧', codepoints=(127847,), name='մանրացված սառույց', slug='մանրացված_սառույց', annotations=frozenset({'սառույց', 'մանրացված', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍨', codepoints=(127848,), name='պաղպաղակ', slug='պաղպաղակ', annotations=frozenset({'կրեմ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍩', codepoints=(127849,), name='դոնաթ', slug='դոնաթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍪', codepoints=(127850,), name='թխվածքաբլիթ', slug='թխվածքաբլիթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🎂', codepoints=(127874,), name='ծննդյան տորթ', slug='ծննդյան_տորթ', annotations=frozenset({'քաղցր', 'տոն', 'դեսերտ', 'տորթ', 'խմորեղեն', 'տարեդարձ'})),
EmojiAnnotations(emoji='🍰', codepoints=(127856,), name='տորթի կտոր', slug='տորթի_կտոր', annotations=frozenset({'կտոր', 'տորթ', 'խմորեղեն', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍫', codepoints=(127851,), name='շոկոլադե սալիկ', slug='շոկոլադե_սալիկ', annotations=frozenset({'սալիկ', 'շոկոլադ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍬', codepoints=(127852,), name='կոնֆետ', slug='կոնֆետ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍭', codepoints=(127853,), name='սառնաշաքար', slug='սառնաշաքար', annotations=frozenset({'կոնֆետ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍮', codepoints=(127854,), name='պուդինգ', slug='պուդինգ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍯', codepoints=(127855,), name='մեղրի կճուճ', slug='մեղրի_կճուճ', annotations=frozenset({'քաղցր', 'մեղր', 'կճուճ'})),
EmojiAnnotations(emoji='🍼', codepoints=(127868,), name='մանկական շիշ', slug='մանկական_շիշ', annotations=frozenset({'մանկական', 'շիշ', 'խմել', 'կաթ'})),
EmojiAnnotations(emoji='☕', codepoints=(9749,), name='տաք ըմպելիք', slug='տաք_ըմպելիք', annotations=frozenset({'սուրճ', 'թեյ', 'խմել', 'տաք', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍵', codepoints=(127861,), name='թեյի բաժակ առանց բռնակի', slug='թեյի_բաժակ_առանց_բռնակի', annotations=frozenset({'թեյի բաժակ', 'բաժակ', 'խմել', 'թեյ', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍶', codepoints=(127862,), name='սակե', slug='սակե', annotations=frozenset({'բար', 'շիշ', 'բաժակ', 'խմել', 'ըմպելիք'})),
EmojiAnnotations(emoji='\U0001f37e', codepoints=(127870,), name='թռչող խցանով շիշ', slug='թռչող_խցանով_շիշ', annotations=frozenset({'բար', 'խցան', 'շիշ', 'խմել', 'դուրս թռչել'})),
EmojiAnnotations(emoji='🍷', codepoints=(127863,), name='գինու բաժակ', slug='գինու_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'գինի', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍸', codepoints=(127864,), name='կոկտեյլի բաժակ', slug='կոկտեյլի_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'կոկտեյլ'})),
EmojiAnnotations(emoji='🍹', codepoints=(127865,), name='արևադարձային ընպելիք', slug='արևադարձային_ընպելիք', annotations=frozenset({'բար', 'խմել', 'արևադարձային'})),
EmojiAnnotations(emoji='🍺', codepoints=(127866,), name='գարեջրի գավաթ', slug='գարեջրի_գավաթ', annotations=frozenset({'բար', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='🍻', codepoints=(127867,), name='զրնգացող գարեջրի գավաթներ', slug='զրնգացող_գարեջրի_գավաթներ', annotations=frozenset({'բար', 'զրնգալ', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='\U0001f37d', codepoints=(127869,), name='դանակ և պատառաքաղ ափսեի հետ', slug='դանակ_և_պատառաքաղ_ափսեի_հետ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ', 'ափսե'})),
EmojiAnnotations(emoji='🍴', codepoints=(127860,), name='դանակ և պատառաքաղ', slug='դանակ_և_պատառաքաղ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ'})),
EmojiAnnotations(emoji='🍳', codepoints=(127859,), name='թավայով ձվածեղ', slug='թավայով_ձվածեղ', annotations=frozenset({'ձու', 'թավա', 'տապակել'})),
EmojiAnnotations(emoji='\U0001f3fa', codepoints=(127994,), name='սափոր', slug='սափոր', annotations=frozenset({'խմել', 'խոհարարություն', 'գործիք', 'կենդանակերպ', 'զենք', 'ջրհոս'})),
EmojiAnnotations(emoji='🌍', codepoints=(127757,), name='եվրոպան և աֆրիկան պատկերող գլոբուս', slug='եվրոպան_և_աֆրիկան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'եվրոպա', 'աֆրիկա'})),
EmojiAnnotations(emoji='🌎', codepoints=(127758,), name='ամերիկաները պատկերող գլոբուս', slug='ամերիկաները_պատկերող_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'ամերիկաներ', 'աշխարհ'})),
EmojiAnnotations(emoji='🌏', codepoints=(127759,), name='ասիան և ավստրալիան պատկերող գլոբուս', slug='ասիան_և_ավստրալիան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'ավստրալիա', 'ասիա'})),
EmojiAnnotations(emoji='🌐', codepoints=(127760,), name='միջօրեականներով գլոբուս', slug='միջօրեականներով_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'միջօրեականներ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f5fa', codepoints=(128506,), name='աշխարհի քարտեզ', slug='աշխարհի_քարտեզ', annotations=frozenset({'քարտեզ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f3d4', codepoints=(127956,), name='ձյունածածկ գագաթով լեռ', slug='ձյունածածկ_գագաթով_լեռ', annotations=frozenset({'սառը', 'ձյուն', 'լեռ'})),
EmojiAnnotations(emoji='🌋', codepoints=(127755,), name='հրաբուխ', slug='հրաբուխ', annotations=frozenset({'եղանակ', 'լեռ', 'ժայթքում'})),
EmojiAnnotations(emoji='🗻', codepoints=(128507,), name='ֆուջի լեռ', slug='ֆուջի_լեռ', annotations=frozenset({'լեռ', 'ֆուջի'})),
EmojiAnnotations(emoji='\U0001f3d6', codepoints=(127958,), name='լողափ հովանոցով', slug='լողափ_հովանոցով', annotations=frozenset({'լողափ', 'հովանոց'})),
EmojiAnnotations(emoji='\U0001f3dd', codepoints=(127965,), name='անմարդաբնակ կղզի', slug='անմարդաբնակ_կղզի', annotations=frozenset({'կղզի', 'անմարդաբնակ'})),
EmojiAnnotations(emoji='\U0001f3de', codepoints=(127966,), name='ազգային պարկ', slug='ազգային_պարկ', annotations=frozenset({'պարկ'})),
EmojiAnnotations(emoji='\U0001f3db', codepoints=(127963,), name='հունահռոմեական շինություն', slug='հունահռոմեական_շինություն', annotations=frozenset({'հունահռոմեական', 'շինություն'})),
EmojiAnnotations(emoji='\U0001f3d7', codepoints=(127959,), name='շենքի կառուցում', slug='շենքի_կառուցում', annotations=frozenset({'շենք', 'շինարարություն'})),
EmojiAnnotations(emoji='\U0001f3d8', codepoints=(127960,), name='տան շինարարություն', slug='տան_շինարարություն', annotations=frozenset({'շենք', 'տուն'})),
EmojiAnnotations(emoji='\U0001f3d9', codepoints=(127961,), name='քաղաքի համայնապատկեր', slug='քաղաքի_համայնապատկեր', annotations=frozenset({'քաղաք', 'շենք'})),
EmojiAnnotations(emoji='\U0001f3da', codepoints=(127962,), name='լքված շինություն', slug='լքված_շինություն', annotations=frozenset({'լքված', 'շենք', 'տուն'})),
EmojiAnnotations(emoji='🏠', codepoints=(127968,), name='բնակելի տուն', slug='բնակելի_տուն', annotations=frozenset({'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='🏡', codepoints=(127969,), name='այգիով տուն', slug='այգիով_տուն', annotations=frozenset({'այգի', 'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='⛪', codepoints=(9962,), name='եկեղեցի', slug='եկեղեցի', annotations=frozenset({'խաչ', 'շենք', 'կրոն', 'քրիստոնեական'})),
EmojiAnnotations(emoji='\U0001f54b', codepoints=(128331,), name='կաաբա', slug='կաաբա', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54c', codepoints=(128332,), name='մզկիթ', slug='մզկիթ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54d', codepoints=(128333,), name='սինագոգ', slug='սինագոգ', annotations=frozenset({'հրեա', 'հրեական', 'տաճար', 'կրոն'})),
EmojiAnnotations(emoji='⛩', codepoints=(9961,), name='սինտոյական տաճար', slug='սինտոյական_տաճար', annotations=frozenset({'տաճար', 'կրոն', 'սինտոյական'})),
EmojiAnnotations(emoji='🏢', codepoints=(127970,), name='գրասենյակային շենք', slug='գրասենյակային_շենք', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏣', codepoints=(127971,), name='ճապոնական փոստատուն', slug='ճապոնական_փոստատուն', annotations=frozenset({'փոստատուն', 'շենք', 'ճապոնական'})),
EmojiAnnotations(emoji='🏤', codepoints=(127972,), name='փոստատուն', slug='փոստատուն', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='🏥', codepoints=(127973,), name='հիվանդանոց', slug='հիվանդանոց', annotations=frozenset({'բժշկություն', 'շենք', 'բժիշկ'})),
EmojiAnnotations(emoji='🏦', codepoints=(127974,), name='բանկ', slug='բանկ', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏨', codepoints=(127976,), name='հյուրանոց', slug='հյուրանոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏩', codepoints=(127977,), name='սիրային հյուրանոց', slug='սիրային_հյուրանոց', annotations=frozenset({'շենք', 'հյուրանոց', 'սեր'})),
EmojiAnnotations(emoji='🏪', codepoints=(127978,), name='շուրջօրյա խանութ', slug='շուրջօրյա_խանութ', annotations=frozenset({'խանութ', 'շենք', 'շուրջօրյա'})),
EmojiAnnotations(emoji='🏫', codepoints=(127979,), name='դպրոց', slug='դպրոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏬', codepoints=(127980,), name='հանրախանութ', slug='հանրախանութ', annotations=frozenset({'խանութ', 'շենք'})),
EmojiAnnotations(emoji='🏭', codepoints=(127981,), name='գործարան', slug='գործարան', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏯', codepoints=(127983,), name='ճապոնական դղյակ', slug='ճապոնական_դղյակ', annotations=frozenset({'շենք', 'ճապոնական', 'դղյակ'})),
EmojiAnnotations(emoji='🏰', codepoints=(127984,), name='դղյակ', slug='դղյակ', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='💒', codepoints=(128146,), name='հարսանիք', slug='հարսանիք', annotations=frozenset({'մատուռ', 'սիրավեպ'})),
EmojiAnnotations(emoji='🗼', codepoints=(128508,), name='տոկիոյի աշտարակը', slug='տոկիոյի_աշտարակը', annotations=frozenset({'աշտարակ', 'տոկյո'})),
EmojiAnnotations(emoji='🗽', codepoints=(128509,), name='ազատության արձանը', slug='ազատության_արձանը', annotations=frozenset({'ազատություն', 'արձան'})),
EmojiAnnotations(emoji='🗾', codepoints=(128510,), name='ճապոնիայի քարտեզը', slug='ճապոնիայի_քարտեզը', annotations=frozenset({'քարտեզ', 'ճապոնիա'})),
EmojiAnnotations(emoji='⛺', codepoints=(9978,), name='վրան', slug='վրան', annotations=frozenset({'ճամբար'})),
EmojiAnnotations(emoji='🌁', codepoints=(127745,), name='մառախլապատ', slug='մառախլապատ', annotations=frozenset({'եղանակ', 'մառախուղ'})),
EmojiAnnotations(emoji='🌃', codepoints=(127747,), name='աստղազարդ գիշեր', slug='աստղազարդ_գիշեր', annotations=frozenset({'եղանակ', 'գիշեր', 'աստղ'})),
EmojiAnnotations(emoji='🌄', codepoints=(127748,), name='արևածագը լեռներում', slug='արևածագը_լեռներում', annotations=frozenset({'արևածագ', 'եղանակ', 'արև', 'լեռ', 'առավոտ'})),
EmojiAnnotations(emoji='🌅', codepoints=(127749,), name='արևածագ', slug='արևածագ', annotations=frozenset({'եղանակ', 'արև', 'առավոտ'})),
EmojiAnnotations(emoji='🌆', codepoints=(127750,), name='քաղաքի համայնապատկեր մթնշաղին', slug='քաղաքի_համայնապատկեր_մթնշաղին', annotations=frozenset({'լանդշաֆտ', 'երեկո', 'շենք', 'մթնշաղ', 'մայրամուտ', 'եղանակ', 'քաղաք', 'արև'})),
EmojiAnnotations(emoji='🌇', codepoints=(127751,), name='մայրամուտ', slug='մայրամուտ', annotations=frozenset({'եղանակ', 'արև', 'շենք', 'մթնշաղ'})),
EmojiAnnotations(emoji='🌉', codepoints=(127753,), name='կամուրջը գիշերով', slug='կամուրջը_գիշերով', annotations=frozenset({'եղանակ', 'գիշեր', 'կամուրջ'})),
EmojiAnnotations(emoji='♨', codepoints=(9832,), name='տաք աղբյուրներ', slug='տաք_աղբյուրներ', annotations=frozenset({'աղբյուրներ', 'տաք', 'հոսք'})),
EmojiAnnotations(emoji='🌌', codepoints=(127756,), name='ծիր կաթին', slug='ծիր_կաթին', annotations=frozenset({'եղանակ', 'տիեզերք'})),
EmojiAnnotations(emoji='🎠', codepoints=(127904,), name='կարուսելի ձի', slug='կարուսելի_ձի', annotations=frozenset({'ձի', 'կարուսել'})),
EmojiAnnotations(emoji='🎡', codepoints=(127905,), name='սատանայի անիվ', slug='սատանայի_անիվ', annotations=frozenset({'զվարճանքների այգի', 'անիվ', 'սատանայի'})),
EmojiAnnotations(emoji='🎢', codepoints=(127906,), name='ամերիկյան բլուրներ', slug='ամերիկյան_բլուրներ', annotations=frozenset({'զվարճանքների այգի', 'բլուրներ', 'ամերիկյան'})),
EmojiAnnotations(emoji='💈', codepoints=(128136,), name='վարսավիրի ձող', slug='վարսավիրի_ձող', annotations=frozenset({'վարսավիր', 'սանրվածք', 'ձող'})),
EmojiAnnotations(emoji='🎪', codepoints=(127914,), name='կրկեսային վրան', slug='կրկեսային_վրան', annotations=frozenset({'վրան', 'կրկես'})),
EmojiAnnotations(emoji='🎭', codepoints=(127917,), name='կատարողական արվեստ', slug='կատարողական_արվեստ', annotations=frozenset({'ներկայացում', 'թատրոն', 'դիմակ', 'արվեստ'})),
EmojiAnnotations(emoji='\U0001f5bc', codepoints=(128444,), name='շրջանակ նկարով', slug='շրջանակ_նկարով', annotations=frozenset({'նկարչություն', 'նկար', 'արվեստ', 'շրջանակ', 'թանգարան'})),
EmojiAnnotations(emoji='🎨', codepoints=(127912,), name='ներկապնակ', slug='ներկապնակ', annotations=frozenset({'նկարչություն', 'արվեստ', 'թանգարան'})),
EmojiAnnotations(emoji='🎰', codepoints=(127920,), name='խաղային ավտոմատ', slug='խաղային_ավտոմատ', annotations=frozenset({'խաղ', 'ավտոմատ'})),
EmojiAnnotations(emoji='🚂', codepoints=(128642,), name='շոգեքարշ', slug='շոգեքարշ', annotations=frozenset({'փոխադրամիջոց', 'գոլորշի', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚃', codepoints=(128643,), name='երկաթուղային վագոն', slug='երկաթուղային_վագոն', annotations=frozenset({'փոխադրամիջոց', 'տրոլեյբուս', 'էլեկտրական', 'երկաթուղի', 'վագոն', 'տրամվայ', 'գնացք'})),
EmojiAnnotations(emoji='🚄', codepoints=(128644,), name='ճեպընթաց գնացք', slug='ճեպընթաց_գնացք', annotations=frozenset({'գնացք', 'փոխադրամիջոց', 'սինկանսեն', 'երկաթուղի', 'արագություն'})),
EmojiAnnotations(emoji='🚅', codepoints=(128645,), name='ճեպընթաց գնացք կլոր քթով', slug='ճեպընթաց_գնացք_կլոր_քթով', annotations=frozenset({'փոխադրամիջոց', 'արագություն', 'կլոր քիթ', 'երկաթուղի', 'սինկանսեն', 'գնացք'})),
EmojiAnnotations(emoji='🚆', codepoints=(128646,), name='գնացք', slug='գնացք', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚇', codepoints=(128647,), name='մետրո', slug='մետրո', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚈', codepoints=(128648,), name='վերգետնյա մետրո', slug='վերգետնյա_մետրո', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚉', codepoints=(128649,), name='կայարան', slug='կայարան', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚊', codepoints=(128650,), name='տրամվայ', slug='տրամվայ', annotations=frozenset({'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚝', codepoints=(128669,), name='մոնոռելս', slug='մոնոռելս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚞', codepoints=(128670,), name='լեռնային երկաթուղի', slug='լեռնային_երկաթուղի', annotations=frozenset({'վագոն', 'լեռ', 'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚋', codepoints=(128651,), name='տրամվայի վագոն', slug='տրամվայի_վագոն', annotations=frozenset({'տրամվայ', 'վագոն', 'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚌', codepoints=(128652,), name='ավտոբուս', slug='ավտոբուս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚍', codepoints=(128653,), name='մոտեցող ավտոբուս', slug='մոտեցող_ավտոբուս', annotations=frozenset({'մոտեցող', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚎', codepoints=(128654,), name='տրոլեյբուս', slug='տրոլեյբուս', annotations=frozenset({'տրամվայ', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚏', codepoints=(128655,), name='ավտոբուսի կանգառ', slug='ավտոբուսի_կանգառ', annotations=frozenset({'ավտոբուս', 'կանգառ'})),
EmojiAnnotations(emoji='🚐', codepoints=(128656,), name='միկրոավտոբուս', slug='միկրոավտոբուս', annotations=frozenset({'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚑', codepoints=(128657,), name='շտապօգնության մեքենա', slug='շտապօգնության_մեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚒', codepoints=(128658,), name='հրշեջ մեքենա', slug='հրշեջ_մեքենա', annotations=frozenset({'քարշակ', 'փոխադրամիջոց', 'հրդեք', 'բեռնատար'})),
EmojiAnnotations(emoji='🚓', codepoints=(128659,), name='ոստիկանական մեքենա', slug='ոստիկանական_մեքենա', annotations=frozenset({'պարեկ', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚔', codepoints=(128660,), name='մոտեցող ոստիկանական մեքենա', slug='մոտեցող_ոստիկանական_մեքենա', annotations=frozenset({'մոտեցող', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚕', codepoints=(128661,), name='տաքսի', slug='տաքսի', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚖', codepoints=(128662,), name='մոտեցող տաքսի', slug='մոտեցող_տաքսի', annotations=frozenset({'մոտեցող', 'տաքսի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚗', codepoints=(128663,), name='ավտոմեքենա', slug='ավտոմեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚘', codepoints=(128664,), name='մոտեցող ավտոմեքենա', slug='մոտեցող_ավտոմեքենա', annotations=frozenset({'մոտեցող', 'մեքենա', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚙', codepoints=(128665,), name='ավտոֆուրգոն', slug='ավտոֆուրգոն', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚚', codepoints=(128666,), name='բեռնատար', slug='բեռնատար', annotations=frozenset({'առաքում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚛', codepoints=(128667,), name='կցորդով բեռնատար', slug='կցորդով_բեռնատար', annotations=frozenset({'փոխադրամիջոց', 'կցորդ', 'բեռնատար'})),
EmojiAnnotations(emoji='🚜', codepoints=(128668,), name='տրակտոր', slug='տրակտոր', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚲', codepoints=(128690,), name='հեծանիվ', slug='հեծանիվ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛽', codepoints=(9981,), name='բենզալցակայանի պոմպ', slug='բենզալցակայանի_պոմպ', annotations=frozenset({'բենզին', 'կայան', 'վառելիք', 'պոմպ', 'բենզալցակայան'})),
EmojiAnnotations(emoji='\U0001f6e3', codepoints=(128739,), name='ավտոմայրուղի', slug='ավտոմայրուղի', annotations=frozenset({'մայրուղի', 'ճանապարհ'})),
EmojiAnnotations(emoji='\U0001f6e4', codepoints=(128740,), name='երկաթուղի', slug='երկաթուղի', annotations=frozenset({'գնացք'})),
EmojiAnnotations(emoji='🚨', codepoints=(128680,), name='ոստիկանական մեքենայի փարոս', slug='ոստիկանական_մեքենայի_փարոս', annotations=frozenset({'լույս', 'ոստիկանություն', 'փարոս', 'պտտվող', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚥', codepoints=(128677,), name='հորիզոնական լուսակիր', slug='հորիզոնական_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚦', codepoints=(128678,), name='ուղղահայաց լուսակիր', slug='ուղղահայաց_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚧', codepoints=(128679,), name='շինարարություն', slug='շինարարություն', annotations=frozenset({'արգելապատնեշ'})),
EmojiAnnotations(emoji='⚓', codepoints=(9875,), name='խարիսխ', slug='խարիսխ', annotations=frozenset({'գործիք', 'նավ'})),
EmojiAnnotations(emoji='⛵', codepoints=(9973,), name='առագաստանավ', slug='առագաստանավ', annotations=frozenset({'նավակ', 'հանգստավայր', 'ծով', 'զբոսանավ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚣', codepoints=(128675,), name='թիանավակ', slug='թիանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚤', codepoints=(128676,), name='արագընթաց մոտորանավակ', slug='արագընթաց_մոտորանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f3', codepoints=(128755,), name='ուղևորատար նավ', slug='ուղևորատար_նավ', annotations=frozenset({'նավ', 'ուղևոր', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛴', codepoints=(9972,), name='լաստանավ', slug='լաստանավ', annotations=frozenset({'նավակ'})),
EmojiAnnotations(emoji='\U0001f6e5', codepoints=(128741,), name='մոտորանավ', slug='մոտորանավ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚢', codepoints=(128674,), name='նավ', slug='նավ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='✈', codepoints=(9992,), name='ինքնաթիռ', slug='ինքնաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6e9', codepoints=(128745,), name='փոքր ինքնաթիռ', slug='փոքր_ինքնաթիռ', annotations=frozenset({'ինքնաթիռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6eb', codepoints=(128747,), name='օդանավի մեկնում', slug='օդանավի_մեկնում', annotations=frozenset({'ինքնաթիռ', 'գրանցում', 'մեկնում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ec', codepoints=(128748,), name='օդանավի ժամանում', slug='օդանավի_ժամանում', annotations=frozenset({'վայրէջք', 'ժամանող', 'օդանավ', 'ժամանում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='💺', codepoints=(128186,), name='նստատեղ', slug='նստատեղ', annotations=frozenset({'բազկաթոռ'})),
EmojiAnnotations(emoji='🚁', codepoints=(128641,), name='ուղղաթիռ', slug='ուղղաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚟', codepoints=(128671,), name='կախովի երկաթուղի', slug='կախովի_երկաթուղի', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'կախովի'})),
EmojiAnnotations(emoji='🚠', codepoints=(128672,), name='լեռնային ճոպանուղի', slug='լեռնային_ճոպանուղի', annotations=frozenset({'գոնդոլա', 'ճոպան', 'լեռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚡', codepoints=(128673,), name='օդային տրամվայ', slug='օդային_տրամվայ', annotations=frozenset({'օդային', 'ճոպան', 'ճոպանուղի', 'գոնդոլա', 'վագոն', 'տրամվայ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚀', codepoints=(128640,), name='հրթիռ', slug='հրթիռ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f0', codepoints=(128752,), name='արբանյակ', slug='արբանյակ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ce', codepoints=(128718,), name='հյուրանոցային զանգ', slug='հյուրանոցային_զանգ', annotations=frozenset({'զանգ', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cc', codepoints=(128716,), name='մահճակալում պառկած մարդ', slug='մահճակալում_պառկած_մարդ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cf', codepoints=(128719,), name='մահճակալ', slug='մահճակալ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cb', codepoints=(128715,), name='բազմոց և լամպ', slug='բազմոց_և_լամպ', annotations=frozenset({'լամպ', 'բազմոց', 'հյուրանոց'})),
EmojiAnnotations(emoji='🚽', codepoints=(128701,), name='զուգարանակոնք', slug='զուգարանակոնք', annotations=frozenset({'զուգարան'})),
EmojiAnnotations(emoji='🚿', codepoints=(128703,), name='լոգարանի ցնցուղ', slug='լոգարանի_ցնցուղ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🛀', codepoints=(128704,), name='լոգանք ընդունող մարդ', slug='լոգանք_ընդունող_մարդ', annotations=frozenset({'լոգարան', 'լոգասենյակ'})),
EmojiAnnotations(emoji='🛁', codepoints=(128705,), name='լոգարան', slug='լոգարան', annotations=frozenset({'լոգասենյակ'})),
EmojiAnnotations(emoji='⌛', codepoints=(8987,), name='ավազի ժամացույց', slug='ավազի_ժամացույց', annotations=frozenset({'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏳', codepoints=(9203,), name='ավազի ժամացույց հոսող ավազով', slug='ավազի_ժամացույց_հոսող_ավազով', annotations=frozenset({'ավազի ժամացույց', 'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏰', codepoints=(9200,), name='զարթուցիչ', slug='զարթուցիչ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏱', codepoints=(9201,), name='վայրկյանաչափ', slug='վայրկյանաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏲', codepoints=(9202,), name='ժամաչափ', slug='ժամաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='\U0001f570', codepoints=(128368,), name='բուխարու ժամացույց', slug='բուխարու_ժամացույց', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='🕛', codepoints=(128347,), name='ժամը տասներկուսը', slug='ժամը_տասներկուսը', annotations=frozenset({'12', 'ժամ', 'տասներկու', '00', 'ժամացույց', '12:00'})),
EmojiAnnotations(emoji='🕧', codepoints=(128359,), name='տասներկուսն անց կես', slug='տասներկուսն_անց_կես', annotations=frozenset({'12', 'տասներկու', '12:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕐', codepoints=(128336,), name='ժամը մեկը', slug='ժամը_մեկը', annotations=frozenset({'ժամ', 'մեկ', '1', '1:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕜', codepoints=(128348,), name='մեկն անց կես', slug='մեկն_անց_կես', annotations=frozenset({'մեկ', '1', '1:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕑', codepoints=(128337,), name='ժամը երկուսը', slug='ժամը_երկուսը', annotations=frozenset({'ժամ', 'երկու', '00', '2:00', 'ժամացույց', '2'})),
EmojiAnnotations(emoji='🕝', codepoints=(128349,), name='երկուսն անց կես', slug='երկուսն_անց_կես', annotations=frozenset({'երկու', '2:30', 'ժամացույց', '2', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕒', codepoints=(128338,), name='ժամը երեքը', slug='ժամը_երեքը', annotations=frozenset({'ժամ', 'երեք', '3', '00', '3:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕞', codepoints=(128350,), name='երեքն անց կես', slug='երեքն_անց_կես', annotations=frozenset({'երեք', '3', '3:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕓', codepoints=(128339,), name='ժամը չորսը', slug='ժամը_չորսը', annotations=frozenset({'4:00', 'ժամ', '00', 'չորս', '4', 'ժամացույց'})),
EmojiAnnotations(emoji='🕟', codepoints=(128351,), name='չորսն անց կես', slug='չորսն_անց_կես', annotations=frozenset({'4:30', 'չորս', '4', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕔', codepoints=(128340,), name='ժամը հինգը', slug='ժամը_հինգը', annotations=frozenset({'ժամ', 'հինգ', '5:00', '5', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕠', codepoints=(128352,), name='հինգն անց կես', slug='հինգն_անց_կես', annotations=frozenset({'5:30', 'հինգ', '5', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕕', codepoints=(128341,), name='ժամը վեցը', slug='ժամը_վեցը', annotations=frozenset({'վեց', '6', 'ժամ', '6:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕡', codepoints=(128353,), name='վեցն անց կես', slug='վեցն_անց_կես', annotations=frozenset({'վեց', '6', '6:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕖', codepoints=(128342,), name='ժամը յոթը', slug='ժամը_յոթը', annotations=frozenset({'ժամ', 'յոթ', '7', '00', '7:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕢', codepoints=(128354,), name='յոթն անց կես', slug='յոթն_անց_կես', annotations=frozenset({'յոթ', '7', '7:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕗', codepoints=(128343,), name='ժամը ութը', slug='ժամը_ութը', annotations=frozenset({'ժամ', '8:00', '00', '8', 'ժամացույց', 'ութ'})),
EmojiAnnotations(emoji='🕣', codepoints=(128355,), name='ութն անց կես', slug='ութն_անց_կես', annotations=frozenset({'8:30', '8', 'ժամացույց', 'ութ', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕘', codepoints=(128344,), name='ժամը ինը', slug='ժամը_ինը', annotations=frozenset({'ժամ', '9:00', 'ինը', '00', '9', 'ժամացույց'})),
EmojiAnnotations(emoji='🕤', codepoints=(128356,), name='ինն անց կես', slug='ինն_անց_կես', annotations=frozenset({'ինը', '9', 'ժամացույց', 'երեսուն', '30', '9:30'})),
EmojiAnnotations(emoji='🕙', codepoints=(128345,), name='ժամը տասը', slug='ժամը_տասը', annotations=frozenset({'10', '10:00', 'ժամ', 'տասը', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕥', codepoints=(128357,), name='տասն անց կես', slug='տասն_անց_կես', annotations=frozenset({'10', 'տասը', '10:30', 'երեսուն', 'ժամացույց', '30'})),
EmojiAnnotations(emoji='🕚', codepoints=(128346,), name='ժամը տասնմեկը', slug='ժամը_տասնմեկը', annotations=frozenset({'11', 'ժամ', '00', '11:00', 'տասնմեկ', 'ժամացույց'})),
EmojiAnnotations(emoji='🕦', codepoints=(128358,), name='տասնմեկն անց կես', slug='տասնմեկն_անց_կես', annotations=frozenset({'11', '11:30', 'տասնմեկ', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🌑', codepoints=(127761,), name='նորալուսին', slug='նորալուսին', annotations=frozenset({'եղանակ', 'մութ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌒', codepoints=(127762,), name='աճող մահիկ', slug='աճող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'մահիկ', 'աճող'})),
EmojiAnnotations(emoji='🌓', codepoints=(127763,), name='լուսինն առաջին քառորդում', slug='լուսինն_առաջին_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌔', codepoints=(127764,), name='աճող ուռուցիկ լուսին', slug='աճող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'եղանակ', 'աճող'})),
EmojiAnnotations(emoji='🌕', codepoints=(127765,), name='լիալուսին', slug='լիալուսին', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌖', codepoints=(127766,), name='նվազող ուռուցիկ լուսին', slug='նվազող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'նվազող', 'եղանակ'})),
EmojiAnnotations(emoji='🌗', codepoints=(127767,), name='լուսինը երկրորդ քառորդում', slug='լուսինը_երկրորդ_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌘', codepoints=(127768,), name='նվազող մահիկ', slug='նվազող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'նվազող', 'մահիկ'})),
EmojiAnnotations(emoji='🌙', codepoints=(127769,), name='մահիկ', slug='մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌚', codepoints=(127770,), name='դեմքով նորալուսին', slug='դեմքով_նորալուսին', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌛', codepoints=(127771,), name='լուսինն առաջին քառորդում դեմքով', slug='լուսինն_առաջին_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='🌜', codepoints=(127772,), name='լուսինը երկրորդ քառորդում դեմքով', slug='լուսինը_երկրորդ_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='\U0001f321', codepoints=(127777,), name='ջերմաչափ', slug='ջերմաչափ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='☀', codepoints=(9728,), name='արև', slug='արև', annotations=frozenset({'եղանակ', 'տիեզերք', 'արևոտ', 'պայծառ', 'ճառագայթներ'})),
EmojiAnnotations(emoji='🌝', codepoints=(127773,), name='դեմքով լիալուսին', slug='դեմքով_լիալուսին', annotations=frozenset({'տիեզերք', 'լուսին', 'լիալուսին', 'պայծառ', 'դեմք', 'եղանակ'})),
EmojiAnnotations(emoji='🌞', codepoints=(127774,), name='դեմքով արև', slug='դեմքով_արև', annotations=frozenset({'եղանակ', 'դեմք', 'տիեզերք', 'արև', 'պայծառ'})),
EmojiAnnotations(emoji='⭐', codepoints=(11088,), name='սպիտակավուն աստղ', slug='սպիտակավուն_աստղ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='🌟', codepoints=(127775,), name='փայլող աստղ', slug='փայլող_աստղ', annotations=frozenset({'փայլող', 'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🌠', codepoints=(127776,), name='ընկնող աստղ', slug='ընկնող_աստղ', annotations=frozenset({'տիեզերք', 'աստղ', 'ընկնող'})),
EmojiAnnotations(emoji='☁', codepoints=(9729,), name='ամպ', slug='ամպ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='⛅', codepoints=(9925,), name='արև ամպի հետևում', slug='արև_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='⛈', codepoints=(9928,), name='կայծակով և անձրևով ամպ', slug='կայծակով_և_անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև', 'ամպրոպ'})),
EmojiAnnotations(emoji='\U0001f324', codepoints=(127780,), name='արև փոքր ամպի հետևում', slug='արև_փոքր_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f325', codepoints=(127781,), name='արև մեծ ամպի հետևում', slug='արև_մեծ_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f326', codepoints=(127782,), name='արև անձրևով ամպի հետևում', slug='արև_անձրևով_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f327', codepoints=(127783,), name='անձրևով ամպ', slug='անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f328', codepoints=(127784,), name='ձյունով ամպ', slug='ձյունով_ամպ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f329', codepoints=(127785,), name='կայծակով ամպ', slug='կայծակով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'կայծակ'})),
EmojiAnnotations(emoji='\U0001f32a', codepoints=(127786,), name='պտտահողմ', slug='պտտահողմ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32b', codepoints=(127787,), name='մառախուղ', slug='մառախուղ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32c', codepoints=(127788,), name='քամու երես', slug='քամու_երես', annotations=frozenset({'եղանակ', 'դեմք', 'քամի', 'փչել', 'ամպ'})),
EmojiAnnotations(emoji='🌀', codepoints=(127744,), name='ցիկլոն', slug='ցիկլոն', annotations=frozenset({'եղանակ', 'պտտվող', 'թայֆուն'})),
EmojiAnnotations(emoji='🌈', codepoints=(127752,), name='ծիածան', slug='ծիածան', annotations=frozenset({'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='🌂', codepoints=(127746,), name='փակ անձրևանոց', slug='փակ_անձրևանոց', annotations=frozenset({'հագուստ', 'անձրևանոց', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☂', codepoints=(9730,), name='անձրևանոց', slug='անձրևանոց', annotations=frozenset({'հագուստ', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☔', codepoints=(9748,), name='անձրևանոց անձրևի կաթիլներով', slug='անձրևանոց_անձրևի_կաթիլներով', annotations=frozenset({'հագուստ', 'կաթիլ', 'անձրևանոց', 'անձրև', 'եղանակ'})),
EmojiAnnotations(emoji='⛱', codepoints=(9969,), name='անձրևանոց գետնի վրա', slug='անձրևանոց_գետնի_վրա', annotations=frozenset({'անձրևանոց', 'եղանակ', 'արև', 'անձրև'})),
EmojiAnnotations(emoji='⚡', codepoints=(9889,), name='բարձր լարում', slug='բարձր_լարում', annotations=frozenset({'վտանգ', 'լարում', 'էլեկտրականություն', 'էլեկտրական', 'կայծակ'})),
EmojiAnnotations(emoji='❄', codepoints=(10052,), name='ձյան փաթիլ', slug='ձյան_փաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='☃', codepoints=(9731,), name='ձնեմարդ', slug='ձնեմարդ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='⛄', codepoints=(9924,), name='ձնեմարդ առանց ձյան', slug='ձնեմարդ_առանց_ձյան', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ձնեմարդ'})),
EmojiAnnotations(emoji='☄', codepoints=(9732,), name='գիսաստղ', slug='գիսաստղ', annotations=frozenset({'տիեզերք'})),
EmojiAnnotations(emoji='🔥', codepoints=(128293,), name='կրակ', slug='կրակ', annotations=frozenset({'գործիք', 'բոց'})),
EmojiAnnotations(emoji='💧', codepoints=(128167,), name='կաթիլ', slug='կաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='🌊', codepoints=(127754,), name='ծովի ալիք', slug='ծովի_ալիք', annotations=frozenset({'ալիք', 'եղանակ', 'օվկիանոս', 'ջուր'})),
EmojiAnnotations(emoji='🎃', codepoints=(127875,), name='ջեքի լապտեր', slug='ջեքի_լապտեր', annotations=frozenset({'լապտեր', 'տոն', 'հելոուին', 'ջեք'})),
EmojiAnnotations(emoji='🎄', codepoints=(127876,), name='տոնածառ', slug='տոնածառ', annotations=frozenset({'սուրբ ծնունդ', 'տոն', 'ծառ'})),
EmojiAnnotations(emoji='🎆', codepoints=(127878,), name='հրավառություն', slug='հրավառություն', annotations=frozenset({'տոնակատարություն'})),
EmojiAnnotations(emoji='🎇', codepoints=(127879,), name='բենգալյան կրակ', slug='բենգալյան_կրակ', annotations=frozenset({'տոնակատարություն', 'կայծ', 'հրավառություն'})),
EmojiAnnotations(emoji='✨', codepoints=(10024,), name='կայծեր', slug='կայծեր', annotations=frozenset({'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🎈', codepoints=(127880,), name='փուչիկ', slug='փուչիկ', annotations=frozenset({'տոն'})),
EmojiAnnotations(emoji='🎉', codepoints=(127881,), name='ճայթուկ', slug='ճայթուկ', annotations=frozenset({'տոն', 'երեկույթ'})),
EmojiAnnotations(emoji='🎊', codepoints=(127882,), name='կոնֆետի', slug='կոնֆետի', annotations=frozenset({'տոն', 'գունդ'})),
EmojiAnnotations(emoji='🎋', codepoints=(127883,), name='տանաբատա', slug='տանաբատա', annotations=frozenset({'դրոշակ', 'տոն', 'ճապոնական', 'ծառ'})),
EmojiAnnotations(emoji='🎌', codepoints=(127884,), name='խաչված դրոշակներ', slug='խաչված_դրոշակներ', annotations=frozenset({'տոն', 'խաչ', 'խաչված', 'ճապոնական'})),
EmojiAnnotations(emoji='🎍', codepoints=(127885,), name='բամբուկից դեկորացիա', slug='բամբուկից_դեկորացիա', annotations=frozenset({'բամբուկ', 'տոն', 'ճապոնական', 'դեկորացիա', 'բույս'})),
EmojiAnnotations(emoji='🎎', codepoints=(127886,), name='ճապոնական տիկնիկներ', slug='ճապոնական_տիկնիկներ', annotations=frozenset({'տոն', 'փառատոն', 'ճապոնական', 'տիկնիկ'})),
EmojiAnnotations(emoji='🎏', codepoints=(127887,), name='կարպերի տեսքով նավադրոշ', slug='կարպերի_տեսքով_նավադրոշ', annotations=frozenset({'տոն', 'նավադրոշ', 'կարպ'})),
EmojiAnnotations(emoji='🎐', codepoints=(127888,), name='քամու զանգակ', slug='քամու_զանգակ', annotations=frozenset({'տոն', 'քամի', 'զանգ'})),
EmojiAnnotations(emoji='🎑', codepoints=(127889,), name='լուսնի ծես', slug='լուսնի_ծես', annotations=frozenset({'տոն', 'լուսին', 'ծես'})),
EmojiAnnotations(emoji='🎀', codepoints=(127872,), name='ժապավեն', slug='ժապավեն', annotations=frozenset({'տոն', 'տոնակատարություն'})),
EmojiAnnotations(emoji='🎁', codepoints=(127873,), name='փաթեթավորված նվեր', slug='փաթեթավորված_նվեր', annotations=frozenset({'տոն', 'փաթեթավորված', 'արկղ', 'նվեր'})),
EmojiAnnotations(emoji='\U0001f396', codepoints=(127894,), name='ռազմական մեդալ', slug='ռազմական_մեդալ', annotations=frozenset({'տոն', 'ռազմական', 'մեդալ'})),
EmojiAnnotations(emoji='\U0001f397', codepoints=(127895,), name='հուշաժապավեն', slug='հուշաժապավեն', annotations=frozenset({'տոն', 'ժապավեն', 'հուշ'})),
EmojiAnnotations(emoji='\U0001f39e', codepoints=(127902,), name='տեսաժապավեն', slug='տեսաժապավեն', annotations=frozenset({'կադր', 'ժապավեն', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='\U0001f39f', codepoints=(127903,), name='մուտքի տոմս', slug='մուտքի_տոմս', annotations=frozenset({'տոմս', 'մուտք'})),
EmojiAnnotations(emoji='🎫', codepoints=(127915,), name='տոմս', slug='տոմս', annotations=frozenset({'մուտք'})),
EmojiAnnotations(emoji='⚽', codepoints=(9917,), name='ֆուտբոլի գնդակ', slug='ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='⚾', codepoints=(9918,), name='բեյսբոլի գնդակ', slug='բեյսբոլի_գնդակ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='🏀', codepoints=(127936,), name='բասկետբոլի գնդակ', slug='բասկետբոլի_գնդակ', annotations=frozenset({'գնդակ', 'բասկետբոլ'})),
EmojiAnnotations(emoji='🏈', codepoints=(127944,), name='ամերիկյան ֆուտբոլի գնդակ', slug='ամերիկյան_ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ', 'ամերիկյան'})),
EmojiAnnotations(emoji='🏉', codepoints=(127945,), name='ռեգբիի գնդակ', slug='ռեգբիի_գնդակ', annotations=frozenset({'ռեգբի', 'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='🎾', codepoints=(127934,), name='թենիսի գնդակ', slug='թենիսի_գնդակ', annotations=frozenset({'գնդակ', 'թենիս', 'մեծ'})),
EmojiAnnotations(emoji='🎱', codepoints=(127921,), name='բիլիարդ', slug='բիլիարդ', annotations=frozenset({'8', 'խաղ', '8 գնդակ', 'գնդակ', 'ութ'})),
EmojiAnnotations(emoji='🎳', codepoints=(127923,), name='բոուլինգ', slug='բոուլինգ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='⛳', codepoints=(9971,), name='գոլֆի դրոշակ', slug='գոլֆի_դրոշակ', annotations=frozenset({'գոլֆ', 'անցք'})),
EmojiAnnotations(emoji='\U0001f3cc', codepoints=(127948,), name='գոլֆ խաղացող', slug='գոլֆ_խաղացող', annotations=frozenset({'գոլֆ', 'գնդակ'})),
EmojiAnnotations(emoji='⛸', codepoints=(9976,), name='չմուշկ', slug='չմուշկ', annotations=frozenset({'սառույց'})),
EmojiAnnotations(emoji='🎣', codepoints=(127907,), name='կարթաձող', slug='կարթաձող', annotations=frozenset({'կարթ', 'ձուկ'})),
EmojiAnnotations(emoji='🎽', codepoints=(127933,), name='պտտվող շապիկ', slug='պտտվող_շապիկ', annotations=frozenset({'շապիկ', 'պտտվող', 'ժապավեն'})),
EmojiAnnotations(emoji='🎿', codepoints=(127935,), name='դահուկներ', slug='դահուկներ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='⛷', codepoints=(9975,), name='դահուկորդ', slug='դահուկորդ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='🏂', codepoints=(127938,), name='սնոուբորդիստ', slug='սնոուբորդիստ', annotations=frozenset({'ձյուն', 'դահուկ', 'սնոուբորդ'})),
EmojiAnnotations(emoji='🏄', codepoints=(127940,), name='սերֆեր', slug='սերֆեր', annotations=frozenset({'սերֆինգ'})),
EmojiAnnotations(emoji='🏇', codepoints=(127943,), name='ձիավազք', slug='ձիավազք', annotations=frozenset({'ձի', 'ժոկեյ', 'մրցարշավային ձի', 'մրցարշավ'})),
EmojiAnnotations(emoji='🏊', codepoints=(127946,), name='լողորդ', slug='լողորդ', annotations=frozenset({'լողալ'})),
EmojiAnnotations(emoji='⛹', codepoints=(9977,), name='գնդակով մարդ', slug='գնդակով_մարդ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3cb', codepoints=(127947,), name='ծանրորդ', slug='ծանրորդ', annotations=frozenset({'ծանրություն'})),
EmojiAnnotations(emoji='🚴', codepoints=(128692,), name='հեծանվորդ', slug='հեծանվորդ', annotations=frozenset({'հեծանիվ'})),
EmojiAnnotations(emoji='🚵', codepoints=(128693,), name='լեռնահեծանվորդ', slug='լեռնահեծանվորդ', annotations=frozenset({'հեծանիվ', 'լեռ', 'հեծանվորդ'})),
EmojiAnnotations(emoji='\U0001f3ce', codepoints=(127950,), name='մրցարշավային մեքենա', slug='մրցարշավային_մեքենա', annotations=frozenset({'մեքենա', 'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3cd', codepoints=(127949,), name='մոտոցիկլետ', slug='մոտոցիկլետ', annotations=frozenset({'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3c5', codepoints=(127941,), name='սպորտային մեդալ', slug='սպորտային_մեդալ', annotations=frozenset({'մեդալ'})),
EmojiAnnotations(emoji='🏆', codepoints=(127942,), name='գավաթ', slug='գավաթ', annotations=frozenset({'մրցանակ'})),
EmojiAnnotations(emoji='\U0001f3cf', codepoints=(127951,), name='կրիկետ', slug='կրիկետ', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d0', codepoints=(127952,), name='վոլեյբոլի գնդակ', slug='վոլեյբոլի_գնդակ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d1', codepoints=(127953,), name='խոտի հոկեյ', slug='խոտի_հոկեյ', annotations=frozenset({'խաղ', 'մական', 'գնդակ', 'դաշտ', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d2', codepoints=(127954,), name='մական և տափօղակ', slug='մական_և_տափօղակ', annotations=frozenset({'սառույց', 'խաղ', 'տափօղակ', 'մական', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d3', codepoints=(127955,), name='սեղանի թենիս', slug='սեղանի_թենիս', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ', 'ռակետ', 'ձեռնաթիակ'})),
EmojiAnnotations(emoji='\U0001f3f8', codepoints=(127992,), name='բադմինտոն', slug='բադմինտոն', annotations=frozenset({'փետրագնդակ', 'խաղ', 'ռակետ', 'ձեռնաթիակ', 'վոլան'})),
EmojiAnnotations(emoji='🎯', codepoints=(127919,), name='դիպուկ հարված', slug='դիպուկ_հարված', annotations=frozenset({'հարվածել', 'խաղ', 'դարթ', 'կենտրոն', 'նշանակետ', 'թիրախ'})),
EmojiAnnotations(emoji='🎮', codepoints=(127918,), name='տեսախաղ', slug='տեսախաղ', annotations=frozenset({'խաղ', 'վահանակ'})),
EmojiAnnotations(emoji='\U0001f579', codepoints=(128377,), name='ջոյսթիք', slug='ջոյսթիք', annotations=frozenset({'խաղ', 'տեսախաղ'})),
EmojiAnnotations(emoji='🎲', codepoints=(127922,), name='զառ', slug='զառ', annotations=frozenset({'խաղ'})),
EmojiAnnotations(emoji='♠', codepoints=(9824,), name='ղառ', slug='ղառ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♥', codepoints=(9829,), name='սիրտ', slug='սիրտ', annotations=frozenset({'թղթախաղ', 'խաղ', 'սրտեր'})),
EmojiAnnotations(emoji='♦', codepoints=(9830,), name='քյափ', slug='քյափ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♣', codepoints=(9827,), name='խաչ', slug='խաչ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='🃏', codepoints=(127183,), name='ջոկեր', slug='ջոկեր', annotations=frozenset({'թղթախաղ', 'խաղ', 'խաղալ'})),
EmojiAnnotations(emoji='🀄', codepoints=(126980,), name='մաջոնգի կարմիր վիշապ', slug='մաջոնգի_կարմիր_վիշապ', annotations=frozenset({'խաղ', 'մաջոնգ', 'կարմիր'})),
EmojiAnnotations(emoji='🎴', codepoints=(127924,), name='ծաղկի խաղաթղթեր', slug='ծաղկի_խաղաթղթեր', annotations=frozenset({'թղթախաղ', 'ծաղիկ', 'խաղ', 'խաղալ', 'ճապոնական'})),
EmojiAnnotations(emoji='🔇', codepoints=(128263,), name='բարձրախոսն անջատված է', slug='բարձրախոսն_անջատված_է', annotations=frozenset({'լուռ', 'բարձրախոս', 'անջատել ձայնը', 'հանգիստ', 'ձայն'})),
EmojiAnnotations(emoji='🔈', codepoints=(128264,), name='բարձրախոս', slug='բարձրախոս', annotations=frozenset({'ձայնի ուժգնություն', 'ձայն'})),
EmojiAnnotations(emoji='🔉', codepoints=(128265,), name='բարձրախոսը միացված է', slug='բարձրախոսը_միացված_է', annotations=frozenset({'ալիք', 'ցածր', 'բարձրախոս', 'ձայն'})),
EmojiAnnotations(emoji='🔊', codepoints=(128266,), name='բարձրախոսի ձայնը բարձր է', slug='բարձրախոսի_ձայնը_բարձր_է', annotations=frozenset({'բարձր', 'բարձրաձայն', 'երեք', 'ձայն', '3', 'բարձրախոս'})),
EmojiAnnotations(emoji='📢', codepoints=(128226,), name='մեծ բարձրախոս', slug='մեծ_բարձրախոս', annotations=frozenset({'բարձրաձայն', 'հասարակական'})),
EmojiAnnotations(emoji='📯', codepoints=(128239,), name='փոստային եղջյուր', slug='փոստային_եղջյուր', annotations=frozenset({'եղջյուր', 'փոստ', 'փոստային'})),
EmojiAnnotations(emoji='🔕', codepoints=(128277,), name='զանգակ շեղ գծիկով', slug='զանգակ_շեղ_գծիկով', annotations=frozenset({'զանգակ', 'հանգիստ', 'ոչ', 'արգելված', 'լուռ', 'անջատել ձայնը'})),
EmojiAnnotations(emoji='🎼', codepoints=(127932,), name='սոլի բանալի', slug='սոլի_բանալի', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎵', codepoints=(127925,), name='նոտա', slug='նոտա', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎶', codepoints=(127926,), name='նոտաներ', slug='նոտաներ', annotations=frozenset({'նոտա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f399', codepoints=(127897,), name='ստուդիայի խոսափող', slug='ստուդիայի_խոսափող', annotations=frozenset({'խոսափող', 'ստուդիա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39a', codepoints=(127898,), name='ձայնի բարձրության սահոց', slug='ձայնի_բարձրության_սահոց', annotations=frozenset({'մակարդակ', 'սահոց', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39b', codepoints=(127899,), name='կառավարման կոճակներ', slug='կառավարման_կոճակներ', annotations=frozenset({'կոճակներ', 'կառավարել', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎤', codepoints=(127908,), name='խոսափող', slug='խոսափող', annotations=frozenset({'կարաոկե'})),
EmojiAnnotations(emoji='🎷', codepoints=(127927,), name='սաքսոֆոն', slug='սաքսոֆոն', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎸', codepoints=(127928,), name='կիթառ', slug='կիթառ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎹', codepoints=(127929,), name='երաժշտական ստեղնաշար', slug='երաժշտական_ստեղնաշար', annotations=frozenset({'գործիք', 'ստեղնաշար', 'դաշնամուր', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎺', codepoints=(127930,), name='շեփոր', slug='շեփոր', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎻', codepoints=(127931,), name='ջութակ', slug='ջութակ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='📱', codepoints=(128241,), name='բջջային հեռախոս', slug='բջջային_հեռախոս', annotations=frozenset({'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📲', codepoints=(128242,), name='բջջային հեռախոս սլաքով', slug='բջջային_հեռախոս_սլաքով', annotations=frozenset({'հեռախոս', 'զանգել', 'սլաք', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📞', codepoints=(128222,), name='հեռախոսի լսափող', slug='հեռախոսի_լսափող', annotations=frozenset({'լսափող', 'հեռախոս'})),
EmojiAnnotations(emoji='📠', codepoints=(128224,), name='ֆաքսի մեքենա', slug='ֆաքսի_մեքենա', annotations=frozenset({'ֆաքս'})),
EmojiAnnotations(emoji='🔌', codepoints=(128268,), name='էլեկտրական խրոց', slug='էլեկտրական_խրոց', annotations=frozenset({'էլեկտրականություն', 'էլեկտրական', 'խրոց'})),
EmojiAnnotations(emoji='💻', codepoints=(128187,), name='նոթբուք', slug='նոթբուք', annotations=frozenset({'համակարգիչ', 'անձնական'})),
EmojiAnnotations(emoji='\U0001f5a8', codepoints=(128424,), name='տպիչ', slug='տպիչ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='⌨', codepoints=(9000,), name='ստեղնաշար', slug='ստեղնաշար', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='\U0001f5b1', codepoints=(128433,), name='համակարգչի մկնիկ', slug='համակարգչի_մկնիկ', annotations=frozenset({'համակարգիչ', 'մկնիկ', 'կոճակ', 'երեք', '3'})),
EmojiAnnotations(emoji='\U0001f5b2', codepoints=(128434,), name='թրեքբոլ', slug='թրեքբոլ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='💽', codepoints=(128189,), name='մինի սկավառակ', slug='մինի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'օպտիկական'})),
EmojiAnnotations(emoji='💾', codepoints=(128190,), name='ֆլոպի սկավառակ', slug='ֆլոպի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'ֆլոպի'})),
EmojiAnnotations(emoji='💿', codepoints=(128191,), name='օպտիկական սկավառակ', slug='օպտիկական_սկավառակ', annotations=frozenset({'օպտիկական', 'dvd', 'համակարգիչ', 'blu-ray', 'cd', 'սկավառակ'})),
EmojiAnnotations(emoji='📀', codepoints=(128192,), name='dvd', slug='dvd', annotations=frozenset({'համակարգիչ', 'cd', 'սկավառակ', 'օպտիկական', 'blu-ray'})),
EmojiAnnotations(emoji='🎥', codepoints=(127909,), name='ժապավենային տեսախցիկ', slug='ժապավենային_տեսախցիկ', annotations=frozenset({'տեսախցիկ', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='🎬', codepoints=(127916,), name='կինոդուբլների համարացույց', slug='կինոդուբլների_համարացույց', annotations=frozenset({'ֆիլմ', 'կինոդուբլ'})),
EmojiAnnotations(emoji='\U0001f4fd', codepoints=(128253,), name='ժապավենային պրոյեկտոր', slug='ժապավենային_պրոյեկտոր', annotations=frozenset({'պրոյեկտոր', 'ժապավեն', 'կինո', 'ֆիլմ', 'վիդեո'})),
EmojiAnnotations(emoji='📺', codepoints=(128250,), name='հեռուստացույց', slug='հեռուստացույց', annotations=frozenset({'tv', 'վիդեո'})),
EmojiAnnotations(emoji='📷', codepoints=(128247,), name='ֆոտոապարատ', slug='ֆոտոապարատ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='\U0001f4f8', codepoints=(128248,), name='ֆոտոապարատ լուսաթարթիչով', slug='ֆոտոապարատ_լուսաթարթիչով', annotations=frozenset({'լուսաթարթիչ', 'ֆոտոապարատ', 'վիդեո'})),
EmojiAnnotations(emoji='📹', codepoints=(128249,), name='տեսախցիկ', slug='տեսախցիկ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='📼', codepoints=(128252,), name='տեսաերիզ', slug='տեսաերիզ', annotations=frozenset({'երիզ', 'vhs', 'վիդեո'})),
EmojiAnnotations(emoji='🔍', codepoints=(128269,), name='ձախ ուղղված խոշորացույց', slug='ձախ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔎', codepoints=(128270,), name='աջ ուղղված խոշորացույց', slug='աջ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔬', codepoints=(128300,), name='մանրադիտակ', slug='մանրադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔭', codepoints=(128301,), name='հեռադիտակ', slug='հեռադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='📡', codepoints=(128225,), name='արբանյակային ալեհավաք', slug='արբանյակային_ալեհավաք', annotations=frozenset({'ափսե', 'արբանյակ', 'ալեհավաք'})),
EmojiAnnotations(emoji='\U0001f56f', codepoints=(128367,), name='մոմ', slug='մոմ', annotations=frozenset({'լույս'})),
EmojiAnnotations(emoji='💡', codepoints=(128161,), name='էլեկտրական լամպ', slug='էլեկտրական_լամպ', annotations=frozenset({'գաղափար', 'լամպ', 'էլեկտրական', 'լույս', 'կոմիքս'})),
EmojiAnnotations(emoji='🔦', codepoints=(128294,), name='գրպանի լապտեր', slug='գրպանի_լապտեր', annotations=frozenset({'գործիք', 'լապտեր', 'էլեկտրական', 'լույս'})),
EmojiAnnotations(emoji='🏮', codepoints=(127982,), name='թղթե կարմիր լապտեր', slug='թղթե_կարմիր_լապտեր', annotations=frozenset({'լապտեր', 'բար', 'լույս', 'կարմիր', 'ճապոնական'})),
EmojiAnnotations(emoji='📔', codepoints=(128212,), name='ձևավոր կազմով տետր', slug='ձևավոր_կազմով_տետր', annotations=frozenset({'գիրք', 'նոթատետր', 'ձևավորված', 'կազմ'})),
EmojiAnnotations(emoji='📕', codepoints=(128213,), name='փակված գիրք', slug='փակված_գիրք', annotations=frozenset({'գիրք', 'փակված'})),
EmojiAnnotations(emoji='📖', codepoints=(128214,), name='բացված գիրք', slug='բացված_գիրք', annotations=frozenset({'գիրք', 'բացված'})),
EmojiAnnotations(emoji='📗', codepoints=(128215,), name='կանաչ գիրք', slug='կանաչ_գիրք', annotations=frozenset({'գիրք', 'կանաչ'})),
EmojiAnnotations(emoji='📘', codepoints=(128216,), name='կապույտ գիրք', slug='կապույտ_գիրք', annotations=frozenset({'գիրք', 'կապույտ'})),
EmojiAnnotations(emoji='📙', codepoints=(128217,), name='նարնջագույն գիրք', slug='նարնջագույն_գիրք', annotations=frozenset({'գիրք', 'նարնջագույն'})),
EmojiAnnotations(emoji='📚', codepoints=(128218,), name='գրքեր', slug='գրքեր', annotations=frozenset({'գիրք'})),
EmojiAnnotations(emoji='📒', codepoints=(128210,), name='հաշվապահական մատյան', slug='հաշվապահական_մատյան', annotations=frozenset({'նոթատետր'})),
EmojiAnnotations(emoji='📃', codepoints=(128195,), name='կլորացած էջ', slug='կլորացած_էջ', annotations=frozenset({'կլորացած', 'էջ', 'փաստաթուղթ'})),
EmojiAnnotations(emoji='📜', codepoints=(128220,), name='գալարաթուղթ', slug='գալարաթուղթ', annotations=frozenset({'թուղթ'})),
EmojiAnnotations(emoji='📄', codepoints=(128196,), name='էջ', slug='էջ', annotations=frozenset({'փաստաթութղ'})),
EmojiAnnotations(emoji='📰', codepoints=(128240,), name='լրագիր', slug='լրագիր', annotations=frozenset({'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='\U0001f5de', codepoints=(128478,), name='կլորացրած լրագիր', slug='կլորացրած_լրագիր', annotations=frozenset({'լրագիր', 'կլորացրած', 'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='📑', codepoints=(128209,), name='էջանիշ ներդիրներ', slug='էջանիշ_ներդիրներ', annotations=frozenset({'նշել', 'էջանիշ', 'ներդիր', 'նշիչ'})),
EmojiAnnotations(emoji='🔖', codepoints=(128278,), name='էջանիշ', slug='էջանիշ', annotations=frozenset({'նշել'})),
EmojiAnnotations(emoji='💰', codepoints=(128176,), name='փողի պարկ', slug='փողի_պարկ', annotations=frozenset({'դոլար', 'փող', 'պարկ'})),
EmojiAnnotations(emoji='💴', codepoints=(128180,), name='իեն թղթադրամ', slug='իեն_թղթադրամ', annotations=frozenset({'բանկ', 'իեն', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💵', codepoints=(128181,), name='դոլար թղթադրամ', slug='դոլար_թղթադրամ', annotations=frozenset({'բանկ', 'դոլար', 'տարադրամ', 'փող', 'թղթադրամ'})),
EmojiAnnotations(emoji='💶', codepoints=(128182,), name='եվրո թղթադրամ', slug='եվրո_թղթադրամ', annotations=frozenset({'բանկ', 'եվրո', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💷', codepoints=(128183,), name='ֆունտ թղթադրամ', slug='ֆունտ_թղթադրամ', annotations=frozenset({'բանկ', 'փող', 'տարադրամ', 'ֆունտ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💸', codepoints=(128184,), name='փող թևերով', slug='փող_թևերով', annotations=frozenset({'թղթադրամ', 'դոլար', 'բանկ', 'փող', 'թռչել', 'թևեր'})),
EmojiAnnotations(emoji='💳', codepoints=(128179,), name='պլաստիկ քարտ', slug='պլաստիկ_քարտ', annotations=frozenset({'բանկ', 'վարկ', 'փող', 'քարտ'})),
EmojiAnnotations(emoji='💹', codepoints=(128185,), name='աճող դիագրամ իենով', slug='աճող_դիագրամ_իենով', annotations=frozenset({'իեն', 'վերև', 'միտում', 'բանկ', 'փող', 'տարրադրամ', 'գրաֆիկ', 'շուկա', 'բարձրանալ', 'դիագրամ', 'աճ'})),
EmojiAnnotations(emoji='✉', codepoints=(9993,), name='ծրար', slug='ծրար', annotations=frozenset({'էլփոտ', 'նամակ'})),
EmojiAnnotations(emoji='📧', codepoints=(128231,), name='էլեկտրոնային նամակ', slug='էլեկտրոնային_նամակ', annotations=frozenset({'փոստ', 'նամակ', 'էլփոստ'})),
EmojiAnnotations(emoji='📨', codepoints=(128232,), name='ստացվող ծրար', slug='ստացվող_ծրար', annotations=frozenset({'փոստ', 'ստանալ', 'ծրար', 'նամակ', 'էլփոստ', 'ստացվող'})),
EmojiAnnotations(emoji='📩', codepoints=(128233,), name='ծրար սլաքով', slug='ծրար_սլաքով', annotations=frozenset({'փոստ', 'ուղարկված', 'ծրար', 'նամակ', 'էլփոստ', 'ուղարկվող', 'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='📤', codepoints=(128228,), name='ելքի արկղ', slug='ելքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'ուղարկված', 'արկղ'})),
EmojiAnnotations(emoji='📥', codepoints=(128229,), name='մուտքի արկղ', slug='մուտքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'արկղ', 'ստանալ'})),
EmojiAnnotations(emoji='📦', codepoints=(128230,), name='ծանրոց', slug='ծանրոց', annotations=frozenset({'արկղ'})),
EmojiAnnotations(emoji='📫', codepoints=(128235,), name='փակ փոստարկղ բարձրացված դրոշակով', slug='փակ_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📪', codepoints=(128234,), name='փակ փոստարկղ իջեցված դրոշակով', slug='փակ_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📬', codepoints=(128236,), name='բաց փոստարկղ բարձրացված դրոշակով', slug='բաց_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📭', codepoints=(128237,), name='բաց փոստարկղ իջեցված դրոշակով', slug='բաց_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📮', codepoints=(128238,), name='փոստատուփ', slug='փոստատուփ', annotations=frozenset({'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='\U0001f5f3', codepoints=(128499,), name='քվեատուփ քվեաթերթիկով', slug='քվեատուփ_քվեաթերթիկով', annotations=frozenset({'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✒', codepoints=(10002,), name='սև գրչածայր', slug='սև_գրչածայր', annotations=frozenset({'գրչածայր', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58b', codepoints=(128395,), name='ինքնահոս գրիչ', slug='ինքնահոս_գրիչ', annotations=frozenset({'ինքնահոս', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58a', codepoints=(128394,), name='գրիչ', slug='գրիչ', annotations=frozenset({'գնդիկավոր գրիչ'})),
EmojiAnnotations(emoji='\U0001f58c', codepoints=(128396,), name='վրձին', slug='վրձին', annotations=frozenset({'ներկել', 'նկարել'})),
EmojiAnnotations(emoji='\U0001f58d', codepoints=(128397,), name='մոմամատիտ', slug='մոմամատիտ', annotations=frozenset({'գունավոր մատիտ'})),
EmojiAnnotations(emoji='📝', codepoints=(128221,), name='հուշաթերթ', slug='հուշաթերթ', annotations=frozenset({'մատիտ'})),
EmojiAnnotations(emoji='📁', codepoints=(128193,), name='թղթապանակ', slug='թղթապանակ', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='📂', codepoints=(128194,), name='բաց թղթապանակ', slug='բաց_թղթապանակ', annotations=frozenset({'բաց', 'թղթապանակ', 'ֆայլ'})),
EmojiAnnotations(emoji='\U0001f5c2', codepoints=(128450,), name='քարտադարանի բաժանարարներ', slug='քարտադարանի_բաժանարարներ', annotations=frozenset({'ինդեքս', 'բաժանարար', 'քարտ'})),
EmojiAnnotations(emoji='📅', codepoints=(128197,), name='օրացույց', slug='օրացույց', annotations=frozenset({'ամսաթիվ'})),
EmojiAnnotations(emoji='📆', codepoints=(128198,), name='պոկովի օրացույց', slug='պոկովի_օրացույց', annotations=frozenset({'օրացույց'})),
EmojiAnnotations(emoji='\U0001f5d2', codepoints=(128466,), name='պարուրավոր նոթատետր', slug='պարուրավոր_նոթատետր', annotations=frozenset({'գրքույկ', 'տետր', 'պարույր'})),
EmojiAnnotations(emoji='\U0001f5d3', codepoints=(128467,), name='պարուրավոր օրացույց', slug='պարուրավոր_օրացույց', annotations=frozenset({'օրացույց', 'գրքույկ', 'պարույր'})),
EmojiAnnotations(emoji='📇', codepoints=(128199,), name='քարտադարան', slug='քարտադարան', annotations=frozenset({'ինդեքս', 'քարտ'})),
EmojiAnnotations(emoji='📈', codepoints=(128200,), name='աճող դիագրամ', slug='աճող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'դիագրամ', 'վեր', 'աճ', 'միտում'})),
EmojiAnnotations(emoji='📉', codepoints=(128201,), name='նվազող դիագրամ', slug='նվազող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'ներքև', 'դիագրամ', 'միտում'})),
EmojiAnnotations(emoji='📊', codepoints=(128202,), name='գոտեձև գծապատկեր', slug='գոտեձև_գծապատկեր', annotations=frozenset({'գոտի', 'գրաֆիկ', 'դիագրամ'})),
EmojiAnnotations(emoji='📍', codepoints=(128205,), name='գնդասեղ', slug='գնդասեղ', annotations=frozenset({'քորոց'})),
EmojiAnnotations(emoji='\U0001f587', codepoints=(128391,), name='միացված սկրեպներ', slug='միացված_սկրեպներ', annotations=frozenset({'միացնել', 'սկրեպ'})),
EmojiAnnotations(emoji='📏', codepoints=(128207,), name='քանոն', slug='քանոն', annotations=frozenset({'ուղղանկյուն'})),
EmojiAnnotations(emoji='📐', codepoints=(128208,), name='եռանկյունի քանոն', slug='եռանկյունի_քանոն', annotations=frozenset({'եռանկյունի', 'քանոն'})),
EmojiAnnotations(emoji='✂', codepoints=(9986,), name='մկրատ', slug='մկրատ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5c3', codepoints=(128451,), name='քարտադարանի արկղ', slug='քարտադարանի_արկղ', annotations=frozenset({'ֆայլ', 'արկղ', 'քարտ'})),
EmojiAnnotations(emoji='\U0001f5c4', codepoints=(128452,), name='պահարան', slug='պահարան', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='🔒', codepoints=(128274,), name='կողպեք', slug='կողպեք', annotations=frozenset({'փակ'})),
EmojiAnnotations(emoji='🔓', codepoints=(128275,), name='բաց կողպեք', slug='բաց_կողպեք', annotations=frozenset({'բաց', 'ապակողպել', 'կողպեք'})),
EmojiAnnotations(emoji='🔏', codepoints=(128271,), name='կողպեք ինքնահոսով', slug='կողպեք_ինքնահոսով', annotations=frozenset({'գրչածայր', 'գաղտնիություն', 'կողպեք', 'թանաք', 'գրիչ'})),
EmojiAnnotations(emoji='🔐', codepoints=(128272,), name='փակ կողպեք բանալիով', slug='փակ_կողպեք_բանալիով', annotations=frozenset({'ապահով', 'փակ', 'բնալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔑', codepoints=(128273,), name='բանալի', slug='բանալի', annotations=frozenset({'գաղտնաբառ', 'կողպեք'})),
EmojiAnnotations(emoji='\U0001f5dd', codepoints=(128477,), name='հին բանալի', slug='հին_բանալի', annotations=frozenset({'հին', 'բանալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔨', codepoints=(128296,), name='մուրճ', slug='մուրճ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='⛏', codepoints=(9935,), name='քլունգ', slug='քլունգ', annotations=frozenset({'գործիք', 'հանք'})),
EmojiAnnotations(emoji='⚒', codepoints=(9874,), name='մուրճեր', slug='մուրճեր', annotations=frozenset({'գործիք', 'մուրճ'})),
EmojiAnnotations(emoji='\U0001f6e0', codepoints=(128736,), name='մուրճ և պտուտակաբանալի', slug='մուրճ_և_պտուտակաբանալի', annotations=frozenset({'գործիք', 'պտուտակաբանալի', 'մուրճ'})),
EmojiAnnotations(emoji='🔧', codepoints=(128295,), name='պտուտակաբանալի', slug='պտուտակաբանալի', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔩', codepoints=(128297,), name='մանեկ ու հեղույս', slug='մանեկ_ու_հեղույս', annotations=frozenset({'մանեկ', 'գործիք', 'հեղույս'})),
EmojiAnnotations(emoji='⚙', codepoints=(9881,), name='ատամնանիվ', slug='ատամնանիվ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5dc', codepoints=(128476,), name='մամլակ', slug='մամլակ', annotations=frozenset({'մամլիչ'})),
EmojiAnnotations(emoji='⚗', codepoints=(9879,), name='թորիչ', slug='թորիչ', annotations=frozenset({'քիմիա', 'գործիք'})),
EmojiAnnotations(emoji='⚖', codepoints=(9878,), name='նժարավոր կշեռք', slug='նժարավոր_կշեռք', annotations=frozenset({'հավասարակշռություն', 'կշեռք', 'գործիք', 'ծանրություն', 'արդարություն', 'կենդանակերպ'})),
EmojiAnnotations(emoji='⛓', codepoints=(9939,), name='շղթաներ', slug='շղթաներ', annotations=frozenset({'շղթա'})),
EmojiAnnotations(emoji='💉', codepoints=(128137,), name='ներարկիչ', slug='ներարկիչ', annotations=frozenset({'գործիք', 'հիվանդ', 'բժշկություն', 'ասեղ', 'բժիշկ'})),
EmojiAnnotations(emoji='💊', codepoints=(128138,), name='դեղահաբ', slug='դեղահաբ', annotations=frozenset({'հիվանդ', 'բժշկություն', 'բժիշկ'})),
EmojiAnnotations(emoji='\U0001f5e1', codepoints=(128481,), name='դաշույն', slug='դաշույն', annotations=frozenset({'զենք', 'դանակ'})),
EmojiAnnotations(emoji='🔪', codepoints=(128298,), name='խոհանոցային դանակ', slug='խոհանոցային_դանակ', annotations=frozenset({'գործիք', 'եփել', 'խոհարար', 'դանակ', 'զենք'})),
EmojiAnnotations(emoji='⚔', codepoints=(9876,), name='խաչված սրեր', slug='խաչված_սրեր', annotations=frozenset({'սրեր', 'խաչված', 'զենք'})),
EmojiAnnotations(emoji='🔫', codepoints=(128299,), name='ատրճանակ', slug='ատրճանակ', annotations=frozenset({'գործիք', 'զենք'})),
EmojiAnnotations(emoji='\U0001f6e1', codepoints=(128737,), name='վահան', slug='վահան', annotations=frozenset({'զենք'})),
EmojiAnnotations(emoji='\U0001f3f9', codepoints=(127993,), name='նետ ու աղեղ', slug='նետ_ու_աղեղ', annotations=frozenset({'գործիք', 'նետ', 'աղեղնավոր', 'զենք', 'աղեղ'})),
EmojiAnnotations(emoji='🏁', codepoints=(127937,), name='վանդակավոր դրոշ', slug='վանդակավոր_դրոշ', annotations=frozenset({'մրցարշավ', 'վանդակավոր'})),
EmojiAnnotations(emoji='\U0001f3f3', codepoints=(127987,), name='ծածանվող սպիտակ դրոշ', slug='ծածանվող_սպիտակ_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='\U0001f3f4', codepoints=(127988,), name='ծածանվող սև դրոշ', slug='ծածանվող_սև_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='🚩', codepoints=(128681,), name='եռանկյունի դրոշ', slug='եռանկյունի_դրոշ', annotations=frozenset({'փոստ'})),
EmojiAnnotations(emoji='⚰', codepoints=(9904,), name='դագաղ', slug='դագաղ', annotations=frozenset({'մահ'})),
EmojiAnnotations(emoji='⚱', codepoints=(9905,), name='աճյունասափոր', slug='աճյունասափոր', annotations=frozenset({'հուղարկավորություն', 'մահ'})),
EmojiAnnotations(emoji='🗿', codepoints=(128511,), name='մոաի', slug='մոաի', annotations=frozenset({'դեմք', 'մոայի', 'արձան'})),
EmojiAnnotations(emoji='\U0001f6e2', codepoints=(128738,), name='նավթի տակառ', slug='նավթի_տակառ', annotations=frozenset({'տակառ', 'նավթ'})),
EmojiAnnotations(emoji='🔮', codepoints=(128302,), name='բյուրեղյա գունդ', slug='բյուրեղյա_գունդ', annotations=frozenset({'բյուրեղ', 'բախտ', 'գործիք', 'հեքիաթ', 'ֆանտազիա', 'գունդ'})),
EmojiAnnotations(emoji='🏧', codepoints=(127975,), name='բանկոմատի նշան', slug='բանկոմատի_նշան', annotations=frozenset({'բանկ', 'գանձապահ', 'atm', 'բանկոմատ'})),
EmojiAnnotations(emoji='🚮', codepoints=(128686,), name='աղբամանի նշան', slug='աղբամանի_նշան', annotations=frozenset({'աղբ', 'աղբարկղ'})),
EmojiAnnotations(emoji='🚰', codepoints=(128688,), name='խմելու ջուր', slug='խմելու_ջուր', annotations=frozenset({'խմելու', 'խմել', 'ջուր'})),
EmojiAnnotations(emoji='♿', codepoints=(9855,), name='անվասայլակ', slug='անվասայլակ', annotations=frozenset({'մատչելիություն'})),
EmojiAnnotations(emoji='🚹', codepoints=(128697,), name='տղամարդկանց զուգարան', slug='տղամարդկանց_զուգարան', annotations=frozenset({'wc', 'տղամարդ', 'զուգարան'})),
EmojiAnnotations(emoji='🚺', codepoints=(128698,), name='կանանց զուգարան', slug='կանանց_զուգարան', annotations=frozenset({'wc', 'կին', 'զուգարան'})),
EmojiAnnotations(emoji='🚻', codepoints=(128699,), name='ընդհանուր զուգարան', slug='ընդհանուր_զուգարան', annotations=frozenset({'wc', 'զուգարան'})),
EmojiAnnotations(emoji='🚼', codepoints=(128700,), name='նորածնի նշան', slug='նորածնի_նշան', annotations=frozenset({'նորածին', 'փոխել'})),
EmojiAnnotations(emoji='🚾', codepoints=(128702,), name='զուգարան', slug='զուգարան', annotations=frozenset({'wc', 'ջուր'})),
EmojiAnnotations(emoji='🛂', codepoints=(128706,), name='անձնագրային ստուգում', slug='անձնագրային_ստուգում', annotations=frozenset({'անձնագիր', 'ստուգում'})),
EmojiAnnotations(emoji='🛄', codepoints=(128708,), name='ուղեբեռի վերաբերյալ բողոք', slug='ուղեբեռի_վերաբերյալ_բողոք', annotations=frozenset({'ուղեբեռ', 'բողոք'})),
EmojiAnnotations(emoji='🛅', codepoints=(128709,), name='ուղեբեռ պահախցում', slug='ուղեբեռ_պահախցում', annotations=frozenset({'ուղեբեռ', 'բեռ', 'պահարան'})),
EmojiAnnotations(emoji='🚸', codepoints=(128696,), name='ճանապարհը հատող երեխաներ', slug='ճանապարհը_հատող_երեխաներ', annotations=frozenset({'երեխա', 'երթևեկություն', 'հատող', 'հետիոտն'})),
EmojiAnnotations(emoji='⛔', codepoints=(9940,), name='մուտք չկա', slug='մուտք_չկա', annotations=frozenset({'ոչ', 'արգելված', 'երթևեկություն', 'մուտք'})),
EmojiAnnotations(emoji='🚫', codepoints=(128683,), name='արգելված է', slug='արգելված_է', annotations=frozenset({'ոչ', 'արգելված', 'մուտք'})),
EmojiAnnotations(emoji='🚳', codepoints=(128691,), name='հեծանիվների մուտքն արգելված է', slug='հեծանիվների_մուտքն_արգելված_է', annotations=frozenset({'փոխադրամիջոց', 'արգելված', 'հեծանիվ', 'ոչ'})),
EmojiAnnotations(emoji='🚭', codepoints=(128685,), name='չծխել', slug='չծխել', annotations=frozenset({'ծխել', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚯', codepoints=(128687,), name='չաղտոտել', slug='չաղտոտել', annotations=frozenset({'աղբ', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚱', codepoints=(128689,), name='խմելու ջուր չէ', slug='խմելու_ջուր_չէ', annotations=frozenset({'արգելված', 'խմելու', 'խմել', 'ջուր', 'ոչ'})),
EmojiAnnotations(emoji='🚷', codepoints=(128695,), name='հետիոտնների մուտքն արգելված է', slug='հետիոտնների_մուտքն_արգելված_է', annotations=frozenset({'հետիոտն', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='⬆', codepoints=(11014,), name='վերև սլաք', slug='վերև_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'հյուսիս'})),
EmojiAnnotations(emoji='↗', codepoints=(8599,), name='վերև աջ սլաք', slug='վերև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևելք'})),
EmojiAnnotations(emoji='➡', codepoints=(10145,), name='աջ սլաք', slug='աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'արևելք'})),
EmojiAnnotations(emoji='↘', codepoints=(8600,), name='ներքև աջ սլաք', slug='ներքև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հարավ-արևելք'})),
EmojiAnnotations(emoji='⬇', codepoints=(11015,), name='ներքև սլաք', slug='ներքև_սլաք', annotations=frozenset({'ներքև', 'ուղղություն', 'սլաք', 'գլխավոր', 'հարավ'})),
EmojiAnnotations(emoji='↙', codepoints=(8601,), name='ներքև ձախ սլաք', slug='ներքև_ձախ_սլաք', annotations=frozenset({'հարավ-արևմուտք', 'ուղղություն', 'սլաք'})),
EmojiAnnotations(emoji='⬅', codepoints=(11013,), name='ձախ սլաք', slug='ձախ_սլաք', annotations=frozenset({'արևմուտք', 'ուղղություն', 'սլաք', 'գլխավոր'})),
EmojiAnnotations(emoji='↖', codepoints=(8598,), name='վերև ձախ սլաք', slug='վերև_ձախ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևմուտք'})),
EmojiAnnotations(emoji='↕', codepoints=(8597,), name='վերև-ներքև սլաք', slug='վերև_ներքև_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↔', codepoints=(8596,), name='աջ-ձախ սլաք', slug='աջ_ձախ_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↩', codepoints=(8617,), name='աջ շրջադարձի սլաք', slug='աջ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↪', codepoints=(8618,), name='ձախ շրջադարձի սլաք', slug='ձախ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤴', codepoints=(10548,), name='ձախից վերև թեքվող սլաք', slug='ձախից_վերև_թեքվող_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤵', codepoints=(10549,), name='ձախից ներքև թեքվող սլաք', slug='ձախից_ներքև_թեքվող_սլաք', annotations=frozenset({'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='🔃', codepoints=(128259,), name='ժամասլաքի ուղղությամբ ուղղահայաց սլաքներ', slug='ժամասլաքի_ուղղությամբ_ուղղահայաց_սլաքներ', annotations=frozenset({'վերաբեռնել', 'ժամասլաքի ուղղությամբ', 'սլաք'})),
EmojiAnnotations(emoji='🔄', codepoints=(128260,), name='ժամասլաքին հակառակ ուղղությամբ սլաքներով կոճակ', slug='ժամասլաքին_հակառակ_ուղղությամբ_սլաքներով_կոճակ', annotations=frozenset({'հակառակ ուղղությամբ', 'սլաք', 'ժամասլաքին հակառակ ուղղությամբ'})),
EmojiAnnotations(emoji='🔙', codepoints=(128281,), name='հետ գրությամբ սլաք', slug='հետ_գրությամբ_սլաք', annotations=frozenset({'հետ', 'սլաք'})),
EmojiAnnotations(emoji='🔚', codepoints=(128282,), name='վերջ գրությամբ սլաք', slug='վերջ_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերջ'})),
EmojiAnnotations(emoji='🔛', codepoints=(128283,), name='միացված է գրությամբ սլաք', slug='միացված_է_գրությամբ_սլաք', annotations=frozenset({'նշան', 'սլաք', 'միացված է'})),
EmojiAnnotations(emoji='🔜', codepoints=(128284,), name='շուտով գրությամբ սլաք', slug='շուտով_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'շուտով'})),
EmojiAnnotations(emoji='🔝', codepoints=(128285,), name='վերև գրությամբ սլաք', slug='վերև_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերև', 'վեր'})),
EmojiAnnotations(emoji='\U0001f6d0', codepoints=(128720,), name='աղոթատեղի', slug='աղոթատեղի', annotations=frozenset({'պաշտամունք', 'կրոն'})),
EmojiAnnotations(emoji='⚛', codepoints=(9883,), name='ատոմի նշան', slug='ատոմի_նշան', annotations=frozenset({'ատոմ', 'աթեիստ'})),
EmojiAnnotations(emoji='\U0001f549', codepoints=(128329,), name='օմ', slug='օմ', annotations=frozenset({'կրոն', 'հնդիկ'})),
EmojiAnnotations(emoji='✡', codepoints=(10017,), name='դավթի աստղ', slug='դավթի_աստղ', annotations=frozenset({'դավիթ', 'հրեա', 'հրեական', 'կրոն', 'աստղ'})),
EmojiAnnotations(emoji='☸', codepoints=(9784,), name='դհարմայի անիվ', slug='դհարմայի_անիվ', annotations=frozenset({'դհարմա', 'անիվ', 'բուդդիստ', 'կրոն'})),
EmojiAnnotations(emoji='☯', codepoints=(9775,), name='ին և յան', slug='ին_և_յան', annotations=frozenset({'յին', 'դաո', 'դաոսիստ', 'կրոն', 'յան'})),
EmojiAnnotations(emoji='✝', codepoints=(10013,), name='լատինական խաչ', slug='լատինական_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☦', codepoints=(9766,), name='ուղղափառ խաչ', slug='ուղղափառ_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☪', codepoints=(9770,), name='աստղ և մահիկ', slug='աստղ_և_մահիկ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='☮', codepoints=(9774,), name='խաղաղության նշան', slug='խաղաղության_նշան', annotations=frozenset({'խաղաղություն'})),
EmojiAnnotations(emoji='\U0001f54e', codepoints=(128334,), name='մենորա', slug='մենորա', annotations=frozenset({'մոմակալ', 'աշտանակ', 'կրոն'})),
EmojiAnnotations(emoji='🔯', codepoints=(128303,), name='կետիկով վեցթևանի աստղ', slug='կետիկով_վեցթևանի_աստղ', annotations=frozenset({'բախտ', 'աստղ'})),
EmojiAnnotations(emoji='♻', codepoints=(9851,), name='վերամշակման նշան', slug='վերամշակման_նշան', annotations=frozenset({'վերամշակել'})),
EmojiAnnotations(emoji='📛', codepoints=(128219,), name='բեյջ', slug='բեյջ', annotations=frozenset({'անուն'})),
EmojiAnnotations(emoji='🔰', codepoints=(128304,), name='սկսնակ լինելու ճապոնական նշան', slug='սկսնակ_լինելու_ճապոնական_նշան', annotations=frozenset({'հեծանակ', 'սկսնակ', 'գործիք', 'դեղին', 'տերև', 'ճապոնական', 'կանաչ'})),
EmojiAnnotations(emoji='🔱', codepoints=(128305,), name='եռաժանի խորհրդանշան', slug='եռաժանի_խորհրդանշան', annotations=frozenset({'գործիք', 'եռաժանի', 'նավ', 'խարիսխ', 'զինանշան'})),
EmojiAnnotations(emoji='⭕', codepoints=(11093,), name='մեծ թավ շրջան', slug='մեծ_թավ_շրջան', annotations=frozenset({'օ', 'շրջան'})),
EmojiAnnotations(emoji='✅', codepoints=(9989,), name='սպիտակ թավ ստուգանշան', slug='սպիտակ_թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='☑', codepoints=(9745,), name='վանդակ ստուգանշանով', slug='վանդակ_ստուգանշանով', annotations=frozenset({'նշել', 'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✔', codepoints=(10004,), name='թավ ստուգանշան', slug='թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='✖', codepoints=(10006,), name='բազմապատկման թավ նշան', slug='բազմապատկման_թավ_նշան', annotations=frozenset({'բազմապատկում', 'x', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❌', codepoints=(10060,), name='խաչի նշան', slug='խաչի_նշան', annotations=frozenset({'բազմապատկում', 'x', 'նշել', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❎', codepoints=(10062,), name='խաչի նշանով կոճակ', slug='խաչի_նշանով_կոճակ', annotations=frozenset({'նշել', 'քառակուսի'})),
EmojiAnnotations(emoji='➕', codepoints=(10133,), name='գումարման թավ նշան', slug='գումարման_թավ_նշան', annotations=frozenset({'պլյուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➖', codepoints=(10134,), name='հանման թավ նշան', slug='հանման_թավ_նշան', annotations=frozenset({'մինուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➗', codepoints=(10135,), name='բաժանման թավ նշան', slug='բաժանման_թավ_նշան', annotations=frozenset({'բաժանում', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➰', codepoints=(10160,), name='ոլորուն հանգույց', slug='ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'հանգույց'})),
EmojiAnnotations(emoji='➿', codepoints=(10175,), name='կրկնակի ոլորուն հանգույց', slug='կրկնակի_ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'կրկնակի', 'հանգույց'})),
EmojiAnnotations(emoji='〽', codepoints=(12349,), name='իորիտեն', slug='իորիտեն', annotations=frozenset({'նշել', 'մաս'})),
EmojiAnnotations(emoji='✳', codepoints=(10035,), name='ութ թևանի աստղանիշ', slug='ութ_թևանի_աստղանիշ', annotations=frozenset({'աստղանիշ'})),
EmojiAnnotations(emoji='✴', codepoints=(10036,), name='աստղիկ', slug='աստղիկ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='💱', codepoints=(128177,), name='տարադրամի փոխանակում', slug='տարադրամի_փոխանակում', annotations=frozenset({'բանկ', 'փոխանակում', 'փող', 'տարադրամ'})),
EmojiAnnotations(emoji='💲', codepoints=(128178,), name='դոլարի թավ նշան', slug='դոլարի_թավ_նշան', annotations=frozenset({'դոլար', 'տարադրամ', 'փող'})),
EmojiAnnotations(emoji='‼', codepoints=(8252,), name='կրկնակի բացականչական նշան', slug='կրկնակի_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='⁉', codepoints=(8265,), name='բացականչական հարցական նշան', slug='բացականչական_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'բացականչություն'})),
EmojiAnnotations(emoji='❓', codepoints=(10067,), name='հարցական նշան', slug='հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց'})),
EmojiAnnotations(emoji='❔', codepoints=(10068,), name='սպիտակ հարցական նշան', slug='սպիտակ_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'ուրվագծված'})),
EmojiAnnotations(emoji='❕', codepoints=(10069,), name='սպիտակ բացականչական նշան', slug='սպիտակ_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'ուրվագծված', 'բացականչություն'})),
EmojiAnnotations(emoji='❗', codepoints=(10071,), name='բացականչական նշան', slug='բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='〰', codepoints=(12336,), name='ալիքաձև գծիկ', slug='ալիքաձև_գծիկ', annotations=frozenset({'ալիքաձև', 'կետադրություն', 'գծիկ'})),
EmojiAnnotations(emoji='™', codepoints=(8482,), name='ապրանքանիշ', slug='ապրանքանիշ', annotations=frozenset({'նշան', 'tm'})),
EmojiAnnotations(emoji='♈', codepoints=(9800,), name='խոյ', slug='խոյ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♉', codepoints=(9801,), name='ցուլ', slug='ցուլ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♊', codepoints=(9802,), name='երկվորյակներ', slug='երկվորյակներ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♋', codepoints=(9803,), name='խեցգետին', slug='խեցգետին', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♌', codepoints=(9804,), name='առյուծ', slug='առյուծ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♍', codepoints=(9805,), name='կույս', slug='կույս', annotations=frozenset({'օրիորդ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♎', codepoints=(9806,), name='կշեռք', slug='կշեռք', annotations=frozenset({'արդարադատություն', 'կենդանակերպ', 'հավասարակշռություն'})),
EmojiAnnotations(emoji='♏', codepoints=(9807,), name='կարիճ', slug='կարիճ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♐', codepoints=(9808,), name='աղեղնավոր', slug='աղեղնավոր', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♑', codepoints=(9809,), name='այծեղջյուր', slug='այծեղջյուր', annotations=frozenset({'այծ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♒', codepoints=(9810,), name='ջրհոս', slug='ջրհոս', annotations=frozenset({'կենդանակերպ', 'կրող', 'ջուր'})),
EmojiAnnotations(emoji='♓', codepoints=(9811,), name='ձկներ', slug='ձկներ', annotations=frozenset({'կենդանակերպ', 'ձուկ'})),
EmojiAnnotations(emoji='⛎', codepoints=(9934,), name='օձակիր', slug='օձակիր', annotations=frozenset({'օձ', 'կենդանակերպ', 'կրող'})),
EmojiAnnotations(emoji='🔀', codepoints=(128256,), name='խառնել կատարումները կոճակ', slug='խառնել_կատարումները_կոճակ', annotations=frozenset({'խաչված', 'սլաք'})),
EmojiAnnotations(emoji='🔁', codepoints=(128257,), name='անընդհատ կրկնել կոճակ', slug='անընդհատ_կրկնել_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'կրկնել'})),
EmojiAnnotations(emoji='🔂', codepoints=(128258,), name='կրկնել մեկ անգամ կոճակ', slug='կրկնել_մեկ_անգամ_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'մեկ անգամ'})),
EmojiAnnotations(emoji='▶', codepoints=(9654,), name='նվագարկել կոճակ', slug='նվագարկել_կոճակ', annotations=frozenset({'նվագարկել', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='⏩', codepoints=(9193,), name='արագ առաջ կոճակ', slug='արագ_առաջ_կոճակ', annotations=frozenset({'արագ', 'սլաք', 'կրկնակի', 'առաջ'})),
EmojiAnnotations(emoji='⏭', codepoints=(9197,), name='հաջորդ կատարումը կոճակ', slug='հաջորդ_կատարումը_կոճակ', annotations=frozenset({'հաջորդ տեսարանը', 'եռանկյուն', 'սլաք', 'հաջորդ կատարումը'})),
EmojiAnnotations(emoji='⏯', codepoints=(9199,), name='նվագարկել կամ դադար կոճակ', slug='նվագարկել_կամ_դադար_կոճակ', annotations=frozenset({'նվագարկել', 'դադար', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='◀', codepoints=(9664,), name='հետադարձել կոճակ', slug='հետադարձել_կոճակ', annotations=frozenset({'ձախ', 'եռանկյուն', 'սլաք', 'հետադարձել'})),
EmojiAnnotations(emoji='⏪', codepoints=(9194,), name='արագ հետադարձել կոճակ', slug='արագ_հետադարձել_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի', 'հետադարձել'})),
EmojiAnnotations(emoji='⏮', codepoints=(9198,), name='վերջին կատարումը կոճակ', slug='վերջին_կատարումը_կոճակ', annotations=frozenset({'նախորդ տեսարանը', 'նախորդ կատարումը', 'սլաք', 'եռանկյուն'})),
EmojiAnnotations(emoji='🔼', codepoints=(128316,), name='վերև կոճակ', slug='վերև_կոճակ', annotations=frozenset({'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏫', codepoints=(9195,), name='արագ վերև կոճակ', slug='արագ_վերև_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='🔽', codepoints=(128317,), name='ներքև կոճակ', slug='ներքև_կոճակ', annotations=frozenset({'ներքև', 'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏬', codepoints=(9196,), name='արագ ներքև կոճակ', slug='արագ_ներքև_կոճակ', annotations=frozenset({'ներքև', 'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='\u23f8', codepoints=(9208,), name='դադար կոճակ', slug='դադար_կոճակ', annotations=frozenset({'գծեր', 'դադար', 'կրկնակի', 'ուղղահայաց'})),
EmojiAnnotations(emoji='\u23f9', codepoints=(9209,), name='ստոպ կոճակ', slug='ստոպ_կոճակ', annotations=frozenset({'ստոպ', 'քառակուսի'})),
EmojiAnnotations(emoji='\u23fa', codepoints=(9210,), name='ձայնագրել կոճակ', slug='ձայնագրել_կոճակ', annotations=frozenset({'ձայնագրել', 'շրջան'})),
EmojiAnnotations(emoji='⏏', codepoints=(9167,), name='դուրս հանել կոճակ', slug='դուրս_հանել_կոճակ', annotations=frozenset({'դուրս հանել'})),
EmojiAnnotations(emoji='🎦', codepoints=(127910,), name='կինոմատոգրաֆիա', slug='կինոմատոգրաֆիա', annotations=frozenset({'տեսախցիկ', 'ժապավեն', 'ֆիլմ'})),
EmojiAnnotations(emoji='🔅', codepoints=(128261,), name='մթեցնել կոճակ', slug='մթեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'թույլ', 'մթեցնել'})),
EmojiAnnotations(emoji='🔆', codepoints=(128262,), name='պայծառեցնել կոճակ', slug='պայծառեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'պայծառ'})),
EmojiAnnotations(emoji='📶', codepoints=(128246,), name='անտենայի գծիկներ', slug='անտենայի_գծիկներ', annotations=frozenset({'գծիկ', 'ազդանշան', 'հեռախոս', 'շարժական', 'անտենա', 'բջջային'})),
EmojiAnnotations(emoji='📵', codepoints=(128245,), name='բջջային հեռախոսներն արգելվում են', slug='բջջային_հեռախոսներն_արգելվում_են', annotations=frozenset({'ոչ', 'արգելված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📳', codepoints=(128243,), name='թրթռազանգի ռեժիմ', slug='թրթռազանգի_ռեժիմ', annotations=frozenset({'ռեժիմ', 'թրթռում', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📴', codepoints=(128244,), name='բջջայինն անջատված է', slug='բջջայինն_անջատված_է', annotations=frozenset({'անջատված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='{#⃣}', codepoints=(123, 35, 8419, 125), name='ստեղն վանդականիշ', slug='ստեղն_վանդականիշ', annotations=frozenset({'ստեղն', 'վանդականիշ', 'ֆունտ'})),
EmojiAnnotations(emoji='{*⃣}', codepoints=(123, 42, 8419, 125), name='ստեղն աստղանիշ', slug='ստեղն_աստղանիշ', annotations=frozenset({'ստեղն', 'աստղանիշ', 'աստղ'})),
EmojiAnnotations(emoji='{0⃣}', codepoints=(123, 48, 8419, 125), name='ստեղն զրո', slug='ստեղն_զրո', annotations=frozenset({'0', 'ստեղն', 'զրո'})),
EmojiAnnotations(emoji='{1⃣}', codepoints=(123, 49, 8419, 125), name='ստեղն մեկ', slug='ստեղն_մեկ', annotations=frozenset({'ստեղն', 'մեկ', '1'})),
EmojiAnnotations(emoji='{2⃣}', codepoints=(123, 50, 8419, 125), name='ստեղն երկու', slug='ստեղն_երկու', annotations=frozenset({'ստեղն', 'երկու', '2'})),
EmojiAnnotations(emoji='{3⃣}', codepoints=(123, 51, 8419, 125), name='ստեղն երեք', slug='ստեղն_երեք', annotations=frozenset({'ստեղն', 'երեք', '3'})),
EmojiAnnotations(emoji='{4⃣}', codepoints=(123, 52, 8419, 125), name='ստեղն չորս', slug='ստեղն_չորս', annotations=frozenset({'4', 'ստեղն', 'չորս'})),
EmojiAnnotations(emoji='{5⃣}', codepoints=(123, 53, 8419, 125), name='ստեղն հինգ', slug='ստեղն_հինգ', annotations=frozenset({'ստեղն', '5', 'հինգ'})),
EmojiAnnotations(emoji='{6⃣}', codepoints=(123, 54, 8419, 125), name='ստեղն վեց', slug='ստեղն_վեց', annotations=frozenset({'ստեղն', 'վեց', '6'})),
EmojiAnnotations(emoji='{7⃣}', codepoints=(123, 55, 8419, 125), name='ստեղն յոթ', slug='ստեղն_յոթ', annotations=frozenset({'7', 'ստեղն', 'յոթ'})),
EmojiAnnotations(emoji='{8⃣}', codepoints=(123, 56, 8419, 125), name='ստեղն ութ', slug='ստեղն_ութ', annotations=frozenset({'8', 'ստեղն', 'ութ'})),
EmojiAnnotations(emoji='{9⃣}', codepoints=(123, 57, 8419, 125), name='ստեղն ինը', slug='ստեղն_ինը', annotations=frozenset({'ստեղն', 'ինը', '9'})),
EmojiAnnotations(emoji='🔟', codepoints=(128287,), name='ստեղն տասը', slug='ստեղն_տասը', annotations=frozenset({'ստեղն', '10', 'տասը'})),
EmojiAnnotations(emoji='💯', codepoints=(128175,), name='հարյուր միավոր', slug='հարյուր_միավոր', annotations=frozenset({'հարյուր', 'միավոր', '100', 'ամբողջ'})),
EmojiAnnotations(emoji='🔞', codepoints=(128286,), name='տասնութից ցածր արգելվում է', slug='տասնութից_ցածր_արգելվում_է', annotations=frozenset({'18', 'ոչ', 'արգելված', 'տարիքային սահմանափակում', 'անչափահաս', 'տասնութ'})),
EmojiAnnotations(emoji='🔠', codepoints=(128288,), name='լատինատառ մեծատառ ներածում', slug='լատինատառ_մեծատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'լատինական', 'մեծատառ'})),
EmojiAnnotations(emoji='🔡', codepoints=(128289,), name='լատինատառ փոքրատառ ներածում', slug='լատինատառ_փոքրատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'abcd', 'լատինական', 'փոքրատառ'})),
EmojiAnnotations(emoji='🔢', codepoints=(128290,), name='թվերի ներածում', slug='թվերի_ներածում', annotations=frozenset({'մուտքագրել', '1234', 'թվեր'})),
EmojiAnnotations(emoji='🔣', codepoints=(128291,), name='նշանների ներածում', slug='նշանների_ներածում', annotations=frozenset({'մուտքագրել'})),
EmojiAnnotations(emoji='🔤', codepoints=(128292,), name='լատինատառ ներածում', slug='լատինատառ_ներածում', annotations=frozenset({'abc', 'այբուբեն', 'տառեր', 'մուտքագրել', 'լատինական'})),
EmojiAnnotations(emoji='🅰', codepoints=(127344,), name='էյ կոճակ', slug='էյ_կոճակ', annotations=frozenset({'a', 'արյուն'})),
EmojiAnnotations(emoji='🆎', codepoints=(127374,), name='էյ-բի կոճակ', slug='էյ_բի_կոճակ', annotations=frozenset({'արյուն', 'ab'})),
EmojiAnnotations(emoji='🅱', codepoints=(127345,), name='բի կոճակ', slug='բի_կոճակ', annotations=frozenset({'b', 'արյուն'})),
EmojiAnnotations(emoji='🆑', codepoints=(127377,), name='սի-էլ', slug='սի_էլ', annotations=frozenset({'cl'})),
EmojiAnnotations(emoji='ℹ', codepoints=(8505,), name='տեղեկատու', slug='տեղեկատու', annotations=frozenset({'i', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🆔', codepoints=(127380,), name='այ-դի', slug='այ_դի', annotations=frozenset({'ինքնություն', 'id'})),
EmojiAnnotations(emoji='Ⓜ', codepoints=(9410,), name='էմ տառը շրջանակի մեջ', slug='էմ_տառը_շրջանակի_մեջ', annotations=frozenset({'m', 'շրջան'})),
EmojiAnnotations(emoji='🆖', codepoints=(127382,), name='էն-ջի', slug='էն_ջի', annotations=frozenset({'ng'})),
EmojiAnnotations(emoji='🅾', codepoints=(127358,), name='օ կոճակ', slug='օ_կոճակ', annotations=frozenset({'o', 'արյուն'})),
EmojiAnnotations(emoji='🆗', codepoints=(127383,), name='օքեյ', slug='օքեյ', annotations=frozenset({'ok'})),
EmojiAnnotations(emoji='🅿', codepoints=(127359,), name='փի կոճակ', slug='փի_կոճակ', annotations=frozenset({'կայանատեղի'})),
EmojiAnnotations(emoji='🆘', codepoints=(127384,), name='սոս', slug='սոս', annotations=frozenset({'օգնել', 'sos'})),
EmojiAnnotations(emoji='🆙', codepoints=(127385,), name='ափ կոճակ', slug='ափ_կոճակ', annotations=frozenset({'նշան', 'up', 'վեր'})),
EmojiAnnotations(emoji='🆚', codepoints=(127386,), name='վի-էս', slug='վի_էս', annotations=frozenset({'ընդդեմ', 'vs'})),
EmojiAnnotations(emoji='🈁', codepoints=(127489,), name='կատականա կոկո', slug='կատականա_կոկո', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈂', codepoints=(127490,), name='կատականա սա', slug='կատականա_սա', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈷', codepoints=(127543,), name='լուսին գաղափարագիր', slug='լուսին_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈶', codepoints=(127542,), name='գոյ գաղափարագիր', slug='գոյ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈯', codepoints=(127535,), name='մատ գաղափարագիր', slug='մատ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉐', codepoints=(127568,), name='առավելություն գաղափարագիր շրջանակի մեջ', slug='առավելություն_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈹', codepoints=(127545,), name='բաժանել գաղափարագիր', slug='բաժանել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈚', codepoints=(127514,), name='ժխտում գաղափարագիր', slug='ժխտում_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈲', codepoints=(127538,), name='արգելել գաղափարագիր', slug='արգելել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉑', codepoints=(127569,), name='ընդունել գաղափարագիր շրջանակի մեջ', slug='ընդունել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈸', codepoints=(127544,), name='կիրառել գաղափարագիր', slug='կիրառել_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈴', codepoints=(127540,), name='միասին գաղափարագիր', slug='միասին_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈳', codepoints=(127539,), name='դատարկ գաղափարագիր', slug='դատարկ_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='㊗', codepoints=(12951,), name='շնորհավորել գաղափարագիր շրջանակի մեջ', slug='շնորհավորել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'շնորհավորանք', 'չինարեն', 'գաղափարագիր', 'չինական'})),
EmojiAnnotations(emoji='㊙', codepoints=(12953,), name='գաղտնի գաղափարագիր շրջանակի մեջ', slug='գաղտնի_գաղափարագիր_շրջանակի__մեջ', annotations=frozenset({'գաղափարագիր', 'չինարեն', 'գաղտնիք', 'չինական'})),
EmojiAnnotations(emoji='🈺', codepoints=(127546,), name='աշխատում է գաղափարագիր', slug='աշխատում_է_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈵', codepoints=(127541,), name='լիություն գաղափարագիր', slug='լիություն_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='▪', codepoints=(9642,), name='սև փոքր քառակուսի', slug='սև_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='▫', codepoints=(9643,), name='սպիտակ փոքր քառակուսի', slug='սպիտակ_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◻', codepoints=(9723,), name='սպիտակ միջին չափի քառակուսի', slug='սպիտակ_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◼', codepoints=(9724,), name='սև միջին չափի քառակուսի', slug='սև_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◽', codepoints=(9725,), name='սպիտակ միջին-փոքր քառակուսի', slug='սպիտակ_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◾', codepoints=(9726,), name='սև միջին-փոքր քառակուսի', slug='սև_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬛', codepoints=(11035,), name='սև մեծ քառակուսի', slug='սև_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬜', codepoints=(11036,), name='սպիտակ մեծ քառակուսի', slug='սպիտակ_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔶', codepoints=(128310,), name='նարնջագույն մեծ շեղանկյուն', slug='նարնջագույն_մեծ_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔷', codepoints=(128311,), name='կապույտ մեծ շեղանկյուն', slug='կապույտ_մեծ_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔸', codepoints=(128312,), name='նարնջագույն փոքր շեղանկյուն', slug='նարնջագույն_փոքր_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔹', codepoints=(128313,), name='կապույտ փոքր շեղանկյուն', slug='կապույտ_փոքր_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔺', codepoints=(128314,), name='կարմիր եռանկյունի ուղղված վերև', slug='կարմիր_եռանկյունի_ուղղված_վերև', annotations=frozenset({'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='🔻', codepoints=(128315,), name='կարմիր եռանկյունի ուղղված ներքև', slug='կարմիր_եռանկյունի_ուղղված_ներքև', annotations=frozenset({'ներքև', 'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='💠', codepoints=(128160,), name='կետով շեղանկյուն', slug='կետով_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'կոմիքս', 'շեղանկյուն', 'ներսում'})),
EmojiAnnotations(emoji='🔘', codepoints=(128280,), name='կետակոճակ', slug='կետակոճակ', annotations=frozenset({'կետ', 'կոճակ', 'երկրաչափական', 'ռադիո'})),
EmojiAnnotations(emoji='🔲', codepoints=(128306,), name='սև քառակուսի կոճակ', slug='սև_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔳', codepoints=(128307,), name='սպիտակ քառակուսի կոճակ', slug='սպիտակ_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'ուրվագծված', 'քառակուսի'})),
EmojiAnnotations(emoji='⚪', codepoints=(9898,), name='սպիտակ շրջանակ', slug='սպիտակ_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='⚫', codepoints=(9899,), name='սև շրջանակ', slug='սև_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='🔴', codepoints=(128308,), name='կարմիր շրջանակ', slug='կարմիր_շրջանակ', annotations=frozenset({'երկրաչափական', 'կարմիր', 'շրջան'})),
EmojiAnnotations(emoji='🔵', codepoints=(128309,), name='կապույտ շրջանակ', slug='կապույտ_շրջանակ', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շրջան'})),] | true | true |
f72af208934e1a6893d8de9bece97fef4e04f823 | 68,824 | py | Python | spec/API_specification/array_api/elementwise_functions.py | oleksandr-pavlyk/array-api | 34aa9251bec8e53d8e7f4330f0b2b6221b3f6dcb | [
"MIT"
] | null | null | null | spec/API_specification/array_api/elementwise_functions.py | oleksandr-pavlyk/array-api | 34aa9251bec8e53d8e7f4330f0b2b6221b3f6dcb | [
"MIT"
] | null | null | null | spec/API_specification/array_api/elementwise_functions.py | oleksandr-pavlyk/array-api | 34aa9251bec8e53d8e7f4330f0b2b6221b3f6dcb | [
"MIT"
] | null | null | null | from ._types import array
def abs(x: array, /) -> array:
    """
    Calculates the absolute value for each element ``x_i`` of the input array ``x`` (i.e., the element-wise result has the same magnitude as the respective element in ``x`` but has positive sign).

    .. note::
       For signed integer data types, the absolute value of the minimum representable integer is implementation-dependent.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``-0``, the result is ``+0``.
    - If ``x_i`` is ``-infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the absolute value of each element in ``x``. The returned array must have the same data type as ``x``.
    """
def acos(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the principal value of the inverse cosine, having domain ``[-1, +1]`` and codomain ``[+0, +π]``, for each element ``x_i`` of the input array ``x``. Each element-wise result is expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is greater than ``1``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is ``1``, the result is ``+0``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def acosh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the inverse hyperbolic cosine, having domain ``[+1, +infinity]`` and codomain ``[+0, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``1``, the result is ``NaN``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array whose elements each represent the area of a hyperbolic sector. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse hyperbolic cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def add(x1: array, x2: array, /) -> array:
    """
    Calculates the sum for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``+infinity``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a finite number, the result is ``+infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a finite number, the result is ``-infinity``.
    - If ``x1_i`` is a finite number and ``x2_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x1_i`` is a finite number and ``x2_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``-0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``+0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``-0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``+0``, the result is ``+0``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is a nonzero finite number, the result is ``x2_i``.
    - If ``x1_i`` is a nonzero finite number and ``x2_i`` is either ``+0`` or ``-0``, the result is ``x1_i``.
    - If ``x1_i`` is a nonzero finite number and ``x2_i`` is ``-x1_i``, the result is ``+0``.
    - In the remaining cases, when neither ``infinity``, ``+0``, ``-0``, nor a ``NaN`` is involved, and the operands have the same mathematical sign or have different magnitudes, the sum must be computed and rounded to the nearest representable value according to IEEE 754-2019 and a supported round mode. If the magnitude is too large to represent, the operation overflows and the result is an `infinity` of appropriate mathematical sign.

    .. note::
       Floating-point addition is a commutative operation, but not always associative.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise sums. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def asin(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the principal value of the inverse sine, having domain ``[-1, +1]`` and codomain ``[-π/2, +π/2]`` for each element ``x_i`` of the input array ``x``. Each element-wise result is expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is greater than ``1``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def asinh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the inverse hyperbolic sine, having domain ``[-infinity, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` in the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.

    Parameters
    ----------
    x: array
        input array whose elements each represent the area of a hyperbolic sector. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse hyperbolic sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def atan(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the principal value of the inverse tangent, having domain ``[-infinity, +infinity]`` and codomain ``[-π/2, +π/2]``, for each element ``x_i`` of the input array ``x``. Each element-wise result is expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``-π/2``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def atan2(x1: array, x2: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the inverse tangent of the quotient ``x1/x2``, having domain ``[-infinity, +infinity] x [-infinity, +infinity]`` (where the ``x`` notation denotes the set of ordered pairs of elements ``(x1_i, x2_i)``) and codomain ``[-π, +π]``, for each pair of elements ``(x1_i, x2_i)`` of the input arrays ``x1`` and ``x2``, respectively. Each element-wise result is expressed in radians.

    The mathematical signs of ``x1_i`` and ``x2_i`` determine the quadrant of each element-wise result. The quadrant (i.e., branch) is chosen such that each element-wise result is the signed angle in radians between the ray ending at the origin and passing through the point ``(1,0)`` and the ray ending at the origin and passing through the point ``(x2_i, x1_i)``.

    .. note::
       Note the role reversal: the "y-coordinate" is the first function parameter; the "x-coordinate" is the second function parameter. The parameter order is intentional and traditional for the two-argument inverse tangent function where the y-coordinate argument is first and the x-coordinate argument is second.

       By IEEE 754 convention, the inverse tangent of the quotient ``x1/x2`` is defined for ``x2_i`` equal to positive or negative zero and for either or both of ``x1_i`` and ``x2_i`` equal to positive or negative ``infinity``.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``+0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``+π``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is an implementation-dependent approximation to ``+π``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``+0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``-π``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is an implementation-dependent approximation to ``-π``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is an implementation-dependent approximation to ``-π/2``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``-π/2``.
    - If ``x1_i`` is greater than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``x1_i`` is greater than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``+π``.
    - If ``x1_i`` is less than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``+infinity``, the result is ``-0``.
    - If ``x1_i`` is less than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``-π``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is finite, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is finite, the result is an implementation-dependent approximation to ``-π/2``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``+infinity``, the result is an implementation-dependent approximation to ``+π/4``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``+3π/4``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``+infinity``, the result is an implementation-dependent approximation to ``-π/4``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``-3π/4``.

    Parameters
    ----------
    x1: array
        input array corresponding to the y-coordinates. Should have a real-valued floating-point data type.
    x2: array
        input array corresponding to the x-coordinates. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse tangent of the quotient ``x1/x2``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def atanh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the inverse hyperbolic tangent, having domain ``[-1, +1]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is greater than ``1``, the result is ``NaN``.
    - If ``x_i`` is ``-1``, the result is ``-infinity``.
    - If ``x_i`` is ``+1``, the result is ``+infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.

    Parameters
    ----------
    x: array
        input array whose elements each represent the area of a hyperbolic sector. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse hyperbolic tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def bitwise_and(x1: array, x2: array, /) -> array:
    """
    Computes the bitwise AND of the underlying binary representation of each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer or boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_left_shift(x1: array, x2: array, /) -> array:
    """
    Shifts the bits of each element ``x1_i`` of the input array ``x1`` to the left by appending ``x2_i`` (i.e., the respective element in the input array ``x2``) zeros to the right of ``x1_i``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer data type. Each element must be greater than or equal to ``0``.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_invert(x: array, /) -> array:
    """
    Inverts (flips) each bit for each element ``x_i`` of the input array ``x``.

    Parameters
    ----------
    x: array
        input array. Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have the same data type as ``x``.
    """
def bitwise_or(x1: array, x2: array, /) -> array:
    """
    Computes the bitwise OR of the underlying binary representation of each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer or boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_right_shift(x1: array, x2: array, /) -> array:
    """
    Shifts the bits of each element ``x1_i`` of the input array ``x1`` to the right according to the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       This operation must be an arithmetic shift (i.e., sign-propagating) and thus equivalent to floor division by a power of two.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer data type. Each element must be greater than or equal to ``0``.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_xor(x1: array, x2: array, /) -> array:
    """
    Computes the bitwise XOR of the underlying binary representation of each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer or boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def ceil(x: array, /) -> array:
    """
    Rounds each element ``x_i`` of the input array ``x`` to the smallest (i.e., closest to ``-infinity``) integer-valued number that is not less than ``x_i``.

    **Special cases**

    - If ``x_i`` is already integer-valued, the result is ``x_i``.

    For floating-point operands,

    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``NaN``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the rounded result for each element in ``x``. The returned array must have the same data type as ``x``.
    """
def cos(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the cosine, having domain ``(-infinity, +infinity)`` and codomain ``[-1, +1]``, for each element ``x_i`` of the input array ``x``. Each element ``x_i`` is assumed to be expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``1``.
    - If ``x_i`` is ``-0``, the result is ``1``.
    - If ``x_i`` is ``+infinity``, the result is ``NaN``.
    - If ``x_i`` is ``-infinity``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array whose elements are each expressed in radians. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def cosh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the hyperbolic cosine, having domain ``[-infinity, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` in the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``1``.
    - If ``x_i`` is ``-0``, the result is ``1``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array whose elements each represent a hyperbolic angle. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the hyperbolic cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def divide(x1: array, x2: array, /) -> array:
    """
    Calculates the division for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       If one or both of the input arrays have integer data types, the result is implementation-dependent, as type promotion between data type "kinds" (e.g., integer versus floating-point) is unspecified.

       Specification-compliant libraries may choose to raise an error or return an array containing the element-wise results. If an array is returned, the array must have a real-valued floating-point data type.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is ``+0``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is ``+infinity``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``-infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``-infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``+infinity``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``-0``.
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``-0``.
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``+0``.
    - If ``x1_i`` and ``x2_i`` have the same mathematical sign and are both nonzero finite numbers, the result has a positive mathematical sign.
    - If ``x1_i`` and ``x2_i`` have different mathematical signs and are both nonzero finite numbers, the result has a negative mathematical sign.
    - In the remaining cases, where neither ``infinity``, ``+0``, ``-0``, nor ``NaN`` is involved, the quotient must be computed and rounded to the nearest representable value according to IEEE 754-2019 and a supported rounding mode. If the magnitude is too large to represent, the operation overflows and the result is an ``infinity`` of appropriate mathematical sign. If the magnitude is too small to represent, the operation underflows and the result is a zero of appropriate mathematical sign.

    Parameters
    ----------
    x1: array
        dividend input array. Should have a real-valued data type.
    x2: array
        divisor input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def equal(x1: array, x2: array, /) -> array:
    """
    Computes the truth value of ``x1_i == x2_i`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. May have any data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). May have any data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def exp(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the exponential function, having domain ``[-infinity, +infinity]`` and codomain ``[+0, +infinity]``, for each element ``x_i`` of the input array ``x`` (``e`` raised to the power of ``x_i``, where ``e`` is the base of the natural logarithm).

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``1``.
    - If ``x_i`` is ``-0``, the result is ``1``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``+0``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the evaluated exponential function result for each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def expm1(x: array, /) -> array:
    """
    Element-wise ``exp(x) - 1``: an implementation-dependent approximation
    with domain ``[-infinity, +infinity]`` and codomain ``[-1, +infinity]``.

    .. note::
       This function exists so that ``exp(x) - 1.0`` can be computed
       accurately for ``x`` near zero; conforming implementations should not
       implement it as the naive two-step expression. See FDLIBM (or another
       IEEE 754-2019 compliant math library) for a reference implementation.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-1``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        real-valued floating-point data type determined by
        :ref:`type-promotion`.
    """
def floor(x: array, /) -> array:
    """
    Element-wise floor: rounds each ``x_i`` to the greatest (i.e., closest
    to ``+infinity``) integer-valued number not greater than ``x_i``.

    **Special cases**

    - If ``x_i`` is already integer-valued, the result is ``x_i``.

    For floating-point operands,

    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``NaN``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise rounded values. The returned array must have
        the same data type as ``x``.
    """
def floor_divide(x1: array, x2: array, /) -> array:
    """
    Rounds the result of dividing each element ``x1_i`` of the input array ``x1`` by the respective element ``x2_i`` of the input array ``x2`` to the greatest (i.e., closest to `+infinity`) integer-valued number that is not greater than the division result.

    .. note::
       For input arrays which promote to an integer data type, the result of division by zero is unspecified and thus implementation-defined.

    **Special cases**

    .. note::
       Floor division was introduced in Python via `PEP 238 <https://www.python.org/dev/peps/pep-0238/>`_ with the goal to disambiguate "true division" (i.e., computing an approximation to the mathematical operation of division) from "floor division" (i.e., rounding the result of division toward negative infinity). The former was computed when one of the operands was a ``float``, while the latter was computed when both operands were ``int``\s. Overloading the ``/`` operator to support both behaviors led to subtle numerical bugs when integers are possible, but not expected.

       To resolve this ambiguity, ``/`` was designated for true division, and ``//`` was designated for floor division. Semantically, floor division was `defined <https://www.python.org/dev/peps/pep-0238/#semantics-of-floor-division>`_ as equivalent to ``a // b == floor(a/b)``; however, special floating-point cases were left ill-defined.

       Accordingly, floor division is not implemented consistently across array libraries for some of the special cases documented below. Namely, when one of the operands is ``infinity``, libraries may diverge with some choosing to strictly follow ``floor(a/b)`` and others choosing to pair ``//`` with ``%`` according to the relation ``b = a % b + b * (a // b)``. The special cases leading to divergent behavior are documented below.

       This specification prefers floor division to match ``floor(divide(x1, x2))`` in order to avoid surprising and unexpected results; however, array libraries may choose to more strictly follow Python behavior.

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is ``+0``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is ``+infinity``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``+infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``-infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``-infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``+infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``-0``. (**note**: libraries may return ``-1.0`` to match Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``-0``. (**note**: libraries may return ``-1.0`` to match Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``+0``.
    - If ``x1_i`` and ``x2_i`` have the same mathematical sign and are both nonzero finite numbers, the result has a positive mathematical sign.
    - If ``x1_i`` and ``x2_i`` have different mathematical signs and are both nonzero finite numbers, the result has a negative mathematical sign.
    - In the remaining cases, where neither ``-infinity``, ``+0``, ``-0``, nor ``NaN`` is involved, the quotient must be computed and rounded to the greatest (i.e., closest to `+infinity`) representable integer-valued number that is not greater than the division result. If the magnitude is too large to represent, the operation overflows and the result is an ``infinity`` of appropriate mathematical sign. If the magnitude is too small to represent, the operation underflows and the result is a zero of appropriate mathematical sign.

    Parameters
    ----------
    x1: array
        dividend input array. Should have a real-valued data type.
    x2: array
        divisor input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def greater(x1: array, x2: array, /) -> array:
    """
    Element-wise "greater than" test: computes ``x1_i > x2_i`` for each pair
    of corresponding elements.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise comparison results. The returned array must
        have a data type of ``bool``.
    """
def greater_equal(x1: array, x2: array, /) -> array:
    """
    Element-wise "greater than or equal" test: computes ``x1_i >= x2_i`` for
    each pair of corresponding elements.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise comparison results. The returned array must
        have a data type of ``bool``.
    """
def isfinite(x: array, /) -> array:
    """
    Element-wise finiteness test: ``True`` where ``x_i`` is neither ``NaN``
    nor positive/negative infinity.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of test results, where ``out_i`` is ``True`` if ``x_i`` is
        finite and ``False`` otherwise. The returned array must have a data
        type of ``bool``.
    """
def isinf(x: array, /) -> array:
    """
    Element-wise infinity test: ``True`` where ``x_i`` equals positive or
    negative infinity.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of test results, where ``out_i`` is ``True`` if ``x_i`` is
        either positive or negative infinity and ``False`` otherwise. The
        returned array must have a data type of ``bool``.
    """
def isnan(x: array, /) -> array:
    """
    Element-wise ``NaN`` test: ``True`` where ``x_i`` is ``NaN``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of test results, where ``out_i`` is ``True`` if ``x_i`` is
        ``NaN`` and ``False`` otherwise. The returned array should have a
        data type of ``bool``.
    """
def less(x1: array, x2: array, /) -> array:
    """
    Element-wise "less than" test: computes ``x1_i < x2_i`` for each pair of
    corresponding elements.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise comparison results. The returned array must
        have a data type of ``bool``.
    """
def less_equal(x1: array, x2: array, /) -> array:
    """
    Element-wise "less than or equal" test: computes ``x1_i <= x2_i`` for
    each pair of corresponding elements.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise comparison results. The returned array must
        have a data type of ``bool``.
    """
def log(x: array, /) -> array:
    """
    Element-wise natural (base ``e``) logarithm: an implementation-dependent
    approximation with domain ``[0, +infinity]`` and codomain
    ``[-infinity, +infinity]``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``0``, the result is ``NaN``.
    - If ``x_i`` is either ``+0`` or ``-0``, the result is ``-infinity``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array of element-wise natural logarithms. The returned array must
        have a real-valued floating-point data type determined by
        :ref:`type-promotion`.
    """
def log1p(x: array, /) -> array:
    """
    Element-wise ``log(1 + x)`` (natural logarithm): an
    implementation-dependent approximation with domain ``[-1, +infinity]``
    and codomain ``[-infinity, +infinity]``.

    .. note::
       This function exists so that ``log(1 + x)`` can be computed accurately
       for ``x`` near zero; conforming implementations should not implement
       it as the naive two-step expression. See FDLIBM (or another
       IEEE 754-2019 compliant math library) for a reference implementation.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is ``-1``, the result is ``-infinity``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        real-valued floating-point data type determined by
        :ref:`type-promotion`.
    """
def log2(x: array, /) -> array:
    """
    Element-wise base-``2`` logarithm: an implementation-dependent
    approximation with domain ``[0, +infinity]`` and codomain
    ``[-infinity, +infinity]``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``0``, the result is ``NaN``.
    - If ``x_i`` is either ``+0`` or ``-0``, the result is ``-infinity``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array of element-wise base-``2`` logarithms. The returned array
        must have a real-valued floating-point data type determined by
        :ref:`type-promotion`.
    """
def log10(x: array, /) -> array:
    """
    Element-wise base-``10`` logarithm: an implementation-dependent
    approximation with domain ``[0, +infinity]`` and codomain
    ``[-infinity, +infinity]``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``0``, the result is ``NaN``.
    - If ``x_i`` is either ``+0`` or ``-0``, the result is ``-infinity``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array of element-wise base-``10`` logarithms. The returned array
        must have a real-valued floating-point data type determined by
        :ref:`type-promotion`.
    """
def logaddexp(x1: array, x2: array, /) -> array:
    """
    Element-wise log-sum-exp of two arrays: computes
    ``log(exp(x1_i) + exp(x2_i))`` for each pair of corresponding elements.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is not ``NaN``, the result is ``+infinity``.
    - If ``x1_i`` is not ``NaN`` and ``x2_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued floating-point data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a real-valued floating-point
        data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        real-valued floating-point data type determined by
        :ref:`type-promotion`.
    """
def logical_and(x1: array, x2: array, /) -> array:
    """
    Element-wise logical AND of two arrays.

    .. note::
       While this specification recommends that this function only accept
       boolean input arrays, specification-compliant libraries may also
       accept real-valued inputs. When non-boolean data types are supported,
       zeros must be treated as ``False`` and non-zeros as ``True``.

    Parameters
    ----------
    x1: array
        first input array. Should have a boolean data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a boolean data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        data type of ``bool``.
    """
def logical_not(x: array, /) -> array:
    """
    Element-wise logical NOT of an array.

    .. note::
       While this specification recommends that this function only accept
       boolean input arrays, specification-compliant libraries may also
       accept real-valued inputs. When non-boolean data types are supported,
       zeros must be treated as ``False`` and non-zeros as ``True``.

    Parameters
    ----------
    x: array
        input array. Should have a boolean data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        data type of ``bool``.
    """
def logical_or(x1: array, x2: array, /) -> array:
    """
    Element-wise logical OR of two arrays.

    .. note::
       While this specification recommends that this function only accept
       boolean input arrays, specification-compliant libraries may also
       accept real-valued inputs. When non-boolean data types are supported,
       zeros must be treated as ``False`` and non-zeros as ``True``.

    Parameters
    ----------
    x1: array
        first input array. Should have a boolean data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a boolean data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        data type of ``bool``.
    """
def logical_xor(x1: array, x2: array, /) -> array:
    """
    Element-wise logical XOR of two arrays.

    .. note::
       While this specification recommends that this function only accept
       boolean input arrays, specification-compliant libraries may also
       accept real-valued inputs. When non-boolean data types are supported,
       zeros must be treated as ``False`` and non-zeros as ``True``.

    Parameters
    ----------
    x1: array
        first input array. Should have a boolean data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`). Should have a boolean data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        data type of ``bool``.
    """
def multiply(x1: array, x2: array, /) -> array:
    """
    Calculates the product for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` and ``x2_i`` have the same mathematical sign, the result has a positive mathematical sign, unless the result is ``NaN``. If the result is ``NaN``, the "sign" of ``NaN`` is implementation-defined.
    - If ``x1_i`` and ``x2_i`` have different mathematical signs, the result has a negative mathematical sign, unless the result is ``NaN``. If the result is ``NaN``, the "sign" of ``NaN`` is implementation-defined.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is a signed infinity with the mathematical sign determined by the rule already stated above.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is a nonzero finite number, the result is a signed infinity with the mathematical sign determined by the rule already stated above.
    - If ``x1_i`` is a nonzero finite number and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is a signed infinity with the mathematical sign determined by the rule already stated above.
    - In the remaining cases, where neither ``infinity`` nor ``NaN`` is involved, the product must be computed and rounded to the nearest representable value according to IEEE 754-2019 and a supported rounding mode. If the magnitude is too large to represent, the result is an ``infinity`` of appropriate mathematical sign. If the magnitude is too small to represent, the result is a zero of appropriate mathematical sign.

    .. note::
       Floating-point multiplication is not always associative due to finite precision.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise products. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def negative(x: array, /) -> array:
    """
    Element-wise numerical negation: computes ``y_i = -x_i`` for each
    element of the input array.

    .. note::
       For signed integer data types, the numerical negative of the minimum
       representable integer is implementation-dependent.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have a
        data type determined by :ref:`type-promotion`.
    """
def not_equal(x1: array, x2: array, /) -> array:
    """
    Element-wise inequality test: computes ``x1_i != x2_i`` for each pair of
    corresponding elements.

    Parameters
    ----------
    x1: array
        first input array. May have any data type.
    x2: array
        second input array. Must be broadcast-compatible with ``x1``
        (see :ref:`broadcasting`).

    Returns
    -------
    out: array
        an array of element-wise comparison results. The returned array must
        have a data type of ``bool``.
    """
def positive(x: array, /) -> array:
    """
    Element-wise numerical positive: computes ``y_i = +x_i`` for each
    element of the input array.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have the
        same data type as ``x``.
    """
def pow(x1: array, x2: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of exponentiation by raising each element ``x1_i`` (the base) of the input array ``x1`` to the power of ``x2_i`` (the exponent), where ``x2_i`` is the corresponding element of the input array ``x2``.

    .. note::
       If both ``x1`` and ``x2`` have integer data types, the result of ``pow`` when ``x2_i`` is negative (i.e., less than zero) is unspecified and thus implementation-dependent.

    .. note::
       If ``x1`` has an integer data type and ``x2`` has a real-valued floating-point data type, behavior is implementation-dependent (type promotion between data type "kinds" (integer versus floating-point) is unspecified).

    **Special cases**

    For floating-point operands,

    - If ``x1_i`` is not equal to ``1`` and ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x2_i`` is ``+0``, the result is ``1``, even if ``x1_i`` is ``NaN``.
    - If ``x2_i`` is ``-0``, the result is ``1``, even if ``x1_i`` is ``NaN``.
    - If ``x1_i`` is ``NaN`` and ``x2_i`` is not equal to ``0``, the result is ``NaN``.
    - If ``abs(x1_i)`` is greater than ``1`` and ``x2_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``abs(x1_i)`` is greater than ``1`` and ``x2_i`` is ``-infinity``, the result is ``+0``.
    - If ``abs(x1_i)`` is ``1`` and ``x2_i`` is ``+infinity``, the result is ``1``.
    - If ``abs(x1_i)`` is ``1`` and ``x2_i`` is ``-infinity``, the result is ``1``.
    - If ``x1_i`` is ``1`` and ``x2_i`` is not ``NaN``, the result is ``1``.
    - If ``abs(x1_i)`` is less than ``1`` and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``abs(x1_i)`` is less than ``1`` and ``x2_i`` is ``-infinity``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is greater than ``0``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is less than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is greater than ``0``, and ``x2_i`` is an odd integer value, the result is ``-infinity``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is greater than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+infinity``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is less than ``0``, and ``x2_i`` is an odd integer value, the result is ``-0``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is less than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``+infinity``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is greater than ``0``, and ``x2_i`` is an odd integer value, the result is ``-0``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is greater than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+0``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is less than ``0``, and ``x2_i`` is an odd integer value, the result is ``-infinity``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is less than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+infinity``.
    - If ``x1_i`` is less than ``0``, ``x1_i`` is a finite number, ``x2_i`` is a finite number, and ``x2_i`` is not an integer value, the result is ``NaN``.

    Parameters
    ----------
    x1: array
        first input array whose elements correspond to the exponentiation base. Should have a real-valued data type.
    x2: array
        second input array whose elements correspond to the exponentiation exponent. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def remainder(x1: array, x2: array, /) -> array:
    """
    Returns the remainder of division for each element ``x1_i`` of the input array ``x1`` and the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       This function is equivalent to the Python modulus operator ``x1_i % x2_i``.

    .. note::
       For input arrays which promote to an integer data type, the result of division by zero is unspecified and thus implementation-defined.

    **Special cases**

    .. note::
       In general, similar to Python's ``%`` operator, this function is **not** recommended for floating-point operands as semantics do not follow IEEE 754. That this function is specified to accept floating-point operands is primarily for reasons of backward compatibility.

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is ``NaN``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is ``NaN``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is ``NaN``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``x1_i``. (**note**: this result matches Python behavior.)
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``x2_i``. (**note**: this result matches Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``x2_i``. (**note**: this result matches Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``x1_i``. (**note**: this result matches Python behavior.)
    - In the remaining cases, the result must match that of the Python ``%`` operator.

    Parameters
    ----------
    x1: array
        dividend input array. Should have a real-valued data type.
    x2: array
        divisor input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. Each element-wise result must have the same sign as the respective element ``x2_i``. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def round(x: array, /) -> array:
    """
    Element-wise rounding of each ``x_i`` to the nearest integer-valued
    number.

    **Special cases**

    - If ``x_i`` is already integer-valued, the result is ``x_i``.

    For floating-point operands,

    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If two integers are equally close to ``x_i``, the result is the even
      integer closest to ``x_i``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise rounded values. The returned array must
        have the same data type as ``x``.
    """
def sign(x: array, /) -> array:
    """
    Element-wise sign indicator for each ``x_i`` of the input array.

    **Special cases**

    - If ``x_i`` is less than ``0``, the result is ``-1``.
    - If ``x_i`` is either ``-0`` or ``+0``, the result is ``0``.
    - If ``x_i`` is greater than ``0``, the result is ``+1``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array of element-wise results. The returned array must have the
        same data type as ``x``.
    """
def sin(x: array, /) -> array:
"""
Calculates an implementation-dependent approximation to the sine, having domain ``(-infinity, +infinity)`` and codomain ``[-1, +1]``, for each element ``x_i`` of the input array ``x``. Each element ``x_i`` is assumed to be expressed in radians.
**Special cases**
For floating-point operands,
- If ``x_i`` is ``NaN``, the result is ``NaN``.
- If ``x_i`` is ``+0``, the result is ``+0``.
- If ``x_i`` is ``-0``, the result is ``-0``.
- If ``x_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
Parameters
----------
x: array
input array whose elements are each expressed in radians. Should have a real-valued floating-point data type.
Returns
-------
out: array
an array containing the sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
"""
def sinh(x: array, /) -> array:
"""
Calculates an implementation-dependent approximation to the hyperbolic sine, having domain ``[-infinity, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.
**Special cases**
For floating-point operands,
- If ``x_i`` is ``NaN``, the result is ``NaN``.
- If ``x_i`` is ``+0``, the result is ``+0``.
- If ``x_i`` is ``-0``, the result is ``-0``.
- If ``x_i`` is ``+infinity``, the result is ``+infinity``.
- If ``x_i`` is ``-infinity``, the result is ``-infinity``.
Parameters
----------
x: array
input array whose elements each represent a hyperbolic angle. Should have a real-valued floating-point data type.
Returns
-------
out: array
an array containing the hyperbolic sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
"""
def square(x: array, /) -> array:
"""
Squares (``x_i * x_i``) each element ``x_i`` of the input array ``x``.
Parameters
----------
x: array
input array. Should have a real-valued data type.
Returns
-------
out: array
an array containing the evaluated result for each element in ``x``. The returned array must have a data type determined by :ref:`type-promotion`.
"""
def sqrt(x: array, /) -> array:
"""
Calculates the square root, having domain ``[0, +infinity]`` and codomain ``[0, +infinity]``, for each element ``x_i`` of the input array ``x``. After rounding, each result must be indistinguishable from the infinitely precise result (as required by IEEE 754).
**Special cases**
For floating-point operands,
- If ``x_i`` is ``NaN``, the result is ``NaN``.
- If ``x_i`` is less than ``0``, the result is ``NaN``.
- If ``x_i`` is ``+0``, the result is ``+0``.
- If ``x_i`` is ``-0``, the result is ``-0``.
- If ``x_i`` is ``+infinity``, the result is ``+infinity``.
Parameters
----------
x: array
input array. Should have a real-valued floating-point data type.
Returns
-------
out: array
an array containing the square root of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
"""
def subtract(x1: array, x2: array, /) -> array:
"""
Calculates the difference for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``. The result of ``x1_i - x2_i`` must be the same as ``x1_i + (-x2_i)`` and must be governed by the same floating-point rules as addition (see :meth:`add`).
Parameters
----------
x1: array
first input array. Should have a real-valued data type.
x2: array
second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.
Returns
-------
out: array
an array containing the element-wise differences. The returned array must have a data type determined by :ref:`type-promotion`.
"""
def tan(x: array, /) -> array:
"""
Calculates an implementation-dependent approximation to the tangent, having domain ``(-infinity, +infinity)`` and codomain ``(-infinity, +infinity)``, for each element ``x_i`` of the input array ``x``. Each element ``x_i`` is assumed to be expressed in radians.
**Special cases**
For floating-point operands,
- If ``x_i`` is ``NaN``, the result is ``NaN``.
- If ``x_i`` is ``+0``, the result is ``+0``.
- If ``x_i`` is ``-0``, the result is ``-0``.
- If ``x_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
Parameters
----------
x: array
input array whose elements are expressed in radians. Should have a real-valued floating-point data type.
Returns
-------
out: array
an array containing the tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
"""
def tanh(x: array, /) -> array:
"""
Calculates an implementation-dependent approximation to the hyperbolic tangent, having domain ``[-infinity, +infinity]`` and codomain ``[-1, +1]``, for each element ``x_i`` of the input array ``x``.
**Special cases**
For floating-point operands,
- If ``x_i`` is ``NaN``, the result is ``NaN``.
- If ``x_i`` is ``+0``, the result is ``+0``.
- If ``x_i`` is ``-0``, the result is ``-0``.
- If ``x_i`` is ``+infinity``, the result is ``+1``.
- If ``x_i`` is ``-infinity``, the result is ``-1``.
Parameters
----------
x: array
input array whose elements each represent a hyperbolic angle. Should have a real-valued floating-point data type.
Returns
-------
out: array
an array containing the hyperbolic tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
"""
def trunc(x: array, /) -> array:
"""
    Rounds each element ``x_i`` of the input array ``x`` to the nearest integer-valued number that is closer to zero than ``x_i`` (i.e., truncates toward zero).
**Special cases**
- If ``x_i`` is already integer-valued, the result is ``x_i``.
For floating-point operands,
- If ``x_i`` is ``+infinity``, the result is ``+infinity``.
- If ``x_i`` is ``-infinity``, the result is ``-infinity``.
- If ``x_i`` is ``+0``, the result is ``+0``.
- If ``x_i`` is ``-0``, the result is ``-0``.
- If ``x_i`` is ``NaN``, the result is ``NaN``.
Parameters
----------
x: array
input array. Should have a real-valued data type.
Returns
-------
out: array
an array containing the rounded result for each element in ``x``. The returned array must have the same data type as ``x``.
"""
__all__ = ['abs', 'acos', 'acosh', 'add', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'bitwise_and', 'bitwise_left_shift', 'bitwise_invert', 'bitwise_or', 'bitwise_right_shift', 'bitwise_xor', 'ceil', 'cos', 'cosh', 'divide', 'equal', 'exp', 'expm1', 'floor', 'floor_divide', 'greater', 'greater_equal', 'isfinite', 'isinf', 'isnan', 'less', 'less_equal', 'log', 'log1p', 'log2', 'log10', 'logaddexp', 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'multiply', 'negative', 'not_equal', 'positive', 'pow', 'remainder', 'round', 'sign', 'sin', 'sinh', 'square', 'sqrt', 'subtract', 'tan', 'tanh', 'trunc'] | 49.407035 | 614 | 0.621527 | from ._types import array
def abs(x: array, /) -> array:
def acos(x: array, /) -> array:
def acosh(x: array, /) -> array:
def add(x1: array, x2: array, /) -> array:
def asin(x: array, /) -> array:
def asinh(x: array, /) -> array:
def atan(x: array, /) -> array:
def atan2(x1: array, x2: array, /) -> array:
def atanh(x: array, /) -> array:
def bitwise_and(x1: array, x2: array, /) -> array:
def bitwise_left_shift(x1: array, x2: array, /) -> array:
def bitwise_invert(x: array, /) -> array:
def bitwise_or(x1: array, x2: array, /) -> array:
def bitwise_right_shift(x1: array, x2: array, /) -> array:
def bitwise_xor(x1: array, x2: array, /) -> array:
def ceil(x: array, /) -> array:
def cos(x: array, /) -> array:
def cosh(x: array, /) -> array:
def divide(x1: array, x2: array, /) -> array:
def equal(x1: array, x2: array, /) -> array:
def exp(x: array, /) -> array:
def expm1(x: array, /) -> array:
def floor(x: array, /) -> array:
def floor_divide(x1: array, x2: array, /) -> array:
def greater(x1: array, x2: array, /) -> array:
def greater_equal(x1: array, x2: array, /) -> array:
def isfinite(x: array, /) -> array:
def isinf(x: array, /) -> array:
def isnan(x: array, /) -> array:
def less(x1: array, x2: array, /) -> array:
def less_equal(x1: array, x2: array, /) -> array:
def log(x: array, /) -> array:
def log1p(x: array, /) -> array:
def log2(x: array, /) -> array:
def log10(x: array, /) -> array:
def logaddexp(x1: array, x2: array, /) -> array:
def logical_and(x1: array, x2: array, /) -> array:
def logical_not(x: array, /) -> array:
def logical_or(x1: array, x2: array, /) -> array:
def logical_xor(x1: array, x2: array, /) -> array:
def multiply(x1: array, x2: array, /) -> array:
def negative(x: array, /) -> array:
def not_equal(x1: array, x2: array, /) -> array:
def positive(x: array, /) -> array:
def pow(x1: array, x2: array, /) -> array:
def remainder(x1: array, x2: array, /) -> array:
def round(x: array, /) -> array:
def sign(x: array, /) -> array:
def sin(x: array, /) -> array:
def sinh(x: array, /) -> array:
def square(x: array, /) -> array:
def sqrt(x: array, /) -> array:
def subtract(x1: array, x2: array, /) -> array:
def tan(x: array, /) -> array:
def tanh(x: array, /) -> array:
def trunc(x: array, /) -> array:
__all__ = ['abs', 'acos', 'acosh', 'add', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'bitwise_and', 'bitwise_left_shift', 'bitwise_invert', 'bitwise_or', 'bitwise_right_shift', 'bitwise_xor', 'ceil', 'cos', 'cosh', 'divide', 'equal', 'exp', 'expm1', 'floor', 'floor_divide', 'greater', 'greater_equal', 'isfinite', 'isinf', 'isnan', 'less', 'less_equal', 'log', 'log1p', 'log2', 'log10', 'logaddexp', 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'multiply', 'negative', 'not_equal', 'positive', 'pow', 'remainder', 'round', 'sign', 'sin', 'sinh', 'square', 'sqrt', 'subtract', 'tan', 'tanh', 'trunc'] | true | true |
f72af3b77a6c41b7fa62f8cf773835380670f57a | 130 | py | Python | cwlkernel/__main__.py | codacy-badger/CWLJNIKernel | 89c830d2ab300f3775e4e49cfc2d0fe894170f5e | [
"Apache-2.0"
] | null | null | null | cwlkernel/__main__.py | codacy-badger/CWLJNIKernel | 89c830d2ab300f3775e4e49cfc2d0fe894170f5e | [
"Apache-2.0"
] | null | null | null | cwlkernel/__main__.py | codacy-badger/CWLJNIKernel | 89c830d2ab300f3775e4e49cfc2d0fe894170f5e | [
"Apache-2.0"
] | null | null | null | from ipykernel.kernelapp import IPKernelApp
from .CWLKernel import CWLKernel
IPKernelApp.launch_instance(kernel_class=CWLKernel) | 26 | 51 | 0.876923 | from ipykernel.kernelapp import IPKernelApp
from .CWLKernel import CWLKernel
IPKernelApp.launch_instance(kernel_class=CWLKernel) | true | true |
f72af4bbb77cd40f08c0addf4a50faf422264aa8 | 7,875 | py | Python | TorchRay/torchray/benchmark/evaluate_imagenet_gradcam_energy_inside_bbox.py | UMBCvision/Consistent-Explanations-by-Contrastive-Learning | 589ff89cbcc96a1d8bd8d5b7bd7a785448ed2de3 | [
"MIT"
] | null | null | null | TorchRay/torchray/benchmark/evaluate_imagenet_gradcam_energy_inside_bbox.py | UMBCvision/Consistent-Explanations-by-Contrastive-Learning | 589ff89cbcc96a1d8bd8d5b7bd7a785448ed2de3 | [
"MIT"
] | null | null | null | TorchRay/torchray/benchmark/evaluate_imagenet_gradcam_energy_inside_bbox.py | UMBCvision/Consistent-Explanations-by-Contrastive-Learning | 589ff89cbcc96a1d8bd8d5b7bd7a785448ed2de3 | [
"MIT"
] | null | null | null | import argparse
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data.distributed
import torchvision.transforms as transforms
import resnet_multigpu_cgc as resnet
import cv2
import datasets as pointing_datasets
"""
Here, we evaluate the content heatmap (Grad-CAM heatmap within object bounding box) on the imagenet dataset.
"""
# Supported backbone architectures (must match factory functions in resnet_multigpu_cgc).
model_names = ['resnet18', 'resnet50']

parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('data', metavar='DIR', help='path to dataset')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',
                    choices=model_names,
                    help='model architecture: ' +
                         ' | '.join(model_names) +
                         ' (default: resnet18)')
parser.add_argument('-j', '--workers', default=16, type=int, metavar='N',
                    help='number of data loading workers (default: 16)')
# Fixed help text: it previously claimed "(default: 96)" although the actual
# default is 256.
parser.add_argument('-b', '--batch-size', default=256, type=int,
                    metavar='N', help='mini-batch size (default: 256)')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
                    help='use pre-trained model')
# Fixed help text: it previously claimed "(default: 4)" although the actual
# default is 1.
parser.add_argument('-g', '--num-gpus', default=1, type=int,
                    metavar='N', help='number of GPUs to match (default: 1)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('--input_resize', default=224, type=int,
                    metavar='N', help='Resize for smallest side of input (default: 224)')
def main():
    """Build the model from CLI args, load weights, and run the bbox-energy evaluation."""
    global args
    args = parser.parse_args()
    # Either a torchvision-pretrained backbone or a randomly initialized one
    # (the latter only makes sense together with --resume below).
    if args.pretrained:
        print("=> using pre-trained model '{}'".format(args.arch))
        if args.arch.startswith('resnet'):
            model = resnet.__dict__[args.arch](pretrained=True)
        else:
            assert False, 'Unsupported architecture: {}'.format(args.arch)
    else:
        print("=> creating model '{}'".format(args.arch))
        if args.arch.startswith('resnet'):
            model = resnet.__dict__[args.arch]()
    # Wrap in DataParallel *before* loading the checkpoint so the checkpoint's
    # state-dict keys are expected to match the wrapped model.
    model = torch.nn.DataParallel(model).cuda()
    if args.resume:
        print("=> loading checkpoint '{}'".format(args.resume))
        checkpoint = torch.load(args.resume)
        model.load_state_dict(checkpoint['state_dict'])
    if (not args.resume) and (not args.pretrained):
        assert False, "Please specify either the pre-trained model or checkpoint for evaluation"
    cudnn.benchmark = True
    # Standard ImageNet channel statistics.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    # NOTE(review): Resize(args.input_resize) scales the smallest image side
    # (no crop), so the aspect ratio is preserved; variable-size inputs are
    # presumably handled by adaptive pooling before the FC layer — confirm in
    # resnet_multigpu_cgc. The Grad-CAM heatmap is later resized back to the
    # full image size and compared against the actual bbox co-ordinates.
    val_dataset = pointing_datasets.ImageNetDetection(args.data,
                                                      transform=transforms.Compose([
                                                          transforms.Resize(args.input_resize),
                                                          transforms.ToTensor(),
                                                          normalize,
                                                      ]))
    # we set batch size=1 since we are loading full resolution images.
    val_loader = torch.utils.data.DataLoader(
        val_dataset, batch_size=1, shuffle=False,
        num_workers=args.workers, pin_memory=True)
    validate_multi(val_loader, val_dataset, model)
def validate_multi(val_loader, val_dataset, model):
    """
    Evaluate the fraction of Grad-CAM energy falling inside the ground-truth
    object mask, averaged over the whole dataset.

    :param val_loader: DataLoader yielding (image, annotation, target); assumed
        batch size == 1 (full-resolution images).
    :param val_dataset: dataset providing ``as_image_size(annotation)``.
    :param model: network whose forward supports ``vanilla_with_feats=True``
        and returns (logits, conv feature maps).
    """
    batch_time = AverageMeter()
    heatmap_inside_bbox = AverageMeter()
    # switch to evaluate mode
    model.eval()
    end = time.time()
    for i, (images, annotation, targets) in enumerate(val_loader):
        images = images.cuda(non_blocking=True)
        targets = targets.cuda(non_blocking=True)
        # we assume batch size == 1 and unwrap the first elem of every list in annotation object
        annotation = unwrap_dict(annotation)
        image_size = val_dataset.as_image_size(annotation)
        output, feats = model(images, vanilla_with_feats=True)
        output_gradcam = compute_gradcam(output, feats, targets)
        output_gradcam_np = output_gradcam.data.cpu().numpy()[0]  # since we have batch size==1
        # Upsample the heatmap to the original image resolution so it can be
        # compared against the annotation mask.
        resized_output_gradcam = cv2.resize(output_gradcam_np, image_size)
        spatial_sum = resized_output_gradcam.sum()
        if spatial_sum <= 0:
            # We ignore images with zero Grad-CAM
            continue
        # resized_output_gradcam is now normalized and can be considered as probabilities
        resized_output_gradcam = resized_output_gradcam / spatial_sum
        # Binary (0/1) mask of the ground-truth object for the target class.
        mask = pointing_datasets.imagenet_as_mask(annotation, targets[0].item())
        mask = mask.type(torch.ByteTensor)
        mask = mask.cpu().data.numpy()
        gcam_inside_gt_mask = mask * resized_output_gradcam
        # Now we sum the heatmap inside the object bounding box
        total_gcam_inside_gt_mask = gcam_inside_gt_mask.sum()
        heatmap_inside_bbox.update(total_gcam_inside_gt_mask)
        # Progress report every 1000 examples (including the very first one).
        if i % 1000 == 0:
            print('\nResults after {} examples: '.format(i+1))
            print('Curr % of heatmap inside bbox: {:.4f} ({:.4f})'.format(heatmap_inside_bbox.val * 100,
                                                                          heatmap_inside_bbox.avg * 100))
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
    print('\nFinal Results - ')
    print('\n\n% of heatmap inside bbox: {:.4f}'.format(heatmap_inside_bbox.avg * 100))
    return
def compute_gradcam(output, feats, target):
    """
    Compute Grad-CAM heatmaps for the ground-truth class of each example.

    :param output: model logits, shape (batch, num_classes); must belong to
        the autograd graph that produced ``feats``.
    :param feats: convolutional feature maps, shape (batch, channels, h, w),
        reachable from ``output`` in the autograd graph.
    :param target: 1-D tensor of class indices, one per example.
    :return: tensor of shape (batch, h, w); each map is ReLU'd, normalized to
        sum (approximately) to 1 over the spatial dimensions, and offset by a
        small epsilon so every cell is strictly positive.
    """
    eps = 1e-8
    relu = nn.ReLU(inplace=True)

    # Build a one-hot selector for the target logit of every batch element.
    target = target.cpu().numpy()
    one_hot = np.zeros((output.shape[0], output.shape[-1]), dtype=np.float32)
    indices_range = np.arange(output.shape[0])
    one_hot[indices_range, target[indices_range]] = 1
    one_hot = torch.from_numpy(one_hot)
    one_hot.requires_grad = True

    # Use output.device instead of the previous hard-coded .cuda() calls so
    # the same code also runs on CPU-only hosts; behavior on GPU inputs is
    # unchanged.
    device = output.device
    one_hot_sum = torch.sum(one_hot.to(device) * output)
    dy_dz1, = torch.autograd.grad(one_hot_sum, feats,
                                  grad_outputs=torch.ones(one_hot_sum.size(), device=device),
                                  retain_graph=True, create_graph=True)
    # Dot product of grad and features preserves grad spatial locations
    # (Grad-CAM variant without global-average-pooling the gradients).
    gcam512_1 = dy_dz1 * feats
    gradcam = gcam512_1.sum(dim=1)
    gradcam = relu(gradcam)
    # Normalize each map to a spatial sum of 1; eps guards the all-zero case.
    spatial_sum1 = gradcam.sum(dim=[1, 2]).unsqueeze(-1).unsqueeze(-1)
    gradcam = (gradcam / (spatial_sum1 + eps)) + eps
    return gradcam
def unwrap_dict(dict_object):
    """
    Recursively collapse the single-element lists a DataLoader's default
    collation wraps around every annotation field (batch size == 1).

    The special 'object' key always holds a list of annotation dicts, so its
    list is kept and each entry is unwrapped individually. Lists with more
    than one element are left untouched.
    """
    result = {}
    for key, value in dict_object.items():
        if key == 'object':
            # Keep the list structure; unwrap each object annotation.
            result[key] = [unwrap_dict(entry) for entry in value]
        elif isinstance(value, dict):
            result[key] = unwrap_dict(value)
        elif isinstance(value, list) and len(value) == 1:
            result[key] = value[0]
        else:
            result[key] = value
    return result
class AverageMeter(object):
    """Tracks the most recent value plus a running sum, count, and mean."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero out all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running mean."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
# Script entry point: parse CLI args, build the model, and run the evaluation.
if __name__ == '__main__':
    main()
| 36.971831 | 112 | 0.616381 | import argparse
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data.distributed
import torchvision.transforms as transforms
import resnet_multigpu_cgc as resnet
import cv2
import datasets as pointing_datasets
model_names = ['resnet18', 'resnet50']
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('data', metavar='DIR', help='path to dataset')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('-j', '--workers', default=16, type=int, metavar='N',
help='number of data loading workers (default: 16)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N', help='mini-batch size (default: 96)')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
parser.add_argument('-g', '--num-gpus', default=1, type=int,
metavar='N', help='number of GPUs to match (default: 4)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--input_resize', default=224, type=int,
metavar='N', help='Resize for smallest side of input (default: 224)')
def main():
global args
args = parser.parse_args()
if args.pretrained:
print("=> using pre-trained model '{}'".format(args.arch))
if args.arch.startswith('resnet'):
model = resnet.__dict__[args.arch](pretrained=True)
else:
assert False, 'Unsupported architecture: {}'.format(args.arch)
else:
print("=> creating model '{}'".format(args.arch))
if args.arch.startswith('resnet'):
model = resnet.__dict__[args.arch]()
model = torch.nn.DataParallel(model).cuda()
if args.resume:
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume)
model.load_state_dict(checkpoint['state_dict'])
if (not args.resume) and (not args.pretrained):
assert False, "Please specify either the pre-trained model or checkpoint for evaluation"
cudnn.benchmark = True
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
# We will resize Gradcam heatmap to image size and compare the actual bbox co-ordinates
val_dataset = pointing_datasets.ImageNetDetection(args.data,
transform=transforms.Compose([
transforms.Resize(args.input_resize),
transforms.ToTensor(),
normalize,
]))
# we set batch size=1 since we are loading full resolution images.
val_loader = torch.utils.data.DataLoader(
val_dataset, batch_size=1, shuffle=False,
num_workers=args.workers, pin_memory=True)
validate_multi(val_loader, val_dataset, model)
def validate_multi(val_loader, val_dataset, model):
batch_time = AverageMeter()
heatmap_inside_bbox = AverageMeter()
# switch to evaluate mode
model.eval()
end = time.time()
for i, (images, annotation, targets) in enumerate(val_loader):
images = images.cuda(non_blocking=True)
targets = targets.cuda(non_blocking=True)
# we assume batch size == 1 and unwrap the first elem of every list in annotation object
annotation = unwrap_dict(annotation)
image_size = val_dataset.as_image_size(annotation)
output, feats = model(images, vanilla_with_feats=True)
output_gradcam = compute_gradcam(output, feats, targets)
output_gradcam_np = output_gradcam.data.cpu().numpy()[0] # since we have batch size==1
resized_output_gradcam = cv2.resize(output_gradcam_np, image_size)
spatial_sum = resized_output_gradcam.sum()
if spatial_sum <= 0:
# We ignore images with zero Grad-CAM
continue
# resized_output_gradcam is now normalized and can be considered as probabilities
resized_output_gradcam = resized_output_gradcam / spatial_sum
mask = pointing_datasets.imagenet_as_mask(annotation, targets[0].item())
mask = mask.type(torch.ByteTensor)
mask = mask.cpu().data.numpy()
gcam_inside_gt_mask = mask * resized_output_gradcam
# Now we sum the heatmap inside the object bounding box
total_gcam_inside_gt_mask = gcam_inside_gt_mask.sum()
heatmap_inside_bbox.update(total_gcam_inside_gt_mask)
if i % 1000 == 0:
print('\nResults after {} examples: '.format(i+1))
print('Curr % of heatmap inside bbox: {:.4f} ({:.4f})'.format(heatmap_inside_bbox.val * 100,
heatmap_inside_bbox.avg * 100))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
print('\nFinal Results - ')
print('\n\n% of heatmap inside bbox: {:.4f}'.format(heatmap_inside_bbox.avg * 100))
return
def compute_gradcam(output, feats, target):
eps = 1e-8
relu = nn.ReLU(inplace=True)
target = target.cpu().numpy()
one_hot = np.zeros((output.shape[0], output.shape[-1]), dtype=np.float32)
indices_range = np.arange(output.shape[0])
one_hot[indices_range, target[indices_range]] = 1
one_hot = torch.from_numpy(one_hot)
one_hot.requires_grad = True
# Compute the Grad-CAM for the original image
one_hot_cuda = torch.sum(one_hot.cuda() * output)
dy_dz1, = torch.autograd.grad(one_hot_cuda, feats, grad_outputs=torch.ones(one_hot_cuda.size()).cuda(),
retain_graph=True, create_graph=True)
# Changing to dot product of grad and features to preserve grad spatial locations
gcam512_1 = dy_dz1 * feats
gradcam = gcam512_1.sum(dim=1)
gradcam = relu(gradcam)
spatial_sum1 = gradcam.sum(dim=[1, 2]).unsqueeze(-1).unsqueeze(-1)
gradcam = (gradcam / (spatial_sum1 + eps)) + eps
return gradcam
def unwrap_dict(dict_object):
new_dict = {}
for k, v in dict_object.items():
if k == 'object':
new_v_list = []
for elem in v:
new_v_list.append(unwrap_dict(elem))
new_dict[k] = new_v_list
continue
if isinstance(v, dict):
new_v = unwrap_dict(v)
elif isinstance(v, list) and len(v) == 1:
new_v = v[0]
else:
new_v = v
new_dict[k] = new_v
return new_dict
class AverageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
if __name__ == '__main__':
main()
| true | true |
f72af5153bd9f88566e3d6863c7c6bad63faba5c | 558 | py | Python | var_global_local.py | Spy142/python_lesson_4 | 1539576301c2bf61be803be7846c9278f350a0f3 | [
"MIT"
] | null | null | null | var_global_local.py | Spy142/python_lesson_4 | 1539576301c2bf61be803be7846c9278f350a0f3 | [
"MIT"
] | null | null | null | var_global_local.py | Spy142/python_lesson_4 | 1539576301c2bf61be803be7846c9278f350a0f3 | [
"MIT"
] | 1 | 2020-09-09T09:27:06.000Z | 2020-09-09T09:27:06.000Z | global_var = 10
def function_example(local_var_1, local_var_2):
print(local_var_1, local_var_2, global_var)
function_example(11, 12)
def function_example_1(local_var_1, local_var_2):
    # The 'global' statement lets this function REBIND the module-level name;
    # without it, the assignment below would create a new local variable.
    global global_var
    global_var = 20
    print(local_var_1, local_var_2, global_var, id(global_var))
function_example_1(11, 12)
# The module-level value (and object identity) now reflects the rebinding above.
print(global_var, id(global_var))
# nonlocal
def counter():
    """Return a closure that counts how many times it has been called."""
    calls_so_far = 0

    def plus_one():
        # 'nonlocal' rebinds the enclosing function's variable, not a global.
        nonlocal calls_so_far
        calls_so_far = calls_so_far + 1
        return calls_so_far

    return plus_one


count = counter()
print(count)
print(count())
print(count())
| 16.909091 | 63 | 0.702509 | global_var = 10
def function_example(local_var_1, local_var_2):
print(local_var_1, local_var_2, global_var)
function_example(11, 12)
def function_example_1(local_var_1, local_var_2):
global global_var
global_var = 20
print(local_var_1, local_var_2, global_var, id(global_var))
function_example_1(11, 12)
print(global_var, id(global_var))
def counter():
num = 0
def plus_one():
nonlocal num
num+=1
return num
return plus_one
count = counter()
print(count)
print(count())
print(count())
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.