id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7 values |
|---|---|---|
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/gfx/vml_attach.js | dojo.require("dojox.gfx.vml");
dojo.experimental("dojox.gfx.vml_attach");
(function(){
dojox.gfx.attachNode=function(_1){
if(!_1){
return null;
}
var s=null;
switch(_1.tagName.toLowerCase()){
case dojox.gfx.Rect.nodeType:
s=new dojox.gfx.Rect(_1);
_2(s);
break;
case dojox.gfx.Ellipse.nodeType:
if(_1.style.width==_1.style.height){
s=new dojox.gfx.Circle(_1);
_3(s);
}else{
s=new dojox.gfx.Ellipse(_1);
_4(s);
}
break;
case dojox.gfx.Path.nodeType:
switch(_1.getAttribute("dojoGfxType")){
case "line":
s=new dojox.gfx.Line(_1);
_5(s);
break;
case "polyline":
s=new dojox.gfx.Polyline(_1);
_6(s);
break;
case "path":
s=new dojox.gfx.Path(_1);
_7(s);
break;
case "text":
s=new dojox.gfx.Text(_1);
_8(s);
_9(s);
_a(s);
break;
case "textpath":
s=new dojox.gfx.TextPath(_1);
_7(s);
_8(s);
_9(s);
break;
}
break;
case dojox.gfx.Image.nodeType:
switch(_1.getAttribute("dojoGfxType")){
case "image":
s=new dojox.gfx.Image(_1);
_b(s);
_c(s);
break;
}
break;
default:
return null;
}
if(!(s instanceof dojox.gfx.Image)){
_d(s);
_e(s);
if(!(s instanceof dojox.gfx.Text)){
_f(s);
}
}
return s;
};
dojox.gfx.attachSurface=function(_10){
var s=new dojox.gfx.Surface();
s.clipNode=_10;
var r=s.rawNode=_10.firstChild;
var b=r.firstChild;
if(!b||b.tagName!="rect"){
return null;
}
s.bgNode=r;
return s;
};
var _d=function(_11){
var _12=null,r=_11.rawNode,fo=r.fill;
if(fo.on&&fo.type=="gradient"){
var _12=dojo.clone(dojox.gfx.defaultLinearGradient),rad=dojox.gfx.matrix._degToRad(fo.angle);
_12.x2=Math.cos(rad);
_12.y2=Math.sin(rad);
_12.colors=[];
var _13=fo.colors.value.split(";");
for(var i=0;i<_13.length;++i){
var t=_13[i].match(/\S+/g);
if(!t||t.length!=2){
continue;
}
_12.colors.push({offset:dojox.gfx.vml._parseFloat(t[0]),color:new dojo.Color(t[1])});
}
}else{
if(fo.on&&fo.type=="gradientradial"){
var _12=dojo.clone(dojox.gfx.defaultRadialGradient),w=parseFloat(r.style.width),h=parseFloat(r.style.height);
_12.cx=isNaN(w)?0:fo.focusposition.x*w;
_12.cy=isNaN(h)?0:fo.focusposition.y*h;
_12.r=isNaN(w)?1:w/2;
_12.colors=[];
var _13=fo.colors.value.split(";");
for(var i=_13.length-1;i>=0;--i){
var t=_13[i].match(/\S+/g);
if(!t||t.length!=2){
continue;
}
_12.colors.push({offset:dojox.gfx.vml._parseFloat(t[0]),color:new dojo.Color(t[1])});
}
}else{
if(fo.on&&fo.type=="tile"){
var _12=dojo.clone(dojox.gfx.defaultPattern);
_12.width=dojox.gfx.pt2px(fo.size.x);
_12.height=dojox.gfx.pt2px(fo.size.y);
_12.x=fo.origin.x*_12.width;
_12.y=fo.origin.y*_12.height;
_12.src=fo.src;
}else{
if(fo.on&&r.fillcolor){
_12=new dojo.Color(r.fillcolor+"");
_12.a=fo.opacity;
}
}
}
}
_11.fillStyle=_12;
};
var _e=function(_14){
var r=_14.rawNode;
if(!r.stroked){
_14.strokeStyle=null;
return;
}
var _15=_14.strokeStyle=dojo.clone(dojox.gfx.defaultStroke),rs=r.stroke;
_15.color=new dojo.Color(r.strokecolor.value);
_15.width=dojox.gfx.normalizedLength(r.strokeweight+"");
_15.color.a=rs.opacity;
_15.cap=this._translate(this._capMapReversed,rs.endcap);
_15.join=rs.joinstyle=="miter"?rs.miterlimit:rs.joinstyle;
_15.style=rs.dashstyle;
};
// Restore the shape's transformation matrix from the VML skew property
// (rotation/scale live in skew.matrix, translation in skew.offset,
// the latter in points rather than pixels).
var _f = function(_16){
    var skew = _16.rawNode.skew;
    var m = skew.matrix;
    var offset = skew.offset;
    _16.matrix = dojox.gfx.matrix.normalize({
        xx: m.xtox,
        xy: m.ytox,
        yx: m.xtoy,
        yy: m.ytoy,
        dx: dojox.gfx.pt2px(offset.x),
        dy: dojox.gfx.pt2px(offset.y)
    });
};
// Remember the first child of the raw node as the background node.
var _17 = function(_18){
    var raw = _18.rawNode;
    _18.bgNode = raw.firstChild;
};
var _2=function(_19){
var r=_19.rawNode,_1a=r.outerHTML.match(/arcsize = \"(\d*\.?\d+[%f]?)\"/)[1],_1b=r.style,_1c=parseFloat(_1b.width),_1d=parseFloat(_1b.height);
_1a=(_1a.indexOf("%")>=0)?parseFloat(_1a)/100:dojox.gfx.vml._parseFloat(_1a);
_19.shape=dojox.gfx.makeParameters(dojox.gfx.defaultRect,{x:parseInt(_1b.left),y:parseInt(_1b.top),width:_1c,height:_1d,r:Math.min(_1c,_1d)*_1a});
};
// Restore an ellipse shape: the center is the top-left CSS corner
// shifted by the radii.
var _4 = function(_1e){
    var style = _1e.rawNode.style;
    var rx = parseInt(style.width) / 2;
    var ry = parseInt(style.height) / 2;
    _1e.shape = dojox.gfx.makeParameters(dojox.gfx.defaultEllipse, {
        cx: parseInt(style.left) + rx,
        cy: parseInt(style.top) + ry,
        rx: rx,
        ry: ry
    });
};
// Restore a circle shape: radius from the width, center from the
// top-left CSS corner shifted by the radius.
var _3 = function(_20){
    var style = _20.rawNode.style;
    var radius = parseInt(style.width) / 2;
    _20.shape = dojox.gfx.makeParameters(dojox.gfx.defaultCircle, {
        cx: parseInt(style.left) + radius,
        cy: parseInt(style.top) + radius,
        r: radius
    });
};
// Restore a line shape from the VML path "m x1 y1 l x2 y2 e".
// If the path does not have that exact form the default line is kept.
var _5 = function(_22){
    var shape = _22.shape = dojo.clone(dojox.gfx.defaultLine);
    var p = _22.rawNode.path.v.match(dojox.gfx.pathVmlRegExp);
    if(p.length < 7 || p[0] != "m" || p[3] != "l" || p[6] != "e"){
        return;
    }
    shape.x1 = parseInt(p[1]);
    shape.y1 = parseInt(p[2]);
    shape.x2 = parseInt(p[4]);
    shape.y2 = parseInt(p[5]);
};
// Restore a polyline shape from the VML path "m x y l x y x y ...".
// Fix: the first point must be read from p[1]/p[2] — p[0] is the "m"
// command token itself, so the old code parsed NaN and always bailed
// out with an empty point list. (The continuation loop starting at
// index 4, right after p[3] == "l", confirms the intended layout.)
var _6 = function(_24){
    var _25 = _24.shape = dojo.clone(dojox.gfx.defaultPolyline);
    var p = _24.rawNode.path.v.match(dojox.gfx.pathVmlRegExp);
    do{
        if(p.length < 3 || p[0] != "m"){
            break;
        }
        var x = parseInt(p[1]), y = parseInt(p[2]);
        if(isNaN(x) || isNaN(y)){
            break;
        }
        _25.points.push({x: x, y: y});
        if(p.length < 6 || p[3] != "l"){
            break;
        }
        // remaining coordinate pairs follow the "l" command
        for(var i = 4; i < p.length; i += 2){
            x = parseInt(p[i]);
            y = parseInt(p[i + 1]);
            if(isNaN(x) || isNaN(y)){
                break;
            }
            _25.points.push({x: x, y: y});
        }
    }while(false);
};
// Restore an image shape; the source URL lives on the nested <img> node.
var _b = function(_26){
    var shape = dojo.clone(dojox.gfx.defaultImage);
    shape.src = _26.rawNode.firstChild.src;
    _26.shape = shape;
};
var _c=function(_27){
var m=_27.rawNode.filters["DXImageTransform.Microsoft.Matrix"];
_27.matrix=dojox.gfx.matrix.normalize({xx:m.M11,xy:m.M12,yx:m.M21,yy:m.M22,dx:m.Dx,dy:m.Dy});
};
var _8=function(_28){
var _29=_28.shape=dojo.clone(dojox.gfx.defaultText),r=_28.rawNode,p=r.path.v.match(dojox.gfx.pathVmlRegExp);
do{
if(!p||p.length!=7){
break;
}
var c=r.childNodes,i=0;
for(;i<c.length&&c[i].tagName!="textpath";++i){
}
if(i>=c.length){
break;
}
var s=c[i].style;
_29.text=c[i].string;
switch(s["v-text-align"]){
case "left":
_29.x=parseInt(p[1]);
_29.align="start";
break;
case "center":
_29.x=(parseInt(p[1])+parseInt(p[4]))/2;
_29.align="middle";
break;
case "right":
_29.x=parseInt(p[4]);
_29.align="end";
break;
}
_29.y=parseInt(p[2]);
_29.decoration=s["text-decoration"];
_29.rotated=s["v-rotate-letters"].toLowerCase() in dojox.gfx.vml._bool;
_29.kerning=s["v-text-kern"].toLowerCase() in dojox.gfx.vml._bool;
return;
}while(false);
_28.shape=null;
};
// Restore the font style from the <textpath> child's CSS properties.
// Fix: the scan must skip children UNTIL it finds a textpath (!=),
// mirroring the identical search in the text-shape reader above; the
// old `==` comparison stopped at the first non-textpath node (or
// skipped the textpath itself), so the font was read from the wrong
// element or reported as missing.
var _9 = function(_2a){
    var _2b = _2a.fontStyle = dojo.clone(dojox.gfx.defaultFont), c = _2a.rawNode.childNodes, i = 0;
    for(; i < c.length && c[i].tagName != "textpath"; ++i){
    }
    if(i >= c.length){
        // no textpath child: this shape carries no font information
        _2a.fontStyle = null;
        return;
    }
    var s = c[i].style;
    _2b.style = s.fontstyle;
    _2b.variant = s.fontvariant;
    _2b.weight = s.fontweight;
    _2b.size = s.fontsize;
    _2b.family = s.fontfamily;
};
// Restore a text shape's matrix (via the generic skew reader), then
// shift it down by 0.35 of the font size to undo the baseline
// correction applied when the text was created.
var _a = function(_2c){
    _f(_2c);
    var matrix = _2c.matrix;
    var font = _2c.fontStyle;
    if(matrix && font){
        var shift = {dy: dojox.gfx.normalizedLength(font.size) * 0.35};
        _2c.matrix = dojox.gfx.matrix.multiply(matrix, shift);
    }
};
// Restore a generalized path: translate the VML command stream back
// into an SVG-style path string.
// Fix: the loop counter must be incremented (++i), not the matched
// array (++p); the old code clobbered `p` with NaN after the first
// iteration so at most one token was ever processed.
var _7 = function(_2e){
    var _2f = _2e.shape = dojo.clone(dojox.gfx.defaultPath), p = _2e.rawNode.path.v.match(dojox.gfx.pathVmlRegExp), t = [], _30 = false, map = dojox.gfx.Path._pathVmlToSvgMap;
    for(var i = 0; i < p.length; ++i){
        var s = p[i];
        if(s in map){
            // known VML command: emit its SVG equivalent
            _30 = false;
            t.push(map[s]);
        }else{
            if(!_30){
                var n = parseInt(s);
                if(isNaN(n)){
                    // unknown token: skip its arguments until the
                    // next recognized command
                    _30 = true;
                }else{
                    t.push(n);
                }
            }
        }
    }
    var l = t.length;
    // drop the trailing "l 0 0" artifact emitted by VML path endings
    if(l >= 4 && t[l - 1] == "" && t[l - 2] == 0 && t[l - 3] == 0 && t[l - 4] == "l"){
        t.splice(l - 4, 4);
    }
    if(l){
        _2f.path = t.join(" ");
    }
};
})(); | PypiClean |
/GPolyEncode-0.1.1.tar.gz/GPolyEncode-0.1.1/gpolyencode.py | import math
# StringIO fallback chain: prefer the fast C implementation on
# Python 2, fall back to the pure-Python module, and finally to
# io.StringIO so the module also imports on Python 3.
# (Bare `except:` narrowed to ImportError so unrelated errors are
# not silently swallowed.)
try:
    from cStringIO import StringIO
except ImportError:
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
class GPolyEncoder(object):
    """Encoder for the Google Maps encoded-polyline format.

    Points are simplified with the Douglas-Peucker algorithm and then
    delta-encoded into the compact ASCII ``points``/``levels`` strings
    understood by ``GPolyline.fromEncoded``.
    """

    def __init__(self, num_levels=18, zoom_factor=2, threshold=0.00001,
                 force_endpoints=True):
        """
        :param num_levels: number of zoom levels visibility is computed for.
        :param zoom_factor: zoom step between consecutive levels.
        :param threshold: minimum Douglas-Peucker deviation (in degrees)
            for an interior point to be kept.
        :param force_endpoints: always show the first/last point at the
            most detailed level.
        """
        self._num_levels = num_levels
        self._zoom_factor = zoom_factor
        self._threshold = threshold
        self._force_endpoints = force_endpoints
        # Deviation threshold per zoom level, from coarsest to finest.
        self._zoom_level_breaks = [
            threshold * (zoom_factor ** (num_levels - i - 1))
            for i in range(num_levels)
        ]

    def encode(self, points):
        """Encode a sequence of ``(lng, lat)`` pairs.

        :returns: dict with ``points``, ``levels``, ``zoomFactor`` and
            ``numLevels`` keys.
        """
        dists = {}
        # Simplify using Douglas-Peucker: record the maximum deviation
        # of every interior point that exceeds the threshold.
        abs_max_dist = 0
        stack = []
        if len(points) > 2:
            stack.append((0, len(points) - 1))
        while stack:
            current = stack.pop()
            max_dist = 0
            max_loc = current[0]  # defensive init; only read when max_dist > 0
            for i in range(current[0] + 1, current[1]):
                temp = self._distance(points[i], points[current[0]],
                                      points[current[1]])
                if temp > max_dist:
                    max_dist = temp
                    max_loc = i
            abs_max_dist = max(abs_max_dist, max_dist)
            if max_dist > self._threshold:
                dists[max_loc] = max_dist
                stack.append((current[0], max_loc))
                stack.append((max_loc, current[1]))
        enc_points, enc_levels = self._encode(points, dists, abs_max_dist)
        return {
            'points': enc_points,
            'levels': enc_levels,
            'zoomFactor': self._zoom_factor,
            'numLevels': self._num_levels,
        }

    def _encode(self, points, dists, abs_max_dist):
        """Delta-encode the retained points and their zoom levels."""
        encoded_levels = []
        encoded_points = []
        plat = 0
        plng = 0
        if self._force_endpoints:
            encoded_levels.append(self._encode_number(self._num_levels - 1))
        else:
            encoded_levels.append(self._encode_number(
                self._num_levels - self._compute_level(abs_max_dist) - 1))
        n_points = len(points)
        for i, p in enumerate(points):
            if 0 < i < n_points - 1 and i in dists:
                encoded_levels.append(self._encode_number(
                    self._num_levels - self._compute_level(dists[i]) - 1))
            if (i in dists) or (i == 0) or (i == n_points - 1):
                # Points are (lng, lat); deltas are emitted lat-first
                # on a 1e-5 degree grid (floor, matching the original).
                late5 = int(math.floor(p[1] * 1e5))
                lnge5 = int(math.floor(p[0] * 1e5))
                dlat = late5 - plat
                dlng = lnge5 - plng
                plat = late5
                plng = lnge5
                encoded_points.append(self._encode_signed_number(dlat))
                encoded_points.append(self._encode_signed_number(dlng))
        if self._force_endpoints:
            encoded_levels.append(self._encode_number(self._num_levels - 1))
        else:
            encoded_levels.append(self._encode_number(
                self._num_levels - self._compute_level(abs_max_dist) - 1))
        return ''.join(encoded_points), ''.join(encoded_levels)

    def _compute_level(self, abs_max_dist):
        """Return the coarsest zoom level at which a deviation is visible."""
        lev = 0
        if abs_max_dist > self._threshold:
            while abs_max_dist < self._zoom_level_breaks[lev]:
                lev += 1
        return lev

    def _encode_signed_number(self, num):
        """Zig-zag encode a signed integer, then chunk-encode it."""
        sgn_num = num << 1
        if num < 0:
            sgn_num = ~sgn_num
        return self._encode_number(sgn_num)

    def _encode_number(self, num):
        """Encode a non-negative integer as 5-bit chunks offset by 63.

        Built with a list + join instead of the Py2 cStringIO pattern.
        """
        chunks = []
        while num >= 0x20:
            chunks.append(chr((0x20 | (num & 0x1f)) + 63))
            num >>= 5
        chunks.append(chr(num + 63))
        return ''.join(chunks)

    def _distance(self, p0, p1, p2):
        """Distance from p0 to the segment p1-p2.

        Falls back to the distance to p2 when the segment is degenerate.
        """
        if p1[1] == p2[1] and p1[0] == p2[0]:
            return math.sqrt((p2[1] - p0[1]) ** 2 + (p2[0] - p0[0]) ** 2)
        # Projection parameter of p0 onto the p1->p2 segment.
        u = ((p0[1] - p1[1]) * (p2[1] - p1[1]) +
             (p0[0] - p1[0]) * (p2[0] - p1[0])) \
            / ((p2[1] - p1[1]) ** 2 + (p2[0] - p1[0]) ** 2)
        if u <= 0:
            return math.sqrt((p0[1] - p1[1]) ** 2 + (p0[0] - p1[0]) ** 2)
        if u >= 1:
            return math.sqrt((p0[1] - p2[1]) ** 2 + (p0[0] - p2[0]) ** 2)
        return math.sqrt((p0[1] - p1[1] - u * (p2[1] - p1[1])) ** 2 +
                         (p0[0] - p1[0] - u * (p2[0] - p1[0])) ** 2)
/Booktype-1.5.tar.gz/Booktype-1.5/lib/booki/site_static/xinha/plugins/TableOperations/lang/fr.js | {
"Align": "Aligner",
"All four sides": "Quatre cotés",
"Background": "Arrière plan",
"Baseline": "Ligne de base",
"Border": "Bordure",
"Borders": "Bordures",
"Bottom": "Bas",
"Style [CSS]": "Style [CSS]",
"Caption": "Étiquette",
"Cell Properties": "Propriétés de cellule",
"Center": "Centre",
"Char": "Charactère",
"Collapsed borders": "Bordure effondrés",
"Color": "Couleur",
"Description": "Description",
"FG Color": "Couleur de face",
"Float": "Flotteur",
"Frames": "Vues",
"Height": "Largeur",
"How many columns would you like to merge?": "Combien de colonnes voulez-vous fusionner?",
"How many rows would you like to merge?": "Combien de rangées voulez-vous fusionner?",
"Image URL": "URL pour l'image",
"Justify": "Justifié",
"Layout": "Arrangement",
"Left": "Gauche",
"Margin": "Marge",
"Middle": "Milieu",
"No rules": "Aucune règle",
"No sides": "Aucun côté",
"None": "Aucun",
"Padding": "Remplissage",
"Please click into some cell": "Cliquer sur une cellule",
"Right": "Droit",
"Row Properties": "Propriétés de rangée",
"Rules will appear between all rows and columns": "Règles entre les rangées et les cellules",
"Rules will appear between columns only": "Règles entre les colonnes seulement",
"Rules will appear between rows only": "Règles entre les rangées seulement",
"Rules": "Les règles",
"Spacing and padding": "Espacement et remplissage",
"Spacing": "Espacement",
"Summary": "Sommaire",
"Delete cell": "Supprimer une cellule",
"Insert cell after": "Insérer une cellule après",
"Insert cell before": "Insérer une cellule avant",
"Merge cells": "Fusionner les cellules",
"Cell properties": "Cell properties",
"Split cell": "Diviser la cellule",
"Delete column": "Supprimer la colonne",
"Insert column after": "Insérer une colonne après",
"Insert column before": "Insérer une colonne avant",
"Split column": "Diviser une colonne",
"Delete row": "Supprimer une rangée",
"Insert row before": "Insérer une rangée avant",
"Insert row after": "Insérer une rangée après",
"Row properties": "Propriétés de rangée",
"Split row": "Diviser la rangée",
"Table properties": "Propriétés de table",
"Table Properties": "Propriétés de table",
"Text align": "Alignement",
"The bottom side only": "Côté du bas seulement",
"The left-hand side only": "Côté gauche seulement",
"The right and left sides only": "Côté gauche et droit seulement",
"The right-hand side only": "Côté droit seulement",
"The top and bottom sides only": "Côté haut et bas seulement",
"The top side only": "Côté haut seulement",
"Top": "Haut",
"Unset color": "Enlever la couleur",
"Vertical align": "Vertical",
"Width": "Longeur",
"Xinha cowardly refuses to delete the last cell in row.": "Il est impossible de supprimer la dernière cellule de la rangée.",
"Xinha cowardly refuses to delete the last column in table.": "Il est impossible de supprimer la dernière colonne de la table.",
"Xinha cowardly refuses to delete the last row in table.": "Il est impossible de supprimer la dernière rangée de la table",
"percent": "%",
"pixels": "pixels"
}; | PypiClean |
/Diofant-0.14.0a2.tar.gz/Diofant-0.14.0a2/diofant/integrals/trigonometry.py | from ..core import Dummy, Eq, Integer, Rational, Wild, cacheit
from ..functions import Piecewise, binomial, cos, sin
# TODO sin(a*x)*cos(b*x) -> sin((a+b)x) + sin((a-b)x) ?
# creating, each time, Wild's and sin/cos/Mul is expensive. Also, our match &
# subs are very slow when not cached, and if we create Wild each time, we
# effectively block caching.
#
# so we cache the pattern
# need to use a function instead of lamda since hash of lambda changes on
# each call to _pat_sincos
def _integer_instance(n):
    # Wild "properties" predicate: accept only exact Integer atoms.
    # A named function is used instead of a lambda because a lambda's
    # hash changes per call and would defeat the caching of _pat_sincos.
    return isinstance(n, Integer)
@cacheit
def _pat_sincos(x):
    """Build (and cache) the wildcard pattern ``sin(a*x)**n * cos(a*x)**m``.

    Returns the pattern together with the Wild symbols ``a``, ``n``, ``m``
    so the caller can read matched values back.  Creating Wilds and the
    pattern on every call is expensive and would also defeat the
    match/subs caches, hence the memoization.
    """
    a = Wild('a', exclude=[x])
    n, m = [Wild(s, exclude=[x], properties=[_integer_instance])
            for s in 'nm']
    pat = sin(a*x)**n * cos(a*x)**m
    return pat, a, n, m
_u = Dummy('u')
def trigintegrate(f, x, conds='piecewise'):
    """Integrate f = Mul(trig) over x

    >>> trigintegrate(sin(x)*cos(x), x)
    sin(x)**2/2
    >>> trigintegrate(sin(x)**2, x)
    x/2 - sin(x)*cos(x)/2
    >>> trigintegrate(tan(x)*sec(x), x)
    1/cos(x)
    >>> trigintegrate(sin(x)*tan(x), x)
    -log(sin(x) - 1)/2 + log(sin(x) + 1)/2 - sin(x)

    References
    ==========

    * https://en.wikibooks.org/wiki/Calculus/Integration_techniques

    See Also
    ========

    diofant.integrals.integrals.Integral.doit
    diofant.integrals.integrals.Integral
    """
    from .integrals import integrate  # local import avoids a cyclic dependency

    pat, a, n, m = _pat_sincos(x)
    f = f.rewrite('sincos')
    M = f.match(pat)
    if M is None:
        return  # not a sin(a*x)**n * cos(a*x)**m expression

    n, m = M[n], M[m]
    if n == 0 and m == 0:
        return x
    # Antiderivative value at a == 0 (used by the Piecewise results below).
    zz = x if n == 0 else Integer(0)

    a = M[a]
    if n.is_odd or m.is_odd:
        # At least one exponent is odd: use a u-substitution.
        u = _u
        n_, m_ = n.is_odd, m.is_odd
        # take smallest n or m -- to choose simplest substitution
        if n_ and m_:
            n_ = n_ and (n < m)    # NB: careful here, one of the
            m_ = m_ and not n < m  # conditions *must* be true

        #  n      m       u=C              (n-1)/2    m
        # S(x) * C(x) dx  -->  -(1 - u^2)          * u   du
        if n_:
            ff = -(1 - u**2)**((n - 1)/2) * u**m
            uu = cos(a*x)
        #  n      m       u=S    n             (m-1)/2
        # S(x) * C(x) dx  -->   u  * (1 - u^2)          du
        else:
            assert m_
            ff = u**n * (1 - u**2)**((m - 1)/2)
            uu = sin(a*x)

        fi = integrate(ff, u)  # XXX cyclic deps
        fx = fi.subs({u: uu})
        if conds == 'piecewise':
            return Piecewise((zz, Eq(a, 0)), (fx / a, True))
        return fx / a

    # n & m are both even
    #
    #               2k      2m                         2l       2l
    # we transform S (x) * C (x) into terms with only S (x) or C (x)
    #
    # example:
    #  100   4       100        2    2    100          4         2
    # S (x)*C (x) = S (x)*(1-S (x))  = S (x)*(1 + S (x) - 2*S (x))
    #
    #                  104       102     100
    #               = S (x) - 2*S (x) + S (x)
    #       2k
    # then S   is integrated with recursive formula

    # take largest n or m -- to choose simplest substitution
    n_ = (abs(n) > abs(m))
    m_ = (abs(m) > abs(n))
    res = Integer(0)

    if n_:
        #  2k         2 k             i             2i
        # C   = (1 - S )  = sum(i, (-) * B(k, i) * S  )
        if m > 0:
            for i in range(m//2 + 1):
                res += ((-1)**i * binomial(m//2, i) *
                        _sin_pow_integrate(n + 2*i, x))
        elif m == 0:
            res = _sin_pow_integrate(n, x)
        else:
            # m < 0 , |n| > |m|
            #  /                                                        /
            # |                                                        |
            # |    m       n          -1        m+1     n-1    n - 1   |    m+2     n-2
            # | cos (x) sin (x) dx = ______ cos (x) sin (x) + _______  | cos (x) sin (x) dx
            # |                                                        |
            # |                      m + 1                     m + 1   |
            # /                                                        /
            res = (Rational(-1, m + 1) * cos(x)**(m + 1) * sin(x)**(n - 1) +
                   Rational(n - 1, m + 1) *
                   trigintegrate(cos(x)**(m + 2)*sin(x)**(n - 2), x))
    elif m_:
        #  2k         2 k            i             2i
        # S   = (1 - C )  = sum(i, (-) * B(k, i) * C  )
        if n > 0:
            #      /                            /
            #     |                            |
            #     |    m       n               |    -m       n
            #     | cos (x)*sin (x) dx  or     | cos (x) * sin (x) dx
            #     |                            |
            #    /                            /
            #
            # |m| > |n| ; m, n > 0 ; m, n belong to Z - {0}
            #    n                                         2
            # sin (x) term is expanded here in terms of cos (x),
            # and then integrated.
            #
            for i in range(n//2 + 1):
                res += ((-1)**i * binomial(n//2, i) *
                        _cos_pow_integrate(m + 2*i, x))
        elif n == 0:
            #   /
            #  |
            #  |  1
            #  | _ _ _
            #  |    m
            #  | cos (x)
            # /
            #
            res = _cos_pow_integrate(m, x)
        else:
            # n < 0 , |m| > |n|
            #  /                                                        /
            # |                                                        |
            # |    m       n          1        m-1     n+1    m - 1    |    m-2     n+2
            # | cos (x) sin (x) dx = ______ cos (x) sin (x) + _______  | cos (x) sin (x) dx
            # |                                                        |
            # |                      n + 1                     n + 1   |
            # /                                                        /
            res = (Rational(1, n + 1) * cos(x)**(m - 1)*sin(x)**(n + 1) +
                   Rational(m - 1, n + 1) *
                   trigintegrate(cos(x)**(m - 2)*sin(x)**(n + 2), x))
    else:
        if m == n:
            # Substitute sin(2x)/2 for sin(x)cos(x) and then Integrate.
            res = integrate((Rational(1, 2)*sin(2*x))**m, x)
        else:
            assert m == -n
            if n < 0:
                # Same as the scheme described above.
                # the function argument to integrate in the end will
                # be 1, this cannot be integrated by trigintegrate.
                # Hence use diofant.integrals.integrate.
                res = (Rational(1, n + 1) * cos(x)**(m - 1) * sin(x)**(n + 1) +
                       Rational(m - 1, n + 1) *
                       integrate(cos(x)**(m - 2) * sin(x)**(n + 2), x))
            else:
                res = (Rational(-1, m + 1) * cos(x)**(m + 1) * sin(x)**(n - 1) +
                       Rational(n - 1, m + 1) *
                       integrate(cos(x)**(m + 2)*sin(x)**(n - 2), x))

    if conds == 'piecewise':
        return Piecewise((zz, Eq(a, 0)), (res.subs({x: a*x}) / a, True))
    return res.subs({x: a*x}) / a
def _sin_pow_integrate(n, x):
    """Recursive antiderivative of ``sin(x)**n`` for even integer n."""
    assert n.is_even
    if n > 0:
        # n > 0: standard power-reduction formula
        #  /                                                  /
        # |                                                  |
        # |    n         -1              n-1     n - 1       |    n-2
        # | sin (x) dx = ______ cos (x) sin (x) + _______    | sin (x) dx
        # |                                                  |
        # |               n                        n         |
        # /                                                  /
        #
        return (Rational(-1, n) * cos(x) * sin(x)**(n - 1) +
                Rational(n - 1, n) * _sin_pow_integrate(n - 2, x))
    if n < 0:
        # n < 0: the reduction formula inverted, raising the exponent
        #  /                                                  /
        # |                                                  |
        # |    n          1               n+1     n + 2      |    n+2
        # | sin (x) dx = _______ cos (x) sin (x) + _______   | sin (x) dx
        # |                                                  |
        # |              n + 1                     n + 1     |
        # /                                                  /
        #
        return (Rational(1, n + 1) * cos(x) * sin(x)**(n + 1) +
                Rational(n + 2, n + 1) * _sin_pow_integrate(n + 2, x))
    else:
        # n == 0
        # Recursion break.
        return x
def _cos_pow_integrate(n, x):
    """Recursive antiderivative of ``cos(x)**n`` for even integer n."""
    assert n.is_even
    if n > 0:
        # n > 0: standard power-reduction formula
        # (comment fixed: the integrand is cos(x)**n, not sin(x)**n)
        #  /                                                  /
        # |                                                  |
        # |    n           1              n-1     n - 1      |    n-2
        # | cos (x) dx = ______ sin (x) cos (x) + _______    | cos (x) dx
        # |                                                  |
        # |               n                        n         |
        # /                                                  /
        #
        return (Rational(1, n) * sin(x) * cos(x)**(n - 1) +
                Rational(n - 1, n) * _cos_pow_integrate(n - 2, x))
    if n < 0:
        # n < 0: the reduction formula inverted, raising the exponent
        #  /                                                  /
        # |                                                  |
        # |    n          -1              n+1     n + 2      |    n+2
        # | cos (x) dx = _______ sin (x) cos (x) + _______   | cos (x) dx
        # |                                                  |
        # |              n + 1                     n + 1     |
        # /                                                  /
        #
        return (Rational(-1, n + 1) * sin(x) * cos(x)**(n + 1) +
                Rational(n + 2, n + 1) * _cos_pow_integrate(n + 2, x))
    else:
        # n == 0
        # Recursion Break.
        return x
/MatchZoo-test-1.0.tar.gz/MatchZoo-test-1.0/matchzoo/data_pack/data_pack.py |
import typing
import inspect
from pathlib import Path
import functools
import dill
from tqdm import tqdm
import numpy as np
import pandas as pd
import matchzoo
tqdm.pandas()
def _convert_to_list_index(
index: typing.Union[int, slice, np.array],
length: int
):
if isinstance(index, int):
index = [index]
elif isinstance(index, slice):
index = list(range(*index.indices(length)))
return index
class DataPack(object):
"""
Matchzoo :class:`DataPack` data structure, store dataframe and context.
`DataPack` is a MatchZoo native data structure that most MatchZoo data
handling processes build upon. A `DataPack` consists of three parts:
`left`, `right` and `relation`, each one of is a `pandas.DataFrame`.
:param relation: Store the relation between left document
and right document use ids.
:param left: Store the content or features for id_left.
:param right: Store the content or features for
id_right.
Example:
>>> left = [
... ['qid1', 'query 1'],
... ['qid2', 'query 2']
... ]
>>> right = [
... ['did1', 'document 1'],
... ['did2', 'document 2']
... ]
>>> relation = [['qid1', 'did1', 1], ['qid2', 'did2', 1]]
>>> relation_df = pd.DataFrame(relation)
>>> left = pd.DataFrame(left)
>>> right = pd.DataFrame(right)
>>> dp = DataPack(
... relation=relation_df,
... left=left,
... right=right,
... )
>>> len(dp)
2
"""
DATA_FILENAME = 'data.dill'
def __init__(
self,
relation: pd.DataFrame,
left: pd.DataFrame,
right: pd.DataFrame
):
""":class:`DataPack` initializer."""
self._relation = relation
self._left = left
self._right = right
@property
def has_label(self) -> bool:
""":return: `True` if `label` column exists, `False` other wise."""
return 'label' in self._relation.columns
def __len__(self) -> int:
"""Get numer of rows in the class:`DataPack` object."""
return self._relation.shape[0]
@property
def frame(self) -> 'DataPack.FrameView':
"""
View the data pack as a :class:`pandas.DataFrame`.
Returned data frame is created by merging the left data frame,
the right dataframe and the relation data frame. Use `[]` to access
an item or a slice of items.
:return: A :class:`matchzoo.DataPack.FrameView` instance.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> type(data_pack.frame)
<class 'matchzoo.data_pack.data_pack.DataPack.FrameView'>
>>> frame_slice = data_pack.frame[0:5]
>>> type(frame_slice)
<class 'pandas.core.frame.DataFrame'>
>>> list(frame_slice.columns)
['id_left', 'text_left', 'id_right', 'text_right', 'label']
>>> full_frame = data_pack.frame()
>>> len(full_frame) == len(data_pack)
True
"""
return DataPack.FrameView(self)
def unpack(self) -> typing.Tuple[typing.Dict[str, np.array],
typing.Optional[np.array]]:
"""
Unpack the data for training.
The return value can be directly feed to `model.fit` or
`model.fit_generator`.
:return: A tuple of (X, y). `y` is `None` if `self` has no label.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> X, y = data_pack.unpack()
>>> type(X)
<class 'dict'>
>>> sorted(X.keys())
['id_left', 'id_right', 'text_left', 'text_right']
>>> type(y)
<class 'numpy.ndarray'>
>>> X, y = data_pack.drop_label().unpack()
>>> type(y)
<class 'NoneType'>
"""
frame = self.frame()
columns = list(frame.columns)
if self.has_label:
columns.remove('label')
y = np.vstack(np.asarray(frame['label']))
else:
y = None
x = frame[columns].to_dict(orient='list')
for key, val in x.items():
x[key] = np.array(val)
return x, y
    def __getitem__(self, index: typing.Union[int, slice, np.array]
                    ) -> 'DataPack':
        """
        Get specific item(s) as a new :class:`DataPack`.

        The returned :class:`DataPack` will be a copy of the subset of the
        original :class:`DataPack`.

        :param index: Index of the item(s) to get.
        :return: An instance of :class:`DataPack`.
        """
        index = _convert_to_list_index(index, len(self))
        relation = self._relation.loc[index].reset_index(drop=True)
        # Keep only the left/right rows actually referenced by the selected
        # relations, and copy everything so the slice is independent.
        left = self._left.loc[relation['id_left'].unique()]
        right = self._right.loc[relation['id_right'].unique()]
        return DataPack(left=left.copy(),
                        right=right.copy(),
                        relation=relation.copy())
@property
def relation(self):
"""`relation` getter."""
return self._relation
@relation.setter
def relation(self, value):
"""`relation` setter."""
self._relation = value
@property
def left(self) -> pd.DataFrame:
"""Get :meth:`left` of :class:`DataPack`."""
return self._left
@property
def right(self) -> pd.DataFrame:
"""Get :meth:`right` of :class:`DataPack`."""
return self._right
def copy(self) -> 'DataPack':
""":return: A deep copy."""
return DataPack(left=self._left.copy(),
right=self._right.copy(),
relation=self._relation.copy())
def save(self, dirpath: typing.Union[str, Path]):
"""
Save the :class:`DataPack` object.
A saved :class:`DataPack` is represented as a directory with a
:class:`DataPack` object (transformed user input as features and
context), it will be saved by `pickle`.
:param dirpath: directory path of the saved :class:`DataPack`.
"""
dirpath = Path(dirpath)
data_file_path = dirpath.joinpath(self.DATA_FILENAME)
if not dirpath.exists():
dirpath.mkdir(parents=True)
dill.dump(self, open(data_file_path, mode='wb'))
    def _optional_inplace(func):
        """
        Decorator that adds `inplace` key word argument to a method.

        Decorate any method that modifies inplace to make that inplace change
        optional.
        """
        doc = ":param inplace: `True` to modify inplace, `False` to return " \
              "a modified copy. (default: `False`)"

        def _clean(s):
            # Whitespace-insensitive containment check helper.
            return s.replace(' ', '').replace('\n', '')

        # Fail fast at class-definition time if the decorated method does
        # not document the `inplace` parameter it is being given.
        if _clean(doc) not in _clean(inspect.getdoc(func)):
            raise NotImplementedError(
                f"`inplace` parameter of {func} not documented.\n"
                f"Please add the following line to its documentation:\n{doc}")

        @functools.wraps(func)
        def wrapper(
            self, *args, inplace: bool = False, **kwargs
        ) -> typing.Optional['DataPack']:
            # Mutate self when inplace, otherwise mutate a deep copy and
            # return it; inplace calls deliberately return None.
            if inplace:
                target = self
            else:
                target = self.copy()
            func(target, *args, **kwargs)
            if not inplace:
                return target
        return wrapper
@_optional_inplace
def shuffle(self):
"""
Shuffle the data pack by shuffling the relation column.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
Example:
>>> import matchzoo as mz
>>> import numpy.random
>>> numpy.random.seed(0)
>>> data_pack = mz.datasets.toy.load_data()
>>> orig_ids = data_pack.relation['id_left']
>>> shuffled = data_pack.shuffle()
>>> (shuffled.relation['id_left'] != orig_ids).any()
True
"""
self._relation = self._relation.sample(frac=1)
self._relation.reset_index(drop=True, inplace=True)
@_optional_inplace
def drop_label(self):
"""
Remove `label` column from the data pack.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> data_pack.has_label
True
>>> data_pack.drop_label(inplace=True)
>>> data_pack.has_label
False
"""
self._relation = self._relation.drop(columns='label')
@_optional_inplace
def append_text_length(self, verbose=1):
"""
Append `length_left` and `length_right` columns.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
:param verbose: Verbosity.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> 'length_left' in data_pack.frame[0].columns
False
>>> new_data_pack = data_pack.append_text_length(verbose=0)
>>> 'length_left' in new_data_pack.frame[0].columns
True
>>> 'length_left' in data_pack.frame[0].columns
False
>>> data_pack.append_text_length(inplace=True, verbose=0)
>>> 'length_left' in data_pack.frame[0].columns
True
"""
self.apply_on_text(len, rename=('length_left', 'length_right'),
inplace=True, verbose=verbose)
@_optional_inplace
def apply_on_text(
self, func: typing.Callable,
mode: str = 'both',
rename: typing.Optional[str] = None,
verbose: int = 1
):
"""
Apply `func` to text columns based on `mode`.
:param func: The function to apply.
:param mode: One of "both", "left" and "right".
:param rename: If set, use new names for results instead of replacing
the original columns. To set `rename` in "both" mode, use a tuple
of `str`, e.g. ("text_left_new_name", "text_right_new_name").
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
:param verbose: Verbosity.
Examples::
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> frame = data_pack.frame
To apply `len` on the left text and add the result as 'length_left':
>>> data_pack.apply_on_text(len, mode='left',
... rename='length_left',
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'id_right', 'text_right', 'label']
To do the same to the right text:
>>> data_pack.apply_on_text(len, mode='right',
... rename='length_right',
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'id_right', 'text_right', 'length_right', 'label']
To do the same to the both texts at the same time:
>>> data_pack.apply_on_text(len, mode='both',
... rename=('extra_left', 'extra_right'),
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'extra_left', 'id_right', 'text_right', 'length_right', 'extra_right', 'label']
To suppress outputs:
>>> data_pack.apply_on_text(len, mode='both', verbose=0,
... inplace=True)
"""
if mode == 'both':
self._apply_on_text_both(func, rename, verbose=verbose)
elif mode == 'left':
self._apply_on_text_left(func, rename, verbose=verbose)
elif mode == 'right':
self._apply_on_text_right(func, rename, verbose=verbose)
else:
raise ValueError(f"{mode} is not a valid mode type."
f"Must be one of `left` `right` `both`.")
def _apply_on_text_right(self, func, rename, verbose=1):
name = rename or 'text_right'
if verbose:
tqdm.pandas(desc="Processing " + name + " with " + func.__name__)
self._right[name] = self._right['text_right'].progress_apply(func)
else:
self._right[name] = self._right['text_right'].apply(func)
def _apply_on_text_left(self, func, rename, verbose=1):
name = rename or 'text_left'
if verbose:
tqdm.pandas(desc="Processing " + name + " with " + func.__name__)
self._left[name] = self._left['text_left'].progress_apply(func)
else:
self._left[name] = self._left['text_left'].apply(func)
def _apply_on_text_both(self, func, rename, verbose=1):
left_name, right_name = rename or ('text_left', 'text_right')
self._apply_on_text_left(func, rename=left_name, verbose=verbose)
self._apply_on_text_right(func, rename=right_name, verbose=verbose)
class FrameView(object):
"""FrameView."""
def __init__(self, data_pack: 'DataPack'):
"""
View a data pack as a frame.
A slice of the view is genereated by merging three parts of the
data pack being viewed into a big table.
:param data_pack: :class:`DataPack` to view.
Examples::
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> frame = data_pack.frame
Use `()` to get a full copy of the frame:
>>> list(frame().columns)
['id_left', 'text_left', 'id_right', 'text_right', 'label']
>>> len(frame()) == len(data_pack)
True
Notice that a view is binded to the original data pack, so changing
contents of the data pack will affect a view previously created:
>>> data_pack.drop_label(inplace=True)
>>> list(frame().columns)
['id_left', 'text_left', 'id_right', 'text_right']
To slice the view:
>>> frame_slice = frame[3:5]
>>> len(frame_slice)
2
"""
self._data_pack = data_pack
        def __getitem__(self, index: typing.Union[int, slice, np.array]
                        ) -> pd.DataFrame:
            """Slicer: merge left, right and relation rows into one frame."""
            dp = self._data_pack
            index = _convert_to_list_index(index, len(dp))
            # Expand ids to their full rows, aligned with the relation order.
            left_df = dp.left.loc[dp.relation['id_left'][index]].reset_index()
            right_df = dp.right.loc[
                dp.relation['id_right'][index]].reset_index()
            joined_table = left_df.join(right_df)
            # Append every relation column except the id columns (e.g.
            # 'label'), re-aligned on the positional index.
            for column in dp.relation.columns:
                if column not in ['id_left', 'id_right']:
                    labels = dp.relation[column][index].to_frame()
                    labels = labels.reset_index(drop=True)
                    joined_table = joined_table.join(labels)
            return joined_table
def __call__(self):
""":return: A full copy. Equivalant to `frame[:]`."""
return self[:]
def load_data_pack(dirpath: typing.Union[str, Path]) -> DataPack:
    """
    Load a :class:`DataPack`. The reverse function of :meth:`save`.

    :param dirpath: directory path of the saved model.
    :return: a :class:`DataPack` instance.
    """
    dirpath = Path(dirpath)
    data_file_path = dirpath.joinpath(DataPack.DATA_FILENAME)
    # Use a context manager so the file handle is closed even when
    # deserialization raises (the previous code leaked the open handle).
    with open(data_file_path, 'rb') as data_file:
        dp = dill.load(data_file)
    return dp
/MnemoPwd-1.2.1-py3-none-any.whl/mnemopwd/server/util/Daemon.py |
# Copyright (c) 2015, Thierry Lemeunier <thierry at lemeunier dot net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Provides a simple Daemon class to ease the process of forking a
python application on POSIX systems.
"""
import errno
import logging
import socket
from logging.handlers import RotatingFileHandler
import os
import signal
import sys
import time
import datetime
from ...server.util.Configuration import Configuration
class Daemon(object):
    """Daemon base class.

    Subclasses override :meth:`run` and call :meth:`main`, which
    dispatches on ``Configuration.action`` ('start', 'stop' or 'status').
    """

    def run(self):
        """Override. We are in the daemon at this point."""

    def main(self):
        """Read the command line and either start or stop the daemon"""
        if Configuration.action == 'start':
            self.start()
        elif Configuration.action == 'stop':
            self.stop()
        elif Configuration.action == 'status':
            self.status()
        else:
            raise ValueError(Configuration.action)

    def on_sigterm(self, signalnum, frame):
        """Handle SIGTERM by treating it as a keyboard interrupt"""
        raise KeyboardInterrupt('SIGTERM')

    def add_signal_handlers(self):
        """Register the sigterm handler"""
        signal.signal(signal.SIGTERM, self.on_sigterm)

    def start(self):
        """Initialize and run the daemon"""
        self.check_pid()
        self.add_signal_handlers()
        self.start_logging()
        try:
            self.check_pid_writable()
            self.check_server_accessibility()
            self.daemonize()
        except:
            logging.exception("failed to start due to an exception")
            raise
        # Only the detached daemon child reaches this point.
        self.write_pid()
        try:
            try:
                self.run()
            except (KeyboardInterrupt, SystemExit):
                pass
            except OSError as exc:
                logging.exception(str(exc))
                pass
            except:
                logging.exception("stopping with an exception")
                raise
        finally:
            self.remove_pid()

    def stop(self):
        """Stop the running process"""
        if Configuration.pidfile and os.path.exists(Configuration.pidfile):
            # Context manager guarantees the pid-file handle is closed
            # (the previous code leaked it).
            with open(Configuration.pidfile) as pid_file:
                pid = int(pid_file.read())
            os.kill(pid, signal.SIGTERM)
            # Poll for up to ~2.5 s until the process is really gone.
            for n in range(10):
                time.sleep(0.25)
                try:
                    # Signal 0 only probes for process existence.
                    os.kill(pid, 0)
                except OSError as why:
                    if why.errno == errno.ESRCH:
                        break
                    else:
                        raise
            else:
                sys.exit("pid %d did not die" % pid)
        else:
            sys.exit("not running")

    def status(self):
        """Report whether an instance is currently running"""
        self.check_pid(True)

    def start_logging(self):
        """Configure the logging module"""
        handler = RotatingFileHandler(
            Configuration.logfile,
            maxBytes=Configuration.logmaxmb * 1024 * 1024,
            backupCount=Configuration.logbackups)
        log = logging.getLogger()
        log.setLevel(Configuration.loglevel)
        handler.setFormatter(
            logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
        log.addHandler(handler)

    def check_pid(self, status=False):
        """Check the pid file.

        Stop using sys.exit() if another instance is already running.
        If the pid file exists but no other instance is running,
        delete the pid file.
        """
        if not Configuration.pidfile:
            return
        if os.path.exists(Configuration.pidfile):
            try:
                with open(Configuration.pidfile, 'rb') as pid_file:
                    pid = int(pid_file.read().decode('utf-8').strip())
            except ValueError:
                msg = 'pidfile %s contains a non-integer value' % Configuration.pidfile
                sys.exit(msg)
            try:
                # Signal 0 only checks that the process exists.
                os.kill(pid, 0)
            except OSError as err:
                if err.errno == errno.ESRCH:
                    # The pid doesn't exist, so remove the stale pidfile.
                    os.remove(Configuration.pidfile)
                else:
                    msg = ("failed to check status of process %s "
                           "from pidfile %s: %s" % (pid, Configuration.pidfile, err.strerror))
                    sys.exit(msg)
            else:
                mtime = os.stat(Configuration.pidfile).st_mtime
                since = datetime.timedelta(seconds=(time.time() - mtime))
                msg = 'instance [pid %s] seems to be running since %s [%s days]' % (pid, time.ctime(mtime), since.days)
                sys.exit(msg)
        elif status:
            print('no instance seems to be running')

    def check_pid_writable(self):
        """Verify the user has access to write to the pid file.

        Note that the eventual process ID isn't known until after
        daemonize(), so it's not possible to write the PID here.
        """
        if not Configuration.pidfile:
            return
        if os.path.exists(Configuration.pidfile):
            check = Configuration.pidfile
        else:
            check = os.path.dirname(Configuration.pidfile)
        if not os.access(check, os.W_OK):
            msg = 'unable to write to pidfile %s' % Configuration.pidfile
            sys.exit(msg)

    def check_server_accessibility(self):
        """Fail fast when the configured host/port is already in use."""
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            probe.bind((Configuration.host, Configuration.port))
        except OSError as exc:
            # Compare against the symbolic constant: the raw value 48 used
            # previously is EADDRINUSE only on macOS/BSD (it is 98 on
            # Linux), so the check silently never fired there.
            if exc.errno == errno.EADDRINUSE:
                print("address [%s:%d] already in use" % (Configuration.host, Configuration.port))
                sys.exit(1)
            # Other socket errors are deliberately ignored, as before;
            # they will surface when the real server binds.
        finally:
            probe.close()  # the previous code leaked the probe socket

    def write_pid(self):
        """Write to the pid file"""
        if Configuration.pidfile:
            with open(Configuration.pidfile, 'wb') as pid_file:
                pid_file.write(str(os.getpid()).encode('utf-8'))

    def remove_pid(self):
        """Delete the pid file"""
        if Configuration.pidfile and os.path.exists(Configuration.pidfile):
            os.remove(Configuration.pidfile)

    def daemonize(self):
        """Detach from the terminal and continue as a daemon"""
        if os.fork():    # launch child and...
            os._exit(0)  # kill off parent
        os.setsid()      # become the leader of a new session
        if os.fork():    # launch child and...
            os._exit(0)  # kill off parent again.
        os.umask(63)     # 077 in octal: files the daemon creates are private
        null = os.open('/dev/null', os.O_RDWR)
        # Redirect stdin, stdout and stderr to /dev/null.
        for i in range(3):
            try:
                os.dup2(null, i)
            except OSError as e:
                if e.errno != errno.EBADF:
                    raise
        os.close(null)
/Hydro_Quebec_API_Wrapper-3.0.8-py3-none-any.whl/hydroqc/types/cpc.py | from typing import TypedDict
class PeriodDataTyping(TypedDict, total=True):
    """CPC Period json output format.

    Typed view of one CPC (winter credit) billing-period record returned
    by the Hydro-Quebec API.  Keys keep the API's French names verbatim;
    every key is required (``total=True``).  Field meanings below are
    inferred from the French key names -- confirm against the API docs.
    """

    nbJourLecturePeriode: int        # days read so far in the period
    nbJourPrevuPeriode: int          # days planned for the period
    montantFacturePeriode: float     # amount billed so far
    montantProjetePeriode: float     # projected amount for the period
    moyenneDollarsJourPeriode: float  # average dollars per day
    moyenneKwhJourPeriode: float      # average kWh per day
    consoTotalPeriode: float
    consoTotalProjetePeriode: float
    consoRegPeriode: float
    consoHautPeriode: float
    nbKwhConsoHautTarifFlexPeriode: float
    montantVentePointeCritique: float
    tempMoyennePeriode: float        # average temperature over the period
    coutCentkWh: float | None        # may be null in the API payload
    dernierTarif: str
    indMVEPeriode: bool
class CriticalPeakDataTyping(TypedDict, total=False):
    """CPC json sub output format.

    One critical-peak ("effacement") event.  All keys are optional
    (``total=False``); presumably the API omits consumption figures for
    events not yet measured or billed -- confirm against the API docs.
    """

    dateEffacement: str   # event date
    heureDebut: str       # start time
    heureFin: str         # end time
    consoReelle: float
    consoReference: float
    consoEffacee: float
    montantEffacee: float
    codeConso: str
    indFacture: bool      # whether the event has been billed
class CriticalPeaksDataTyping(TypedDict, total=True):
    """CPC json sub output format.

    Critical-peak events for one winter season, bounded by the season's
    start and end dates.  All keys are required (``total=True``).
    """

    dateDebutPeriodeHiver: str  # winter period start date
    dateFinPeriodeHiver: str    # winter period end date
    periodesEffacementHiver: list[CriticalPeakDataTyping]
class TotalCriticalPeaksDataTyping(TypedDict, total=True):
    """CPC json sub output format.

    Season-level totals for critical-peak events.  All keys are required
    (``total=True``).
    """

    dateDebutPeriodeHiver: str  # winter period start date
    consoEffacee: float
    montantEfface: float
class CPCDataTyping(TypedDict, total=False):
    """CPC json output format.

    Top-level CPC (winter credit) payload: contract/enrollment metadata,
    tariff names, and the per-season critical-peak data.  All keys are
    optional (``total=False``); key names mirror the API's French JSON
    keys verbatim.
    """

    adresseCourriel: str
    adresseLieuConso1: str
    adresseLieuConso2: str
    codeAdhesionCPC: str
    codeEligibiliteCPC: str
    codeUsageContrat: str
    dateDebutAdhesionCPC: str
    dateFinAdhesionCPC: str
    indAppelRequis: bool
    montantEffaceProjete: str
    etatMontantEffaceeProjete: str
    nomMarketingActuel: str
    nomMarketingBase: str
    nomMarketingCPC: str
    optionTarifActuel: str
    optionTarifBase: str
    optionTarifCPC: str
    tarifActuel: str
    tarifBase: str
    tarifCPC: str
    success: bool
    periodesEffacementsHivers: list[CriticalPeaksDataTyping]
    cumulPeriodesEffacementsHivers: list[TotalCriticalPeaksDataTyping]
    # infoSimulation: list
/Nuitka-1.8.tar.gz/Nuitka-1.8/nuitka/build/inline_copy/lib/scons-3.1.2/SCons/Node/Python.py |
__revision__ = "src/engine/SCons/Node/Python.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import SCons.Node
class ValueNodeInfo(SCons.Node.NodeInfoBase):
    """Serialization info for Value nodes; carries only the content
    signature (csig)."""

    __slots__ = ('csig',)
    current_version_id = 2
    field_list = ['csig']

    def str_to_node(self, s):
        """Recreate a Value node from its string representation."""
        return Value(s)

    def __getstate__(self):
        """
        Return all fields that shall be pickled. Walk the slots in the class
        hierarchy and add those to the state dictionary. If a '__dict__' slot is
        available, copy all entries to the dictionary. Also include the version
        id, which is fixed for all instances of a class.
        """
        state = getattr(self, '__dict__', {}).copy()
        for klass in type(self).mro():
            for slot in getattr(klass, '__slots__', ()):
                if hasattr(self, slot):
                    state[slot] = getattr(self, slot)
        state['_version_id'] = self.current_version_id
        # Weak references must never be pickled.
        state.pop('__weakref__', None)
        return state

    def __setstate__(self, state):
        """
        Restore the attributes from a pickled state.
        """
        # TODO check or discard version
        del state['_version_id']
        for key, value in state.items():
            if key != '__weakref__':
                setattr(self, key, value)
class ValueBuildInfo(SCons.Node.BuildInfoBase):
    """Build-signature info for Value nodes; adds no fields of its own."""
    __slots__ = ()
    current_version_id = 2
class Value(SCons.Node.Node):
    """A class for Python variables, typically passed on the command line
    or generated by a script, but not from a file or some other source.
    """

    NodeInfo = ValueNodeInfo
    BuildInfo = ValueBuildInfo

    def __init__(self, value, built_value=None):
        SCons.Node.Node.__init__(self)
        self.value = value
        # NOTE(review): instance attribute shadows the method of the same
        # name below with a numeric code consumed by the Node framework's
        # decider machinery; meaning of 6 is defined in SCons.Node --
        # confirm before changing.
        self.changed_since_last_build = 6
        self.store_info = 0
        if built_value is not None:
            self.built_value = built_value

    def str_for_display(self):
        return repr(self.value)

    def __str__(self):
        return str(self.value)

    def make_ready(self):
        # Ensure the content signature is computed before the node is used.
        self.get_csig()

    def build(self, **kw):
        # A pre-supplied built_value suppresses re-building.
        if not hasattr(self, 'built_value'):
            SCons.Node.Node.build(self, **kw)

    is_up_to_date = SCons.Node.Node.children_are_up_to_date

    def is_under(self, dir):
        # Make Value nodes get built regardless of
        # what directory scons was run from. Value nodes
        # are outside the filesystem:
        return 1

    def write(self, built_value):
        """Set the value of the node."""
        self.built_value = built_value

    def read(self):
        """Return the value. If necessary, the value is built."""
        self.build()
        if not hasattr(self, 'built_value'):
            self.built_value = self.value
        return self.built_value

    def get_text_contents(self):
        """By the assumption that the node.built_value is a
        deterministic product of the sources, the contents of a Value
        are the concatenation of all the contents of its sources. As
        the value need not be built when get_contents() is called, we
        cannot use the actual node.built_value."""
        ###TODO: something reasonable about universal newlines
        contents = str(self.value)
        for kid in self.children(None):
            contents = contents + kid.get_contents().decode()
        return contents

    def get_contents(self):
        """
        Get contents for signature calculations.

        :return: bytes
        """
        text_contents = self.get_text_contents()
        try:
            return text_contents.encode()
        except UnicodeDecodeError:
            # Already encoded as python2 str are bytes
            return text_contents

    def changed_since_last_build(self, target, prev_ni):
        # Compare the current content signature against the stored one;
        # a missing previous csig counts as "changed".
        cur_csig = self.get_csig()
        try:
            return cur_csig != prev_ni.csig
        except AttributeError:
            return 1

    def get_csig(self, calc=None):
        """Because we're a Python value node and don't have a real
        timestamp, we get to ignore the calculator and just use the
        value contents.

        Returns string. Ideally string of hex digits. (Not bytes)
        """
        try:
            return self.ninfo.csig
        except AttributeError:
            pass
        contents = self.get_text_contents()
        self.get_ninfo().csig = contents
        return contents
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | PypiClean |
/IOT3ApiClient-1.0.0.tar.gz/IOT3ApiClient-1.0.0/urllib3/filepost.py | from __future__ import absolute_import
import binascii
import codecs
import os
from io import BytesIO
from .fields import RequestField
from .packages import six
from .packages.six import b
# UTF-8 StreamWriter factory: codecs.lookup() returns a CodecInfo tuple
# (encoder, decoder, StreamReader, StreamWriter); index 3 is StreamWriter.
writer = codecs.lookup("utf-8")[3]
def choose_boundary():
    """
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    """
    # 16 random bytes -> 32 hex characters.
    token = binascii.hexlify(os.urandom(16))
    # hexlify returns bytes; Python 3 callers expect a native str.
    return token if six.PY2 else token.decode("ascii")
def iter_field_objects(fields):
    """
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.
    """
    source = six.iteritems(fields) if isinstance(fields, dict) else iter(fields)
    for item in source:
        if isinstance(item, RequestField):
            yield item
        else:
            # Plain (key, value) pair: wrap it in a RequestField.
            yield RequestField.from_tuples(*item)
def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    """
    if isinstance(fields, dict):
        fields = six.iteritems(fields)
    return ((k, v) for k, v in fields)
def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`urllib3.filepost.choose_boundary`.
    """
    if boundary is None:
        boundary = choose_boundary()

    buf = BytesIO()
    for field in iter_field_objects(fields):
        buf.write(b("--%s\r\n" % (boundary)))
        # Headers are text; route them through the UTF-8 stream writer.
        writer(buf).write(field.render_headers())

        payload = field.data
        if isinstance(payload, int):
            payload = str(payload)  # Backwards compatibility
        if isinstance(payload, six.text_type):
            writer(buf).write(payload)
        else:
            buf.write(payload)
        buf.write(b"\r\n")

    buf.write(b("--%s--\r\n" % (boundary)))

    content_type = str("multipart/form-data; boundary=%s" % boundary)
    return buf.getvalue(), content_type
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/full/plugins/a11yhelp/dialogs/lang/sk.js | /*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
// Slovak ("sk") localization strings for the CKEditor "a11yhelp" plugin
// (the Accessibility Instructions dialog). This is a minified, generated
// resource: the translation payload below is runtime data and must not be
// hand-edited or re-wrapped.
CKEDITOR.plugins.setLang("a11yhelp","sk",{title:"Inštrukcie prístupnosti",contents:"Pomocný obsah. Pre zatvorenie tohto okna, stlačte ESC.",legend:[{name:"Všeobecne",items:[{name:"Lišta nástrojov editora",legend:"Stlačte ${toolbarFocus} pre navigáciu na lištu nástrojov. Medzi ďalšou a predchádzajúcou lištou nástrojov sa pohybujete s TAB a SHIFT+TAB. Medzi ďalším a predchádzajúcim tlačidlom na lište nástrojov sa pohybujete s pravou šípkou a ľavou šípkou. Stlačte medzerník alebo ENTER pre aktiváciu tlačidla lišty nástrojov."},
{name:"Editorový dialóg",legend:"V dialógovom okne stlačte TAB pre presun na ďalší prvok, SHIFT+TAB pre presun na predchádzajúci prvok, ENTER pre odoslanie, ESC pre zrušenie. Keď má dialógové okno viacero kariet, zoznam kariet dosiahnete buď stlačením ALT+F10 alebo s TAB v príslušnom poradí kariet. So zameraným zoznamom kariet sa pohybujte k ďalšej alebo predchádzajúcej karte cez PRAVÚ a ĽAVÚ ŠÍPKU."},{name:"Editorové kontextové menu",legend:"Stlačte ${contextMenu} alebo APPLICATION KEY pre otvorenie kontextového menu. Potom sa presúvajte na ďalšie možnosti menu s TAB alebo dolnou šípkou. Presunte sa k predchádzajúcej možnosti s SHIFT+TAB alebo hornou šípkou. Stlačte medzerník alebo ENTER pre výber možnosti menu. Otvorte pod-menu danej možnosti s medzerníkom, alebo ENTER, alebo pravou šípkou. Vráťte sa späť do položky rodičovského menu s ESC alebo ľavou šípkou. Zatvorte kontextové menu s ESC."},
{name:"Editorov box zoznamu",legend:"V boxe zoznamu, presuňte sa na ďalšiu položku v zozname s TAB alebo dolnou šípkou. Presuňte sa k predchádzajúcej položke v zozname so SHIFT+TAB alebo hornou šípkou. Stlačte medzerník alebo ENTER pre výber možnosti zoznamu. Stlačte ESC pre zatvorenie boxu zoznamu."},{name:"Editorove pásmo cesty prvku",legend:"Stlačte ${elementsPathFocus} pre navigovanie na pásmo cesty elementu. Presuňte sa na tlačidlo ďalšieho prvku s TAB alebo pravou šípkou. Presuňte sa k predchádzajúcemu tlačidlu s SHIFT+TAB alebo ľavou šípkou. Stlačte medzerník alebo ENTER pre výber prvku v editore."}]},
{name:"Príkazy",items:[{name:"Vrátiť príkazy",legend:"Stlačte ${undo}"},{name:"Nanovo vrátiť príkaz",legend:"Stlačte ${redo}"},{name:"Príkaz na stučnenie",legend:"Stlačte ${bold}"},{name:"Príkaz na kurzívu",legend:"Stlačte ${italic}"},{name:"Príkaz na podčiarknutie",legend:"Stlačte ${underline}"},{name:"Príkaz na odkaz",legend:"Stlačte ${link}"},{name:"Príkaz na zbalenie lišty nástrojov",legend:"Stlačte ${toolbarCollapse}"},{name:"Prejsť na predchádzajúcu zamerateľnú medzeru príkazu",legend:"Stlačte ${accessPreviousSpace} pre prístup na najbližšie nedosiahnuteľné zamerateľné medzery pred vsuvkuo. Napríklad: dve za sebou idúce horizontálne čiary. Opakujte kombináciu klávesov pre dosiahnutie vzdialených zamerateľných medzier."},
{name:"Prejsť na ďalší ",legend:"Stlačte ${accessNextSpace} pre prístup na najbližšie nedosiahnuteľné zamerateľné medzery po vsuvke. Napríklad: dve za sebou idúce horizontálne čiary. Opakujte kombináciu klávesov pre dosiahnutie vzdialených zamerateľných medzier."},{name:"Pomoc prístupnosti",legend:"Stlačte ${a11yHelp}"},{name:"Vložiť ako čistý text",legend:"Stlačte ${pastetext}",legendEdge:"Stlačte ${pastetext} a potom ${paste}"}]}],tab:"Tab",pause:"Pause",capslock:"Caps Lock",escape:"Escape",pageUp:"Stránka hore",
pageDown:"Stránka dole",leftArrow:"Šípka naľavo",upArrow:"Šípka hore",rightArrow:"Šípka napravo",downArrow:"Šípka dole",insert:"Insert",leftWindowKey:"Ľavé Windows tlačidlo",rightWindowKey:"Pravé Windows tlačidlo",selectKey:"Tlačidlo Select",numpad0:"Numpad 0",numpad1:"Numpad 1",numpad2:"Numpad 2",numpad3:"Numpad 3",numpad4:"Numpad 4",numpad5:"Numpad 5",numpad6:"Numpad 6",numpad7:"Numpad 7",numpad8:"Numpad 8",numpad9:"Numpad 9",multiply:"Násobenie",add:"Sčítanie",subtract:"Odčítanie",decimalPoint:"Desatinná čiarka",
divide:"Delenie",f1:"F1",f2:"F2",f3:"F3",f4:"F4",f5:"F5",f6:"F6",f7:"F7",f8:"F8",f9:"F9",f10:"F10",f11:"F11",f12:"F12",numLock:"Num Lock",scrollLock:"Scroll Lock",semiColon:"Bodkočiarka",equalSign:"Rovná sa",comma:"Čiarka",dash:"Pomĺčka",period:"Bodka",forwardSlash:"Lomítko",graveAccent:"Zdôrazňovanie prízvuku",openBracket:"Hranatá zátvorka otváracia",backSlash:"Backslash",closeBracket:"Hranatá zátvorka zatváracia",singleQuote:"Jednoduché úvodzovky"});
/CocoRPy27-1.4.1.zip/CocoRPy27-1.4.1/Core.py |
import os.path
import sys
import copy
from optparse import OptionParser
from Trace import Trace
from Errors import Errors
from CharClass import CharClass
class Comment( object ):
    '''info about comment syntax (one instance per COMMENTS declaration)'''

    first = None  # head of the global linked list of Comment declarations

    def __init__(self, frm, to, nested):
        # frm/to are syntax-graph node chains describing the start and
        # stop delimiters; nested tells whether comments may nest.
        assert isinstance(frm, Node)
        assert isinstance(to, Node)
        assert isinstance(nested, bool)
        self.start = self.Str(frm)
        self.stop = self.Str(to)
        self.nested = nested
        # Push onto the global list (newest first).
        self.next = Comment.first
        Comment.first = self

    def Str(self, p):
        """Flatten the delimiter node chain `p` into a plain 1-2 char string."""
        assert isinstance(p, Node)
        s = ''  # StringBuffer
        while p is not None:
            if p.typ == Node.chr:
                s += chr(p.val)
            elif p.typ == Node.clas:
                st = CharClass.Set(p.val)  # BitSet
                if len(st) != 1:
                    Errors.SemErr("character set contains more than 1 character")
                # Use the (single) member of the character class.
                s += chr(min(st))
                #s += chr(st.First())
            else:
                Errors.SemErr("comment delimiters may not be structured")
            p = p.next
        if len(s) == 0 or len(s) > 2:
            Errors.SemErr("comment delimiters must be 1 or 2 characters long")
            s = '?'
        return s
class Symbol( object ):
    """Grammar symbol: terminal, pragma or nonterminal (Python 2 code)."""

    # global symbol tables, filled as symbols are constructed
    terminals = [ ]  # of Symbol
    pragmas = [ ]
    nonterminals = [ ]

    # token kinds
    fixedToken = 0     # e.g. 'a' ('b' | 'c') (structure of literals)
    classToken = 1     # e.g. digit {digit} (at least one char class)
    litToken = 2       # e.g. "while"
    classLitToken = 3  # e.g. letter {letter} but without literals that have the same structure

    def __init__(self, typ, name, line):
        assert isinstance(typ, int)
        assert isinstance(name, (str, unicode))
        assert isinstance(line, int)
        self.n = 0               # symbol number
        self.typ = 0             # t, nt, pr, unknown, rslv
        self.name = 0            # symbol name
        self.graph = None        # Node, nt: to first node of syntax graph
        self.tokenKind = 0       # t: token kind (fixedToken, classToken, ...)
        self.deletable = False   # nt: true if nonterminal is deletable
        self.firstReady = False  # nt: true if terminal start symbols have already been computed
        self.first = None        # set, nt: terminal start symbols
        self.follow = None       # set, nt: terminal followers
        self.nts = None          # set, nt: nonterminals whose followers have to be added to this sym
        self.line = 0            # source text line number of item in this node
        self.attrPos = None      # Position, nt: position of attributes in source text (or None)
        self.semPos = None       # Position, pr: pos of semantic action in source text (or None)
                                 # nt: pos of local declarations in source text (or None)
        self.retType = ''        # AH - nt: Type of output attribute (or None)
        self.retVar = None       # str - AH - nt: Name of output attribute (or None)
        self.symName = None      # str, symbolic name /* pdt */

        # A two-character quoted name is just "": an empty token.
        if len(name) == 2 and name[0] == '"':
            Errors.SemErr('empty token not allowed')
            name = '???'
        self.typ = typ
        self.name = name
        self.line = line
        # Register the symbol in the table matching its kind; pragmas
        # keep n == 0.
        if typ == Node.t:
            self.n = len(Symbol.terminals)
            Symbol.terminals.append(self)
        elif typ == Node.pr:
            Symbol.pragmas.append(self)
        elif typ == Node.nt:
            self.n = len(Symbol.nonterminals)
            Symbol.nonterminals.append(self)

    @staticmethod
    def Find(name):
        """Look up a symbol by name among terminals, then nonterminals."""
        assert isinstance(name, (str, unicode))
        for s in Symbol.terminals:
            if s.name == name:
                return s
        for s in Symbol.nonterminals:
            if s.name == name:
                return s
        return None

    def compareTo(self, x):
        """Order symbols alphabetically by name (Python 2 __cmp__ protocol)."""
        assert isinstance(x, Symbol)
        return self.name.__cmp__(x.name)
class Target( object ):
    '''set of states that are reached by an action (singly linked list node)'''

    def __init__(self, s):
        assert isinstance(s, State)
        self.state = s    # target state
        self.next = None  # Target instance
class Node( object ):
    """Node of the syntax graph; one per grammar-construct occurrence."""

    nodes = [ ]  # global list of all graph nodes, indexed by node number n

    # printable names for the node kinds, indexed by typ (for PrintNodes)
    nTyp = [" ", "t ", "pr ", "nt ", "clas", "chr ", "wt ", "any ", "eps ",
            "sync", "sem ", "alt ", "iter", "opt ", "rslv"]

    # constants for node kinds
    t = 1      # terminal symbol
    pr = 2     # pragma
    nt = 3     # nonterminal symbol
    clas = 4   # character class
    chr = 5    # character
    wt = 6     # weak terminal symbol
    any = 7    #
    eps = 8    # empty
    sync = 9   # synchronization symbol
    sem = 10   # semantic action: (. .)
    alt = 11   # alternative: |
    iter = 12  # iteration: { }
    opt = 13   # option: [ ]
    rslv = 14  # resolver expr /* ML */ /* AW 03-01-13 renamed slv --> rslv */

    normalTrans = 0   # transition codes
    contextTrans = 1

    def __init__(self, typ, symOrNodeOrInt, line=None):
        assert isinstance(typ, int)
        assert isinstance(symOrNodeOrInt, Symbol) or isinstance(symOrNodeOrInt, Node) or isinstance(symOrNodeOrInt, int) or (symOrNodeOrInt is None)
        assert isinstance(line, int) or (line is None)
        self.n = 0          # node number
        self.typ = 0        # t, nt, wt, chr, clas, any, eps, sem, sync, alt, iter, opt, rslv
        self.next = None    # Node, to successor node
        self.down = None    # Node, alt: to next alternative
        self.sub = None     # Node, alt, iter, opt: to first node of substructure
        self.up = False     # true: "next" leads to successor in enclosing structure
        self.sym = None     # Symbol, nt, t, wt: symbol represented by this node
        self.val = 0        # chr: ordinal character value
                            # clas: index of character class
        self.code = 0       # chr, clas: transition code
        self.set = None     # set, any, sync: the set represented by this node
        self.pos = None     # Position, nt, t, wt: pos of actual attributes
                            # sem: pos of semantic action in source text
        self.line = 0       # source text line number of item in this node
        self.state = None   # State, DFA state corresponding to this node
                            # (only used in DFA.ConvertToStates)
        self.retVar = None  # str, nt: name of output attribute (or None)

        # Overloaded constructor: dispatch on the argument combination.
        if isinstance(symOrNodeOrInt, int) and isinstance(line, int):
            # (typ, val, line): chr/clas node carrying an ordinal value.
            self.typ = typ
            self.sym = None
            self.line = line
            self.n = len(Node.nodes)
            Node.nodes.append(self)
            self.val = symOrNodeOrInt
        elif line is None:
            # (typ, subNode): structural node (alt/iter/opt) wrapping sub.
            self.typ = typ
            self.sym = None
            self.line = 0
            self.n = len(Node.nodes)
            Node.nodes.append(self)
            self.sub = symOrNodeOrInt
        else:
            # (typ, symbol, line): node referring to a grammar symbol.
            self.typ = typ
            self.sym = symOrNodeOrInt
            self.line = line
            self.n = len(Node.nodes)
            Node.nodes.append(self)

    @staticmethod
    def DelGraph(p):
        # True when the whole graph chain from p on is deletable.
        return (p is None) or Node.DelNode(p) and Node.DelGraph(p.next)

    @staticmethod
    def DelSubGraph(p):
        # Like DelGraph, but stops at the end of the substructure (p.up).
        return p is None or Node.DelNode(p) and (p.up or Node.DelSubGraph(p.next))

    @staticmethod
    def DelAlt(p):
        return p is None or Node.DelNode(p) and (p.up or Node.DelAlt(p.next))

    @staticmethod
    def DelNode(p):
        if p.typ == Node.nt:
            return p.sym.deletable
        elif p.typ == Node.alt:
            return Node.DelAlt(p.sub) or p.down != None and Node.DelAlt(p.down)
        else:
            return p.typ in (Node.eps, Node.iter, Node.opt, Node.sem, Node.sync, Node.rslv)

    #----------------- for printing ----------------------

    @staticmethod
    def Ptr(p, up):
        # Encode a node reference for the trace listing: negative numbers
        # mark "up" links, 0 marks None.
        assert isinstance(p, Node) or (p is None)
        assert isinstance(up, bool)
        if p is None:
            return 0
        elif up:
            return -p.n
        else:
            return p.n

    @staticmethod
    def Pos(pos):
        if pos is None:
            return ' '
        else:
            return Trace.formatString(str(pos.beg), 5)

    @staticmethod
    def Name(name):
        assert isinstance(name, (str, unicode))
        return (name + ' ')[0:12]
        # found no simpler way to get the first 12 characters of the name
        # padded with blanks on the right

    @staticmethod
    def PrintNodes():
        """Dump all graph nodes to the trace file (debug listing)."""
        Trace.WriteLine("Graph nodes:")
        Trace.WriteLine("----------------------------------------------------")
        Trace.WriteLine(" n type name next down sub pos line")
        Trace.WriteLine(" val code")
        Trace.WriteLine("----------------------------------------------------")
        for p in Node.nodes:
            Trace.Write(str(p.n), 4)
            Trace.Write(" " + Node.nTyp[p.typ] + " ")
            if p.sym is not None:
                Trace.Write(Node.Name(p.sym.name), 12)
                Trace.Write(" ")
            elif p.typ == Node.clas:
                c = CharClass.classes[p.val]
                Trace.Write(Node.Name(c.name), 12)
                Trace.Write(" ")
            else:
                Trace.Write(" ")
            Trace.Write(str(Node.Ptr(p.next, p.up)), 5)
            Trace.Write(" ")
            # Remaining columns depend on the node kind.
            if p.typ in (Node.t, Node.nt, Node.wt):
                Trace.Write(" ")
                Trace.Write(Node.Pos(p.pos), 5)
            elif p.typ == Node.chr:
                Trace.Write(str(p.val), 5)
                Trace.Write(" ")
                Trace.Write(str(p.code), 5)
                Trace.Write(" ")
            elif p.typ == Node.clas:
                Trace.Write(" ")
                Trace.Write(str(p.code), 5)
                Trace.Write(" ")
            elif p.typ in (Node.alt, Node.iter, Node.opt):
                Trace.Write(str(Node.Ptr(p.down, False)), 5)
                Trace.Write(" ")
                Trace.Write(str(Node.Ptr(p.sub, False)), 5)
                Trace.Write(" ")
            elif p.typ == Node.sem:
                Trace.Write(" ")
                Trace.Write(Node.Pos(p.pos), 5)
            elif p.typ in (Node.eps, Node.any, Node.sync):
                Trace.Write(" ")
            Trace.WriteLine(str(p.line), 5);
        Trace.WriteLine();
class State( object ):
    '''state of finite automaton'''

    lastNr = 0  # highest state number handed out so far

    def __init__(self):
        self.nr = 0              # state number
        self.firstAction = None  # Action, to first action of this state
        self.endOf = None        # Symbol, recognized token if state is final
        self.ctx = False         # true if state is reached via contextTrans
        self.next = None         # State
        # Assign the next sequential state number.
        State.lastNr += 1
        self.nr = State.lastNr

    def AddAction(self, act):
        """Insert `act` into the action list, keeping it sorted by typ."""
        assert isinstance(act, Action)
        lasta = None  # Action
        a = self.firstAction  # Action
        while (a is not None) and (act.typ >= a.typ):
            lasta = a
            a = a.next
        # collecting classes at the beginning gives better performance
        act.next = a
        if a == self.firstAction:
            self.firstAction = act
        else:
            lasta.next = act

    def DetachAction(self, act):
        """Remove `act` from this state's action list (no-op if absent)."""
        assert isinstance(act, Action)
        lasta = None  # Action
        a = self.firstAction  # Action
        while (a is not None) and a != act:
            lasta = a
            a = a.next
        if a is not None:
            if a == self.firstAction:
                self.firstAction = a.next
            else:
                lasta.next = a.next

    def TheAction(self, ch):
        """Return the action that matches character `ch`, or None."""
        assert isinstance(ch, (str, unicode))
        if isinstance(ch, (str, unicode)):
            # Compare by ordinal value below.
            ch = ord(ch)
        a = self.firstAction
        while a is not None:
            if a.typ == Node.chr and ch == a.sym:
                return a
            elif a.typ == Node.clas:
                s = CharClass.Set(a.sym)
                if ch in s:
                    return a
            a = a.next
        return None

    def MeltWith(self, s):
        '''copy actions of s to state'''
        assert isinstance(s, State)
        action = s.firstAction
        while action is not None:
            a = Action(action.typ, action.sym, action.tc)
            a.AddTargets(action)
            self.AddAction(a)
            action = action.next
class Action( object ):
    '''action of finite automaton'''

    def __init__(self, typ, sym, tc):
        assert isinstance(typ, int)
        assert isinstance(sym, int)
        assert isinstance(tc, int)
        self.typ = typ      # type of action symbol: clas, chr
        self.sym = sym      # action symbol
        self.tc = tc        # transition code: normalTrans, contextTrans
        self.target = None  # Target # states reached from this action
        self.next = None    # Action

    def AddTarget(self, t):
        '''add t to the action.targets (list kept sorted by state number)'''
        assert isinstance(t, Target)
        last = None  # Target
        p = self.target  # Target
        while (p is not None) and (t.state.nr >= p.state.nr):
            if t.state == p.state:
                # Already present: nothing to do.
                return
            last = p
            p = p.next
        t.next = p
        if (p == self.target):
            self.target = t
        else:
            last.next = t

    def AddTargets(self, a):
        '''add copy of a.targets to action.targets'''
        assert isinstance(a, Action)
        p = a.target
        while p is not None:
            t = Target(p.state)
            self.AddTarget(t)
            p = p.next
        # Context transitions are "sticky": copying from a context action
        # makes this action a context action too.
        if a.tc == Node.contextTrans:
            self.tc = Node.contextTrans

    def Symbols(self):
        """Return the set of character ordinals this action matches."""
        if self.typ == Node.clas:
            s = copy.copy(CharClass.Set(self.sym))
        else:
            s = set()
            s.add(self.sym)
        return s

    def ShiftWith(self, s):
        """Re-point this action at character set `s` (chr or clas form)."""
        if len(s) == 1:
            self.typ = Node.chr
            self.sym = min(s)  # .First()
        else:
            c = CharClass.Find(s)
            if c is None:
                c = CharClass("#", s)  # class with dummy name
            self.typ = Node.clas
            self.sym = c.n

    def GetTargetStates(self, param):
        """Compute target-state numbers and end symbol; results go into
        param[0]/param[1] (out parameters); returns the ctx flag."""
        assert isinstance(param, list)  # Object[]
        # compute the set of target states
        targets = set()  # BitSet
        endOf = None  # Symbol
        ctx = False  # boolean
        t = self.target
        while t is not None:
            stateNr = t.state.nr  # int
            if stateNr <= DFA.lastSimState:
                targets.add(stateNr)
            else:
                # Melted (merged) state: expand to its constituent states.
                try:
                    targets |= Melted.Set(stateNr)
                except:
                    print sys.exc_info()[1]
                    Errors.count += 1
            if t.state.endOf is not None:
                if (endOf is None) or (endOf == t.state.endOf):
                    endOf = t.state.endOf
                else:
                    # Two different tokens end here: grammar ambiguity.
                    print "Tokens " + endOf.name + " and " + t.state.endOf.name + " cannot be distinguished"
                    Errors.count += 1
            if t.state.ctx:
                ctx = True
            t = t.next
        param[0] = targets
        param[1] = endOf
        return ctx
class Melted( object ):  # info about melted states
    """Record of a set of old DFA states merged ("melted") into one."""

    first = None  # Melted instance, head of melted state list

    def __init__(self, st, state):
        assert isinstance(st, set)
        assert isinstance(state, State)
        self.set = st       # set of old states
        self.state = state  # new state
        # Push onto the global list (newest first).
        self.next = Melted.first
        Melted.first = self

    @staticmethod
    def Set(nr):
        """Return the old-state set of the melted state numbered `nr`."""
        assert isinstance(nr, int)
        m = Melted.first
        while m is not None:
            if m.state.nr == nr:
                return m.set
            else:
                m = m.next
        raise RuntimeError('-- compiler error in Melted.Set()')

    @staticmethod
    def StateWithSet(s):
        """Find the melted record whose old-state set equals `s`, or None."""
        assert isinstance(s, set)
        m = Melted.first
        while m is not None:
            if s == m.set:  # s.equals( m.set )
                return m
            m = m.next
        return None
class Graph( object ):
    """Sub-graph of the syntax graph, kept as left head (l) and right
    open-chain tail (r) awaiting linkage to a successor graph."""

    # Shared sentinel node used while threading chr nodes in StrToGraph.
    dummyNode = Node( Node.eps, None, 0 )

    def __init__(self, p=None):
        assert isinstance(p, Node) or (p is None)
        self.l = p  # Node, left end of graph = head
        self.r = p  # Node, right end of graph = list of nodes to be linked to successor graph

    @staticmethod
    def MakeFirstAlt(g):
        """Wrap g's head in an alt node (first alternative)."""
        assert isinstance(g, Graph)
        g.l = Node(Node.alt, g.l)
        g.l.line = g.l.sub.line  # make line available for error handling
        g.l.next = g.r
        g.r = g.l

    @staticmethod
    def MakeAlternative(g1, g2):
        """Append g2 as an additional alternative of g1."""
        assert isinstance(g1, Graph)
        assert isinstance(g2, Graph)
        g2.l = Node(Node.alt, g2.l)
        g2.l.line = g2.l.sub.line
        # Append g2 at the bottom of g1's alternative ("down") chain.
        p = g1.l
        while p.down is not None:
            p = p.down
        p.down = g2.l
        # Concatenate the open right chains.
        p = g1.r
        while p.next is not None:
            p = p.next
        p.next = g2.r

    @staticmethod
    def MakeSequence(g1, g2):
        """Concatenate g2 after g1 (sequence of two sub-graphs)."""
        assert isinstance(g1, Graph)
        assert isinstance(g2, Graph)
        p = g1.r.next
        g1.r.next = g2.l  # link head node
        while p is not None:  # link substructure
            q = p.next
            p.next = g2.l
            p.up = True
            p = q
        g1.r = g2.r

    @staticmethod
    def MakeIteration(g):
        """Wrap g in an iter node: { g }."""
        assert isinstance(g, Graph)
        g.l = Node(Node.iter, g.l)
        p = g.r
        g.r = g.l
        # Loop every open right link back to the iter node.
        while p is not None:
            q = p.next
            p.next = g.l
            p.up = True
            p = q

    @staticmethod
    def MakeOption(g):
        """Wrap g in an opt node: [ g ]."""
        assert isinstance(g, Graph)
        g.l = Node(Node.opt, g.l)
        g.l.next = g.r
        g.r = g.l

    @staticmethod
    def Finish(g):
        """Terminate the open right chain with None links."""
        assert isinstance(g, Graph)
        p = g.r
        while p is not None:
            q = p.next
            p.next = None
            p = q

    @staticmethod
    def SetContextTrans(p):
        '''set transition code in the graph rooted at p'''
        assert isinstance(p, Node) or (p is None)
        DFA.hasCtxMoves = True
        while p is not None:
            if p.typ == Node.chr or p.typ == Node.clas:
                p.code = Node.contextTrans
            elif p.typ == Node.opt or p.typ == Node.iter:
                Graph.SetContextTrans(p.sub)
            elif p.typ == Node.alt:
                Graph.SetContextTrans(p.sub)
                Graph.SetContextTrans(p.down)
            # Stop at the end of this substructure.
            if p.up:
                break
            p = p.next

    @staticmethod
    def DeleteNodes():
        """Reset the global node list and the shared dummy node."""
        Node.nodes = [ ]
        Graph.dummyNode = Node(Node.eps, None, 0)

    @staticmethod
    def StrToGraph(st):
        """Build a linear chain of chr nodes from the quoted string `st`."""
        assert isinstance(st, (str, unicode))
        # Strip the surrounding quotes and resolve escapes.
        s = DFA.Unescape(st[1:-1])
        if len(s) == 0:
            Errors.SemErr("empty token not allowed")
        g = Graph()
        g.r = Graph.dummyNode
        for i in xrange(0, len(s)):
            p = Node(Node.chr, ord(s[i]), 0)
            g.r.next = p
            g.r = p
        g.l = Graph.dummyNode.next
        Graph.dummyNode.next = None
        return g
class UserDefinedTokenName(object):
    '''A user-supplied alias for a generated token name.  Every instance
    registers itself in the class-level NameTab list.'''
    NameTab = [ ] # all registered alias records, in creation order
    alias = ''
    name = ''
    def __init__(self, alias, name):
        assert isinstance(alias, str)
        assert isinstance(name, str)
        self.name = name
        self.alias = alias
        # register this record globally so Tab.SymName can find it
        UserDefinedTokenName.NameTab.append(self)
class Tab:
    '''Symbol table and grammar analysis: holds global grammar state and
    implements the symbol set computations, grammar checks and listings.'''
    semDeclPos = None #Position, position of global semantic declarations
    ignored = None #Set, characters ignored by the scanner
    ddt = [ False ] * 20 # boolean[20], debug and test switches (see SetDDT)
    gramSy = None #Symbol, root nonterminal; filled by ATG
    eofSy = None #Symbol, end of file symbol
    noSym = None #Symbol, used in case of an error
    allSyncSets = None #set, union of all synchronisation sets
    nsName = '' #namespace for generated files
    frameDir = None #directory containing the frame files
    literals = { } #Hashtable, symbols that are used as literals
    visited = None #set, mark list for graph traversals
    curSy = None #Symbol, current symbol in computation of sets
@staticmethod
def parseArgs( argv, testArgCt=False ):
usage = 'usage: %prog [options] filename.atg'
optParser = OptionParser( usage )
optParser.add_option( '-a', '-A', dest='traceAutomaton',
action='store_true', default=False,
help='Include automaton tracing in the trace file.' )
optParser.add_option( '-c', '-C', dest='generateDriver',
action='store_true', default=False,
help='Generate a main compiler source file.' )
optParser.add_option( '-f', '-F', dest='firstAndFollow',
action='store_true', default=False,
help='Include first & follow sets in the trace file.' )
optParser.add_option( '-g', '-G', dest='syntaxGraph',
action='store_true', default=False,
help='Include syntax graph in the trace file.' )
optParser.add_option( '-i', '-I', dest='traceComputations',
action='store_true', default=False,
help='Include a trace of the computations for first sets in the trace file.' )
optParser.add_option( '-j', '-J', dest='listAnyAndSync',
action='store_true', default=False,
help='Inclue a listing of the ANY and SYNC sets in the trace file.' )
optParser.add_option( '-m', '-M', dest='mergeErrors',
action='store_true', default=False,
help='Merge error messages in the source listing.' )
optParser.add_option( '-n', '-N', dest='tokenNames',
action='store_true', default=False,
help='Generate token names in the source listing.' )
optParser.add_option( '-p', '-P', dest='statistics',
action='store_true', default=False,
help='Include a listing of statistics in the trace file.' )
optParser.add_option( '-r', '-R', dest='frameFileDir',
default=None,
help='Use scanner.frame and parser.frame in directory DIR.', metavar='DIR' )
optParser.add_option( '-s', '-S', dest='symbolTable',
action='store_true', default=False,
help='Include the symbol table listing in the trace file.' )
optParser.add_option( '-t', '-T', dest='testOnly',
action='store_true', default=False,
help='Test the grammar only, don\'t generate any files.' )
optParser.add_option( '-x', '-X', dest='crossReferences',
action='store_true', default=False,
help='Include a cross reference listing in the trace file.' )
if argv is None:
options, args = optParser.parse_args( )
else:
options, args = optParser.parse_args( argv )
Tab.SetDDT( options )
if testArgCt:
if len(args) != 2:
optParser.print_help( )
sys.exit( )
return options,args
    #---------------------------------------------------------------------
    #  Symbol set computations
    #---------------------------------------------------------------------
    @staticmethod
    def First0( p, mark ):
        '''Computes the first set for the given Node.
        mark holds the numbers of nodes already visited, which keeps the
        recursion from looping on cyclic grammar graphs.'''
        assert isinstance( p, Node ) or (p is None)
        assert isinstance( mark, set )
        fs = set( )
        while (p is not None) and not (p.n in mark):
            mark.add(p.n)
            if p.typ == Node.nt:
                if p.sym.firstReady: # already computed by CompFirstSets
                    fs |= p.sym.first
                else:
                    fs |= Tab.First0( p.sym.graph, mark )
            elif p.typ in ( Node.t, Node.wt ):
                fs.add(p.sym.n)
            elif p.typ == Node.any:
                fs |= p.set
            elif p.typ == Node.alt:
                fs |= Tab.First0(p.sub, mark)
                fs |= Tab.First0(p.down, mark)
            elif p.typ in ( Node.iter, Node.opt ):
                fs |= Tab.First0(p.sub, mark)
            # only continue with the successor if p is deletable
            if not Node.DelNode(p):
                break
            p = p.next
        return fs
    @staticmethod
    def First( p ):
        '''First set of the graph rooted at p, with optional trace output.'''
        assert isinstance( p, Node) or (p is None)
        fs = Tab.First0(p, set( ) )
        if Tab.ddt[3]: # -i switch: trace first-set computations
            Trace.WriteLine( )
            if p is not None:
                Trace.WriteLine("First: node = " + str(p.n) )
            else:
                Trace.WriteLine("First: node = None");
            Tab.PrintSet(fs,0)
        return fs
@staticmethod
def CompFirstSets( ):
assert isinstance( Symbol.nonterminals, list)
nt = Symbol.nonterminals
for sym in nt:
sym.first = set( )
sym.firstReady = False
for sym in nt:
sym.first = Tab.First(sym.graph)
sym.firstReady = True
    @staticmethod
    def CompFollow( p ):
        '''Accumulate follow-set contributions along the graph of Tab.curSy.
        Tab.visited guards against revisiting nodes.'''
        assert isinstance(p, Node) or (p is None)
        assert isinstance(Tab.visited, set)
        while (p is not None) and (p.n not in Tab.visited):
            Tab.visited.add(p.n)
            if p.typ == Node.nt:
                s = Tab.First(p.next)
                p.sym.follow |= s
                if Node.DelGraph(p.next):
                    # curSy's follow must later be folded into p.sym's
                    # follow as well; record the dependency (see Complete)
                    p.sym.nts.add(Tab.curSy.n)
            elif p.typ == Node.opt or p.typ == Node.iter:
                Tab.CompFollow(p.sub)
            elif p.typ == Node.alt:
                Tab.CompFollow(p.sub)
                Tab.CompFollow(p.down)
            p = p.next
    @staticmethod
    def Complete( sym ):
        '''Transitively propagate follow sets through the sym.nts
        dependencies recorded by CompFollow.'''
        assert isinstance( sym, Symbol )
        if sym.n not in Tab.visited:
            Tab.visited.add(sym.n)
            nt = Symbol.nonterminals
            for s in nt:
                if s.n in sym.nts:
                    Tab.Complete(s)
                    sym.follow |= s.follow
                    if sym == Tab.curSy:
                        # dependency fully resolved for the current symbol
                        sym.nts.discard(s.n)
@staticmethod
def CompFollowSets():
nt = Symbol.nonterminals;
for sym in nt:
sym.follow = set()
sym.nts = set()
Tab.gramSy.follow.add(Tab.eofSy.n)
Tab.visited = set( )
for sym in nt:
Tab.curSy = sym
Tab.CompFollow(sym.graph)
for sym in nt:
Tab.visited = set()
Tab.curSy = sym
Tab.Complete(sym)
@staticmethod
def LeadingAny( p ):
assert isinstance( p, Node) or (p is None)
if p is None:
return None
a = None
if p.typ == Node.any:
a = p
elif p.typ == Node.alt:
a = Tab.LeadingAny(p.sub)
if a is None:
a = Tab.LeadingAny(p.down)
elif p.typ == Node.opt or p.typ == Node.iter:
a = Tab.LeadingAny(p.sub)
elif Node.DelNode(p) and not p.up:
a = Tab.LeadingAny(p.next)
return a
    @staticmethod
    def FindAS( p ):
        '''find ANY sets: restrict each leading ANY so it does not overlap
        with the symbols that would otherwise be ambiguous'''
        assert isinstance( p, Node ) or ( p is None )
        while p is not None:
            if p.typ == Node.opt or p.typ == Node.iter:
                Tab.FindAS(p.sub)
                a = Tab.LeadingAny(p.sub)
                if a is not None:
                    # ANY must not match what can follow the option/iteration
                    a.set -= Tab.First(p.next)
            elif p.typ == Node.alt:
                s1 = set( )
                q = p
                while q is not None:
                    Tab.FindAS(q.sub)
                    a = Tab.LeadingAny(q.sub)
                    if a is not None:
                        # ANY must not match the first sets of the other alternatives
                        h = Tab.First(q.down)
                        h |= s1
                        a.set -= h
                    else:
                        s1 |= Tab.First(q.sub)
                    q = q.down
            if p.up:
                break
            p = p.next;
@staticmethod
def CompAnySets():
nt = Symbol.nonterminals
for sym in nt:
Tab.FindAS(sym.graph)
@staticmethod
def Expected( p, curSy):
assert isinstance( p, Node ) or (p is None)
assert isinstance( curSy, Symbol )
s = Tab.First(p)
if Node.DelGraph(p):
s |= curSy.follow
return s
@staticmethod
# does not look behind resolvers; only called during LL(1) test and in CheckRes
def Expected0( p, curSy ):
assert isinstance( p, Node)
assert isinstance(curSy,Symbol)
if p.typ == Node.rslv:
return set( )
else:
return Tab.Expected(p, curSy)
    @staticmethod
    def CompSync(p):
        '''Compute the set stored at every sync node in the graph of Tab.curSy
        and accumulate it into Tab.allSyncSets.'''
        assert isinstance( p, Node) or (p is None)
        while (p is not None) and (p.n not in Tab.visited):
            Tab.visited.add(p.n)
            if p.typ == Node.sync:
                s = Tab.Expected(p.next, Tab.curSy)
                s.add(Tab.eofSy.n) # EOF is always a synchronisation point
                Tab.allSyncSets |= s
                p.set = s
            elif p.typ == Node.alt:
                Tab.CompSync(p.sub)
                Tab.CompSync(p.down)
            elif p.typ == Node.opt or p.typ == Node.iter:
                Tab.CompSync(p.sub)
            p = p.next
@staticmethod
def CompSyncSets():
nt = Symbol.nonterminals
Tab.allSyncSets = set( )
Tab.allSyncSets.add(Tab.eofSy.n)
Tab.visited = set( )
for sym in nt:
Tab.curSy = sym
Tab.CompSync(Tab.curSy.graph)
@staticmethod
def SetupAnys():
for p in Node.nodes:
if p.typ == Node.any:
p.set = set( )
for j in xrange(0,len(Symbol.terminals)):
p.set.add(j)
p.set.discard(Tab.eofSy.n)
@staticmethod
def CompDeletableSymbols():
nt = Symbol.nonterminals
changed = False
for sym in nt:
if (not sym.deletable) and (sym.graph is not None) and Node.DelGraph(sym.graph):
sym.deletable = True
changed = True
while changed:
changed = False
for sym in nt:
if (not sym.deletable) and (sym.graph is not None) and Node.DelGraph(sym.graph):
sym.deletable = True
changed = True
for sym in nt:
if sym.deletable:
print " " + sym.name + " deletable"
@staticmethod
def RenumberPragmas():
n = len(Symbol.terminals)
for sym in Symbol.pragmas:
sym.n = n
n += 1
    @staticmethod
    def CompSymbolSets():
        '''Run all symbol set computations and optionally trace the results.'''
        Tab.CompDeletableSymbols()
        Tab.CompFirstSets()
        Tab.CompFollowSets()
        Tab.CompAnySets()
        Tab.CompSyncSets()
        if Tab.ddt[1]: # -f switch: list first & follow sets
            Trace.WriteLine()
            Trace.WriteLine("First & follow symbols:")
            Trace.WriteLine("----------------------")
            Trace.WriteLine()
            nt = Symbol.nonterminals
            for sym in nt:
                Trace.WriteLine(sym.name)
                Trace.Write("first:   ")
                Tab.PrintSet(sym.first, 10)
                Trace.Write("follow:  ")
                Tab.PrintSet(sym.follow,10)
                Trace.WriteLine()
        if Tab.ddt[4]: # -j switch: list ANY and SYNC sets
            Trace.WriteLine()
            Trace.WriteLine("ANY and SYNC sets:")
            Trace.WriteLine("-----------------")
            for p in Node.nodes:
                if p.typ == Node.any or p.typ == Node.sync:
                    Trace.Write(str(p.n), 4)
                    Trace.Write("  ")
                    Trace.Write(Node.nTyp[p.typ], 4)
                    Trace.Write(": ")
                    Tab.PrintSet(p.set,12)
            Trace.WriteLine()
    #---------------------------------------------------------------------
    #  Grammar checks
    #---------------------------------------------------------------------
    @staticmethod
    def GrammarOk():
        '''Run all grammar checks; returns True if the grammar is usable.
        The "and" chain short-circuits, so later checks only run when the
        earlier ones succeeded.'''
        ok = ( Tab.NtsComplete()
                and Tab.AllNtReached()
                and Tab.NoCircularProductions()
                and Tab.AllNtToTerm()
                and Tab.ResolversOk() )
        assert isinstance(ok,bool)
        if ok:
            Tab.CheckLL1() # LL(1) violations are reported as warnings only
        return ok
    #--------------- check for circular productions ----------------------
    @staticmethod
    def GetSingles(p, singles):
        '''Collect into singles all nonterminals s such that the production
        containing p can derive s alone (i.e. sym --> s).'''
        assert isinstance(p,Node) or (p is None)
        assert isinstance(singles,list)
        if p is None:
            return # end of graph
        if p.typ == Node.nt:
            # the nonterminal stands alone if nothing (deletable) follows it
            if p.up or Node.DelGraph(p.next):
                singles.append(p.sym)
        elif p.typ == Node.alt or p.typ == Node.iter or p.typ == Node.opt:
            if p.up or Node.DelGraph(p.next):
                Tab.GetSingles(p.sub, singles)
                if p.typ == Node.alt:
                    Tab.GetSingles(p.down, singles)
        if (not p.up) and Node.DelNode(p):
            Tab.GetSingles(p.next, singles)
@staticmethod
def NoCircularProductions():
class CNode(object):
'''node of list for finding circular productions'''
def __init__( self, l, r ):
self.left = l
self.right = r
lst = [ ]
for sym in Symbol.nonterminals:
singles = [ ]
Tab.GetSingles(sym.graph, singles) # get nonterminals s such that sym-->s
for j in xrange( 0, len(singles) ):
s = singles[j]
lst.append( CNode(sym, s) )
changed = False
i = 0
while i < len(lst):
n = lst[i]
onLeftSide = False
onRightSide = False
for m in lst:
if n.left == m.right:
onRightSide = True
if n.right == m.left:
onLeftSide = True
if (not onLeftSide) or (not onRightSide):
lst.remove(n)
i -= 1
changed = True
i += 1
while changed:
changed = False
i = 0
while i < len(lst):
n = lst[i]
onLeftSide = False
onRightSide = False
for m in lst:
if n.left == m.right:
onRightSide = True
if n.right == m.left:
onLeftSide = True
if (not onLeftSide) or (not onRightSide):
lst.remove(n)
i -= 1
changed = True
i += 1
ok = True
for n in lst:
ok = False;
Errors.count += 1
print " " + n.left.name + " --> " + n.right.name
return ok
    #--------------- check for LL(1) errors ----------------------
    @staticmethod
    def LL1Error( cond, sym):
        '''Print an LL(1) warning of kind cond (1..4) for symbol sym
        (sym may be None for warnings that concern no single symbol).'''
        assert isinstance(cond,int)
        assert isinstance(sym,Symbol) or (sym is None)
        # trailing commas suppress the newline (Python 2 print statement)
        print "  LL(1) warning in " + Tab.curSy.name + ":",
        if sym is not None:
            print sym.name + " is",
        if cond == 1:
            print "the start of several alternatives"
        elif cond == 2:
            print "the start & successor of a deletable structure"
        elif cond == 3:
            print "an ANY node that matches no symbol"
        elif cond == 4:
            print "contents of [...] or {...} must not be deletable"
@staticmethod
def CheckOverlap( s1, s2, cond ):
assert isinstance( s1, set )
assert isinstance( s2, set )
assert isinstance( cond, int )
for sym in Symbol.terminals:
if (sym.n in s1) and (sym.n in s2):
Tab.LL1Error(cond, sym)
    @staticmethod
    def CheckAlts(p):
        '''Check the graph rooted at p for LL(1) conflicts.'''
        assert isinstance(p,Node)
        while p is not None:
            if p.typ == Node.alt:
                q = p
                s1 = set( ) # union of the expected sets of earlier alternatives
                while q is not None: # for all alternatives
                    s2 = Tab.Expected0(q.sub, Tab.curSy)
                    Tab.CheckOverlap(s1, s2, 1)
                    s1 |= s2
                    Tab.CheckAlts(q.sub)
                    q = q.down
            elif p.typ == Node.opt or p.typ == Node.iter:
                if Node.DelSubGraph(p.sub):
                    Tab.LL1Error(4, None) # e.g. [[...]]
                else:
                    # start of the structure must not overlap its successor
                    s1 = Tab.Expected0(p.sub, Tab.curSy)
                    s2 = Tab.Expected(p.next, Tab.curSy)
                    Tab.CheckOverlap(s1, s2, 2)
                Tab.CheckAlts(p.sub)
            elif p.typ == Node.any:
                if len(p.set) == 0:
                    Tab.LL1Error(3, None)
                # e.g. {ANY} ANY or [ANY] ANY
            if p.up:
                break
            p = p.next
@staticmethod
def CheckLL1():
for sym in Symbol.nonterminals:
Tab.curSy = sym
Tab.CheckAlts(Tab.curSy.graph)
    #------------- check if resolvers are legal  --------------------
    resOk = False # set False by ResErr when an illegal resolver is found
    @staticmethod
    def ResErr( p, msg):
        '''Report an illegal resolver at node p and record the failure.'''
        assert isinstance(p,Node)
        assert isinstance(msg, str)
        Errors.SemErr(msg, (p.line, p.pos.col) )
        Tab.resOk = False
    @staticmethod
    def CheckRes(p, rslvAllowed):
        '''Check that every resolver in the graph rooted at p sits at a real
        LL(1) conflict.  rslvAllowed is True only at the start of an
        alternative or option/iteration body.'''
        assert isinstance(p, Node)
        assert isinstance(rslvAllowed, bool)
        while p is not None:
            if p.typ == Node.alt:
                # union of the expected sets of all alternatives
                expected = set( )
                q = p
                while q is not None:
                    expected |= Tab.Expected0(q.sub, Tab.curSy)
                    q = q.down
                soFar = set( ) # expected sets of the alternatives seen so far
                q = p
                while q is not None:
                    if q.sub.typ == Node.rslv:
                        fs = Tab.Expected(q.sub.next,Tab.curSy)
                        if fs.intersection(soFar):
                            Tab.ResErr( q.sub, "Resolver will never be evaluated. "
                                    "Place it at previous conflicting alternative." )
                        if not fs.intersection(expected):
                            Tab.ResErr(q.sub, "Misplaced resolver: no LL(1) conflict." )
                    else:
                        soFar |= Tab.Expected(q.sub,Tab.curSy)
                    Tab.CheckRes(q.sub, True)
                    q=q.down
            elif p.typ in ( Node.iter, Node.opt ):
                if p.sub.typ == Node.rslv:
                    fs = Tab.First(p.sub.next)
                    fsNext = Tab.Expected(p.next, Tab.curSy)
                    if not fs.intersection(fsNext):
                        Tab.ResErr(p.sub, "Misplaced resolver: no LL(1) conflict.")
                Tab.CheckRes(p.sub, True)
            elif p.typ == Node.rslv:
                if not rslvAllowed:
                    Tab.ResErr(p, "Misplaced resolver: no alternative.")
            if p.up:
                break
            p = p.next
            # past the first node, resolvers are no longer allowed
            rslvAllowed = False
@staticmethod
def ResolversOk():
Tab.resOk = True
for sym in Symbol.nonterminals:
Tab.curSy = sym;
Tab.CheckRes(Tab.curSy.graph, False)
return Tab.resOk
    #------------- check if every nts has a production --------------------
    @staticmethod
    def NtsComplete():
        '''True if every nonterminal has a production.'''
        complete = True
        for sym in Symbol.nonterminals:
            if sym.graph is None:
                complete = False
                Errors.count += 1
                print "  No production for " + sym.name
        return complete
    #-------------- check if every nts can be reached  -----------------
    @staticmethod
    def MarkReachedNts(p):
        '''Mark (in Tab.visited) all nonterminals reachable from the graph p.'''
        assert isinstance( p, Node ) or (p is None)
        while p is not None:
            if p.typ == Node.nt and (p.sym.n not in Tab.visited): # new nt reached
                Tab.visited.add(p.sym.n)
                Tab.MarkReachedNts(p.sym.graph)
            elif p.typ == Node.alt or p.typ == Node.iter or p.typ == Node.opt:
                Tab.MarkReachedNts(p.sub)
                if p.typ == Node.alt:
                    Tab.MarkReachedNts(p.down)
            if p.up:
                break
            p = p.next
    @staticmethod
    def AllNtReached():
        '''True if every nonterminal is reachable from the start symbol.'''
        ok = True
        Tab.visited = set( )
        Tab.visited.add(Tab.gramSy.n)
        Tab.MarkReachedNts(Tab.gramSy.graph)
        for sym in Symbol.nonterminals:
            if sym.n not in Tab.visited:
                ok = False
                Errors.count += 1
                print "  " + sym.name + " cannot be reached"
        return ok
    #--------- check if every nts can be derived to terminals ------------
    @staticmethod
    def IsTerm( p, mark):
        '''true if graph can be derived to terminals; mark contains the
        numbers of nonterminals already known to be derivable'''
        assert isinstance( p,Node)
        assert isinstance(mark,set)
        while p is not None:
            # pointing to an unmarked nonterminal blocks derivation
            if p.typ == Node.nt and (p.sym.n not in mark):
                return False
            # an alternation fails only if no branch can be derived
            if ( p.typ == Node.alt and not Tab.IsTerm(p.sub, mark)
                    and ((p.down is None) or not Tab.IsTerm(p.down, mark))):
                return False
            if p.up:
                break
            p = p.next
        return True
@staticmethod
def AllNtToTerm():
ok = True
mark = set( )
#a nonterminal is marked if it can be derived to terminal symbols
changed = False
for sym in Symbol.nonterminals:
if (sym.n not in mark) and Tab.IsTerm(sym.graph, mark):
mark.add(sym.n)
changed = True
while changed:
changed = False
for sym in Symbol.nonterminals:
if (sym.n not in mark) and Tab.IsTerm(sym.graph, mark):
mark.add(sym.n)
changed = True
for sym in Symbol.nonterminals:
if sym.n not in mark:
ok = False
Errors.count += 1
print " " + sym.name + " cannot be derived to terminals"
return ok
#---------------------------------------------------------------------
# Utility functions
#---------------------------------------------------------------------
@staticmethod
def Num(p):
if p is None:
return 0
else:
return p.n
    @staticmethod
    def PrintSet( aSet, indent ):
        '''Write the terminal names contained in aSet to the trace file,
        wrapping at column 80 and indenting continuation lines.'''
        assert isinstance( indent, int )
        col = indent
        for sym in Symbol.terminals:
            if sym.n in aSet:
                ln = len(sym.name)
                if col + ln >= 80: # wrap and re-indent
                    Trace.WriteLine( )
                    col = 1
                    while col < indent:
                        Trace.Write(' ')
                        col += 1
                Trace.Write(sym.name + " ")
                col += ln + 1
        if col == indent: # nothing was printed
            Trace.Write("-- empty set --")
        Trace.WriteLine( )
    tKind = ["fixedToken   ", "classToken   ", "litToken     ", "classLitToken"] # names for Symbol.tokenKind values
    @staticmethod
    def PrintSym(sym):
        '''Write one symbol-table line for sym to the trace file.'''
        assert isinstance(sym,Symbol)
        Trace.Write(str(sym.n), 3)
        Trace.Write(" ")
        Trace.Write(Node.Name(sym.name), -14)
        Trace.Write(" ")
        Trace.Write(Node.nTyp[sym.typ], 2)
        if sym.attrPos is None: # hasAt column
            Trace.Write(" false ")
        else:
            Trace.Write(" true  ")
        if sym.typ == Node.nt:
            Trace.Write(str(Tab.Num(sym.graph)), 5)
            if sym.deletable:
                Trace.Write(" true  ")
            else:
                Trace.Write(" false ")
        else:
            Trace.Write("            ")
        Trace.Write(str(sym.line), 5)
        Trace.Write("  " + Tab.tKind[sym.tokenKind].strip());
        if (sym.typ == Node.pr or sym.typ == Node.t) and (sym.symName is not None):
            Trace.Write("  " + sym.symName)
        Trace.WriteLine()
    @staticmethod
    def PrintSymbolTable():
        '''Write the complete symbol table and the literal tokens to the trace file.'''
        Trace.WriteLine("Symbol Table:")
        Trace.WriteLine("------------")
        Trace.WriteLine()
        Trace.WriteLine(" nr name           typ  hasAt graph  del   line tokenKind")
        for sym in Symbol.terminals:
            Tab.PrintSym(sym)
        for sym in Symbol.pragmas:
            Tab.PrintSym(sym)
        for sym in Symbol.nonterminals:
            Tab.PrintSym(sym)
        Trace.WriteLine()
        Trace.WriteLine("Literal Tokens:")
        Trace.WriteLine("--------------")
        # Python 2 dict iteration; literals maps token text -> Symbol
        for me_key, me_value in Tab.literals.iteritems():
            Trace.WriteLine("_" + me_value.name + " = " + me_key + ".");
        Trace.WriteLine()
@staticmethod
def XRef():
tab = { }
for sym in Symbol.nonterminals:
if sym in tab:
lst = tab[ sym ]
else:
lst = [ ]
tab[sym] = lst
lst.append( -sym.line )
# collect lines where symbols have been referenced
for n in Node.nodes:
if n.typ in (Node.t, Node.wt, Node.nt):
if n.sym in tab:
lst = tab[ n.sym ]
else:
lst = [ ]
tab[n.sym] = lst
lst.append(n.line)
# print cross reference list
Trace.WriteLine("Cross reference list:")
Trace.WriteLine("--------------------")
Trace.WriteLine( )
keyList = [ x.name for x in tab.keys() ]
keyList.sort( )
for key in keyList:
for k in tab.keys( ):
if k.name == key:
sym = k
break
Trace.Write(" ")
Trace.Write(Node.Name(sym.name), -12)
lst = tab[sym]
col = 14
for line in lst:
if col + 5 > 80:
Trace.WriteLine()
for col in xrange(1,15):
Trace.Write(" ");
Trace.Write(str(line), 5)
col += 5
Trace.WriteLine()
Trace.WriteLine()
Trace.WriteLine()
    @staticmethod
    def SetDDT(s):
        '''Transfer parsed optparse option flags (s) into Tab.ddt.
        The mapping gives the ddt[] index driven by each option.'''
        options = { 'traceAutomaton': 0,
                'generateDriver': 11,
                'firstAndFollow': 1,
                'syntaxGraph': 2,
                'traceComputations': 3,
                'listAnyAndSync': 4,
                'mergeErrors': 5,
                'tokenNames': 10,
                'statistics': 8,
                'symbolTable': 6,
                'testOnly': 9,
                'crossReferences': 7 }
        # Python 2 dict iteration; s.__dict__ holds the optparse dest values
        for key,value in options.iteritems( ):
            if s.__dict__[ key ]:
                Tab.ddt[ value ] = True
    @staticmethod
    def Init():
        '''Reset the table: create the EOF symbol and clear the literals map.'''
        Tab.eofSy = Symbol( Node.t, 'EOF', 0 )
        Tab.literals = { }
# considerable extension from here on to handle name generation
@staticmethod
def NewName(alias,name):
assert isinstance( alias, str )
assert isinstance( name, str )
if name.find(' ') >= 0:
Errors.SemErr("tokens must not contain blanks")
u = UserDefinedTokenName(alias, name)
    @staticmethod
    def Ascii(ch):
        '''Symbolic spelling for a single character, used to build valid
        identifiers from token literals (e.g. "+" -> "plus").'''
        assert isinstance(ch, (str,unicode))
        name = {
            chr( 0) : "nul",
            chr( 1) : "soh",
            chr( 2) : "stx",
            chr( 3) : "etx",
            chr( 4) : "eot",
            chr( 5) : "enq",
            chr( 6) : "ack",
            chr( 7) : "bel",
            chr( 8) : "bs",
            chr( 9) : "ht",
            chr(10) : "lf",
            chr(11) : "vt",
            chr(12) : "ff",
            chr(13) : "cr",
            chr(14) : "so",
            chr(15) : "si",
            chr(16) : "dle",
            chr(17) : "dc1",
            chr(18) : "dc2",
            chr(19) : "dc3",
            chr(20) : "dc4",
            chr(21) : "nak",
            chr(22) : "syn",
            chr(23) : "etb",
            chr(24) : "can",
            chr(25) : "em",
            chr(26) : "sub",
            chr(27) : "esc",
            chr(28) : "fs",
            chr(29) : "gs",
            chr(30) : "rs",
            chr(31) : "us",
            ' '  : "_",
            '!'  : "bang",
            '\"' : "dquote",
            '#'  : "hash",
            '$'  : "dollar",
            '%'  : "percent",
            '&'  : "and",
            '\'' : "squote",
            '('  : "lparen",
            ')'  : "rparen",
            '*'  : "star",
            '+'  : "plus",
            ','  : "comma",
            '-'  : "minus",
            '.'  : "point",
            '/'  : "slash",
            '0'  : "d0",
            '1'  : "d1",
            '2'  : "d2",
            '3'  : "d3",
            '4'  : "d4",
            '5'  : "d5",
            '6'  : "d6",
            '7'  : "d7",
            '8'  : "d8",
            '9'  : "d9",
            ':'  : "colon",
            ';'  : "semicolon",
            '<'  : "less",
            '='  : "equal",
            '>'  : "greater",
            '?'  : "query",
            '@'  : "at",
            '['  : "lbrack",
            '\\' : "backslash",
            ']'  : "rbrack",
            '^'  : "uparrow",
            '_'  : "underscore",
            '`'  : "accent",
            '{'  : "lbrace",
            '|'  : "bar",
            '}'  : "rbrace",
            '~'  : "tilde",
            chr(127) : "delete" }
        if ch in name:
            return name[ch]
        else:
            # NOTE(review): appends the character itself, not its code point
            # (str(ch) of a 1-char string is the char) -- confirm intended
            return "ASC" + str(ch)
    @staticmethod
    def SymName(name):
        '''Map a token name or quoted literal to a valid generated identifier
        ending in "_Sym"; user-defined aliases (NAMES section) take priority.'''
        assert isinstance( name, (str,unicode) )
        name = str(name)
        for u in UserDefinedTokenName.NameTab:
            if name == u.name:
                return u.alias
        if name[0] == '"': # literal token: derive the name from its characters
            name = DFA.Unescape(name[1:-1])
        S = ''
        for i in xrange(0,len(name)):
            ch = name[i]
            # note: 'and' binds tighter than 'or', so digits are kept only
            # when they are not the first character
            if ('a' <= ch <= 'z' or 'A' <= ch <= 'Z'
                    or '0' <= ch <= '9' and i > 0):
                S += ch
            else:
                S += Tab.Ascii(ch)
        S += '_Sym'
        return S
    @staticmethod
    def AssignNames():
        '''Give every terminal and pragma a generated symName; the first
        terminal (EOF) and the last one get fixed names.'''
        for sym in Symbol.terminals:
            sym.symName = Tab.SymName( sym.name )
        for sym in Symbol.pragmas:
            sym.symName = Tab.SymName(sym.name);
        Symbol.terminals[0].symName = 'EOF_SYM'
        Symbol.terminals[ len(Symbol.terminals)-1 ].symName = 'NOT_SYM'
        sys.stdout.write( ' (Names assigned)' )
class DFA( object ):
    '''Scanner automaton construction and scanner code generation state.'''
    maxStates = 0
    EOF = -1
    CR  = '\r'
    LF  = '\n'
    firstState = None # State
    lastState = None # State, last allocated state
    lastSimState = 0 # last non melted state
    fram = None # Reader, scanner frame input
    gen = None # PrintWriter, generated scanner file
    curSy = None # Symbol, current token to be recognized (in FindTrans)
    curGraph = None # Node, start of graph for current token (in FindTrans)
    ignoreCase = False # true if input should be treated case-insensitively
    dirtyDFA = False # DFA may become nondeterministic in MatchedDFA
    hasCtxMoves = False # DFA has context transitions
    srcName = '' # name of the attributed grammar file
    srcDir = '' # directory of attributed grammar file
@staticmethod
def framRead():
try:
return DFA.fram.read(1)
except:
raise RuntimeError("-- error reading Scanner.frame")
return DFA.EOF
    #---------- Output primitives
    @staticmethod
    def Ch(ch):
        '''Render a character operand for generated code.
        NOTE(review): after the str() conversion the comparisons below are
        lexicographic string comparisons (ch >= str(127) compares against
        the string "127"), so for int input this nearly always returns the
        decimal string of the code.  The commented-out block underneath
        looks like the intended logic -- confirm before changing.'''
        if isinstance(ch, int):
            ch = str( ch )
        if ch < ' ' or ch >= str(127) or ch == '\'' or ch == '\\':
            return ch
        else:
            return "ord('" + ch + "')"
        #if isinstance(ch, (str,unicode)):
            #ch = ord(ch)
        #if (ch < ord(' ') or ch >= 127 or ch == ord('\'') or ch == ord('\\')):
            #return str(ch)
        #else:
            #return "ord('" + chr(ch) + "')"
@staticmethod
def ReportCh(ch):
if isinstance(ch, (str,unicode)):
ch = ord(ch)
if (ch < ord(' ') or ch >= 127 or ch == ord('\'') or ch == ord('\\')):
return str(ch)
else:
return ''.join( [ "'", chr(ch), "'" ] )
@staticmethod
def ChCond(ch, relOpStr='=='):
if isinstance(ch, (str,unicode)):
ch = ord(ch)
if (ch < ord(' ') or ch >= 127 or ch == ord('\'') or ch == ord('\\')):
return ''.join( [ 'ord(self.ch) ', relOpStr, " ", str(ch) ] )
else:
return ''.join( [ 'self.ch ', relOpStr, " '", chr(ch), "'" ] )
    @staticmethod
    def PutRange(s):
        '''Write a condition matching exactly the character codes in s to the
        generated scanner, compressing consecutive codes into ranges.'''
        assert isinstance(s, set)
        # assumes at most 32 disjoint ranges -- TODO confirm this bound
        lo = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
        hi = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
        # fill lo and hi
        mx = CharClass.charSetSize
        top = -1
        i = 0
        while i < mx:
            if i in s: # start of a new range
                top += 1
                lo[top] = i
                i += 1
                while i < mx and (i in s):
                    i += 1
                hi[top] = i-1
            else:
                i += 1
        # print ranges
        if top == 1 and lo[0] == 0 and hi[1] == mx-1 and hi[0]+2 == lo[1]:
            # only one single character is missing: emit a negated test
            s1 = set( )
            s1.add(hi[0]+1)
            DFA.gen.write("not ")
            DFA.PutRange(s1)
            DFA.gen.write(" and Scanner.ch != Scanner.buffer.EOF")
        else:
            DFA.gen.write("(")
            for i in xrange( 0, top+1 ):
                if hi[i] == lo[i]: # single character
                    DFA.gen.write( DFA.ChCond( lo[i] ) )
                elif (lo[i] == 0): # open lower bound
                    DFA.gen.write( DFA.ChCond( hi[i], '<=' ) )
                else:
                    DFA.gen.write( DFA.ChCond( lo[i], '>=' ) )
                    DFA.gen.write( ' and ' + DFA.ChCond( hi[i], '<=' ) )
                if i < top:
                    DFA.gen.write('\n')
                    DFA.gen.write("   or ")
            if 'ANYCHAR' in s: # sentinel: also match codes above the char set
                DFA.gen.write(' or ord(self.ch) > %d' % CharClass.charSetSize)
            DFA.gen.write(")")
    #---------- String handling
    @staticmethod
    def Hex2Char(s):
        '''Convert a string of hex digits to the corresponding character.'''
        assert isinstance( s, (str,unicode) )
        val = 0 # int
        for i in xrange(0,len(s)):
            ch = s[i] # char
            if '0' <= ch <= '9':
                val = 16 * val + (ord(ch) - ord('0'))
            elif 'a' <= ch <= 'f':
                val = 16 * val + (10 + ord(ch) - ord('a'))
            elif 'A' <= ch <= 'F':
                val = 16 * val + (10 + ord(ch) - ord('A'))
            else:
                Errors.SemErr("bad escape sequence in string or character")
        if val > CharClass.charSetSize:
            Errors.SemErr("bad escape sequence in string or character")
        # NOTE(review): after the error, still returns val modulo the char
        # set size instead of failing hard -- confirm intended
        return chr(val % CharClass.charSetSize)
@staticmethod
def Char2Hex( ch):
assert isinstance(ch,(str,unicode))
hx = hex(ord(ch))
for i in xrange(len(hx), 4):
hx = '0' + hx;
return "\\u" + hx;
@staticmethod
def Unescape (s):
# replaces escape sequences in s by their Unicode values.
assert isinstance( s, (str,unicode) )
buf = ''
i = 0
while i < len(s):
if s[i] == '\\':
ch = s[i+1]
if ch in ( 'u', 'x' ):
if i + 4 <= len(s):
buf += DFA.Hex2Char(s[i+2: i+6])
i += 6
else:
Errors.SemErr("bad escape sequence in string or character")
i = len(s)
elif ch in ( '\\', '\'', '\"', 'r', 'n', 't', '0', 'a', 'b', 'f'):
buf += { '\\': '\\',
'\'': '\'',
'\"': '\"',
'r' : '\r',
'n' : '\n',
't' : '\t',
'0' : '\0',
'a' : '\a',
'b' : '\b',
'f' : '\f' } [ s[i+1] ]
i += 2
else:
Errors.SemErr("bad escape sequence in string or character")
i += 2
else:
buf += s[i]
i += 1
return buf
@staticmethod
def Escape(s):
assert isinstance(s, (str,unicode))
buf = ''
for ch in s:
if ch in ('\\', '\'', '\"', '\t', '\r', '\n'):
buf += { '\\': "\\\\",
'\'': "\\'",
'\"': "\\\"",
'\t': "\\t",
'\r': "\\r",
'\n': "\\n" }[ ch ]
elif ch < ' ' or ch > chr(127): #'\x7f'
buf += DFA.Char2Hex(ch)
else:
buf += ch
return buf
#---------- State handling
@staticmethod
def NewState():
s = State() # State
if DFA.firstState is None:
DFA.firstState = s
else:
DFA.lastState.next = s
DFA.lastState = s
return s
    @staticmethod
    def NewTransition( frm, to, typ, sym, tc):
        '''Add a transition frm --sym--> to of node kind typ with transition
        code tc; sym may be a character or its code.'''
        assert isinstance(frm,State)
        assert isinstance(to,State)
        assert isinstance(typ,int)
        assert isinstance(sym,(int,str,unicode))
        assert isinstance(tc,int)
        if to == DFA.firstState:
            Errors.SemErr("token must not start with an iteration")
        if isinstance(sym, (str,unicode)):
            sym = ord(sym) # store characters by code point
        t = Target(to)
        a = Action(typ, sym, tc)
        a.target = t
        frm.AddAction(a)
        if typ == Node.clas:
            DFA.curSy.tokenKind = Symbol.classToken
    @staticmethod
    def CombineShifts():
        '''Merge actions of a state that lead to the same target with the
        same transition code into a single action over the union of their
        symbol sets.'''
        state = DFA.firstState
        while state is not None:
            a = state.firstAction
            while a is not None:
                b = a.next;
                while b is not None:
                    if a.target.state == b.target.state and a.tc == b.tc:
                        seta = a.Symbols()
                        setb = b.Symbols()
                        seta |= setb
                        a.ShiftWith(seta)
                        # detach b, but advance past it first
                        c = b
                        b = b.next
                        state.DetachAction(c)
                    else:
                        b = b.next
                a = a.next
            state = state.next
@staticmethod
def FindUsedStates( state, used):
assert isinstance(state,State)
assert isinstance(used,set)
if state.nr in used:
return
used.add(state.nr)
a = state.firstAction
while a is not None:
DFA.FindUsedStates(a.target.state, used)
a = a.next
@staticmethod
def DeleteRedundantStates():
newState = [ None for x in xrange(State.lastNr + 1) ]
used = set( )
DFA.FindUsedStates(DFA.firstState, used)
# combine equal final states
s1 = DFA.firstState.next
while s1 is not None:
# DFA.firstState cannot be final
if ((s1.nr in used) and (s1.endOf is not None)
and (s1.firstAction is None) and not s1.ctx):
s2 = s1.next
while s2 is not None:
nots2_ctx = 1 if not s2.ctx else 0
if ((s2.nr in used) and s1.endOf == s2.endOf and (s2.firstAction is None) & nots2_ctx):
used.discard(s2.nr)
newState[s2.nr] = s1
s2 = s2.next
s1 = s1.next
state = DFA.firstState
while state is not None:
if state.nr in used:
a = state.firstAction
while a is not None:
if a.target.state.nr not in used:
a.target.state = newState[a.target.state.nr]
a = a.next
state = state.next
# delete unused states
DFA.lastState = DFA.firstState
State.lastNr = 0 # DFA.firstState has number 0
state = DFA.firstState.next
while state is not None:
if state.nr in used:
State.lastNr += 1
state.nr = State.lastNr
DFA.lastState = state
else:
DFA.lastState.next = state.next
state = state.next
@staticmethod
def TheState(p):
assert isinstance(p, Node) or (p is None)
if p is None:
state = DFA.NewState()
state.endOf = DFA.curSy
return state
else:
return p.state
    @staticmethod
    def Step( frm, p, stepped):
        '''Enter transitions leaving state frm for graph node p; stepped
        guards against revisiting nodes of an iteration.'''
        assert isinstance(frm,State)
        assert isinstance(p,Node) or (p is None)
        assert isinstance(stepped,set)
        if p is None:
            return
        stepped.add(p.n)
        if p.typ in (Node.clas, Node.chr):
            DFA.NewTransition(frm, DFA.TheState(p.next), p.typ, p.val, p.code)
        elif p.typ == Node.alt:
            DFA.Step(frm, p.sub, stepped)
            DFA.Step(frm, p.down, stepped)
        elif p.typ in (Node.iter, Node.opt):
            # the structure may be skipped entirely
            if (p.next is not None) and (p.next.n not in stepped):
                DFA.Step(frm, p.next, stepped)
            DFA.Step(frm, p.sub, stepped)
    @staticmethod
    def NumberNodes(p, state):
        '''Assigns a state n.state to every node n. There will be a transition
        from n.state to n.next.state triggered by n.val. All nodes in an
        alternative chain are represented by the same state.'''
        assert isinstance(p, Node) or (p is None)
        assert isinstance(state,State) or (state is None)
        if p is None:
            return
        if p.state is not None: # already visited
            return
        if state is None: # no state assigned yet
            state = DFA.NewState()
        p.state = state
        if Node.DelGraph(p): # rest of the token graph is deletable
            state.endOf = DFA.curSy
        if p.typ in (Node.clas, Node.chr):
            DFA.NumberNodes(p.next, None)
        elif p.typ == Node.opt:
            DFA.NumberNodes(p.next, None)
            DFA.NumberNodes(p.sub, state)
        elif p.typ == Node.iter:
            DFA.NumberNodes(p.next, state)
            DFA.NumberNodes(p.sub, state)
        elif p.typ == Node.alt:
            DFA.NumberNodes(p.sub, state)
            DFA.NumberNodes(p.down, state)
    @staticmethod
    def FindTrans (p, start, marked):
        '''Create the transitions for the graph rooted at p; start is True
        at the first node of a group of equally numbered nodes.'''
        assert isinstance(p,Node) or (p is None)
        assert isinstance(start,bool)
        assert isinstance(marked,set)
        if (p is None) or (p.n in marked):
            return
        marked.add(p.n)
        if start:
            DFA.Step(p.state, p, set( )) # start of group of equally numbered nodes
        if p.typ in (Node.clas, Node.chr):
            DFA.FindTrans(p.next, True, marked)
        elif p.typ == Node.opt:
            DFA.FindTrans(p.next, True, marked)
            DFA.FindTrans(p.sub, False, marked)
        elif p.typ == Node.iter:
            DFA.FindTrans(p.next, False, marked)
            DFA.FindTrans(p.sub, False, marked)
        elif p.typ == Node.alt:
            DFA.FindTrans(p.sub, False, marked)
            DFA.FindTrans(p.down, False, marked)
    @staticmethod
    def ConvertToStates(p, sym):
        '''Convert the token graph of sym (rooted at p) into DFA states.'''
        assert isinstance(p,Node)
        assert isinstance(sym,Symbol)
        DFA.curGraph = p
        DFA.curSy = sym
        if Node.DelGraph(DFA.curGraph):
            Errors.SemErr("token might be empty")
        DFA.NumberNodes(DFA.curGraph, DFA.firstState)
        DFA.FindTrans(DFA.curGraph, True, set( ))
@staticmethod
def MatchLiteral(s, sym):
assert isinstance(sym,Symbol)
assert isinstance(s,(str,unicode))
'''match string against current automaton; store it either as a
fixedToken or as a litToken'''
s = DFA.Unescape(s[1:-1])
ln = len(s)
state = DFA.firstState # State
a = None
endedPrematurely = False
for i in xrange(0, ln): # try to match s against existing DFA
a = state.TheAction(s[i])
if a is None:
endedPrematurely = True
break
state = a.target.state
if not endedPrematurely:
i = ln
# if s was not totally consumed or leads to a non-final state => make new DFA from it
if (i != ln) or (state.endOf is None):
state = DFA.firstState
i = 0
a = None
DFA.dirtyDFA = True
while i < ln: # make new DFA for s[i..len-1]
to = DFA.NewState() # State
DFA.NewTransition(state, to, Node.chr, s[i], Node.normalTrans)
state = to
i += 1
matchedSym = state.endOf # Symbol
if state.endOf is None:
state.endOf = sym
elif (matchedSym.tokenKind == Symbol.fixedToken) or (a is not None) and (a.tc == Node.contextTrans):
# s matched a token with a fixed definition or a token with an appendix that will be cut off
Errors.SemErr("tokens " + sym.name + " and " + matchedSym.name + " cannot be distinguished");
else:
# matchedSym == classToken or classLitToken
matchedSym.tokenKind = Symbol.classLitToken
sym.tokenKind = Symbol.litToken
    @staticmethod
    def SplitActions( state, a, b):
        '''Rewrite the overlapping actions a and b of state into disjoint
        actions, distributing their targets over the intersection pieces.'''
        assert isinstance(state,State)
        assert isinstance(a, Action)
        assert isinstance(b, Action)
        seta = a.Symbols()
        setb = b.Symbols()
        if seta == setb: #seta.equals(setb): identical character sets -> merge into a
            a.AddTargets(b)
            state.DetachAction(b)
        elif seta >= setb:
            # b is a subset of a: b keeps the common part, a keeps the remainder
            setc = copy.copy(seta)
            setc -= setb
            b.AddTargets(a)
            a.ShiftWith(setc)
        elif setb >= seta:
            # a is a subset of b: symmetric case
            setc = copy.copy(setb)
            setc -= seta
            a.AddTargets(b)
            b.ShiftWith(setc)
        else:
            # proper overlap: split into a-only, b-only and a new common action
            setc = copy.copy(seta)
            setc &= setb
            seta -= setc
            setb -= setc
            a.ShiftWith(seta)
            b.ShiftWith(setb)
            c = Action(0, 0, Node.normalTrans) # typ and sym are set in ShiftWith
            c.AddTargets(a)
            c.AddTargets(b)
            c.ShiftWith(setc)
            state.AddAction(c)
@staticmethod
def Overlap(a, b):
assert isinstance(a, Action)
assert isinstance(b, Action)
if a.typ == Node.chr:
if (b.typ == Node.chr):
return a.sym == b.sym
else:
setb = CharClass.Set(b.sym)
return a.sym in setb
else:
seta = CharClass.Set(a.sym)
if b.typ == Node.chr:
return b.sym in seta
else:
setb = CharClass.Set(b.sym)
return len(seta & setb) > 0
#return seta.intersects( setb )
@staticmethod
def MakeUnique( state):
assert isinstance(state,State)
# return True if actions were split
changed = False # boolean
a = state.firstAction
while a is not None:
b = a.next
while b is not None:
if DFA.Overlap(a, b):
DFA.SplitActions(state, a, b)
changed = True
b = b.next
a = a.next
return changed
    @staticmethod
    def MeltStates( state):
        '''Replace every multi-target action of state by a transition to a
        melted (power-set) state, creating melted states on demand.'''
        assert isinstance(state,State)
        action = state.firstAction
        while action is not None:
            if action.target.next is not None:
                # action has several target states -> needs a melted state
                param = [None,None]   # out-parameters: [targets, endOf]
                ctx = action.GetTargetStates(param)
                targets = param[0]
                endOf = param[1]
                melt = Melted.StateWithSet(targets) # Melted: reuse if already built
                if melt is None:
                    # build a new state that merges all target states
                    s = DFA.NewState()
                    s.endOf = endOf
                    s.ctx = ctx
                    targ = action.target
                    while targ is not None:
                        s.MeltWith(targ.state)
                        targ = targ.next
                    # re-split until the merged state's actions are disjoint
                    changed = DFA.MakeUnique(s)
                    while changed:
                        changed = DFA.MakeUnique(s)
                    melt = Melted(targets, s)
                # retarget the action at the single melted state
                action.target.next = None
                action.target.state = melt.state
            action = action.next
@staticmethod
def FindCtxStates():
state = DFA.firstState
while state is not None:
a = state.firstAction
while a is not None:
if a.tc == Node.contextTrans:
a.target.state.ctx = True
a = a.next
state = state.next
@staticmethod
def MakeDeterministic():
DFA.lastSimState = DFA.lastState.nr
DFA.maxStates = 2 * DFA.lastSimState # heuristic for set size in Melted.set
DFA.FindCtxStates()
state = DFA.firstState
while state is not None:
changed = DFA.MakeUnique(state)
while changed:
changed = DFA.MakeUnique(state)
state = state.next
state = DFA.firstState
while state is not None:
DFA.MeltStates(state)
state = state.next
DFA.DeleteRedundantStates()
DFA.CombineShifts()
    @staticmethod
    def PrintStates( ):
        '''Dump the automaton (states, their actions and targets) and the
        character classes to the trace file.'''
        Trace.WriteLine( )
        Trace.WriteLine("Automaton Trace:")
        Trace.WriteLine("---------------")
        Trace.WriteLine( )
        Trace.WriteLine("---------- states ----------")
        state = DFA.firstState
        while state is not None:
            first = True # boolean: first action of this state?
            if state.endOf is None:
                Trace.Write(" ")
            else:
                # final state: show which token ends here
                Trace.Write("E(" + Node.Name(state.endOf.name) + ")", 12)
            Trace.Write(str(state.nr) + ":", 4)
            if state.firstAction is None:
                Trace.WriteLine()
            action = state.firstAction
            while action is not None:
                if first:
                    Trace.Write(" ")
                    first = False
                else:
                    # align continuation lines under the first action
                    Trace.Write(" ")
                if (action.typ == Node.clas):
                    Trace.Write(CharClass.classes[action.sym].name)
                else:
                    Trace.Write(DFA.ReportCh(action.sym), 3)
                # list all target state numbers of this action
                targ = action.target
                while targ is not None:
                    Trace.Write(str(targ.state.nr), 4)
                    targ = targ.next
                if action.tc == Node.contextTrans:
                    Trace.WriteLine(" context")
                else:
                    Trace.WriteLine( )
                action = action.next
            state = state.next
        Trace.WriteLine( )
        Trace.WriteLine("---------- character classes ----------")
        CharClass.WriteClasses( )
    @staticmethod
    def GenComBody2( com ):
        '''Emit the scan loop of a generated comment skipper (variant used by
        GenComment when the comment START token is a single character).
        NOTE(review): the leading indentation inside the emitted source strings
        appears collapsed in this copy of the file -- verify against upstream
        CocoPy before regenerating scanners from it.'''
        assert isinstance(com,Comment)
        DFA.gen.write (" while True:\n")
        DFA.gen.write (" if " + DFA.ChCond(com.stop[0]) + ":\n")
        if len(com.stop) == 1:
            # one-character stop token
            DFA.gen.write (" level -= 1\n")
            DFA.gen.write (" if level == 0:\n")
            DFA.gen.write (" self.oldEols = self.line - line0\n")
            DFA.gen.write (" self.NextCh()\n")
            DFA.gen.write (" return True\n")
            DFA.gen.write (" self.NextCh()\n")
        else:
            # two-character stop token: verify the second character too
            DFA.gen.write (" self.NextCh()\n")
            DFA.gen.write (" if " + DFA.ChCond(com.stop[1]) + ":\n")
            DFA.gen.write (" level -= 1\n")
            DFA.gen.write (" if level == 0:\n")
            DFA.gen.write (" self.oldEols = self.line - line0\n")
            DFA.gen.write (" self.NextCh()\n")
            DFA.gen.write (" return True\n")
            DFA.gen.write (" self.NextCh()\n")
        if com.nested:
            # recognize nested comment starts and bump the nesting level
            DFA.gen.write (" elif " + DFA.ChCond(com.start[0]) + ":\n")
            if (len(com.start) == 1):
                DFA.gen.write(" level += 1\n")
                DFA.gen.write(" self.NextCh()\n")
            else:
                DFA.gen.write(" self.NextCh()\n")
                DFA.gen.write(" if " + DFA.ChCond(com.start[1]) + ":\n")
                DFA.gen.write(" level += 1\n")
                DFA.gen.write(" self.NextCh()\n")
        DFA.gen.write (" elif self.ch == Buffer.EOF:\n")
        DFA.gen.write (" return False\n")
        DFA.gen.write (" else:\n")
        DFA.gen.write (" self.NextCh()\n")
@staticmethod
def GenComBody3( com):
assert isinstance(com,Comment)
DFA.gen.write (" while True:\n")
DFA.gen.write (" if " + DFA.ChCond(com.stop[0]) + ":\n")
if len(com.stop) == 1:
DFA.gen.write (" level -= 1\n")
DFA.gen.write (" if level == 0:\n")
DFA.gen.write (" self.oldEols = self.line - line0\n")
DFA.gen.write (" self.NextCh()\n")
DFA.gen.write (" return True\n")
DFA.gen.write (" self.NextCh()\n")
else:
DFA.gen.write (" self.NextCh()\n")
DFA.gen.write (" if " + DFA.ChCond(com.stop[1]) + ":\n")
DFA.gen.write (" level -= 1\n")
DFA.gen.write (" if level == 0:\n")
DFA.gen.write (" self.oldEols = self.line - line0\n")
DFA.gen.write (" self.NextCh()\n")
DFA.gen.write (" return True\n")
DFA.gen.write (" self.NextCh()\n")
if com.nested:
DFA.gen.write (" elif " + DFA.ChCond(com.start[0]) + ":\n")
if (len(com.start) == 1):
DFA.gen.write(" level += 1")
DFA.gen.write(" self.NextCh()")
else:
DFA.gen.write(" self.NextCh()\n")
DFA.gen.write(" if " + DFA.ChCond(com.start[1]) + ":\n")
DFA.gen.write(" level += 1\n")
DFA.gen.write(" self.NextCh()\n")
DFA.gen.write (" elif self.ch == Buffer.EOF:\n")
DFA.gen.write (" return False\n")
DFA.gen.write (" else:\n")
DFA.gen.write (" self.NextCh()\n")
    @staticmethod
    def GenComment( com, i):
        '''Emit the CommentI() method of the generated scanner that skips
        comment number i (com describes its start/stop tokens and nesting).'''
        assert isinstance(com,Comment)
        assert isinstance(i, int)
        DFA.gen.write ( '\n' )
        DFA.gen.write (" def Comment" + str(i) + "( self ):\n")
        DFA.gen.write (" level = 1\n")
        DFA.gen.write (" line0 = self.line\n")
        DFA.gen.write (" lineStart0 = self.lineStart\n")
        if len(com.start) == 1:
            # one-character start token was already recognized by the caller
            DFA.gen.write(" self.NextCh()\n")
            DFA.GenComBody2(com)
        else:
            # two-character start: verify the second char, else back up two
            # positions and report "no comment"
            DFA.gen.write(" self.NextCh()\n")
            DFA.gen.write(" if " + DFA.ChCond(com.start[1]) + ":\n")
            DFA.gen.write(" self.NextCh()\n")
            DFA.GenComBody3(com)
            DFA.gen.write(" else:\n")
            DFA.gen.write(" if self.ch == Scanner.EOL:\n")
            DFA.gen.write(" self.line -= 1\n")
            DFA.gen.write(" self.lineStart = lineStart0\n")
            DFA.gen.write(" self.pos = self.pos - 2\n")
            DFA.gen.write(" self.buffer.setPos(self.pos+1)\n")
            DFA.gen.write(" self.NextCh()\n")
            DFA.gen.write(" return False\n")
@staticmethod
def CopyFramePart(stop):
assert isinstance(stop,(str,unicode))
last = 0; # int
startCh = stop[0]
endOfStopString = len(stop) - 1
ch = DFA.framRead() # int
while ch != DFA.EOF:
if ch == startCh:
i = 0
if i == endOfStopString:
return # stop[0..i] found
ch = DFA.framRead()
i += 1
while ch == stop[i]:
if (i == endOfStopString):
return # stop[0..i] found
ch = DFA.framRead()
i += 1
# stop
DFA.gen.write(stop[0:1])
elif ch == DFA.LF:
if last != DFA.CR:
DFA.gen.write('\n')
last = ch
ch = DFA.framRead()
elif (ch == DFA.CR):
DFA.gen.write('\n')
last = ch
ch = DFA.framRead()
else:
if isinstance(chr, int):
DFA.gen.write(chr(ch))
else:
DFA.gen.write(ch)
last = ch
ch = DFA.framRead()
raise RuntimeError(" -- incomplete or corrupt scanner frame file")
@staticmethod
def SymName( sym):
assert isinstance(sym,Symbol)
if sym.name[0].isalpha( ):
# real name value is stored in Tab.literals
for me_key, me_value in Tab.literals.iteritems( ):
if me_value == sym:
return me_key
return sym.name
    @staticmethod
    def GenLiterals( ):
        '''Emit the body of the generated CheckLiteral(): an if/elif chain that
        maps a literal token's text to its token kind.'''
        DFA.gen.write("lit = self.t.val")
        if (DFA.ignoreCase):
            # compare case-insensitively against lower-cased literal names
            DFA.gen.write(".lower()")
        DFA.gen.write( '\n' )
        first = True # boolean: first literal opens the if-chain
        for sym in Symbol.terminals:
            if (sym.tokenKind == Symbol.litToken):
                name = DFA.SymName(sym) # String
                if DFA.ignoreCase:
                    name = name.lower()
                # sym.name stores literals with quotes, e.g. "\"Literal\"",
                # so name can be embedded verbatim as a Python string literal
                if (first):
                    DFA.gen.write(" if ")
                    first = False
                else:
                    DFA.gen.write(" elif ")
                DFA.gen.write ("lit == " + name + ":\n")
                DFA.gen.write (" self.t.kind = ")
                DFA.PrintTermName(sym)
                DFA.gen.write ( '\n' )
@staticmethod
def WriteState( state):
assert isinstance(state,State)
endOf = state.endOf # Symbol
DFA.gen.write(str(state.nr) + ":\n")
ctxEnd = state.ctx # boolean
action = state.firstAction
while action is not None:
if action == state.firstAction:
DFA.gen.write (" if ")
else:
DFA.gen.write (" elif ")
if (action.typ == Node.chr):
DFA.gen.write (DFA.ChCond(action.sym))
else:
DFA.PutRange(CharClass.Set(action.sym))
DFA.gen.write (":\n")
if action.tc == Node.contextTrans:
DFA.gen.write (" apx += 1\n")
ctxEnd = False
elif (state.ctx):
DFA.gen.write (" apx = 0\n")
if DFA.ignoreCase:
DFA.gen.write (" buf += unicode(self.ch)\n")
else:
DFA.gen.write (" buf += unicode(self.ch)\n")
DFA.gen.write (" self.NextCh()\n")
DFA.gen.write (" state = " + str(action.target.state.nr) + '\n')
action = action.next
if state.firstAction is not None:
DFA.gen.write (" else:\n")
if ctxEnd: # final context state: cut appendix
DFA.gen.write('\n')
DFA.gen.write(" self.pos = self.pos - apx - 1\n")
DFA.gen.write(" self.line = self.t.line\n")
DFA.gen.write(" self.buffer.setPos(self.pos+1)\n")
DFA.gen.write(" self.NextCh()\n")
#DFA.gen.write(" ")
if state.firstAction is not None:
if endOf is None:
DFA.gen.write(" self.t.kind = Scanner.noSym\n");
DFA.gen.write(" done = True\n")
else:
DFA.gen.write(" self.t.kind = ")
DFA.PrintTermName(endOf)
DFA.gen.write( '\n' )
if endOf.tokenKind == Symbol.classLitToken:
DFA.gen.write(" self.t.val = buf\n")
DFA.gen.write(" self.CheckLiteral()\n");
DFA.gen.write(" return self.t\n");
else:
DFA.gen.write(" done = True\n")
else:
if endOf is None:
DFA.gen.write(" self.t.kind = Scanner.noSym\n")
DFA.gen.write(" done = True\n")
else:
DFA.gen.write(" self.t.kind = ")
DFA.PrintTermName(endOf)
DFA.gen.write('\n')
if endOf.tokenKind == Symbol.classLitToken:
DFA.gen.write(" self.t.val = buf")
DFA.gen.write(" self.CheckLiteral()\n")
DFA.gen.write(" return self.t\n")
else:
DFA.gen.write(" done = True\n")
@staticmethod
def FillStartTab(startTab):
assert isinstance( startTab, list )
action = DFA.firstState.firstAction
while action is not None:
targetState = action.target.state.nr; # int
if action.typ == Node.chr:
startTab[action.sym] = targetState
else:
s = CharClass.Set(action.sym) # BitSet
for i in xrange( 0, CharClass.charSetSize ):
if i in s:
startTab[i] = targetState
action = action.next
@staticmethod
def OpenGen(backUp):
assert isinstance(backUp,bool)
try:
fn = DFA.srcDir + "Scanner.py" # String
if backUp and os.path.exists(fn):
if os.path.exists(fn + '.old'):
os.remove( fn + '.old' )
os.rename( fn, fn + '.old' )
DFA.gen = file( fn, 'w' )
except:
raise RuntimeError("-- Compiler Error: Cannot generate scanner file.")
    @staticmethod
    def WriteScanner( withNames):
        '''Generate Scanner.py from Scanner.frame, filling the DFA tables,
        comment handlers and literal checks in at the frame's "-->" markers.
        withNames: also emit symbolic constants for terminals and pragmas.'''
        assert isinstance(withNames,bool)
        startTab = [ 0 for i in xrange(CharClass.charSetSize) ]
        # locate the frame file next to the grammar, else in Tab.frameDir
        fr = DFA.srcDir + "Scanner.frame" # String
        if not os.path.exists( fr ):
            if Tab.frameDir is not None:
                fr = os.path.join( Tab.frameDir.strip(), "Scanner.frame" )
            if not os.path.exists(fr):
                raise RuntimeError("-- Compiler Error: Cannot find Scanner.frame")
        try:
            DFA.fram = file( fr, 'r' )
        except:
            raise RuntimeError("-- Compiler Error: Cannot open Scanner.frame.")
        DFA.OpenGen(True)
        if DFA.dirtyDFA:
            # literals were merged in after determinization -> redo it
            DFA.MakeDeterministic( )
        DFA.FillStartTab(startTab)
        DFA.CopyFramePart( "-->begin" )
        if not DFA.srcName.lower( ).endswith( 'coco.atg' ):
            # user grammar: restart the file to drop the header copied so far
            DFA.gen.close()
            DFA.OpenGen(False)
        DFA.CopyFramePart("-->declarations")
        DFA.gen.write(" charSetSize = " + str(CharClass.charSetSize) + '\n')
        DFA.gen.write(" maxT = " + str(len(Symbol.terminals) - 1) + '\n')
        DFA.gen.write(" noSym = " + str(Tab.noSym.n) + '\n')
        if withNames:
            DFA.gen.write(" # terminals\n")
            for sym in Symbol.terminals:
                DFA.gen.write(" " + sym.symName + " = " + str(sym.n) + '\n')
            DFA.gen.write(" # pragmas\n")
            for sym in Symbol.pragmas:
                DFA.gen.write(" " + sym.symName + " = " + str(sym.n) + '\n')
        DFA.gen.write( '\n' )
        # emit the start-state table, 16 entries per line
        DFA.gen.write(" start = [\n")
        for i in xrange(0,CharClass.charSetSize / 16):   # py2 integer division
            DFA.gen.write(" ")
            for j in xrange(0,16):
                DFA.gen.write(Trace.formatString(str(startTab[16*i+j]), 3))
                DFA.gen.write(",")
            DFA.gen.write( '\n' )
        DFA.gen.write(" -1]\n")
        if DFA.ignoreCase:
            DFA.gen.write(" valCh = u'' # current input character (for token.val)")
        DFA.CopyFramePart("-->initialization")
        j = 0   # NOTE(review): unused before the loop below; kept as-is
        for i in Tab.ignored:
            DFA.gen.write(" self.ignore.add(" + str(i) + ") \n")
        DFA.CopyFramePart("-->casing")
        if DFA.ignoreCase:
            # generated NextCh() keeps the original char in valCh, lowers ch
            DFA.gen.write(" valCh = self.ch\n")
            DFA.gen.write(" if self.ch != Buffer.EOF:\n")
            DFA.gen.write(" self.ch = self.ch.lower()\n");
        DFA.CopyFramePart("-->comments")
        com = Comment.first # Comment
        i = 0
        while com is not None:
            DFA.GenComment(com, i)
            com = com.next
            i += 1
        DFA.CopyFramePart("-->literals")
        DFA.GenLiterals()
        DFA.CopyFramePart("-->scan1")
        if Comment.first!=None:
            # guard NextToken with the comment recognizers
            DFA.gen.write("if (")
            com = Comment.first
            i = 0
            while com is not None:
                DFA.gen.write(DFA.ChCond(com.start[0]))
                DFA.gen.write(" and self.Comment" + str(i) + "()")
                if com.next is not None:
                    DFA.gen.write(" or ")
                com = com.next
                i += 1
            DFA.gen.write("):\n")
            DFA.gen.write(" return self.NextToken()\n")
        if DFA.hasCtxMoves:
            DFA.gen.write('\n')
            DFA.gen.write(" apx = 0")
        DFA.CopyFramePart("-->scan2")
        # NOTE(review): both arms emit identical code; the ignoreCase arm
        # presumably intended valCh -- confirm against upstream CocoPy.
        if DFA.ignoreCase:
            DFA.gen.write("buf += unicode(self.ch)\n")
            DFA.gen.write(" self.NextCh()\n")
        else:
            DFA.gen.write("buf += unicode(self.ch)\n")
            DFA.gen.write(" self.NextCh()\n")
        DFA.CopyFramePart("-->scan3")
        # one elif branch per DFA state (state 0 is handled by the frame)
        state = DFA.firstState.next
        while state is not None:
            DFA.gen.write(" elif state == ")
            DFA.WriteState(state)
            state = state.next
        DFA.CopyFramePart("$$$")
        DFA.gen.close()
    @staticmethod
    def Init ( file, dir):
        '''Reset all DFA-wide state for a new compilation run.
        file: grammar source name; dir: output directory.
        NOTE: the parameter names shadow the builtins file/dir; kept for API
        stability.'''
        assert isinstance(file,str)
        assert isinstance(dir,str)
        DFA.srcName = file
        DFA.srcDir = dir
        DFA.firstState = None
        DFA.lastState = None
        State.lastNr = -1;
        # firstState reassigned: NewState links the fresh state into the list
        DFA.firstState = DFA.NewState()
        Melted.first = None
        Comment.first = None
        DFA.ignoreCase = False
        DFA.dirtyDFA = False
        DFA.hasCtxMoves = False
@staticmethod
def PrintTermName( sym):
assert isinstance(sym,Symbol)
if sym.symName is None:
DFA.gen.write(str(sym.n))
else:
DFA.gen.write("Scanner.")
DFA.gen.write(str(sym.symName)) | PypiClean |
/LinOTP-2.11.1.tar.gz/LinOTP-2.11.1/linotp/lib/HMAC.py | import hmac
import logging
import struct
from hashlib import sha1

import sys

# Collapse (major, minor) into a float like 2.7; used below to pick the
# HMAC call style for old Python 2 releases.
(ma, mi, _, _, _,) = sys.version_info
pver = float(int(ma) + int(mi) * 0.1)

log = logging.getLogger(__name__)
class HmacOtp():
    """HOTP one-time-password engine (RFC 4226) used by LinOTP tokens.

    Python 2 implementation: ``truncate`` indexes byte strings via ``ord``
    and ``checkOtp`` compares values through ``unicode``.
    """

    def __init__(self, secObj=None, counter=0, digits=6, hashfunc=sha1):
        # secObj: secret container exposing hmac_digest() (used when no raw
        # key is passed to hmac()); counter: HOTP moving factor; digits: OTP
        # length; hashfunc: underlying hash (sha1 per RFC 4226 default).
        self.secretObj = secObj
        self.counter = counter
        self.digits = digits
        self.hashfunc = hashfunc

    def hmac(self, counter=None, key=None):
        """Return the raw HMAC digest for the given counter value."""
        # NOTE(review): 'or' makes an explicit counter of 0 fall back to
        # self.counter -- presumably intended as a None-check; confirm callers.
        counter = counter or self.counter
        # HOTP packs the counter as an 8-byte big-endian block (RFC 4226 5.2)
        data_input = struct.pack(">Q", counter)
        if key is None:
            # delegate to the secret object so the key never leaves it
            dig = str(self.secretObj.hmac_digest(data_input,
                                                 hash_algo=self.hashfunc))
        else:
            if pver > 2.6:
                dig = hmac.new(key, data_input, self.hashfunc).digest()
            else:
                # hmac on Python <= 2.6 requires a plain str message
                dig = hmac.new(key, str(data_input), self.hashfunc).digest()
        return dig

    def truncate(self, digest):
        """Dynamic truncation (RFC 4226 5.3): derive the decimal OTP value."""
        # low nibble of the last byte selects the 4-byte window
        offset = ord(digest[-1:]) & 0x0f
        # big-endian 31-bit integer (top bit masked off to avoid sign issues)
        binary = (ord(digest[offset + 0]) & 0x7f) << 24
        binary |= (ord(digest[offset + 1]) & 0xff) << 16
        binary |= (ord(digest[offset + 2]) & 0xff) << 8
        binary |= (ord(digest[offset + 3]) & 0xff)
        return binary % (10 ** self.digits)

    def generate(self, counter=None, inc_counter=True, key=None):
        """Return the zero-padded OTP string for counter; optionally advance
        self.counter past it."""
        # NOTE(review): same 'or' caveat as hmac(): counter=0 uses self.counter
        counter = counter or self.counter
        otp = str(self.truncate(self.hmac(counter=counter, key=key)))
        """ fill in the leading zeros """
        sotp = (self.digits - len(otp)) * "0" + otp
        if inc_counter:
            self.counter = counter + 1
        return sotp

    def checkOtp(self, anOtpVal, window, symetric=False):
        """Search the counter window for anOtpVal; return the matching counter
        or -1.  symetric=True also searches backwards (TOTP-style)."""
        res = -1
        start = self.counter
        end = self.counter + window
        if symetric is True:
            # changed window/2 to window for TOTP
            start = self.counter - (window)
            start = 0 if (start < 0) else start
            end = self.counter + (window)
        for c in range(start , end):
            # note: generate() advances self.counter as a side effect
            otpval = self.generate(c)
            if (unicode(otpval) == unicode(anOtpVal)):
                res = c
                break
        #return -1 or the counter
        return res
#eof########################################################################## | PypiClean |
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dijit/_base/focus.js | define("dijit/_base/focus",["dojo/_base/array","dojo/dom","dojo/_base/lang","dojo/topic","dojo/_base/window","../focus",".."],function(_1,_2,_3,_4,_5,_6,_7){
// Minified AMD build artifact (do not hand-edit): dijit/_base/focus.
// Mixes legacy selection/bookmark helpers (isCollapsed, getBookmark,
// moveToBookmark, getFocus, register*/unregister*) into the dijit namespace
// (_7), delegating to the modern dijit/focus module (_6) where possible.
_3.mixin(_7,{_curFocus:null,_prevFocus:null,isCollapsed:function(){
return _7.getBookmark().isCollapsed;
},getBookmark:function(){
var bm,rg,tg,_8=_5.doc.selection,cf=_6.curNode;
if(_5.global.getSelection){
_8=_5.global.getSelection();
if(_8){
if(_8.isCollapsed){
tg=cf?cf.tagName:"";
if(tg){
tg=tg.toLowerCase();
if(tg=="textarea"||(tg=="input"&&(!cf.type||cf.type.toLowerCase()=="text"))){
_8={start:cf.selectionStart,end:cf.selectionEnd,node:cf,pRange:true};
return {isCollapsed:(_8.end<=_8.start),mark:_8};
}
}
bm={isCollapsed:true};
if(_8.rangeCount){
bm.mark=_8.getRangeAt(0).cloneRange();
}
}else{
rg=_8.getRangeAt(0);
bm={isCollapsed:false,mark:rg.cloneRange()};
}
}
}else{
if(_8){
tg=cf?cf.tagName:"";
tg=tg.toLowerCase();
if(cf&&tg&&(tg=="button"||tg=="textarea"||tg=="input")){
if(_8.type&&_8.type.toLowerCase()=="none"){
return {isCollapsed:true,mark:null};
}else{
rg=_8.createRange();
return {isCollapsed:rg.text&&rg.text.length?false:true,mark:{range:rg,pRange:true}};
}
}
bm={};
try{
rg=_8.createRange();
bm.isCollapsed=!(_8.type=="Text"?rg.htmlText.length:rg.length);
}
catch(e){
bm.isCollapsed=true;
return bm;
}
if(_8.type.toUpperCase()=="CONTROL"){
if(rg.length){
bm.mark=[];
var i=0,_9=rg.length;
while(i<_9){
bm.mark.push(rg.item(i++));
}
}else{
bm.isCollapsed=true;
bm.mark=null;
}
}else{
bm.mark=rg.getBookmark();
}
}else{
console.warn("No idea how to store the current selection for this browser!");
}
}
return bm;
},moveToBookmark:function(_a){
var _b=_5.doc,_c=_a.mark;
if(_c){
if(_5.global.getSelection){
var _d=_5.global.getSelection();
if(_d&&_d.removeAllRanges){
if(_c.pRange){
var n=_c.node;
n.selectionStart=_c.start;
n.selectionEnd=_c.end;
}else{
_d.removeAllRanges();
_d.addRange(_c);
}
}else{
console.warn("No idea how to restore selection for this browser!");
}
}else{
if(_b.selection&&_c){
var rg;
if(_c.pRange){
rg=_c.range;
}else{
if(_3.isArray(_c)){
rg=_b.body.createControlRange();
_1.forEach(_c,function(n){
rg.addElement(n);
});
}else{
rg=_b.body.createTextRange();
rg.moveToBookmark(_c);
}
}
rg.select();
}
}
}
},getFocus:function(_e,_f){
var _10=!_6.curNode||(_e&&_2.isDescendant(_6.curNode,_e.domNode))?_7._prevFocus:_6.curNode;
return {node:_10,bookmark:_10&&(_10==_6.curNode)&&_5.withGlobal(_f||_5.global,_7.getBookmark),openedForWindow:_f};
},_activeStack:[],registerIframe:function(_11){
return _6.registerIframe(_11);
},unregisterIframe:function(_12){
_12&&_12.remove();
},registerWin:function(_13,_14){
return _6.registerWin(_13,_14);
},unregisterWin:function(_15){
_15&&_15.remove();
}});
// dijit.focus(handle): restore focus to a node and, when possible, the
// remembered selection bookmark captured by getFocus().
_6.focus=function(_16){
if(!_16){
return;
}
var _17="node" in _16?_16.node:_16,_18=_16.bookmark,_19=_16.openedForWindow,_1a=_18?_18.isCollapsed:false;
if(_17){
var _1b=(_17.tagName.toLowerCase()=="iframe")?_17.contentWindow:_17;
if(_1b&&_1b.focus){
try{
_1b.focus();
}
catch(e){
}
}
_6._onFocusNode(_17);
}
if(_18&&_5.withGlobal(_19||_5.global,_7.isCollapsed)&&!_1a){
if(_19){
_19.focus();
}
try{
_5.withGlobal(_19||_5.global,_7.moveToBookmark,null,[_18]);
}
catch(e2){
}
}
};
// Bridge the modern watch/event API of dijit/focus to the legacy
// dojo.publish topics (focusNode, widgetBlur, widgetFocus).
_6.watch("curNode",function(_1c,_1d,_1e){
_7._curFocus=_1e;
_7._prevFocus=_1d;
if(_1e){
_4.publish("focusNode",_1e);
}
});
_6.watch("activeStack",function(_1f,_20,_21){
_7._activeStack=_21;
});
_6.on("widget-blur",function(_22,by){
_4.publish("widgetBlur",_22,by);
});
_6.on("widget-focus",function(_23,by){
_4.publish("widgetFocus",_23,by);
});
return _7;
});
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/localization/ar/TeX.js | MathJax.Localization.addTranslation("ar","TeX",{version:"2.7.9",isLoaded:true,strings:{ExtraOpenMissingClose:"\u0642\u0648\u0633 \u0645\u0641\u062A\u0648\u062D \u0625\u0636\u0627\u0641\u064A \u0623\u0648 \u063A\u0644\u0642 \u0642\u0648\u0633 \u0645\u0641\u0642\u0648\u062F",ExtraCloseMissingOpen:"\u0642\u0648\u0633 \u0645\u063A\u0644\u0642 \u0625\u0636\u0627\u0641\u064A \u0623\u0648 \u0641\u062A\u062D \u0642\u0648\u0633 \u0645\u0641\u0642\u0648\u062F",MissingLeftExtraRight:"\u064A\u0633\u0627\u0631 /\u0645\u0641\u0642\u0648\u062F \u0623\u0648 \u064A\u0645\u064A\u0646 /\u0625\u0636\u0627\u0641\u064A",ExtraLeftMissingRight:"\u064A\u0633\u0627\u0631 /\u0625\u0636\u0627\u0641\u064A \u0623\u0648 \u064A\u0645\u064A\u0646 /\u0645\u0641\u0642\u0648\u062F",Misplaced:"%1 \u0641\u064A \u063A\u064A\u0631 \u0645\u062D\u0644\u0647",AmbiguousUseOf:"\u0627\u0633\u062A\u062E\u062F\u0627\u0645 \u063A\u0627\u0645\u0636 \u0644%1",EnvBadEnd:"\\\u0628\u062F\u0627\u064A\u0629{%1} \u064A\u0646\u062A\u0647\u064A \u0628 \\\u0646\u0647\u0627\u064A\u0629{%2}",EnvMissingEnd:"\u0646\u0647\u0627\u064A\u0629\\\u0645\u0641\u0642\u0648\u062F\u0629{%1}",MissingBoxFor:"\u0635\u0646\u062F\u0648\u0642 \u0645\u0641\u0642\u0648\u062F \u0644 %1",UndefinedControlSequence:"%1 \u062A\u0633\u0644\u0633\u0644 \u0645\u0631\u0627\u0642\u0628\u0629 \u063A\u064A\u0631 \u0645\u0639\u0631\u0648\u0641",DoubleExponent:"\u0623\u0633 \u0645\u0632\u062F\u0648\u062C: \u0627\u0633\u062A\u062E\u062F\u0645 \u0627\u0644\u0623\u0642\u0648\u0627\u0633 \u0644\u0644\u062A\u0648\u0636\u064A\u062D",DoubleSubscripts:"\u0627\u0644\u062D\u0631\u0648\u0641 \u0627\u0644\u0633\u0641\u0644\u064A\u0629 \u0645\u0632\u062F\u0648\u062C\u0629: \u0627\u0633\u062A\u062E\u062F\u0645 \u0627\u0644\u0623\u0642\u0648\u0627\u0633 \u0644\u0644\u062A\u0648\u0636\u064A\u062D",DoubleExponentPrime:"\u0639\u062F\u062F \u0623\u0648\u0644\u064A 
\u064A\u0633\u0628\u0628 \u0623\u0633 \u0645\u0632\u062F\u0648\u062C: \u0627\u0633\u062A\u062E\u062F\u0645 \u0627\u0644\u0623\u0642\u0648\u0627\u0633 \u0644\u0644\u062A\u0648\u0636\u064A\u062D",CantUseHash1:"\u0644\u0627 \u064A\u0645\u0643\u0646\u0643 \u0627\u0633\u062A\u062E\u062F\u0627\u0645 '\u0627\u0644\u062D\u0631\u0641 \u0627\u0644\u0645\u0639\u0644\u0645 \u0627\u0644\u0643\u0644\u064A#' \u0641\u064A \u0648\u0636\u0639 \u0627\u0644\u0631\u064A\u0627\u0636\u064A\u0627\u062A",MisplacedMiddle:"%1 \u064A\u062C\u0628 \u0623\u0646 \u064A\u0643\u0648\u0646 \u0636\u0645\u0646 \\\u064A\u0633\u0627\u0631 \u0648\\\u064A\u0645\u064A\u0646",MisplacedLimits:"%1 \u0645\u0633\u0645\u0648\u062D \u0641\u0642\u0637 \u0639\u0644\u0649 \u0627\u0644\u0645\u0634\u063A\u0644\u064A\u0646",MisplacedMoveRoot:"%1 \u064A\u0645\u0643\u0646 \u0623\u0646 \u062A\u0638\u0647\u0631 \u0641\u0642\u0637 \u0641\u064A \u0627\u0644\u062C\u0630\u0631",MultipleCommand:"%1 \u0645\u062A\u0639\u062F\u062F",NotMathMLToken:"%1 \u0644\u064A\u0633 \u0639\u0646\u0635\u0631\u064B\u0627 \u0631\u0645\u0632\u064A\u064B\u0627",UnknownAttrForElement:"%1 \u0644\u064A\u0633\u062A \u0633\u0645\u0629 \u0645\u0639\u062A\u0631\u0641 \u0628\u0647\u0627 \u0644%2",ExtraAlignTab:"\u0645\u062D\u0627\u0630\u0627\u0629 \u0639\u0644\u0627\u0645\u0629 \u0627\u0644\u062A\u0628\u0648\u064A\u0628 \u0625\u0636\u0627\u0641\u064A\u0629 \u0641\u064A \\\u062D\u0627\u0644\u0627\u062A \u0627\u0644\u0646\u0635\u0648\u0635",InvalidEnv:"\u0627\u0633\u0645 \u0628\u064A\u0626\u0629 \u063A\u064A\u0631 \u0635\u0627\u0644\u062D '%1'",UnknownEnv:"\u0628\u064A\u0626\u0629 \u063A\u064A\u0631 \u0645\u0639\u0631\u0648\u0641\u0629 '%1'",ExtraCloseLooking:"\u063A\u0644\u0642 \u0642\u0648\u0633 \u0625\u0636\u0627\u0641\u064A \u0623\u062B\u0646\u0627\u0621 \u0627\u0644\u0628\u062D\u062B \u0639\u0646 %1",MissingOrUnrecognizedDelim:"\u0645\u062D\u062F\u062F \u0645\u0641\u0642\u0648\u062F \u0623\u0648 \u063A\u064A\u0631 \u0645\u0639\u062A\u0631\u0641 
\u0628\u0647 \u0644 %1",MissingDimOrUnits:"\u0628\u0639\u062F (\u0623\u0648 \u0648\u062D\u062F\u0627\u062A\u0647) \u0645\u0641\u0642\u0648\u062F \u0644 %1",TokenNotFoundForCommand:"\u0644\u0645 \u064A\u062A\u0645 \u0627\u064A\u062C\u0627\u062F %1 \u0625\u0644\u064A %2",MathNotTerminated:"\u0627\u0644\u0631\u064A\u0627\u0636\u064A\u0627\u062A \u0644\u0627 \u062A\u0646\u062A\u0647\u064A \u0641\u064A \u0645\u0631\u0628\u0639 \u0627\u0644\u0646\u0635",IllegalMacroParam:"\u0645\u0631\u062C\u0639 \u0645\u0639\u0644\u0645 \u0643\u0644\u064A \u063A\u064A\u0631 \u0642\u0627\u0646\u0648\u0646\u064A",MaxBufferSize:"\u062A\u0645 \u062A\u062C\u0627\u0648\u0632 \u062D\u062C\u0645 \u0627\u0644\u0645\u062E\u0632\u0646 \u0627\u0644\u0645\u0624\u0642\u062A \u0627\u0644\u062F\u0627\u062E\u0644\u064A \u0644\u0645\u0627\u062B \u062C\u0627\u0643\u0633. \u0647\u0644 \u0647\u0646\u0627\u0643 \u0627\u0633\u062A\u062F\u0639\u0627\u0621 \u0645\u0627\u0643\u0631\u0648 \u0645\u0643\u0631\u0631\u061F",CommandNotAllowedInEnv:"%1 \u063A\u064A\u0631 \u0645\u0633\u0645\u0648\u062D \u0641\u064A \u0628\u064A\u0626\u0629 %2",MultipleLabel:"\u062A\u062D\u062F\u064A\u062F \u0645\u0636\u0627\u0639\u0641 \u0644\u062A\u0633\u0645\u064A\u0629 '%1'",CommandAtTheBeginingOfLine:"\u064A\u062C\u0628 \u0623\u0646 \u064A\u0623\u062A\u064A \u0641\u064A \u0628\u062F\u0627\u064A\u0629 \u0627\u0644\u0633\u0637\u0631 %1",IllegalAlign:"\u0645\u062D\u0627\u0630\u0627\u0629 \u063A\u064A\u0631 \u0642\u0627\u0646\u0648\u0646\u064A\u0629 \u0645\u062D\u062F\u062F\u0629 \u0641\u064A %1",BadMathStyleFor:"\u0646\u0645\u0637 \u0631\u064A\u0627\u0636\u064A \u0633\u064A\u0621 \u0644 %1",MultlineRowsOneCol:"\u0627\u0644\u0635\u0641\u0648\u0641 \u0636\u0645\u0646 \u0628\u064A\u0626\u0629 %1 \u064A\u062C\u0628 \u0623\u0646 \u062A\u062D\u062A\u0648\u064A \u0639\u0644\u0649 \u0639\u0645\u0648\u062F \u0648\u0627\u062D\u062F \u0628\u0627\u0644\u0636\u0628\u0637",MultipleBBoxProperty:"%1 \u0645\u062D\u062F\u062F 
\u0645\u0631\u062A\u064A\u0646 \u0641\u064A %2",ExtraEndMissingBegin:"%1 \u0625\u0636\u0627\u0641\u064A \u0623\u0648 \u0645\u0641\u0642\u0648\u062F \\begingroup",GlobalNotFollowedBy:"%1 \u0644\u0645 \u064A\u062A\u0645 \u062A\u062A\u0628\u0639\u0647 \\let, \\def, \u0623\u0648 \\newcommand",UndefinedColorModel:"\u0646\u0645\u0648\u0630\u062C \u0627\u0644\u0644\u0648\u0646 '%1' \u063A\u064A\u0631 \u0645\u0639\u0631\u0641",ModelArg1:"\u0642\u064A\u0645 \u0627\u0644\u0644\u0648\u0646 \u0644\u0644\u0646\u0645\u0648\u0630\u062C %1 \u062A\u062A\u0637\u0644\u0628 3 \u0623\u0631\u0642\u0627\u0645",InvalidDecimalNumber:"\u0639\u062F\u062F \u0639\u0634\u0631\u064A \u063A\u064A\u0631 \u0635\u0627\u0644\u062D",ModelArg2:"\u064A\u062C\u0628 \u0623\u0646 \u062A\u0643\u0648\u0646 \u0642\u064A\u0645 \u0627\u0644\u0644\u0648\u0646 \u0644\u0644\u0646\u0645\u0648\u0630\u062C %1 \u0628\u064A\u0646 %2 \u0648%3",InvalidNumber:"\u0639\u062F\u062F \u063A\u064A\u0631 \u0635\u0627\u0644\u062D",NoClosingChar:"\u0644\u0627 \u064A\u0645\u0643\u0646 \u0627\u0644\u0639\u062B\u0648\u0631 \u0639\u0644\u0649 \u0625\u063A\u0644\u0627\u0642 %1",IllegalParamNumber:"\u0639\u062F\u062F \u063A\u064A\u0631 \u0645\u0634\u0631\u0648\u0639 \u0644\u0644\u0645\u0639\u0627\u064A\u064A\u0631 \u0627\u0644\u0645\u062D\u062F\u062F\u0629 \u0641\u064A %1",CantUseHash2:"\u0627\u0633\u062A\u062E\u062F\u0627\u0645 \u063A\u064A\u0631 \u0627\u0644\u0645\u0634\u0631\u0648\u0639 \u0644# \u0641\u064A \u0642\u0627\u0644\u0628 \u0644%1",SequentialParam:"\u0645\u0639\u0644\u0645\u0627\u062A %1 \u064A\u062C\u0628 \u0623\u0646 \u064A\u062A\u0645 \u062A\u0631\u0642\u064A\u0645\u0647\u0627 \u0628\u0627\u0644\u062A\u0633\u0644\u0633\u0644",MissingReplacementString:"\u0633\u0644\u0633\u0644\u0629 \u0628\u062F\u064A\u0644\u0629 \u0644\u062A\u0639\u0631\u064A\u0641 %1 \u0645\u0641\u0642\u0648\u062F\u0629",MismatchUseDef:"\u0627\u0633\u062A\u062E\u062F\u0627\u0645 %1 \u0644\u0627 \u064A\u062A\u0637\u0627\u0628\u0642 \u0645\u0639 
\u062A\u0639\u0631\u064A\u0641\u0647",NoClosingDelim:"\u0644\u0627 \u064A\u0645\u0643\u0646 \u0627\u0644\u0639\u062B\u0648\u0631 \u0639\u0644\u0649 \u0625\u063A\u0644\u0627\u0642 \u0645\u062D\u062F\u062F \u0644%1"}});MathJax.Ajax.loadComplete("[MathJax]/localization/ar/TeX.js"); | PypiClean |
/Misago-0.36.1.tar.gz/Misago-0.36.1/misago/static/misago/admin/momentjs/ml.js |
;(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined'
&& typeof require === 'function' ? factory(require('../moment')) :
typeof define === 'function' && define.amd ? define(['../moment'], factory) :
factory(global.moment)
}(this, (function (moment) { 'use strict';
var ml = moment.defineLocale('ml', {
months : 'ജനുവരി_ഫെബ്രുവരി_മാർച്ച്_ഏപ്രിൽ_മേയ്_ജൂൺ_ജൂലൈ_ഓഗസ്റ്റ്_സെപ്റ്റംബർ_ഒക്ടോബർ_നവംബർ_ഡിസംബർ'.split('_'),
monthsShort : 'ജനു._ഫെബ്രു._മാർ._ഏപ്രി._മേയ്_ജൂൺ_ജൂലൈ._ഓഗ._സെപ്റ്റ._ഒക്ടോ._നവം._ഡിസം.'.split('_'),
monthsParseExact : true,
weekdays : 'ഞായറാഴ്ച_തിങ്കളാഴ്ച_ചൊവ്വാഴ്ച_ബുധനാഴ്ച_വ്യാഴാഴ്ച_വെള്ളിയാഴ്ച_ശനിയാഴ്ച'.split('_'),
weekdaysShort : 'ഞായർ_തിങ്കൾ_ചൊവ്വ_ബുധൻ_വ്യാഴം_വെള്ളി_ശനി'.split('_'),
weekdaysMin : 'ഞാ_തി_ചൊ_ബു_വ്യാ_വെ_ശ'.split('_'),
longDateFormat : {
LT : 'A h:mm -നു',
LTS : 'A h:mm:ss -നു',
L : 'DD/MM/YYYY',
LL : 'D MMMM YYYY',
LLL : 'D MMMM YYYY, A h:mm -നു',
LLLL : 'dddd, D MMMM YYYY, A h:mm -നു'
},
calendar : {
sameDay : '[ഇന്ന്] LT',
nextDay : '[നാളെ] LT',
nextWeek : 'dddd, LT',
lastDay : '[ഇന്നലെ] LT',
lastWeek : '[കഴിഞ്ഞ] dddd, LT',
sameElse : 'L'
},
relativeTime : {
future : '%s കഴിഞ്ഞ്',
past : '%s മുൻപ്',
s : 'അൽപ നിമിഷങ്ങൾ',
ss : '%d സെക്കൻഡ്',
m : 'ഒരു മിനിറ്റ്',
mm : '%d മിനിറ്റ്',
h : 'ഒരു മണിക്കൂർ',
hh : '%d മണിക്കൂർ',
d : 'ഒരു ദിവസം',
dd : '%d ദിവസം',
M : 'ഒരു മാസം',
MM : '%d മാസം',
y : 'ഒരു വർഷം',
yy : '%d വർഷം'
},
meridiemParse: /രാത്രി|രാവിലെ|ഉച്ച കഴിഞ്ഞ്|വൈകുന്നേരം|രാത്രി/i,
meridiemHour : function (hour, meridiem) {
if (hour === 12) {
hour = 0;
}
if ((meridiem === 'രാത്രി' && hour >= 4) ||
meridiem === 'ഉച്ച കഴിഞ്ഞ്' ||
meridiem === 'വൈകുന്നേരം') {
return hour + 12;
} else {
return hour;
}
},
meridiem : function (hour, minute, isLower) {
if (hour < 4) {
return 'രാത്രി';
} else if (hour < 12) {
return 'രാവിലെ';
} else if (hour < 17) {
return 'ഉച്ച കഴിഞ്ഞ്';
} else if (hour < 20) {
return 'വൈകുന്നേരം';
} else {
return 'രാത്രി';
}
}
});
return ml;
}))); | PypiClean |
/MyGaiaDB-0.3-cp310-cp310-win_amd64.whl/mygaiadb/query/callbacks.py | import inspect
import warnings

import numpy as np

from ..utils import radec_to_ecl

# Optional dependencies: record availability in module-level flags instead of
# failing at import time; callbacks needing them raise when constructed.
try:
    from zero_point import zpt

    _have_zpt = True
except ImportError:
    _have_zpt = False
try:
    import mwdust

    _have_mwdust = True
except ImportError:
    _have_mwdust = False
try:
    from galpy.orbit import Orbit
    from galpy.util.coords import radec_to_lb

    _have_galpy = True
except ImportError:
    _have_galpy = False
class QueryCallback:
    """
    Callback to add a new column to a SQL query on the fly.
    """

    def __init__(self, new_col_name, func):
        """
        INPUT:
            new_col_name (string): Name of the new column you want to add
            func (function): function that maps query columns to the new column; the
                argument names of this function must match column names in the query
        """
        self.new_col_name = new_col_name
        self.func = func
        # FullArgSpec.args is the list of positional argument names; these are
        # the query columns this callback consumes.  (Previously written as
        # list(getfullargspec(...))[0], which relied on the namedtuple's field
        # order instead of naming the field.)
        self.required_col = inspect.getfullargspec(self.func).args
class ZeroPointCallback(QueryCallback):
    def __init__(self, new_col_name="parallax_w_zp"):
        """
        Callback to use ``gaiadr3_zeropoint`` to get zero-point corrected parallax

        INPUT:
            new_col_name (string): Name of the new column you want to add
        """
        # Fail fast before doing any other setup if the optional dependency
        # is missing (previously the base class was initialised first).
        if not _have_zpt:
            raise RuntimeError(
                "You need to have gaiadr3_zeropoint package installed to use this callback. Please see: https://gitlab.com/icc-ub/public/gaiadr3_zeropoint"
            )
        super().__init__(new_col_name, self.parallax_zp_func)
        zpt.load_tables()

    @staticmethod
    def parallax_zp_func(
        ra,
        dec,
        parallax,
        phot_bp_mean_mag,
        nu_eff_used_in_astrometry,
        pseudocolour,
        astrometric_params_solved,
    ):
        """Return parallax minus the Gaia DR3 zero-point for each source."""
        # Record (and thereby silence) UserWarnings emitted by the coordinate
        # conversion / zero-point lookup; the recorded list is intentionally
        # discarded.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always", UserWarning)
            ect_lon, ect_lat = radec_to_ecl(ra, dec)
            return parallax - zpt.get_zpt(
                phot_bp_mean_mag,
                nu_eff_used_in_astrometry,
                pseudocolour,
                ect_lat,
                astrometric_params_solved,
            )
class DustCallback(QueryCallback):
    def __init__(self, new_col_name="sfd_ebv", filter=None, dustmap="SFD"):
        """
        Callback to use ``mwdust`` to get extinction

        INPUT:
            new_col_name (string): Name of the new column you want to add
            filter (string): extinction in which filter, see mwdust
            dustmap (string): which dust map to use, currently only supporting "SFD"
        """
        if not _have_mwdust:
            raise RuntimeError(
                "You need to have mwdust package installed to use this callback. Please see: https://github.com/jobovy/mwdust"
            )
        if not _have_galpy:
            raise RuntimeError(
                "You need to have galpy package installed to use this callback. Please see: https://github.com/jobovy/galpy"
            )
        self.filter = filter
        if dustmap.lower() == "sfd":
            self.sfd = mwdust.SFD(filter=self.filter, noloop=True)
        else:
            # Previously an unsupported map silently left ``self.sfd`` unset
            # and failed later with AttributeError; fail early instead.
            raise ValueError(
                "dustmap '%s' is not supported, currently only 'SFD' is" % dustmap
            )
        # The lambda hides ``self`` so that QueryCallback's argument
        # inspection sees exactly the query columns (ra, dec).
        self._func_wrapped = lambda ra, dec: self.sfd_ebv_func(ra, dec)
        super().__init__(new_col_name, self._func_wrapped)

    def sfd_ebv_func(self, ra, dec):
        """Evaluate the dust map at the given equatorial coordinates (degrees)."""
        lb = radec_to_lb(ra, dec, degree=True)
        l, b = lb[:, 0], lb[:, 1]
        # Evaluated at a nominal distance of 1 for every source; presumably
        # the SFD map is distance-independent -- confirm against mwdust docs.
        return self.sfd(l, b, np.ones_like(l))
class OrbitsCallback(QueryCallback):
    def __init__(self, new_col_name="e"):
        """
        Callback to use ``galpy`` to set up an Orbit from Gaia astrometry

        INPUT:
            new_col_name (string): Name of the new column you want to add
        """
        if not _have_galpy:
            raise RuntimeError(
                "You need to have galpy package installed to use this callback. Please see: https://github.com/jobovy/galpy"
            )
        # galpy solar parameters: ro (distance scale, kpc), vo (velocity
        # scale, km/s) and zo (Sun's height above the plane, kpc).
        _r0 = 8.23
        _v0 = 249.44
        _z0 = 0.0208
        # Distance is taken as 1/parallax; assumes parallax in mas giving
        # distance in kpc -- TODO confirm against the query's units.
        self._func_wrapped = (
            lambda ra, dec, pmra, pmdec, parallax, radial_velocity: Orbit(
                [
                    ra,
                    dec,
                    (1 / parallax),
                    pmra,
                    pmdec,
                    radial_velocity,
                ],
                radec=True,
                ro=_r0,
                vo=_v0,
                zo=_z0,
            )
        )
        super().__init__(new_col_name, self._func_wrapped)
    # NOTE: a copy-pasted ``sfd_ebv_func`` method was removed here; it
    # referenced ``self.sfd`` which this class never defines, so every call
    # raised AttributeError (dead, broken code inherited from DustCallback).
/Cubane-1.0.11.tar.gz/Cubane-1.0.11/cubane/backend/static/cubane/backend/tinymce/js/tinymce/plugins/autoresize/plugin.min.js | !function(){"use strict";var t=function(e){var n=e,i=function(){return n};return{get:i,set:function(t){n=t},clone:function(){return t(i())}}},e=tinymce.util.Tools.resolve("tinymce.PluginManager"),n=tinymce.util.Tools.resolve("tinymce.Env"),i=tinymce.util.Tools.resolve("tinymce.util.Delay"),o=function(t){return parseInt(t.getParam("autoresize_min_height",t.getElement().offsetHeight),10)},r=function(t){return parseInt(t.getParam("autoresize_max_height",0),10)},a=function(t){return t.getParam("autoresize_overflow_padding",1)},u=function(t){return t.getParam("autoresize_bottom_margin",50)},s=function(t){return t.getParam("autoresize_on_init",!0)},l=function(t,e,n,o,r){i.setEditorTimeout(t,function(){c(t,e),n--?l(t,e,n,o,r):r&&r()},o)},g=function(t,e){var n=t.getBody();n&&(n.style.overflowY=e?"":"hidden",e||(n.scrollTop=0))},c=function(t,e){var i,a,u,s,l,f,d,m,p,y,h,v=t.dom;if(a=t.getDoc())if((S=t).plugins.fullscreen&&S.plugins.fullscreen.isFullscreen())g(t,!0);else{var S;u=a.body,s=o(t),f=v.getStyle(u,"margin-top",!0),d=v.getStyle(u,"margin-bottom",!0),m=v.getStyle(u,"padding-top",!0),p=v.getStyle(u,"padding-bottom",!0),y=v.getStyle(u,"border-top-width",!0),h=v.getStyle(u,"border-bottom-width",!0),l=u.offsetHeight+parseInt(f,10)+parseInt(d,10)+parseInt(m,10)+parseInt(p,10)+parseInt(y,10)+parseInt(h,10),(isNaN(l)||l<=0)&&(l=n.ie?u.scrollHeight:n.webkit&&0===u.clientHeight?0:u.offsetHeight),l>o(t)&&(s=l);var _=r(t);_&&l>_?(s=_,g(t,!0)):g(t,!1),s!==e.get()&&(i=s-e.get(),v.setStyle(t.iframeElement,"height",s+"px"),e.set(s),n.webkit&&i<0&&c(t,e))}},f={setup:function(t,e){t.on("init",function(){var e,n,i=t.dom;e=a(t),n=u(t),!1!==e&&i.setStyles(t.getBody(),{paddingLeft:e,paddingRight:e}),!1!==n&&i.setStyles(t.getBody(),{paddingBottom:n})}),t.on("nodechange setcontent keyup 
FullscreenStateChanged",function(n){c(t,e)}),s(t)&&t.on("init",function(){l(t,e,20,100,function(){l(t,e,5,1e3)})})},resize:c},d=function(t,e){t.addCommand("mceAutoResize",function(){f.resize(t,e)})};e.add("autoresize",function(e){if(!e.inline){var n=t(0);d(e,n),f.setup(e,n)}})}(); | PypiClean |
/Homevee_Dev-0.0.0.0-py3-none-any.whl/Homevee/Item/Device/Thermostat/RademacherThermostat.py | import json
import traceback
import urllib.request
from Homevee.Exception import DatabaseSaveFailedException
from Homevee.Helper import Logger
from Homevee.Item.Device.Thermostat import Thermostat
from Homevee.Item.Gateway import *
from Homevee.Utils.Database import Database
from Homevee.Utils.DeviceTypes import RADEMACHER_THERMOSTAT
class RademacherThermostat(Thermostat):
    """Rademacher HomePilot thermostat.

    Rows are persisted in the HOMEPILOT_THERMOSTATS table; temperature
    changes are sent to the HomePilot gateway's ``deviceajax.do`` HTTP
    endpoint.
    """

    def __init__(self, name, icon, location, id=None, temp=None):
        super(RademacherThermostat, self).__init__(name, icon, location, id=id, temp=temp)

    def get_device_type(self):
        # Device-type constant used elsewhere to dispatch on thermostat kinds.
        return RADEMACHER_THERMOSTAT

    def delete(self, db=None):
        """Delete this thermostat's row; returns True on success, False on any error."""
        try:
            Database.delete("DELETE FROM HOMEPILOT_THERMOSTATS WHERE ID == :id", {'id': self.id}, db)
            return True
        except:
            # NOTE(review): bare except swallows all errors to honour the
            # boolean-result contract used throughout this class.
            return False

    def get_min_max(self):
        """Supported target-temperature range as ``(min, max)``; presumably
        degrees Celsius -- confirm against the gateway documentation."""
        return 4, 28

    def save_to_db(self, db=None):
        """Insert this thermostat if new, otherwise update the existing row.

        Raises:
            DatabaseSaveFailedException: if either statement fails.
        """
        try:
            # INSERT OR IGNORE creates the row only when the ID is new ...
            Database.insert("""INSERT OR IGNORE INTO HOMEPILOT_THERMOSTATS (ID, NAME, ICON, LAST_TEMP, ROOM) VALUES
                (:id, :name, :icon, :last_temp, :room)""",
                            {'id': self.id, 'name': self.name, 'icon': self.icon,
                             'last_temp': self.temp, 'room': self.location}, db)
            # ... and the UPDATE refreshes an already-existing row.
            Database.update("""UPDATE OR IGNORE HOMEPILOT_THERMOSTATS SET NAME = :name, ICON = :icon,
                LAST_TEMP = :last_temp, ROOM = :room WHERE ID = :id""",
                            {'name': self.name, 'icon': self.icon, 'last_temp': self.temp,
                             'room': self.location, 'id': self.id}, db)
            # TODO add generated id to object
        except:
            if(Logger.IS_DEBUG):
                traceback.print_exc()
            raise DatabaseSaveFailedException("Could not save homepilot-thermostat to database")

    def build_dict(self):
        """Serialise this thermostat to a plain dict (e.g. for JSON output)."""
        dict = {
            'name': self.name,
            'icon': self.icon,
            'id': self.id,
            'temp': self.temp,
            'location': self.location
        }
        return dict

    def update_temp(self, temp, db=None):
        """Send a new target temperature to the gateway; True on success.

        Returns False both on HTTP/parse errors and when the gateway does
        not answer with status 'uisuccess'.
        """
        try:
            gateway = Item.load_from_db(Gateway, RADEMACHER_HOMEPILOT, db)
            # The gateway expects the temperature in tenths of a degree.
            value = int(float(temp) * 10)
            url = "http://" + gateway.ip + "/deviceajax.do?cid=9&did=" + str(self.id) + "&goto=" + str(value) + "&command=0"
            response = urllib.request.urlopen(url).read()
            data = json.loads(response)
            if (data['status'] != 'uisuccess'):
                return False
            return True
        except:
            if Logger.IS_DEBUG:
                traceback.print_exc()
            return False

    @staticmethod
    def load_all_ids_from_db(ids, db=None):
        """Load the thermostats whose IDs are contained in ``ids``."""
        # Placeholders are expanded to one '?' per requested ID.
        return RademacherThermostat.load_all_from_db('SELECT * FROM HOMEPILOT_THERMOSTATS WHERE ID IN (%s)'
                                                     % ','.join('?' * len(ids)), ids, db)

    @staticmethod
    def load_all_from_db(query, params, db=None):
        """Run ``query`` and build one RademacherThermostat per result row."""
        items = []
        for result in Database.select_all(query, params, db):
            item = RademacherThermostat(result['NAME'], result['ICON'], result['ROOM'], result['ID'],
                                        result['LAST_TEMP'])
            items.append(item)
        return items

    @staticmethod
    def load_all(db=None):
        """Load every thermostat stored in the database."""
        return RademacherThermostat.load_all_from_db('SELECT * FROM HOMEPILOT_THERMOSTATS', {}, db)

    @staticmethod
    def create_from_dict(dict):
        """Inverse of build_dict().

        Raises:
            InvalidParametersException: if any expected key is missing.
        """
        try:
            name = dict['name']
            id = dict['id']
            location = dict['location']
            temp = dict['temp']
            icon = dict['icon']
            item = RademacherThermostat(name, icon, location, id, temp)
            return item
        except:
            raise InvalidParametersException("Invalid parameters for RademacherThermostat.create_from_dict()")
/Demoize-1.3.0.tar.gz/Demoize-1.3.0/demoize/demoize.py | import ast
import copy
import time
from astor import codegen
from web.server import run_app
from selenium import webdriver
from pygments import highlight
from pygments.lexers import get_lexer_by_name, get_lexer_for_filename
from pygments.formatters import HtmlFormatter
from selenium.common.exceptions import NoSuchElementException
class DemoServer(object):
    """Serves a syntax-highlighted copy of a script for a live demo.

    NOTE: this module is Python 2 code (print statements).
    """

    def start_server(self, script_file, language=None, port=5000, **params):
        """Render ``script_file`` with Pygments and serve it on ``port``.

        language: explicit Pygments lexer name; when omitted, the lexer is
            guessed from the filename.
        Extra keyword params are forwarded to the web app.
        """
        print "Starting server with code file at %s" % script_file
        lexer_opts = {
            'stripall': True
        }
        if language:
            lexer = get_lexer_by_name(language, **lexer_opts)
        else:
            lexer = get_lexer_for_filename(script_file, **lexer_opts)
        # linespans="line" emits per-line <span id="line-N"> anchors, which
        # the Demo driver uses to highlight individual lines in the browser.
        formatter = HtmlFormatter(linenos=True, linespans="line",
                                  anchorlinenos="aline")
        code = ''
        with open(script_file, 'r') as f:
            code = f.read()
        result = highlight(code, lexer, formatter)
        params['code'] = result
        run_app(port=port, **params)
class Demo(object):
    """Drives a browser through a line-by-line walkthrough of a script
    served by DemoServer, while also executing the script."""

    def __init__(self, script_file, port=5000, browser="chrome",
                 extra_globals=None, coords=None, sleep=None):
        """
        script_file: path of the script to demo (parsed and executed).
        port: port on which the DemoServer is listening.
        browser: "chrome", "safari", anything else selects Firefox.
        extra_globals: extra names injected into the executed script.
        coords: optional dict with 'w'/'h' (window size) and 'x'/'y'
            (window position).
        sleep: optional delay in seconds after each highlighted line.
        """
        # Normalise None defaults here to avoid mutable default arguments.
        if coords is None:
            coords = {}
        if extra_globals is None:
            extra_globals = {}
        self.extra_globals = extra_globals
        self.script_file = script_file
        self.sleep = sleep
        print "Starting demo"
        self.url = "http://localhost:%s" % port
        if browser == "chrome":
            self.driver = webdriver.Chrome()
        elif browser == "safari":
            self.driver = webdriver.Safari()
        else:
            self.driver = webdriver.Firefox()
        if 'w' in coords and 'h' in coords:
            self.driver.set_window_size(coords['w'], coords['h'])
        if 'x' in coords and 'y' in coords:
            self.driver.set_window_position(coords['x'], coords['y'])
        self.driver.get(self.url)
        # 1-based line currently highlighted; 0 means "before the start".
        self.cur_line = 0

    def goto_line(self, line=1):
        """Highlight ``line``; raises NoSuchElementException if it is absent."""
        self.driver.find_element_by_id("line-%s" % line)
        self.highlight_line(line)
        if self.sleep:
            time.sleep(self.sleep)

    def highlight_line(self, line):
        """Move the Pygments 'hll' highlight class onto ``line`` and scroll it
        into view."""
        print "Highlighting line %s" % line
        script = """(function() {
$(".hll").removeClass("hll");
$("#line-%s").addClass("hll");
$("a[href='#-%s']").addClass("hll");
})()""" % (line, line)
        # Collapse the snippet to one line; whitespace is insignificant here.
        script = script.replace(" ", "")
        script = script.replace("\n", "")
        self.driver.execute_script(script)
        self.driver.execute_script("demoize.scrollToCenterOnLine(%s)" % line)
        self.cur_line = line

    def next_line(self, skip=1):
        """Advance ``skip`` lines, wrapping back to line 1 past the end."""
        try:
            self.goto_line(self.cur_line + skip)
        except NoSuchElementException:
            self.goto_line(1)

    def run(self):
        """Parse the script, instrument every statement with a goto_line()
        call via DemoTransformer, then exec it with DEMOIZE_OBJ bound to
        this instance.  The browser is always quit on exit."""
        try:
            with open(self.script_file) as f:
                script_lines = [line for line in f]
            script_py = "".join(script_lines)
            tree = ast.parse(script_py, filename=self.script_file)
            new_tree = DemoTransformer().visit(tree)
            new_tree = ast.fix_missing_locations(new_tree)
            code_obj = compile(new_tree, self.script_file, 'exec')
            self.extra_globals.update({'DEMOIZE_OBJ': self})
            exec(code_obj, self.extra_globals)
        except Exception:
            print "Error parsing or running demo code"
            raise
        finally:
            self.driver.quit()
class DemoTransformer(ast.NodeTransformer):
    """AST transformer that prefixes every simple statement with a call to
    ``DEMOIZE_OBJ.goto_line(line=N)`` so the browser highlight tracks the
    executing source line.  (Uses Python 2 AST node names such as
    TryExcept/TryFinally.)"""

    def generic_visit(self, node, parent_nodes=None):
        # parent_nodes tracks the ancestry so class-body assignments can be
        # recognised below; copied so siblings don't see each other's entries.
        if parent_nodes is None:
            parent_nodes = []
        else:
            parent_nodes = copy.copy(parent_nodes)
        # Compound statements are not instrumented directly -- their bodies
        # are visited instead.
        avoid_classes = (ast.FunctionDef, ast.ClassDef, ast.If, ast.While,
                         ast.With, ast.TryExcept, ast.TryFinally,
                         ast.ExceptHandler, ast.IfExp, ast.For, ast.Global)
        # Assignments directly inside a class body are left untouched.
        class_assign = (node.__class__ == ast.Assign and
                        len(parent_nodes) > 0 and
                        parent_nodes[-1].__class__ == ast.ClassDef)
        if (hasattr(node, 'lineno') and
                issubclass(node.__class__, ast.stmt) and
                node.__class__ not in avoid_classes and
                not class_assign):
            # Replace the statement with (goto_line(N), original statement);
            # the tuple is flattened into the parent's statement list below.
            extra_stmt = 'DEMOIZE_OBJ.goto_line(line=%s)' % node.lineno
            extra_node = ast.parse(extra_stmt)
            return (extra_node.body[0], node)
        else:
            parent_nodes.append(node)
            # Mirrors ast.NodeTransformer.generic_visit, but threads
            # parent_nodes through and flattens tuple returns into the list.
            for field, old_value in ast.iter_fields(node):
                old_value = getattr(node, field, None)
                if isinstance(old_value, list):
                    new_values = []
                    for value in old_value:
                        if isinstance(value, ast.AST):
                            value = self.generic_visit(value, parent_nodes)
                            if value is None:
                                continue
                            elif not isinstance(value, ast.AST):
                                new_values.extend(value)
                                continue
                        new_values.append(value)
                    old_value[:] = new_values
                elif isinstance(old_value, ast.AST):
                    new_node = self.generic_visit(old_value, parent_nodes)
                    if new_node is None:
                        delattr(node, field)
                    else:
                        setattr(node, field, new_node)
            return node
if __name__ == "__main__":
    # Convenience entry point: launch the demo web app directly.
    run_app()
/Firefly_III_API_Client-2.0.5.0-py3-none-any.whl/firefly_iii_client/paths/v1_piggy_banks_id/put.py | from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from firefly_iii_client import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from firefly_iii_client import schemas # noqa: F401
from firefly_iii_client.model.piggy_bank_single import PiggyBankSingle
from firefly_iii_client.model.piggy_bank_update import PiggyBankUpdate
from firefly_iii_client.model.validation_error import ValidationError
from firefly_iii_client.model.unauthenticated import Unauthenticated
from firefly_iii_client.model.bad_request import BadRequest
from firefly_iii_client.model.internal_exception import InternalException
from firefly_iii_client.model.not_found import NotFound
from . import path
# Header params
# X-Trace-Id: optional client-supplied UUID used for request tracing.
XTraceIdSchema = schemas.UUIDSchema
RequestRequiredHeaderParams = typing_extensions.TypedDict(
    'RequestRequiredHeaderParams',
    {
    }
)
RequestOptionalHeaderParams = typing_extensions.TypedDict(
    'RequestOptionalHeaderParams',
    {
        'X-Trace-Id': typing.Union[XTraceIdSchema, str, uuid.UUID, ],
    },
    total=False
)


class RequestHeaderParams(RequestRequiredHeaderParams, RequestOptionalHeaderParams):
    pass


request_header_x_trace_id = api_client.HeaderParameter(
    name="X-Trace-Id",
    style=api_client.ParameterStyle.SIMPLE,
    schema=XTraceIdSchema,
)
# Path params
# 'id' is the piggy bank ID in the URL path; it is required.
IdSchema = schemas.StrSchema
RequestRequiredPathParams = typing_extensions.TypedDict(
    'RequestRequiredPathParams',
    {
        'id': typing.Union[IdSchema, str, ],
    }
)
RequestOptionalPathParams = typing_extensions.TypedDict(
    'RequestOptionalPathParams',
    {
    },
    total=False
)


class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
    pass


request_path_id = api_client.PathParameter(
    name="id",
    style=api_client.ParameterStyle.SIMPLE,
    schema=IdSchema,
    required=True,
)
# body param
# Both supported request content types carry a PiggyBankUpdate payload.
SchemaForRequestBodyApplicationJson = PiggyBankUpdate
SchemaForRequestBodyApplicationXWwwFormUrlencoded = PiggyBankUpdate
request_body_piggy_bank_update = api_client.RequestBody(
    content={
        'application/json': api_client.MediaType(
            schema=SchemaForRequestBodyApplicationJson),
        'application/x-www-form-urlencoded': api_client.MediaType(
            schema=SchemaForRequestBodyApplicationXWwwFormUrlencoded),
    },
    required=True,
)
# Security scheme names applied to this endpoint.
_auth = [
    'firefly_iii_auth',
]
# Response wrappers: one dataclass + OpenApiResponse per documented status.
SchemaFor200ResponseBodyApplicationVndApijson = PiggyBankSingle


@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor200ResponseBodyApplicationVndApijson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
    content={
        'application/vnd.api+json': api_client.MediaType(
            schema=SchemaFor200ResponseBodyApplicationVndApijson),
    },
)
SchemaFor400ResponseBodyApplicationJson = BadRequest


@dataclass
class ApiResponseFor400(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor400ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_400 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor400,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor400ResponseBodyApplicationJson),
    },
)
SchemaFor401ResponseBodyApplicationJson = Unauthenticated


@dataclass
class ApiResponseFor401(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor401ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_401 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor401,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor401ResponseBodyApplicationJson),
    },
)
SchemaFor404ResponseBodyApplicationJson = NotFound


@dataclass
class ApiResponseFor404(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor404ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_404 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor404,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor404ResponseBodyApplicationJson),
    },
)
SchemaFor422ResponseBodyApplicationJson = ValidationError


@dataclass
class ApiResponseFor422(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor422ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_422 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor422,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor422ResponseBodyApplicationJson),
    },
)
SchemaFor500ResponseBodyApplicationJson = InternalException


@dataclass
class ApiResponseFor500(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor500ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_500 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor500,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor500ResponseBodyApplicationJson),
    },
)
# Maps the HTTP status code (as a string) to its response deserializer.
_status_code_to_response = {
    '200': _response_for_200,
    '400': _response_for_400,
    '401': _response_for_401,
    '404': _response_for_404,
    '422': _response_for_422,
    '500': _response_for_500,
}
_all_accept_content_types = (
    'application/vnd.api+json',
    'application/json',
)
class BaseApi(api_client.Api):
    """Auto-generated (OpenAPI) implementation of PUT /v1/piggy-banks/{id}.

    The ``@typing.overload`` stubs below exist only to narrow the return
    type for type checkers; the un-decorated method at the bottom is the
    single runtime implementation.
    """

    @typing.overload
    def _update_piggy_bank_oapg(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,],
        content_type: typing_extensions.Literal["application/json"] = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def _update_piggy_bank_oapg(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: typing_extensions.Literal["application/x-www-form-urlencoded"],
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def _update_piggy_bank_oapg(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def _update_piggy_bank_oapg(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        skip_deserialization: typing_extensions.Literal[True],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
    ) -> api_client.ApiResponseWithoutDeserialization: ...

    @typing.overload
    def _update_piggy_bank_oapg(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = ...,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization,
    ]: ...

    def _update_piggy_bank_oapg(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = 'application/json',
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ):
        """
        Update existing piggy bank.
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        self._verify_typed_dict_inputs_oapg(RequestHeaderParams, header_params)
        self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
        used_path = path.value

        # Substitute serialized path parameters into the URL template.
        _path_params = {}
        for parameter in (
            request_path_id,
        ):
            parameter_data = path_params.get(parameter.name, schemas.unset)
            if parameter_data is schemas.unset:
                continue
            serialized_data = parameter.serialize(parameter_data)
            _path_params.update(serialized_data)

        for k, v in _path_params.items():
            used_path = used_path.replace('{%s}' % k, v)

        # Collect header parameters and Accept content types.
        _headers = HTTPHeaderDict()
        for parameter in (
            request_header_x_trace_id,
        ):
            parameter_data = header_params.get(parameter.name, schemas.unset)
            if parameter_data is schemas.unset:
                continue
            serialized_data = parameter.serialize(parameter_data)
            _headers.extend(serialized_data)
        # TODO add cookie handling
        if accept_content_types:
            for accept_content_type in accept_content_types:
                _headers.add('Accept', accept_content_type)

        # Serialize the (required) request body for the chosen content type.
        if body is schemas.unset:
            raise exceptions.ApiValueError(
                'The required body parameter has an invalid value of: unset. Set a valid value instead')
        _fields = None
        _body = None
        serialized_data = request_body_piggy_bank_update.serialize(body, content_type)
        _headers.add('Content-Type', content_type)
        if 'fields' in serialized_data:
            _fields = serialized_data['fields']
        elif 'body' in serialized_data:
            _body = serialized_data['body']
        response = self.api_client.call_api(
            resource_path=used_path,
            method='put'.upper(),
            headers=_headers,
            fields=_fields,
            body=_body,
            auth_settings=_auth,
            stream=stream,
            timeout=timeout,
        )

        # Deserialize by status code unless the caller opted out.
        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)

        # Non-2xx responses are raised, carrying the (possibly deserialized)
        # response for inspection.
        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(
                status=response.status,
                reason=response.reason,
                api_response=api_response
            )

        return api_response
class UpdatePiggyBank(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId fn names
    # ``update_piggy_bank`` is a thin delegating wrapper around
    # BaseApi._update_piggy_bank_oapg; the overloads only narrow types.

    @typing.overload
    def update_piggy_bank(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,],
        content_type: typing_extensions.Literal["application/json"] = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def update_piggy_bank(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: typing_extensions.Literal["application/x-www-form-urlencoded"],
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def update_piggy_bank(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def update_piggy_bank(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        skip_deserialization: typing_extensions.Literal[True],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
    ) -> api_client.ApiResponseWithoutDeserialization: ...

    @typing.overload
    def update_piggy_bank(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = ...,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization,
    ]: ...

    def update_piggy_bank(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = 'application/json',
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ):
        """Update an existing piggy bank (delegates to the base implementation)."""
        return self._update_piggy_bank_oapg(
            body=body,
            header_params=header_params,
            path_params=path_params,
            content_type=content_type,
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
class ApiForput(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names
    # ``put`` is a thin delegating wrapper around
    # BaseApi._update_piggy_bank_oapg; the overloads only narrow types.

    @typing.overload
    def put(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,],
        content_type: typing_extensions.Literal["application/json"] = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def put(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: typing_extensions.Literal["application/x-www-form-urlencoded"],
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def put(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def put(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        skip_deserialization: typing_extensions.Literal[True],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
    ) -> api_client.ApiResponseWithoutDeserialization: ...

    @typing.overload
    def put(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = ...,
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = ...,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization,
    ]: ...

    def put(
        self,
        body: typing.Union[SchemaForRequestBodyApplicationJson,SchemaForRequestBodyApplicationXWwwFormUrlencoded,],
        content_type: str = 'application/json',
        header_params: RequestHeaderParams = frozendict.frozendict(),
        path_params: RequestPathParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ):
        """PUT to this endpoint (delegates to the base implementation)."""
        return self._update_piggy_bank_oapg(
            body=body,
            header_params=header_params,
            path_params=path_params,
            content_type=content_type,
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
/Djblets-3.3.tar.gz/Djblets-3.3/djblets/webapi/encoders.py | import io
import json
from xml.sax.saxutils import XMLGenerator
from django.conf import settings
from django.contrib.auth.models import User, Group
from django.db.models.query import QuerySet
from djblets.util.serializers import DjbletsJSONEncoder
class WebAPIEncoder(object):
    """Encodes an object into a dictionary of fields and values.

    This object is used for both JSON and XML API formats.

    Projects can subclass this to provide representations of their objects.
    To make use of an encoder, add the path to the encoder class to
    the project's ``settings.WEB_API_ENCODERS`` list.

    For example:

    .. code-block:: python

        WEB_API_ENCODERS = (
            'myproject.webapi.MyEncoder',
        )
    """

    def encode(self, o, *args, **kwargs):
        """Encodes an object.

        This is expected to return either a dictionary or a list. If the
        object being encoded is not supported, return None, or call
        the superclass's encode method.
        """
        # Base implementation supports nothing; subclasses override this.
        return None
class BasicAPIEncoder(WebAPIEncoder):
    """A basic encoder that encodes standard types.

    This supports encoding of dates, times, QuerySets, Users, and Groups.
    """

    def encode(self, o, *args, **kwargs):
        """Encode querysets, users, groups, and JSON-serializable types.

        Returns ``None`` for objects that cannot be encoded.
        """
        if isinstance(o, QuerySet):
            return list(o)

        if isinstance(o, User):
            return {
                'id': o.id,
                'username': o.username,
                'first_name': o.first_name,
                'last_name': o.last_name,
                'fullname': o.get_full_name(),
                'email': o.email,
                'url': o.get_absolute_url(),
            }

        if isinstance(o, Group):
            return {
                'id': o.id,
                'name': o.name,
            }

        # Fall back to the standard Djblets JSON encoder for dates,
        # times, and other basic types it knows about.
        try:
            return DjbletsJSONEncoder().default(o)
        except TypeError:
            return None
class ResourceAPIEncoder(WebAPIEncoder):
    """An encoder that encodes objects based on registered resources."""

    def encode(self, o, *args, **kwargs):
        """Encode ``o`` using the resource/serializer registered for it."""
        if isinstance(o, QuerySet):
            return list(o)

        # Prefer the serializer chosen by the resource that initiated the
        # encoding, when the caller supplied one.
        calling_resource = kwargs.pop('calling_resource', None)

        if calling_resource:
            serializer = calling_resource.get_serializer_for_object(o)
        else:
            from djblets.webapi.resources import get_resource_for_object

            serializer = get_resource_for_object(o)

        if serializer:
            return serializer.serialize_object(o, *args, **kwargs)

        # No resource knows this object; fall back to basic JSON encoding.
        try:
            return DjbletsJSONEncoder().default(o)
        except TypeError:
            return None
class JSONEncoderAdapter(json.JSONEncoder):
    """Adapts a WebAPIEncoder to be used with json.

    This takes an existing encoder and makes it available to use as a
    json.JSONEncoder. This is used internally when generating JSON from a
    WebAPIEncoder, but can be used in other projects for more specific
    purposes as well.
    """

    def __init__(self, encoder, *args, **kwargs):
        # Sort keys by default for stable output; callers may override by
        # passing sort_keys explicitly.
        super(JSONEncoderAdapter, self).__init__(
            sort_keys=kwargs.pop('sort_keys', True),
            *args, **kwargs)
        self.encoder = encoder

    def encode(self, o, *args, **kwargs):
        # Stash extra arguments so default() (invoked by the base class
        # during encoding) can forward them to the wrapped WebAPIEncoder.
        self.encode_args = args
        self.encode_kwargs = kwargs
        return super(JSONEncoderAdapter, self).encode(o)

    def default(self, o):
        """Encodes an object using the supplied WebAPIEncoder.

        If the encoder is unable to encode this object, a TypeError is raised.
        """
        result = self.encoder.encode(o, *self.encode_args,
                                     **self.encode_kwargs)

        if result is None:
            raise TypeError("%r is not JSON serializable" % (o,))

        return result
class XMLEncoderAdapter(object):
    """Adapts a WebAPIEncoder to output XML.

    This takes an existing encoder and adapts it to output a simple XML
    format.
    """

    def __init__(self, encoder, *args, **kwargs):
        self.encoder = encoder

    def encode(self, o, *args, **kwargs):
        """Encode ``o`` as an XML document and return it as a string.

        The payload is wrapped in a root ``<rsp>`` element.
        """
        self.level = 0
        self.doIndent = False

        stream = io.StringIO()
        self.xml = XMLGenerator(stream, settings.DEFAULT_CHARSET)
        self.xml.startDocument()
        self.startElement("rsp")
        self.__encode(o, *args, **kwargs)
        self.endElement("rsp")
        self.xml.endDocument()
        self.xml = None

        return stream.getvalue()

    def __encode(self, o, *args, **kwargs):
        # Recursively serialize dicts, sequences and scalars; anything
        # else is first converted through the wrapped WebAPIEncoder.
        if isinstance(o, dict):
            for key, value in o.items():
                attrs = {}

                if isinstance(key, int):
                    attrs['value'] = str(key)
                    key = 'int'

                self.startElement(key, attrs)
                self.__encode(value, *args, **kwargs)
                self.endElement(key)
        elif isinstance(o, (tuple, list)):
            self.startElement("array")

            for i in o:
                self.startElement("item")
                self.__encode(i, *args, **kwargs)
                self.endElement("item")

            self.endElement("array")
        elif isinstance(o, str):
            self.text(o)
        elif isinstance(o, bool):
            # bool must be tested *before* int, since bool is a subclass
            # of int. Previously the int branch shadowed this one, making
            # True/False serialize as "1"/"0" and leaving this branch dead.
            if o:
                self.text("True")
            else:
                self.text("False")
        elif isinstance(o, int):
            self.text("%d" % o)
        elif isinstance(o, float):
            self.text("%s" % o)
        elif o is None:
            pass
        else:
            result = self.encoder.encode(o, *args, **kwargs)

            if result is None:
                raise TypeError("%r is not XML serializable" % (o,))

            return self.__encode(result, *args, **kwargs)

    def startElement(self, name, attrs={}):
        # NOTE: the shared default dict is safe here because attrs is
        # never mutated.
        self.addIndent()
        self.xml.startElement(name, attrs)
        self.level += 1
        self.doIndent = True

    def endElement(self, name):
        self.level -= 1
        self.addIndent()
        self.xml.endElement(name)
        self.doIndent = True

    def text(self, value):
        self.xml.characters(value)
        self.doIndent = False

    def addIndent(self):
        # Emit a newline + indentation, but only between elements (not
        # immediately after character data).
        if self.doIndent:
            self.xml.ignorableWhitespace('\n' + ' ' * self.level)
#: Lazily-built cache of instantiated encoder objects.
_registered_encoders = None


def get_registered_encoders():
    """Return the list of registered Web API encoders.

    Encoder classes are looked up from ``settings.WEB_API_ENCODERS``
    (defaulting to :py:class:`BasicAPIEncoder`), instantiated once, and
    cached for the lifetime of the process.
    """
    global _registered_encoders

    if _registered_encoders is None:
        # Use importlib rather than __import__ with a string fromlist:
        # __import__ iterates a string fromlist character-by-character,
        # which only works by accident.
        import importlib

        _registered_encoders = []

        encoders = getattr(settings, 'WEB_API_ENCODERS',
                           ['djblets.webapi.encoders.BasicAPIEncoder'])

        for encoder in encoders:
            module_name, _, class_name = encoder.rpartition('.')

            if not module_name:
                # Preserve the historical fallback for un-dotted paths.
                module_name = '.'

            encoder_module = importlib.import_module(module_name)
            encoder_class = getattr(encoder_module, class_name)
            _registered_encoders.append(encoder_class())

    return _registered_encoders
/FlexGet-3.9.6-py3-none-any.whl/flexget/components/seen/db.py | from datetime import datetime
from loguru import logger
from sqlalchemy import (
Boolean,
Column,
DateTime,
ForeignKey,
Index,
Integer,
Unicode,
or_,
select,
update,
)
from sqlalchemy.orm import relationship
from flexget import db_schema, plugin
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import with_session
from flexget.utils.sqlalchemy_utils import table_add_column, table_schema
try:
# NOTE: Importing other plugins is discouraged!
from flexget.components.imdb.utils import extract_id
except ImportError:
raise plugin.DependencyError(issued_by=__name__, missing='imdb')
logger = logger.bind(name='seen.db')
Base = db_schema.versioned_base('seen', 4)
ESCAPE_QUERY = '\\'
@db_schema.upgrade('seen')
def upgrade(ver, session):
    # Schema migrations for the 'seen' plugin tables. ``ver`` is the
    # currently installed schema version (None for pre-versioned DBs);
    # the returned value is persisted as the new version. Each step
    # falls through to the next so old databases catch up in one pass.
    # NOTE(review): this uses the legacy positional select()/update()
    # SQLAlchemy API -- confirm against the project's pinned SQLAlchemy
    # version before modernizing.
    if ver is None:
        logger.info('Converting seen imdb_url to imdb_id for seen movies.')
        field_table = table_schema('seen_field', session)
        for row in session.execute(
            select([field_table.c.id, field_table.c.value], field_table.c.field == 'imdb_url')
        ):
            new_values = {'field': 'imdb_id', 'value': extract_id(row['value'])}
            session.execute(update(field_table, field_table.c.id == row['id'], new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema('seen_field', session)
        logger.info('Adding index to seen_field table.')
        Index('ix_seen_field_seen_entry_id', field_table.c.seen_entry_id).create(bind=session.bind)
        ver = 2
    if ver == 2:
        logger.info('Adding local column to seen_entry table')
        table_add_column('seen_entry', 'local', Boolean, session, default=False)
        ver = 3
    if ver == 3:
        # setting the default to False in the last migration was broken, fix the data
        logger.info('Repairing seen table')
        entry_table = table_schema('seen_entry', session)
        session.execute(update(entry_table, entry_table.c.local == None, {'local': False}))
        ver = 4
    return ver
class SeenEntry(Base):
    """A single 'seen' record along with its associated field values."""

    __tablename__ = 'seen_entry'

    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    reason = Column(Unicode)
    # Stored under the legacy column name 'feed' for backwards
    # compatibility with older databases.
    task = Column('feed', Unicode)
    added = Column(DateTime)
    local = Column(Boolean)

    fields = relationship('SeenField', backref='seen_entry', cascade='all, delete, delete-orphan')

    def __init__(self, title, task, reason=None, local=None):
        if local is None:
            local = False
        self.title = title
        self.reason = reason
        self.task = task
        self.added = datetime.now()
        self.local = local

    def __str__(self):
        return '<SeenEntry(title={},reason={},task={},added={})>'.format(
            self.title,
            self.reason,
            self.task,
            self.added,
        )

    def to_dict(self):
        # Serialize this entry (including all attached fields) to a plain
        # dict, e.g. for the API layer.
        fields = []
        for field in self.fields:
            fields.append(field.to_dict())

        seen_entry_object = {
            'id': self.id,
            'title': self.title,
            'reason': self.reason,
            'task': self.task,
            'added': self.added,
            'local': self.local,
            'fields': fields,
        }
        return seen_entry_object
class SeenField(Base):
    """A single field name/value pair attached to a :class:`SeenEntry`."""

    __tablename__ = 'seen_field'

    id = Column(Integer, primary_key=True)
    seen_entry_id = Column(Integer, ForeignKey('seen_entry.id'), nullable=False, index=True)
    field = Column(Unicode)
    value = Column(Unicode, index=True)
    added = Column(DateTime)

    def __init__(self, field, value):
        self.field = field
        self.value = value
        self.added = datetime.now()

    def __str__(self):
        return f'<SeenField(field={self.field},value={self.value},added={self.added})>'

    def to_dict(self):
        # Serialize this field to a plain dict for the API layer.
        return {
            'field_name': self.field,
            'field_id': self.id,
            'value': self.value,
            'added': self.added,
            'seen_entry_id': self.seen_entry_id,
        }
@with_session
def add(title, task_name, fields, reason=None, local=None, session=None):
    """
    Adds seen entries to DB

    :param title: name of title to be added
    :param task_name: name of task to be added
    :param fields: Dict of field name -> value pairs to attach to the entry
    :param reason: Optional human-readable reason the entry was added
    :param local: Whether the entry is local to ``task_name`` only
    :param session: Database session (supplied by ``@with_session``)
    :return: Seen Entry object as committed to DB, serialized via ``to_dict()``
    """
    se = SeenEntry(title, task_name, reason, local)
    # dict.items() can be iterated directly; no defensive copy is needed
    # since ``fields`` is not mutated here.
    for field, value in fields.items():
        se.fields.append(SeenField(field, value))
    session.add(se)
    session.commit()
    return se.to_dict()
@event('forget')
def forget(value, tasks=None, test=False):
    """
    See module docstring

    :param string value: Can be entry title or field value
    :param tasks: Optional task name (or list of names) to restrict the purge
    :param test: If True, only log what would be removed without deleting
    :return: count, field_count where count is number of entries removed and field_count number of fields
    """
    with Session() as session:
        logger.debug('forget called with {}', value)
        count = 0
        field_count = 0

        # Normalize ``tasks`` to a list of names, or None for "all tasks".
        if isinstance(tasks, list):
            pass
        elif isinstance(tasks, str):
            tasks = [tasks]
        else:
            tasks = None

        if tasks:
            query_se = session.query(SeenEntry).filter(SeenEntry.task.in_(tasks))
            # If we just got a wildcard no need for further filtering
            if value != '%':
                query_se = query_se.filter(SeenEntry.title.like(value, escape=ESCAPE_QUERY))
            query_sf = session.query(SeenField).filter(
                SeenField.value.like(value, escape=ESCAPE_QUERY)
            )
        else:
            # No task restriction: match titles/field values exactly.
            query_se = session.query(SeenEntry).filter(SeenEntry.title == value)
            query_sf = session.query(SeenField).filter(SeenField.value == value)

        # Delete entries matched directly by title.
        for se in query_se.all():
            field_count += len(se.fields)
            count += 1
            if test:
                logger.info(
                    f'Testing: would forget entry with title `{se.title}` of task `{se.task}`'
                )
            else:
                logger.debug('forgetting {}', se)
                session.delete(se)

        # Delete entries matched indirectly via one of their field values.
        for sf in query_sf.all():
            se = sf.seen_entry
            if tasks and se.task not in tasks:
                continue
            field_count += len(se.fields)
            count += 1
            if test:
                logger.info(
                    f'Testing: would forget entry `{se.title}` of task `{se.task}` based on field `{sf.field}` with value `{sf.value}`'
                )
            else:
                logger.debug('forgetting {}', se)
                session.delete(se)
        return count, field_count
@with_session
def search_by_field_values(field_value_list, task_name, local=False, session=None):
    """
    Return the first matching seen field row, if any field value matches.

    Note: the query selects :class:`SeenField` (not :class:`SeenEntry`);
    the owning entry is reachable via the ``seen_entry`` backref.

    :param field_value_list: List of field values to match
    :param task_name: Name of task to compare to in case local flag is sent
    :param local: Local flag
    :param session: Current session
    :return: SeenField object or None
    """
    found = session.query(SeenField).join(SeenEntry).filter(SeenField.value.in_(field_value_list))
    if local:
        found = found.filter(SeenEntry.task == task_name)
    else:
        # Entries added from CLI were having local marked as None rather than False for a while gh#879
        # (the ``== False`` / ``== None`` comparisons are deliberate so the
        # generated SQL matches both states)
        found = found.filter(or_(SeenEntry.local == False, SeenEntry.local == None))
    return found.first()
@event('manager.db_cleanup')
def db_cleanup(manager, session):
    # Cleanup is currently disabled: the function returns immediately and
    # the original pruning logic is kept below, commented out.
    # TODO: Look into this, is it still valid?
    logger.debug('TODO: Disabled because of ticket #1321')
    return
    # Remove seen fields over a year old
    # result = session.query(SeenField).filter(SeenField.added < datetime.now() - timedelta(days=365)).delete()
    # if result:
    #     log.verbose('Removed %d seen fields older than 1 year.' % result)
@with_session
def search(
    count=None,
    value=None,
    status=None,
    start=None,
    stop=None,
    tasks=None,
    order_by='added',
    descending=False,
    session=None,
):
    # Query seen entries joined with their fields, optionally filtered by
    # field value (SQL LIKE), local status, and task name(s). When ``count``
    # is truthy, return the number of matches; otherwise return an ordered
    # query sliced to [start, stop).
    query = session.query(SeenEntry).join(SeenField)
    if value:
        query = query.filter(SeenField.value.like(value, escape=ESCAPE_QUERY))
    if status is not None:
        query = query.filter(SeenEntry.local == status)

    # Normalize ``tasks`` to a list of names, or None for "all tasks".
    if isinstance(tasks, list):
        pass
    elif isinstance(tasks, str):
        tasks = [tasks]
    else:
        tasks = None
    if tasks:
        query = query.filter(SeenEntry.task.in_(tasks))
    if count:
        return query.group_by(SeenEntry).count()
    if descending:
        query = query.order_by(getattr(SeenEntry, order_by).desc())
    else:
        query = query.order_by(getattr(SeenEntry, order_by))
    return query.group_by(SeenEntry).slice(start, stop)
@with_session
def get_entry_by_id(entry_id, session=None):
    # Fetch a single SeenEntry by primary key. ``Query.one`` raises if no
    # (or more than one) row matches.
    return session.query(SeenEntry).filter(SeenEntry.id == entry_id).one()
@with_session
def forget_by_id(entry_id, session=None):
    """
    Delete SeenEntry via its ID

    :param entry_id: SeenEntry ID
    :param session: DB Session
    :raises: the lookup error from ``Query.one`` (via ``get_entry_by_id``)
        if no entry has this ID
    """
    entry = get_entry_by_id(entry_id, session=session)
    logger.debug('Deleting seen entry with ID {}', entry_id)
    session.delete(entry)
/FamcyDev-0.3.71-py3-none-any.whl/Famcy/bower_components/bootstrap/site/content/docs/5.0/utilities/interactions.md | ---
layout: docs
title: Interactions
description: Utility classes that change how users interact with contents of a website.
group: utilities
toc: false
---
## Text selection
Change the way in which the content is selected when the user interacts with it.
{{< example >}}
<p class="user-select-all">This paragraph will be entirely selected when clicked by the user.</p>
<p class="user-select-auto">This paragraph has default select behavior.</p>
<p class="user-select-none">This paragraph will not be selectable when clicked by the user.</p>
{{< /example >}}
## Pointer events
Bootstrap provides `.pe-none` and `.pe-auto` classes to disable or restore element pointer interactions.
{{< example >}}
<p><a href="#" class="pe-none" tabindex="-1" aria-disabled="true">This link</a> can not be clicked.</p>
<p><a href="#" class="pe-auto">This link</a> can be clicked (this is default behavior).</p>
<p class="pe-none"><a href="#" tabindex="-1" aria-disabled="true">This link</a> can not be clicked because the <code>pointer-events</code> property is inherited from its parent. However, <a href="#" class="pe-auto">this link</a> has a <code>pe-auto</code> class and can be clicked.</p>
{{< /example >}}
{{< callout warning >}}
The `.pe-none` class (and the `pointer-events` CSS property it sets) only prevents interactions with a pointer (mouse, stylus, touch). Links and controls with `.pe-none` are, by default, still focusable and actionable for keyboard users. To ensure that they are completely neutralized even for keyboard users, you may need to add further attributes such as `tabindex="-1"` (to prevent them from receiving keyboard focus) and `aria-disabled="true"` (to convey the fact they are effectively disabled to assistive technologies), and possibly use JavaScript to completely prevent them from being actionable. For form controls, consider using the `disabled` HTML attribute instead.
{{< /callout >}}
## Sass
### Utilities API
Interaction utilities are declared in our utilities API in `scss/_utilities.scss`. [Learn how to use the utilities API.]({{< docsref "/utilities/api#using-the-api" >}})
{{< scss-docs name="utils-interaction" file="scss/_utilities.scss" >}}
| PypiClean |
/Django-HardWorker-0.1.0.zip/Django-HardWorker-0.1.0/hardworker/models.py | from datetime import datetime
#from django.db.models import Q
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.utils import simplejson
from hardworker import get_worker
from django.utils.timezone import now, get_default_timezone, make_aware
class HardJob(models.Model):
    """A queued background job to be executed by a registered worker.

    Jobs are created via ``register_job()`` and picked up by
    ``look_for_jobs()``, which resolves the worker callable and runs it.
    """

    owner = models.ForeignKey(User)
    app = models.CharField(max_length = 100)
    worker = models.CharField(max_length = 100)
    # JSON-serialized keyword arguments for the worker.
    params = models.TextField(null = True, blank = True)
    # NOTE(review): auto_now refreshes this on *every* save, not just on
    # creation -- auto_now_add may have been intended for a true
    # registration timestamp; confirm before changing.
    registration_date = models.DateTimeField(auto_now = True)
    started = models.DateTimeField(null = True, blank = True)
    finished = models.DateTimeField(null = True, blank = True)
    log = models.TextField(null = True, blank = True)
    successful = models.NullBooleanField(null = True, blank = True)
    # Progress percentage, 0-100.
    progress = models.PositiveIntegerField(null = True, blank = True, default = 0)
    owner_notified = models.BooleanField(default = False)
    due_date = models.DateTimeField(null = True, blank = True)

    def __unicode__(self):
        return "%s registered %s.%s at %s. Status: %s." % (self.owner.username, self.app, self.worker, self.registration_date, self.successful)

    @classmethod
    def queue(cls):
        """Return unfinished jobs, newest first.

        NOTE(review): ``due_date`` is not consulted here, so future-dated
        recurring jobs appear eligible immediately -- verify intended.
        """
        return cls.objects.select_related().filter(finished = None).order_by('-registration_date')

    @classmethod
    def register_job(cls, owner, app, worker, params = None, due_date = None):
        """Create and enqueue a new job; ``params`` are stored as JSON."""
        return HardJob.objects.create(owner = owner, app = app, worker = worker, params = simplejson.dumps(params), due_date = due_date)

    @classmethod
    def look_for_jobs(cls):
        """Take the first queued job and run its worker, if one is registered."""
        jobs = list(HardJob.queue())
        if jobs:
            current_job = jobs[0]
            worker = get_worker(current_job.app, current_job.worker)
            if worker:
                HardJob.do_work(current_job, worker)

    @classmethod
    def set_progress(cls, job_id, progress):
        """Update a job's progress percentage (must be 0-100)."""
        if progress < 0 or progress > 100:
            raise Exception("Invalid value for progress. Must be between 0 and 100. Was %s." % progress)
        HardJob.objects.filter(id = job_id).update(progress = progress)

    @classmethod
    def do_work(cls, job, worker):
        """Execute ``worker`` for ``job``, recording outcome and timestamps.

        Any exception from the worker is caught and stored on the job row
        rather than propagated. If the worker declares
        ``recurring_timedelta``, a follow-up job is registered afterwards.
        """
        try:
            if settings.DEBUG:
                print "Found work! %s %s %s ..." % (job.owner, job.app, job.worker),
            job.started = now()
            params = {}
            if worker.needs_params:
                if not job.params or job.params == u'null':
                    raise Exception("Worker called without required parameters.")
                params.update(simplejson.loads(job.params))
            # Always pass the job id so the worker can report progress.
            params['job_id'] = job.id
            worker(**params)
            job.successful = True
            job.progress = 100
            if settings.DEBUG:
                print "done."
        except Exception, e:
            if settings.DEBUG:
                print "failed with exception: %s!" % e
            job.successful = False
            job.log = "Job failed with exception: %s" % e
        job.finished = now()
        job.save()
        if worker.recurring_timedelta:
            print "recurring", worker.recurring_timedelta
            due_date = datetime.today() + worker.recurring_timedelta
            HardJob.register_job(job.owner, job.app, job.worker, due_date = make_aware(due_date, get_default_timezone()))
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/bower_components/pouchdb-find/lib/utils.js | 'use strict';
var Promise = require('pouchdb-promise');
/* istanbul ignore next */
exports.once = function (fun) {
var called = false;
return exports.getArguments(function (args) {
if (called) {
console.trace();
throw new Error('once called more than once');
} else {
called = true;
fun.apply(this, args);
}
});
};
/* istanbul ignore next */
/**
 * Return a variadic wrapper that collects all call arguments into a real
 * array and passes that array to `fun` as its single parameter (avoids
 * leaking the `arguments` object out of the function).
 */
exports.getArguments = function (fun) {
  return function () {
    var len = arguments.length;
    var args = new Array(len);
    var i = -1;
    while (++i < len) {
      args[i] = arguments[i];
    }
    return fun.call(this, args);
  };
};
/* istanbul ignore next */
/**
 * Convert a callback-style function into one that returns a Promise,
 * while still honoring a trailing callback argument when the caller
 * passes one (dual promise/callback API).
 */
exports.toPromise = function (func) {
  //create the function we will be returning
  return exports.getArguments(function (args) {
    var self = this;
    var tempCB = (typeof args[args.length - 1] === 'function') ? args.pop() : false;
    // if the last argument is a function, assume its a callback
    var usedCB;
    if (tempCB) {
      // if it was a callback, create a new callback which calls it,
      // but do so async so we don't trap any errors
      usedCB = function (err, resp) {
        process.nextTick(function () {
          tempCB(err, resp);
        });
      };
    }
    var promise = new Promise(function (fulfill, reject) {
      try {
        var callback = exports.once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        });
        // create a callback for this invocation
        // apply the function in the orig context
        args.push(callback);
        func.apply(self, args);
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (usedCB) {
      promise.then(function (result) {
        usedCB(null, result);
      }, usedCB);
    }
    // No-op cancel() so callers can treat the result like a cancelable task.
    promise.cancel = function () {
      return this;
    };
    return promise;
  });
};
exports.inherits = require('inherits');
exports.Promise = Promise;

/** Deep-clone a plain object via a deep extend onto a fresh object. */
exports.clone = function (obj) {
  return exports.extend(true, {}, obj);
};

exports.extend = require('pouchdb-extend');
/**
 * Convert a promise-returning function into one that takes a Node-style
 * callback as its final argument. The promise is still returned.
 */
exports.callbackify = function (fun) {
  return exports.getArguments(function (args) {
    var cb = args.pop();
    var promise = fun.apply(this, args);
    exports.promisedCallback(promise, cb);
    return promise;
  });
};
/**
 * Bridge `promise` to a Node-style `callback`. The callback is deferred
 * with process.nextTick so that errors thrown inside it are not trapped
 * by the promise chain. Returns the original promise.
 */
exports.promisedCallback = function (promise, callback) {
  promise.then(function (res) {
    process.nextTick(function () {
      callback(null, res);
    });
  }, function (reason) {
    process.nextTick(function () {
      callback(reason);
    });
  });
  return promise;
};
var crypto = require('crypto');
var Md5 = require('spark-md5');

/**
 * Hex MD5 digest of `string`. Uses Node's crypto module normally and
 * falls back to spark-md5 when running in a browser bundle.
 */
exports.MD5 = function (string) {
  /* istanbul ignore else */
  if (!process.browser) {
    return crypto.createHash('md5').update(string).digest('hex');
  } else {
    return Md5.hash(string);
  }
};
exports.flatten = exports.getArguments(function (args) {
var res = [];
for (var i = 0, len = args.length; i < len; i++) {
var subArr = args[i];
if (Array.isArray(subArr)) {
res = res.concat(exports.flatten.apply(null, subArr));
} else {
res.push(subArr);
}
}
return res;
});
/** Deep-merge an array of objects into a new object; later entries win. */
exports.mergeObjects = function (arr) {
  var res = {};
  for (var i = 0, len = arr.length; i < len; i++) {
    res = exports.extend(true, res, arr[i]);
  }
  return res;
};
// this would just be "return doc[field]", but fields
// can be "deep" due to dot notation
exports.getFieldFromDoc = function (doc, parsedField) {
  var value = doc;
  for (var i = 0, len = parsedField.length; i < len; i++) {
    var key = parsedField[i];
    value = value[key];
    // Stop descending on any falsy intermediate (guards null/undefined);
    // note this returns the falsy value itself (e.g. 0 or '') rather than
    // undefined when the path continues past it.
    if (!value) {
      break;
    }
  }
  return value;
};
exports.setFieldInDoc = function (doc, parsedField, value) {
for (var i = 0, len = parsedField.length; i < len-1; i++) {
var elem = parsedField[i];
doc = doc[elem] = {};
}
doc[parsedField[len-1]] = value;
};
// Converts a string in dot notation to an array of its components, with backslash escaping
exports.parseField = function (fieldName) {
// fields may be deep (e.g. "foo.bar.baz"), so parse
var fields = [];
var current = '';
for (var i = 0, len = fieldName.length; i < len; i++) {
var ch = fieldName[i];
if (ch === '.') {
if (i > 0 && fieldName[i - 1] === '\\') { // escaped delimiter
current = current.substring(0, current.length - 1) + '.';
} else { // not escaped, so delimiter
fields.push(current);
current = '';
}
} else { // normal character
current += ch;
}
}
fields.push(current);
return fields;
};
// Selects a list of fields defined in dot notation from one doc
// and copies them to a new doc. Like underscore _.pick but supports nesting.
exports.pick = function (obj, arr) {
  var res = {};
  for (var i = 0, len = arr.length; i < len; i++) {
    var parsedField = exports.parseField(arr[i]);
    var value = exports.getFieldFromDoc(obj, parsedField);
    // Skip fields that are entirely absent from the source doc.
    if(typeof value !== 'undefined') {
      exports.setFieldInDoc(res, parsedField, value);
    }
  }
  return res;
};
// e.g. ['a'], ['a', 'b'] is true, but ['b'], ['a', 'b'] is false
// (element-wise prefix comparison up to the shorter array's length)
exports.oneArrayIsSubArrayOfOther = function (left, right) {
  for (var i = 0, len = Math.min(left.length, right.length); i < len; i++) {
    if (left[i] !== right[i]) {
      return false;
    }
  }
  return true;
};

// e.g.['a', 'b', 'c'], ['a', 'b'] is false
// (like the above, but left must also be no longer than right)
exports.oneArrayIsStrictSubArrayOfOther = function (left, right) {
  if (left.length > right.length) {
    return false;
  }
  return exports.oneArrayIsSubArrayOfOther(left, right);
};
// same as above, but treat the left array as an unordered set
// e.g. ['b', 'a'], ['a', 'b', 'c'] is true, but ['c'], ['a', 'b', 'c'] is false
exports.oneSetIsSubArrayOfOther = function (left, right) {
  // Work on a copy so the caller's array is not mutated by splice().
  left = left.slice();
  for (var i = 0, len = right.length; i < len; i++) {
    var field = right[i];
    if (!left.length) {
      break;
    }
    var leftIdx = left.indexOf(field);
    if (leftIdx === -1) {
      return false;
    } else {
      // Consume the matched element so duplicates are counted correctly.
      left.splice(leftIdx, 1);
    }
  }
  return true;
};
exports.compare = function (left, right) {
return left < right ? -1 : left > right ? 1 : 0;
};
exports.arrayToObject = function (arr) {
var res = {};
for (var i = 0, len = arr.length; i < len; i++) {
res[arr[i]] = true;
}
return res;
};
exports.max = function (arr, fun) {
var max = null;
var maxScore = -1;
for (var i = 0, len = arr.length; i < len; i++) {
var element = arr[i];
var score = fun(element);
if (score > maxScore) {
maxScore = score;
max = element;
}
}
return max;
};
exports.arrayEquals = function (arr1, arr2) {
if (arr1.length !== arr2.length) {
return false;
}
for (var i = 0, len = arr1.length; i < len; i++) {
if (arr1[i] !== arr2[i]) {
return false;
}
}
return true;
};
exports.uniq = function(arr) {
var obj = {};
for (var i = 0; i < arr.length; i++) {
obj['$' + arr[i]] = true;
}
return Object.keys(obj).map(function (key) {
return key.substring(1);
});
};
exports.log = require('debug')('pouchdb:find'); | PypiClean |
/Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/workflow/workflow.py | import binascii
import json
import logging
import logging.handlers
import os
import pickle
import plistlib
import re
import shutil
import string
import subprocess
import sys
import time
import unicodedata
from copy import deepcopy
# imported to maintain API
from workflow.util import LockFile, atomic_writer, uninterruptible
#: Sentinel for properties that haven't been set yet (that might
#: correctly have the value ``None``)
UNSET = object()
####################################################################
# Standard system icons
####################################################################
# These icons are default macOS icons. They are super-high quality, and
# will be familiar to users.
# This library uses `ICON_ERROR` when a workflow dies in flames, so
# in my own workflows, I use `ICON_WARNING` for less fatal errors
# (e.g. bad user input, no results etc.)
# The system icons are all in this directory. There are many more than
# are listed here
ICON_ROOT = '/System/Library/CoreServices/CoreTypes.bundle/Contents/Resources'
ICON_ACCOUNT = os.path.join(ICON_ROOT, 'Accounts.icns')
ICON_BURN = os.path.join(ICON_ROOT, 'BurningIcon.icns')
ICON_CLOCK = os.path.join(ICON_ROOT, 'Clock.icns')
ICON_COLOR = os.path.join(ICON_ROOT, 'ProfileBackgroundColor.icns')
ICON_COLOUR = ICON_COLOR # Queen's English, if you please
ICON_EJECT = os.path.join(ICON_ROOT, 'EjectMediaIcon.icns')
# Shown when a workflow throws an error
ICON_ERROR = os.path.join(ICON_ROOT, 'AlertStopIcon.icns')
ICON_FAVORITE = os.path.join(ICON_ROOT, 'ToolbarFavoritesIcon.icns')
ICON_FAVOURITE = ICON_FAVORITE
ICON_GROUP = os.path.join(ICON_ROOT, 'GroupIcon.icns')
ICON_HELP = os.path.join(ICON_ROOT, 'HelpIcon.icns')
ICON_HOME = os.path.join(ICON_ROOT, 'HomeFolderIcon.icns')
ICON_INFO = os.path.join(ICON_ROOT, 'ToolbarInfo.icns')
ICON_NETWORK = os.path.join(ICON_ROOT, 'GenericNetworkIcon.icns')
ICON_NOTE = os.path.join(ICON_ROOT, 'AlertNoteIcon.icns')
ICON_SETTINGS = os.path.join(ICON_ROOT, 'ToolbarAdvanced.icns')
ICON_SWIRL = os.path.join(ICON_ROOT, 'ErasingIcon.icns')
ICON_SWITCH = os.path.join(ICON_ROOT, 'General.icns')
ICON_SYNC = os.path.join(ICON_ROOT, 'Sync.icns')
ICON_TRASH = os.path.join(ICON_ROOT, 'TrashIcon.icns')
ICON_USER = os.path.join(ICON_ROOT, 'UserIcon.icns')
ICON_WARNING = os.path.join(ICON_ROOT, 'AlertCautionBadgeIcon.icns')
ICON_WEB = os.path.join(ICON_ROOT, 'BookmarkIcon.icns')
####################################################################
# non-ASCII to ASCII diacritic folding.
# Used by `fold_to_ascii` method
####################################################################
ASCII_REPLACEMENTS = {
'À': 'A',
'Á': 'A',
'Â': 'A',
'Ã': 'A',
'Ä': 'A',
'Å': 'A',
'Æ': 'AE',
'Ç': 'C',
'È': 'E',
'É': 'E',
'Ê': 'E',
'Ë': 'E',
'Ì': 'I',
'Í': 'I',
'Î': 'I',
'Ï': 'I',
'Ð': 'D',
'Ñ': 'N',
'Ò': 'O',
'Ó': 'O',
'Ô': 'O',
'Õ': 'O',
'Ö': 'O',
'Ø': 'O',
'Ù': 'U',
'Ú': 'U',
'Û': 'U',
'Ü': 'U',
'Ý': 'Y',
'Þ': 'Th',
'ß': 'ss',
'à': 'a',
'á': 'a',
'â': 'a',
'ã': 'a',
'ä': 'a',
'å': 'a',
'æ': 'ae',
'ç': 'c',
'è': 'e',
'é': 'e',
'ê': 'e',
'ë': 'e',
'ì': 'i',
'í': 'i',
'î': 'i',
'ï': 'i',
'ð': 'd',
'ñ': 'n',
'ò': 'o',
'ó': 'o',
'ô': 'o',
'õ': 'o',
'ö': 'o',
'ø': 'o',
'ù': 'u',
'ú': 'u',
'û': 'u',
'ü': 'u',
'ý': 'y',
'þ': 'th',
'ÿ': 'y',
'Ł': 'L',
'ł': 'l',
'Ń': 'N',
'ń': 'n',
'Ņ': 'N',
'ņ': 'n',
'Ň': 'N',
'ň': 'n',
'Ŋ': 'ng',
'ŋ': 'NG',
'Ō': 'O',
'ō': 'o',
'Ŏ': 'O',
'ŏ': 'o',
'Ő': 'O',
'ő': 'o',
'Œ': 'OE',
'œ': 'oe',
'Ŕ': 'R',
'ŕ': 'r',
'Ŗ': 'R',
'ŗ': 'r',
'Ř': 'R',
'ř': 'r',
'Ś': 'S',
'ś': 's',
'Ŝ': 'S',
'ŝ': 's',
'Ş': 'S',
'ş': 's',
'Š': 'S',
'š': 's',
'Ţ': 'T',
'ţ': 't',
'Ť': 'T',
'ť': 't',
'Ŧ': 'T',
'ŧ': 't',
'Ũ': 'U',
'ũ': 'u',
'Ū': 'U',
'ū': 'u',
'Ŭ': 'U',
'ŭ': 'u',
'Ů': 'U',
'ů': 'u',
'Ű': 'U',
'ű': 'u',
'Ŵ': 'W',
'ŵ': 'w',
'Ŷ': 'Y',
'ŷ': 'y',
'Ÿ': 'Y',
'Ź': 'Z',
'ź': 'z',
'Ż': 'Z',
'ż': 'z',
'Ž': 'Z',
'ž': 'z',
'ſ': 's',
'Α': 'A',
'Β': 'B',
'Γ': 'G',
'Δ': 'D',
'Ε': 'E',
'Ζ': 'Z',
'Η': 'E',
'Θ': 'Th',
'Ι': 'I',
'Κ': 'K',
'Λ': 'L',
'Μ': 'M',
'Ν': 'N',
'Ξ': 'Ks',
'Ο': 'O',
'Π': 'P',
'Ρ': 'R',
'Σ': 'S',
'Τ': 'T',
'Υ': 'U',
'Φ': 'Ph',
'Χ': 'Kh',
'Ψ': 'Ps',
'Ω': 'O',
'α': 'a',
'β': 'b',
'γ': 'g',
'δ': 'd',
'ε': 'e',
'ζ': 'z',
'η': 'e',
'θ': 'th',
'ι': 'i',
'κ': 'k',
'λ': 'l',
'μ': 'm',
'ν': 'n',
'ξ': 'x',
'ο': 'o',
'π': 'p',
'ρ': 'r',
'ς': 's',
'σ': 's',
'τ': 't',
'υ': 'u',
'φ': 'ph',
'χ': 'kh',
'ψ': 'ps',
'ω': 'o',
'А': 'A',
'Б': 'B',
'В': 'V',
'Г': 'G',
'Д': 'D',
'Е': 'E',
'Ж': 'Zh',
'З': 'Z',
'И': 'I',
'Й': 'I',
'К': 'K',
'Л': 'L',
'М': 'M',
'Н': 'N',
'О': 'O',
'П': 'P',
'Р': 'R',
'С': 'S',
'Т': 'T',
'У': 'U',
'Ф': 'F',
'Х': 'Kh',
'Ц': 'Ts',
'Ч': 'Ch',
'Ш': 'Sh',
'Щ': 'Shch',
'Ъ': "'",
'Ы': 'Y',
'Ь': "'",
'Э': 'E',
'Ю': 'Iu',
'Я': 'Ia',
'а': 'a',
'б': 'b',
'в': 'v',
'г': 'g',
'д': 'd',
'е': 'e',
'ж': 'zh',
'з': 'z',
'и': 'i',
'й': 'i',
'к': 'k',
'л': 'l',
'м': 'm',
'н': 'n',
'о': 'o',
'п': 'p',
'р': 'r',
'с': 's',
'т': 't',
'у': 'u',
'ф': 'f',
'х': 'kh',
'ц': 'ts',
'ч': 'ch',
'ш': 'sh',
'щ': 'shch',
'ъ': "'",
'ы': 'y',
'ь': "'",
'э': 'e',
'ю': 'iu',
'я': 'ia',
'ᴦ': 'G',
'ᴧ': 'L',
'ᴨ': 'P',
'ᴩ': 'R',
'ᴪ': 'PS',
'ẞ': 'Ss',
'Ỳ': 'Y',
'ỳ': 'y',
'Ỵ': 'Y',
'ỵ': 'y',
'Ỹ': 'Y',
'ỹ': 'y',
}
####################################################################
# Smart-to-dumb punctuation mapping
####################################################################
DUMB_PUNCTUATION = {
'‘': "'",
'’': "'",
'‚': "'",
'“': '"',
'”': '"',
'„': '"',
'–': '-',
'—': '-'
}
####################################################################
# Used by `Workflow.filter`
####################################################################
# Anchor characters in a name
#: Characters that indicate the beginning of a "word" in CamelCase
INITIALS = string.ascii_uppercase + string.digits
#: Split on non-letters, numbers
split_on_delimiters = re.compile('[^a-zA-Z0-9]').split
# Match filter flags
#: Match items that start with ``query``
MATCH_STARTSWITH = 1
#: Match items whose capital letters start with ``query``
MATCH_CAPITALS = 2
#: Match items with a component "word" that matches ``query``
MATCH_ATOM = 4
#: Match items whose initials (based on atoms) start with ``query``
MATCH_INITIALS_STARTSWITH = 8
#: Match items whose initials (based on atoms) contain ``query``
MATCH_INITIALS_CONTAIN = 16
#: Combination of :const:`MATCH_INITIALS_STARTSWITH` and
#: :const:`MATCH_INITIALS_CONTAIN`
MATCH_INITIALS = 24
#: Match items if ``query`` is a substring
MATCH_SUBSTRING = 32
#: Match items if all characters in ``query`` appear in the item in order
MATCH_ALLCHARS = 64
#: Combination of all other ``MATCH_*`` constants
MATCH_ALL = 127
####################################################################
# Used by `Workflow.check_update`
####################################################################
# Number of days to wait between checking for updates to the workflow
DEFAULT_UPDATE_FREQUENCY = 1
####################################################################
# Keychain access errors
####################################################################
class KeychainError(Exception):
"""Raised for unknown Keychain errors.
Raised by methods :meth:`Workflow.save_password`,
:meth:`Workflow.get_password` and :meth:`Workflow.delete_password`
when ``security`` CLI app returns an unknown error code.
"""
class PasswordNotFound(KeychainError):
    """Password not in Keychain.

    Raised by method :meth:`Workflow.get_password` when ``account``
    is unknown to the Keychain.

    """
class PasswordExists(KeychainError):
    """Raised when trying to overwrite an existing account password.

    You should never receive this error: it is used internally
    by the :meth:`Workflow.save_password` method to know if it needs
    to delete the old password first (a Keychain implementation detail).

    """
####################################################################
# Helper functions
####################################################################


def isascii(text):
    """Return ``True`` if every character in ``text`` is ASCII.

    :param text: text to test for ASCII-ness
    :type text: ``str``
    :returns: ``True`` if ``text`` is encodable as ASCII, else ``False``
    :rtype: ``bool``

    """
    try:
        # Encoding succeeds iff all code points are < 128
        text.encode('ascii')
        return True
    except UnicodeEncodeError:
        return False
####################################################################
# Implementation classes
####################################################################


class SerializerManager(object):
    """Registry of serializer objects, addressable by name.

    .. versionadded:: 1.8

    A configured instance of this class is available at
    :attr:`workflow.manager`.

    Use :meth:`register()` to register new (or replace
    existing) serializers, which you can specify by name when calling
    :class:`~workflow.Workflow` data storage methods.

    See :ref:`guide-serialization` and :ref:`guide-persistent-data`
    for further information.

    """

    def __init__(self):
        """Create new SerializerManager object."""
        # Maps serializer name -> serializer object
        self._serializers = {}

    def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation — getattr raises AttributeError when either
        # required method is missing
        for required in ('load', 'dump'):
            getattr(serializer, required)
        self._serializers[name] = serializer

    def serializer(self, name):
        """Return serializer object registered under ``name``.

        :param name: Name of serializer to return
        :type name: ``str`` or ``bytes``
        :returns: serializer object or ``None`` if no such serializer
            is registered.

        """
        return self._serializers.get(name)

    def unregister(self, name):
        """Remove and return the serializer registered under ``name``.

        Raises a :class:`ValueError` if there is no such registered
        serializer.

        :param name: Name of serializer to remove
        :type name: ``str`` or ``bytes``
        :returns: serializer object

        """
        if name not in self._serializers:
            raise ValueError('No such serializer registered : {0}'.format(
                name))
        return self._serializers.pop(name)

    @property
    def serializers(self):
        """Return sorted names of registered serializers."""
        return sorted(self._serializers)
class JSONSerializer(object):
    """Wrapper around :mod:`json`. Sets ``indent`` and ``encoding``.

    .. versionadded:: 1.8

    Use this serializer if you need readable data files. JSON doesn't
    support Python objects as well as ``pickle``, so be
    careful which data you try to serialize as JSON.

    """

    @classmethod
    def load(cls, file_obj):
        """Deserialize an object from an open JSON file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from JSON file
        :rtype: object

        """
        return json.load(file_obj)

    @classmethod
    def dump(cls, obj, file_obj):
        """Serialize ``obj`` to an open file as UTF-8 encoded JSON.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: JSON-serializable data structure
        :param file_obj: file handle (opened in binary mode)
        :type file_obj: ``file`` object

        """
        encoded = json.dumps(obj, indent=2).encode('utf-8')
        file_obj.write(encoded)
class PickleSerializer(object):
    """Wrapper around :mod:`pickle`. Sets ``protocol``.

    .. versionadded:: 1.8

    Use this serializer if you need to add custom pickling.

    """

    @classmethod
    def load(cls, file_obj):
        """Deserialize an object from an open pickle file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from pickle file
        :rtype: object

        """
        return pickle.load(file_obj)

    @classmethod
    def dump(cls, obj, file_obj):
        """Pickle ``obj`` to an open file with the highest protocol.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: Python object
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """
        # protocol=-1 in the original == pickle.HIGHEST_PROTOCOL
        return pickle.dump(obj, file_obj, protocol=pickle.HIGHEST_PROTOCOL)
# Set up default manager and register built-in serializers.
# The registration names ('pickle', 'json') also serve as the file
# extensions of saved data files (see SerializerManager.register).
manager = SerializerManager()
manager.register('pickle', PickleSerializer)
manager.register('json', JSONSerializer)
class Variables(dict):
    """Workflow variables for Run Script actions.

    .. versionadded: 1.26

    A :class:`dict` subclass that serializes itself to Alfred's
    ``alfredworkflow`` JSON format, allowing you to set workflow
    variables from Run Script actions.

    >>> v = Variables(username='deanishe', password='hunter2')
    >>> v.arg = 'output value'
    >>> print(v)

    See :ref:`variables-run-script` in the User Guide for more
    information.

    Args:
        arg (str or list, optional): Main output/``{query}``.
        **variables: Workflow variables to set.

    In Alfred 4.1+ and Alfred-PyWorkflow 1.40+, ``arg`` may also be a
    :class:`list` or :class:`tuple`.

    Attributes:
        arg (str or list): Output value (``{query}``).
        config (dict): Configuration for downstream workflow element.

    """

    def __init__(self, arg=None, **variables):
        """Create a new `Variables` object."""
        self.arg = arg
        self.config = {}
        super(Variables, self).__init__(**variables)

    @property
    def obj(self):
        """``alfredworkflow`` :class:`dict`."""
        payload = {}
        if self:
            # Copy the dict contents into the payload
            payload['variables'] = dict(self)
        if self.config:
            payload['config'] = self.config
        if self.arg is not None:
            payload['arg'] = self.arg
        return {'alfredworkflow': payload}

    def __str__(self):
        """Convert to ``alfredworkflow`` JSON object.

        Returns:
            str: ``alfredworkflow`` JSON object

        """
        # With no variables and no config, a plain-string arg (or
        # nothing at all) is emitted bare rather than as JSON
        if not (self or self.config):
            if not self.arg:
                return ''
            if isinstance(self.arg, str):
                return self.arg
        return json.dumps(self.obj)

    def __bytes__(self):
        """Convert to ``alfredworkflow`` JSON object.

        Returns:
            bytes: UTF-8 encoded ``alfredworkflow`` JSON object

        """
        return str(self).encode('utf-8')
class Modifier(object):
    """Modify :class:`Item` arg/icon/variables when modifier key is pressed.

    Don't use this class directly (as it won't be associated with any
    :class:`Item`), but rather use :meth:`Item.add_modifier()`
    to add modifiers to results.

    >>> it = wf.add_item('Title', 'Subtitle', valid=True)
    >>> it.setvar('name', 'default')
    >>> m = it.add_modifier('cmd')
    >>> m.setvar('name', 'alternate')

    See :ref:`workflow-variables` in the User Guide for more information
    and :ref:`example usage <example-variables>`.

    Args:
        key (str): Modifier key, e.g. ``"cmd"``, ``"alt"`` etc.
        subtitle (str, optional): Override default subtitle.
        arg (str, optional): Argument to pass for this modifier.
        valid (bool, optional): Override item's validity.
        icon (str, optional): Filepath/UTI of icon to use
        icontype (str, optional): Type of icon. See
            :meth:`Workflow.add_item() <workflow.Workflow.add_item>`
            for valid values.

    Attributes:
        arg (str): Arg to pass to following action.
        config (dict): Configuration for a downstream element, such as
            a File Filter.
        icon (str): Filepath/UTI of icon.
        icontype (str): Type of icon. See
            :meth:`Workflow.add_item() <workflow.Workflow.add_item>`
            for valid values.
        key (str): Modifier key (see above).
        subtitle (str): Override item subtitle.
        valid (bool): Override item validity.
        variables (dict): Workflow variables set by this modifier.

    """

    def __init__(self, key, subtitle=None, arg=None, valid=None, icon=None,
                 icontype=None):
        """Create a new :class:`Modifier`.

        Use :meth:`Item.add_modifier()` rather than instantiating this
        class directly. Arguments are described on the class docstring.

        """
        self.key = key
        self.subtitle = subtitle
        self.arg = arg
        self.valid = valid
        self.icon = icon
        self.icontype = icontype
        self.config = {}
        self.variables = {}

    def setvar(self, name, value):
        """Set a workflow variable for this Item.

        Args:
            name (str): Name of variable.
            value (str): Value of variable.

        """
        self.variables[name] = value

    def getvar(self, name, default=None):
        """Return value of workflow variable for ``name`` or ``default``.

        Args:
            name (str): Variable name.
            default (None, optional): Value to return if variable is unset.

        Returns:
            str or ``default``: Value of variable if set or ``default``.

        """
        return self.variables.get(name, default)

    @property
    def obj(self):
        """Modifier formatted for JSON serialization for Alfred 3.

        Returns:
            dict: Modifier for serializing to JSON.

        """
        o = {}
        # Optional scalar overrides: only emitted when explicitly set
        for name, value in (('subtitle', self.subtitle),
                            ('arg', self.arg),
                            ('valid', self.valid)):
            if value is not None:
                o[name] = value
        if self.variables:
            o['variables'] = self.variables
        if self.config:
            o['config'] = self.config
        icon = self._icon()
        if icon:
            o['icon'] = icon
        return o

    def _icon(self):
        """Return `icon` object for item.

        Returns:
            dict: Mapping for item `icon` (may be empty).

        """
        icon = {}
        if self.icon is not None:
            icon['path'] = self.icon
        if self.icontype is not None:
            icon['type'] = self.icontype
        return icon
class Item(object):
    """Represents a feedback item for Alfred 3+.

    Generates Alfred-compliant JSON for a single item.

    Don't use this class directly (as it then won't be associated with
    any :class:`Workflow <workflow.Workflow>` object), but rather use
    :meth:`Workflow.add_item() <workflow.Workflow.add_item>`.
    See :meth:`~workflow.Workflow.add_item` for details of arguments.

    """

    def __init__(self, title, subtitle='', arg=None, autocomplete=None,
                 match=None, valid=False, uid=None, icon=None, icontype=None,
                 type=None, largetext=None, copytext=None, quicklookurl=None):
        """Create a new :class:`Item` object.

        Use same arguments as for
        :class:`Workflow.Item <workflow.Workflow.Item>`.

        Argument ``subtitle_modifiers`` is not supported.

        """
        self.title = title
        self.subtitle = subtitle
        self.arg = arg
        self.autocomplete = autocomplete
        self.match = match
        self.valid = valid
        self.uid = uid
        self.icon = icon
        self.icontype = icontype
        self.type = type
        self.quicklookurl = quicklookurl
        self.largetext = largetext
        self.copytext = copytext
        self.modifiers = {}
        self.config = {}
        self.variables = {}

    def setvar(self, name, value):
        """Set a workflow variable for this Item.

        Args:
            name (str): Name of variable.
            value (str): Value of variable.

        """
        self.variables[name] = value

    def getvar(self, name, default=None):
        """Return value of workflow variable for ``name`` or ``default``.

        Args:
            name (str): Variable name.
            default (None, optional): Value to return if variable is unset.

        Returns:
            str or ``default``: Value of variable if set or ``default``.

        """
        return self.variables.get(name, default)

    def add_modifier(self, key, subtitle=None, arg=None, valid=None,
                     icon=None, icontype=None):
        """Add alternative values for a modifier key.

        Args:
            key (str): Modifier key, e.g. ``"cmd"`` or ``"alt"``
            subtitle (str, optional): Override item subtitle.
            arg (str, optional): Input for following action.
            valid (bool, optional): Override item validity.
            icon (str, optional): Filepath/UTI of icon.
            icontype (str, optional): Type of icon. See
                :meth:`Workflow.add_item() <workflow.Workflow.add_item>`
                for valid values.

        In Alfred 4.1+ and Alfred-PyWorkflow 1.40+, ``arg`` may also be a
        :class:`list` or :class:`tuple`.

        Returns:
            Modifier: Configured :class:`Modifier`.

        """
        mod = Modifier(key, subtitle, arg, valid, icon, icontype)
        # Modifier inherits a copy of this Item's current variables
        mod.variables.update(self.variables)
        self.modifiers[key] = mod
        return mod

    @property
    def obj(self):
        """Item formatted for JSON serialization.

        Returns:
            dict: Data suitable for Alfred 3 feedback.

        """
        # Required values
        o = {
            'title': self.title,
            'subtitle': self.subtitle,
            'valid': self.valid,
        }
        # Optional values: only included when explicitly set
        for name, value in (('arg', self.arg),
                            ('autocomplete', self.autocomplete),
                            ('match', self.match),
                            ('uid', self.uid),
                            ('type', self.type),
                            ('quicklookurl', self.quicklookurl)):
            if value is not None:
                o[name] = value
        if self.variables:
            o['variables'] = self.variables
        if self.config:
            o['config'] = self.config
        # Largetype and copytext
        text = self._text()
        if text:
            o['text'] = text
        icon = self._icon()
        if icon:
            o['icon'] = icon
        # Modifiers
        mods = self._modifiers()
        if mods:
            o['mods'] = mods
        return o

    def _icon(self):
        """Return `icon` object for item.

        Returns:
            dict: Mapping for item `icon` (may be empty).

        """
        icon = {}
        if self.icon is not None:
            icon['path'] = self.icon
        if self.icontype is not None:
            icon['type'] = self.icontype
        return icon

    def _text(self):
        """Return `largetext` and `copytext` object for item.

        Returns:
            dict: `text` mapping (may be empty)

        """
        text = {}
        if self.largetext is not None:
            text['largetype'] = self.largetext
        if self.copytext is not None:
            text['copy'] = self.copytext
        return text

    def _modifiers(self):
        """Build `mods` dictionary for JSON feedback.

        Returns:
            dict: Modifier mapping or `None`.

        """
        if not self.modifiers:
            return None
        return {key: mod.obj for key, mod in self.modifiers.items()}
class Settings(dict):
    """A dictionary that saves itself when changed.

    Dictionary keys & values will be saved as a JSON file
    at ``filepath``. If the file does not exist, the dictionary
    (and settings file) will be initialised with ``defaults``.

    :param filepath: where to save the settings
    :type filepath: :class:`str`
    :param defaults: dict of default settings
    :type defaults: :class:`dict`

    An appropriate instance is provided by :class:`Workflow` instances at
    :attr:`Workflow.settings`.

    """

    def __init__(self, filepath, defaults=None):
        """Create new :class:`Settings` object."""
        super(Settings, self).__init__()
        self._filepath = filepath
        # While True, the mutating dict methods skip save() (set during
        # _load() so that populating self doesn't rewrite the file)
        self._nosave = False
        # Snapshot of the data loaded from disk; __setitem__ compares
        # against it to avoid redundant writes
        self._original = {}
        if os.path.exists(self._filepath):
            self._load()
        elif defaults:
            for key, val in defaults.items():
                self[key] = val
            self.save()  # save default settings

    def _load(self):
        """Load cached settings from JSON file `self._filepath`."""
        data = {}
        # Hold the file lock only for the duration of the read
        with LockFile(self._filepath, 0.5):
            with open(self._filepath, 'rb') as fp:
                data.update(json.load(fp))
        self._original = deepcopy(data)
        # Suppress save() while copying loaded data into self —
        # update() would otherwise immediately write the file back out
        self._nosave = True
        self.update(data)
        self._nosave = False

    @uninterruptible
    def save(self):
        """Save settings to JSON file specified in ``self._filepath``.

        If you're using this class via :attr:`Workflow.settings`, which
        you probably are, ``self._filepath`` will be ``settings.json``
        in your workflow's data directory (see :attr:`~Workflow.datadir`).
        """
        if self._nosave:
            return
        data = {}
        data.update(self)
        # Lock + atomic write so a concurrent workflow run can't read
        # (or leave behind) a half-written settings file
        with LockFile(self._filepath, 0.5):
            with atomic_writer(self._filepath, 'w') as fp:
                json.dump(data, fp, sort_keys=True, indent=2)

    # dict methods
    def __setitem__(self, key, value):
        """Implement :class:`dict` interface."""
        # Skip both the assignment and the save when the new value
        # matches what was originally loaded from disk
        if self._original.get(key) != value:
            super(Settings, self).__setitem__(key, value)
            self.save()

    def __delitem__(self, key):
        """Implement :class:`dict` interface."""
        super(Settings, self).__delitem__(key)
        self.save()

    def update(self, *args, **kwargs):
        """Override :class:`dict` method to save on update."""
        super(Settings, self).update(*args, **kwargs)
        self.save()

    def setdefault(self, key, value=None):
        """Override :class:`dict` method to save on update."""
        ret = super(Settings, self).setdefault(key, value)
        self.save()
        return ret
class Workflow(object):
"""The ``Workflow`` object is the main interface to Alfred-PyWorkflow.
It provides APIs for accessing the Alfred/workflow environment,
storing & caching data, using Keychain, and generating Script
Filter feedback.
``Workflow`` is compatible with Alfred 3+.
:param default_settings: default workflow settings. If no settings file
exists, :class:`Workflow.settings` will be pre-populated with
``default_settings``.
:type default_settings: :class:`dict`
:param update_settings: settings for updating your workflow from
GitHub releases. The only required key is ``github_slug``,
whose value must take the form of ``username/repo``.
If specified, ``Workflow`` will check the repo's releases
for updates. Your workflow must also have a semantic version
number. Please see the :ref:`User Manual <user-manual>` and
`update API docs <api-updates>` for more information.
:type update_settings: :class:`dict`
:param input_encoding: encoding of command line arguments. You
should probably leave this as the default (``utf-8``), which
is the encoding Alfred uses.
:type input_encoding: :class:`str`
:param normalization: normalization to apply to CLI args.
See :meth:`Workflow.decode` for more details.
:type normalization: :class:`str`
:param capture_args: Capture and act on ``workflow:*`` arguments. See
:ref:`Magic arguments <magic-arguments>` for details.
:type capture_args: :class:`Boolean`
:param libraries: sequence of paths to directories containing
libraries. These paths will be prepended to ``sys.path``.
:type libraries: :class:`tuple` or :class:`list`
:param help_url: URL to webpage where a user can ask for help with
the workflow, report bugs, etc. This could be the GitHub repo
or a page on AlfredForum.com. If your workflow throws an error,
this URL will be displayed in the log and Alfred's debugger. It can
also be opened directly in a web browser with the ``workflow:help``
:ref:`magic argument <magic-arguments>`.
:type help_url: :class:`str`
Attributes:
item_class (class): Class used to generate feedback items.
variables (dict): Top level workflow variables.
"""
# Which class to use to generate feedback items. You probably
# won't want to change this
item_class = Item
def __init__(self, default_settings=None, update_settings=None,
             input_encoding='utf-8', normalization='NFC',
             capture_args=True, libraries=None,
             help_url=None):
    """Create new :class:`Workflow` object.

    See the class docstring for descriptions of the parameters.
    """
    self._default_settings = default_settings or {}
    self._update_settings = update_settings or {}
    self._input_encoding = input_encoding
    self._normalization = normalization
    self._capture_args = capture_args
    self.help_url = help_url
    # Lazily-populated caches backing the corresponding properties;
    # each is computed on first access
    self._workflowdir = None
    self._settings_path = None
    self._settings = None
    self._bundleid = None
    self._debugging = None
    self._name = None
    self._cache_serializer = 'pickle'
    self._data_serializer = 'pickle'
    self._info = None
    self._info_loaded = False
    self._logger = None
    self._items = []
    self._alfred_env = None
    self.variables = {}
    self._rerun = 0
    # Version number of the workflow
    self._version = UNSET
    # Version from last workflow run
    self._last_version_run = UNSET
    # Cache for regex patterns created for filter keys
    self._search_pattern_cache = {}
    #: Prefix for all magic arguments.
    #: The default value is ``workflow:`` so keyword
    #: ``config`` would match user query ``workflow:config``.
    self.magic_prefix = 'workflow:'
    #: Mapping of available magic arguments. The built-in magic
    #: arguments are registered by default. To add your own magic arguments
    #: (or override built-ins), add a key:value pair where the key is
    #: what the user should enter (prefixed with :attr:`magic_prefix`)
    #: and the value is a callable that will be called when the argument
    #: is entered. If you would like to display a message in Alfred, the
    #: function should return a ``str`` string.
    #:
    #: By default, the magic arguments documented
    #: :ref:`here <magic-arguments>` are registered.
    self.magic_arguments = {}
    self._register_default_magic()
    if libraries:
        # User-supplied library paths take precedence over sys.path
        sys.path = libraries + sys.path
    # Get session ID from environment if present
    self._session_id = os.getenv('_WF_SESSION_ID') or None
    if self._session_id:
        self.setvar('_WF_SESSION_ID', self._session_id)
####################################################################
# API methods
####################################################################

# info.plist contents and alfred_* environment variables ----------

@property
def alfred_version(self):
    """Alfred version as :class:`~workflow.update.Version` object."""
    # Deferred import: .update is only loaded when this property is used
    from .update import Version
    return Version(self.alfred_env.get('version'))
@property
def alfred_env(self):
    """Dict of Alfred's environmental variables minus ``alfred_`` prefix.

    .. versionadded:: 1.7

    Exposes the variables Alfred 2.4+ exports, e.g. ``debug``,
    ``preferences``, ``preferences_localhash``, ``theme``,
    ``theme_background``, ``theme_subtext``, ``version``,
    ``version_build``, ``workflow_bundleid``, ``workflow_cache``,
    ``workflow_data``, ``workflow_name``, ``workflow_uid`` and
    ``workflow_version``.

    Values are decoded Unicode strings, except ``debug``,
    ``version_build`` and ``theme_subtext``, which are integers.

    :returns: ``dict`` of Alfred's environmental variables without the
        ``alfred_`` prefix, e.g. ``preferences``, ``workflow_data``.

    """
    if self._alfred_env is not None:
        return self._alfred_env
    # Variable names exported by Alfred (sans their ``alfred_`` prefix)
    names = (
        'debug',
        'preferences',
        'preferences_localhash',
        'theme',
        'theme_background',
        'theme_subtext',
        'version',
        'version_build',
        'workflow_bundleid',
        'workflow_cache',
        'workflow_data',
        'workflow_name',
        'workflow_uid',
        'workflow_version',
    )
    # These variables hold integer values; all others are strings
    integers = ('debug', 'version_build', 'theme_subtext')
    env = {}
    for name in names:
        raw = os.getenv('alfred_' + name, '')
        if not raw:
            # Unset/empty variables are omitted entirely
            continue
        env[name] = int(raw) if name in integers else self.decode(raw)
    self._alfred_env = env
    return self._alfred_env
@property
def info(self):
    """:class:`dict` of ``info.plist`` contents."""
    if not self._info_loaded:
        # Parse info.plist lazily on first access
        self._load_info_plist()
    return self._info
@property
def bundleid(self):
    """Workflow bundle ID from environmental vars or ``info.plist``.

    :returns: bundle ID
    :rtype: ``str``

    """
    if not self._bundleid:
        # Prefer Alfred's environment; fall back to info.plist
        self._bundleid = (self.alfred_env.get('workflow_bundleid')
                          or self.info['bundleid'])
    return self._bundleid
@property
def debugging(self):
    """Whether Alfred's debugger is open.

    :returns: ``True`` if Alfred's debugger is open.
    :rtype: ``bool``

    """
    # alfred_env coerces ``debug`` to an int when the variable is set
    return self.alfred_env.get('debug') == 1
@property
def name(self):
    """Workflow name from Alfred's environmental vars or ``info.plist``.

    :returns: workflow name
    :rtype: ``str``

    """
    if not self._name:
        # Environment takes precedence over info.plist
        raw = self.alfred_env.get('workflow_name') or self.info['name']
        self._name = self.decode(raw)
    return self._name
@property
def version(self):
    """Return the version of the workflow.

    .. versionadded:: 1.9.10

    Get the workflow version from environment variable,
    the ``update_settings`` dict passed on
    instantiation, the ``version`` file located in the workflow's
    root directory or ``info.plist``. Return ``None`` if none
    exists or raise :class:`ValueError` if the version number is
    invalid (i.e. not semantic).

    :returns: Version of the workflow (not Alfred-PyWorkflow)
    :rtype: :class:`~workflow.update.Version` object

    """
    if self._version is UNSET:
        version = None
        # environment variable has priority
        if self.alfred_env.get('workflow_version'):
            version = self.alfred_env['workflow_version']
        # Try `update_settings`
        elif self._update_settings:
            version = self._update_settings.get('version')
        # `version` file
        if not version:
            filepath = self.workflowfile('version')
            if os.path.exists(filepath):
                with open(filepath, 'r') as fileobj:
                    # Strip surrounding whitespace: a trailing newline
                    # in the version file would otherwise be passed to
                    # Version() and break semantic-version parsing
                    version = fileobj.read().strip()
        # info.plist
        if not version:
            version = self.info.get('version')
        if version:
            # Deferred import to avoid loading .update unnecessarily
            from .update import Version
            version = Version(version)
        # Cache result (including None) so the lookup only runs once
        self._version = version
    return self._version
# Workflow utility methods -----------------------------------------

@property
def args(self):
    """Return command line args as normalised unicode.

    Args are decoded and normalised via :meth:`~Workflow.decode`.

    The encoding and normalization are the ``input_encoding`` and
    ``normalization`` arguments passed to :class:`Workflow` (``UTF-8``
    and ``NFC`` are the defaults).

    If :class:`Workflow` is called with ``capture_args=True``
    (the default), :class:`Workflow` will look for certain
    ``workflow:*`` args and, if found, perform the corresponding
    actions and exit the workflow.

    See :ref:`Magic arguments <magic-arguments>` for details.

    """
    msg = None
    args = [self.decode(arg) for arg in sys.argv[1:]]
    # Handle magic args
    if len(args) and self._capture_args:
        for name in self.magic_arguments:
            key = '{0}{1}'.format(self.magic_prefix, name)
            if key in args:
                # Run the magic action; it may return a message to display
                msg = self.magic_arguments[name]()
        if msg:
            self.logger.debug(msg)
            if not sys.stdout.isatty():  # Show message in Alfred
                self.add_item(msg, valid=False, icon=ICON_INFO)
                self.send_feedback()
            # A magic argument was handled: terminate the workflow here
            sys.exit(0)
    return args
@property
def cachedir(self):
    """Path to workflow's cache directory.

    The cache directory is a subdirectory of Alfred's own cache directory
    in ``~/Library/Caches``. The full path in Alfred 4+ is:

    ``~/Library/Caches/com.runningwithcrayons.Alfred/Workflow Data/<bundle id>``

    Returns:
        str: full path to workflow's cache directory

    """
    # Alfred's environment takes precedence over the computed default
    dirpath = (self.alfred_env.get('workflow_cache')
               or self._default_cachedir)
    return self._create(dirpath)
@property
def _default_cachedir(self):
    """Alfred 4's default cache directory."""
    # Bundle ID keeps each workflow's cache separate
    return os.path.join(
        os.path.expanduser(
            '~/Library/Caches/com.runningwithcrayons.Alfred/'
            'Workflow Data/'),
        self.bundleid)
@property
def datadir(self):
    """Path to workflow's data directory.

    The data directory is a subdirectory of Alfred's own data directory in
    ``~/Library/Application Support``. The full path for Alfred 4+ is:

    ``~/Library/Application Support/Alfred/Workflow Data/<bundle id>``

    Returns:
        str: full path to workflow data directory

    """
    # Alfred's environment takes precedence over the computed default
    dirpath = (self.alfred_env.get('workflow_data')
               or self._default_datadir)
    return self._create(dirpath)
@property
def _default_datadir(self):
    """Alfred 4's default data directory."""
    # Bundle ID keeps each workflow's data separate
    return os.path.join(os.path.expanduser(
        '~/Library/Application Support/Alfred/Workflow Data/'),
        self.bundleid)
@property
def workflowdir(self):
    """Path to workflow's root directory (where ``info.plist`` is).

    Returns:
        str: full path to workflow root directory

    Raises:
        IOError: if ``info.plist`` cannot be found in the directory
            tree above either candidate directory.

    """
    if not self._workflowdir:
        # Try the working directory first, then the directory
        # the library is in. CWD will be the workflow root if
        # a workflow is being run in Alfred
        candidates = [
            os.path.abspath(os.getcwd()),
            os.path.dirname(os.path.abspath(os.path.dirname(__file__)))]
        # climb the directory tree until we find `info.plist`
        for dirpath in candidates:
            # Ensure directory path is Unicode
            dirpath = self.decode(dirpath)
            while True:
                if os.path.exists(os.path.join(dirpath, 'info.plist')):
                    self._workflowdir = dirpath
                    break
                elif dirpath == '/':
                    # Reached filesystem root: no `info.plist` found
                    break
                # Check the parent directory
                dirpath = os.path.dirname(dirpath)
            # No need to check other candidates
            if self._workflowdir:
                break
        if not self._workflowdir:
            raise IOError("'info.plist' not found in directory tree")
    return self._workflowdir
@property
def rerun(self):
    """How often (in seconds) Alfred should re-run the Script Filter."""
    return self._rerun

@rerun.setter
def rerun(self, seconds):
    """Interval at which Alfred should re-run the Script Filter.

    Args:
        seconds (int): Interval between runs.
    """
    self._rerun = seconds
@property
def session_id(self):
    """A unique session ID every time the user uses the workflow.

    .. versionadded:: 1.25

    The session ID persists while the user is using this workflow.
    It expires when the user runs a different workflow or closes
    Alfred.

    """
    if not self._session_id:
        from uuid import uuid4
        self._session_id = uuid4().hex
        # Propagate to downstream workflow objects (and back to this
        # script on rerun) via workflow variables
        self.setvar('_WF_SESSION_ID', self._session_id)
    return self._session_id
def setvar(self, name, value, persist=False):
    """Set a "global" workflow variable.

    .. versionchanged:: 1.33

    These variables are always passed to downstream workflow objects.

    If you have set :attr:`rerun`, these variables are also passed
    back to the script when Alfred runs it again.

    Args:
        name (str): Name of variable.
        value (str): Value of variable.
        persist (bool, optional): Also save variable to ``info.plist``?

    """
    self.variables[name] = value
    if persist:
        # Deferred import: .util only needed when persisting
        from .util import set_config
        set_config(name, value, self.bundleid)
        self.logger.debug('saved variable %r with value %r to info.plist',
                          name, value)
def getvar(self, name, default=None):
    """Return value of workflow variable for ``name`` or ``default``.

    Args:
        name (str): Variable name.
        default (None, optional): Value to return if variable is unset.

    Returns:
        str or ``default``: Value of variable if set or ``default``.

    """
    return self.variables.get(name, default)
def cachefile(self, filename):
    """Path to ``filename`` in workflow's cache directory.

    Return absolute path to ``filename`` within your workflow's
    :attr:`cache directory <Workflow.cachedir>`.

    :param filename: basename of file
    :type filename: ``str``
    :returns: full path to file within cache directory
    :rtype: ``str``

    """
    # Accept bytes filenames by decoding them first
    if isinstance(filename, bytes):
        filename = filename.decode('utf8')
    return os.path.join(self.cachedir, filename)
def datafile(self, filename):
    """Path to ``filename`` in workflow's data directory.

    Return absolute path to ``filename`` within your workflow's
    :attr:`data directory <Workflow.datadir>`.

    :param filename: basename of file
    :type filename: ``str``
    :returns: full path to file within data directory
    :rtype: ``str``

    """
    # Accept bytes filenames by decoding them first
    if isinstance(filename, bytes):
        filename = filename.decode('utf8')
    return os.path.join(self.datadir, filename)
def workflowfile(self, filename):
    """Return full path to ``filename`` in workflow's root directory.

    :param filename: basename of file
    :type filename: ``str``
    :returns: full path to file within workflow root directory
    :rtype: ``str``

    """
    # Accept bytes filenames by decoding them first
    if isinstance(filename, bytes):
        filename = filename.decode('utf8')
    return os.path.join(self.workflowdir, filename)
@property
def logfile(self):
    """Path to logfile.

    :returns: path to logfile within workflow's cache directory
    :rtype: ``str``

    """
    # One log file per workflow, named after its bundle ID
    return self.cachefile('%s.log' % self.bundleid)
@property
def logger(self):
    """Logger that logs to both console and a log file.

    If Alfred's debugger is open, log level will be ``DEBUG``,
    else it will be ``INFO``.

    Use :meth:`open_log` to open the log file in Console.

    :returns: an initialised :class:`~logging.Logger`

    """
    if self._logger:
        return self._logger
    # Initialise new logger and optionally handlers
    logger = logging.getLogger('')
    # Only add one set of handlers
    # Exclude from coverage, as pytest will have configured the
    # root logger already
    if not len(logger.handlers):  # pragma: no cover
        fmt = logging.Formatter(
            '%(asctime)s %(filename)s:%(lineno)s'
            ' %(levelname)-8s %(message)s',
            datefmt='%H:%M:%S')
        # Rotating file handler caps the log at 1 MB with one backup
        logfile = logging.handlers.RotatingFileHandler(
            self.logfile,
            maxBytes=1024 * 1024,
            backupCount=1)
        logfile.setFormatter(fmt)
        logger.addHandler(logfile)
        # Also echo log output to the console (Alfred's debugger)
        console = logging.StreamHandler()
        console.setFormatter(fmt)
        logger.addHandler(console)
    if self.debugging:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
    self._logger = logger
    return self._logger
@logger.setter
def logger(self, logger):
    """Set a custom logger to be used instead of the default one.

    :param logger: the logger to use
    :type logger: `~logging.Logger` instance
    """
    # Stored directly; the getter returns it untouched from now on
    self._logger = logger
@property
def settings_path(self):
    """Path to ``settings.json`` within the workflow's data directory.

    :returns: path to ``settings.json`` file
    :rtype: ``str``
    """
    path = self._settings_path
    if not path:
        # Compute once and memoise
        path = self.datafile('settings.json')
        self._settings_path = path
    return path
@property
def settings(self):
    """Return a dictionary subclass that saves itself when changed.

    See :ref:`guide-settings` in the :ref:`user-manual` for more
    information on how to use :attr:`settings` and **important
    limitations** on what it can do.

    :returns: :class:`~workflow.workflow.Settings` instance
        initialised from the data in JSON file at
        :attr:`settings_path` or if that doesn't exist, with the
        ``default_settings`` :class:`dict` passed to
        :class:`Workflow` on instantiation.
    :rtype: :class:`~workflow.workflow.Settings` instance
    """
    # NOTE: deliberately a truthiness test, not ``is None`` — keeps
    # the original re-load behaviour for an empty Settings dict
    if not self._settings:
        self.logger.debug('reading settings from %s', self.settings_path)
        self._settings = Settings(self.settings_path, self._default_settings)
    return self._settings
@property
def cache_serializer(self):
    """Name of the default cache serializer.

    .. versionadded:: 1.8

    This serializer is used by :meth:`cache_data()` and
    :meth:`cached_data()`. See :class:`SerializerManager` for details.

    :returns: serializer name
    :rtype: ``str``
    """
    return self._cache_serializer

@cache_serializer.setter
def cache_serializer(self, serializer_name):
    """Set the default cache serialization format.

    .. versionadded:: 1.8

    The serializer must already be registered with the
    :class:`SerializerManager` at `~workflow.workflow.manager`,
    otherwise a :class:`ValueError` is raised.

    :param serializer_name: name of default serializer to use
    :type serializer_name: ``str``
    """
    if manager.serializer(serializer_name) is None:
        raise ValueError(
            'Unknown serializer : `{0}`. Register your serializer '
            'with `manager` first.'.format(serializer_name))
    self.logger.debug('default cache serializer: %s', serializer_name)
    self._cache_serializer = serializer_name
@property
def data_serializer(self):
    """Name of the default data serializer.

    .. versionadded:: 1.8

    This serializer is used by :meth:`store_data()` and
    :meth:`stored_data()`. See :class:`SerializerManager` for details.

    :returns: serializer name
    :rtype: ``str``
    """
    return self._data_serializer

@data_serializer.setter
def data_serializer(self, serializer_name):
    """Set the default data serialization format.

    .. versionadded:: 1.8

    The serializer must already be registered with the
    :class:`SerializerManager` at `~workflow.workflow.manager`,
    otherwise a :class:`ValueError` is raised.

    :param serializer_name: name of serializer to use by default
    """
    if manager.serializer(serializer_name) is None:
        raise ValueError(
            'Unknown serializer : `{0}`. Register your serializer '
            'with `manager` first.'.format(serializer_name))
    self.logger.debug('default data serializer: %s', serializer_name)
    self._data_serializer = serializer_name
@property
def _session_prefix(self):
"""Filename prefix for current session."""
return '_wfsess-{0}-'.format(self.session_id)
def _mk_session_name(self, name):
"""New cache name/key based on session ID."""
return self._session_prefix + name
def stored_data(self, name):
    """Retrieve data from data directory.

    Returns ``None`` if there are no data stored under ``name``.

    .. versionadded:: 1.8

    :param name: name of datastore
    :returns: deserialised data or ``None`` if datastore doesn't exist
    """
    # The serializer used to write the data is recorded in a hidden
    # metadata file next to the data file (see `store_data()`)
    metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))

    if not os.path.exists(metadata_path):
        self.logger.debug('no data stored for `%s`', name)
        return None

    with open(metadata_path, 'r') as file_obj:
        serializer_name = file_obj.read().strip()

    serializer = manager.serializer(serializer_name)

    if serializer is None:
        raise ValueError(
            'Unknown serializer `{0}`. Register a corresponding '
            'serializer with `manager.register()` '
            'to load this data.'.format(serializer_name))

    self.logger.debug('data `%s` stored as `%s`', name, serializer_name)

    filename = '{0}.{1}'.format(name, serializer_name)
    data_path = self.datafile(filename)

    if not os.path.exists(data_path):
        # Metadata without a data file is stale — clean it up
        self.logger.debug('no data stored: %s', name)
        if os.path.exists(metadata_path):
            os.unlink(metadata_path)

        return None

    with open(data_path, 'rb') as file_obj:
        data = serializer.load(file_obj)

    self.logger.debug('stored data loaded: %s', data_path)

    return data
def store_data(self, name, data, serializer=None):
    """Save data to data directory.

    .. versionadded:: 1.8

    If ``data`` is ``None``, the datastore will be deleted.

    Note that the datastore does NOT support multiple threads.

    :param name: name of datastore
    :param data: object(s) to store. **Note:** some serializers
        can only handle certain types of data.
    :param serializer: name of serializer to use. If no serializer
        is specified, the default will be used. See
        :class:`SerializerManager` for more information.
    :returns: data in datastore or ``None``
    """
    # Ensure deletion is not interrupted by SIGTERM
    @uninterruptible
    def delete_paths(paths):
        """Clear one or more data stores"""
        for path in paths:
            if os.path.exists(path):
                os.unlink(path)
                self.logger.debug('deleted data file: %s', path)

    serializer_name = serializer or self.data_serializer

    # In order for `stored_data()` to be able to load data stored with
    # an arbitrary serializer, yet still have meaningful file extensions,
    # the format (i.e. extension) is saved to an accompanying file
    metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
    filename = '{0}.{1}'.format(name, serializer_name)
    data_path = self.datafile(filename)

    # Refuse to clobber the settings file via the datastore API
    if data_path == self.settings_path:
        raise ValueError(
            'Cannot save data to' +
            '`{0}` with format `{1}`. '.format(name, serializer_name) +
            "This would overwrite Alfred-PyWorkflow's settings file.")

    serializer = manager.serializer(serializer_name)

    if serializer is None:
        raise ValueError(
            'Invalid serializer `{0}`. Register your serializer with '
            '`manager.register()` first.'.format(serializer_name))

    if data is None:  # Delete cached data
        delete_paths((metadata_path, data_path))
        return

    # Ensure write is not interrupted by SIGTERM
    @uninterruptible
    def _store():
        # Save file extension
        with atomic_writer(metadata_path, 'w') as file_obj:
            file_obj.write(serializer_name)

        with atomic_writer(data_path, 'wb') as file_obj:
            serializer.dump(data, file_obj)

    _store()

    self.logger.debug('saved data: %s', data_path)
def cached_data(self, name, data_func=None, max_age=60, session=False):
    """Return cached data under ``name``, regenerating it when stale.

    .. versionadded:: 1.25

    Cached data are returned if they are younger than ``max_age``
    seconds. Otherwise ``data_func`` (when given) is called to
    produce fresh data, which are cached and returned. A ``max_age``
    of 0 returns cached data regardless of age.

    :param name: name of datastore
    :type name: ``str``
    :param data_func: callable that returns fresh data; called when
        the cache has expired or doesn't exist
    :type data_func: ``callable``
    :param max_age: maximum allowable age of cached data in seconds
    :type max_age: ``int``
    :param session: whether to scope the cache to the current session
    :type session: ``bool``
    :returns: cached data, return value of ``data_func`` or ``None``
        if ``data_func`` is not set
    """
    if session:
        name = self._mk_session_name(name)

    serializer = manager.serializer(self.cache_serializer)
    cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
    age = self.cached_data_age(name)

    fresh_enough = max_age == 0 or age < max_age
    if fresh_enough and os.path.exists(cache_path):
        with open(cache_path, 'rb') as file_obj:
            self.logger.debug('loading cached data: %s', cache_path)
            return serializer.load(file_obj)

    if not data_func:
        return None

    # Regenerate, cache and return fresh data
    data = data_func()
    self.cache_data(name, data)
    return data
def clear_session_cache(self, current=False):
    """Remove session data from the cache.

    .. versionadded:: 1.25
    .. versionchanged:: 1.27

    By default, data belonging to the current session won't be
    deleted. Set ``current=True`` to also clear current session.

    Args:
        current (bool, optional): If ``True``, also remove data for
            current session.
    """
    def _is_session_file(filename):
        # Only session cache files are candidates …
        if not filename.startswith('_wfsess-'):
            return False
        # … and the current session is spared unless ``current`` is set
        return current or not filename.startswith(self._session_prefix)

    self.clear_cache(_is_session_file)
def cache_data(self, name, data, session=False):
    """Save ``data`` to the cache under ``name``.

    .. versionadded:: 1.25

    If ``data`` is ``None``, the corresponding cache file is deleted.
    If ``session`` is ``True``, ``name`` is prefixed with
    :attr:`session_id`, scoping the entry to the current session.

    :param name: name of datastore
    :type name: ``str``
    :param data: data to store; may be any object supported by
        the cache serializer
    :type data: ``object``
    :param session: whether to scope the cache to the current session
    :type session: ``bool``
    """
    if session:
        name = self._mk_session_name(name)

    serializer = manager.serializer(self.cache_serializer)
    cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))

    if data is None:
        # ``None`` means "delete this cache entry", if present
        if os.path.exists(cache_path):
            os.unlink(cache_path)
            self.logger.debug('deleted cache file: %s', cache_path)
        return

    with atomic_writer(cache_path, 'wb') as file_obj:
        serializer.dump(data, file_obj)

    self.logger.debug('cached data: %s', cache_path)
@property
def obj(self):
    """Feedback formatted for JSON serialization.

    Returns:
        dict: Data suitable for Alfred 3 feedback.
    """
    feedback = {'items': [item.obj for item in self._items]}
    # Top-level variables and rerun interval are optional keys
    if self.variables:
        feedback['variables'] = self.variables
    if self.rerun:
        feedback['rerun'] = self.rerun
    return feedback
def warn_empty(self, title, subtitle='', icon=None):
    """Add a warning to feedback if there are no items.

    .. versionadded:: 1.31

    Add a "warning" item to Alfred feedback if no other items
    have been added. This is a handy shortcut to prevent Alfred
    from showing its fallback searches, which it does if no
    items are returned.

    Args:
        title (str): Title of feedback item.
        subtitle (str, optional): Subtitle of feedback item.
        icon (str, optional): Icon for feedback item. If not
            specified, ``ICON_WARNING`` is used.

    Returns:
        Item: Newly-created item, or ``None`` if feedback already
        contains items.
    """
    # Nothing to do when real results exist
    if self._items:
        return None
    return self.add_item(title, subtitle, icon=icon or ICON_WARNING)
def cached_data_fresh(self, name, max_age):
    """Whether cache ``name`` is less than ``max_age`` seconds old.

    :param name: name of datastore
    :param max_age: maximum age of data in seconds
    :type max_age: ``int``
    :returns: ``True`` if data is less than ``max_age`` old, else
        ``False``
    """
    age = self.cached_data_age(name)
    # An age of 0 means the cache file does not exist
    return bool(age) and age < max_age
def cached_data_age(self, name):
    """Return age in seconds of cache ``name`` or 0 if cache doesn't exist.

    :param name: name of datastore
    :type name: ``str``
    :returns: age of datastore in seconds
    :rtype: ``int``
    """
    cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
    if os.path.exists(cache_path):
        # Age derived from the file's modification time
        return time.time() - os.stat(cache_path).st_mtime
    return 0
def filter(self, query, items, key=lambda x: x, ascending=False,
           include_score=False, min_score=0, max_results=0,
           match_on=MATCH_ALL, fold_diacritics=True):
    """Fuzzy search filter. Returns list of ``items`` that match ``query``.

    ``query`` is case-insensitive. Any item that does not contain the
    entirety of ``query`` is rejected.

    If ``query`` is an empty string or contains only whitespace,
    all items will match.

    :param query: query to test items against
    :type query: ``str``
    :param items: iterable of items to test
    :type items: ``list`` or ``tuple``
    :param key: function to get comparison key from ``items``.
        Must return a ``str`` string. The default simply returns
        the item.
    :type key: ``callable``
    :param ascending: set to ``True`` to get worst matches first
    :type ascending: ``Boolean``
    :param include_score: Useful for debugging the scoring algorithm.
        If ``True``, results will be a list of tuples
        ``(item, score, rule)``.
    :type include_score: ``Boolean``
    :param min_score: If non-zero, ignore results with a score lower
        than this.
    :type min_score: ``int``
    :param max_results: If non-zero, prune results list to this length.
    :type max_results: ``int``
    :param match_on: Filter option flags. Bitwise-combined list of
        ``MATCH_*`` constants (see below).
    :type match_on: ``int``
    :param fold_diacritics: Convert search keys to ASCII-only
        characters if ``query`` only contains ASCII characters.
    :type fold_diacritics: ``Boolean``
    :returns: list of ``items`` matching ``query`` or list of
        ``(item, score, rule)`` `tuples` if ``include_score`` is ``True``.
        ``rule`` is the ``MATCH_*`` rule that matched the item.
    :rtype: ``list``

    **Matching rules**

    By default, :meth:`filter` uses all of the following flags (i.e.
    :const:`MATCH_ALL`). The tests are always run in the given order:

    1. :const:`MATCH_STARTSWITH`
        Item search key starts with ``query`` (case-insensitive).
    2. :const:`MATCH_CAPITALS`
        The list of capital letters in item search key starts with
        ``query`` (``query`` may be lower-case). E.g., ``of``
        would match ``OmniFocus``, ``gc`` would match ``Google Chrome``.
    3. :const:`MATCH_ATOM`
        Search key is split into "atoms" on non-word characters
        (.,-,' etc.). Matches if ``query`` is one of these atoms
        (case-insensitive).
    4. :const:`MATCH_INITIALS_STARTSWITH`
        Initials are the first characters of the above-described
        "atoms" (case-insensitive).
    5. :const:`MATCH_INITIALS_CONTAIN`
        ``query`` is a substring of the above-described initials.
    6. :const:`MATCH_INITIALS`
        Combination of (4) and (5).
    7. :const:`MATCH_SUBSTRING`
        ``query`` is a substring of item search key (case-insensitive).
    8. :const:`MATCH_ALLCHARS`
        All characters in ``query`` appear in item search key in
        the same order (case-insensitive).
    9. :const:`MATCH_ALL`
        Combination of all the above.

    :const:`MATCH_ALLCHARS` is considerably slower than the other
    tests and provides much less accurate results.

    **Examples:**

    To ignore :const:`MATCH_ALLCHARS` (tends to provide the worst
    matches and is expensive to run), use
    ``match_on=MATCH_ALL ^ MATCH_ALLCHARS``.

    To match only on capitals, use ``match_on=MATCH_CAPITALS``.

    To match only on startswith and substring, use
    ``match_on=MATCH_STARTSWITH | MATCH_SUBSTRING``.

    **Diacritic folding**

    .. versionadded:: 1.3

    If ``fold_diacritics`` is ``True`` (the default), and ``query``
    contains only ASCII characters, non-ASCII characters in search keys
    will be converted to ASCII equivalents (e.g. **ü** -> **u**,
    **ß** -> **ss**, **é** -> **e**).

    See :const:`ASCII_REPLACEMENTS` for all replacements.

    If ``query`` contains non-ASCII characters, search keys will not be
    altered.
    """
    # Empty or whitespace-only query matches everything
    if not query:
        return items

    # Remove preceding/trailing spaces
    query = query.strip()

    if not query:
        return items

    # Use user override if there is one
    fold_diacritics = self.settings.get('__workflow_diacritic_folding',
                                        fold_diacritics)

    results = []

    for item in items:
        skip = False
        score = 0
        # Each whitespace-separated word of the query must match
        words = [s.strip() for s in query.split(' ')]
        value = key(item).strip()
        if value == '':
            continue
        for word in words:
            if word == '':
                continue
            s, rule = self._filter_item(value, word, match_on,
                                        fold_diacritics)

            if not s:  # Skip items that don't match part of the query
                skip = True
            score += s

        if skip:
            continue

        if score:
            # use "reversed" `score` (i.e. highest becomes lowest) and
            # `value` as sort key. This means items with the same score
            # will be sorted in alphabetical not reverse alphabetical order
            # NOTE: ``rule`` here is the rule that matched the *last* word
            results.append(((100.0 / score, value.lower(), score),
                            (item, score, rule)))

    # sort on keys, then discard the keys
    results.sort(reverse=ascending)
    results = [t[1] for t in results]

    if min_score:
        results = [r for r in results if r[1] > min_score]

    if max_results and len(results) > max_results:
        results = results[:max_results]

    # return list of ``(item, score, rule)``
    if include_score:
        return results
    # just return list of items
    return [t[0] for t in results]
def _filter_item(self, value, query, match_on, fold_diacritics):
    """Filter ``value`` against ``query`` using rules ``match_on``.

    :param value: item search key to test
    :param query: search term (lower-cased internally)
    :param match_on: bitwise combination of ``MATCH_*`` flags
    :param fold_diacritics: transliterate ``value`` to ASCII before
        matching (only applied when ``query`` is pure ASCII)
    :returns: ``(score, rule)`` where ``rule`` is the ``MATCH_*``
        constant that matched, or ``(0, None)`` for no match
    """
    query = query.lower()

    # Folding only makes sense when the query itself is ASCII
    if not isascii(query):
        fold_diacritics = False

    if fold_diacritics:
        value = self.fold_to_ascii(value)

    # pre-filter any items that do not contain all characters
    # of ``query`` to save on running several more expensive tests
    if not set(query) <= set(value.lower()):
        return (0, None)

    # item starts with query
    if match_on & MATCH_STARTSWITH and value.lower().startswith(query):
        score = 100.0 - (len(value) / len(query))
        return (score, MATCH_STARTSWITH)

    # query matches capitalised letters in item,
    # e.g. of = OmniFocus
    if match_on & MATCH_CAPITALS:
        initials = ''.join([c for c in value if c in INITIALS])
        if initials.lower().startswith(query):
            score = 100.0 - (len(initials) / len(query))
            return (score, MATCH_CAPITALS)

    # split the item into "atoms", i.e. words separated by
    # spaces or other non-word characters
    if (match_on & MATCH_ATOM or
            match_on & MATCH_INITIALS_CONTAIN or
            match_on & MATCH_INITIALS_STARTSWITH):
        atoms = [s.lower() for s in split_on_delimiters(value)]
        # print('atoms : %s --> %s' % (value, atoms))
        # initials of the atoms
        initials = ''.join([s[0] for s in atoms if s])

        if match_on & MATCH_ATOM:
            # is `query` one of the atoms in item?
            # similar to substring, but scores more highly, as it's
            # a word within the item
            if query in atoms:
                score = 100.0 - (len(value) / len(query))
                return (score, MATCH_ATOM)

        # `query` matches start (or all) of the initials of the
        # atoms, e.g. ``himym`` matches "How I Met Your Mother"
        # *and* "how i met your mother" (the ``capitals`` rule only
        # matches the former)
        if (match_on & MATCH_INITIALS_STARTSWITH and
                initials.startswith(query)):
            score = 100.0 - (len(initials) / len(query))
            return (score, MATCH_INITIALS_STARTSWITH)

        # `query` is a substring of initials, e.g. ``doh`` matches
        # "The Dukes of Hazzard"
        elif (match_on & MATCH_INITIALS_CONTAIN and
                query in initials):
            score = 95.0 - (len(initials) / len(query))
            return (score, MATCH_INITIALS_CONTAIN)

    # `query` is a substring of item
    if match_on & MATCH_SUBSTRING and query in value.lower():
        score = 90.0 - (len(value) / len(query))
        return (score, MATCH_SUBSTRING)

    # finally, assign a score based on how close together the
    # characters in `query` are in item.
    if match_on & MATCH_ALLCHARS:
        search = self._search_for_query(query)
        match = search(value)
        if match:
            # Earlier and tighter matches score higher
            score = 100.0 / ((1 + match.start()) *
                             (match.end() - match.start() + 1))
            return (score, MATCH_ALLCHARS)

    # Nothing matched
    return (0, None)
def _search_for_query(self, query):
if query in self._search_pattern_cache:
return self._search_pattern_cache[query]
# Build pattern: include all characters
pattern = []
for c in query:
# pattern.append('[^{0}]*{0}'.format(re.escape(c)))
pattern.append('.*?{0}'.format(re.escape(c)))
pattern = ''.join(pattern)
search = re.compile(pattern, re.IGNORECASE).search
self._search_pattern_cache[query] = search
return search
def run(self, func, text_errors=False):
    """Call ``func`` to run your workflow.

    :param func: Callable to call with ``self`` (i.e. the :class:`Workflow`
        instance) as first argument.
    :param text_errors: Emit error messages in plain text, not in
        Alfred's XML/JSON feedback format. Use this when you're not
        running Alfred-PyWorkflow in a Script Filter and would like
        to pass the error message to, say, a notification.
    :type text_errors: ``Boolean``

    ``func`` will be called with :class:`Workflow` instance as first
    argument.

    ``func`` should be the main entry point to your workflow.

    Any exceptions raised will be logged and an error message will be
    output to Alfred.

    :returns: ``0`` on success, ``1`` when an exception was caught
    """
    start = time.time()

    # Write to debugger to ensure "real" output starts on a new line
    print('.', file=sys.stderr)

    # Call workflow's entry function/method within a try-except block
    # to catch any errors and display an error message in Alfred
    try:
        if self.version:
            self.logger.debug('---------- %s (%s) ----------',
                              self.name, self.version)
        else:
            self.logger.debug('---------- %s ----------', self.name)

        # Run update check if configured for self-updates.
        # This call has to go in the `run` try-except block, as it will
        # initialise `self.settings`, which will raise an exception
        # if `settings.json` isn't valid.
        if self._update_settings:
            self.check_update()

        # Run workflow's entry function/method
        func(self)

        # Set last version run to current version after a successful
        # run
        self.set_last_version()

    except Exception as err:
        self.logger.exception(err)
        if self.help_url:
            self.logger.info('for assistance, see: %s', self.help_url)

        if not sys.stdout.isatty():  # Show error in Alfred
            if text_errors:
                # Plain-text error, e.g. for a notification
                print(str(err), end='')
            else:
                # Replace any generated feedback with a single error item
                self._items = []
                if self._name:
                    name = self._name
                elif self._bundleid:  # pragma: no cover
                    name = self._bundleid
                else:  # pragma: no cover
                    name = os.path.dirname(__file__)
                self.add_item(f"Error in workflow '{name}'",
                              str(err),
                              icon=ICON_ERROR)
                self.send_feedback()
        return 1

    finally:
        # Always log total runtime, even on error
        self.logger.debug('---------- finished in %0.3fs ----------',
                          time.time() - start)

    return 0
# Alfred feedback methods ------------------------------------------
def add_item(self, title, subtitle='', arg=None, autocomplete=None,
             valid=False, uid=None, icon=None, icontype=None, type=None,
             largetext=None, copytext=None, quicklookurl=None, match=None):
    """Add an item to be output to Alfred.

    :param title: Title shown in Alfred
    :type title: ``str``
    :param subtitle: Subtitle shown in Alfred
    :type subtitle: ``str``
    :param arg: Argument passed by Alfred as ``{query}`` when item is
        actioned
    :type arg: ``str``, ``list`` or ``tuple``
    :param autocomplete: Text expanded in Alfred when item is TABbed
    :type autocomplete: ``str``
    :param valid: Whether or not item can be actioned
    :type valid: ``Boolean``
    :param uid: Used by Alfred to remember/sort items
    :type uid: ``str``
    :param icon: Filename of icon to use
    :type icon: ``str``
    :param icontype: Type of icon. Must be one of ``None`` , ``'filetype'``
        or ``'fileicon'``. Use ``'filetype'`` when ``icon`` is a filetype
        such as ``'public.folder'``. Use ``'fileicon'`` when you wish to
        use the icon of the file specified as ``icon``, e.g.
        ``icon='/Applications/Safari.app', icontype='fileicon'``.
        Leave as `None` if ``icon`` points to an actual
        icon file.
    :type icontype: ``str``
    :param type: Result type. Currently only ``'file'`` is supported
        (by Alfred). This will tell Alfred to enable file actions for
        this item.
    :type type: ``str``
    :param largetext: Text to be displayed in Alfred's large text box
        if user presses CMD+L on item.
    :type largetext: ``str``
    :param copytext: Text to be copied to pasteboard if user presses
        CMD+C on item.
    :type copytext: ``str``
    :param quicklookurl: URL to be displayed using Alfred's Quick Look
        feature (tapping ``SHIFT`` or ``⌘+Y`` on a result).
    :type quicklookurl: ``str``
    :param match: If you have "Alfred filters results" turned on for
        your Script Filter, Alfred (version 3.5 and above)
        will filter against this field, not ``title`` (optional).
    :type match: ``str``
    :returns: Alfred feedback :class:`Item` instance

    See :ref:`icons` for a list of the supported system icons.

    See :meth:`Workflow.add_item() <workflow.Workflow.add_item>` for
    the main documentation and other parameters.

    .. note::

        Although this method returns an :class:`Item` instance, you don't
        need to hold onto it or worry about it. All generated :class:`Item`
        instances are also collected internally and sent to Alfred when
        :meth:`send_feedback` is called.

        The generated :class:`Item` is only returned in case you want to
        edit it or do something with it other than send it to Alfred. E.g.
        use the :meth:`~Item.add_modifier()` method to shown subtitles
        when modifier (CMD, OPT etc.) is pressed.
    """
    # NOTE: ``item_class`` takes ``match`` *before* ``valid``, unlike
    # this method's signature
    item = self.item_class(title, subtitle, arg, autocomplete,
                           match, valid, uid, icon, icontype, type,
                           largetext, copytext, quicklookurl)
    # Add variables to child item
    item.variables.update(self.variables)
    self._items.append(item)
    return item
def send_feedback(self):
    """Print stored items to console/Alfred as JSON."""
    kwargs = {}
    if self.debugging:
        # Pretty-print while Alfred's debugger is open
        kwargs = {'indent': 2, 'separators': (',', ': ')}
    json.dump(self.obj, sys.stdout, **kwargs)
    sys.stdout.flush()
####################################################################
# Updating methods
####################################################################
@property
def first_run(self):
    """Return ``True`` if it's the first time this version has run.

    .. versionadded:: 1.9.10

    Raises a :class:`ValueError` if :attr:`version` isn't set.
    """
    if not self.version:
        raise ValueError('No workflow version set')

    # First run when no previous version was recorded, or it differs
    return (not self.last_version_run
            or self.version != self.last_version_run)
@property
def last_version_run(self):
    """Return version of last version to run (or ``None``).

    .. versionadded:: 1.9.10

    :returns: :class:`~workflow.update.Version` instance
        or ``None``
    """
    # ``UNSET`` sentinel distinguishes "not loaded yet" from ``None``
    if self._last_version_run is UNSET:
        stored = self.settings.get('__workflow_last_version')
        if stored:
            from .update import Version
            stored = Version(stored)
        self._last_version_run = stored

    self.logger.debug('last run version: %s', self._last_version_run)
    return self._last_version_run
def set_last_version(self, version=None):
    """Set :attr:`last_version_run` to current version.

    .. versionadded:: 1.9.10

    :param version: version to store (default is current version)
    :type version: :class:`~workflow.update.Version` instance
        or ``str``
    :returns: ``True`` if version is saved, else ``False``
    """
    if not version and not self.version:
        self.logger.warning(
            "Can't save last version: workflow has no version")
        return False

    # Fall back on the workflow's own version
    version = version or self.version

    if isinstance(version, str):
        from .update import Version
        version = Version(version)

    self.settings['__workflow_last_version'] = str(version)
    self.logger.debug('set last run version: %s', version)
    return True
@property
def update_available(self):
    """Whether an update is available.

    .. versionadded:: 1.9

    See :ref:`guide-updates` in the :ref:`user-manual` for detailed
    information on how to enable your workflow to update itself.

    :returns: ``True`` if an update is available, else ``False``
    """
    # Create a new workflow object to ensure standard serialiser
    # is used (update.py is called without the user's settings)
    status = Workflow().cached_data('__workflow_latest_version',
                                    max_age=0)
    # self.logger.debug('update status: %r', status)
    if not status:
        return False
    return status.get('available') or False
@property
def prereleases(self):
    """Whether workflow should update to pre-release versions.

    .. versionadded:: 1.16

    :returns: ``True`` if pre-releases are enabled with the :ref:`magic
        argument <magic-arguments>` or the ``update_settings`` dict, else
        ``False``.
    """
    # ``update_settings`` takes precedence over the user's setting
    return (bool(self._update_settings.get('prereleases'))
            or self.settings.get('__workflow_prereleases')
            or False)
def check_update(self, force=False):
    """Call update script if it's time to check for a new release.

    .. versionadded:: 1.9

    The update script will be run in the background, so it won't
    interfere with the execution of your workflow.

    See :ref:`guide-updates` in the :ref:`user-manual` for detailed
    information on how to enable your workflow to update itself.

    :param force: Force update check
    :type force: ``Boolean``
    """
    key = '__workflow_latest_version'
    frequency = self._update_settings.get('frequency',
                                          DEFAULT_UPDATE_FREQUENCY)

    # Respect the user's "autoupdate off" magic-argument setting
    if not force and not self.settings.get('__workflow_autoupdate', True):
        self.logger.debug('Auto update turned off by user')
        return

    # Only check when forced or the cached status has expired
    if not force and self.cached_data_fresh(key, frequency * 86400):
        self.logger.debug('update check not due')
        return

    repo = self._update_settings['github_slug']
    version = str(self.version)

    from .background import run_in_background
    cmd = ['/usr/bin/env', 'python3', '-m', 'workflow.update',
           'check', repo, version]
    if self.prereleases:
        cmd.append('--prereleases')
    self.logger.info('checking for update ...')
    run_in_background('__workflow_update_check', cmd)
def start_update(self):
    """Check for update and download and install new workflow file.

    .. versionadded:: 1.9

    See :ref:`guide-updates` in the :ref:`user-manual` for detailed
    information on how to enable your workflow to update itself.

    :returns: ``True`` if an update is available and will be
        installed, else ``False``
    """
    from . import update

    repo = self._update_settings['github_slug']
    version = str(self.version)

    if not update.check_update(repo, version, self.prereleases):
        return False

    from .background import run_in_background

    # BUGFIX: the interpreter and its name must be separate argv
    # elements. A single '/usr/bin/env python3' string is treated as
    # one (non-existent) executable path by subprocess, so the
    # updater never started. This now mirrors `check_update()`.
    cmd = ['/usr/bin/env', 'python3', '-m', 'workflow.update',
           'install', repo, version]
    if self.prereleases:
        cmd.append('--prereleases')
    self.logger.debug('downloading update ...')
    run_in_background('__workflow_update_install', cmd)

    return True
####################################################################
# Keychain password storage methods
####################################################################
def save_password(self, account, password, service=None):
    """Save account credentials to Keychain.

    If the account exists, the old password will first be deleted
    (Keychain throws an error otherwise).

    If something goes wrong, a :class:`KeychainError` exception will
    be raised.

    :param account: name of the account the password is for, e.g.
        "Pinboard"
    :type account: ``str``
    :param password: the password to secure
    :type password: ``str``
    :param service: name of the service; defaults to the workflow's
        bundle ID
    :type service: ``str``
    """
    service = service or self.bundleid

    try:
        self._call_security('add-generic-password', service, account,
                            '-w', password)
        self.logger.debug('saved password : %s:%s', service, account)
    except PasswordExists:
        self.logger.debug('password exists : %s:%s', service, account)
        current_password = self.get_password(account, service)

        if current_password == password:
            self.logger.debug('password unchanged')
            return

        # Keychain can't overwrite in place: delete then re-add
        self.delete_password(account, service)
        self._call_security('add-generic-password', service,
                            account, '-w', password)
        self.logger.debug('save_password : %s:%s', service, account)
def get_password(self, account, service=None):
    """Retrieve the password saved at ``service/account``.

    Raise :class:`PasswordNotFound` exception if password doesn't exist.

    :param account: name of the account the password is for, e.g.
        "Pinboard"
    :type account: ``str``
    :param service: name of the service; defaults to the workflow's
        bundle ID
    :type service: ``str``
    :returns: account password
    :rtype: ``str``
    """
    service = service or self.bundleid
    output = self._call_security('find-generic-password', service,
                                 account, '-g')

    # Parsing of `security` output is adapted from python-keyring
    # by Jason R. Coombs
    # https://pypi.python.org/pypi/keyring
    m = re.search(
        r'password:\s*(?:0x(?P<hex>[0-9A-F]+)\s*)?(?:"(?P<pw>.*)")?',
        output)
    if m is None:
        return None

    groups = m.groupdict()
    encoded = groups.get('hex')
    password = groups.get('pw')
    if encoded:
        # Non-ASCII passwords are reported hex-encoded by `security`
        password = str(binascii.unhexlify(encoded), 'utf-8')
    self.logger.debug('got password : %s:%s', service, account)
    return password
def delete_password(self, account, service=None):
    """Delete the password stored at ``service/account``.

    Raise :class:`PasswordNotFound` if account is unknown.

    :param account: name of the account the password is for, e.g.
        "Pinboard"
    :type account: ``str``
    :param service: name of the service; defaults to the workflow's
        bundle ID
    :type service: ``str``
    """
    service = service or self.bundleid
    self._call_security('delete-generic-password', service, account)
    self.logger.debug('deleted password : %s:%s', service, account)
####################################################################
# Methods for workflow:* magic args
####################################################################

def _register_default_magic(self):
    """Register the built-in magic arguments."""
    # TODO: refactor & simplify
    # Wrap callback and message with callable
    def callback(func, msg):
        def wrapper():
            func()
            return msg
        return wrapper

    self.magic_arguments['delcache'] = callback(self.clear_cache,
                                                'Deleted workflow cache')
    self.magic_arguments['deldata'] = callback(self.clear_data,
                                               'Deleted workflow data')
    self.magic_arguments['delsettings'] = callback(
        self.clear_settings, 'Deleted workflow settings')
    self.magic_arguments['reset'] = callback(self.reset,
                                             'Reset workflow')
    self.magic_arguments['openlog'] = callback(self.open_log,
                                               'Opening workflow log file')
    self.magic_arguments['opencache'] = callback(
        self.open_cachedir, 'Opening workflow cache directory')
    self.magic_arguments['opendata'] = callback(
        self.open_datadir, 'Opening workflow data directory')
    self.magic_arguments['openworkflow'] = callback(
        self.open_workflowdir, 'Opening workflow directory')
    self.magic_arguments['openterm'] = callback(
        self.open_terminal, 'Opening workflow root directory in Terminal')

    # Diacritic folding
    def fold_on():
        self.settings['__workflow_diacritic_folding'] = True
        return 'Diacritics will always be folded'

    def fold_off():
        self.settings['__workflow_diacritic_folding'] = False
        return 'Diacritics will never be folded'

    def fold_default():
        if '__workflow_diacritic_folding' in self.settings:
            del self.settings['__workflow_diacritic_folding']
        return 'Diacritics folding reset'

    self.magic_arguments['foldingon'] = fold_on
    self.magic_arguments['foldingoff'] = fold_off
    self.magic_arguments['foldingdefault'] = fold_default

    # Updates
    def update_on():
        self.settings['__workflow_autoupdate'] = True
        return 'Auto update turned on'

    def update_off():
        self.settings['__workflow_autoupdate'] = False
        return 'Auto update turned off'

    def prereleases_on():
        self.settings['__workflow_prereleases'] = True
        return 'Prerelease updates turned on'

    def prereleases_off():
        self.settings['__workflow_prereleases'] = False
        return 'Prerelease updates turned off'

    def do_update():
        if self.start_update():
            return 'Downloading and installing update ...'
        else:
            return 'No update available'

    self.magic_arguments['autoupdate'] = update_on
    self.magic_arguments['noautoupdate'] = update_off
    self.magic_arguments['prereleases'] = prereleases_on
    self.magic_arguments['noprereleases'] = prereleases_off
    self.magic_arguments['update'] = do_update

    # Help
    def do_help():
        if self.help_url:
            self.open_help()
            return 'Opening workflow help URL in browser'
        else:
            return 'Workflow has no help URL'

    def show_version():
        if self.version:
            return 'Version: {0}'.format(self.version)
        else:
            return 'This workflow has no version number'

    def list_magic():
        """Display all available magic args in Alfred."""
        isatty = sys.stderr.isatty()
        for name in sorted(self.magic_arguments.keys()):
            if name == 'magic':
                continue
            arg = self.magic_prefix + name
            self.logger.debug(arg)
            if not isatty:
                self.add_item(arg, icon=ICON_INFO)
        # Emit the collected items once, after the loop.
        if not isatty:
            self.send_feedback()

    self.magic_arguments['help'] = do_help
    self.magic_arguments['magic'] = list_magic
    self.magic_arguments['version'] = show_version
def clear_cache(self, filter_func=lambda f: True):
    """Delete all files in workflow's :attr:`cachedir`.

    :param filter_func: Callable to determine whether a file should be
        deleted or not. ``filter_func`` is called with the filename
        of each file in the data directory. If it returns ``True``,
        the file will be deleted.
        By default, *all* files will be deleted.
    :type filter_func: ``callable``
    """
    self._delete_directory_contents(self.cachedir, filter_func)

def clear_data(self, filter_func=lambda f: True):
    """Delete all files in workflow's :attr:`datadir`.

    :param filter_func: Callable to determine whether a file should be
        deleted or not. ``filter_func`` is called with the filename
        of each file in the data directory. If it returns ``True``,
        the file will be deleted.
        By default, *all* files will be deleted.
    :type filter_func: ``callable``
    """
    self._delete_directory_contents(self.datadir, filter_func)

def clear_settings(self):
    """Delete workflow's :attr:`settings_path`."""
    if os.path.exists(self.settings_path):
        os.unlink(self.settings_path)
        self.logger.debug('deleted : %r', self.settings_path)

def reset(self):
    """Delete workflow settings, cache and data.

    File :attr:`settings <settings_path>` and directories
    :attr:`cache <cachedir>` and :attr:`data <datadir>` are deleted.
    """
    self.clear_cache()
    self.clear_data()
    self.clear_settings()
def open_log(self):
    """Open :attr:`logfile` in default app (usually Console.app)."""
    subprocess.call(['open', self.logfile])  # nosec

def open_cachedir(self):
    """Open the workflow's :attr:`cachedir` in Finder."""
    subprocess.call(['open', self.cachedir])  # nosec

def open_datadir(self):
    """Open the workflow's :attr:`datadir` in Finder."""
    subprocess.call(['open', self.datadir])  # nosec

def open_workflowdir(self):
    """Open the workflow's :attr:`workflowdir` in Finder."""
    subprocess.call(['open', self.workflowdir])  # nosec

def open_terminal(self):
    """Open a Terminal window at workflow's :attr:`workflowdir`."""
    subprocess.call(['open', '-a', 'Terminal', self.workflowdir])  # nosec

def open_help(self):
    """Open :attr:`help_url` in default browser.

    :returns: status message for the ``help`` magic argument.
    """
    subprocess.call(['open', self.help_url])  # nosec
    return 'Opening workflow help URL in browser'
####################################################################
# Helper methods
####################################################################

def decode(self, text, encoding=None, normalization=None):
    """Return ``text`` as normalised unicode.

    If ``encoding`` and/or ``normalization`` is ``None``, the
    ``input_encoding`` and ``normalization`` parameters passed to
    :class:`Workflow` are used.

    :param text: string
    :type text: encoded or Unicode string. If ``text`` is already a
        Unicode string, it will only be normalised.
    :param encoding: The text encoding to use to decode ``text`` to
        Unicode.
    :type encoding: ``str`` or ``None``
    :param normalization: The normalisation form to apply to ``text``.
    :type normalization: ``str`` or ``None``
    :returns: decoded and normalised ``str``

    :class:`Workflow` uses "NFC" normalisation by default. This is the
    standard for Python and will work well with data from the web (via
    :mod:`~workflow.web` or :mod:`json`).

    macOS, on the other hand, uses "NFD" normalisation (nearly), so data
    coming from the system (e.g. via :mod:`subprocess` or
    :func:`os.listdir`/:mod:`os.path`) may not match. You should either
    normalise this data, too, or change the default normalisation used by
    :class:`Workflow`.
    """
    encoding = encoding or self._input_encoding
    normalization = normalization or self._normalization
    if not isinstance(text, str):
        # NOTE(review): assumes non-str input is bytes-like; other types
        # would raise TypeError here — confirm callers only pass bytes.
        text = str(text, encoding)
    return unicodedata.normalize(normalization, text)

def fold_to_ascii(self, text):
    """Convert non-ASCII characters to closest ASCII equivalent.

    .. versionadded:: 1.3

    .. note:: This only works for a subset of European languages.

    :param text: text to convert
    :type text: ``str``
    :returns: text containing only ASCII characters
    :rtype: ``str``
    """
    if isascii(text):
        return text
    text = ''.join([ASCII_REPLACEMENTS.get(c, c) for c in text])
    return unicodedata.normalize('NFKD', text)

def dumbify_punctuation(self, text):
    """Convert non-ASCII punctuation to closest ASCII equivalent.

    This method replaces "smart" quotes and n- or m-dashes with their
    workaday ASCII equivalents. This method is currently not used
    internally, but exists as a helper method for workflow authors.

    .. versionadded: 1.9.7

    :param text: text to convert
    :type text: ``str``
    :returns: text with only ASCII punctuation
    :rtype: ``str``
    """
    if isascii(text):
        return text
    text = ''.join([DUMB_PUNCTUATION.get(c, c) for c in text])
    return text
def _delete_directory_contents(self, dirpath, filter_func):
    """Delete all files in a directory.

    :param dirpath: path to directory to clear
    :type dirpath: ``str``
    :param filter_func: function to determine whether a file shall be
        deleted or not.
    :type filter_func: ``callable``
    """
    if os.path.exists(dirpath):
        for filename in os.listdir(dirpath):
            if not filter_func(filename):
                continue
            path = os.path.join(dirpath, filename)
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.unlink(path)
            self.logger.debug('deleted : %r', path)

def _load_info_plist(self):
    """Load workflow info from ``info.plist``."""
    # info.plist should be in the directory above this one
    with open(self.workflowfile('info.plist'), 'rb') as fp:
        self._info = plistlib.load(fp)
    self._info_loaded = True

def _create(self, dirpath):
    """Create directory `dirpath` if it doesn't exist.

    :param dirpath: path to directory
    :type dirpath: ``str``
    :returns: ``dirpath`` argument
    :rtype: ``str``
    """
    # exist_ok avoids the check-then-create race of the previous
    # `if not os.path.exists(...)` guard.
    os.makedirs(dirpath, exist_ok=True)
    return dirpath
def _call_security(self, action, service, account, *args):
    """Call ``security`` CLI program that provides access to keychains.

    May raise `PasswordNotFound`, `PasswordExists` or `KeychainError`
    exceptions (the first two are subclasses of `KeychainError`).

    :param action: The ``security`` action to call, e.g.
        ``add-generic-password``
    :type action: ``str``
    :param service: Name of the service.
    :type service: ``str``
    :param account: name of the account the password is for, e.g.
        "Pinboard"
    :type account: ``str``
    :param *args: list of command line arguments to be passed to
        ``security``
    :type *args: `list` or `tuple`
    :returns: output of the ``security`` command, stripped and decoded
    :rtype: ``str``
    """
    cmd = ['security', action, '-s', service, '-a', account] + list(args)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    stdout, _ = p.communicate()
    if p.returncode == 44:  # password does not exist
        raise PasswordNotFound()
    elif p.returncode == 45:  # password already exists
        raise PasswordExists()
    elif p.returncode > 0:
        err = KeychainError('Unknown Keychain error : %s' % stdout)
        err.retcode = p.returncode
        raise err
    return stdout.strip().decode('utf-8')
__all__ = ['Model']

import sys

from odoorpc import error

# Types accepted as a single ID by `_normalize_ids` (browse()).
# Python 2
if sys.version_info[0] < 3:
    NORMALIZED_TYPES = (int, long, str, unicode)  # noqa: F821
# Python >= 3
else:
    NORMALIZED_TYPES = (int, str, bytes)

# Attribute names that field descriptors must never shadow.
FIELDS_RESERVED = ['id', 'ids', '__odoo__', '__osv__', '__data__', 'env']
def _normalize_ids(ids):
    """Normalizes the ids argument for ``browse``.

    Returns a list: ``[]`` for falsy input, ``[ids]`` for a single
    scalar ID, otherwise ``list(ids)``.
    """
    if not ids:
        return []
    # Exact-class membership (not isinstance) — preserves the original
    # behaviour for subclasses such as bool.
    if ids.__class__ in NORMALIZED_TYPES:
        return [ids]
    return list(ids)
class IncrementalRecords(object):
    """A helper class used internally by __iadd__ and __isub__ methods.

    Afterwards, field descriptors can adapt their behaviour when an
    instance of this class is set.
    """

    def __init__(self, tuples):
        # `tuples` is a list of Odoo one2many/many2many command tuples,
        # e.g. [(4, id), (3, id), ...].
        self.tuples = tuples
class MetaModel(type):
    """Define class methods for the :class:`Model` class."""
    _env = None

    def __getattr__(cls, method):
        """Provide a dynamic access to a RPC method."""
        # Underscore names are never proxied to the server.
        if method.startswith('_'):
            return super(MetaModel, cls).__getattr__(method)

        def rpc_method(*args, **kwargs):
            """Return the result of the RPC request."""
            if cls._odoo.config['auto_context'] and 'context' not in kwargs:
                kwargs['context'] = cls.env.context
            result = cls._odoo.execute_kw(cls._name, method, args, kwargs)
            return result

        return rpc_method

    def __repr__(cls):
        return "Model(%r)" % (cls._name)

    @property
    def env(cls):
        """The environment used for this model/recordset."""
        return cls._env


# An intermediate class used to associate the 'MetaModel' metaclass to the
# 'Model' one with a Python 2 and Python 3 compatibility
BaseModel = MetaModel('BaseModel', (), {})
class Model(BaseModel):
    """Base class for all data model proxies.

    .. note::
        All model proxies (based on this class) are generated by an
        :class:`environment <odoorpc.env.Environment>`
        (see the :attr:`odoorpc.ODOO.env` property).

    .. doctest::
        :options: +SKIP

        >>> import odoorpc
        >>> odoo = odoorpc.ODOO('localhost', port=8069)
        >>> odoo.login('db_name', 'admin', 'password')
        >>> User = odoo.env['res.users']
        >>> User
        Model('res.users')

    .. doctest::
        :hide:

        >>> import odoorpc
        >>> odoo = odoorpc.ODOO(HOST, protocol=PROTOCOL, port=PORT)
        >>> odoo.login(DB, USER, PWD)
        >>> User = odoo.env['res.users']
        >>> User
        Model('res.users')

    Use this data model proxy to call any method:

    .. doctest::
        :options: +SKIP

        >>> User.name_get([2])  # Use any methods from the model class
        [[1, 'Mitchell Admin']]

    .. doctest::
        :hide:

        >>> from odoorpc.tools import v
        >>> uid = 1
        >>> if v(VERSION) >= v('12.0'):
        ...     uid = 2
        >>> data = User.name_get([uid])
        >>> 'Admin' in data[0][1]
        True

    Get a recordset:

    .. doctest::
        :options: +SKIP

        >>> user = User.browse(2)
        >>> user.name
        'Mitchell Admin'

    .. doctest::
        :hide:

        >>> from odoorpc.tools import v
        >>> uid = 1
        >>> if v(VERSION) >= v('12.0'):
        ...     uid = 2
        >>> user = User.browse(uid)
        >>> 'Admin' in user.name
        True

    And call any method from it, it will be automatically applied on the
    current record:

    .. doctest::
        :options: +SKIP

        >>> user.name_get()  # No IDs in parameter, the method is applied on the current recordset
        [[1, 'Mitchell Admin']]

    .. doctest::
        :hide:

        >>> data = user.name_get()
        >>> 'Admin' in data[0][1]
        True

    .. warning::

        Excepted the :func:`browse <odoorpc.models.Model.browse>` method,
        method calls are purely dynamic. As long as you know the signature of
        the model method targeted, you will be able to use it
        (see the :ref:`tutorial <tuto-execute-queries>`).
    """
    __metaclass__ = MetaModel  # Python 2 metaclass hook (Py3 uses BaseModel)
    _odoo = None               # odoorpc.ODOO session, set by the environment
    _name = None               # Odoo model name, e.g. 'res.partner'
    _columns = {}              # {field: field object}
def __init__(self):
    """Initialise an empty recordset with per-field value caches."""
    super(Model, self).__init__()
    self._env_local = None
    self._from_record = None
    self._ids = []
    self._values = {}  # {field: {ID: value}}
    self._values_to_write = {}  # {field: {ID: value}}
    for field in self._columns:
        self._values[field] = {}
        self._values_to_write[field] = {}
    # Shadow the classmethods with their recordset variants so that
    # instance calls operate on this recordset.
    self.with_context = self._with_context
    self.with_env = self._with_env
@property
def env(self):
    """The environment used for this model/recordset."""
    # A recordset-local environment (set by `_browse`) wins over the
    # class-level one.
    if self._env_local:
        return self._env_local
    return self.__class__._env

@property
def id(self):
    """ID of the record (or the first ID of a recordset)."""
    return self._ids[0] if self._ids else None

@property
def ids(self):
    """IDs of the recorset."""
    return self._ids
@classmethod
def _browse(cls, env, ids, from_record=None, iterated=None):
    """Create an instance (a recordset) corresponding to `ids` and
    attached to `env`.

    `from_record` parameter is used when the recordset is related to a
    parent record, and as such can take the value of a tuple
    (record, field). This is useful to update the parent record when the
    current recordset is modified.

    `iterated` can take the value of an iterated recordset, and no extra
    RPC queries are made to generate the resulting record (recordset and
    its record share the same values).
    """
    records = cls()
    records._env_local = env
    records._ids = _normalize_ids(ids)
    if iterated:
        # Share the caches of the iterated recordset: no RPC round-trip.
        records._values = iterated._values
        records._values_to_write = iterated._values_to_write
    else:
        records._from_record = from_record
        records._values = {}
        records._values_to_write = {}
        for field in cls._columns:
            records._values[field] = {}
            records._values_to_write[field] = {}
        records._init_values()
    return records

@classmethod
def browse(cls, ids):
    """Browse one or several records (if `ids` is a list of IDs).

    .. doctest::

        >>> odoo.env['res.partner'].browse(1)
        Recordset('res.partner', [1])

    .. doctest::
        :options: +SKIP

        >>> [partner.name for partner in odoo.env['res.partner'].browse([1, 3])]
        ['YourCompany', 'Mitchell Admin']

    .. doctest::
        :hide:

        >>> names = [partner.name for partner in odoo.env['res.partner'].browse([1, 3])]
        >>> 'YourCompany' in names[0]
        True
        >>> 'Admin' in names[1]
        True

    A list of data types returned by such record fields are
    available :ref:`here <fields>`.

    :return: a :class:`Model <odoorpc.models.Model>`
        instance (recordset)
    :raise: :class:`odoorpc.error.RPCError`
    """
    return cls._browse(cls.env, ids)
@classmethod
def with_context(cls, *args, **kwargs):
    """Return a model (or recordset) equivalent to the current model
    (or recordset) attached to an environment with another context.
    The context is taken from the current environment or from the
    positional arguments `args` if given, and modified by `kwargs`.

    Thus, the following two examples are equivalent:

    .. doctest::

        >>> Product = odoo.env['product.product']
        >>> Product.with_context(lang='fr_FR')
        Model('product.product')

    .. doctest::

        >>> context = Product.env.context
        >>> Product.with_context(context, lang='fr_FR')
        Model('product.product')

    This method is very convenient for example to search records
    whatever their active status are (active/inactive):

    .. doctest::

        >>> all_product_ids = Product.with_context(active_test=False).search([])

    Or to update translations of a recordset:

    .. doctest::

        >>> product_en = Product.browse(1)
        >>> product_en.env.lang
        'en_US'
        >>> product_en.name = "My product"  # Update the english translation
        >>> product_fr = product_en.with_context(lang='fr_FR')
        >>> product_fr.env.lang
        'fr_FR'
        >>> product_fr.name = "Mon produit"  # Update the french translation
    """
    context = dict(args[0] if args else cls.env.context, **kwargs)
    return cls.with_env(cls.env(context=context))

def _with_context(self, *args, **kwargs):
    """As the `with_context` class method but for recordset."""
    context = dict(args[0] if args else self.env.context, **kwargs)
    return self.with_env(self.env(context=context))

@classmethod
def with_env(cls, env):
    """Return a model (or recordset) equivalent to the current model
    (or recordset) attached to `env`.
    """
    # Clone the class so assigning `_env` does not mutate the original.
    new_cls = type(cls.__name__, cls.__bases__, dict(cls.__dict__))
    new_cls._env = env
    return new_cls

def _with_env(self, env):
    """As the `with_env` class method but for recordset."""
    return self._browse(env, self._ids)
def _init_values(self, context=None):
    """Retrieve field values from the server.

    May be used to restore the original values in the purpose to cancel
    all changes made.
    """
    if context is None:
        context = self.env.context
    # Get basic fields (no relational ones)
    basic_fields = []
    for field_name in self._columns:
        field = self._columns[field_name]
        if not getattr(field, 'relation', False):
            basic_fields.append(field_name)
    # Fetch values from the server
    if self.ids:
        rows = self.__class__.read(
            self.ids, basic_fields, context=context, load='_classic_write'
        )
        ids_fetched = set()
        for row in rows:
            ids_fetched.add(row['id'])
            for field_name in row:
                if field_name == 'id':
                    continue
                self._values[field_name][row['id']] = row[field_name]
        # Any requested ID missing from the response does not exist.
        ids_in_error = set(self.ids) - ids_fetched
        if ids_in_error:
            raise ValueError(
                "There is no '{model}' record with IDs {ids}.".format(
                    model=self._name, ids=list(ids_in_error)
                )
            )
    # No ID: fields filled with default values
    else:
        default_get = self.__class__.default_get(
            list(self._columns), context=context
        )
        for field_name in self._columns:
            self._values[field_name][None] = default_get.get(
                field_name, False
            )
def __getattr__(self, method):
    """Provide a dynamic access to a RPC *instance* method (which applies
    on the current recordset).

    .. doctest::

        >>> Partner = odoo.env['res.partner']
        >>> Partner.write([1], {'name': 'YourCompany'})  # Class method
        True
        >>> partner = Partner.browse(1)
        >>> partner.write({'name': 'YourCompany'})  # Instance method
        True
    """
    if method.startswith('_'):
        return super(Model, self).__getattr__(method)

    def rpc_method(*args, **kwargs):
        """Return the result of the RPC request."""
        # Prepend the recordset IDs so the server applies the call to it.
        args = tuple([self.ids]) + args
        if self._odoo.config['auto_context'] and 'context' not in kwargs:
            kwargs['context'] = self.env.context
        result = self._odoo.execute_kw(self._name, method, args, kwargs)
        return result

    return rpc_method

def __getitem__(self, key):
    """If `key` is an integer or a slice, return the corresponding record
    selection as a recordset; otherwise delegate to attribute access.
    """
    if isinstance(key, (int, slice)):
        return self._browse(self.env, self._ids[key], iterated=self)
    return getattr(self, key)
def __int__(self):
    """Coerce the recordset to its first record ID."""
    return self.id

def __eq__(self, other):
    # Recordsets compare equal when classes and *first* IDs match.
    return other.__class__ == self.__class__ and self.id == other.id

# Need to explicitly declare '__hash__' in Python 3
# (because '__eq__' is defined)
__hash__ = BaseModel.__hash__

def __ne__(self, other):
    return other.__class__ != self.__class__ or self.id != other.id

def __repr__(self):
    return "Recordset({!r}, {})".format(self._name, self.ids)

def __iter__(self):
    """Return an iterator over `self`."""
    for id_ in self._ids:
        yield self._browse(self.env, id_, iterated=self)

def __nonzero__(self):
    # Python 2 truthiness. Under Python 3 this is ignored and bool()
    # falls back to __len__, which gives the same result for a normal
    # recordset.
    return bool(getattr(self, '_ids', True))

def __len__(self):
    return len(self.ids)
def __iadd__(self, records):
    """Implement ``parent.field += records`` on a relational field.

    Translates the operation into Odoo one2many/many2many command
    tuples: for each record, drop any pending unlink ``(3, id)`` and
    add a link ``(4, id)``.
    """
    if not self._from_record:
        raise error.InternalError("No parent record to update")
    try:
        list(records)
    except TypeError:
        records = [records]
    parent = self._from_record[0]
    field = self._from_record[1]
    updated_values = parent._values_to_write[field.name]
    values = []
    if updated_values.get(parent.id):
        values = updated_values[parent.id][:]  # Copy
    from odoorpc import fields
    for id_ in fields.records2ids(records):
        if (3, id_) in values:
            values.remove((3, id_))
        if (4, id_) not in values:
            values.append((4, id_))
    return IncrementalRecords(values)

def __isub__(self, records):
    """Implement ``parent.field -= records``: inverse of __iadd__."""
    if not self._from_record:
        raise error.InternalError("No parent record to update")
    try:
        list(records)
    except TypeError:
        records = [records]
    parent = self._from_record[0]
    field = self._from_record[1]
    updated_values = parent._values_to_write[field.name]
    values = []
    if updated_values.get(parent.id):
        values = updated_values[parent.id][:]  # Copy
    from odoorpc import fields
    for id_ in fields.records2ids(records):
        if (4, id_) in values:
            values.remove((4, id_))
        if (3, id_) not in values:
            values.append((3, id_))
    # NOTE(review): unlike __iadd__, this returns the raw command list
    # rather than an IncrementalRecords wrapper — presumably handled by
    # the field descriptor; confirm before unifying.
    return values
#import unittest
import common
from common.COM_db_mysql import COM_createdbconnection
from common.COM_httpclient import COM_postrequest
from init.init_value import init_value
from common.COM_util_trans import Trans
class APIrequest(COM_createdbconnection,COM_postrequest):
def cpt_setusername(self,username):
self.username=username
def cpt_createdbconnection(self):
self.dbconnection = COM_createdbconnection()
def cpt_createdbconnection_given(self,host,dbname,user,password):
self.dbconnection = COM_createdbconnection(host,dbname,user,password)
def cpt_getcolumlist(self,tbname):
self.columlist = self.dbconnection.get_collist(tbname)
def cpt_mobilegetcolumlist(self,sql):
self.columlist = self.dbconnection.get_query_collist_custome(sql)
def cpt_getrow(self,tbname,rowindex):
self.row = self.dbconnection.get_query_rowindex(tbname, rowindex)
def cpt_getall(self,tbname):
self.results = self.dbconnection.get_query_data(tbname)
def cpt_getcounts(self,tbname):
self.counts = self.dbconnection.get_query_counts(tbname)
def cpt_getsn(self,tbname,username):
self.sn = self.dbconnection.get_query_sn(tbname,username)
def cpt_getmobilesn(self,tbname):
self.sn = self.dbconnection.get_mobilequery_sn(tbname)
def cpt_getusers(self):
self.users = self.dbconnection.get_query_distinct("view_casesets_users_data","username")
#def cpt_getsn(self,tbname):
# self.snlist = self.dbconnection.
def cpt_getapinames(self):
self.apinames = self.dbconnection.get_query_distinct("view_mobileapi_name","table_name")
def cpt_getusersnew(self,user):
self.users = self.dbconnection.get_query_distinct("view_casesets_users_data","username")
def cpt_getpostdata(self,tbname,rowindex,filterlist):
self.requestdata = self.dbconnection.get_query_requestdata_include(tbname,rowindex,filterlist)
def cpt_getpostdata_filter(self,tbname,rowindex,filterlist):
self.requestdata = self.dbconnection.get_query_requestdata_except(tbname,rowindex,filterlist)
def cpt_createhttpconnection(self,url):
self.httpconnection = COM_postrequest(url)
def cpt_sendhttprequest(self,path,requestdata):
self.response = self.httpconnection.post(path, requestdata)
def cpt_sendhttprequestreturnorderstr(self,path,requestdata):
self.response = self.httpconnection.postreturstronorder(path, requestdata)
def cpt_sendhttprequestmobile(self,path,requestdata):
self.response = self.httpconnection.mobilepost(path, requestdata)
def cpt_responseformattojson(self,response):
self.dictdecoderesponse=self.trans.dictTojson(response)
#self.dictdecode={k.decode("utf8",'ignore'):v.decode("utf8",'ignore') for k,v in self.response.items()}
#print json.dumps(self.response, encoding='UTF-8', ensure_ascii=False)
def cpt_responseformattojsonstr(self,response):
self.dictdecoderesponsestr=str(self.trans.unicodeToStr(response))
def cpt_requestformattojson(self,request):
self.dictdecoderequest=self.trans.dictTojson(request)
def cpt_pickpostdatafromresponse(self,key):
self.dict =self.httpconnection.responsefilter(key)
def cpt_reconstructrequest(self,olddict,plusdict):
self.requestdata = self.trans.dictAdd(olddict, plusdict)
def cpt_strreplace(self,find,replace):
self.dictdecoderesponsestr=self.trans.strreplace(self.dictdecoderesponsestr, find, replace)
def cpt_sort_collist_custome(self,filterlist):
self.sortcollist = self.dbconnection.get_query_collist_custome_sort(self.columlist,filterlist)
def cpt_dosign(self,password):
self.querydosign = self.dbconnection.get_query_MD5(self.sortcollist,self.requestdata,password)
def cpt_close(self):
self.dbconnection.close()
def cpt_api_getresponse(self,tbname,rowindex):
#apitest = TestCase()
#tbname="dh_item_get_20"
init=init_value()
self.cpt_createdbconnection()
self.cpt_getcolumlist(tbname)
#print self.columlist
self.cpt_getrow(tbname, rowindex)
#print self.row
self.cpt_getpostdata(tbname, rowindex, init.include)
self.cpt_createhttpconnection(init.url)
#
self.cpt_sendhttprequest(init.path, self.requestdata)
# '###get token #################'
self.cpt_pickpostdatafromresponse(init.token)
# get the api private request data
self.cpt_getpostdata_filter(tbname, rowindex, init.filter)
# reconstruct the request add timestamp and access_token
self.cpt_reconstructrequest(self.requestdata, self.dict)
# send request and order loads the response to json str
self.cpt_sendhttprequestreturnorderstr(init.path_v, self.requestdata)
# format fake unicode str to str for assertequal
self.cpt_responseformattojsonstr(self.response)
self.cpt_getpostdata(tbname, rowindex, init.chck)
self.chck=self.requestdata[init.chck[0]]
#self.assertEqual(self.requestdata[init.chck[0]],self.dictdecoderesponse,"expect is "+self.requestdata[init.chck[0]]+ "but actual is "+self.dictdecoderesponse)
self.cpt_close()
def cpt_mobileapi_getresponse(self,tbname,rowindex):
tran = Trans()
init=init_value()
sql = "select * from "+tbname
self.cpt_createdbconnection()
self.cpt_mobilegetcolumlist(sql)
#print self.columlist
self.cpt_sort_collist_custome(init.mobilefilter)
#print self.sortcollist
self.cpt_getrow(tbname, rowindex)
print "********"
print self.row
self.chck=self.row[0][init.chck[0]]
#self.chck=tran.jsonToStrOrder(self.chck)
#print '**********'+self.chck
#print type(self.chck)
self.cpt_getpostdata_filter(tbname,rowindex,init.mobilefilter)
self.cpt_dosign(init.mobilepassword)
self.cpt_createhttpconnection(init.mobileurl)
self.cpt_sendhttprequestreturnorderstr(init.mobilepath, self.querydosign)
self.cpt_responseformattojsonstr(self.response)
#print self.dictdecoderesponsestr
'''
print self.response
#print str(self.response)
print type(self.response)
print "-------"
print self.chck
print type(self.chck)
chckstr=self.chck
print type(chckstr)
print chckstr
res = self.response
chckstr = tran.jsonToStrOrder(chckstr)
print chckstr
print type(chckstr)
print "||||||||||||"
print res
resnew = str(res).replace("\r\n","")
#resnew = res.replace("\r\n","")
print resnew
print cmp(chckstr,res)
print (chckstr == res)
#self.cpt_responseformattojsonstr(self.response)
#print self.dictdecoderesponsestr
#print type(self.dictdecoderesponsestr)
#print type(str(self.dictdecoderesponsestr))
#print type(self.dictdecoderesponsestr)
#print type(self.chck)
'''
self.cpt_close()
'''
if __name__ == '__main__':
#sql="select * from dh_item_get_20"
"""
sql="dh_item_get_20"
include=['grant_type','username','password','client_id','client_secret','scope']
filter=['sn','isRun','flag','grant_type','username','password','client_id','client_secret','scope','chck_JSON','chck_JSONMD5']
url="api.dhgate.com"
path="/dop/oauth2/access_token?"
key="access_token"
path_v="/dop/router"
"""
tapirequest = APIrequest()
tapirequest.cpt_mobileapi_getresponse("dhnew_items_get","1")
#tapi = APIrequest()
#tapi.cpt_createdbconnection()
#tapi.cpt_getapinames()
#print tapi.apinames
print "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&"
"""
tapirequest = APIrequest()
tapirequest.cpt_createdbconnection()
print "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&"
tapirequest.cpt_getcolumlist(sql)
print tapirequest.columlist
tapirequest.cpt_getrow(sql, "1")
print tapirequest.row
tapirequest.cpt_getpostdata(sql, "1", include)
print tapirequest.requestdata
#print type(tapirequest.requestdata)
tapirequest.cpt_createhttpconnection(url)
tapirequest.cpt_sendhttprequest(path, tapirequest.requestdata)
print tapirequest.response
print '###get token #################'
tapirequest.cpt_pickpostdatafromresponse(key)
print tapirequest.dict
print '###send request'
tapirequest.cpt_getpostdata_filter(sql, "1", filter)
print tapirequest.requestdata
#tapirequest.cpt_reconstructresponse(tapirequest.requestdata, tapirequest.dict)
#print tapirequest.requestdata
#tapirequest.cpt_sendhttprequest(path_v, tapirequest.requestdata)
#print type(tapirequest.response)
#print tapirequest.response
#tapirequest.cpt_responseformattojson(tapirequest.response)
print"88888888888888888"
tapirequest.cpt_getsn("dh_item_get_20")
print tapirequest.snlist
#print '****'
#print tapirequest.dictdecode
#print tapirequest.requestdata
#print tapirequest.response["message"]
#print tapirequest.response["solution"]
#print type(tapirequest.response["solution"].encode('gbk'))
"""
#print str(tapirequest.response)
print '#################'
''' | PypiClean |
# flask_fomanticui/core.py
# This file was part of Flask-Bootstrap and was modified under the terms of
# its BSD License. Copyright (c) 2013, Marc Brinkmann. All rights reserved.
#
# This file was part of Bootstrap-Flask and was modified under the terms of
# its MIT License. Copyright (c) 2018 Grey Li. All rights reserved.
#
# This file is part of the
# Flask-FomanticUI Project (https://github.com/juniors90/Flask-FomanticUI/).
# Copyright (c) 2021, Ferreira Juan David
# License: MIT
# Full Text: https://github.com/juniors90/Flask-FomanticUI/blob/master/LICENSE
# =============================================================================
# DOCS
# =============================================================================
"""Flask-FomanticUI.
Implementation of Fomantic UI in Flask.
"""
# =============================================================================
# IMPORTS
# =============================================================================
import warnings

from flask import Blueprint, Markup, current_app, url_for

try:  # pragma: no cover
    from wtforms.fields import HiddenField
except ImportError:  # pragma: no cover
    def is_hidden_field_filter(field):
        """Stub used when WTForms is absent; always raises."""
        raise RuntimeError("WTForms is not installed.")
else:
    def is_hidden_field_filter(field):
        """Return True when ``field`` is a WTForms hidden field."""
        return isinstance(field, HiddenField)


# Base URL for CDN-served Fomantic UI / jQuery assets.
cdn_base = "https://cdn.jsdelivr.net/npm"
def raise_helper(message):  # pragma: no cover
    """Raise a :class:`RuntimeError` with ``message`` (Jinja helper)."""
    raise RuntimeError(message)
def get_table_titles(data, primary_key, primary_key_title):
    """Detect and build the table titles tuple from ORM object.

    .. note::
        Currently only support SQLAlchemy.

    :param data: list of ORM rows; titles come from the first row's
        ``__table__.columns``.
    :param primary_key: column name to use as the primary key.
    :param primary_key_title: display title for the primary-key column.
    :returns: list of ``(column_name, display_title)`` tuples
        (``[]`` when ``data`` is empty).
    """
    if not data:
        return []
    titles = []
    for k in data[0].__table__.columns.keys():
        # Skip private/internal columns.
        if not k.startswith("_"):
            titles.append((k, k.replace("_", " ").title()))
    if not titles:
        # Every column was private: surface the primary key instead of
        # raising IndexError on titles[0].
        return [(primary_key, primary_key_title)]
    titles[0] = (primary_key, primary_key_title)
    return titles
# docstr-coverage:excused `no one is reading this anyways`
def link_css_with_sri(url, sri):
return f'<link rel="stylesheet" href="{url}" integrity="{sri}" crossorigin="anonymous">' # noqa: E501
def simple_link_css(url):
    """Render a plain stylesheet ``<link>`` tag (no integrity attribute)."""
    return '<link rel="stylesheet" href="{}">'.format(url)
def scripts_with_sri(url, sri):
    """Render a ``<script>`` tag carrying a Subresource Integrity hash."""
    template = '<script src="{}" integrity="{}" crossorigin="anonymous"></script>'  # noqa: E501
    return template.format(url, sri)
def simple_scripts_js(url):
    """Render a plain ``<script>`` tag."""
    return '<script src="{}"></script>'.format(url)
class _FomanticUI(object):
    """Base extension class for different Fomantic UI versions.

    Subclasses pin the class attributes below to a concrete Fomantic UI /
    jQuery release; this class supplies the Flask integration (blueprint,
    config defaults, Jinja globals) and the CSS/JS tag builders.

    .. versionadded:: 0.0.1
    """

    # Release pins; subclasses override these with concrete versions.
    fomantic_version = None
    jquery_version = None
    # Subresource Integrity hashes for the pinned CDN files (None disables SRI).
    fomantic_css_integrity = None
    fomantic_js_integrity = None
    jquery_integrity = None
    # Directory under this package's static/ folder used when serving locally.
    static_folder = None
    fomantic_css_filename = "semantic.min.css"
    fomantic_js_filename = "semantic.min.js"
    jquery_filename = "jquery.min.js"

    def __init__(self, app=None):
        # Support both direct and deferred (application-factory) initialisation.
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Application factory: set config defaults, register the blueprint
        and expose the extension's helpers as Jinja globals."""
        # default settings
        app.config.setdefault("FOMANTIC_SERVE_LOCAL", False)
        app.config.setdefault("FOMANTIC_BUTTON_STYLE", "primary")
        app.config.setdefault("FOMANTIC_BUTTON_SIZE", "")
        app.config.setdefault("FOMANTIC_ICON_COLOR", None)
        app.config.setdefault("FOMANTIC_ICON_SIZE", None)
        app.config.setdefault(
            "FOMANTIC_ERROR_HEADER", "Action Forbidden"
        )  # noqa: E501
        app.config.setdefault(
            "FOMANTIC_CHECKBOX_HEADER_ERROR", "Action Forbidden"
        )  # noqa: E501
        app.config.setdefault(
            "FOMANTIC_RADIO_HEADER_ERROR", "Action Forbidden"
        )  # noqa: E501
        app.config.setdefault(
            "FOMANTIC_MSG_CATEGORY", None
        )  # change primary by None
        app.config.setdefault("FOMANTIC_TABLE_VIEW_TITLE", "View")
        app.config.setdefault("FOMANTIC_TABLE_EDIT_TITLE", "Edit")
        app.config.setdefault("FOMANTIC_TABLE_DELETE_TITLE", "Delete")
        app.config.setdefault("FOMANTIC_TABLE_NEW_TITLE", "New")
        if not hasattr(app, "extensions"):  # pragma: no cover
            app.extensions = {}
        app.extensions["fomantic"] = self
        # Blueprint serves the bundled assets and the form/table templates.
        blueprint = Blueprint(
            "fomantic",
            __name__,
            static_folder=f"static/{self.static_folder}",
            static_url_path=f"{app.static_url_path}",
            template_folder="templates",
        )
        app.register_blueprint(blueprint)
        app.jinja_env.globals["fomantic"] = self
        app.jinja_env.globals[
            "fomantic_is_hidden_field"
        ] = is_hidden_field_filter
        app.jinja_env.globals["get_table_titles"] = get_table_titles
        app.jinja_env.globals["warn"] = warnings.warn
        app.jinja_env.globals["raise"] = raise_helper
        app.jinja_env.add_extension("jinja2.ext.do")

    def load_css(self, s_version=None, fomantic_sri=None):
        """Load Fomantic's css resources with given version.

        Parameters
        ----------
        s_version : str
            The version of Fomantic UI (defaults to the pinned version).
        fomantic_sri : str
            Subresource Integrity hash; resolved via :meth:`_get_sri`
            when not given.

        Return
        ------
        scripts_cdn : markupsafe.Markup
            The rendered ``<link>`` tag.
        """
        serve_local = current_app.config["FOMANTIC_SERVE_LOCAL"]
        s_version = self.fomantic_version if s_version is None else s_version
        fomantic_sri = self._get_sri("fomantic_css", s_version, fomantic_sri)
        if serve_local:
            # Serve the bundled copy through this extension's blueprint.
            base_path = "css"
            url = url_for(
                "fomantic.static",
                filename=f"{base_path}/{self.fomantic_css_filename}",
            )
        else:
            base_path = cdn_base + f"/fomantic-ui@{s_version}/dist/"
            url = base_path + self.fomantic_css_filename
        if fomantic_sri:
            css = link_css_with_sri(url, fomantic_sri)
        else:
            css = f'<link rel="stylesheet" type="text/css" href="{url}">'
        scripts_cdn = Markup(css)
        return scripts_cdn

    def _get_js_script(self, version, name, sri):
        """Get a ``<script>`` tag for JavaScript resources.

        ``name`` is either ``"fomantic-ui"`` or ``"jquery"``.
        """
        serve_local = current_app.config["FOMANTIC_SERVE_LOCAL"]
        paths = {
            "fomantic-ui": f"{self.fomantic_js_filename}",
            "jquery": f"{self.jquery_filename}",
        }
        if serve_local:
            # NOTE(review): both fomantic-ui and jquery resolve under
            # "js/semantic" when serving locally — confirm assets live there.
            path = "js/semantic"
            url = url_for("fomantic.static", filename=f"{path}/{paths[name]}")
        else:
            url = cdn_base + f"/{name}@{version}/dist/{paths[name]}"
        if sri:
            script_html = scripts_with_sri(url, sri)
        else:
            script_html = simple_scripts_js(url)
        return script_html

    def _get_sri(self, name, version, sri):
        # Resolve the SRI hash for *name*: an explicit *sri* wins; otherwise
        # the class-level hash applies only when the pinned version is being
        # fetched from the CDN (local files get no integrity attribute).
        serve_local = current_app.config["FOMANTIC_SERVE_LOCAL"]
        sris = {
            "fomantic_css": self.fomantic_css_integrity,
            "fomantic_js": self.fomantic_js_integrity,
            "jquery": self.jquery_integrity,
        }
        versions = {
            "fomantic_css": self.fomantic_version,
            "fomantic_js": self.fomantic_version,
            "jquery": self.jquery_version,
        }
        if sri is not None:
            return sri
        if version == versions[name] and serve_local is False:
            return sris[name]
        return None

    def load_js(
        self,
        version=None,
        jq_version=None,  # noqa: C901
        fomantic_sri=None,
        jquery_sri=None,
    ):
        """Load Fomantic UI and other resources with given version.

        Parameters
        ----------
        version : str
            The version of Fomantic UI.
        jq_version : str
            The version of jQuery (for Fomantic UI).
        fomantic_sri : str
            Subresource Integrity for Fomantic UI.
        jquery_sri : str
            Subresource Integrity for jQuery.

        Return
        ------
        scripts_cdn : markupsafe.Markup
            jQuery and Fomantic UI ``<script>`` tags (jQuery first, since
            Fomantic UI depends on it).
        """
        fui_version = self.fomantic_version if version is None else version
        fui_sri = self._get_sri("fomantic_js", fui_version, fomantic_sri)
        fui_js = self._get_js_script(fui_version, "fomantic-ui", fui_sri)
        jq_version = self.jquery_version if jq_version is None else jq_version
        jquery_sri = self._get_sri("jquery", jq_version, jquery_sri)
        jquery = self._get_js_script(jq_version, "jquery", jquery_sri)
        scripts_cdn = Markup(
            f"""{jquery}
            {fui_js}"""
        )
        return scripts_cdn
/NlpToolkit-WordToVec-1.0.8.tar.gz/NlpToolkit-WordToVec-1.0.8/WordToVec/WordToVecParameter.py | class WordToVecParameter:
__layer_size: int
__cbow: bool
__alpha: float
__window: int
__hierarchical_soft_max: bool
__negative_sampling_size: int
__number_of_iterations: int
__seed: int
def __init__(self):
"""
Empty constructor for Word2Vec parameter
"""
self.__alpha = 0.025
self.__cbow = True
self.__hierarchical_soft_max = False
self.__layer_size = 100
self.__negative_sampling_size = 5
self.__number_of_iterations = 3
self.__window = 5
self.__seed = 1
def getLayerSize(self) -> int:
"""
Accessor for layerSize attribute.
RETURNS
-------
int
Size of the word vectors.
"""
return self.__layer_size
def isCbow(self) -> bool:
"""
Accessor for CBow attribute.
RETURNS
-------
bool
True is CBow will be applied, false otherwise.
"""
return self.__cbow
def getAlpha(self) -> float:
"""
Accessor for the alpha attribute.
RETURNS
-------
float
Current learning rate alpha.
"""
return self.__alpha
def getWindow(self) -> int:
"""
Accessor for the window size attribute.
RETURNS
-------
int
Current window size.
"""
return self.__window
def isHierarchicalSoftMax(self) -> bool:
"""
Accessor for the hierarchicalSoftMax attribute.
RETURNS
-------
bool
If hierarchical softmax will be applied, returns true; false otherwise.
"""
return self.__hierarchical_soft_max
def getNegativeSamplingSize(self) -> int:
"""
Accessor for the negativeSamplingSize attribute.
RETURNS
-------
int
Number of negative samples that will be withdrawn.
"""
return self.__negative_sampling_size
def getNumberOfIterations(self) -> int:
"""
Accessor for the numberOfIterations attribute.
RETURNS
-------
int
Number of epochs to train the network.
"""
return self.__number_of_iterations
def getSeed(self) -> int:
"""
Accessor for the seed attribute.
RETURNS
-------
int
Seed to train the network.
"""
return self.__seed
def setLayerSize(self, layerSize: int):
"""
Mutator for the layerSize attribute.
PARAMETERS
----------
layerSize : int
New size of the word vectors.
"""
self.__layer_size = layerSize
def setCbow(self, cbow: bool):
"""
Mutator for cBow attribute
PARAMETERS
----------
cbow : bool
True if CBow applied; false if SkipGram applied.
"""
self.__cbow = cbow
def setAlpha(self, alpha: float):
"""
Mutator for alpha attribute
PARAMETERS
----------
alpha : float
New learning rate.
"""
self.__alpha = alpha
def setWindow(self, window: int):
"""
Mutator for the window size attribute.
PARAMETERS
----------
window : int
New window size.
"""
self.__window = window
def setHierarchialSoftMax(self, hierarchicalSoftMax: bool):
"""
Mutator for the hierarchicalSoftMax attribute.
PARAMETERS
----------
hierarchicalSoftMax : bool
True is hierarchical softMax applied; false otherwise.
"""
self.__hierarchical_soft_max = hierarchicalSoftMax
def setNegativeSamplingSize(self, negativeSamplingSize: int):
"""
Mutator for the negativeSamplingSize attribute.
PARAMETERS
----------
negativeSamplingSize : int
New number of negative instances that will be withdrawn.
"""
self.__negative_sampling_size = negativeSamplingSize
def setNumberOfIterations(self, numberOfIterations: int):
"""
Mutator for the numberOfIterations attribute.
PARAMETERS
----------
numberOfIterations : int
New number of iterations.
"""
self.__number_of_iterations = numberOfIterations
def setSeed(self, seed: int):
"""
Mutator for the seed attribute.
PARAMETERS
----------
seed : int
New seed.
"""
self.__seed = seed | PypiClean |
/Bank_CLI-1.5.tar.gz/Bank_CLI-1.5/Bank/model.py | import json
from chequing import Chequing
from savings import Savings
import random
class Model():
    """Persistence and account logic for the bank CLI.

    Accounts live in ``accounts.json`` as a mapping of account number to a
    record holding an obfuscated PIN, the owner's name and, per account type
    (``Chequing`` / ``Savings``), a balance and a transaction log. Tellers
    are read from ``tellers.json``.
    """

    _NEXT_ACC_NUM = 1000000  # NOTE(review): unused; numbers derive from existing data

    def __init__(self):
        self.filename = 'accounts.json'
        self.accounts = {}
        self.tellers = {}

    def write_file(self):
        """Persist the in-memory accounts mapping to ``self.filename``."""
        with open(self.filename, 'w') as open_f:
            json.dump(self.accounts, open_f)

    def read_file(self):
        """Reload ``self.accounts`` from disk, creating the file if missing."""
        try:
            with open(self.filename, 'r') as open_f:
                # Reset first so a failed load never leaves stale data behind.
                self.accounts = {}
                self.accounts = json.load(open_f)
        except FileNotFoundError:
            self.write_file()

    def read_tellers_file(self):
        """Reload the teller directory from ``tellers.json``."""
        with open('tellers.json', 'r') as open_f:
            self.tellers = json.load(open_f)

    def add_account(self, f_name, l_name, pin):
        """Create a new account and return its account number.

        The PIN is stored obfuscated (``pin * key``); the random key is kept
        in the record so the PIN can be checked later.
        """
        self.read_file()
        if len(self.accounts) == 0:
            acc_num = 1
        else:
            # New account numbers are one more than the current maximum.
            acc_num = max(int(user) for user in self.accounts) + 1
        hashed_pin, key = self._encrypt(pin)
        self.accounts[acc_num] = {
            'PIN': hashed_pin,
            'First Name': f_name.title(),
            'Last Name': l_name.title(),
            'Type': {
                'Chequing': {'Balance': 0, 'Transaction Log': []},
                'Savings': {'Balance': 0, 'Transaction Log': []},
            },
            'key': key,
        }
        self.write_file()
        return acc_num

    def del_account(self, acc_num):
        """Delete an account by number (JSON object keys are strings)."""
        self.read_file()
        del self.accounts[str(acc_num)]
        self.write_file()

    def pin_check(self, acc_num, pin):
        """Return True when *pin* matches the stored (obfuscated) PIN."""
        self.read_file()
        record = self.accounts[acc_num]
        if int(pin) == int(record['PIN']) / int(record['key']):
            return True

    def no_user_check(self, acc_num):
        """Return True when an account with *acc_num* exists."""
        self.read_file()
        if str(acc_num) in self.accounts:
            return True

    def no_teller_check(self, teller):
        """Return True when *teller* exists in the teller directory."""
        self.read_tellers_file()
        if teller in self.tellers:
            return True

    def teller_password_check(self, teller, password):
        """Return True when *password* matches the teller's stored password."""
        self.read_tellers_file()
        if str(password) == str(self.tellers[teller]['Password']):
            return True

    def pin_value_check(self, pin1, pin2):
        """Validate a new PIN pair.

        Returns 1 for a non-numeric value, 0 for a bad length (must be
        4 or 6 digits after int conversion), 2 when the PINs differ and
        3 when they match.
        """
        try:
            pin1 = int(pin1)
            pin2 = int(pin2)
        except ValueError:
            return 1
        if not (self.check_length(4, len(str(pin1)), 6)
                and self.check_length(4, len(str(pin2)), 6)):
            return 0
        return 3 if pin1 == pin2 else 2

    def check_length(self, expected_length, actual_length, expected_length2=None):
        """Return True when *actual_length* equals either expected length."""
        return actual_length == expected_length or actual_length == expected_length2

    def pin_acc_num_check(self, acc_num, pin):
        """Duplicate of :meth:`pin_check`, kept for interface compatibility."""
        return self.pin_check(acc_num, pin)

    @staticmethod
    def _latest_log_entry(account):
        """Format the account object's newest transaction for the JSON log."""
        (action, amount), when = account.transaction_log[0]
        return '{} ${} on {}'.format(action, amount, when)

    def _transact(self, acc_num, amount, name, acc_type, operation):
        """Run ``deposit``/``withdraw`` against one sub-account and persist it.

        Returns True on success, None when the account object refuses the
        operation (mirrors the original per-method behaviour).
        """
        self.read_file()
        account = Chequing(name) if acc_type == 'Chequing' else Savings(name)
        account.acc_bal = self.accounts[acc_num]['Type'][acc_type]['Balance']
        if getattr(account, operation)(amount) == True:  # noqa: E712
            record = self.accounts[acc_num]['Type'][acc_type]
            record['Balance'] = account.acc_bal
            record['Transaction Log'].append(self._latest_log_entry(account))
            self.write_file()
            return True

    def deposit_chequing(self, acc_num, amount, name):
        """Deposit *amount* into the chequing sub-account."""
        return self._transact(acc_num, amount, name, 'Chequing', 'deposit')

    def deposit_savings(self, acc_num, amount, name):
        """Deposit *amount* into the savings sub-account."""
        return self._transact(acc_num, amount, name, 'Savings', 'deposit')

    def withdraw_chequing(self, acc_num, amount, name):
        """Withdraw *amount* from the chequing sub-account."""
        return self._transact(acc_num, amount, name, 'Chequing', 'withdraw')

    def withdraw_savings(self, acc_num, amount, name):
        """Withdraw *amount* from the savings sub-account.

        Bug fix: the original read the *Chequing* balance when withdrawing
        from Savings, so withdrawals were checked against the wrong balance.
        """
        return self._transact(acc_num, amount, name, 'Savings', 'withdraw')

    def no_account_check(self):
        """Return True when no accounts exist yet."""
        self.read_file()
        if len(self.accounts) == 0:
            return True

    def get_balance(self, acc_num, type):
        """Return the balance for the given account *type*."""
        self.read_file()
        return self.accounts[acc_num]['Type'][type]['Balance']

    def get_transaction(self, acc_num, type):
        """Return the transaction log for the given account *type*."""
        self.read_file()
        return self.accounts[acc_num]['Type'][type]['Transaction Log']

    def _encrypt(self, pin):
        """Obfuscate *pin* as ``(pin * key, key)`` with a random key in [17, 42]."""
        rand = random.randint(17, 42)
        hashed_pin = int(pin) * rand
        return (hashed_pin, rand)
if __name__ == '__main__':
    # Ad-hoc manual check: print a sample PIN obfuscation (hashed_pin, key).
    m = Model()
    the_tuple = m._encrypt(1234)
    print("The hashed password is", the_tuple)
/BlueWhale3_Bioinformatics-4.1.32-py3-none-any.whl/orangecontrib/bioinformatics/widgets/OWHomologs.py | import sys
from typing import List, Union, Optional
from AnyQt.QtCore import QSize
from Orange.data import Table, Domain, StringVariable
from Orange.widgets import gui, widget
from Orange.widgets.widget import Msg
from Orange.widgets.settings import Setting
from orangecontrib.bioinformatics.ncbi.gene import Gene, GeneMatcher, load_gene_summary
from orangecontrib.bioinformatics.ncbi.taxonomy import (
COMMON_NAMES_MAPPING,
common_taxid_to_name,
species_name_to_taxid,
)
from orangecontrib.bioinformatics.widgets.utils.data import (
ERROR_ON_MISSING_TAX_ID,
ERROR_ON_MISSING_GENE_ID,
ERROR_ON_MISSING_ANNOTATION,
TableAnnotation,
)
from orangecontrib.bioinformatics.i18n_config import *
def __(key):
    """Translate a widget-local i18n key under this widget's namespace."""
    return i18n.t(f"bioinformatics.owhomologs.{key}")
# Attribute keys attached to output columns/metas for homolog annotations.
HOMOLOG_SYMBOL = "Homolog"
HOMOLOG_ID = "Homolog ID"
class OWHomologs(widget.OWWidget):
    """Orange widget that maps input genes to their homologs in a
    user-selected target organism."""

    # Widget metadata used by the Orange canvas.
    name = __("name")
    icon = 'icons/OWHomologs.svg'
    priority = 120

    class Outputs:
        genes = widget.Output("Genes", Table, label=i18n.t("bioinformatics.common.gene"))

    class Inputs:
        data = widget.Input("Data", Table, label=i18n.t("bioinformatics.common.data"))

    class Warning(widget.OWWidget.Warning):
        # Shown when the input table lacks required gene/taxonomy annotations.
        no_genes = Msg(__("miss_data_input"))
        missing_tax_id = Msg(ERROR_ON_MISSING_TAX_ID)
        mising_gene_as_attribute_name = Msg(ERROR_ON_MISSING_ANNOTATION)
        missing_gene_id = Msg(ERROR_ON_MISSING_GENE_ID)
        mising_gene_id_attribute = Msg(ERROR_ON_MISSING_ANNOTATION)

    want_main_area = False
    auto_commit = Setting(True)
    # Target organism display name remembered between sessions.
    selected_organism: str = Setting('')

    def __init__(self):
        super().__init__()
        # Known organisms: display names and their NCBI taxonomy ids.
        self.taxonomy_names: List[str] = list(COMMON_NAMES_MAPPING.values())
        self.taxonomy_ids: List[str] = list(COMMON_NAMES_MAPPING.keys())
        # Source taxonomy comes from the input data; target from the combo box.
        self.source_tax: Optional[str] = None
        self.target_tax: Optional[str] = None
        self.data: Optional[Table] = None
        info_box = gui.vBox(self.controlArea, __("box_info"))
        self.info_gene_type = gui.widgetLabel(info_box, __("row_no_data_input"))
        self.info_gene_type.setWordWrap(True)
        self.info_gene = gui.widgetLabel(info_box, ' ')
        self.info_gene.setWordWrap(True)
        info_box.setMinimumWidth(200)
        gui.separator(self.controlArea)
        # Combo box index of the currently selected target organism.
        self.combo_box_id = -1
        self.target_organism = gui.comboBox(self.controlArea, self, 'combo_box_id')
        self.target_organism.addItems(self.taxonomy_names)
        self.target_organism.activated[int].connect(self.target_organism_change)
        gui.auto_commit(self.controlArea, self, "auto_commit", __("btn_commit"), __("btn_auto_commit"))
        self.info.set_input_summary("0")
        self.info.set_output_summary("0")

    @Inputs.data
    def set_data(self, data: Table) -> None:
        self.Warning.clear()
        self.data = data
        if self.data:
            # Validate the table annotations; bail out with a warning and a
            # cleared input when any required annotation is missing.
            if TableAnnotation.gene_as_attr_name not in self.data.attributes:
                self.Warning.mising_gene_as_attribute_name()
                self.data = None
                return
            if self.data.attributes[TableAnnotation.gene_as_attr_name]:
                # Genes are column attributes: each column must carry a gene id.
                if TableAnnotation.gene_id_attribute not in self.data.attributes:
                    self.Warning.mising_gene_id_attribute()
                    self.data = None
                    return
            else:
                # Genes are rows: the table needs a tax id and a gene id column.
                if TableAnnotation.tax_id not in self.data.attributes:
                    self.Warning.missing_tax_id()
                    self.data = None
                    return
                if TableAnnotation.gene_id_column not in self.data.attributes:
                    self.Warning.mising_gene_as_attribute_name()
                    self.data = None
                    return
                if self.data.attributes[TableAnnotation.gene_id_column] not in self.data.domain:
                    self.Warning.missing_gene_id()
                    self.data = None
                    return
        else:
            # Input disconnected: reset summaries and clear the output.
            self.info.set_input_summary("0")
            self.info.set_output_summary("0")
            self.info_gene.clear()
            self.info_gene_type.setText(__("row_no_data_input"))
            self.Outputs.genes.send(None)
            return
        self.source_tax = data.attributes[TableAnnotation.tax_id]
        taxonomy = common_taxid_to_name(self.source_tax)
        # Offer every organism except the source one as a mapping target.
        self.target_organism.clear()
        self.target_organism.addItems([tax_name for tax_name in self.taxonomy_names if tax_name != taxonomy])
        if taxonomy == self.selected_organism:
            # The remembered target equals the new source; fall back to the
            # first known organism.
            self.combo_box_id = -1
            self.selected_organism = self.taxonomy_names[0]
            self.target_tax = species_name_to_taxid(self.selected_organism)
        else:
            try:
                self.combo_box_id = self.taxonomy_names.index(self.selected_organism)
            except ValueError:
                self.combo_box_id = -1
        if self.combo_box_id != -1:
            # Restore the remembered selection in the combo box.
            self.target_organism.setCurrentIndex(self.combo_box_id)
            self.selected_organism = self.taxonomy_names[self.combo_box_id]
            self.target_tax = species_name_to_taxid(self.selected_organism)
        else:
            # No valid remembered selection; default to the first entry.
            self.target_organism.setCurrentIndex(0)
            self.selected_organism = self.taxonomy_names[0]
            self.target_tax = species_name_to_taxid(self.selected_organism)
        self.info_gene_type.setText(__("text.organism").format(taxonomy))
        # Gene count: columns when genes are attributes, rows otherwise.
        data_len = (
            len(data.domain.attributes) if self.data.attributes[TableAnnotation.gene_as_attr_name] else len(data)
        )
        self.info_gene.setText(__("text.genes_num").format(data_len))
        self.info.set_input_summary(f"{data_len}")
        self.commit()

    def find_homologs(self, genes: List[Union[str, Gene]]) -> List[Optional[Gene]]:
        # Match the input genes in the source organism, then look up each
        # match's homolog in the currently selected target organism.
        gm = GeneMatcher(self.source_tax)
        gm.genes = genes
        homologs = [g.homolog_gene(taxonomy_id=self.target_tax) for g in gm.genes]
        homologs = load_gene_summary(self.target_tax, homologs)
        return homologs

    def target_organism_change(self, combo_box_id: int) -> None:
        # User picked a new target organism; remember it and recompute.
        self.combo_box_id = combo_box_id
        self.selected_organism = self.target_organism.itemText(combo_box_id)
        self.target_tax = species_name_to_taxid(self.selected_organism)
        self.commit()

    def commit(self):
        if self.data:
            if self.data.attributes[TableAnnotation.gene_as_attr_name]:
                # Genes are columns: annotate each column with its homolog and
                # keep only the columns for which a homolog was found.
                domain = self.data.domain.copy()
                table = self.data.transform(domain)
                gene_loc = table.attributes[TableAnnotation.gene_id_attribute]
                genes = [str(attr.attributes.get(gene_loc, None)) for attr in table.domain.attributes]
                homologs = self.find_homologs(genes)
                for homolog, col in zip(homologs, table.domain.attributes):
                    if homolog:
                        col.attributes[HOMOLOG_SYMBOL] = homolog.symbol
                        col.attributes[HOMOLOG_ID] = homolog.gene_id
                table = table.from_table(
                    Domain(
                        [col for col in table.domain.attributes if HOMOLOG_ID in col.attributes],
                        table.domain.class_vars,
                        table.domain.metas,
                    ),
                    table,
                )
                out_table = table if len(table.domain.attributes) > 0 else None
            else:
                # Genes are rows: append homolog symbol/id meta columns and
                # keep only the rows for which a homolog was found.
                genes, _ = self.data.get_column_view(self.data.attributes[TableAnnotation.gene_id_column])
                homologs = self.find_homologs(genes)
                homolog = StringVariable(HOMOLOG_SYMBOL)
                homolog_id = StringVariable(HOMOLOG_ID)
                domain = Domain(
                    self.data.domain.attributes,
                    self.data.domain.class_vars,
                    self.data.domain.metas + (homolog, homolog_id),
                )
                table = self.data.transform(domain)
                col, _ = table.get_column_view(homolog)
                col[:] = [g.symbol if g else "?" for g in homologs]
                col, _ = table.get_column_view(homolog_id)
                col[:] = [g.gene_id if g else "?" for g in homologs]
                # note: filter out rows with unknown homologs
                table = table[table.get_column_view(homolog_id)[0] != "?"]
                out_table = table if len(table) > 0 else None
            self.info.set_output_summary(f"{len(out_table) if out_table else 0}")
            self.Outputs.genes.send(out_table)
        else:
            self.Outputs.genes.send(None)

    def closeEvent(self, event):
        super().closeEvent(event)

    def sizeHint(self):
        # Preferred initial size of the widget window.
        return QSize(520, 200)
def main(argv=None):
    """Run the widget standalone; return the Qt event-loop exit code."""
    from AnyQt.QtWidgets import QApplication

    application = QApplication(argv or sys.argv)
    homologs_widget = OWHomologs()
    homologs_widget.show()
    homologs_widget.activateWindow()
    exit_code = application.exec_()
    homologs_widget.saveSettings()
    homologs_widget.onDeleteWidget()
    return exit_code
if __name__ == "__main__":
    # Allow launching the widget directly for manual testing.
    sys.exit(main())
/BitGlitter-2.0.0.tar.gz/BitGlitter-2.0.0/bitglitter/__init__.py | from bitglitter.config.defaultdbdata import load_default_db_data
load_default_db_data()
# Core Uses
from bitglitter.write.write import write
from bitglitter.read.read import read
# General Config
from bitglitter.config.configfunctions import clear_stats, output_stats, remove_session, return_settings, \
update_settings
# Read Config
from bitglitter.config.readfunctions import attempt_metadata_decrypt, blacklist_stream_sha256, \
remove_all_blacklist_sha256, remove_all_partial_save_data, remove_blacklist_sha256, remove_partial_save, \
return_all_blacklist_sha256, return_all_read_information, return_single_read_information, return_stream_file_data, \
return_stream_frame_data, return_stream_manifest, return_stream_progress_data, unpackage, update_decrypt_values, \
update_stream_read, verify_is_bitglitter_file
# Palette Config
from bitglitter.config.palettefunctions import add_custom_palette, edit_nickname_to_custom_palette, \
export_palette_base64, generate_sample_frame, import_palette_base64, remove_all_custom_palettes, \
remove_all_custom_palette_nicknames, remove_custom_palette, remove_custom_palette_nickname, return_all_palettes,\
return_custom_palettes, return_default_palettes, return_palette
# Preset Config
from bitglitter.config.presetfunctions import add_new_preset, remove_all_presets, remove_preset, \
return_all_preset_data, return_preset
# ===============================================================================
# The MIT License (MIT)
#
# Copyright (c) 2021 - ∞ Mark Michon
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ===============================================================================
#
# Python library project page:
# https://github.com/MarkMichon1/BitGlitter-Python
#
# Electron App project page:
# https://github.com/MarkMichon1/BitGlitter
#
# Discord server:
# https://discord.gg/t9uv2pZ
#
# Enjoy! :)
#
# - Mark
# =============================================================================== | PypiClean |
/Euphorie-15.0.2.tar.gz/Euphorie-15.0.2/src/euphorie/client/resources/oira/script/chunks/30217.bf78e95f3a8caac48227.min.js | "use strict";(self.webpackChunk_patternslib_patternslib=self.webpackChunk_patternslib_patternslib||[]).push([[30217],{65340:function(n,e,t){var s=t(87537),o=t.n(s),l=t(23645),r=t.n(l)()(o());r.push([n.id,".hljs{display:block;overflow-x:auto;padding:.5em;background:navy}.hljs,.hljs-subst{color:aqua}.hljs-string,.hljs-attribute,.hljs-symbol,.hljs-bullet,.hljs-built_in,.hljs-builtin-name,.hljs-template-tag,.hljs-template-variable,.hljs-addition{color:#ff0}.hljs-keyword,.hljs-selector-tag,.hljs-section,.hljs-type,.hljs-name,.hljs-selector-id,.hljs-selector-class,.hljs-variable{color:#fff}.hljs-comment,.hljs-quote,.hljs-doctag,.hljs-deletion{color:#888}.hljs-number,.hljs-regexp,.hljs-literal,.hljs-link{color:lime}.hljs-meta{color:teal}.hljs-keyword,.hljs-selector-tag,.hljs-title,.hljs-section,.hljs-name,.hljs-strong{font-weight:bold}.hljs-emphasis{font-style:italic}","",{version:3,sources:["webpack://./node_modules/highlight.js/styles/far.css"],names:[],mappings:"AAMA,MACE,aAAA,CACA,eAAA,CACA,YAAA,CACA,eAAA,CAGF,kBAEE,UAAA,CAGF,mJASE,UAAA,CAGF,2HAQE,UAAA,CAGF,sDAIE,UAAA,CAGF,mDAIE,UAAA,CAGF,WACE,UAAA,CAGF,mFAME,gBAAA,CAGF,eACE,iBAAA",sourcesContent:["/*\n\nFAR Style (c) MajestiC <majestic2k@gmail.com>\n\n*/\n\n.hljs {\n display: block;\n overflow-x: auto;\n padding: 0.5em;\n background: #000080;\n}\n\n.hljs,\n.hljs-subst {\n color: #0ff;\n}\n\n.hljs-string,\n.hljs-attribute,\n.hljs-symbol,\n.hljs-bullet,\n.hljs-built_in,\n.hljs-builtin-name,\n.hljs-template-tag,\n.hljs-template-variable,\n.hljs-addition {\n color: #ff0;\n}\n\n.hljs-keyword,\n.hljs-selector-tag,\n.hljs-section,\n.hljs-type,\n.hljs-name,\n.hljs-selector-id,\n.hljs-selector-class,\n.hljs-variable {\n color: #fff;\n}\n\n.hljs-comment,\n.hljs-quote,\n.hljs-doctag,\n.hljs-deletion {\n color: #888;\n}\n\n.hljs-number,\n.hljs-regexp,\n.hljs-literal,\n.hljs-link {\n color: 
#0f0;\n}\n\n.hljs-meta {\n color: #008080;\n}\n\n.hljs-keyword,\n.hljs-selector-tag,\n.hljs-title,\n.hljs-section,\n.hljs-name,\n.hljs-strong {\n font-weight: bold;\n}\n\n.hljs-emphasis {\n font-style: italic;\n}\n"],sourceRoot:""}]),e.Z=r},23645:function(n){n.exports=function(n){var e=[];return e.toString=function(){return this.map((function(e){var t="",s=void 0!==e[5];return e[4]&&(t+="@supports (".concat(e[4],") {")),e[2]&&(t+="@media ".concat(e[2]," {")),s&&(t+="@layer".concat(e[5].length>0?" ".concat(e[5]):""," {")),t+=n(e),s&&(t+="}"),e[2]&&(t+="}"),e[4]&&(t+="}"),t})).join("")},e.i=function(n,t,s,o,l){"string"==typeof n&&(n=[[null,n,void 0]]);var r={};if(s)for(var a=0;a<this.length;a++){var i=this[a][0];null!=i&&(r[i]=!0)}for(var c=0;c<n.length;c++){var u=[].concat(n[c]);s&&r[u[0]]||(void 0!==l&&(void 0===u[5]||(u[1]="@layer".concat(u[5].length>0?" ".concat(u[5]):""," {").concat(u[1],"}")),u[5]=l),t&&(u[2]?(u[1]="@media ".concat(u[2]," {").concat(u[1],"}"),u[2]=t):u[2]=t),o&&(u[4]?(u[1]="@supports (".concat(u[4],") {").concat(u[1],"}"),u[4]=o):u[4]="".concat(o)),e.push(u))}},e}},87537:function(n){n.exports=function(n){var e=n[1],t=n[3];if(!t)return e;if("function"==typeof btoa){var s=btoa(unescape(encodeURIComponent(JSON.stringify(t)))),o="sourceMappingURL=data:application/json;charset=utf-8;base64,".concat(s),l="/*# ".concat(o," */");return[e].concat([l]).join("\n")}return[e].join("\n")}},30217:function(n,e,t){t.r(e);var s=t(93379),o=t.n(s),l=t(7795),r=t.n(l),a=t(3565),i=t.n(a),c=t(19216),u=t.n(c),h=t(44589),f=t.n(h),p=t(65340),d={};d.styleTagTransform=f(),d.setAttributes=i(),d.insert=function(n){var e=document.head.querySelectorAll("*")[0];e?document.head.insertBefore(n,e):document.head.append(n)},d.domAPI=r(),d.insertStyleElement=u();o()(p.Z,d);e.default=p.Z&&p.Z.locals?p.Z.locals:void 0},93379:function(n){var e=[];function t(n){for(var t=-1,s=0;s<e.length;s++)if(e[s].identifier===n){t=s;break}return t}function s(n,s){for(var 
l={},r=[],a=0;a<n.length;a++){var i=n[a],c=s.base?i[0]+s.base:i[0],u=l[c]||0,h="".concat(c," ").concat(u);l[c]=u+1;var f=t(h),p={css:i[1],media:i[2],sourceMap:i[3],supports:i[4],layer:i[5]};if(-1!==f)e[f].references++,e[f].updater(p);else{var d=o(p,s);s.byIndex=a,e.splice(a,0,{identifier:h,updater:d,references:1})}r.push(h)}return r}function o(n,e){var t=e.domAPI(e);t.update(n);return function(e){if(e){if(e.css===n.css&&e.media===n.media&&e.sourceMap===n.sourceMap&&e.supports===n.supports&&e.layer===n.layer)return;t.update(n=e)}else t.remove()}}n.exports=function(n,o){var l=s(n=n||[],o=o||{});return function(n){n=n||[];for(var r=0;r<l.length;r++){var a=t(l[r]);e[a].references--}for(var i=s(n,o),c=0;c<l.length;c++){var u=t(l[c]);0===e[u].references&&(e[u].updater(),e.splice(u,1))}l=i}}},19216:function(n){n.exports=function(n){var e=document.createElement("style");return n.setAttributes(e,n.attributes),n.insert(e,n.options),e}},3565:function(n,e,t){n.exports=function(n){var e=t.nc;e&&n.setAttribute("nonce",e)}},7795:function(n){n.exports=function(n){if("undefined"==typeof document)return{update:function(){},remove:function(){}};var e=n.insertStyleElement(n);return{update:function(t){!function(n,e,t){var s="";t.supports&&(s+="@supports (".concat(t.supports,") {")),t.media&&(s+="@media ".concat(t.media," {"));var o=void 0!==t.layer;o&&(s+="@layer".concat(t.layer.length>0?" ".concat(t.layer):""," {")),s+=t.css,o&&(s+="}"),t.media&&(s+="}"),t.supports&&(s+="}");var l=t.sourceMap;l&&"undefined"!=typeof btoa&&(s+="\n/*# sourceMappingURL=data:application/json;base64,".concat(btoa(unescape(encodeURIComponent(JSON.stringify(l))))," */")),e.styleTagTransform(s,n,e.options)}(e,n,t)},remove:function(){!function(n){if(null===n.parentNode)return!1;n.parentNode.removeChild(n)}(e)}}}},44589:function(n){n.exports=function(n,e){if(e.styleSheet)e.styleSheet.cssText=n;else{for(;e.firstChild;)e.removeChild(e.firstChild);e.appendChild(document.createTextNode(n))}}}}]);
//# sourceMappingURL=30217.bf78e95f3a8caac48227.min.js.map | PypiClean |
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dijit/_MenuBase.js | define("dijit/_MenuBase",["./popup","dojo/window","./_Widget","./_KeyNavContainer","./_TemplatedMixin","dojo/_base/declare","dojo/dom","dojo/dom-attr","dojo/dom-class","dojo/_base/lang","dojo/_base/array"],function(pm,_1,_2,_3,_4,_5,_6,_7,_8,_9,_a){
return _5("dijit._MenuBase",[_2,_4,_3],{parentMenu:null,popupDelay:500,onExecute:function(){
},onCancel:function(){
},_moveToPopup:function(_b){
if(this.focusedChild&&this.focusedChild.popup&&!this.focusedChild.disabled){
this.focusedChild._onClick(_b);
}else{
var _c=this._getTopMenu();
if(_c&&_c._isMenuBar){
_c.focusNext();
}
}
},_onPopupHover:function(){
if(this.currentPopup&&this.currentPopup._pendingClose_timer){
var _d=this.currentPopup.parentMenu;
if(_d.focusedChild){
_d.focusedChild._setSelected(false);
}
_d.focusedChild=this.currentPopup.from_item;
_d.focusedChild._setSelected(true);
this._stopPendingCloseTimer(this.currentPopup);
}
},onItemHover:function(_e){
if(this.isActive){
this.focusChild(_e);
if(this.focusedChild.popup&&!this.focusedChild.disabled&&!this.hover_timer){
this.hover_timer=setTimeout(_9.hitch(this,"_openPopup"),this.popupDelay);
}
}
if(this.focusedChild){
this.focusChild(_e);
}
this._hoveredChild=_e;
},_onChildBlur:function(_f){
this._stopPopupTimer();
_f._setSelected(false);
var _10=_f.popup;
if(_10){
this._stopPendingCloseTimer(_10);
_10._pendingClose_timer=setTimeout(function(){
_10._pendingClose_timer=null;
if(_10.parentMenu){
_10.parentMenu.currentPopup=null;
}
pm.close(_10);
},this.popupDelay);
}
},onItemUnhover:function(_11){
if(this.isActive){
this._stopPopupTimer();
}
if(this._hoveredChild==_11){
this._hoveredChild=null;
}
},_stopPopupTimer:function(){
if(this.hover_timer){
clearTimeout(this.hover_timer);
this.hover_timer=null;
}
},_stopPendingCloseTimer:function(_12){
if(_12._pendingClose_timer){
clearTimeout(_12._pendingClose_timer);
_12._pendingClose_timer=null;
}
},_stopFocusTimer:function(){
if(this._focus_timer){
clearTimeout(this._focus_timer);
this._focus_timer=null;
}
},_getTopMenu:function(){
for(var top=this;top.parentMenu;top=top.parentMenu){
}
return top;
},onItemClick:function(_13,evt){
if(typeof this.isShowingNow=="undefined"){
this._markActive();
}
this.focusChild(_13);
if(_13.disabled){
return false;
}
if(_13.popup){
this._openPopup();
}else{
this.onExecute();
_13.onClick(evt);
}
// _openPopup: open the focused child's submenu, closing any previously open
// sibling popup first; focusing of the new popup is deferred one tick.
},_openPopup:function(){
this._stopPopupTimer();
var _14=this.focusedChild;
if(!_14){
return;
}
var _15=_14.popup;
if(_15.isShowingNow){
return;
}
if(this.currentPopup){
// Only one submenu open at a time: close the old one immediately.
this._stopPendingCloseTimer(this.currentPopup);
pm.close(this.currentPopup);
}
// Remember the parent/opener so hover and close logic can find them later.
_15.parentMenu=this;
_15.from_item=_14;
var _16=this;
pm.open({parent:this,popup:_15,around:_14.domNode,orient:this._orient||["after","before"],onCancel:function(){
// Popup cancelled (e.g. dismissed): restore focus/selection to the opener.
_16.focusChild(_14);
_16._cleanUp();
_14._setSelected(true);
_16.focusedChild=_14;
},onExecute:_9.hitch(this,"_cleanUp")});
this.currentPopup=_15;
// Re-entering the popup with the mouse cancels its pending close.
_15.connect(_15.domNode,"onmouseenter",_9.hitch(_16,"_onPopupHover"));
if(_15.focus){
// setTimeout(..., 0) lets the popup finish opening before it takes focus.
_15._focus_timer=setTimeout(_9.hitch(_15,function(){
this._focus_timer=null;
this.focus();
}),0);
}
}
// _markActive: while active, hovering items opens their popups without a
// click. _8 is presumably dojo/dom-class (replace(node, add, remove)) —
// defined outside this chunk.
},_markActive:function(){
this.isActive=true;
_8.replace(this.domNode,"dijitMenuActive","dijitMenuPassive");
// onOpen: callback fired when this menu is shown as a popup.
},onOpen:function(){
this.isShowingNow=true;
this._markActive();
},_markInactive:function(){
this.isActive=false;
_8.replace(this.domNode,"dijitMenuPassive","dijitMenuActive");
// onClose: callback fired when this popup menu closes; detach from parent.
},onClose:function(){
this._stopFocusTimer();
this._markInactive();
this.isShowingNow=false;
this.parentMenu=null;
// _closeChild: close the open submenu (if any) and clear child selection.
},_closeChild:function(){
this._stopPopupTimer();
if(this.currentPopup){
// If focus is currently inside this menu (activeStack contains our id),
// return keyboard focus to the selected item before closing the popup.
// _a is presumably dojo/_base/array, _7 dojo/dom-attr — confirm upstream.
if(_a.indexOf(this._focusManager.activeStack,this.id)>=0){
_7.set(this.focusedChild.focusNode,"tabIndex",this.tabIndex);
this.focusedChild.focusNode.focus();
}
pm.close(this.currentPopup);
this.currentPopup=null;
}
if(this.focusedChild){
this.focusedChild._setSelected(false);
this.focusedChild._onUnhover();
this.focusedChild=null;
}
}
// _onItemFocus: focus moved to a new item; clear stale hover on the old one.
},_onItemFocus:function(_17){
if(this._hoveredChild&&this._hoveredChild!=_17){
this._hoveredChild._onUnhover();
}
// _onBlur: the menu as a whole lost focus; tear down submenus/selection.
},_onBlur:function(){
this._cleanUp();
this.inherited(arguments);
},_cleanUp:function(){
this._closeChild();
// Only persistent menus (no isShowingNow flag) deactivate here; popup
// menus are deactivated in onClose instead.
if(typeof this.isShowingNow=="undefined"){
this._markInactive();
}
}
}});
}); | PypiClean |
/Flask-Scaffold-0.5.1.tar.gz/Flask-Scaffold-0.5.1/app/templates/static/node_modules/angular-ui-router/src/templateFactory.js | $TemplateFactory.$inject = ['$http', '$templateCache', '$injector'];
function $TemplateFactory($http, $templateCache, $injector) {

  /**
   * @ngdoc function
   * @name ui.router.util.$templateFactory#fromConfig
   * @methodOf ui.router.util.$templateFactory
   *
   * @description
   * Creates a template from a configuration object. Exactly one of
   * `config.template`, `config.templateUrl` or `config.templateProvider`
   * is used, checked in that order.
   *
   * @param {object} config Configuration object for which to load a template.
   * @param {object} params Parameters to pass to the template function.
   * @param {object} locals Locals to pass to `invoke` if the template is
   *   loaded via a `templateProvider`. Defaults to `{ params: params }`.
   *
   * @return {string|object} The template html as a string, or a promise
   *   for that string, or `null` if no template is configured.
   */
  this.fromConfig = function (config, params, locals) {
    if (isDefined(config.template)) {
      return this.fromString(config.template, params);
    }
    if (isDefined(config.templateUrl)) {
      return this.fromUrl(config.templateUrl, params);
    }
    if (isDefined(config.templateProvider)) {
      return this.fromProvider(config.templateProvider, params, locals);
    }
    return null;
  };

  /**
   * @ngdoc function
   * @name ui.router.util.$templateFactory#fromString
   * @methodOf ui.router.util.$templateFactory
   *
   * @description
   * Creates a template from a string or a function returning a string.
   *
   * @param {string|object} template html template as a string, or a function
   *   of `params` that returns one.
   * @param {object} params Parameters to pass to the template function.
   *
   * @return {string|object} The template html as a string, or a promise for
   *   that string.
   */
  this.fromString = function (template, params) {
    if (isFunction(template)) {
      return template(params);
    }
    return template;
  };

  /**
   * @ngdoc function
   * @name ui.router.util.$templateFactory#fromUrl
   * @methodOf ui.router.util.$templateFactory
   *
   * @description
   * Loads a template from a URL via `$http`, caching responses in
   * `$templateCache`.
   *
   * @param {string|Function} url url of the template to load, or a function
   *   of `params` that returns a url.
   * @param {Object} params Parameters to pass to the url function.
   * @return {string|Promise.<string>} A promise for the template html, or
   *   `null` when no url is produced.
   */
  this.fromUrl = function (url, params) {
    if (isFunction(url)) {
      url = url(params);
    }
    if (url == null) {
      return null;
    }
    return $http
      .get(url, { cache: $templateCache, headers: { Accept: 'text/html' } })
      .then(function (response) { return response.data; });
  };

  /**
   * @ngdoc function
   * @name ui.router.util.$templateFactory#fromProvider
   * @methodOf ui.router.util.$templateFactory
   *
   * @description
   * Creates a template by invoking an injectable provider function.
   *
   * @param {Function} provider Function to invoke via `$injector.invoke`.
   * @param {Object} params Parameters for the template.
   * @param {Object} locals Locals to pass to `invoke`; defaults to
   *   `{ params: params }`.
   * @return {string|Promise.<string>} The template html as a string, or a
   *   promise for that string.
   */
  this.fromProvider = function (provider, params, locals) {
    return $injector.invoke(provider, null, locals || { params: params });
  };
}

angular.module('ui.router.util').service('$templateFactory', $TemplateFactory);
/FiPy-3.4.4.tar.gz/FiPy-3.4.4/examples/levelSet/electroChem/trenchMesh.py | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
__docformat__ = 'restructuredtext'
from fipy.tools import numerix
from fipy.variables.cellVariable import CellVariable
from fipy.tools import parallelComm
from .gapFillMesh import GapFillMesh
class TrenchMesh(GapFillMesh):
    """
    The following test case tests for diffusion across the domain.

    >>> cellSize = 0.05e-6
    >>> trenchDepth = 0.5e-6
    >>> boundaryLayerDepth = 50e-6
    >>> domainHeight = 10 * cellSize + trenchDepth + boundaryLayerDepth

    >>> mesh = TrenchMesh(trenchSpacing = 1e-6,
    ...                   cellSize = cellSize,
    ...                   trenchDepth = trenchDepth,
    ...                   boundaryLayerDepth = boundaryLayerDepth,
    ...                   aspectRatio = 1.) # doctest: +GMSH

    >>> import fipy.tools.dump as dump
    >>> (f, filename) = dump.write(mesh) # doctest: +GMSH
    >>> if parallelComm.Nproc == 1:
    ...     mesh = dump.read(filename, f) # doctest: +GMSH

    >>> print(mesh.globalNumberOfCells - len(numerix.nonzero(mesh.electrolyteMask)[0])) # doctest: +GMSH, +SERIAL
    150

    >>> print(400 < mesh.globalNumberOfCells < 800) # doctest: +GMSH
    True

    >>> from fipy.variables.cellVariable import CellVariable
    >>> var = CellVariable(mesh = mesh, value = 0.) # doctest: +GMSH

    >>> from fipy.terms.diffusionTerm import DiffusionTerm
    >>> eq = DiffusionTerm() # doctest: +GMSH

    >>> var.constrain(0., mesh.facesBottom) # doctest: +GMSH
    >>> var.constrain(domainHeight, mesh.facesTop) # doctest: +GMSH

    >>> eq.solve(var) # doctest: +GMSH

    Evaluate the result:

    >>> centers = mesh.cellCenters[1].copy() # doctest: +GMSH

    .. note:: the copy makes the array contiguous for inlining

    >>> localErrors = (centers - var)**2 / centers**2 # doctest: +GMSH
    >>> globalError = numerix.sqrt(numerix.sum(localErrors) / mesh.numberOfCells) # doctest: +GMSH
    >>> argmax = numerix.argmax(localErrors) # doctest: +GMSH

    >>> print(numerix.sqrt(localErrors[argmax]) < 0.051) # doctest: +GMSH
    1

    >>> print(globalError < 0.02) # doctest: +GMSH
    1
    """

    def __init__(self,
                 trenchDepth=None,
                 trenchSpacing=None,
                 boundaryLayerDepth=None,
                 cellSize=None,
                 aspectRatio=None,
                 angle=0.,
                 communicator=parallelComm):
        """
        Build half a trench cross-section as a `GapFillMesh`.

        `trenchDepth` - Depth of the trench.

        `trenchSpacing` - The distance between the trenches.

        `boundaryLayerDepth` - The depth of the hydrodynamic boundary
        layer.

        `cellSize` - The cell Size.

        `aspectRatio` - `trenchDepth` / `trenchWidth`

        `angle` - The angle for the taper of the trench.

        The trench mesh takes the parameters generally used to define
        a trench region and recasts them for the general
        `GapFillMesh`.
        """
        # The finely-resolved band covers a few cells below the trench, the
        # trench itself, and an equal height above it.
        heightBelowTrench = cellSize * 10.
        heightAboveTrench = trenchDepth / 1.

        fineRegionHeight = heightBelowTrench + trenchDepth + heightAboveTrench
        transitionHeight = fineRegionHeight * 3.
        # By symmetry only half of the trench spacing needs to be meshed.
        domainWidth = trenchSpacing / 2.
        domainHeight = heightBelowTrench + trenchDepth + boundaryLayerDepth

        # BUG FIX: the module-level ``parallelComm`` used to be passed here
        # unconditionally, silently ignoring a caller-supplied
        # ``communicator`` argument; forward the parameter instead.
        super(TrenchMesh, self).__init__(cellSize=cellSize,
                                         desiredDomainWidth=domainWidth,
                                         desiredDomainHeight=domainHeight,
                                         desiredFineRegionHeight=fineRegionHeight,
                                         transitionRegionHeight=transitionHeight,
                                         communicator=communicator)

        trenchWidth = trenchDepth / aspectRatio

        # Flag the electrolyte cells: everything above the trench mouth is
        # electrolyte (1); below the trench floor is solid (0); inside the
        # trench band, cells are electrolyte only when they fall within the
        # (optionally tapered) trench opening.
        x, y = self.cellCenters
        Y = (y - (heightBelowTrench + trenchDepth / 2))
        taper = numerix.tan(angle) * Y
        self.electrolyteMask = numerix.where(y > trenchDepth + heightBelowTrench,
                                             1,
                                             numerix.where(y < heightBelowTrench,
                                                           0,
                                                           numerix.where(x > trenchWidth / 2 + taper,
                                                                         0,
                                                                         1)))

    def __getstate__(self):
        # Persist the electrolyte mask alongside the base-class state; the
        # base class does not know about it.
        state = super(TrenchMesh, self).__getstate__()
        state['electrolyteMask'] = self.electrolyteMask
        return state

    def __setstate__(self, state):
        # Pop our extra key before handing the rest to the base class.
        self.electrolyteMask = state['electrolyteMask']
        del state['electrolyteMask']
        super(TrenchMesh, self).__setstate__(state)
def _test():
    """Run this module's doctests through FiPy's doctest runner."""
    from fipy.tests import doctestPlus
    return doctestPlus.testmod()


if __name__ == "__main__":
    _test()
/MindustryCompiler-2.1-py3-none-any.whl/compiler/yacc/grammar/staticFor.py | from compiler import CompilationException
from compiler.yacc.classes.AsmInst import Variable
from ._start import grammar, YaccProduction, context
from boa import boa
from ..classes import Ref, Jump
# ressourceList = [(1, cuivre, @copper), (2, plomb, @lead)]
# for id, ressource, ressourceType in ressourceList
#     if toTakeId == id
# Each reference to a loop variable is replaced with the value given.
# There is no scope for macros; that is the big difference with functions.
@grammar
def staticFor(p: YaccProduction):
    '''lines : For arguments ID liNameOrList OpenCurlyBracket lines CloseCurlyBracket'''
    # Compile-time (unrolled) for-loop: the body is duplicated once per tuple
    # in the list, with each loop variable textually substituted by the
    # corresponding tuple value.  (Renamed the local previously called
    # ``tuple``, which shadowed the builtin.)
    loopVars = p[2]
    tuplesList = p[4]
    originalLines = p[6]

    # Collect the jump labels (refs) appearing in the body: each unrolled
    # copy needs fresh labels so its jumps stay local to that copy.
    refDict = {}
    for line in originalLines:
        if isinstance(line, Ref):
            refDict[line] = None

    # Every tuple must match the number of loop variables.
    for values in tuplesList:
        if len(loopVars) != len(values):
            raise CompilationException("cannot unpack list, el length not equal")

    # Unroll: copy the body once per tuple, remap refs/jumps, substitute vars.
    lines = []
    for values in tuplesList:
        copiedLines = [el.copy() for el in originalLines]
        for k in refDict.keys():  # new refs for this copy
            refDict[k] = context.genRef()
        for line in copiedLines:
            if isinstance(line, Ref):  # change ref
                line.changeRef(refDict[line])
            elif isinstance(line, Jump):
                # Retarget only jumps to labels defined inside the body;
                # jumps out of the loop keep their original target.
                if line.ref in refDict:
                    line.changeRef(refDict[line.ref])
                for toReplace, toReplaceBy in zip(loopVars, values):
                    line.replace(toReplace, toReplaceBy)
            else:
                for toReplace, toReplaceBy in zip(loopVars, values):
                    line.replace(toReplace, toReplaceBy)
        lines += copiedLines
    p[0] = lines
@grammar
def liNameOrList(p: YaccProduction):
    '''liNameOrList : ID
                    | list'''
    # p[1] is either an already-built list value or an identifier naming a
    # static list declared earlier; identifiers resolve via the context.
    liNameOrList = p[1]
    p[0] = liNameOrList if isinstance(liNameOrList, list) else context.staticVarsList[Variable(liNameOrList)]
@grammar
def staticList(p: YaccProduction):
    '''noLine : affectation list'''
    # Declaration of a named compile-time list.  The assignment target
    # (`affectation`) must resolve to exactly one name.
    if len(p[1]) != 1:
        # NOTE(review): "afectation" is a typo in the user-visible message;
        # left unchanged here in case tooling matches on it.
        raise CompilationException("afectation incorrect: {} is not accepted".format(p[1]))
    name = p[1][0]
    val = p[2]
    # Redeclaring a static list is an error.
    if name in context.staticVarsList:
        raise CompilationException("static list named: ⸄{}⸅ alrealy defined".format(name))
    context.staticVarsList[name] = val
@grammar
def staticList_list(p: YaccProduction):
    '''list : OpenBracket tuplesOrInfo CloseBracket'''
    # A list literal: strip the brackets and pass the contents through.
    p[0] = p[2]
@grammar
def tuplesOrInfo(p: YaccProduction):
    '''tuplesOrInfo : tuples
                    | arguments'''
    # Normalize both accepted forms to a list of tuples (lists): a flat
    # argument list becomes a list of one-element lists.
    val = p[1]
    if isinstance(val[0], list):
        p[0] = p[1]
    else:
        p[0] = boa(val).map(lambda el: [el])
@grammar
def tuples_one(p: YaccProduction):
    '''tuples : tuple'''
    # A single tuple starts the tuple list.
    p[0] = [p[1]]


@grammar
def tuples_many(p: YaccProduction):
    '''tuples : tuples Comma tuple'''
    # Append a tuple to an existing tuple list.
    p[0] = p[1] + [p[3]]


@grammar
def tuples_oneComma(p: YaccProduction):
    '''tuples : tuples Comma'''
    # Tolerate a trailing comma after the last tuple.
    p[0] = p[1]


@grammar
def tupleDef(p: YaccProduction):
    '''tuple : OpenParenthesis arguments CloseParenthesis'''
    # A parenthesized argument list forms a tuple.
    p[0] = p[2]
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dijit/form/TextBox.js.uncompressed.js | require({cache:{
'url:dijit/form/templates/TextBox.html':"<div class=\"dijit dijitReset dijitInline dijitLeft\" id=\"widget_${id}\" role=\"presentation\"\n\t><div class=\"dijitReset dijitInputField dijitInputContainer\"\n\t\t><input class=\"dijitReset dijitInputInner\" data-dojo-attach-point='textbox,focusNode' autocomplete=\"off\"\n\t\t\t${!nameAttrSetting} type='${type}'\n\t/></div\n></div>\n"}});
define("dijit/form/TextBox", [
"dojo/_base/declare", // declare
"dojo/dom-construct", // domConstruct.create
"dojo/dom-style", // domStyle.getComputedStyle
"dojo/_base/kernel", // kernel.deprecated
"dojo/_base/lang", // lang.hitch
"dojo/_base/sniff", // has("ie") has("mozilla")
"dojo/_base/window", // win.doc.selection.createRange
"./_FormValueWidget",
"./_TextBoxMixin",
"dojo/text!./templates/TextBox.html",
".." // to export dijit._setSelectionRange, remove in 2.0
], function(declare, domConstruct, domStyle, kernel, lang, has, win,
_FormValueWidget, _TextBoxMixin, template, dijit){
/*=====
var _FormValueWidget = dijit.form._FormValueWidget;
var _TextBoxMixin = dijit.form._TextBoxMixin;
=====*/
// module:
// dijit/form/TextBox
// summary:
// A base class for textbox form inputs
var TextBox = declare(/*====="dijit.form.TextBox", =====*/ [_FormValueWidget, _TextBoxMixin], {
// summary:
// A base class for textbox form inputs
templateString: template,
_singleNodeTemplate: '<input class="dijit dijitReset dijitLeft dijitInputField" data-dojo-attach-point="textbox,focusNode" autocomplete="off" type="${type}" ${!nameAttrSetting} />',
_buttonInputDisabled: has("ie") ? "disabled" : "", // allows IE to disallow focus, but Firefox cannot be disabled for mousedown events
baseClass: "dijitTextBox",
postMixInProperties: function(){
// Hidden/file inputs (and a literal "input" template) need no wrapper
// markup, so swap in the bare single-<input> template.
var type = this.type.toLowerCase();
if(this.templateString && this.templateString.toLowerCase() == "input" || ((type == "hidden" || type == "file") && this.templateString == this.constructor.prototype.templateString)){
this.templateString = this._singleNodeTemplate;
}
this.inherited(arguments);
},
_onInput: function(e){
this.inherited(arguments);
if(this.intermediateChanges){ // _TextBoxMixin uses onInput
var _this = this;
// the setTimeout allows the key to post to the widget input box
setTimeout(function(){ _this._handleOnChange(_this.get('value'), false); }, 0);
}
},
_setPlaceHolderAttr: function(v){
// Emulates placeholder text with an absolutely-positioned <span> laid
// over the input, shown/hidden by _updatePlaceHolder().
this._set("placeHolder", v);
if(!this._phspan){
this._attachPoints.push('_phspan');
// dijitInputField class gives placeHolder same padding as the input field
// parent node already has dijitInputField class but it doesn't affect this <span>
// since it's position: absolute.
this._phspan = domConstruct.create('span',{className:'dijitPlaceHolder dijitInputField'},this.textbox,'after');
}
// Set text via a text node rather than innerHTML so the placeholder
// value is never interpreted as markup.
this._phspan.innerHTML="";
this._phspan.appendChild(document.createTextNode(v));
this._updatePlaceHolder();
},
_updatePlaceHolder: function(){
// Placeholder shows only when one is set, the widget is unfocused, and
// the input is empty.
if(this._phspan){
this._phspan.style.display=(this.placeHolder&&!this.focused&&!this.textbox.value)?"":"none";
}
},
_setValueAttr: function(value, /*Boolean?*/ priorityChange, /*String?*/ formattedValue){
this.inherited(arguments);
this._updatePlaceHolder();
},
getDisplayedValue: function(){
// summary:
// Deprecated. Use get('displayedValue') instead.
// tags:
// deprecated
kernel.deprecated(this.declaredClass+"::getDisplayedValue() is deprecated. Use set('displayedValue') instead.", "", "2.0");
return this.get('displayedValue');
},
setDisplayedValue: function(/*String*/ value){
// summary:
// Deprecated. Use set('displayedValue', ...) instead.
// tags:
// deprecated
kernel.deprecated(this.declaredClass+"::setDisplayedValue() is deprecated. Use set('displayedValue', ...) instead.", "", "2.0");
this.set('displayedValue', value);
},
_onBlur: function(e){
if(this.disabled){ return; }
this.inherited(arguments);
this._updatePlaceHolder();
},
_onFocus: function(/*String*/ by){
if(this.disabled || this.readOnly){ return; }
this.inherited(arguments);
this._updatePlaceHolder();
}
});
if(has("ie")){
// IE-specific subclass: works around INPUT font inheritance and supplies
// TextRange-based selection helpers.
TextBox = declare(/*===== "dijit.form.TextBox.IEMixin", =====*/ TextBox, {
declaredClass: "dijit.form.TextBox", // for user code referencing declaredClass
_isTextSelected: function(){
// True when the document selection is a collapsed caret inside this box.
var range = win.doc.selection.createRange();
var parent = range.parentElement();
return parent == this.textbox && range.text.length == 0;
},
postCreate: function(){
this.inherited(arguments);
// IE INPUT tag fontFamily has to be set directly using STYLE
// the setTimeout gives IE a chance to render the TextBox and to deal with font inheritance
setTimeout(lang.hitch(this, function(){
try{
var s = domStyle.getComputedStyle(this.domNode); // can throw an exception if widget is immediately destroyed
if(s){
var ff = s.fontFamily;
if(ff){
var inputs = this.domNode.getElementsByTagName("INPUT");
if(inputs){
for(var i=0; i < inputs.length; i++){
inputs[i].style.fontFamily = ff;
}
}
}
}
}catch(e){/*when used in a Dialog, and this is called before the dialog is
shown, s.fontFamily would trigger "Invalid Argument" error.*/}
}), 0);
}
});
// Overrides definition of _setSelectionRange from _TextBoxMixin (TODO: move to _TextBoxMixin.js?)
// Emulates setSelectionRange with a TextRange on elements that support
// createTextRange (older IE).
dijit._setSelectionRange = _TextBoxMixin._setSelectionRange = function(/*DomNode*/ element, /*Number?*/ start, /*Number?*/ stop){
if(element.createTextRange){
var r = element.createTextRange();
r.collapse(true);
r.moveStart("character", -99999); // move to 0
r.moveStart("character", start); // delta from 0 is the correct position
r.moveEnd("character", stop-start);
r.select();
}
}
}else if(has("mozilla")){
TextBox = declare(/*===== "dijit.form.TextBox.MozMixin", =====*/TextBox, {
declaredClass: "dijit.form.TextBox", // for user code referencing declaredClass
_onBlur: function(e){
this.inherited(arguments);
if(this.selectOnClick){
// clear selection so that the next mouse click doesn't reselect
this.textbox.selectionStart = this.textbox.selectionEnd = undefined;
}
}
});
}else{
TextBox.prototype.declaredClass = "dijit.form.TextBox";
}
lang.setObject("dijit.form.TextBox", TextBox); // don't do direct assignment, it confuses API doc parser
return TextBox;
});
/LinkPython-0.1.1.tar.gz/LinkPython-0.1.1/modules/link/LICENSE.md | # License
Copyright 2016, Ableton AG, Berlin. All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
If you would like to incorporate Link into a proprietary software application,
please contact <link-devs@ableton.com>.
| PypiClean |
/BuildStream-2.0.1-cp39-cp39-manylinux_2_28_x86_64.whl/buildstream/sandbox/_sandboxreapi.py |
import os
import shlex
from .sandbox import Sandbox, _SandboxFlags, SandboxCommandError, _SandboxBatch
from .. import utils
from .._exceptions import ImplError, SandboxError
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
# SandboxREAPI()
#
# Abstract class providing a skeleton for sandbox implementations based on
# the Remote Execution API.
#
class SandboxREAPI(Sandbox):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Output node properties to request in the Command (e.g. "mtime");
        # forwarded verbatim into Command.output_node_properties.
        self._output_node_properties = kwargs.get("output_node_properties")

    def _run(self, command, *, flags, cwd, env):
        # Stage the virtual filesystem, build an REAPI Action for `command`,
        # have the subclass execute it, then fold the outputs back into the
        # sandbox.  Returns the command's exit code.
        context = self._get_context()
        cascache = context.get_cascache()

        # set up virtual directory
        vdir = self.get_virtual_directory()

        if not self._has_command(command[0], env):
            raise SandboxCommandError(
                "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command"
            )

        # Ensure working directory exists
        if len(cwd) > 1:
            assert cwd.startswith("/")
            vdir.open_directory(cwd[1:], create=True)

        # Ensure directories required for sandboxed execution exist
        for directory in ["dev", "proc", "tmp"]:
            vsubdir = vdir.open_directory(directory, create=True)
            if flags & _SandboxFlags.ROOT_READ_ONLY:
                vsubdir._set_subtree_read_only(False)

        # Create directories for all marked directories. This emulates
        # some of the behaviour of other sandboxes, which create these
        # to use as mount points.
        read_write_directories = []
        mount_sources = self._get_mount_sources()
        for directory in self._get_marked_directories():
            if directory in mount_sources:
                # Bind mount
                mount_point = directory.lstrip(os.path.sep)
                mount_source = mount_sources[directory]

                # Ensure mount point exists in sandbox
                if not vdir.exists(mount_point):
                    if os.path.isdir(mount_source):
                        # Mounting a directory, mount point must be a directory
                        vdir.open_directory(mount_point, create=True)
                    else:
                        # Mounting a file or device node, mount point must be a file
                        split_mount_point = mount_point.rsplit(os.path.sep, 1)
                        parent_vdir = vdir.open_directory(split_mount_point[0], create=True)
                        parent_vdir._create_empty_file(split_mount_point[1])
            else:
                # Read-write directory
                marked_vdir = vdir.open_directory(directory.lstrip(os.path.sep), create=True)
                read_write_directories.append(directory)
                if flags & _SandboxFlags.ROOT_READ_ONLY:
                    marked_vdir._set_subtree_read_only(False)

        if flags & _SandboxFlags.ROOT_READ_ONLY:
            vdir._set_subtree_read_only(True)
        else:
            # The whole sandbox is writable
            read_write_directories = [os.path.sep]

        # Generate Action proto
        input_root_digest = vdir._get_digest()
        platform = self._create_platform(flags)
        command_proto = self._create_command(command, cwd, env, read_write_directories, platform)
        command_digest = cascache.add_object(buffer=command_proto.SerializeToString())
        action = remote_execution_pb2.Action(
            command_digest=command_digest, input_root_digest=input_root_digest, platform=platform
        )

        # Subclasses implement the actual (local or remote) execution.
        action_result = self._execute_action(action, flags)  # pylint: disable=assignment-from-no-return

        # Get output of build
        self._process_job_output(
            cwd, action_result.output_directories, action_result.output_files, failure=action_result.exit_code != 0
        )

        # Non-zero exit code means a normal error during the build:
        # the remote execution system has worked correctly but the command failed.
        return action_result.exit_code

    def _create_platform(self, flags):
        # Build the REAPI Platform message describing OS/arch, uid/gid and
        # network access, filtered to what the implementation supports.
        config = self._get_config()

        platform_dict = {}

        platform_dict["OSFamily"] = config.build_os
        platform_dict["ISA"] = config.build_arch

        if flags & _SandboxFlags.INHERIT_UID:
            uid = os.geteuid()
            gid = os.getegid()
        else:
            uid = config.build_uid
            gid = config.build_gid
        if uid is not None:
            platform_dict["unixUID"] = str(uid)
        if gid is not None:
            platform_dict["unixGID"] = str(gid)

        if flags & _SandboxFlags.NETWORK_ENABLED:
            platform_dict["network"] = "on"

        # Remove unsupported platform properties from the dict
        supported_properties = self._supported_platform_properties()
        platform_dict = {key: value for (key, value) in platform_dict.items() if key in supported_properties}

        # Create Platform message with properties sorted by name in code point order
        platform = remote_execution_pb2.Platform()
        for key, value in sorted(platform_dict.items()):
            platform.properties.add(name=key, value=value)

        return platform

    def _create_command(self, command, working_directory, environment, read_write_directories, platform):
        # Creates a command proto
        environment_variables = [
            remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v) for (k, v) in environment.items()
        ]

        # Request read-write directories as output
        output_directories = [os.path.relpath(dir, start=working_directory) for dir in read_write_directories]

        return remote_execution_pb2.Command(
            arguments=command,
            working_directory=working_directory[1:],
            environment_variables=environment_variables,
            output_paths=output_directories,
            output_node_properties=self._output_node_properties,
            platform=platform,
        )

    def _process_job_output(self, working_directory, output_directories, output_files, *, failure):
        # Reads the remote execution server response to an execution request.
        #
        # output_directories is an array of OutputDirectory objects.
        # output_files is an array of OutputFile objects.
        #
        if output_files:
            raise SandboxError("Output files were returned when we didn't request any.")

        context = self._get_context()
        cascache = context.get_cascache()
        vdir = self.get_virtual_directory()

        for output_directory in output_directories:
            tree_digest = output_directory.tree_digest
            if tree_digest is None or not tree_digest.hash:
                raise SandboxError("Output directory structure had no digest attached.")

            # Get digest of output directory from tree digest
            tree = remote_execution_pb2.Tree()
            with open(cascache.objpath(tree_digest), "rb") as f:
                tree.ParseFromString(f.read())
            root_directory = tree.root.SerializeToString()
            dir_digest = utils._message_digest(root_directory)

            # Create a normalized absolute path (inside the input tree)
            path = os.path.normpath(os.path.join(working_directory, output_directory.path)).lstrip(os.path.sep)

            # Get virtual directory at the path of the output directory
            vsubdir = vdir.open_directory(path, create=True)

            # Replace contents with returned output
            vsubdir._reset(digest=dir_digest)

    def _create_batch(self, main_group, flags, *, collect=None):
        # Batched commands are executed as a single generated shell script.
        return _SandboxREAPIBatch(self, main_group, flags, collect=collect)

    def _execute_action(self, action, flags):
        # Abstract hook: subclasses execute the Action and return an
        # ActionResult-like object.
        raise ImplError("Sandbox of type '{}' does not implement _execute_action()".format(type(self).__name__))

    def _supported_platform_properties(self):
        # Subclasses may extend this with e.g. unixUID/unixGID/network.
        return {"OSFamily", "ISA"}
# _SandboxREAPIBatch()
#
# Command batching by shell script generation.
#
class _SandboxREAPIBatch(_SandboxBatch):
    def __init__(self, sandbox, main_group, flags, *, collect=None):
        super().__init__(sandbox, main_group, flags, collect=collect)

        # Accumulated shell script implementing the whole batch.
        self.script = None
        # First command in the batch; its cwd/env seed the script invocation.
        self.first_command = None
        # cwd/env state tracked while generating the script.
        self.cwd = None
        self.env = None

    def execute(self):
        # Generate one `sh -e` script for the whole batch and run it in a
        # single sandboxed invocation.
        self.script = ""

        self.main_group.execute(self)

        first = self.first_command
        if first:
            context = self.sandbox._get_context()
            with context.messenger.timed_activity(
                "Running commands",
                detail=self.main_group.combined_label(),
                element_name=self.sandbox._get_element_name(),
            ):
                if (
                    self.sandbox._run_with_flags(
                        ["sh", "-c", "-e", self.script], flags=self.flags, cwd=first.cwd, env=first.env
                    )
                    != 0
                ):
                    raise SandboxCommandError("Command failed", collect=self.collect)

    def execute_group(self, group):
        group.execute_children(self)

    def execute_command(self, command):
        # Append one command to the script, emitting cd/export/unset lines
        # whenever its cwd/env differ from the previous command's.
        if self.first_command is None:
            # First command in batch
            # Initial working directory and environment of script already matches
            # the command configuration.
            self.first_command = command
        else:
            # Change working directory for this command
            if command.cwd != self.cwd:
                self.script += "mkdir -p {}\n".format(command.cwd)
                self.script += "cd {}\n".format(command.cwd)

            # Update environment for this command
            for key in self.env.keys():
                if key not in command.env:
                    self.script += "unset {}\n".format(key)
            for key, value in command.env.items():
                if key not in self.env or self.env[key] != value:
                    self.script += "export {}={}\n".format(key, shlex.quote(value))

        # Keep track of current working directory and environment
        self.cwd = command.cwd
        self.env = command.env

        # Actual command execution; `set -ex` in a subshell echoes and
        # fail-fasts the command itself.
        cmdline = " ".join(shlex.quote(cmd) for cmd in command.command)
        self.script += "(set -ex; {})".format(cmdline)

        # Error handling: report which labeled command failed, then abort
        # the batch (script runs under `sh -e`).
        label = command.label or cmdline
        quoted_label = shlex.quote("'{}'".format(label))
        self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(quoted_label)

    def create_empty_file(self, name):
        self.script += "touch -- {}\n".format(shlex.quote(name))

    def clean_directory(self, name):
        # Do not treat error during cleanup as a fatal build error
        self.script += "rm -rf -- {} || true\n".format(shlex.quote(name))
        if self.first_command:
            # Working directory may be a subdirectory of the build directory.
            # Recreate it if necessary as output capture requires the working directory to exist.
            self.script += "mkdir -p {} || true\n".format(shlex.quote(self.first_command.cwd))
/Monitis-SDK-1.0.2.tar.gz/Monitis-SDK-1.0.2/monitis/monitors/custom.py | from urllib import quote
from json import dumps
from monitis.api import Monitis, MonitisError, get, decode_json
from monitis.api import checktime as api_checktime
from monitis.monitors.params import MonitorParams, ResultParams
from monitis.monitors.params import AdditionalResultParams
# API operations on existing Custom Monitors are CustomMonitor methods
# API operations that return one or more monitors are functions
def _api_url():
    '''Return the customMonitor API endpoint for the current mode.'''
    # Sandbox mode is a class-level switch on Monitis; the `is True` check is
    # preserved from the original (truthy non-True values select the live URL).
    return CustomMonitor.sandbox_url if Monitis.sandbox is True else CustomMonitor.default_url
# Custom Monitor API uses a different HTTP endpoint from the rest of the API
def _custom_get(**kwargs):
    '''HTTP GET using URL for customMonitor API'''
    # The custom-monitor API lives at a different HTTP endpoint than the rest
    # of the API, so the URL is supplied explicitly on every request.
    return get(url=_api_url(), **kwargs)
def get_monitors(tag=None, m_type=None):
    """Return a list of CustomMonitor instances that match ``tag`` and
    ``m_type`` (both filters optional).
    """
    # Build the optional filter arguments for the getMonitors API action.
    get_args = dict()
    if m_type is not None:
        get_args['m_type'] = m_type
    if tag is not None:
        get_args['tag'] = tag
    # NOTE(review): confirm the Monitis getMonitors action accepts a
    # parameter literally named "m_type" — verify against the API docs.
    mon_list = _custom_get(action='getMonitors', **get_args)
    result_list = list()
    for mon in mon_list:
        # Each API entry carries at least 'id' and 'name'; the filter args
        # are passed through so each instance remembers its tag/type.
        result_list.append(CustomMonitor(monitor_id=mon['id'],
                                         name=mon['name'],
                                         **get_args))
    return result_list
# TODO make this work when keys aren't in the ENV
def get_monitor_info(monitor_id=None, exclude_hidden='false'):
    """Get information regarding the specified Custom monitor

    ``monitor_id`` is required.  ``exclude_hidden`` may be a Python bool or
    one of the strings 'true'/'false' expected by the HTTP API.
    """
    get_args = dict()
    get_args['action'] = 'getMonitorInfo'
    get_args['monitorId'] = monitor_id

    # cannonicalize exclude_hidden: map Python booleans onto the lowercase
    # string form the API expects; string values pass through unchanged.
    bool2string = {True:'true', False:'false'}
    if exclude_hidden in bool2string.keys():
        get_args['excludeHidden'] = bool2string[exclude_hidden]
    else:
        get_args['excludeHidden'] = exclude_hidden

    # sanity check
    if get_args['monitorId'] is None:
        raise MonitisError('get_monitor_info: monitor_id is required')
    if get_args['excludeHidden'] not in bool2string.values():
        raise MonitisError(
            'get_monitor_info: exclude_hidden is not boolean')

    # Wrap any transport/API error in MonitisError (Python 2 except syntax;
    # this module predates Python 3).
    try:
        result = _custom_get(**get_args)
    except Exception, msg:
        raise MonitisError(msg)
    return result
def _encode_params(*args):
    """Return a tuple of strings representing params lists suitable
    for posting to add_monitor

    (result_params, additional_result_params, monitor_params)
    """
    # Bucket each param object by its kind, preserving the original
    # isinstance check order (ResultParams before AdditionalResultParams).
    result_encoded = []
    additional_encoded = []
    monitor_encoded = []
    for param in args:
        if isinstance(param, ResultParams):
            result_encoded.append(param.encode())
        elif isinstance(param, AdditionalResultParams):
            additional_encoded.append(param.encode())
        elif isinstance(param, MonitorParams):
            monitor_encoded.append(param.encode())
        else:
            raise MonitisError("Non-param unnamed argument")
    # Each group is joined into a single ';'-separated string.
    return (';'.join(result_encoded),
            ';'.join(additional_encoded),
            ';'.join(monitor_encoded))
class CustomMonitor(Monitis):
    # This constructor generally should not be called directly.
    # Rather, construct a CustomMonitor either via
    # CustomMonitor.add_monitor or CustomMonitor.fetch
    '''
    CustomMonitor encapsulates the API for Monitis custom monitors.

    Generally, do not instantiate CustomMonitor directly using the
    constructor. Instead, to add a new monitor, use CustomMonitor.addMonitor.
    To create an instance for working with an existing monitor, use
    CustomMonitor.fetch.
    '''
    debug = False
    default_url = 'http://monitis.com/customMonitorApi'
    sandbox_url = 'http://sandbox.monitis.com/customMonitorApi'

    def __init__(self, apikey=None, secretkey=None, url=None,
                 version=None, validation=None,
                 monitor_id=None, name=None, m_type=None, tag=None):
        customurl = url or _api_url()
        self.monitor_id = monitor_id
        self.name = name
        self.m_type = m_type
        self.tag = tag
        self.monitor_params = None
        # if monitor_id is None:
        #     raise MonitisError("monitor_id is required")
        Monitis.__init__(self, apikey=apikey, secretkey=secretkey,
                         url=customurl, version=version,
                         validation=validation)

    def get_monitor_info(self):
        """Return the information for an existing CustomMonitor instance"""
        return get_monitor_info(monitor_id=self.monitor_id)

    def refresh(self):
        '''Update the monitor with fresh data from the API

        This option is currently limited to getting the tag'''
        # TODO complete the mapping of API params to class attributes
        # and refresh all of them
        monitor_info = self.get_monitor_info()
        self.tag = monitor_info['tag']

    @classmethod
    def fetch(cls, monitor_id=None, **kwargs):
        """Create a CustomMonitor instance based on get_monitor_info"""
        if monitor_id is None:
            raise MonitisError('fetch: monitor_id is required')
        mon_data = get_monitor_info(monitor_id)
        mon = CustomMonitor(monitor_id=monitor_id, **kwargs)
        for i in ['name', 'tag', 'm_type']:
            try:
                mon.__dict__[i] = mon_data[i]
            except KeyError:
                pass
        # result_params
        # FIXME: each pass through this loop overwrites 'resultParams', so
        # only the last entry survives; presumably these should accumulate
        # in a list -- verify against callers before changing.
        for par in mon_data['resultParams']:
            mon.__dict__['resultParams'] = ResultParams(par['name'],
                                                        par['displayName'],
                                                        par['uom'],
                                                        par['dataType'])
        # additional_result_params
        # FIXME: same overwrite-per-iteration behavior as 'resultParams'.
        for par in mon_data['additionalResultParams']:
            mon.__dict__['additionalResultParams'] = AdditionalResultParams(
                par['name'],
                par['displayName'],
                par['uom'],
                par['dataType'])
        # monitor_params
        # hidden is not returned in getMonitorInfo in the API, so we have to
        # make an assumption that it is false
        for par in mon_data['monitorParams']:
            mon.__dict__['monitorParams'] = MonitorParams(par['name'],
                                                          par['displayName'],
                                                          par['value'],
                                                          par['dataType'],
                                                          'false')
        return mon

    @staticmethod
    def _validate_kwargs(required, allowed, **kwargs):
        '''Validate keyword arguments passed in as **kwargs

        required: kwargs that must exist and evaluate to true
        allowed: kwargs that may exist
        kwargs : the kwargs to validate

        Return true if all required kwargs exist, and no kwargs
        exist that are neither allowed or required

        Raise MonitisError if any arguments are missing or if any
        invalid arguments are found
        '''
        # Copy before extending so the caller's 'allowed' list is not
        # mutated (the original extended the argument in place).
        all_allowed = list(allowed)
        all_allowed.extend(required)
        # input validation
        for key in required:
            if not key in kwargs.keys():
                raise MonitisError("Argument " + key + " is required")
        for key in kwargs.keys():
            if not key in all_allowed:
                raise MonitisError("Unexpected kwarg " + key)
        return True

    @classmethod
    def add_monitor(cls, *args, **kwargs):
        """Add a custom monitor

        Required parameters:
        name - string
        tag - string
        One or more ResultParams instances

        Optional parameters:
        customUserAgentId - the id of the custom user agent
        m_type - custom string that represents monitor type
        One or more MonitorParams instances
        One or more AdditionalResultParams instances

        Note that all Params objects must come before the keyword arguments

        Return a CustomMonitor instance if the operation succeeds, or raises
        MonitisError if the operation fails.
        """
        # Need to use kwargs rather than named args so we can have
        # a variable number of args for *Params
        required = ['name', 'tag']
        allowed = ['customUserAgentId', 'm_type']
        # ensure that we have the correct args
        # raises MonitisError if it fails
        CustomMonitor._validate_kwargs(required, allowed, **kwargs)
        # all required keys exist in kwargs, none unexpected
        # build the dict to pass to post
        add = dict()
        add.update(**kwargs)  # everything in kwargs passed on to post
        add['action'] = 'addMonitor'
        result_params, additional_result_params, monitor_params = \
            _encode_params(*args)
        # add the *params args to add
        if result_params:
            add['resultParams'] = result_params
        else:
            raise MonitisError('add_monitor: result_params is required')
        if monitor_params:
            add['monitorParams'] = monitor_params
        if additional_result_params:
            add['additionalResultParams'] = additional_result_params
        # Create a mostly empty CustomMonitor, and then populate it
        # once we've successfully created it on the server
        mon = cls(url=_api_url())
        json_result = mon.post(**add)
        result = decode_json(json_result)
        if result['status'] == 'ok':
            mon.monitor_id = result['data']
            # copy additional values into the new CustomMonitor object
            for key in ('name', 'm_type', 'tag', 'customUserAgentId'):
                if key in kwargs:
                    mon.__dict__[key] = kwargs[key]
        else:
            raise MonitisError("add_monitor failed: " + json_result)
        return mon

    def _set_id(self, monitor_id):
        """Set the monitor_id when creating a new instance via add_monitor"""
        self.monitor_id = monitor_id

    def __repr__(self):
        """Nicer string representation of CustomMonitor"""
        return "<CustomMonitor(id={0}, name={1})>".format(self.monitor_id,
                                                          self.name)

    def edit_monitor(self, *args, **kwargs):
        """Edit an existing custom monitor

        Allowed keyword arguments:
        monitor_params
        name
        tag
        """
        # Need to use kwargs rather than named args so we can have
        # a variable number of args for *Params
        required = []
        allowed = ['name', 'tag', 'monitor_params']
        CustomMonitor._validate_kwargs(required, allowed, **kwargs)
        # build the dict to pass to post
        add = dict()
        # copy name and tag to add, if they exist
        for arg_name in ['name', 'tag']:
            if arg_name in kwargs.keys():
                add[arg_name] = kwargs[arg_name]
        # copy monitorParams into add, if it exists
        if 'monitor_params' in kwargs.keys():
            add['monitorParams'] = kwargs['monitor_params']
        add['action'] = 'editMonitor'
        add['monitorId'] = self.monitor_id
        result_params, additional_result_params, monitor_params = \
            _encode_params(*args)
        # add the *params args to add
        if monitor_params:
            add['monitorParams'] = monitor_params
        if result_params:
            raise MonitisError("edit_monitor: result_params not allowed")
        if additional_result_params:
            raise MonitisError("edit_monitor: additional_result_params " +
                               "not allowed")
        json_result = self.post(**add)
        result = decode_json(json_result)
        if result['status'] == 'ok':
            # copy additional values into the new CustomMonitor object
            for key in ('name', 'tag'):
                if key in kwargs:
                    self.__dict__[key] = kwargs[key]
            self.monitor_params = monitor_params
        else:
            # Fixed: this error message previously read "add_monitor failed"
            raise MonitisError("edit_monitor failed: " + json_result)
        return result

    def delete_monitor(self):
        """Delete the custom monitor with ID monitor_id"""
        result = decode_json(self.post(action='deleteMonitor',
                                       monitorId=self.monitor_id))
        if result['status'] != 'ok':
            raise MonitisError(
                'delete_monitor error: ' + result['status'])

    # @classmethod
    # def add_result_and_monitor(
    #     self, checktime=None, name, tag=None, result_params, **kwargs):
    #     '''Add results as with add_result, but also create the named monitor if
    #     none with that name exists.
    #
    #     checktime - optional, the current time is used if none given
    #     name - required, if it matches an existing name, that will be used
    #     tag - required, used if creating a new monitor
    #     result_params - list of one or more ResultParams for the monitor
    #
    #     '''
    #     # check to see if the monitor already exists
    #
    #     # if not, create it
    #     # in either case, we now have a handle to a monitor
    #     # post the results to the monitor
    #     pass

    def add_result(self, checktime=None, results=None, **kwargs):
        """add results for the specified Custom monitor

        One or more results should be passed in a kwargs of the form:
        paramName1=paramValue1,paramName2=paramValue2,...

        Returns the checktime, whether passed in as a parameter
        or automatically generated. This is useful for passing
        to add_additional_results
        """
        action = 'addResult'
        # use the current time if the user didn't specify one
        result_checktime = checktime or api_checktime()
        # merge kwargs and results items into one list
        if results is None:
            results_items = list()
        else:
            # materialize into a list so extend() below works even when
            # items() returns a view (Python 3) rather than a list
            results_items = list(results.items())
        results_items.extend(kwargs.items())
        # build post string elements from combined results items
        result_strings = list()
        for name, value in results_items:
            # urllib.quote each to implement "double encoding"
            # http://monitis.com/api/api.html#api_home
            result_strings.append(quote(':'.join((name, str(value)))))
        result_string = self.post(action=action, monitorId=self.monitor_id,
                                  checktime=result_checktime,
                                  results=';'.join(result_strings))
        result = decode_json(result_string)
        if result['status'] != 'ok':
            raise MonitisError('add_result failed')
        return result_checktime

    def add_additional_results(self, checktime=None, results=None):
        """Add additional results to an existing result

        checktime: the checktime of a previously submitted result
        results: an instance of AdditionalResults
        """
        if not checktime:
            raise MonitisError("addAdditionalResults: checktime required")
        if not results:
            raise MonitisError("addAdditionalResults: " +
                               "results required")
        action = 'addAdditionalResults'
        # use the current time if the user didn't specify one
        checktime = checktime or api_checktime()
        # use dumps to format the results parameter as a JSON list
        response_string = self.post(action=action,
                                    monitorId=self.monitor_id,
                                    checktime=checktime,
                                    results=dumps(results))
        response = decode_json(response_string)
        if response['status'] != 'ok':
            # Fixed: concatenating the decoded dict onto the message raised
            # TypeError; report the raw JSON response string instead.
            raise MonitisError('addAdditionalResults failed: ' +
                               response_string)
        return checktime

    def get_monitor_results(self, year=None, month=None, day=None,
                            timezone=None):
        """Get results from the specified custom monitor"""
        get_args = dict()
        if year is None or month is None or day is None:
            raise MonitisError(
                'get_monitor_results: year, month, and day are required')
        get_args['action'] = 'getMonitorResults'
        get_args['monitorId'] = self.monitor_id
        for i, j in ((year, 'year'), (month, 'month'), (day, 'day')):
            get_args[j] = i
        if timezone:
            get_args['timezone'] = timezone
        return _custom_get(**get_args)

    def get_additional_results(self, checktime=None):
        """Get additional results associated with a specific posted result

        Given an existing result and associated additional results, retrieve
        those additional results, using the checktime as the key to identify
        the specific additional result.

        checktime -- identifies which additional result to retrieve
        """
        if checktime is None:
            raise MonitisError("getAdditionalResults: checktime required")
        get_args = {'action': 'getAdditionalResults',
                    'monitorId': self.monitor_id,
                    'checktime': checktime}
        return _custom_get(**get_args)
/FlexGet-3.9.6-py3-none-any.whl/flexget/ui/v2/dist/assets/default~EntryListPlugin~MovieListPlugin~PendingListPlugin~TasksPlugin.6286e3dceaa32382c7aa.js | (this.webpackJsonp=this.webpackJsonp||[]).push([[1],{1119:function(e,t,n){"use strict";var a=n(13),r=n(51),o=n(3),l=n(0),i=n.n(l),c=(n(24),n(15)),s=n(19),d=n(389),u=n(32),f=i.a.forwardRef((function(e,t){var n=e.classes,r=e.className,l=e.disabled,s=void 0!==l&&l,f=e.disableFocusRipple,b=void 0!==f&&f,p=e.fullWidth,v=e.icon,m=e.indicator,h=e.label,g=e.onChange,w=e.onClick,x=e.selected,j=e.textColor,C=void 0===j?"inherit":j,O=e.value,y=e.wrapped,E=void 0!==y&&y,N=Object(a.a)(e,["classes","className","disabled","disableFocusRipple","fullWidth","icon","indicator","label","onChange","onClick","selected","textColor","value","wrapped"]);return i.a.createElement(d.a,Object(o.a)({focusRipple:!b,className:Object(c.a)(n.root,n["textColor".concat(Object(u.a)(C))],r,s&&n.disabled,x&&n.selected,h&&v&&n.labelIcon,p&&n.fullWidth,E&&n.wrapped),ref:t,role:"tab","aria-selected":x,disabled:s,onClick:function(e){g&&g(e,O),w&&w(e)}},N),i.a.createElement("span",{className:n.wrapper},v,h),m)}));t.a=Object(s.a)((function(e){var t;return{root:Object(o.a)({},e.typography.button,(t={maxWidth:264,minWidth:72,position:"relative",boxSizing:"border-box",minHeight:48,flexShrink:0,padding:"6px 12px"},Object(r.a)(t,e.breakpoints.up("sm"),{padding:"6px 24px"}),Object(r.a)(t,"overflow","hidden"),Object(r.a)(t,"whiteSpace","normal"),Object(r.a)(t,"textAlign","center"),Object(r.a)(t,e.breakpoints.up("sm"),{minWidth:160}),t)),labelIcon:{minHeight:72,paddingTop:9,"& $wrapper > 
*:first-child":{marginBottom:6}},textColorInherit:{color:"inherit",opacity:.7,"&$selected":{opacity:1},"&$disabled":{opacity:.5}},textColorPrimary:{color:e.palette.text.secondary,"&$selected":{color:e.palette.primary.main},"&$disabled":{color:e.palette.text.disabled}},textColorSecondary:{color:e.palette.text.secondary,"&$selected":{color:e.palette.secondary.main},"&$disabled":{color:e.palette.text.disabled}},selected:{},disabled:{},fullWidth:{flexShrink:1,flexGrow:1,flexBasis:0,maxWidth:"none"},wrapped:{fontSize:e.typography.pxToRem(12),lineHeight:1.5},wrapper:{display:"inline-flex",alignItems:"center",justifyContent:"center",width:"100%",flexDirection:"column"}}}),{name:"MuiTab"})(f)},1120:function(e,t,n){"use strict";var a=n(0),r=n.n(a),o=n(43);t.a=Object(o.a)(r.a.createElement("path",{d:"M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm-2 15l-5-5 1.41-1.41L10 14.17l7.59-7.59L19 8l-9 9z"}),"CheckCircle")},1133:function(e,t,n){"use strict";var a,r=n(3),o=n(13),l=n(51),i=n(0),c=n.n(i),s=(n(247),n(24),n(15)),d=n(239),u=n(240),f=!("undefined"==typeof window||!window.document||!window.document.createElement);function b(){if(a)return a;if(!f||!window.document.body)return"indeterminate";var e=window.document.createElement("div");return e.appendChild(document.createTextNode("ABCD")),e.dir="rtl",e.style.fontSize="14px",e.style.width="4px",e.style.height="1px",e.style.position="absolute",e.style.top="-1000px",e.style.overflow="scroll",document.body.appendChild(e),a="reverse",e.scrollLeft>0?a="default":(e.scrollLeft=1,0===e.scrollLeft&&(a="negative")),document.body.removeChild(e),a}function p(e,t){var n=e.scrollLeft;if("rtl"!==t)return n;var a=b();if("indeterminate"===a)return Number.NaN;switch(a){case"negative":return e.scrollWidth-e.clientWidth+n;case"reverse":return e.scrollWidth-e.clientWidth-n}return n}function v(e){return(1+Math.sin(Math.PI*e-Math.PI/2))/2}var m={width:99,height:99,position:"absolute",top:-9999,overflow:"scroll"};function h(e){var 
t=e.onChange,n=Object(o.a)(e,["onChange"]),a=c.a.useRef(),l=c.a.useRef(null),i=function(){a.current=l.current.offsetHeight-l.current.clientHeight};return c.a.useEffect((function(){var e=Object(d.a)((function(){var e=a.current;i(),e!==a.current&&t(a.current)}));return window.addEventListener("resize",e),function(){e.clear(),window.removeEventListener("resize",e)}}),[t]),c.a.useEffect((function(){i(),t(a.current)}),[t]),c.a.createElement("div",Object(r.a)({style:m,ref:l},n))}var g=n(19),w=n(32),x=c.a.forwardRef((function(e,t){var n=e.classes,a=e.className,l=e.color,i=e.orientation,d=Object(o.a)(e,["classes","className","color","orientation"]);return c.a.createElement("span",Object(r.a)({className:Object(s.a)(n.root,n["color".concat(Object(w.a)(l))],a,{vertical:n.vertical}[i]),ref:t},d))})),j=Object(g.a)((function(e){return{root:{position:"absolute",height:2,bottom:0,width:"100%",transition:e.transitions.create()},colorPrimary:{backgroundColor:e.palette.primary.main},colorSecondary:{backgroundColor:e.palette.secondary.main},vertical:{height:"100%",width:2,right:0}}}),{name:"PrivateTabIndicator"})(x),C=n(956),O=n(955),y=n(389),E=c.a.createElement(C.a,{fontSize:"small"}),N=c.a.createElement(O.a,{fontSize:"small"}),S=c.a.forwardRef((function(e,t){var n=e.classes,a=e.className,l=e.direction,i=e.orientation,d=e.visible,u=Object(o.a)(e,["classes","className","direction","orientation","visible"]),f=Object(s.a)(n.root,a,{vertical:n.vertical}[i]);return d?c.a.createElement(y.a,Object(r.a)({component:"div",className:f,ref:t,role:null,tabIndex:null},u),"left"===l?E:N):c.a.createElement("div",{className:f})})),k=Object(g.a)({root:{width:40,flexShrink:0},vertical:{width:"100%",height:40,"& svg":{transform:"rotate(90deg)"}}},{name:"PrivateTabScrollButton"})(S),W=n(119),B=n(109),L=c.a.forwardRef((function(e,t){var n=e.action,a=e.centered,i=void 0!==a&&a,f=e.children,m=e.classes,g=e.className,w=e.component,x=void 0===w?"div":w,C=e.indicatorColor,O=void 
0===C?"secondary":C,y=e.onChange,E=e.orientation,N=void 0===E?"horizontal":E,S=e.ScrollButtonComponent,L=void 0===S?k:S,z=e.scrollButtons,M=void 0===z?"auto":z,R=e.TabIndicatorProps,T=void 0===R?{}:R,I=e.textColor,H=void 0===I?"inherit":I,P=e.value,D=e.variant,$=void 0===D?"standard":D,A=Object(o.a)(e,["action","centered","children","classes","className","component","indicatorColor","onChange","orientation","ScrollButtonComponent","scrollButtons","TabIndicatorProps","textColor","value","variant"]),F=Object(B.a)(),q="scrollable"===$,V="rtl"===F.direction,J="vertical"===N,X=J?"scrollTop":"scrollLeft",G=J?"top":"left",K=J?"bottom":"right",Q=J?"clientHeight":"clientWidth",U=J?"height":"width";var Y=c.a.useState(!1),Z=Y[0],_=Y[1],ee=c.a.useState({}),te=ee[0],ne=ee[1],ae=c.a.useState({start:!1,end:!1}),re=ae[0],oe=ae[1],le=c.a.useState({overflow:"hidden",marginBottom:null}),ie=le[0],ce=le[1],se=new Map,de=c.a.useRef(null),ue=c.a.useRef(null),fe=function(){var e,t,n=de.current;if(n){var a=n.getBoundingClientRect();e={clientWidth:n.clientWidth,scrollLeft:n.scrollLeft,scrollTop:n.scrollTop,scrollLeftNormalized:p(n,F.direction),scrollWidth:n.scrollWidth,top:a.top,bottom:a.bottom,left:a.left,right:a.right}}if(n&&!1!==P){var r=ue.current.children;if(r.length>0){var o=r[se.get(P)];0,t=o?o.getBoundingClientRect():null}}return{tabsMeta:e,tabMeta:t}},be=Object(W.a)((function(){var e,t=fe(),n=t.tabsMeta,a=t.tabMeta,r=0;if(a&&n)if(J)r=a.top-n.top+n.scrollTop;else{var o=V?n.scrollLeftNormalized+n.clientWidth-n.scrollWidth:n.scrollLeft;r=a.left-n.left+o}var i=(e={},Object(l.a)(e,G,r),Object(l.a)(e,U,a?a[U]:0),e);if(isNaN(te[G])||isNaN(te[U]))ne(i);else{var c=Math.abs(te[G]-i[G]),s=Math.abs(te[U]-i[U]);(c>=1||s>=1)&&ne(i)}})),pe=function(e){!function(e,t,n){var a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},r=arguments.length>4&&void 0!==arguments[4]?arguments[4]:function(){},o=a.ease,l=void 0===o?v:o,i=a.duration,c=void 
0===i?300:i,s=null,d=t[e],u=!1,f=function(){u=!0},b=function a(o){if(u)r(new Error("Animation cancelled"));else{null===s&&(s=o);var i=Math.min(1,(o-s)/c);t[e]=l(i)*(n-d)+d,i>=1?requestAnimationFrame((function(){r(null)})):requestAnimationFrame(a)}};d===n?r(new Error("Element already at target position")):requestAnimationFrame(b)}(X,de.current,e)},ve=function(e){var t=de.current[X];J?t+=e:(t+=e*(V?-1:1),t*=V&&"reverse"===b()?-1:1),pe(t)},me=function(){ve(-de.current[Q])},he=function(){ve(de.current[Q])},ge=c.a.useCallback((function(e){ce({overflow:null,marginBottom:-e})}),[]),we=Object(W.a)((function(){var e=fe(),t=e.tabsMeta,n=e.tabMeta;if(n&&t)if(n[G]<t[G]){var a=t[X]+(n[G]-t[G]);pe(a)}else if(n[K]>t[K]){var r=t[X]+(n[K]-t[K]);pe(r)}})),xe=Object(W.a)((function(){if(q&&"off"!==M){var e,t,n=de.current,a=n.scrollTop,r=n.scrollHeight,o=n.clientHeight,l=n.scrollWidth,i=n.clientWidth;if(J)e=a>1,t=a<r-o-1;else{var c=p(de.current,F.direction);e=V?c<l-i-1:c>1,t=V?c>1:c<l-i-1}e===re.start&&t===re.end||oe({start:e,end:t})}}));c.a.useEffect((function(){var e=Object(d.a)((function(){be(),xe()})),t=Object(u.a)(de.current);return t.addEventListener("resize",e),function(){e.clear(),t.removeEventListener("resize",e)}}),[be,xe]);var je=c.a.useCallback(Object(d.a)((function(){xe()})));c.a.useEffect((function(){return function(){je.clear()}}),[je]),c.a.useEffect((function(){_(!0)}),[]),c.a.useEffect((function(){be(),xe()})),c.a.useEffect((function(){we()}),[we,te]),c.a.useImperativeHandle(n,(function(){return{updateIndicator:be,updateScrollButtons:xe}}),[be,xe]);var Ce=c.a.createElement(j,Object(r.a)({className:m.indicator,orientation:N,color:O},T,{style:Object(r.a)({},te,{},T.style)})),Oe=0,ye=c.a.Children.map(f,(function(e){if(!c.a.isValidElement(e))return null;var t=void 0===e.props.value?Oe:e.props.value;se.set(t,Oe);var n=t===P;return Oe+=1,c.a.cloneElement(e,{fullWidth:"fullWidth"===$,indicator:n&&!Z&&Ce,selected:n,onChange:y,textColor:H,value:t})})),Ee=function(){var 
e={};e.scrollbarSizeListener=q?c.a.createElement(h,{className:m.scrollable,onChange:ge}):null;var t=re.start||re.end,n=q&&("auto"===M&&t||"desktop"===M||"on"===M);return e.scrollButtonStart=n?c.a.createElement(L,{orientation:N,direction:V?"right":"left",onClick:me,visible:re.start,className:Object(s.a)(m.scrollButtons,"on"!==M&&m.scrollButtonsDesktop)}):null,e.scrollButtonEnd=n?c.a.createElement(L,{orientation:N,direction:V?"left":"right",onClick:he,visible:re.end,className:Object(s.a)(m.scrollButtons,"on"!==M&&m.scrollButtonsDesktop)}):null,e}();return c.a.createElement(x,Object(r.a)({className:Object(s.a)(m.root,g,J&&m.vertical),ref:t},A),Ee.scrollButtonStart,Ee.scrollbarSizeListener,c.a.createElement("div",{className:Object(s.a)(m.scroller,q?m.scrollable:m.fixed),style:ie,ref:de,onScroll:je},c.a.createElement("div",{className:Object(s.a)(m.flexContainer,J&&m.flexContainerVertical,i&&!q&&m.centered),ref:ue,role:"tablist"},ye),Z&&Ce),Ee.scrollButtonEnd)}));t.a=Object(g.a)((function(e){return{root:{overflow:"hidden",minHeight:48,WebkitOverflowScrolling:"touch",display:"flex"},vertical:{flexDirection:"column"},flexContainer:{display:"flex"},flexContainerVertical:{flexDirection:"column"},centered:{justifyContent:"center"},scroller:{position:"relative",display:"inline-block",flex:"1 1 auto",whiteSpace:"nowrap"},fixed:{overflowX:"hidden",width:"100%"},scrollable:{overflowX:"scroll",scrollbarWidth:"none","&::-webkit-scrollbar":{display:"none"}},scrollButtons:{},scrollButtonsDesktop:Object(l.a)({},e.breakpoints.down("xs"),{display:"none"}),indicator:{}}}),{name:"MuiTabs"})(L)}}]);
//# sourceMappingURL=default~EntryListPlugin~MovieListPlugin~PendingListPlugin~TasksPlugin.6286e3dceaa32382c7aa.js.map | PypiClean |
/Djblets-3.3.tar.gz/Djblets-3.3/djblets/features/checkers.py | from __future__ import annotations
import logging
from typing import Dict, Optional, cast
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from djblets.features.level import FeatureLevel
# Module-level singleton cache for the active feature checker. It is built
# lazily by get_feature_checker() and can be replaced or cleared through
# set_feature_checker().
_feature_checker: Optional[BaseFeatureChecker] = None
class BaseFeatureChecker:
    """Base class for a feature checker.

    Subclasses are responsible for overriding :py:meth:`is_feature_enabled`
    and returning a suitable result for any given feature.
    """

    @cached_property
    def min_enabled_level(self) -> FeatureLevel:
        """The minimum feature level to enable by default.

        If ``settings.MIN_ENABLED_FEATURE_LEVEL`` is set, that value is
        used. Otherwise, the result depends on ``settings.DEBUG``: anything
        :py:attr:`~djblets.features.feature.FeatureLevel.BETA` or higher is
        enabled by default when debugging, and only
        :py:attr:`~djblets.features.feature.FeatureLevel.STABLE` or higher
        otherwise.

        Subclasses can override this to provide custom logic.
        """
        configured_level = getattr(settings, 'MIN_ENABLED_FEATURE_LEVEL',
                                   None)

        if configured_level:
            return configured_level

        return FeatureLevel.BETA if settings.DEBUG else FeatureLevel.STABLE

    def is_feature_enabled(
        self,
        feature_id: str,
        **kwargs,
    ) -> bool:
        """Return whether a feature is enabled for a given ID.

        Subclasses must override this to provide a suitable implementation
        for that type of feature checker.

        Args:
            feature_id (str):
                The ID corresponding to a
                :py:class:`~djblets.features.feature.Feature` class to check.

            **kwargs (dict):
                Additional keyword arguments relevant for this particular
                feature check.

        Returns:
            bool:
            A boolean value indicating if the feature is enabled.
        """
        raise NotImplementedError('%s must implement is_feature_enabled'
                                  % self.__class__.__name__)
class SettingsFeatureChecker(BaseFeatureChecker):
    """Feature checker backed by the Django settings module.

    A feature is considered enabled when its feature ID maps to ``True`` in
    the ``settings.ENABLED_FEATURES`` dictionary. The settings key consulted
    can be changed by subclassing and overriding :py:attr:`settings_key`.
    """

    #: The key in settings used for the enabled features.
    settings_key: str = 'ENABLED_FEATURES'

    def is_feature_enabled(
        self,
        feature_id: str,
        **kwargs,
    ) -> bool:
        """Return whether a feature is enabled for a given ID.

        The feature is enabled if its feature ID is set to ``True`` in the
        dictionary named by :py:attr:`settings_key` in the Django settings.

        Args:
            feature_id (str):
                The ID corresponding to a
                :py:class:`~djblets.features.feature.Feature` class to check.

            **kwargs (dict):
                Additional keyword arguments relevant for this particular
                feature check. These are unused for this checker.

        Returns:
            bool:
            A boolean value indicating if the feature is enabled.
        """
        return getattr(settings, self.settings_key, {}).get(feature_id,
                                                            False)
class SiteConfigFeatureChecker(SettingsFeatureChecker):
    """Feature checker backed by a SiteConfiguration, then Django settings.

    A feature's state is looked up in two places, in order:

    1. The ``enabled_features`` dictionary in the current
       :py:class:`~djblets.siteconfig.models.SiteConfiguration` settings.

    2. The ``settings.ENABLED_FEATURES`` dictionary.

    These keys can be changed by subclassing and modifying
    :py:attr:`siteconfig_key` and :py:attr:`settings_key`.
    """

    #: The key in siteconfig used for the enabled features.
    siteconfig_key: str = 'enabled_features'

    def is_feature_enabled(
        self,
        feature_id: str,
        **kwargs,
    ) -> bool:
        """Return whether a feature is enabled for a given ID.

        The feature is enabled if its feature ID is set to ``True`` either
        in the ``enabled_features`` key of the current
        :py:class:`~djblets.siteconfig.models.SiteConfiguration` or in the
        ``settings.ENABLED_FEATURES`` dictionary.

        Args:
            feature_id (str):
                The ID corresponding to a
                :py:class:`~djblets.features.feature.Feature` class to check.

            **kwargs (dict):
                Additional keyword arguments relevant for this particular
                feature check. These are unused for this checker.

        Returns:
            bool:
            A boolean value indicating if the feature is enabled.
        """
        # Imported here rather than at the top of the file to avoid a
        # loading issue on Django 1.7+. Technically, nothing imported in an
        # app's __init__.py should ever import models, and checkers.py
        # qualifies.
        from djblets.siteconfig.models import SiteConfiguration

        siteconfig = SiteConfiguration.objects.get_current()
        enabled_features = cast(Dict[str, bool],
                                siteconfig.get(self.siteconfig_key, {}))

        # Use a sentinel so a missing key (and only a missing key) falls
        # back to the settings-based lookup in the parent class.
        missing = object()
        enabled = enabled_features.get(feature_id, missing)

        if enabled is missing:
            return super().is_feature_enabled(feature_id, **kwargs)

        return enabled
def set_feature_checker(
    feature_checker: Optional[BaseFeatureChecker],
) -> None:
    """Set the feature checker to use for all features.

    This can be called to manually configure a feature checker, or to unset
    the feature checker in order to recompute it.

    Args:
        feature_checker (BaseFeatureChecker):
            The new feature checker to set, or ``None`` to unset.
    """
    global _feature_checker

    # After unsetting, the next call to get_feature_checker() will lazily
    # construct a fresh checker from settings.
    _feature_checker = feature_checker
def get_feature_checker() -> BaseFeatureChecker:
    """Return the configured feature checker instance.

    The class to use is configured through the ``settings.FEATURE_CHECKER``
    setting, which must be a full module and class path. If not specified,
    :py:class:`SettingsFeatureChecker` will be used.

    The same feature checker instance will be returned each time this is
    called.

    Returns:
        BaseFeatureChecker:
        A feature checker instance.

    Raises:
        django.core.exceptions.ImproperlyConfigured:
            There was an error either in the ``settings.FEATURE_CHECKER``
            value or in instantiating the feature checker.
    """
    if _feature_checker is None:
        class_path = getattr(settings, 'FEATURE_CHECKER', None)

        if class_path:
            try:
                checker_module_name, checker_class_name = \
                    class_path.rsplit('.', 1)
                checker_module = __import__(checker_module_name, {}, {},
                                            checker_class_name)
                checker_class = getattr(checker_module, checker_class_name)
            except Exception as e:
                # Chain the original exception so the import failure's
                # traceback isn't lost.
                raise ImproperlyConfigured(
                    _('Unable to find feature checker class "%s": %s')
                    % (class_path, e)) from e
        else:
            checker_class = SettingsFeatureChecker

        try:
            set_feature_checker(checker_class())
        except Exception as e:
            # class_path is None when the default checker is used; report
            # the actual class so the error doesn't read as class "None".
            checker_name = class_path or checker_class.__name__
            logging.exception('Unable to instantiate feature checker '
                              'class "%s": %s',
                              checker_name, e)
            raise ImproperlyConfigured(
                _('Unable to instantiate feature checker class "%s": %s')
                % (checker_name, e)) from e

    assert _feature_checker is not None

    return _feature_checker
/KrypticLogger-0.4.tar.gz/KrypticLogger-0.4/README.md | # Kryptic Studio - KrypticLogger
KrypticLogger is a tool that helps you better organize your logging and minimize the amount of code you write while logging!
## Installation
1. Pip Installation
```bash
pip install KrypticLogger
```
### OR
1. Clone Repository to Project Folder.
```bash
$ git clone https://github.com/KrypticStudio/KrypticLogger
```
2. Install requirements
```bash
$ pip install -r requirements.txt
```
3. Install setup.py
```bash
$ python3 setup.py install
```
## Usage
1. Import KrypticLogger!
```python
from KrypticLogger import log #as log
import KrypticLogger as logPath
```
2. Call it anywhere!
```python
#### Set path for log file
logPath.path = "Logs/log.txt"
### Parameters
# tag = "EXAMPLE" #Customize the notifier tag. ##ONLY AVAILABLE FOR CUSTOM
# log = True #Logs to terminal or cmd.
# write = False #Writes log to file
# time = False #Adds current time to log
# code = "0x00" #Custime code for error, organization, etc...
# critical = False #Displays weather the message is critical or not.
log.custom(tag, message, log = True, write = False, time = False, code = "", critical = False)
```
3. EXAMPLE
```python
# Example
from KrypticLogger import log
import KrypticLogger as logPath
# Setting Log Path(Optional) ***DEFAULT "log.txt"
logPath.path = "Logs/log.txt"
#Message to be displayed...
Message = "Kryptic Studio Test. Kryptic Logger"
# Calling Logs
log.debug(Message)
log.error(Message)
log.info(Message)
log.log(Message)
log.success(Message)
log.track(Message)
log.warn(Message)
log.custom("Custom Tag", Message)
```
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
Please make sure to update tests as appropriate.
## License
| PypiClean |
/NESTML-5.3.0-py3-none-any.whl/pynestml/codegeneration/python_standalone_code_generator.py |
from typing import Any, Dict, Mapping, Optional, Sequence, Union
import os
from pynestml.codegeneration.printers.constant_printer import ConstantPrinter
from pynestml.codegeneration.printers.python_expression_printer import PythonExpressionPrinter
from pynestml.codegeneration.printers.python_stepping_function_function_call_printer import PythonSteppingFunctionFunctionCallPrinter
from pynestml.codegeneration.printers.python_stepping_function_variable_printer import PythonSteppingFunctionVariablePrinter
from pynestml.codegeneration.python_code_generator_utils import PythonCodeGeneratorUtils
from pynestml.meta_model.ast_neuron import ASTNeuron
from pynestml.meta_model.ast_neuron_or_synapse import ASTNeuronOrSynapse
from pynestml.meta_model.ast_synapse import ASTSynapse
from pynestml.codegeneration.nest_code_generator import NESTCodeGenerator
from pynestml.codegeneration.printers.python_type_symbol_printer import PythonTypeSymbolPrinter
from pynestml.codegeneration.printers.python_standalone_printer import PythonStandalonePrinter
from pynestml.codegeneration.printers.python_function_call_printer import PythonFunctionCallPrinter
from pynestml.codegeneration.printers.python_variable_printer import PythonVariablePrinter
from pynestml.codegeneration.printers.python_simple_expression_printer import PythonSimpleExpressionPrinter
class PythonStandaloneCodeGenerator(NESTCodeGenerator):
    r"""
    Code generator for a standalone Python target.

    Options:
    - **preserve_expressions**: Set to True, or a list of strings corresponding to individual variable names, to disable internal rewriting of expressions, and return same output as input expression where possible. Only applies to variables specified as first-order differential equations. (This parameter is passed to ODE-toolbox.)
    - **simplify_expression**: For all expressions ``expr`` that are rewritten by ODE-toolbox: the contents of this parameter string are ``eval()``ed in Python to obtain the final output expression. Override for custom expression simplification steps. Example: ``sympy.simplify(expr)``. Default: ``"sympy.logcombine(sympy.powsimp(sympy.expand(expr)))"``. (This parameter is passed to ODE-toolbox.)
    - **templates**: Path containing jinja templates used to generate code.
        - **path**: Path containing jinja templates used to generate code.
        - **model_templates**: A list of the jinja templates or a relative path to a directory containing the neuron model templates.
            - **neuron**: A list of neuron model jinja templates.
        - **module_templates**: A list of the jinja templates or a relative path to a directory containing the templates related to generating the module/package.
    - **solver**: A string identifying the preferred ODE solver. ``"analytic"`` for propagator solver preferred; fallback to numeric solver in case ODEs are not analytically solvable. Use ``"numeric"`` to disable analytic solver.
    """

    _default_options = {
        "preserve_expressions": False,
        "simplify_expression": "sympy.logcombine(sympy.powsimp(sympy.expand(expr)))",
        "templates": {
            "path": "point_neuron",
            "model_templates": {
                "neuron": ["@NEURON_NAME@.py.jinja2"]
            },
            "module_templates": ["simulator.py.jinja2", "test_python_standalone_module.py.jinja2", "neuron.py.jinja2", "spike_generator.py.jinja2", "utils.py.jinja2"]
        }
    }

    def __init__(self, options: Optional[Mapping[str, Any]] = None):
        # Merge user options over the defaults WITHOUT mutating the shared
        # class-level ``_default_options``. The previous code passed the
        # return value of ``dict.update()`` -- which is always ``None`` -- to
        # the superclass, silently dropping all options and permanently
        # modifying the shared defaults for every later instance.
        merged_options = {**PythonStandaloneCodeGenerator._default_options, **(options or {})}

        # Intentionally skip ``NESTCodeGenerator.__init__`` (note the explicit
        # ``super(NESTCodeGenerator, self)``): this class performs its own
        # template-environment and printer setup below.
        super(NESTCodeGenerator, self).__init__("python_standalone", merged_options)

        self.analytic_solver = {}
        self.numeric_solver = {}
        self.non_equations_state_variables = {}   # those state variables not defined as an ODE in the equations block

        self.setup_template_env()
        self.setup_printers()

    def setup_printers(self):
        """Create the printers that render types, variables, constants,
        expressions and function calls as Python source code."""
        super().setup_printers()

        self._type_symbol_printer = PythonTypeSymbolPrinter()
        self._constant_printer = ConstantPrinter()

        # Python/mini simulation environment API printers
        self._nest_variable_printer = PythonVariablePrinter(expression_printer=None, with_origin=True, with_vector_parameter=True)
        self._nest_function_call_printer = PythonFunctionCallPrinter(None)
        self._nest_function_call_printer_no_origin = PythonFunctionCallPrinter(None)

        self._printer = PythonExpressionPrinter(simple_expression_printer=PythonSimpleExpressionPrinter(variable_printer=self._nest_variable_printer,
                                                                                                       constant_printer=self._constant_printer,
                                                                                                       function_call_printer=self._nest_function_call_printer))
        # The variable/function-call printers and the expression printer
        # reference each other, so the back-references are patched in after
        # construction.
        self._nest_variable_printer._expression_printer = self._printer
        self._nest_function_call_printer._expression_printer = self._printer
        self._nest_printer = PythonStandalonePrinter(expression_printer=self._printer)

        self._nest_variable_printer_no_origin = PythonVariablePrinter(None, with_origin=False, with_vector_parameter=False)
        self._printer_no_origin = PythonExpressionPrinter(simple_expression_printer=PythonSimpleExpressionPrinter(variable_printer=self._nest_variable_printer_no_origin,
                                                                                                                 constant_printer=self._constant_printer,
                                                                                                                 function_call_printer=self._nest_function_call_printer_no_origin))
        self._nest_variable_printer_no_origin._expression_printer = self._printer_no_origin
        self._nest_function_call_printer_no_origin._expression_printer = self._printer_no_origin

        self._nest_unitless_function_call_printer = PythonFunctionCallPrinter(None)

        # GSL printers (used for the numeric stepping function)
        self._gsl_variable_printer = PythonSteppingFunctionVariablePrinter(None)
        self._gsl_function_call_printer = PythonSteppingFunctionFunctionCallPrinter(None)
        self._gsl_printer = PythonExpressionPrinter(simple_expression_printer=PythonSimpleExpressionPrinter(variable_printer=self._gsl_variable_printer,
                                                                                                           constant_printer=self._constant_printer,
                                                                                                           function_call_printer=self._gsl_function_call_printer))
        self._gsl_function_call_printer._expression_printer = self._gsl_printer
        self._gsl_variable_printer._expression_printer = self._gsl_printer

    def _get_model_namespace(self, astnode: ASTNeuronOrSynapse) -> Dict:
        """Extend the base template namespace with Python-specific helpers."""
        namespace = super()._get_model_namespace(astnode)
        namespace["python_codegen_utils"] = PythonCodeGeneratorUtils
        namespace["gsl_printer"] = self._gsl_printer
        return namespace
/DragonPyEmulator-0.9.0-py3-none-any.whl/basic_editor/token_window.py | import logging
import sys
import tkinter
from dragonlib.utils.logging_utils import pformat_program_dump
from basic_editor.scrolled_text import ScrolledText
from basic_editor.status_bar import MultiStatusBar
log = logging.getLogger(__name__)
class TokenWindow:
def __init__(self, cfg, master):
    """Create a Toplevel window that displays a tokenized BASIC listing.

    cfg: machine configuration object (provides ``machine_api`` and
        ``MACHINE_NAME``).
    master: parent Tk widget; the new window is placed directly to its right.
    """
    self.cfg = cfg
    self.machine_api = self.cfg.machine_api

    self.root = tkinter.Toplevel(master)
    # Position the token window to the right of the master window.
    self.root.geometry("+%d+%d" % (
        master.winfo_rootx() + master.winfo_width(),
        master.winfo_y()  # FIXME: Different on linux.
    ))
    # Let the single text cell grow with the window.
    self.root.columnconfigure(0, weight=1)
    self.root.rowconfigure(0, weight=1)
    self.base_title = f"{self.cfg.MACHINE_NAME} - Tokens"
    self.root.title(self.base_title)

    self.text = ScrolledText(
        master=self.root, height=30, width=80
    )
    self.text.config(
        background="#ffffff", foreground="#000000",
        highlightthickness=0,
        font=('courier', 11),
    )
    self.text.grid(row=0, column=0, sticky=tkinter.NSEW)

    self.set_status_bar()  # Create widget, add bindings and after_idle() update
    self.text.after_idle(self.set_token_info)
def display_listing(self, content):
    """Tokenize the given ASCII BASIC listing and show the formatted dump."""
    dump = self.machine_api.ascii_listing2program_dump(content)
    self.text.insert(tkinter.END, pformat_program_dump(dump))
    # Show per-token info in the status bar while the mouse moves over the dump.
    self.text.bind("<Any-Motion>", self.on_mouse_move)
def on_mouse_move(self, event):
    """Show token info for the hex value under the mouse pointer.

    Bound to ``<Any-Motion>`` on the text widget (see display_listing()).
    If a selection exists, also decode the selected hex values to BASIC.
    """
    # Text index ("line.column") under the current mouse position.
    index = self.text.index(f"@{event.x},{event.y}")
    try:
        word = self.text.get(f"{index} wordstart", f"{index} wordend")
    except tkinter.TclError as err:
        log.critical("TclError: %s", err)
        return

    try:
        token_value = int(word, 16)
    except ValueError:
        # The word under the cursor is not a hex number -> nothing to show.
        return

    log.critical("$%x", token_value)
    basic_word = self.machine_api.token_util.token2ascii(token_value)
    info = f"{index} ${token_value:02x} == {basic_word!r}"

    try:
        selection_index = f"{self.text.index('sel.first')}-{self.text.index('sel.last')}"
        selection = self.text.selection_get()
    except tkinter.TclError:
        # no selection
        pass
    else:
        log.critical(" selection: %s: %r", selection_index, selection)
        # Strip the "$" hex prefixes and decode the selected token values
        # back into a BASIC source fragment.
        selection = selection.replace("$", "")
        token_values = [int(part, 16) for part in selection.split() if part.strip()]
        log.critical("values: %r", token_values)
        basic_selection = self.machine_api.token_util.tokens2ascii(token_values)
        info += f" - selection: {basic_selection!r}"

    self.status_bar.set_label("cursor_info", info)
# ##########################################################################
# Status bar
def set_status_bar(self):
    """Create the status bar and wire up cursor-position updates."""
    self.status_bar = MultiStatusBar(self.root)
    if sys.platform == "darwin":
        # Insert some padding to avoid obscuring some of the statusbar
        # by the resize widget.
        self.status_bar.set_label('_padding1', ' ', side=tkinter.RIGHT)
    self.status_bar.grid(row=1, column=0)

    # Refresh line/column info on every key release and mouse-button release.
    self.text.bind("<<set-line-and-column>>", self.set_line_and_column)
    self.text.event_add("<<set-line-and-column>>",
                        "<KeyRelease>", "<ButtonRelease>")
    self.text.after_idle(self.set_line_and_column)
def set_line_and_column(self, event=None):
    """Display the current text-cursor position in the status bar."""
    insert_position = self.text.index(tkinter.INSERT)
    line, column = insert_position.split('.')
    self.status_bar.set_label('column', f'Column: {column}')
    self.status_bar.set_label('line', f'Line: {line}')
###########################################################################
def set_token_info(self, event=None):
line, column = self.text.index(tkinter.INSERT).split('.') | PypiClean |
define("dojox/math/random/Secure", ["dojo"], function(dojo) {
// Copyright (c) 2005  Tom Wu
// All Rights Reserved.
// See "LICENSE-BigInteger" for details.

// Random number generator - requires a PRNG backend, e.g. prng4.js
dojo.declare("dojox.math.random.Secure", null, {
  // summary:
  //    Random number generator that feeds an entropy pool (seeded from
  //    Math.random(), the clock, and optionally user click/keypress events)
  //    into a pluggable PRNG backend.
  constructor: function(prng, noEvents){
    // summary:
    //    Intializes an instance of a secure random generator.
    // prng: Function:
    //    function that returns an instance of PRNG (pseudorandom number generator)
    //    with two methods: init(array) and next(). It should have a property "size"
    //    to indicate the required pool size.
    // noEvents: Boolean?:
    //    if false or absent, onclick and onkeypress event will be used to add
    //    "randomness", otherwise events will not be used.
    this.prng = prng;
    // Initialize the pool with junk if needed.
    var p = this.pool = new Array(prng.size);
    this.pptr = 0;
    for(var i = 0, len = prng.size; i < len;) {  // extract some randomness from Math.random()
      // each 16-bit sample fills two pool bytes
      var t = Math.floor(65536 * Math.random());
      p[i++] = t >>> 8;
      p[i++] = t & 255;
    }
    this.seedTime();
    if(!noEvents){
      // keep the connect handles so destroy() can disconnect them
      this.h = [
        dojo.connect(dojo.body(), "onclick",    this, "seedTime"),
        dojo.connect(dojo.body(), "onkeypress", this, "seedTime")
      ];
    }
  },
  destroy: function(){
    // summary:
    //    Disconnects events, if any, preparing the object for GC.
    if(this.h){
      dojo.forEach(this.h, dojo.disconnect);
    }
  },
  nextBytes: function(/* Array */ byteArray){
    // summary:
    //    Fills in an array of bytes with random numbers
    // byteArray: Array:
    //    array to be filled in with random numbers, only existing
    //    elements will be filled.
    var state = this.state;
    if(!state){
      // lazily initialize the PRNG backend from the entropy pool on first use
      this.seedTime();
      state = this.state = this.prng();
      state.init(this.pool);
      // wipe the pool after it has been consumed
      for(var p = this.pool, i = 0, len = p.length; i < len; p[i++] = 0);
      this.pptr = 0;
      //this.pool = null;
    }
    for(var i = 0, len = byteArray.length; i < len; ++i){
      byteArray[i] = state.next();
    }
  },
  seedTime: function() {
    // summary:
    //    Mix in the current time (w/milliseconds) into the pool
    this._seed_int(new Date().getTime());
  },
  _seed_int: function(x) {
    // summary:
    //    Mix in a 32-bit integer into the pool
    var p = this.pool, i = this.pptr;
    p[i++] ^= x & 255;
    p[i++] ^= (x >> 8) & 255;
    p[i++] ^= (x >> 16) & 255;
    p[i++] ^= (x >> 24) & 255;
    // wrap the pool pointer when it passes the end of the pool
    if(i >= this.prng.size){
      i -= this.prng.size;
    }
    this.pptr = i;
  }
});

return dojox.math.random.Secure;
});
/MaMMUT-pytorch-0.0.5.tar.gz/MaMMUT-pytorch-0.0.5/mammut_pytorch/mammut_pytorch.py | import torch
from torch import einsum, nn
import torch.nn.functional as F
from einops import rearrange, repeat
# helper functions
def exists(val):
    """Return True when ``val`` is not None."""
    return val is not None


def default(val, d):
    """Return ``val`` if it is set (not None), otherwise the fallback ``d``."""
    if exists(val):
        return val
    return d
def divisible_by(numer, denom):
    """Return True when ``numer`` is an exact multiple of ``denom``."""
    return numer % denom == 0
# normalization
# they use layernorm without bias, something that pytorch does not offer
class LayerNorm(nn.Module):
    """Layer normalization with a learnable gain (``gamma``) but no learnable bias.

    PyTorch's ``nn.LayerNorm`` always carries a bias, so ``beta`` is registered
    here as a non-trainable zero buffer instead (see module-level comment).
    """

    def __init__(self, dim):
        super().__init__()
        self.gamma = nn.Parameter(torch.ones(dim))
        self.register_buffer("beta", torch.zeros(dim))

    def forward(self, x):
        # Normalize over the last (feature) dimension only.
        return F.layer_norm(x, x.shape[-1:], self.gamma, self.beta)
# residual
class Residual(nn.Module):
    """Wrap ``fn`` with a skip connection: ``forward(x) = fn(x) + x``."""

    def __init__(self, fn):
        super().__init__()
        self.fn = fn

    def forward(self, x, *args, **kwargs):
        out = self.fn(x, *args, **kwargs)
        return out + x
# to latents
class EmbedToLatents(nn.Module):
    """Project embeddings into a latent space and L2-normalize the result."""

    def __init__(self, dim, dim_latents):
        super().__init__()
        self.to_latents = nn.Linear(dim, dim_latents, bias=False)

    def forward(self, x):
        return F.normalize(self.to_latents(x), dim=-1)
# rotary positional embedding
# https://arxiv.org/abs/2104.09864
class RotaryEmbedding(nn.Module):
    """Rotary positional embedding (RoPE), https://arxiv.org/abs/2104.09864.

    ``forward`` returns a (max_seq_len, dim) table of rotation angles with the
    frequency half duplicated along the feature dimension.
    """

    def __init__(self, dim):
        super().__init__()
        inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2).float() / dim))
        self.register_buffer("inv_freq", inv_freq)

    def forward(self, max_seq_len, *, device):
        positions = torch.arange(max_seq_len, device=device, dtype=self.inv_freq.dtype)
        freqs = einsum("i , j -> i j", positions, self.inv_freq)
        return torch.cat((freqs, freqs), dim=-1)
def rotate_half(x):
    """RoPE rotation helper: split the last dim into two halves (x1, x2)
    and return their rotation (-x2, x1)."""
    x1, x2 = x.unflatten(-1, (2, -1)).unbind(dim=-2)
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(pos, t):
    """Apply rotary positional angles ``pos`` to features ``t``."""
    cos, sin = pos.cos(), pos.sin()
    return (t * cos) + (rotate_half(t) * sin)
# classic Noam Shazeer paper, except here they use SwiGLU instead of the more popular GEGLU for gating the feedforward
# https://arxiv.org/abs/2002.05202
class SwiGLU(nn.Module):
    """SwiGLU activation: chunk features into (value, gate), return silu(gate) * value."""

    def forward(self, x):
        value, gate = x.chunk(2, dim=-1)
        return value * F.silu(gate)
# parallel attention and feedforward with residual
# discovered by Wang et al + EleutherAI from GPT-J fame
class ParallelTransformerBlock(nn.Module):
    """Transformer block computing attention and feedforward in parallel from
    a single fused input projection (PaLM-style), with multi-query attention
    and rotary position embeddings.
    """

    def __init__(self, dim, dim_head=64, heads=8, ff_mult=4):
        super().__init__()
        self.norm = LayerNorm(dim)

        attn_inner_dim = dim_head * heads
        ff_inner_dim = dim * ff_mult
        # One fused projection yields: multi-head queries, single-head keys,
        # single-head values, and the doubled (SwiGLU-gated) FF input.
        self.fused_dims = (attn_inner_dim, dim_head, dim_head, (ff_inner_dim * 2))

        self.heads = heads
        self.scale = dim_head**-0.5
        self.rotary_emb = RotaryEmbedding(dim_head)

        self.fused_attn_ff_proj = nn.Linear(dim, sum(self.fused_dims), bias=False)
        self.attn_out = nn.Linear(attn_inner_dim, dim, bias=False)

        self.ff_out = nn.Sequential(
            SwiGLU(),
            nn.Linear(ff_inner_dim, dim, bias=False)
        )

        # for caching causal mask and rotary embeddings
        self.mask = None
        self.pos_emb = None

    def get_mask(self, n, device):
        """Return an (n, n) boolean causal mask (True above the diagonal =
        masked), reusing/slicing a cached copy when large enough."""
        if self.mask is not None and self.mask.shape[-1] >= n:
            return self.mask[:n, :n].to(device)

        mask = torch.ones((n, n), device=device, dtype=torch.bool).triu(1)
        self.mask = mask
        return mask

    def get_rotary_embedding(self, n, device):
        """Return rotary angles for sequence length ``n``, cached like the mask."""
        if self.pos_emb is not None and self.pos_emb.shape[-2] >= n:
            return self.pos_emb[:n].to(device)

        pos_emb = self.rotary_emb(n, device=device)
        self.pos_emb = pos_emb
        return pos_emb

    def forward(self, x, attn_mask=None):
        """
        einstein notation
        b - batch
        h - heads
        n, i, j - sequence length (base sequence length, source, target)
        d - feature dimension
        """
        n, device, h = x.shape[1], x.device, self.heads

        # pre layernorm
        x = self.norm(x)

        # attention queries, keys, values, and feedforward inner
        q, k, v, ff = self.fused_attn_ff_proj(x).split(self.fused_dims, dim=-1)

        # split heads
        # they use multi-query single-key-value attention, yet another Noam Shazeer paper
        # they found no performance loss past a certain scale, and more efficient decoding obviously
        # https://arxiv.org/abs/1911.02150
        q = rearrange(q, "b n (h d) -> b h n d", h=h)

        # rotary embeddings
        positions = self.get_rotary_embedding(n, device)
        q, k = map(lambda t: apply_rotary_pos_emb(positions, t), (q, k))

        # scale
        q = q * self.scale

        # similarity (k has no head axis: multi-query attention)
        sim = einsum("b h i d, b j d -> b h i j", q, k)

        # causal mask
        causal_mask = self.get_mask(n, device)
        sim = sim.masked_fill(causal_mask, -torch.finfo(sim.dtype).max)

        # extra attention mask - for masking out attention from text CLS token to padding
        if exists(attn_mask):
            attn_mask = rearrange(attn_mask, 'b i j -> b 1 i j')
            sim = sim.masked_fill(~attn_mask, -torch.finfo(sim.dtype).max)

        # attention
        attn = sim.softmax(dim=-1)

        # aggregate values
        out = einsum("b h i j, b j d -> b h i d", attn, v)

        # merge heads
        out = rearrange(out, "b h n d -> b n (h d)")
        # parallel combine: attention branch + feedforward branch
        return self.attn_out(out) + self.ff_out(ff)
# cross attention - using multi-query + one-headed key / values as in PaLM w/ optional parallel feedforward
class CrossAttention(nn.Module):
    """Cross attention from queries ``x`` to a separate ``context`` sequence,
    multi-query style (single-head keys/values), with an optional parallel
    SwiGLU feedforward branch added to the output.
    """

    def __init__(
        self,
        dim,
        *,
        context_dim=None,
        dim_head=64,
        heads=8,
        parallel_ff=False,
        ff_mult=4,
        norm_context=False
    ):
        super().__init__()
        self.heads = heads
        self.scale = dim_head ** -0.5
        inner_dim = heads * dim_head
        # context feature size defaults to the query feature size
        context_dim = default(context_dim, dim)

        self.norm = LayerNorm(dim)
        self.context_norm = LayerNorm(context_dim) if norm_context else nn.Identity()

        self.to_q = nn.Linear(dim, inner_dim, bias=False)
        # single shared key/value head (multi-query attention)
        self.to_kv = nn.Linear(context_dim, dim_head * 2, bias=False)
        self.to_out = nn.Linear(inner_dim, dim, bias=False)

        # whether to have parallel feedforward
        ff_inner_dim = ff_mult * dim

        self.ff = nn.Sequential(
            nn.Linear(dim, ff_inner_dim * 2, bias=False),
            SwiGLU(),
            nn.Linear(ff_inner_dim, dim, bias=False)
        ) if parallel_ff else None

    def forward(self, x, context):
        """
        einstein notation
        b - batch
        h - heads
        n, i, j - sequence length (base sequence length, source, target)
        d - feature dimension
        """

        # pre-layernorm, for queries and context
        x = self.norm(x)
        context = self.context_norm(context)

        # get queries
        q = self.to_q(x)
        q = rearrange(q, 'b n (h d) -> b h n d', h = self.heads)

        # scale
        q = q * self.scale

        # get key / values
        k, v = self.to_kv(context).chunk(2, dim=-1)

        # query / key similarity
        sim = einsum('b h i d, b j d -> b h i j', q, k)

        # attention
        attn = sim.softmax(dim=-1)

        # aggregate
        out = einsum('b h i j, b j d -> b h i d', attn, v)

        # merge and combine heads
        out = rearrange(out, 'b h n d -> b n (h d)')
        out = self.to_out(out)

        # add parallel feedforward (for multimodal layers)
        if exists(self.ff):
            out = out + self.ff(x)

        return out
# transformer
class MaMMUT(nn.Module):
    """MaMMUT: decoder-only text transformer with optional cross attention to
    attention-pooled image tokens, trained with a captioning (cross-entropy)
    loss plus a CLIP-style two-way contrastive loss.
    """

    def __init__(
        self,
        *,
        dim,
        num_tokens,
        depth,
        cross_attend_every=1,
        cross_attend_layers=None,
        dim_latents=None,
        image_dim=None,
        num_img_queries=256,
        dim_head=64,
        heads=8,
        ff_mult=4,
        img_encoder=None,
        caption_loss_weight=1.,
        contrastive_loss_weight=1.,
        pad_id=0
    ):
        super().__init__()
        self.dim = dim
        self.pad_id = pad_id
        self.caption_loss_weight = caption_loss_weight
        self.contrastive_loss_weight = contrastive_loss_weight

        # token embeddings
        self.token_emb = nn.Embedding(num_tokens, dim)
        self.text_cls_token = nn.Parameter(torch.randn(dim))

        # image encoder (optional; precomputed image tokens may be passed instead)
        self.img_encoder = img_encoder

        # attention pooling for image tokens
        self.img_queries = nn.Parameter(torch.randn(num_img_queries + 1, dim)) # num image queries for multimodal, but 1 extra CLS for contrastive learning
        self.img_attn_pool = CrossAttention(dim=dim, context_dim=image_dim, dim_head=dim_head, heads=heads, norm_context=True)

        self.img_attn_pool_norm = LayerNorm(dim)
        self.text_cls_norm = LayerNorm(dim)

        # to latents
        dim_latents = default(dim_latents, dim)
        self.img_to_latents = EmbedToLatents(dim, dim_latents)
        self.text_to_latents = EmbedToLatents(dim, dim_latents)

        # contrastive learning temperature (learned, applied as exp())
        self.temperature = nn.Parameter(torch.Tensor([1.]))

        # layers
        self.layers = nn.ModuleList([])

        for ind in range(depth):
            layer = ind + 1

            # a layer cross-attends either on a fixed stride (cross_attend_every)
            # or at an explicit tuple of 1-based layer indices (cross_attend_layers)
            has_cross_attn = divisible_by(layer, cross_attend_every)

            if exists(cross_attend_layers):
                assert isinstance(cross_attend_layers, tuple)
                has_cross_attn = layer in cross_attend_layers

            self.layers.append(nn.ModuleList([
                Residual(ParallelTransformerBlock(dim=dim, dim_head=dim_head, heads=heads, ff_mult=ff_mult)),
                Residual(CrossAttention(dim=dim, dim_head=dim_head, heads=heads, parallel_ff=True, ff_mult=ff_mult)) if has_cross_attn else None
            ]))

        # to logits
        self.to_logits = nn.Sequential(
            LayerNorm(dim),
            nn.Linear(dim, num_tokens, bias=False)
        )

        # they used embedding weight tied projection out to logits, not common, but works
        self.to_logits[-1].weight = self.token_emb.weight
        nn.init.normal_(self.token_emb.weight, std=0.02)

    def embed_text(self, text):
        """Text-only (unimodal) pass.

        Returns ``(text_embeds, text_tokens)`` where ``text_embeds`` is the
        normalized CLS embedding used by the contrastive loss.
        """
        batch, device = text.shape[0], text.device

        seq = text.shape[1]

        text_tokens = self.token_emb(text)

        # append text cls tokens
        text_cls_tokens = repeat(self.text_cls_token, 'd -> b 1 d', b=batch)
        text_tokens = torch.cat((text_tokens, text_cls_tokens), dim=-2)

        # create specific mask for text cls token at the end
        # to prevent it from attending to padding
        cls_mask = rearrange(text!=self.pad_id, 'b j -> b 1 j')
        attn_mask = F.pad(cls_mask, (0, 1, seq, 0), value=True)

        # go through layers, but do not cross attend
        for attn_ff, _ in self.layers:
            text_tokens = attn_ff(text_tokens, attn_mask=attn_mask)

        # get text cls token (last position)
        text_tokens, text_cls_tokens = text_tokens[:, :-1], text_tokens[:, -1]
        text_embeds = self.text_cls_norm(text_cls_tokens)
        return text_embeds, text_tokens

    def embed_image(self, images=None, image_tokens=None):
        """Encode images (or accept precomputed tokens) and attention-pool them.

        Returns ``(image_embed_for_contrastive, image_tokens_for_cross_attn)``.
        """
        # encode images into embeddings
        # with the img_encoder passed in at init
        # it can also accept precomputed image tokens

        assert not (exists(images) and exists(image_tokens))

        if exists(images):
            assert exists(self.img_encoder), 'img_encoder must be passed in for automatic image encoding'
            image_tokens = self.img_encoder(images)

        # attention pool image tokens

        img_queries = repeat(self.img_queries, 'n d -> b n d', b=image_tokens.shape[0])
        img_queries = self.img_attn_pool(img_queries, image_tokens)
        img_queries = self.img_attn_pool_norm(img_queries)

        # first pooled query is the contrastive CLS; the rest feed cross attention
        return img_queries[:, 0], img_queries[:, 1:]

    def forward(
        self,
        text,
        text_mask = None,
        images=None,
        image_tokens=None,
        labels=None,
        return_loss=False,
        return_embeddings=False
    ):
        """Return logits, or (with ``return_loss=True``) the combined
        caption + contrastive training loss."""
        batch, device = text.shape[0], text.device

        if return_loss and not exists(labels):
            # teacher forcing: predict token t+1 from tokens <= t
            text, labels = text[:, :-1], text[:, 1:]

        text_embeds, _ = self.embed_text(text)

        image_embeds, image_tokens = self.embed_image(images=images, image_tokens=image_tokens)

        # return embeddings if that is what the researcher wants
        if return_embeddings:
            return text_embeds, image_embeds

        # go through layers
        text_tokens = self.token_emb(text)

        for attn_ff, cross_attn in self.layers:
            text_tokens = attn_ff(text_tokens)
            if exists(cross_attn):
                text_tokens = cross_attn(text_tokens, image_tokens)

        logits = self.to_logits(text_tokens)

        if not return_loss:
            return logits

        # shorthand
        ce = F.cross_entropy

        # calculate caption loss (cross entropy loss)
        logits = rearrange(logits, 'b n c -> b c n')
        caption_loss = ce(logits, labels, ignore_index=self.pad_id)
        caption_loss = caption_loss * self.caption_loss_weight

        # embedding to latents
        text_latents = self.text_to_latents(text_embeds)
        image_latents = self.img_to_latents(image_embeds)

        # calculate contrastive loss (symmetric text->image and image->text)
        sim = einsum('i d, j d -> i j', text_latents, image_latents)
        sim = sim * self.temperature.exp()
        contrastive_labels = torch.arange(batch, device=device)

        contrastive_loss = (ce(sim, contrastive_labels) + ce(sim.t(), contrastive_labels)) * 0.5
        contrastive_loss = contrastive_loss * self.contrastive_loss_weight

        return caption_loss + contrastive_loss
class DataTransformWrapper:
    """Interface for data transforms that convert between raw dataset text and
    the encoded integer representation a model consumes.

    Subclasses override the methods below; the implementations here are inert
    placeholders documenting the expected contract.
    """

    def __init__(self, config, dataset):
        # When initialize DataTransformWrapper, we pass configuration and dataset object to constructor
        self.config = config
        self.dataset = dataset

    def encode(self, token_list, max_length = 999):
        """Encode a batch of string data into a batch of encoded integers."""
        return []

    def decode(self, id_list):
        """Decode a batch of integers back to a batch of strings."""
        return ""

    def num(self):
        """Return the size of the dictionary (key size)."""
        return 0

    def get_data_effected_configs(self):
        """Return objects differentiating cached input/output the model will use.

        Basically the configurations that affect the encoded data (used as part
        of cache keys).
        """
        return '_'

    def get_data_dimension(self):
        """Return the dimension of the data this transform consumes.

        Ex: X = int[Count] => return 1
        Ex: X = [int[Count], int[Count]] => return 2
        """
        return 1

    def is_data_preaggregated(self):
        """Whether the transform applies aggregated transformation on the raw dataset.

        Example: BERT pretraining batches many lines of dataset.load_as_list()
        into a single row to maximize transformed sequence length. In that case
        training should load the already-transformed data instead of calling
        transform.encode row by row; encode/decode must still be implemented
        for online inference mode.
        """
        return False

    def load_preaggregated_data(self):
        """Load pre-aggregated data instead of dataset.load_as_list().

        Returns (X, Y, X_valid, Y_valid).
        """
        return None

    def is_data_dynamically_aggregated(self):
        """Whether dynamic preprocessing runs at the start of each epoch.

        Example: BERT-style masking where, for the same input string, different
        tokens are masked in each epoch. This could also be precomputed once as
        multiple dataset copies; this flag selects the on-the-fly approach.
        """
        return False

    def get_dynamically_aggregation_layer(self):
        """Return Keras-layer tensor operators performing dynamic aggregation.

        These are added to the computation graph so the transformation runs on
        each input before the model (or on labels before loss calculation),
        because doing it outside the graph would be slower and would break the
        Keras training loop.
        """
        return None
/GeneLearn-0.0.0.tar.gz/GeneLearn-0.0.0/docs/_build/html/_static/doctools.js | * select a different prefix for underscore
*/
// Keep Underscore available as $u so the bare `_` name can later be
// rebound to Documentation.gettext (see bottom of this file).
$u = _.noConflict();
/**
* make the code below compatible with browsers without
* an installed firebug like debugger
if (!window.console || !console.firebug) {
var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
"dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
"profile", "profileEnd"];
window.console = {};
for (var i = 0; i < names.length; ++i)
window.console[names[i]] = function() {};
}
*/
/**
 * small helper function to urldecode strings
 *
 * Converts '+' to a space *before* percent-decoding; otherwise an encoded
 * plus sign ("%2B") would first decode to '+' and then wrongly become a
 * space. (In form-urlencoded query strings a literal '+' means a space,
 * while a real plus sign is transmitted as "%2B".)
 */
jQuery.urldecode = function(x) {
  return decodeURIComponent(x.replace(/\+/g, ' '));
};
/**
 * small helper function to urlencode strings
 * (direct alias for the native encodeURIComponent)
 */
jQuery.urlencode = encodeURIComponent;
/**
 * This function returns the parsed url parameters of the
 * current request. Multiple values per key are supported,
 * it will always return arrays of strings for the value parts.
 */
jQuery.getQueryParameters = function(s) {
  if (typeof s === 'undefined')
    s = document.location.search;
  // drop everything up to and including '?', then split into key=value pairs
  var parts = s.substr(s.indexOf('?') + 1).split('&');
  var result = {};
  for (var i = 0; i < parts.length; i++) {
    var tmp = parts[i].split('=', 2);
    // NOTE(review): a parameter without '=' leaves tmp[1] undefined, which
    // urldecode() turns into the string "undefined" — confirm callers never
    // pass bare flags.
    var key = jQuery.urldecode(tmp[0]);
    var value = jQuery.urldecode(tmp[1]);
    if (key in result)
      result[key].push(value);
    else
      result[key] = [value];
  }
  return result;
};
/**
 * highlight a given string on a jquery object by wrapping it in
 * span elements with the given class name.
 */
jQuery.fn.highlightText = function(text, className) {
  function highlight(node, addItems) {
    if (node.nodeType === 3) {
      // text node: find the search term and split the node around it
      var val = node.nodeValue;
      var pos = val.toLowerCase().indexOf(text);
      if (pos >= 0 &&
          !jQuery(node.parentNode).hasClass(className) &&
          !jQuery(node.parentNode).hasClass("nohighlight")) {
        var span;
        var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
        if (isInSVG) {
          // a <span> is invalid inside SVG text; use <tspan> plus a rect
          span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
        } else {
          span = document.createElement("span");
          span.className = className;
        }
        // wrap the match in the span; the remainder of the text is
        // re-inserted as a new text node after it
        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling));
        node.nodeValue = val.substr(0, pos);
        if (isInSVG) {
          // highlight rect sized from the parent's bounding box; it is
          // collected in addItems and inserted after traversal finishes
          var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
          var bbox = node.parentElement.getBBox();
          rect.x.baseVal.value = bbox.x;
          rect.y.baseVal.value = bbox.y;
          rect.width.baseVal.value = bbox.width;
          rect.height.baseVal.value = bbox.height;
          rect.setAttribute('class', className);
          addItems.push({
            "parent": node.parentNode,
            "target": rect});
        }
      }
    }
    else if (!jQuery(node).is("button, select, textarea")) {
      // recurse into element children, skipping form controls
      jQuery.each(node.childNodes, function() {
        highlight(this, addItems);
      });
    }
  }
  var addItems = [];
  var result = this.each(function() {
    highlight(this, addItems);
  });
  for (var i = 0; i < addItems.length; ++i) {
    jQuery(addItems[i].parent).before(addItems[i].target);
  }
  return result;
};
/*
 * backward compatibility for jQuery.browser
 * This will be supported until firefox bug is fixed.
 */
if (!jQuery.browser) {
  // fallback implementation for jQuery versions that no longer provide
  // jQuery.browser: sniff the UA string once and expose
  // jQuery.browser.<name> = true
  jQuery.uaMatch = function(ua) {
    ua = ua.toLowerCase();

    var match = /(chrome)[ \/]([\w.]+)/.exec(ua) ||
      /(webkit)[ \/]([\w.]+)/.exec(ua) ||
      /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) ||
      /(msie) ([\w.]+)/.exec(ua) ||
      ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) ||
      [];

    return {
      browser: match[ 1 ] || "",
      version: match[ 2 ] || "0"
    };
  };
  jQuery.browser = {};
  jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
}
/**
 * Small JavaScript module for the documentation.
 */
var Documentation = {

  init : function() {
    this.fixFirefoxAnchorBug();
    this.highlightSearchWords();
    this.initIndexTable();
    if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) {
      this.initOnKeyListeners();
    }
  },

  /**
   * i18n support
   */
  TRANSLATIONS : {},
  PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; },
  LOCALE : 'unknown',

  // gettext and ngettext don't access this so that the functions
  // can safely bound to a different name (_ = Documentation.gettext)
  gettext : function(string) {
    var translated = Documentation.TRANSLATIONS[string];
    if (typeof translated === 'undefined')
      return string;
    return (typeof translated === 'string') ? translated : translated[0];
  },

  ngettext : function(singular, plural, n) {
    var translated = Documentation.TRANSLATIONS[singular];
    if (typeof translated === 'undefined')
      return (n == 1) ? singular : plural;
    // BUGFIX: this previously called Documentation.PLURALEXPR(n), a property
    // that is never defined (the plural-form selector is PLURAL_EXPR above,
    // replaced per-catalog in addTranslations()), so any plural lookup for a
    // translated string threw a TypeError.
    return translated[Documentation.PLURAL_EXPR(n)];
  },

  addTranslations : function(catalog) {
    for (var key in catalog.messages)
      this.TRANSLATIONS[key] = catalog.messages[key];
    // compile the catalog's plural expression into a selector function
    this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
    this.LOCALE = catalog.locale;
  },

  /**
   * add context elements like header anchor links
   * NOTE(review): not invoked from init() — confirm whether callers are
   * expected to call this themselves.
   */
  addContextElements : function() {
    $('div[id] > :header:first').each(function() {
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this headline')).
      appendTo(this);
    });
    $('dt[id]').each(function() {
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this definition')).
      appendTo(this);
    });
  },

  /**
   * workaround a firefox stupidity
   * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075
   */
  fixFirefoxAnchorBug : function() {
    if (document.location.hash && $.browser.mozilla)
      window.setTimeout(function() {
        document.location.href += '';
      }, 10);
  },

  /**
   * highlight the search words provided in the url in the text
   */
  highlightSearchWords : function() {
    var params = $.getQueryParameters();
    var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
    if (terms.length) {
      var body = $('div.body');
      if (!body.length) {
        body = $('body');
      }
      window.setTimeout(function() {
        $.each(terms, function() {
          body.highlightText(this.toLowerCase(), 'highlighted');
        });
      }, 10);
      $('<p class="highlight-link"><a href="javascript:Documentation.' +
        'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
          .appendTo($('#searchbox'));
    }
  },

  /**
   * init the domain index toggle buttons
   */
  initIndexTable : function() {
    var togglers = $('img.toggler').click(function() {
      var src = $(this).attr('src');
      var idnum = $(this).attr('id').substr(7);
      $('tr.cg-' + idnum).toggle();
      // swap the plus/minus icon by rewriting the image filename suffix
      if (src.substr(-9) === 'minus.png')
        $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
      else
        $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
    }).css('display', '');
    if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
      togglers.click();
    }
  },

  /**
   * helper function to hide the search marks again
   */
  hideSearchWords : function() {
    $('#searchbox .highlight-link').fadeOut(300);
    $('span.highlighted').removeClass('highlighted');
  },

  /**
   * make the url absolute
   */
  makeURL : function(relativeURL) {
    return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
  },

  /**
   * get the current relative url
   */
  getCurrentURL : function() {
    var path = document.location.pathname;
    var parts = path.split(/\//);
    $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
      if (this === '..')
        parts.pop();
    });
    var url = parts.join('/');
    return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
  },

  initOnKeyListeners: function() {
    $(document).keydown(function(event) {
      var activeElementType = document.activeElement.tagName;
      // don't navigate when in search box or textarea
      if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT'
          && !event.altKey && !event.ctrlKey && !event.metaKey && !event.shiftKey) {
        switch (event.keyCode) {
          // NOTE(review): no `break` — when there is no "prev" link the
          // left-arrow case falls through to the right-arrow case; confirm
          // this fallthrough is intended.
          case 37: // left
            var prevHref = $('link[rel="prev"]').prop('href');
            if (prevHref) {
              window.location.href = prevHref;
              return false;
            }
          case 39: // right
            var nextHref = $('link[rel="next"]').prop('href');
            if (nextHref) {
              window.location.href = nextHref;
              return false;
            }
        }
      }
    });
  }
};
// quick alias for translations — assigned without `var`, so `_` becomes
// a global; this appears intentional so inline scripts can call it, but
// note it will throw in strict mode.
_ = Documentation.gettext;
// Run all of the initialization helpers once the DOM is ready.
$(document).ready(function() {
  Documentation.init();
});
/MetaCalls-0.0.5-cp310-cp310-manylinux2014_x86_64.whl/metacalls/node_modules/@types/node/ts4.8/dgram.d.ts | declare module 'dgram' {
import { AddressInfo } from 'node:net';
import * as dns from 'node:dns';
import { EventEmitter, Abortable } from 'node:events';
/**
 * Describes the remote endpoint of a received datagram; passed as the
 * second argument (`rinfo`) to `'message'` event listeners.
 */
interface RemoteInfo {
    /** Sender's address. */
    address: string;
    /** Address family of the sender. */
    family: 'IPv4' | 'IPv6';
    /** Sender's port. */
    port: number;
    /** Size of the received message in bytes. */
    size: number;
}
/**
 * Options accepted by the `socket.bind(options[, callback])` overload.
 */
interface BindOptions {
    port?: number | undefined;
    address?: string | undefined;
    exclusive?: boolean | undefined;
    /** An existing file descriptor to use for the socket. */
    fd?: number | undefined;
}
type SocketType = 'udp4' | 'udp6';
/**
 * Options accepted by `createSocket(options[, callback])`.  Extends
 * `Abortable`, so a `signal` may be supplied to close the socket via an
 * `AbortController` (see the `createSocket` documentation below).
 */
interface SocketOptions extends Abortable {
    /** Socket family; required. */
    type: SocketType;
    reuseAddr?: boolean | undefined;
    /**
     * @default false
     */
    ipv6Only?: boolean | undefined;
    recvBufferSize?: number | undefined;
    sendBufferSize?: number | undefined;
    /** Custom lookup function; the signature mirrors `dns.lookup`. */
    lookup?: ((hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void) | undefined;
}
/**
 * Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram
 * messages. When `address` and `port` are not passed to `socket.bind()` the
 * method will bind the socket to the "all interfaces" address on a random port
 * (it does the right thing for both `udp4` and `udp6` sockets). The bound address
 * and port can be retrieved using `socket.address().address` and `socket.address().port`.
 *
 * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.close()` on the socket:
 *
 * ```js
 * const controller = new AbortController();
 * const { signal } = controller;
 * const server = dgram.createSocket({ type: 'udp4', signal });
 * server.on('message', (msg, rinfo) => {
 *   console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
 * });
 * // Later, when you want to close the server.
 * controller.abort();
 * ```
 * @since v0.11.13
 * @param type The socket family, either `'udp4'` or `'udp6'` (first overload).
 * @param options Available options; see `SocketOptions` (second overload).
 * @param callback Attached as a listener for `'message'` events. Optional.
 */
function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
/**
 * Encapsulates the datagram functionality.
 *
 * New instances of `dgram.Socket` are created using {@link createSocket}.
 * The `new` keyword is not to be used to create `dgram.Socket` instances.
 * @since v0.1.99
 */
class Socket extends EventEmitter {
    /**
     * Tells the kernel to join a multicast group at the given `multicastAddress` and `multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the `multicastInterface` argument is not
     * specified, the operating system will choose
     * one interface and will add membership to it. To add membership to every
     * available interface, call `addMembership` multiple times, once per interface.
     *
     * When called on an unbound socket, this method will implicitly bind to a random
     * port, listening on all interfaces.
     *
     * When sharing a UDP socket across multiple `cluster` workers, the `socket.addMembership()` function must be called only once or an `EADDRINUSE` error will occur:
     *
     * ```js
     * import cluster from 'cluster';
     * import dgram from 'dgram';
     *
     * if (cluster.isPrimary) {
     *   cluster.fork(); // Works ok.
     *   cluster.fork(); // Fails with EADDRINUSE.
     * } else {
     *   const s = dgram.createSocket('udp4');
     *   s.bind(1234, () => {
     *     s.addMembership('224.0.0.114');
     *   });
     * }
     * ```
     * @since v0.6.9
     */
    addMembership(multicastAddress: string, multicastInterface?: string): void;
    /**
     * Returns an object containing the address information for a socket.
     * For UDP sockets, this object will contain `address`, `family` and `port` properties.
     *
     * This method throws `EBADF` if called on an unbound socket.
     * @since v0.1.99
     */
    address(): AddressInfo;
    /**
     * For UDP sockets, causes the `dgram.Socket` to listen for datagram
     * messages on a named `port` and optional `address`. If `port` is not
     * specified or is `0`, the operating system will attempt to bind to a
     * random port. If `address` is not specified, the operating system will
     * attempt to listen on all addresses. Once binding is complete, a `'listening'` event is emitted and the optional `callback` function is
     * called.
     *
     * Specifying both a `'listening'` event listener and passing a `callback` to the `socket.bind()` method is not harmful but not very
     * useful.
     *
     * A bound datagram socket keeps the Node.js process running to receive
     * datagram messages.
     *
     * If binding fails, an `'error'` event is generated. In rare case (e.g.
     * attempting to bind with a closed socket), an `Error` may be thrown.
     *
     * Example of a UDP server listening on port 41234:
     *
     * ```js
     * import dgram from 'dgram';
     *
     * const server = dgram.createSocket('udp4');
     *
     * server.on('error', (err) => {
     *   console.log(`server error:\n${err.stack}`);
     *   server.close();
     * });
     *
     * server.on('message', (msg, rinfo) => {
     *   console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
     * });
     *
     * server.on('listening', () => {
     *   const address = server.address();
     *   console.log(`server listening ${address.address}:${address.port}`);
     * });
     *
     * server.bind(41234);
     * // Prints: server listening 0.0.0.0:41234
     * ```
     * @since v0.1.99
     * @param callback with no parameters. Called when binding is complete.
     */
    bind(port?: number, address?: string, callback?: () => void): this;
    bind(port?: number, callback?: () => void): this;
    bind(callback?: () => void): this;
    bind(options: BindOptions, callback?: () => void): this;
    /**
     * Close the underlying socket and stop listening for data on it. If a callback is
     * provided, it is added as a listener for the `'close'` event.
     * @since v0.1.99
     * @param callback Called when the socket has been closed.
     */
    close(callback?: () => void): this;
    /**
     * Associates the `dgram.Socket` to a remote address and port. Every
     * message sent by this handle is automatically sent to that destination. Also,
     * the socket will only receive messages from that remote peer.
     * Trying to call `connect()` on an already connected socket will result
     * in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not
     * provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets)
     * will be used by default. Once the connection is complete, a `'connect'` event
     * is emitted and the optional `callback` function is called. In case of failure,
     * the `callback` is called or, failing this, an `'error'` event is emitted.
     * @since v12.0.0
     * @param callback Called when the connection is completed or on error.
     */
    connect(port: number, address?: string, callback?: () => void): void;
    connect(port: number, callback: () => void): void;
    /**
     * A synchronous function that disassociates a connected `dgram.Socket` from
     * its remote address. Trying to call `disconnect()` on an unbound or already
     * disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception.
     * @since v12.0.0
     */
    disconnect(): void;
    /**
     * Instructs the kernel to leave a multicast group at `multicastAddress` using the `IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the
     * kernel when the socket is closed or the process terminates, so most apps will
     * never have reason to call this.
     *
     * If `multicastInterface` is not specified, the operating system will attempt to
     * drop membership on all valid interfaces.
     * @since v0.6.9
     */
    dropMembership(multicastAddress: string, multicastInterface?: string): void;
    /**
     * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
     * @since v8.7.0
     * @return the `SO_RCVBUF` socket receive buffer size in bytes.
     */
    getRecvBufferSize(): number;
    /**
     * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
     * @since v8.7.0
     * @return the `SO_SNDBUF` socket send buffer size in bytes.
     */
    getSendBufferSize(): number;
    /**
     * By default, binding a socket will cause it to block the Node.js process from
     * exiting as long as the socket is open. The `socket.unref()` method can be used
     * to exclude the socket from the reference counting that keeps the Node.js
     * process active. The `socket.ref()` method adds the socket back to the reference
     * counting and restores the default behavior.
     *
     * Calling `socket.ref()` multiples times will have no additional effect.
     *
     * The `socket.ref()` method returns a reference to the socket so calls can be
     * chained.
     * @since v0.9.1
     */
    ref(): this;
    /**
     * Returns an object containing the `address`, `family`, and `port` of the remote
     * endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception
     * if the socket is not connected.
     * @since v12.0.0
     */
    remoteAddress(): AddressInfo;
    /**
     * Broadcasts a datagram on the socket.
     * For connectionless sockets, the destination `port` and `address` must be
     * specified. Connected sockets, on the other hand, will use their associated
     * remote endpoint, so the `port` and `address` arguments must not be set.
     *
     * The `msg` argument contains the message to be sent.
     * Depending on its type, different behavior can apply. If `msg` is a `Buffer`,
     * any `TypedArray` or a `DataView`,
     * the `offset` and `length` specify the offset within the `Buffer` where the
     * message begins and the number of bytes in the message, respectively.
     * If `msg` is a `String`, then it is automatically converted to a `Buffer` with `'utf8'` encoding. With messages that
     * contain multi-byte characters, `offset` and `length` will be calculated with
     * respect to `byte length` and not the character position.
     * If `msg` is an array, `offset` and `length` must not be specified.
     *
     * The `address` argument is a string. If the value of `address` is a host name,
     * DNS will be used to resolve the address of the host. If `address` is not
     * provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) will be used by default.
     *
     * If the socket has not been previously bound with a call to `bind`, the socket
     * is assigned a random port number and is bound to the "all interfaces" address
     * (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.)
     *
     * An optional `callback` function may be specified to as a way of reporting
     * DNS errors or for determining when it is safe to reuse the `buf` object.
     * DNS lookups delay the time to send for at least one tick of the
     * Node.js event loop.
     *
     * The only way to know for sure that the datagram has been sent is by using a `callback`. If an error occurs and a `callback` is given, the error will be
     * passed as the first argument to the `callback`. If a `callback` is not given,
     * the error is emitted as an `'error'` event on the `socket` object.
     *
     * Offset and length are optional but both _must_ be set if either are used.
     * They are supported only when the first argument is a `Buffer`, a `TypedArray`,
     * or a `DataView`.
     *
     * This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket.
     *
     * Example of sending a UDP packet to a port on `localhost`;
     *
     * ```js
     * import dgram from 'dgram';
     * import { Buffer } from 'buffer';
     *
     * const message = Buffer.from('Some bytes');
     * const client = dgram.createSocket('udp4');
     * client.send(message, 41234, 'localhost', (err) => {
     *   client.close();
     * });
     * ```
     *
     * Example of sending a UDP packet composed of multiple buffers to a port on `127.0.0.1`;
     *
     * ```js
     * import dgram from 'dgram';
     * import { Buffer } from 'buffer';
     *
     * const buf1 = Buffer.from('Some ');
     * const buf2 = Buffer.from('bytes');
     * const client = dgram.createSocket('udp4');
     * client.send([buf1, buf2], 41234, (err) => {
     *   client.close();
     * });
     * ```
     *
     * Sending multiple buffers might be faster or slower depending on the
     * application and operating system. Run benchmarks to
     * determine the optimal strategy on a case-by-case basis. Generally speaking,
     * however, sending multiple buffers is faster.
     *
     * Example of sending a UDP packet using a socket connected to a port on `localhost`:
     *
     * ```js
     * import dgram from 'dgram';
     * import { Buffer } from 'buffer';
     *
     * const message = Buffer.from('Some bytes');
     * const client = dgram.createSocket('udp4');
     * client.connect(41234, 'localhost', (err) => {
     *   client.send(message, (err) => {
     *     client.close();
     *   });
     * });
     * ```
     * @since v0.1.99
     * @param msg Message to be sent.
     * @param offset Offset in the buffer where the message starts.
     * @param length Number of bytes in the message.
     * @param port Destination port.
     * @param address Destination host name or IP address.
     * @param callback Called when the message has been sent.
     */
    // Overloads without offset/length accept string | buffer | array of chunks;
    // the offset/length overloads apply only to a single string or Uint8Array.
    send(msg: string | Uint8Array | ReadonlyArray<any>, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void;
    send(msg: string | Uint8Array | ReadonlyArray<any>, port?: number, callback?: (error: Error | null, bytes: number) => void): void;
    send(msg: string | Uint8Array | ReadonlyArray<any>, callback?: (error: Error | null, bytes: number) => void): void;
    send(msg: string | Uint8Array, offset: number, length: number, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void;
    send(msg: string | Uint8Array, offset: number, length: number, port?: number, callback?: (error: Error | null, bytes: number) => void): void;
    send(msg: string | Uint8Array, offset: number, length: number, callback?: (error: Error | null, bytes: number) => void): void;
    /**
     * Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP
     * packets may be sent to a local interface's broadcast address.
     *
     * This method throws `EBADF` if called on an unbound socket.
     * @since v0.6.9
     */
    setBroadcast(flag: boolean): void;
    /**
     * _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC
     * 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_
     * _with a scope index is written as `'IP%scope'` where scope is an interface name_
     * _or interface number._
     *
     * Sets the default outgoing multicast interface of the socket to a chosen
     * interface or back to system interface selection. The `multicastInterface` must
     * be a valid string representation of an IP from the socket's family.
     *
     * For IPv4 sockets, this should be the IP configured for the desired physical
     * interface. All packets sent to multicast on the socket will be sent on the
     * interface determined by the most recent successful use of this call.
     *
     * For IPv6 sockets, `multicastInterface` should include a scope to indicate the
     * interface as in the examples that follow. In IPv6, individual `send` calls can
     * also use explicit scope in addresses, so only packets sent to a multicast
     * address without specifying an explicit scope are affected by the most recent
     * successful use of this call.
     *
     * This method throws `EBADF` if called on an unbound socket.
     *
     * #### Example: IPv6 outgoing multicast interface
     *
     * On most systems, where scope format uses the interface name:
     *
     * ```js
     * const socket = dgram.createSocket('udp6');
     *
     * socket.bind(1234, () => {
     *   socket.setMulticastInterface('::%eth1');
     * });
     * ```
     *
     * On Windows, where scope format uses an interface number:
     *
     * ```js
     * const socket = dgram.createSocket('udp6');
     *
     * socket.bind(1234, () => {
     *   socket.setMulticastInterface('::%2');
     * });
     * ```
     *
     * #### Example: IPv4 outgoing multicast interface
     *
     * All systems use an IP of the host on the desired physical interface:
     *
     * ```js
     * const socket = dgram.createSocket('udp4');
     *
     * socket.bind(1234, () => {
     *   socket.setMulticastInterface('10.0.0.2');
     * });
     * ```
     * @since v8.6.0
     */
    setMulticastInterface(multicastInterface: string): void;
    /**
     * Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`,
     * multicast packets will also be received on the local interface.
     *
     * This method throws `EBADF` if called on an unbound socket.
     * @since v0.3.8
     */
    setMulticastLoopback(flag: boolean): boolean;
    /**
     * Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for
     * "Time to Live", in this context it specifies the number of IP hops that a
     * packet is allowed to travel through, specifically for multicast traffic. Each
     * router or gateway that forwards a packet decrements the TTL. If the TTL is
     * decremented to 0 by a router, it will not be forwarded.
     *
     * The `ttl` argument may be between 0 and 255\. The default on most systems is `1`.
     *
     * This method throws `EBADF` if called on an unbound socket.
     * @since v0.3.8
     */
    setMulticastTTL(ttl: number): number;
    /**
     * Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer
     * in bytes.
     *
     * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
     * @since v8.7.0
     */
    setRecvBufferSize(size: number): void;
    /**
     * Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer
     * in bytes.
     *
     * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
     * @since v8.7.0
     */
    setSendBufferSize(size: number): void;
    /**
     * Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live",
     * in this context it specifies the number of IP hops that a packet is allowed to
     * travel through. Each router or gateway that forwards a packet decrements the
     * TTL. If the TTL is decremented to 0 by a router, it will not be forwarded.
     * Changing TTL values is typically done for network probes or when multicasting.
     *
     * The `ttl` argument may be between 1 and 255\. The default on most systems
     * is 64.
     *
     * This method throws `EBADF` if called on an unbound socket.
     * @since v0.1.101
     */
    setTTL(ttl: number): number;
    /**
     * By default, binding a socket will cause it to block the Node.js process from
     * exiting as long as the socket is open. The `socket.unref()` method can be used
     * to exclude the socket from the reference counting that keeps the Node.js
     * process active, allowing the process to exit even if the socket is still
     * listening.
     *
     * Calling `socket.unref()` multiple times will have no addition effect.
     *
     * The `socket.unref()` method returns a reference to the socket so calls can be
     * chained.
     * @since v0.9.1
     */
    unref(): this;
    /**
     * Tells the kernel to join a source-specific multicast channel at the given `sourceAddress` and `groupAddress`, using the `multicastInterface` with the `IP_ADD_SOURCE_MEMBERSHIP` socket
     * option. If the `multicastInterface` argument
     * is not specified, the operating system will choose one interface and will add
     * membership to it. To add membership to every available interface, call `socket.addSourceSpecificMembership()` multiple times, once per interface.
     *
     * When called on an unbound socket, this method will implicitly bind to a random
     * port, listening on all interfaces.
     * @since v13.1.0, v12.16.0
     */
    addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void;
    /**
     * Instructs the kernel to leave a source-specific multicast channel at the given `sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP` socket option. This method is
     * automatically called by the kernel when the
     * socket is closed or the process terminates, so most apps will never have
     * reason to call this.
     *
     * If `multicastInterface` is not specified, the operating system will attempt to
     * drop membership on all valid interfaces.
     * @since v13.1.0, v12.16.0
     */
    dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void;
    /**
     * events.EventEmitter
     * 1. close
     * 2. connect
     * 3. error
     * 4. listening
     * 5. message
     *
     * The typed overloads below narrow the listener signature for each of
     * the five events; the first (string-keyed) overload in each group is
     * the generic fallback.
     */
    addListener(event: string, listener: (...args: any[]) => void): this;
    addListener(event: 'close', listener: () => void): this;
    addListener(event: 'connect', listener: () => void): this;
    addListener(event: 'error', listener: (err: Error) => void): this;
    addListener(event: 'listening', listener: () => void): this;
    addListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
    emit(event: string | symbol, ...args: any[]): boolean;
    emit(event: 'close'): boolean;
    emit(event: 'connect'): boolean;
    emit(event: 'error', err: Error): boolean;
    emit(event: 'listening'): boolean;
    emit(event: 'message', msg: Buffer, rinfo: RemoteInfo): boolean;
    on(event: string, listener: (...args: any[]) => void): this;
    on(event: 'close', listener: () => void): this;
    on(event: 'connect', listener: () => void): this;
    on(event: 'error', listener: (err: Error) => void): this;
    on(event: 'listening', listener: () => void): this;
    on(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
    once(event: string, listener: (...args: any[]) => void): this;
    once(event: 'close', listener: () => void): this;
    once(event: 'connect', listener: () => void): this;
    once(event: 'error', listener: (err: Error) => void): this;
    once(event: 'listening', listener: () => void): this;
    once(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
    prependListener(event: string, listener: (...args: any[]) => void): this;
    prependListener(event: 'close', listener: () => void): this;
    prependListener(event: 'connect', listener: () => void): this;
    prependListener(event: 'error', listener: (err: Error) => void): this;
    prependListener(event: 'listening', listener: () => void): this;
    prependListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
    prependOnceListener(event: string, listener: (...args: any[]) => void): this;
    prependOnceListener(event: 'close', listener: () => void): this;
    prependOnceListener(event: 'connect', listener: () => void): this;
    prependOnceListener(event: 'error', listener: (err: Error) => void): this;
    prependOnceListener(event: 'listening', listener: () => void): this;
    prependOnceListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
}
}
// Alias module: `import ... from 'node:dgram'` resolves to the exact
// same API surface as the unprefixed 'dgram' module above.
declare module 'node:dgram' {
    export * from 'dgram';
}
/CSUMMDET-1.0.23.tar.gz/CSUMMDET-1.0.23/mmdet/utils/zh_wiki.py |
zh2Hant = {
'呆': '獃',
"打印机": "印表機",
'帮助文件': '說明檔案',
"画": "畫",
"龙": "竜",
"板": "板",
"表": "表",
"才": "才",
"丑": "醜",
"出": "出",
"淀": "澱",
"冬": "冬",
"范": "範",
"丰": "豐",
"刮": "刮",
"后": "後",
"胡": "胡",
"回": "回",
"伙": "夥",
"姜": "薑",
"借": "借",
"克": "克",
"困": "困",
"漓": "漓",
"里": "里",
"帘": "簾",
"霉": "霉",
"面": "面",
"蔑": "蔑",
"千": "千",
"秋": "秋",
"松": "松",
"咸": "咸",
"向": "向",
"余": "餘",
"郁": "鬱",
"御": "御",
"愿": "願",
"云": "雲",
"芸": "芸",
"沄": "沄",
"致": "致",
"制": "制",
"朱": "朱",
"筑": "築",
"准": "準",
"厂": "廠",
"广": "廣",
"辟": "闢",
"别": "別",
"卜": "卜",
"沈": "沈",
"冲": "沖",
"种": "種",
"虫": "蟲",
"担": "擔",
"党": "黨",
"斗": "鬥",
"儿": "兒",
"干": "乾",
"谷": "谷",
"柜": "櫃",
"合": "合",
"划": "劃",
"坏": "壞",
"几": "幾",
"系": "系",
"家": "家",
"价": "價",
"据": "據",
"卷": "捲",
"适": "適",
"蜡": "蠟",
"腊": "臘",
"了": "了",
"累": "累",
"么": "麽",
"蒙": "蒙",
"万": "萬",
"宁": "寧",
"朴": "樸",
"苹": "蘋",
"仆": "僕",
"曲": "曲",
"确": "確",
"舍": "舍",
"胜": "勝",
"术": "術",
"台": "台",
"体": "體",
"涂": "塗",
"叶": "葉",
"吁": "吁",
"旋": "旋",
"佣": "傭",
"与": "與",
"折": "折",
"征": "徵",
"症": "症",
"恶": "惡",
"发": "發",
"复": "復",
"汇": "匯",
"获": "獲",
"饥": "飢",
"尽": "盡",
"历": "歷",
"卤": "滷",
"弥": "彌",
"签": "簽",
"纤": "纖",
"苏": "蘇",
"坛": "壇",
"团": "團",
"须": "須",
"脏": "臟",
"只": "只",
"钟": "鐘",
"药": "藥",
"同": "同",
"志": "志",
"杯": "杯",
"岳": "岳",
"布": "布",
"当": "當",
"吊": "弔",
"仇": "仇",
"蕴": "蘊",
"线": "線",
"为": "為",
"产": "產",
"众": "眾",
"伪": "偽",
"凫": "鳧",
"厕": "廁",
"启": "啟",
"墙": "牆",
"壳": "殼",
"奖": "獎",
"妫": "媯",
"并": "並",
"录": "錄",
"悫": "愨",
"极": "極",
"沩": "溈",
"瘘": "瘺",
"硷": "鹼",
"竖": "豎",
"绝": "絕",
"绣": "繡",
"绦": "絛",
"绱": "緔",
"绷": "綳",
"绿": "綠",
"缰": "韁",
"苧": "苎",
"莼": "蒓",
"说": "說",
"谣": "謠",
"谫": "譾",
"赃": "贓",
"赍": "齎",
"赝": "贗",
"酝": "醞",
"采": "採",
"钩": "鉤",
"钵": "缽",
"锈": "銹",
"锐": "銳",
"锨": "杴",
"镌": "鐫",
"镢": "钁",
"阅": "閱",
"颓": "頹",
"颜": "顏",
"骂": "罵",
"鲇": "鯰",
"鲞": "鯗",
"鳄": "鱷",
"鸡": "雞",
"鹚": "鶿",
"荡": "盪",
"锤": "錘",
"㟆": "㠏",
"㛟": "ð¡µ",
"专": "專",
"业": "業",
"丛": "叢",
"东": "東",
"丝": "絲",
"丢": "丟",
"两": "兩",
"严": "嚴",
"丧": "喪",
"个": "個",
"临": "臨",
"丽": "麗",
"举": "舉",
"义": "義",
"乌": "烏",
"乐": "樂",
"乔": "喬",
"习": "習",
"乡": "鄉",
"书": "書",
"买": "買",
"乱": "亂",
"争": "爭",
"于": "於",
"亏": "虧",
"亚": "亞",
"亩": "畝",
"亲": "親",
"亵": "褻",
"亸": "嚲",
"亿": "億",
"仅": "僅",
"从": "從",
"仑": "侖",
"仓": "倉",
"仪": "儀",
"们": "們",
"优": "優",
"会": "會",
"伛": "傴",
"伞": "傘",
"伟": "偉",
"传": "傳",
"伣": "俔",
"伤": "傷",
"伥": "倀",
"伦": "倫",
"伧": "傖",
"伫": "佇",
"佥": "僉",
"侠": "俠",
"侣": "侶",
"侥": "僥",
"侦": "偵",
"侧": "側",
"侨": "僑",
"侩": "儈",
"侪": "儕",
"侬": "儂",
"俣": "俁",
"俦": "儔",
"俨": "儼",
"俩": "倆",
"俪": "儷",
"俫": "倈",
"俭": "儉",
"债": "債",
"倾": "傾",
"偬": "傯",
"偻": "僂",
"偾": "僨",
"偿": "償",
"傥": "儻",
"傧": "儐",
"储": "儲",
"傩": "儺",
"㑩": "儸",
"兑": "兌",
"兖": "兗",
"兰": "蘭",
"关": "關",
"兴": "興",
"兹": "茲",
"养": "養",
"兽": "獸",
"冁": "囅",
"内": "內",
"冈": "岡",
"册": "冊",
"写": "寫",
"军": "軍",
"农": "農",
"冯": "馮",
"决": "決",
"况": "況",
"冻": "凍",
"净": "凈",
"凉": "涼",
"减": "減",
"凑": "湊",
"凛": "凜",
"凤": "鳳",
"凭": "憑",
"凯": "凱",
"击": "擊",
"凿": "鑿",
"刍": "芻",
"刘": "劉",
"则": "則",
"刚": "剛",
"创": "創",
"删": "刪",
"刬": "剗",
"刭": "剄",
"刹": "剎",
"刽": "劊",
"刿": "劌",
"剀": "剴",
"剂": "劑",
"剐": "剮",
"剑": "劍",
"剥": "剝",
"剧": "劇",
"㓥": "劏",
"㔉": "劚",
"劝": "勸",
"办": "辦",
"务": "務",
"劢": "勱",
"动": "動",
"励": "勵",
"劲": "勁",
"劳": "勞",
"势": "勢",
"勋": "勛",
"勚": "勩",
"匀": "勻",
"匦": "匭",
"匮": "匱",
"区": "區",
"医": "醫",
"华": "華",
"协": "協",
"单": "單",
"卖": "賣",
"卢": "盧",
"卫": "衛",
"却": "卻",
"厅": "廳",
"厉": "厲",
"压": "壓",
"厌": "厭",
"厍": "厙",
"厐": "龎",
"厘": "釐",
"厢": "廂",
"厣": "厴",
"厦": "廈",
"厨": "廚",
"厩": "廄",
"厮": "廝",
"县": "縣",
"叁": "叄",
"参": "參",
"双": "雙",
"变": "變",
"叙": "敘",
"叠": "疊",
"号": "號",
"叹": "嘆",
"叽": "嘰",
"吓": "嚇",
"吕": "呂",
"吗": "嗎",
"吣": "唚",
"吨": "噸",
"听": "聽",
"吴": "吳",
"呐": "吶",
"呒": "嘸",
"呓": "囈",
"呕": "嘔",
"呖": "嚦",
"呗": "唄",
"员": "員",
"呙": "咼",
"呛": "嗆",
"呜": "嗚",
"咏": "詠",
"咙": "嚨",
"咛": "嚀",
"咝": "噝",
"咤": "吒",
"响": "響",
"哑": "啞",
"哒": "噠",
"哓": "嘵",
"哔": "嗶",
"哕": "噦",
"哗": "嘩",
"哙": "噲",
"哜": "嚌",
"哝": "噥",
"哟": "喲",
"唛": "嘜",
"唝": "嗊",
"唠": "嘮",
"唡": "啢",
"唢": "嗩",
"唤": "喚",
"啧": "嘖",
"啬": "嗇",
"啭": "囀",
"啮": "嚙",
"啴": "嘽",
"啸": "嘯",
"㖞": "喎",
"喷": "噴",
"喽": "嘍",
"喾": "嚳",
"嗫": "囁",
"嗳": "噯",
"嘘": "噓",
"嘤": "嚶",
"嘱": "囑",
"㖊": "噚",
"噜": "嚕",
"嚣": "囂",
"园": "園",
"囱": "囪",
"围": "圍",
"囵": "圇",
"国": "國",
"图": "圖",
"圆": "圓",
"圣": "聖",
"圹": "壙",
"场": "場",
"坂": "阪",
"块": "塊",
"坚": "堅",
"坜": "壢",
"坝": "壩",
"坞": "塢",
"坟": "墳",
"坠": "墜",
"垄": "壟",
"垅": "壠",
"垆": "壚",
"垒": "壘",
"垦": "墾",
"垩": "堊",
"垫": "墊",
"垭": "埡",
"垱": "壋",
"垲": "塏",
"垴": "堖",
"埘": "塒",
"埙": "塤",
"埚": "堝",
"埯": "垵",
"堑": "塹",
"堕": "墮",
"ð¡": "壈",
"壮": "壯",
"声": "聲",
"壶": "壺",
"壸": "壼",
"处": "處",
"备": "備",
"够": "夠",
"头": "頭",
"夸": "誇",
"夹": "夾",
"夺": "奪",
"奁": "奩",
"奂": "奐",
"奋": "奮",
"奥": "奧",
"奸": "姦",
"妆": "妝",
"妇": "婦",
"妈": "媽",
"妩": "嫵",
"妪": "嫗",
"姗": "姍",
"姹": "奼",
"娄": "婁",
"娅": "婭",
"娆": "嬈",
"娇": "嬌",
"娈": "孌",
"娱": "娛",
"娲": "媧",
"娴": "嫻",
"婳": "嫿",
"婴": "嬰",
"婵": "嬋",
"婶": "嬸",
"媪": "媼",
"嫒": "嬡",
"嫔": "嬪",
"嫱": "嬙",
"嬷": "嬤",
"孙": "孫",
"学": "學",
"孪": "孿",
"宝": "寶",
"实": "實",
"宠": "寵",
"审": "審",
"宪": "憲",
"宫": "宮",
"宽": "寬",
"宾": "賓",
"寝": "寢",
"对": "對",
"寻": "尋",
"导": "導",
"寿": "壽",
"将": "將",
"尔": "爾",
"尘": "塵",
"尝": "嘗",
"尧": "堯",
"尴": "尷",
"尸": "屍",
"层": "層",
"屃": "屓",
"屉": "屜",
"届": "屆",
"属": "屬",
"屡": "屢",
"屦": "屨",
"屿": "嶼",
"岁": "歲",
"岂": "豈",
"岖": "嶇",
"岗": "崗",
"岘": "峴",
"岙": "嶴",
"岚": "嵐",
"岛": "島",
"岭": "嶺",
"岽": "崬",
"岿": "巋",
"峄": "嶧",
"峡": "峽",
"峣": "嶢",
"峤": "嶠",
"峥": "崢",
"峦": "巒",
"崂": "嶗",
"崃": "崍",
"崄": "嶮",
"崭": "嶄",
"嵘": "嶸",
"嵚": "嶔",
"嵝": "嶁",
"巅": "巔",
"巩": "鞏",
"巯": "巰",
"币": "幣",
"帅": "帥",
"师": "師",
"帏": "幃",
"帐": "帳",
"帜": "幟",
"带": "帶",
"帧": "幀",
"帮": "幫",
"帱": "幬",
"帻": "幘",
"帼": "幗",
"幂": "冪",
"庄": "莊",
"庆": "慶",
"庐": "廬",
"庑": "廡",
"库": "庫",
"应": "應",
"庙": "廟",
"庞": "龐",
"废": "廢",
"廪": "廩",
"开": "開",
"异": "異",
"弃": "棄",
"弑": "弒",
"张": "張",
"弪": "弳",
"弯": "彎",
"弹": "彈",
"强": "強",
"归": "歸",
"彝": "彞",
"彦": "彥",
"彻": "徹",
"径": "徑",
"徕": "徠",
"忆": "憶",
"忏": "懺",
"忧": "憂",
"忾": "愾",
"怀": "懷",
"态": "態",
"怂": "慫",
"怃": "憮",
"怄": "慪",
"怅": "悵",
"怆": "愴",
"怜": "憐",
"总": "總",
"怼": "懟",
"怿": "懌",
"恋": "戀",
"恒": "恆",
"恳": "懇",
"恸": "慟",
"恹": "懨",
"恺": "愷",
"恻": "惻",
"恼": "惱",
"恽": "惲",
"悦": "悅",
"悬": "懸",
"悭": "慳",
"悮": "悞",
"悯": "憫",
"惊": "驚",
"惧": "懼",
"惨": "慘",
"惩": "懲",
"惫": "憊",
"惬": "愜",
"惭": "慚",
"惮": "憚",
"惯": "慣",
"愠": "慍",
"愤": "憤",
"愦": "憒",
"慑": "懾",
"懑": "懣",
"懒": "懶",
"懔": "懍",
"戆": "戇",
"戋": "戔",
"戏": "戲",
"戗": "戧",
"战": "戰",
"戬": "戩",
"戯": "戱",
"户": "戶",
"扑": "撲",
"执": "執",
"扩": "擴",
"扪": "捫",
"扫": "掃",
"扬": "揚",
"扰": "擾",
"抚": "撫",
"抛": "拋",
"抟": "摶",
"抠": "摳",
"抡": "掄",
"抢": "搶",
"护": "護",
"报": "報",
"拟": "擬",
"拢": "攏",
"拣": "揀",
"拥": "擁",
"拦": "攔",
"拧": "擰",
"拨": "撥",
"择": "擇",
"挂": "掛",
"挚": "摯",
"挛": "攣",
"挜": "掗",
"挝": "撾",
"挞": "撻",
"挟": "挾",
"挠": "撓",
"挡": "擋",
"挢": "撟",
"挣": "掙",
"挤": "擠",
"挥": "揮",
"挦": "撏",
"挽": "輓",
"捝": "挩",
"捞": "撈",
"损": "損",
"捡": "撿",
"换": "換",
"捣": "搗",
"掳": "擄",
"掴": "摑",
"掷": "擲",
"掸": "撣",
"掺": "摻",
"掼": "摜",
"揽": "攬",
"揾": "搵",
"揿": "撳",
"搀": "攙",
"搁": "擱",
"搂": "摟",
"搅": "攪",
"携": "攜",
"摄": "攝",
"摅": "攄",
"摆": "擺",
"摇": "搖",
"摈": "擯",
"摊": "攤",
"撄": "攖",
"撑": "撐",
"㧑": "撝",
"撵": "攆",
"撷": "擷",
"撸": "擼",
"撺": "攛",
"㧟": "擓",
"擞": "擻",
"攒": "攢",
"敌": "敵",
"敛": "斂",
"数": "數",
"斋": "齋",
"斓": "斕",
"斩": "斬",
"断": "斷",
"无": "無",
"旧": "舊",
"时": "時",
"旷": "曠",
"旸": "暘",
"昙": "曇",
"昼": "晝",
"昽": "曨",
"显": "顯",
"晋": "晉",
"晒": "曬",
"晓": "曉",
"晔": "曄",
"晕": "暈",
"晖": "暉",
"暂": "暫",
"暧": "曖",
"机": "機",
"杀": "殺",
"杂": "雜",
"权": "權",
"杆": "桿",
"条": "條",
"来": "來",
"杨": "楊",
"杩": "榪",
"杰": "傑",
"构": "構",
"枞": "樅",
"枢": "樞",
"枣": "棗",
"枥": "櫪",
"枧": "梘",
"枨": "棖",
"枪": "槍",
"枫": "楓",
"枭": "梟",
"柠": "檸",
"柽": "檉",
"栀": "梔",
"栅": "柵",
"标": "標",
"栈": "棧",
"栉": "櫛",
"栊": "櫳",
"栋": "棟",
"栌": "櫨",
"栎": "櫟",
"栏": "欄",
"树": "樹",
"栖": "棲",
"栗": "慄",
"样": "樣",
"栾": "欒",
"桠": "椏",
"桡": "橈",
"桢": "楨",
"档": "檔",
"桤": "榿",
"桥": "橋",
"桦": "樺",
"桧": "檜",
"桨": "槳",
"桩": "樁",
"梦": "夢",
"梼": "檮",
"梾": "棶",
"梿": "槤",
"检": "檢",
"棁": "梲",
"棂": "欞",
"椁": "槨",
"椟": "櫝",
"椠": "槧",
"椤": "欏",
"椭": "橢",
"楼": "樓",
"榄": "欖",
"榅": "榲",
"榇": "櫬",
"榈": "櫚",
"榉": "櫸",
"槚": "檟",
"槛": "檻",
"槟": "檳",
"槠": "櫧",
"横": "橫",
"樯": "檣",
"樱": "櫻",
"橥": "櫫",
"橱": "櫥",
"橹": "櫓",
"橼": "櫞",
"檩": "檁",
"欢": "歡",
"欤": "歟",
"欧": "歐",
"歼": "殲",
"殁": "歿",
"殇": "殤",
"残": "殘",
"殒": "殞",
"殓": "殮",
"殚": "殫",
"殡": "殯",
"㱮": "殨",
"㱩": "殰",
"殴": "毆",
"毁": "毀",
"毂": "轂",
"毕": "畢",
"毙": "斃",
"毡": "氈",
"毵": "毿",
"氇": "氌",
"气": "氣",
"氢": "氫",
"氩": "氬",
"氲": "氳",
"汉": "漢",
"汤": "湯",
"汹": "洶",
"沟": "溝",
"没": "沒",
"沣": "灃",
"沤": "漚",
"沥": "瀝",
"沦": "淪",
"沧": "滄",
"沪": "滬",
"泞": "濘",
"注": "註",
"泪": "淚",
"泶": "澩",
"泷": "瀧",
"泸": "瀘",
"泺": "濼",
"泻": "瀉",
"泼": "潑",
"泽": "澤",
"泾": "涇",
"洁": "潔",
"洒": "灑",
"洼": "窪",
"浃": "浹",
"浅": "淺",
"浆": "漿",
"浇": "澆",
"浈": "湞",
"浊": "濁",
"测": "測",
"浍": "澮",
"济": "濟",
"浏": "瀏",
"浐": "滻",
"浑": "渾",
"浒": "滸",
"浓": "濃",
"浔": "潯",
"涛": "濤",
"涝": "澇",
"涞": "淶",
"涟": "漣",
"涠": "潿",
"涡": "渦",
"涣": "渙",
"涤": "滌",
"润": "潤",
"涧": "澗",
"涨": "漲",
"涩": "澀",
"渊": "淵",
"渌": "淥",
"渍": "漬",
"渎": "瀆",
"渐": "漸",
"渑": "澠",
"渔": "漁",
"渖": "瀋",
"渗": "滲",
"温": "溫",
"湾": "灣",
"湿": "濕",
"溃": "潰",
"溅": "濺",
"溆": "漵",
"滗": "潷",
"滚": "滾",
"滞": "滯",
"滟": "灧",
"滠": "灄",
"满": "滿",
"滢": "瀅",
"滤": "濾",
"滥": "濫",
"滦": "灤",
"滨": "濱",
"滩": "灘",
"滪": "澦",
"漤": "灠",
"潆": "瀠",
"潇": "瀟",
"潋": "瀲",
"潍": "濰",
"潜": "潛",
"潴": "瀦",
"澜": "瀾",
"濑": "瀨",
"濒": "瀕",
"㲿": "瀇",
"灏": "灝",
"灭": "滅",
"灯": "燈",
"灵": "靈",
"灶": "竈",
"灾": "災",
"灿": "燦",
"炀": "煬",
"炉": "爐",
"炖": "燉",
"炜": "煒",
"炝": "熗",
"点": "點",
"炼": "煉",
"炽": "熾",
"烁": "爍",
"烂": "爛",
"烃": "烴",
"烛": "燭",
"烟": "煙",
"烦": "煩",
"烧": "燒",
"烨": "燁",
"烩": "燴",
"烫": "燙",
"烬": "燼",
"热": "熱",
"焕": "煥",
"焖": "燜",
"焘": "燾",
"㶽": "煱",
"煴": "熅",
"㶶": "燶",
"爱": "愛",
"爷": "爺",
"牍": "牘",
"牦": "氂",
"牵": "牽",
"牺": "犧",
"犊": "犢",
"状": "狀",
"犷": "獷",
"犸": "獁",
"犹": "猶",
"狈": "狽",
"狝": "獮",
"狞": "獰",
"独": "獨",
"狭": "狹",
"狮": "獅",
"狯": "獪",
"狰": "猙",
"狱": "獄",
"狲": "猻",
"猃": "獫",
"猎": "獵",
"猕": "獼",
"猡": "玀",
"猪": "豬",
"猫": "貓",
"猬": "蝟",
"献": "獻",
"獭": "獺",
"㺍": "獱",
"玑": "璣",
"玚": "瑒",
"玛": "瑪",
"玮": "瑋",
"环": "環",
"现": "現",
"玱": "瑲",
"玺": "璽",
"珐": "琺",
"珑": "瓏",
"珰": "璫",
"珲": "琿",
"琏": "璉",
"琐": "瑣",
"琼": "瓊",
"瑶": "瑤",
"瑷": "璦",
"璎": "瓔",
"瓒": "瓚",
"瓯": "甌",
"电": "電",
"画": "畫",
"畅": "暢",
"畴": "疇",
"疖": "癤",
"疗": "療",
"疟": "瘧",
"疠": "癘",
"疡": "瘍",
"疬": "癧",
"疭": "瘲",
"疮": "瘡",
"疯": "瘋",
"疱": "皰",
"疴": "痾",
"痈": "癰",
"痉": "痙",
"痒": "癢",
"痖": "瘂",
"痨": "癆",
"痪": "瘓",
"痫": "癇",
"瘅": "癉",
"瘆": "瘮",
"瘗": "瘞",
"瘪": "癟",
"瘫": "癱",
"瘾": "癮",
"瘿": "癭",
"癞": "癩",
"癣": "癬",
"癫": "癲",
"皑": "皚",
"皱": "皺",
"皲": "皸",
"盏": "盞",
"盐": "鹽",
"监": "監",
"盖": "蓋",
"盗": "盜",
"盘": "盤",
"眍": "瞘",
"眦": "眥",
"眬": "矓",
"着": "著",
"睁": "睜",
"睐": "睞",
"睑": "瞼",
"瞆": "瞶",
"瞒": "瞞",
"䁖": "瞜",
"瞩": "矚",
"矫": "矯",
"矶": "磯",
"矾": "礬",
"矿": "礦",
"砀": "碭",
"码": "碼",
"砖": "磚",
"砗": "硨",
"砚": "硯",
"砜": "碸",
"砺": "礪",
"砻": "礱",
"砾": "礫",
"础": "礎",
"硁": "硜",
"硕": "碩",
"硖": "硤",
"硗": "磽",
"硙": "磑",
"碍": "礙",
"碛": "磧",
"碜": "磣",
"碱": "鹼",
"礼": "禮",
"祃": "禡",
"祎": "禕",
"祢": "禰",
"祯": "禎",
"祷": "禱",
"祸": "禍",
"禀": "稟",
"禄": "祿",
"禅": "禪",
"离": "離",
"秃": "禿",
"秆": "稈",
"积": "積",
"称": "稱",
"秽": "穢",
"秾": "穠",
"稆": "穭",
"税": "稅",
"䅉": "稏",
"稣": "穌",
"稳": "穩",
"穑": "穡",
"穷": "窮",
"窃": "竊",
"窍": "竅",
"窎": "窵",
"窑": "窯",
"窜": "竄",
"窝": "窩",
"窥": "窺",
"窦": "竇",
"窭": "窶",
"竞": "競",
"笃": "篤",
"笋": "筍",
"笔": "筆",
"笕": "筧",
"笺": "箋",
"笼": "籠",
"笾": "籩",
"筚": "篳",
"筛": "篩",
"筜": "簹",
"筝": "箏",
"䇲": "筴",
"筹": "籌",
"筼": "篔",
"简": "簡",
"箓": "籙",
"箦": "簀",
"箧": "篋",
"箨": "籜",
"箩": "籮",
"箪": "簞",
"箫": "簫",
"篑": "簣",
"篓": "簍",
"篮": "籃",
"篱": "籬",
"簖": "籪",
"籁": "籟",
"籴": "糴",
"类": "類",
"籼": "秈",
"粜": "糶",
"粝": "糲",
"粤": "粵",
"粪": "糞",
"粮": "糧",
"糁": "糝",
"糇": "餱",
"紧": "緊",
"䌷": "紬",
"䌹": "絅",
"絷": "縶",
"䌼": "綐",
"䌽": "綵",
"䌸": "縳",
"䍁": "繸",
"䍀": "繿",
"纟": "糹",
"纠": "糾",
"纡": "紆",
"红": "紅",
"纣": "紂",
"纥": "紇",
"约": "約",
"级": "級",
"纨": "紈",
"纩": "纊",
"纪": "紀",
"纫": "紉",
"纬": "緯",
"纭": "紜",
"纮": "紘",
"纯": "純",
"纰": "紕",
"纱": "紗",
"纲": "綱",
"纳": "納",
"纴": "紝",
"纵": "縱",
"纶": "綸",
"纷": "紛",
"纸": "紙",
"纹": "紋",
"纺": "紡",
"纻": "紵",
"纼": "紖",
"纽": "紐",
"纾": "紓",
"绀": "紺",
"绁": "紲",
"绂": "紱",
"练": "練",
"组": "組",
"绅": "紳",
"细": "細",
"织": "織",
"终": "終",
"绉": "縐",
"绊": "絆",
"绋": "紼",
"绌": "絀",
"绍": "紹",
"绎": "繹",
"经": "經",
"绐": "紿",
"绑": "綁",
"绒": "絨",
"结": "結",
"绔": "絝",
"绕": "繞",
"绖": "絰",
"绗": "絎",
"绘": "繪",
"给": "給",
"绚": "絢",
"绛": "絳",
"络": "絡",
"绞": "絞",
"统": "統",
"绠": "綆",
"绡": "綃",
"绢": "絹",
"绤": "綌",
"绥": "綏",
"继": "繼",
"绨": "綈",
"绩": "績",
"绪": "緒",
"绫": "綾",
"绬": "緓",
"续": "續",
"绮": "綺",
"绯": "緋",
"绰": "綽",
"绲": "緄",
"绳": "繩",
"维": "維",
"绵": "綿",
"绶": "綬",
"绸": "綢",
"绹": "綯",
"绺": "綹",
"绻": "綣",
"综": "綜",
"绽": "綻",
"绾": "綰",
"缀": "綴",
"缁": "緇",
"缂": "緙",
"缃": "緗",
"缄": "緘",
"缅": "緬",
"缆": "纜",
"缇": "緹",
"缈": "緲",
"缉": "緝",
"缊": "縕",
"缋": "繢",
"缌": "緦",
"缍": "綞",
"缎": "緞",
"缏": "緶",
"缑": "緱",
"缒": "縋",
"缓": "緩",
"缔": "締",
"缕": "縷",
"编": "編",
"缗": "緡",
"缘": "緣",
"缙": "縉",
"缚": "縛",
"缛": "縟",
"缜": "縝",
"缝": "縫",
"缞": "縗",
"缟": "縞",
"缠": "纏",
"缡": "縭",
"缢": "縊",
"缣": "縑",
"缤": "繽",
"缥": "縹",
"缦": "縵",
"缧": "縲",
"缨": "纓",
"缩": "縮",
"缪": "繆",
"缫": "繅",
"缬": "纈",
"缭": "繚",
"缮": "繕",
"缯": "繒",
"缱": "繾",
"缲": "繰",
"缳": "繯",
"缴": "繳",
"缵": "纘",
"罂": "罌",
"网": "網",
"罗": "羅",
"罚": "罰",
"罢": "罷",
"罴": "羆",
"羁": "羈",
"羟": "羥",
"翘": "翹",
"耢": "耮",
"耧": "耬",
"耸": "聳",
"耻": "恥",
"聂": "聶",
"聋": "聾",
"职": "職",
"聍": "聹",
"联": "聯",
"聩": "聵",
"聪": "聰",
"肃": "肅",
"肠": "腸",
"肤": "膚",
"肮": "骯",
"肴": "餚",
"肾": "腎",
"肿": "腫",
"胀": "脹",
"胁": "脅",
"胆": "膽",
"胧": "朧",
"胨": "腖",
"胪": "臚",
"胫": "脛",
"胶": "膠",
"脉": "脈",
"脍": "膾",
"脐": "臍",
"脑": "腦",
"脓": "膿",
"脔": "臠",
"脚": "腳",
"脱": "脫",
"脶": "腡",
"脸": "臉",
"腭": "齶",
"腻": "膩",
"腼": "靦",
"腽": "膃",
"腾": "騰",
"膑": "臏",
"臜": "臢",
"舆": "輿",
"舣": "艤",
"舰": "艦",
"舱": "艙",
"舻": "艫",
"艰": "艱",
"艳": "艷",
"艺": "藝",
"节": "節",
"芈": "羋",
"芗": "薌",
"芜": "蕪",
"芦": "蘆",
"苁": "蓯",
"苇": "葦",
"苈": "藶",
"苋": "莧",
"苌": "萇",
"苍": "蒼",
"苎": "苧",
"茎": "莖",
"茏": "蘢",
"茑": "蔦",
"茔": "塋",
"茕": "煢",
"茧": "繭",
"荆": "荊",
"荐": "薦",
"荙": "薘",
"荚": "莢",
"荛": "蕘",
"荜": "蓽",
"荞": "蕎",
"荟": "薈",
"荠": "薺",
"荣": "榮",
"荤": "葷",
"荥": "滎",
"荦": "犖",
"荧": "熒",
"荨": "蕁",
"荩": "藎",
"荪": "蓀",
"荫": "蔭",
"荬": "蕒",
"荭": "葒",
"荮": "葤",
"莅": "蒞",
"莱": "萊",
"莲": "蓮",
"莳": "蒔",
"莴": "萵",
"莶": "薟",
"莸": "蕕",
"莹": "瑩",
"莺": "鶯",
"萝": "蘿",
"萤": "螢",
"营": "營",
"萦": "縈",
"萧": "蕭",
"萨": "薩",
"葱": "蔥",
"蒇": "蕆",
"蒉": "蕢",
"蒋": "蔣",
"蒌": "蔞",
"蓝": "藍",
"蓟": "薊",
"蓠": "蘺",
"蓣": "蕷",
"蓥": "鎣",
"蓦": "驀",
"蔂": "虆",
"蔷": "薔",
"蔹": "蘞",
"蔺": "藺",
"蔼": "藹",
"蕰": "薀",
"蕲": "蘄",
"薮": "藪",
"䓕": "薳",
"藓": "蘚",
"蘖": "櫱",
"虏": "虜",
"虑": "慮",
"虚": "虛",
"虬": "虯",
"虮": "蟣",
"虽": "雖",
"虾": "蝦",
"虿": "蠆",
"蚀": "蝕",
"蚁": "蟻",
"蚂": "螞",
"蚕": "蠶",
"蚬": "蜆",
"蛊": "蠱",
"蛎": "蠣",
"蛏": "蟶",
"蛮": "蠻",
"蛰": "蟄",
"蛱": "蛺",
"蛲": "蟯",
"蛳": "螄",
"蛴": "蠐",
"蜕": "蛻",
"蜗": "蝸",
"蝇": "蠅",
"蝈": "蟈",
"蝉": "蟬",
"蝼": "螻",
"蝾": "蠑",
"螀": "螿",
"螨": "蟎",
"䗖": "螮",
"蟏": "蠨",
"衅": "釁",
"衔": "銜",
"补": "補",
"衬": "襯",
"衮": "袞",
"袄": "襖",
"袅": "裊",
"袆": "褘",
"袜": "襪",
"袭": "襲",
"袯": "襏",
"装": "裝",
"裆": "襠",
"裈": "褌",
"裢": "褳",
"裣": "襝",
"裤": "褲",
"裥": "襇",
"褛": "褸",
"褴": "襤",
"䙓": "襬",
"见": "見",
"观": "觀",
"觃": "覎",
"规": "規",
"觅": "覓",
"视": "視",
"觇": "覘",
"览": "覽",
"觉": "覺",
"觊": "覬",
"觋": "覡",
"觌": "覿",
"觍": "覥",
"觎": "覦",
"觏": "覯",
"觐": "覲",
"觑": "覷",
"觞": "觴",
"触": "觸",
"觯": "觶",
"訚": "誾",
"䜣": "訢",
"誉": "譽",
"誊": "謄",
"䜧": "譅",
"讠": "訁",
"计": "計",
"订": "訂",
"讣": "訃",
"认": "認",
"讥": "譏",
"讦": "訐",
"讧": "訌",
"讨": "討",
"让": "讓",
"讪": "訕",
"讫": "訖",
"讬": "託",
"训": "訓",
"议": "議",
"讯": "訊",
"记": "記",
"讱": "訒",
"讲": "講",
"讳": "諱",
"讴": "謳",
"讵": "詎",
"讶": "訝",
"讷": "訥",
"许": "許",
"讹": "訛",
"论": "論",
"讻": "訩",
"讼": "訟",
"讽": "諷",
"设": "設",
"访": "訪",
"诀": "訣",
"证": "證",
"诂": "詁",
"诃": "訶",
"评": "評",
"诅": "詛",
"识": "識",
"诇": "詗",
"诈": "詐",
"诉": "訴",
"诊": "診",
"诋": "詆",
"诌": "謅",
"词": "詞",
"诎": "詘",
"诏": "詔",
"诐": "詖",
"译": "譯",
"诒": "詒",
"诓": "誆",
"诔": "誄",
"试": "試",
"诖": "詿",
"诗": "詩",
"诘": "詰",
"诙": "詼",
"诚": "誠",
"诛": "誅",
"诜": "詵",
"话": "話",
"诞": "誕",
"诟": "詬",
"诠": "詮",
"诡": "詭",
"询": "詢",
"诣": "詣",
"诤": "諍",
"该": "該",
"详": "詳",
"诧": "詫",
"诨": "諢",
"诩": "詡",
"诪": "譸",
"诫": "誡",
"诬": "誣",
"语": "語",
"诮": "誚",
"误": "誤",
"诰": "誥",
"诱": "誘",
"诲": "誨",
"诳": "誑",
"诵": "誦",
"诶": "誒",
"请": "請",
"诸": "諸",
"诹": "諏",
"诺": "諾",
"读": "讀",
"诼": "諑",
"诽": "誹",
"课": "課",
"诿": "諉",
"谀": "諛",
"谁": "誰",
"谂": "諗",
"调": "調",
"谄": "諂",
"谅": "諒",
"谆": "諄",
"谇": "誶",
"谈": "談",
"谊": "誼",
"谋": "謀",
"谌": "諶",
"谍": "諜",
"谎": "謊",
"谏": "諫",
"谐": "諧",
"谑": "謔",
"谒": "謁",
"谓": "謂",
"谔": "諤",
"谕": "諭",
"谖": "諼",
"谗": "讒",
"谘": "諮",
"谙": "諳",
"谚": "諺",
"谛": "諦",
"谜": "謎",
"谝": "諞",
"谞": "諝",
"谟": "謨",
"谠": "讜",
"谡": "謖",
"谢": "謝",
"谤": "謗",
"谥": "謚",
"谦": "謙",
"谧": "謐",
"谨": "謹",
"谩": "謾",
"谪": "謫",
"谬": "謬",
"谭": "譚",
"谮": "譖",
"谯": "譙",
"谰": "讕",
"谱": "譜",
"谲": "譎",
"谳": "讞",
"谴": "譴",
"谵": "譫",
"谶": "讖",
"豮": "豶",
"䝙": "貙",
"䞐": "賰",
"贝": "貝",
"贞": "貞",
"负": "負",
"贠": "貟",
"贡": "貢",
"财": "財",
"责": "責",
"贤": "賢",
"败": "敗",
"账": "賬",
"货": "貨",
"质": "質",
"贩": "販",
"贪": "貪",
"贫": "貧",
"贬": "貶",
"购": "購",
"贮": "貯",
"贯": "貫",
"贰": "貳",
"贱": "賤",
"贲": "賁",
"贳": "貰",
"贴": "貼",
"贵": "貴",
"贶": "貺",
"贷": "貸",
"贸": "貿",
"费": "費",
"贺": "賀",
"贻": "貽",
"贼": "賊",
"贽": "贄",
"贾": "賈",
"贿": "賄",
"赀": "貲",
"赁": "賃",
"赂": "賂",
"资": "資",
"赅": "賅",
"赆": "贐",
"赇": "賕",
"赈": "賑",
"赉": "賚",
"赊": "賒",
"赋": "賦",
"赌": "賭",
"赎": "贖",
"赏": "賞",
"赐": "賜",
"赑": "贔",
"赒": "賙",
"赓": "賡",
"赔": "賠",
"赕": "賧",
"赖": "賴",
"赗": "賵",
"赘": "贅",
"赙": "賻",
"赚": "賺",
"赛": "賽",
"赜": "賾",
"赞": "贊",
"赟": "贇",
"赠": "贈",
"赡": "贍",
"赢": "贏",
"赣": "贛",
"赪": "赬",
"赵": "趙",
"赶": "趕",
"趋": "趨",
"趱": "趲",
"趸": "躉",
"跃": "躍",
"跄": "蹌",
"跞": "躒",
"践": "踐",
"跶": "躂",
"跷": "蹺",
"跸": "蹕",
"跹": "躚",
"跻": "躋",
"踊": "踴",
"踌": "躊",
"踪": "蹤",
"踬": "躓",
"踯": "躑",
"蹑": "躡",
"蹒": "蹣",
"蹰": "躕",
"蹿": "躥",
"躏": "躪",
"躜": "躦",
"躯": "軀",
"车": "車",
"轧": "軋",
"轨": "軌",
"轩": "軒",
"轪": "軑",
"轫": "軔",
"转": "轉",
"轭": "軛",
"轮": "輪",
"软": "軟",
"轰": "轟",
"轱": "軲",
"轲": "軻",
"轳": "轤",
"轴": "軸",
"轵": "軹",
"轶": "軼",
"轷": "軤",
"轸": "軫",
"轹": "轢",
"轺": "軺",
"轻": "輕",
"轼": "軾",
"载": "載",
"轾": "輊",
"轿": "轎",
"辀": "輈",
"辁": "輇",
"辂": "輅",
"较": "較",
"辄": "輒",
"辅": "輔",
"辆": "輛",
"辇": "輦",
"辈": "輩",
"辉": "輝",
"辊": "輥",
"辋": "輞",
"辌": "輬",
"辍": "輟",
"辎": "輜",
"辏": "輳",
"辐": "輻",
"辑": "輯",
"辒": "轀",
"输": "輸",
"辔": "轡",
"辕": "轅",
"辖": "轄",
"辗": "輾",
"辘": "轆",
"辙": "轍",
"辚": "轔",
"辞": "辭",
"辩": "辯",
"辫": "辮",
"边": "邊",
"辽": "遼",
"达": "達",
"迁": "遷",
"过": "過",
"迈": "邁",
"运": "運",
"还": "還",
"这": "這",
"进": "進",
"远": "遠",
"违": "違",
"连": "連",
"迟": "遲",
"迩": "邇",
"迳": "逕",
"迹": "跡",
"选": "選",
"逊": "遜",
"递": "遞",
"逦": "邐",
"逻": "邏",
"遗": "遺",
"遥": "遙",
"邓": "鄧",
"邝": "鄺",
"邬": "鄔",
"邮": "郵",
"邹": "鄒",
"邺": "鄴",
"邻": "鄰",
"郏": "郟",
"郐": "鄶",
"郑": "鄭",
"郓": "鄆",
"郦": "酈",
"郧": "鄖",
"郸": "鄲",
"酂": "酇",
"酦": "醱",
"酱": "醬",
"酽": "釅",
"酾": "釃",
"酿": "釀",
"释": "釋",
"鉴": "鑒",
"銮": "鑾",
"錾": "鏨",
"𨱏": "鎝",
"钅": "釒",
"钆": "釓",
"钇": "釔",
"针": "針",
"钉": "釘",
"钊": "釗",
"钋": "釙",
"钌": "釕",
"钍": "釷",
"钎": "釺",
"钏": "釧",
"钐": "釤",
"钑": "鈒",
"钒": "釩",
"钓": "釣",
"钔": "鍆",
"钕": "釹",
"钖": "鍚",
"钗": "釵",
"钘": "鈃",
"钙": "鈣",
"钚": "鈈",
"钛": "鈦",
"钜": "鉅",
"钝": "鈍",
"钞": "鈔",
"钠": "鈉",
"钡": "鋇",
"钢": "鋼",
"钣": "鈑",
"钤": "鈐",
"钥": "鑰",
"钦": "欽",
"钧": "鈞",
"钨": "鎢",
"钪": "鈧",
"钫": "鈁",
"钬": "鈥",
"钭": "鈄",
"钮": "鈕",
"钯": "鈀",
"钰": "鈺",
"钱": "錢",
"钲": "鉦",
"钳": "鉗",
"钴": "鈷",
"钶": "鈳",
"钷": "鉕",
"钸": "鈽",
"钹": "鈸",
"钺": "鉞",
"钻": "鑽",
"钼": "鉬",
"钽": "鉭",
"钾": "鉀",
"钿": "鈿",
"铀": "鈾",
"铁": "鐵",
"铂": "鉑",
"铃": "鈴",
"铄": "鑠",
"铅": "鉛",
"铆": "鉚",
"铇": "鉋",
"铈": "鈰",
"铉": "鉉",
"铊": "鉈",
"铋": "鉍",
"铌": "鈮",
"铍": "鈹",
"铎": "鐸",
"铏": "鉶",
"铐": "銬",
"铑": "銠",
"铒": "鉺",
"铓": "鋩",
"铔": "錏",
"铕": "銪",
"铖": "鋮",
"铗": "鋏",
"铘": "鋣",
"铙": "鐃",
"铚": "銍",
"铛": "鐺",
"铜": "銅",
"铝": "鋁",
"铞": "銱",
"铟": "銦",
"铠": "鎧",
"铡": "鍘",
"铢": "銖",
"铣": "銑",
"铤": "鋌",
"铥": "銩",
"铦": "銛",
"铧": "鏵",
"铨": "銓",
"铩": "鎩",
"铪": "鉿",
"铫": "銚",
"铬": "鉻",
"铭": "銘",
"铮": "錚",
"铯": "銫",
"铰": "鉸",
"铱": "銥",
"铲": "鏟",
"铳": "銃",
"铴": "鐋",
"铵": "銨",
"银": "銀",
"铷": "銣",
"铸": "鑄",
"铹": "鐒",
"铺": "鋪",
"铻": "鋙",
"铼": "錸",
"铽": "鋱",
"链": "鏈",
"铿": "鏗",
"销": "銷",
"锁": "鎖",
"锂": "鋰",
"锃": "鋥",
"锄": "鋤",
"锅": "鍋",
"锆": "鋯",
"锇": "鋨",
"锉": "銼",
"锊": "鋝",
"锋": "鋒",
"锌": "鋅",
"锍": "鋶",
"锎": "鐦",
"锏": "鐧",
"锑": "銻",
"锒": "鋃",
"锓": "鋟",
"锔": "鋦",
"锕": "錒",
"锖": "錆",
"锗": "鍺",
"锘": "鍩",
"错": "錯",
"锚": "錨",
"锛": "錛",
"锜": "錡",
"锝": "鍀",
"锞": "錁",
"锟": "錕",
"锠": "錩",
"锡": "錫",
"锢": "錮",
"锣": "鑼",
"锥": "錐",
"锦": "錦",
"锧": "鑕",
"锩": "錈",
"锪": "鍃",
"锫": "錇",
"锬": "錟",
"锭": "錠",
"键": "鍵",
"锯": "鋸",
"锰": "錳",
"锱": "錙",
"锲": "鍥",
"锳": "鍈",
"锴": "鍇",
"锵": "鏘",
"锶": "鍶",
"锷": "鍔",
"锸": "鍤",
"锹": "鍬",
"锺": "鍾",
"锻": "鍛",
"锼": "鎪",
"锽": "鍠",
"锾": "鍰",
"锿": "鎄",
"镀": "鍍",
"镁": "鎂",
"镂": "鏤",
"镃": "鎡",
"镄": "鐨",
"镅": "鎇",
"镆": "鏌",
"镇": "鎮",
"镈": "鎛",
"镉": "鎘",
"镊": "鑷",
"镋": "鎲",
"镍": "鎳",
"镎": "鎿",
"镏": "鎦",
"镐": "鎬",
"镑": "鎊",
"镒": "鎰",
"镓": "鎵",
"镔": "鑌",
"镕": "鎔",
"镖": "鏢",
"镗": "鏜",
"镘": "鏝",
"镙": "鏍",
"镚": "鏰",
"镛": "鏞",
"镜": "鏡",
"镝": "鏑",
"镞": "鏃",
"镟": "鏇",
"镠": "鏐",
"镡": "鐔",
"镣": "鐐",
"镤": "鏷",
"镥": "鑥",
"镦": "鐓",
"镧": "鑭",
"镨": "鐠",
"镩": "鑹",
"镪": "鏹",
"镫": "鐙",
"镬": "鑊",
"镭": "鐳",
"镮": "鐶",
"镯": "鐲",
"镰": "鐮",
"镱": "鐿",
"镲": "鑔",
"镳": "鑣",
"镴": "鑞",
"镵": "鑱",
"镶": "鑲",
"长": "長",
"门": "門",
"闩": "閂",
"闪": "閃",
"闫": "閆",
"闬": "閈",
"闭": "閉",
"问": "問",
"闯": "闖",
"闰": "閏",
"闱": "闈",
"闲": "閑",
"闳": "閎",
"间": "間",
"闵": "閔",
"闶": "閌",
"闷": "悶",
"闸": "閘",
"闹": "鬧",
"闺": "閨",
"闻": "聞",
"闼": "闥",
"闽": "閩",
"闾": "閭",
"闿": "闓",
"阀": "閥",
"阁": "閣",
"阂": "閡",
"阃": "閫",
"阄": "鬮",
"阆": "閬",
"阇": "闍",
"阈": "閾",
"阉": "閹",
"阊": "閶",
"阋": "鬩",
"阌": "閿",
"阍": "閽",
"阎": "閻",
"阏": "閼",
"阐": "闡",
"阑": "闌",
"阒": "闃",
"阓": "闠",
"阔": "闊",
"阕": "闋",
"阖": "闔",
"阗": "闐",
"阘": "闒",
"阙": "闕",
"阚": "闞",
"阛": "闤",
"队": "隊",
"阳": "陽",
"阴": "陰",
"阵": "陣",
"阶": "階",
"际": "際",
"陆": "陸",
"陇": "隴",
"陈": "陳",
"陉": "陘",
"陕": "陝",
"陧": "隉",
"陨": "隕",
"险": "險",
"随": "隨",
"隐": "隱",
"隶": "隸",
"隽": "雋",
"难": "難",
"雏": "雛",
"雠": "讎",
"雳": "靂",
"雾": "霧",
"霁": "霽",
"霡": "霢",
"霭": "靄",
"靓": "靚",
"静": "靜",
"靥": "靨",
"䩄": "靦",
"鞑": "韃",
"鞒": "鞽",
"鞯": "韉",
"韦": "韋",
"韧": "韌",
"韨": "韍",
"韩": "韓",
"韪": "韙",
"韫": "韞",
"韬": "韜",
"韵": "韻",
"页": "頁",
"顶": "頂",
"顷": "頃",
"顸": "頇",
"项": "項",
"顺": "順",
"顼": "頊",
"顽": "頑",
"顾": "顧",
"顿": "頓",
"颀": "頎",
"颁": "頒",
"颂": "頌",
"颃": "頏",
"预": "預",
"颅": "顱",
"领": "領",
"颇": "頗",
"颈": "頸",
"颉": "頡",
"颊": "頰",
"颋": "頲",
"颌": "頜",
"颍": "潁",
"颎": "熲",
"颏": "頦",
"颐": "頤",
"频": "頻",
"颒": "頮",
"颔": "頷",
"颕": "頴",
"颖": "穎",
"颗": "顆",
"题": "題",
"颙": "顒",
"颚": "顎",
"颛": "顓",
"额": "額",
"颞": "顳",
"颟": "顢",
"颠": "顛",
"颡": "顙",
"颢": "顥",
"颤": "顫",
"颥": "顬",
"颦": "顰",
"颧": "顴",
"风": "風",
"飏": "颺",
"飐": "颭",
"飑": "颮",
"飒": "颯",
"飓": "颶",
"飔": "颸",
"飕": "颼",
"飖": "颻",
"飗": "飀",
"飘": "飄",
"飙": "飆",
"飚": "飈",
"飞": "飛",
"飨": "饗",
"餍": "饜",
"饣": "飠",
"饤": "飣",
"饦": "飥",
"饧": "餳",
"饨": "飩",
"饩": "餼",
"饪": "飪",
"饫": "飫",
"饬": "飭",
"饭": "飯",
"饮": "飲",
"饯": "餞",
"饰": "飾",
"饱": "飽",
"饲": "飼",
"饳": "飿",
"饴": "飴",
"饵": "餌",
"饶": "饒",
"饷": "餉",
"饸": "餄",
"饹": "餎",
"饺": "餃",
"饻": "餏",
"饼": "餅",
"饽": "餑",
"饾": "餖",
"饿": "餓",
"馀": "餘",
"馁": "餒",
"馂": "餕",
"馃": "餜",
"馄": "餛",
"馅": "餡",
"馆": "館",
"馇": "餷",
"馈": "饋",
"馉": "餶",
"馊": "餿",
"馋": "饞",
"馌": "饁",
"馍": "饃",
"馎": "餺",
"馏": "餾",
"馐": "饈",
"馑": "饉",
"馒": "饅",
"馓": "饊",
"馔": "饌",
"馕": "饢",
"䯄": "騧",
"马": "馬",
"驭": "馭",
"驮": "馱",
"驯": "馴",
"驰": "馳",
"驱": "驅",
"驲": "馹",
"驳": "駁",
"驴": "驢",
"驵": "駔",
"驶": "駛",
"驷": "駟",
"驸": "駙",
"驹": "駒",
"驺": "騶",
"驻": "駐",
"驼": "駝",
"驽": "駑",
"驾": "駕",
"驿": "驛",
"骀": "駘",
"骁": "驍",
"骃": "駰",
"骄": "驕",
"骅": "驊",
"骆": "駱",
"骇": "駭",
"骈": "駢",
"骉": "驫",
"骊": "驪",
"骋": "騁",
"验": "驗",
"骍": "騂",
"骎": "駸",
"骏": "駿",
"骐": "騏",
"骑": "騎",
"骒": "騍",
"骓": "騅",
"骔": "騌",
"骕": "驌",
"骖": "驂",
"骗": "騙",
"骘": "騭",
"骙": "騤",
"骚": "騷",
"骛": "騖",
"骜": "驁",
"骝": "騮",
"骞": "騫",
"骟": "騸",
"骠": "驃",
"骡": "騾",
"骢": "驄",
"骣": "驏",
"骤": "驟",
"骥": "驥",
"骦": "驦",
"骧": "驤",
"髅": "髏",
"髋": "髖",
"髌": "髕",
"鬓": "鬢",
"魇": "魘",
"魉": "魎",
"鱼": "魚",
"鱽": "魛",
"鱾": "魢",
"鱿": "魷",
"鲀": "魨",
"鲁": "魯",
"鲂": "魴",
"鲃": "䰾",
"鲄": "魺",
"鲅": "鮁",
"鲆": "鮃",
"鲈": "鱸",
"鲉": "鮋",
"鲊": "鮓",
"鲋": "鮒",
"鲌": "鮊",
"鲍": "鮑",
"鲎": "鱟",
"鲏": "鮍",
"鲐": "鮐",
"鲑": "鮭",
"鲒": "鮚",
"鲓": "鮳",
"鲔": "鮪",
"鲕": "鮞",
"鲖": "鮦",
"鲗": "鰂",
"鲘": "鮜",
"鲙": "鱠",
"鲚": "鱭",
"鲛": "鮫",
"鲜": "鮮",
"鲝": "鮺",
"鲟": "鱘",
"鲠": "鯁",
"鲡": "鱺",
"鲢": "鰱",
"鲣": "鰹",
"鲤": "鯉",
"鲥": "鰣",
"鲦": "鰷",
"鲧": "鯀",
"鲨": "鯊",
"鲩": "鯇",
"鲪": "鮶",
"鲫": "鯽",
"鲬": "鯒",
"鲭": "鯖",
"鲮": "鯪",
"鲯": "鯕",
"鲰": "鯫",
"鲱": "鯡",
"鲲": "鯤",
"鲳": "鯧",
"鲴": "鯝",
"鲵": "鯢",
"鲶": "鯰",
"鲷": "鯛",
"鲸": "鯨",
"鲹": "鰺",
"鲺": "鯴",
"鲻": "鯔",
"鲼": "鱝",
"鲽": "鰈",
"鲾": "鰏",
"鲿": "鱨",
"鳀": "鯷",
"鳁": "鰮",
"鳂": "鰃",
"鳃": "鰓",
"鳅": "鰍",
"鳆": "鰒",
"鳇": "鰉",
"鳈": "鰁",
"鳉": "鱂",
"鳊": "鯿",
"鳋": "鰠",
"鳌": "鰲",
"鳍": "鰭",
"鳎": "鰨",
"鳏": "鰥",
"鳐": "鰩",
"鳑": "鰟",
"鳒": "鰜",
"鳓": "鰳",
"鳔": "鰾",
"鳕": "鱈",
"鳖": "鱉",
"鳗": "鰻",
"鳘": "鰵",
"鳙": "鱅",
"鳚": "䲁",
"鳛": "鰼",
"鳜": "鱖",
"鳝": "鱔",
"鳞": "鱗",
"鳟": "鱒",
"鳠": "鱯",
"鳡": "鱤",
"鳢": "鱧",
"鳣": "鱣",
"䴓": "鳾",
"䴕": "鴷",
"䴔": "鵁",
"䴖": "鶄",
"䴗": "鶪",
"䴘": "鷈",
"䴙": "鷿",
"㶉": "鸂",
"鸟": "鳥",
"鸠": "鳩",
"鸢": "鳶",
"鸣": "鳴",
"鸤": "鳲",
"鸥": "鷗",
"鸦": "鴉",
"鸧": "鶬",
"鸨": "鴇",
"鸩": "鴆",
"鸪": "鴣",
"鸫": "鶇",
"鸬": "鸕",
"鸭": "鴨",
"鸮": "鴞",
"鸯": "鴦",
"鸰": "鴒",
"鸱": "鴟",
"鸲": "鴝",
"鸳": "鴛",
"鸴": "鷽",
"鸵": "鴕",
"鸶": "鷥",
"鸷": "鷙",
"鸸": "鴯",
"鸹": "鴰",
"鸺": "鵂",
"鸻": "鴴",
"鸼": "鵃",
"鸽": "鴿",
"鸾": "鸞",
"鸿": "鴻",
"鹀": "鵐",
"鹁": "鵓",
"鹂": "鸝",
"鹃": "鵑",
"鹄": "鵠",
"鹅": "鵝",
"鹆": "鵒",
"鹇": "鷳",
"鹈": "鵜",
"鹉": "鵡",
"鹊": "鵲",
"鹋": "鶓",
"鹌": "鵪",
"鹍": "鵾",
"鹎": "鵯",
"鹏": "鵬",
"鹐": "鵮",
"鹑": "鶉",
"鹒": "鶊",
"鹓": "鵷",
"鹔": "鷫",
"鹕": "鶘",
"鹖": "鶡",
"鹗": "鶚",
"鹘": "鶻",
"鹙": "鶖",
"鹛": "鶥",
"鹜": "鶩",
"鹝": "鷊",
"鹞": "鷂",
"鹟": "鶲",
"鹠": "鶹",
"鹡": "鶺",
"鹢": "鷁",
"鹣": "鶼",
"鹤": "鶴",
"鹥": "鷖",
"鹦": "鸚",
"鹧": "鷓",
"鹨": "鷚",
"鹩": "鷯",
"鹪": "鷦",
"鹫": "鷲",
"鹬": "鷸",
"鹭": "鷺",
"鹯": "鸇",
"鹰": "鷹",
"鹱": "鸌",
"鹲": "鸏",
"鹳": "鸛",
"鹴": "鸘",
"鹾": "鹺",
"麦": "麥",
"麸": "麩",
"黄": "黃",
"黉": "黌",
"黡": "黶",
"黩": "黷",
"黪": "黲",
"黾": "黽",
"鼋": "黿",
"鼍": "鼉",
"鼗": "鞀",
"鼹": "鼴",
"齐": "齊",
"齑": "齏",
"齿": "齒",
"龀": "齔",
"龁": "齕",
"龂": "齗",
"龃": "齟",
"龄": "齡",
"龅": "齙",
"龆": "齠",
"龇": "齜",
"龈": "齦",
"龉": "齬",
"龊": "齪",
"龋": "齲",
"龌": "齷",
"龙": "龍",
"龚": "龔",
"龛": "龕",
"龟": "龜",
"一伙": "一伙",
"一并": "一併",
"一准": "一准",
"一划": "一划",
"一地里": "一地裡",
"一干": "一干",
"一树百获": "一樹百穫",
"一台": "一臺",
"一冲": "一衝",
"一只": "一隻",
"一发千钧": "一髮千鈞",
"一出": "一齣",
"七只": "七隻",
"三元里": "三元裡",
"三国志": "三國誌",
"三复": "三複",
"三只": "三隻",
"上吊": "上吊",
"上台": "上臺",
"下不了台": "下不了臺",
"下台": "下臺",
"下面": "下麵",
"不准": "不准",
"不吊": "不吊",
"不知就里": "不知就裡",
"不知所云": "不知所云",
"不锈钢": "不鏽鋼",
"丑剧": "丑劇",
"丑旦": "丑旦",
"丑角": "丑角",
"并存着": "並存著",
"中岳": "中嶽",
"中台医专": "中臺醫專",
"丰南": "丰南",
"丰台": "丰台",
"丰姿": "丰姿",
"丰采": "丰采",
"丰韵": "丰韻",
"主干": "主幹",
"么么唱唱": "么么唱唱",
"么儿": "么兒",
"么喝": "么喝",
"么妹": "么妹",
"么弟": "么弟",
"么爷": "么爺",
"九世之雠": "九世之讎",
"九只": "九隻",
"干丝": "乾絲",
"干着急": "乾著急",
"乱发": "亂髮",
"云云": "云云",
"云尔": "云爾",
"五岳": "五嶽",
"五斗柜": "五斗櫃",
"五斗橱": "五斗櫥",
"五谷": "五穀",
"五行生克": "五行生剋",
"五只": "五隻",
"五出": "五齣",
"交卷": "交卷",
"人云亦云": "人云亦云",
"人物志": "人物誌",
"什锦面": "什錦麵",
"什么": "什麼",
"仆倒": "仆倒",
"介系词": "介係詞",
"介系词": "介繫詞",
"仿制": "仿製",
"伙伕": "伙伕",
"伙伴": "伙伴",
"伙同": "伙同",
"伙夫": "伙夫",
"伙房": "伙房",
"伙计": "伙計",
"伙食": "伙食",
"布下": "佈下",
"布告": "佈告",
"布哨": "佈哨",
"布局": "佈局",
"布岗": "佈崗",
"布施": "佈施",
"布景": "佈景",
"布满": "佈滿",
"布线": "佈線",
"布置": "佈置",
"布署": "佈署",
"布道": "佈道",
"布达": "佈達",
"布防": "佈防",
"布阵": "佈陣",
"布雷": "佈雷",
"体育锻鍊": "体育鍛鍊",
"何干": "何干",
"作准": "作准",
"佣人": "佣人",
"佣工": "佣工",
"佣金": "佣金",
"并入": "併入",
"并列": "併列",
"并到": "併到",
"并合": "併合",
"并吞": "併吞",
"并在": "併在",
"并成": "併成",
"并排": "併排",
"并拢": "併攏",
"并案": "併案",
"并为": "併為",
"并发": "併發",
"并科": "併科",
"并购": "併購",
"并进": "併進",
"来复": "來複",
"供制": "供製",
"依依不舍": "依依不捨",
"侵并": "侵併",
"便辟": "便辟",
"系数": "係數",
"系为": "係為",
"保险柜": "保險柜",
"信号台": "信號臺",
"修复": "修複",
"修胡刀": "修鬍刀",
"俯冲": "俯衝",
"个里": "個裡",
"借着": "借著",
"假发": "假髮",
"停制": "停製",
"偷鸡不着": "偷雞不著",
"家伙": "傢伙",
"家俱": "傢俱",
"家具": "傢具",
"传布": "傳佈",
"债台高筑": "債臺高築",
"傻里傻气": "傻裡傻氣",
"倾家荡产": "傾家蕩產",
"倾复": "傾複",
"倾复": "傾覆",
"僱佣": "僱佣",
"仪表": "儀錶",
"亿只": "億隻",
"尽尽": "儘儘",
"尽先": "儘先",
"尽其所有": "儘其所有",
"尽力": "儘力",
"尽快": "儘快",
"尽早": "儘早",
"尽是": "儘是",
"尽管": "儘管",
"尽速": "儘速",
"尽量": "儘量",
"允准": "允准",
"兄台": "兄臺",
"充饥": "充饑",
"光采": "光采",
"克里": "克裡",
"克复": "克複",
"入伙": "入伙",
"内制": "內製",
"两只": "兩隻",
"八字胡": "八字鬍",
"八只": "八隻",
"公布": "公佈",
"公干": "公幹",
"公斗": "公斗",
"公历": "公曆",
"六只": "六隻",
"六出": "六齣",
"兼并": "兼併",
"冤雠": "冤讎",
"准予": "准予",
"准假": "准假",
"准将": "准將",
"准考证": "准考證",
"准许": "准許",
"几几": "几几",
"几案": "几案",
"几丝": "几絲",
"凹洞里": "凹洞裡",
"出征": "出征",
"出锤": "出鎚",
"刀削面": "刀削麵",
"刁斗": "刁斗",
"分布": "分佈",
"切面": "切麵",
"刊布": "刊佈",
"划上": "划上",
"划下": "划下",
"划不来": "划不來",
"划了": "划了",
"划具": "划具",
"划出": "划出",
"划到": "划到",
"划动": "划動",
"划去": "划去",
"划子": "划子",
"划得来": "划得來",
"划拳": "划拳",
"划桨": "划槳",
"划水": "划水",
"划算": "划算",
"划船": "划船",
"划艇": "划艇",
"划着": "划著",
"划着走": "划著走",
"划行": "划行",
"划走": "划走",
"划起": "划起",
"划进": "划進",
"划过": "划過",
"初征": "初征",
"别致": "別緻",
"别着": "別著",
"别只": "別隻",
"利比里亚": "利比裡亞",
"刮着": "刮著",
"刮胡刀": "刮鬍刀",
"剃发": "剃髮",
"剃须": "剃鬚",
"削发": "削髮",
"克制": "剋制",
"克星": "剋星",
"克服": "剋服",
"克死": "剋死",
"克薄": "剋薄",
"前仆后继": "前仆後繼",
"前台": "前臺",
"前车之复": "前車之覆",
"刚才": "剛纔",
"剪发": "剪髮",
"割舍": "割捨",
"创制": "創製",
"加里宁": "加裡寧",
"动荡": "動蕩",
"劳力士表": "勞力士錶",
"包准": "包准",
"包谷": "包穀",
"北斗": "北斗",
"北回": "北迴",
"匡复": "匡複",
"匪干": "匪幹",
"十卷": "十卷",
"十台": "十臺",
"十只": "十隻",
"十出": "十齣",
"千丝万缕": "千絲萬縷",
"千回百折": "千迴百折",
"千回百转": "千迴百轉",
"千钧一发": "千鈞一髮",
"千只": "千隻",
"升斗小民": "升斗小民",
"半只": "半隻",
"南岳": "南嶽",
"南征": "南征",
"南台": "南臺",
"南回": "南迴",
"卡里": "卡裡",
"印制": "印製",
"卷入": "卷入",
"卷取": "卷取",
"卷土重来": "卷土重來",
"卷子": "卷子",
"卷宗": "卷宗",
"卷尺": "卷尺",
"卷层云": "卷層雲",
"卷帙": "卷帙",
"卷扬机": "卷揚機",
"卷曲": "卷曲",
"卷染": "卷染",
"卷烟": "卷煙",
"卷筒": "卷筒",
"卷纬": "卷緯",
"卷绕": "卷繞",
"卷装": "卷裝",
"卷轴": "卷軸",
"卷云": "卷雲",
"卷领": "卷領",
"卷发": "卷髮",
"卷须": "卷鬚",
"参与": "參与",
"参与者": "參与者",
"参合": "參合",
"参考价值": "參考價值",
"参与": "參與",
"参与人员": "參與人員",
"参与制": "參與制",
"参与感": "參與感",
"参与者": "參與者",
"参观团": "參觀團",
"参观团体": "參觀團體",
"参阅": "參閱",
"反冲": "反衝",
"反复": "反複",
"反复": "反覆",
"取舍": "取捨",
"口里": "口裡",
"只准": "只准",
"只冲": "只衝",
"叮当": "叮噹",
"可怜虫": "可憐虫",
"可紧可松": "可緊可鬆",
"台制": "台製",
"司令台": "司令臺",
"吃着不尽": "吃著不盡",
"吃里扒外": "吃裡扒外",
"吃里爬外": "吃裡爬外",
"各吊": "各吊",
"合伙": "合伙",
"合并": "合併",
"合着": "合著",
"合着者": "合著者",
"吊上": "吊上",
"吊下": "吊下",
"吊了": "吊了",
"吊个": "吊個",
"吊儿郎当": "吊兒郎當",
"吊到": "吊到",
"吊去": "吊去",
"吊取": "吊取",
"吊吊": "吊吊",
"吊嗓": "吊嗓",
"吊好": "吊好",
"吊子": "吊子",
"吊带": "吊帶",
"吊带裤": "吊帶褲",
"吊床": "吊床",
"吊得": "吊得",
"吊挂": "吊掛",
"吊挂着": "吊掛著",
"吊杆": "吊杆",
"吊架": "吊架",
"吊桶": "吊桶",
"吊杆": "吊桿",
"吊桥": "吊橋",
"吊死": "吊死",
"吊灯": "吊燈",
"吊环": "吊環",
"吊盘": "吊盤",
"吊索": "吊索",
"吊着": "吊著",
"吊装": "吊裝",
"吊裤": "吊褲",
"吊裤带": "吊褲帶",
"吊袜": "吊襪",
"吊走": "吊走",
"吊起": "吊起",
"吊车": "吊車",
"吊钩": "吊鉤",
"吊销": "吊銷",
"吊钟": "吊鐘",
"同伙": "同伙",
"名表": "名錶",
"后冠": "后冠",
"后土": "后土",
"后妃": "后妃",
"后座": "后座",
"后稷": "后稷",
"后羿": "后羿",
"后里": "后里",
"向着": "向著",
"吞并": "吞併",
"吹发": "吹髮",
"吕后": "呂后",
"獃里獃气": "呆裡呆氣",
"周而复始": "周而複始",
"呼吁": "呼籲",
"和面": "和麵",
"哪里": "哪裡",
"哭脏": "哭髒",
"问卷": "問卷",
"喝采": "喝采",
"单干": "單干",
"单只": "單隻",
"嘴里": "嘴裡",
"恶心": "噁心",
"当啷": "噹啷",
"当当": "噹噹",
"噜苏": "嚕囌",
"向导": "嚮導",
"向往": "嚮往",
"向应": "嚮應",
"向日": "嚮日",
"向迩": "嚮邇",
"严丝合缝": "嚴絲合縫",
"严复": "嚴複",
"四舍五入": "四捨五入",
"四只": "四隻",
"四出": "四齣",
"回丝": "回絲",
"回着": "回著",
"回荡": "回蕩",
"回复": "回覆",
"回采": "回采",
"圈子里": "圈子裡",
"圈里": "圈裡",
"国历": "國曆",
"国雠": "國讎",
"园里": "園裡",
"图里": "圖裡",
"土里": "土裡",
"土制": "土製",
"地志": "地誌",
"坍台": "坍臺",
"坑里": "坑裡",
"坦荡": "坦蕩",
"垂发": "垂髮",
"垮台": "垮臺",
"埋布": "埋佈",
"城里": "城裡",
"基干": "基幹",
"报复": "報複",
"塌台": "塌臺",
"塔台": "塔臺",
"涂着": "塗著",
"墓志": "墓誌",
"墨斗": "墨斗",
"墨索里尼": "墨索裡尼",
"垦复": "墾複",
"垄断价格": "壟斷價格",
"垄断资产": "壟斷資產",
"垄断集团": "壟斷集團",
"壶里": "壺裡",
"寿面": "壽麵",
"夏天里": "夏天裡",
"夏历": "夏曆",
"外制": "外製",
"多冲": "多衝",
"多采多姿": "多采多姿",
"多么": "多麼",
"夜光表": "夜光錶",
"夜里": "夜裡",
"梦里": "夢裡",
"大伙": "大伙",
"大卷": "大卷",
"大干": "大干",
"大干": "大幹",
"大锤": "大鎚",
"大只": "大隻",
"天后": "天后",
"天干": "天干",
"天文台": "天文臺",
"天翻地复": "天翻地覆",
"太后": "太后",
"奏折": "奏摺",
"女丑": "女丑",
"女佣": "女佣",
"好家夥": "好傢夥",
"好戏连台": "好戲連臺",
"如法泡制": "如法泡製",
"妆台": "妝臺",
"姜太公": "姜太公",
"姜子牙": "姜子牙",
"姜丝": "姜絲",
"字汇": "字彙",
"字里行间": "字裡行間",
"存折": "存摺",
"孟姜女": "孟姜女",
"宇宙志": "宇宙誌",
"定准": "定准",
"定制": "定製",
"宣布": "宣佈",
"宫里": "宮裡",
"家伙": "家伙",
"家里": "家裡",
"密布": "密佈",
"寇雠": "寇讎",
"实干": "實幹",
"写字台": "寫字檯",
"写字台": "寫字臺",
"宽松": "寬鬆",
"封面里": "封面裡",
"射干": "射干",
"对表": "對錶",
"小丑": "小丑",
"小伙": "小伙",
"小只": "小隻",
"少吊": "少吊",
"尺布斗粟": "尺布斗粟",
"尼克松": "尼克鬆",
"尼采": "尼采",
"尿斗": "尿斗",
"局里": "局裡",
"居里": "居裡",
"屋子里": "屋子裡",
"屋里": "屋裡",
"展布": "展佈",
"屡仆屡起": "屢仆屢起",
"屯里": "屯裡",
"山岳": "山嶽",
"山里": "山裡",
"峰回": "峰迴",
"巡回": "巡迴",
"巧干": "巧幹",
"巴尔干": "巴爾幹",
"巴里": "巴裡",
"巷里": "巷裡",
"市里": "市裡",
"布谷": "布穀",
"希腊": "希腊",
"帘子": "帘子",
"帘布": "帘布",
"席卷": "席卷",
"带团参加": "帶團參加",
"带发修行": "帶髮修行",
"干休": "干休",
"干系": "干係",
"干卿何事": "干卿何事",
"干将": "干將",
"干戈": "干戈",
"干挠": "干撓",
"干扰": "干擾",
"干支": "干支",
"干政": "干政",
"干时": "干時",
"干涉": "干涉",
"干犯": "干犯",
"干与": "干與",
"干着急": "干著急",
"干贝": "干貝",
"干预": "干預",
"平台": "平臺",
"年历": "年曆",
"年里": "年裡",
"干上": "幹上",
"干下去": "幹下去",
"干了": "幹了",
"干事": "幹事",
"干些": "幹些",
"干个": "幹個",
"干劲": "幹勁",
"干员": "幹員",
"干吗": "幹嗎",
"干嘛": "幹嘛",
"干坏事": "幹壞事",
"干完": "幹完",
"干得": "幹得",
"干性油": "幹性油",
"干才": "幹才",
"干掉": "幹掉",
"干校": "幹校",
"干活": "幹活",
"干流": "幹流",
"干球温度": "幹球溫度",
"干线": "幹線",
"干练": "幹練",
"干警": "幹警",
"干起来": "幹起來",
"干路": "幹路",
"干道": "幹道",
"干部": "幹部",
"干么": "幹麼",
"几丝": "幾絲",
"几只": "幾隻",
"几出": "幾齣",
"底里": "底裡",
"康采恩": "康采恩",
"庙里": "廟裡",
"建台": "建臺",
"弄脏": "弄髒",
"弔卷": "弔卷",
"弘历": "弘曆",
"别扭": "彆扭",
"别拗": "彆拗",
"别气": "彆氣",
"别脚": "彆腳",
"别着": "彆著",
"弹子台": "彈子檯",
"弹药": "彈葯",
"汇报": "彙報",
"汇整": "彙整",
"汇编": "彙編",
"汇总": "彙總",
"汇纂": "彙纂",
"汇辑": "彙輯",
"汇集": "彙集",
"形单影只": "形單影隻",
"影后": "影后",
"往里": "往裡",
"往复": "往複",
"征伐": "征伐",
"征兵": "征兵",
"征尘": "征塵",
"征夫": "征夫",
"征战": "征戰",
"征收": "征收",
"征服": "征服",
"征求": "征求",
"征发": "征發",
"征衣": "征衣",
"征讨": "征討",
"征途": "征途",
"后台": "後臺",
"从里到外": "從裡到外",
"从里向外": "從裡向外",
"复雠": "復讎",
"复辟": "復辟",
"德干高原": "德干高原",
"心愿": "心愿",
"心荡神驰": "心蕩神馳",
"心里": "心裡",
"忙里": "忙裡",
"快干": "快幹",
"快冲": "快衝",
"怎么": "怎麼",
"怎么着": "怎麼著",
"怒发冲冠": "怒髮衝冠",
"急冲而下": "急衝而下",
"怪里怪气": "怪裡怪氣",
"恩准": "恩准",
"情有所钟": "情有所鍾",
"意面": "意麵",
"慌里慌张": "慌裡慌張",
"慰借": "慰藉",
"忧郁": "憂郁",
"凭吊": "憑吊",
"凭借": "憑藉",
"凭借着": "憑藉著",
"蒙懂": "懞懂",
"怀里": "懷裡",
"怀表": "懷錶",
"悬吊": "懸吊",
"恋恋不舍": "戀戀不捨",
"戏台": "戲臺",
"戴表": "戴錶",
"戽斗": "戽斗",
"房里": "房裡",
"手不释卷": "手不釋卷",
"手卷": "手卷",
"手折": "手摺",
"手里": "手裡",
"手表": "手錶",
"手松": "手鬆",
"才干": "才幹",
"才高八斗": "才高八斗",
"打谷": "打穀",
"扞御": "扞禦",
"批准": "批准",
"批复": "批複",
"批复": "批覆",
"承制": "承製",
"抗御": "抗禦",
"折冲": "折衝",
"披复": "披覆",
"披发": "披髮",
"抱朴": "抱朴",
"抵御": "抵禦",
"拆伙": "拆伙",
"拆台": "拆臺",
"拈须": "拈鬚",
"拉纤": "拉縴",
"拉面": "拉麵",
"拖吊": "拖吊",
"拗别": "拗彆",
"拮据": "拮据",
"振荡": "振蕩",
"捍御": "捍禦",
"舍不得": "捨不得",
"舍出": "捨出",
"舍去": "捨去",
"舍命": "捨命",
"舍己从人": "捨己從人",
"舍己救人": "捨己救人",
"舍己为人": "捨己為人",
"舍己为公": "捨己為公",
"舍己为国": "捨己為國",
"舍得": "捨得",
"舍我其谁": "捨我其誰",
"舍本逐末": "捨本逐末",
"舍弃": "捨棄",
"舍死忘生": "捨死忘生",
"舍生": "捨生",
"舍短取长": "捨短取長",
"舍身": "捨身",
"舍车保帅": "捨車保帥",
"舍近求远": "捨近求遠",
"捲发": "捲髮",
"捵面": "捵麵",
"扫荡": "掃蕩",
"掌柜": "掌柜",
"排骨面": "排骨麵",
"挂帘": "掛帘",
"挂面": "掛麵",
"接着说": "接著說",
"提心吊胆": "提心吊膽",
"插图卷": "插圖卷",
"换吊": "換吊",
"换只": "換隻",
"换发": "換髮",
"摇荡": "搖蕩",
"搭伙": "搭伙",
"折合": "摺合",
"折奏": "摺奏",
"折子": "摺子",
"折尺": "摺尺",
"折扇": "摺扇",
"折梯": "摺梯",
"折椅": "摺椅",
"折叠": "摺疊",
"折痕": "摺痕",
"折篷": "摺篷",
"折纸": "摺紙",
"折裙": "摺裙",
"撒布": "撒佈",
"撚须": "撚鬚",
"撞球台": "撞球檯",
"擂台": "擂臺",
"担仔面": "擔仔麵",
"担担面": "擔擔麵",
"担着": "擔著",
"担负着": "擔負著",
"据云": "據云",
"擢发难数": "擢髮難數",
"摆布": "擺佈",
"摄制": "攝製",
"支干": "支幹",
"收获": "收穫",
"改制": "改製",
"攻克": "攻剋",
"放荡": "放蕩",
"放松": "放鬆",
"叙说着": "敘說著",
"散伙": "散伙",
"散布": "散佈",
"散荡": "散蕩",
"散发": "散髮",
"整只": "整隻",
"整出": "整齣",
"文采": "文采",
"斗六": "斗六",
"斗南": "斗南",
"斗大": "斗大",
"斗子": "斗子",
"斗室": "斗室",
"斗方": "斗方",
"斗栱": "斗栱",
"斗笠": "斗笠",
"斗箕": "斗箕",
"斗篷": "斗篷",
"斗胆": "斗膽",
"斗转参横": "斗轉參橫",
"斗量": "斗量",
"斗门": "斗門",
"料斗": "料斗",
"斯里兰卡": "斯裡蘭卡",
"新历": "新曆",
"断头台": "斷頭臺",
"方才": "方纔",
"施舍": "施捨",
"旋绕着": "旋繞著",
"旋回": "旋迴",
"族里": "族裡",
"日历": "日曆",
"日志": "日誌",
"日进斗金": "日進斗金",
"明了": "明瞭",
"明窗净几": "明窗淨几",
"明里": "明裡",
"星斗": "星斗",
"星历": "星曆",
"星移斗换": "星移斗換",
"星移斗转": "星移斗轉",
"星罗棋布": "星羅棋佈",
"星辰表": "星辰錶",
"春假里": "春假裡",
"春天里": "春天裡",
"晃荡": "晃蕩",
"景致": "景緻",
"暗地里": "暗地裡",
"暗沟里": "暗溝裡",
"暗里": "暗裡",
"历数": "曆數",
"历书": "曆書",
"历法": "曆法",
"书卷": "書卷",
"会干": "會幹",
"会里": "會裡",
"月历": "月曆",
"月台": "月臺",
"有只": "有隻",
"木制": "木製",
"本台": "本臺",
"朴子": "朴子",
"朴实": "朴實",
"朴硝": "朴硝",
"朴素": "朴素",
"朴资茅斯": "朴資茅斯",
"村里": "村裡",
"束发": "束髮",
"东岳": "東嶽",
"东征": "東征",
"松赞干布": "松贊干布",
"板着脸": "板著臉",
"板荡": "板蕩",
"枕借": "枕藉",
"林宏岳": "林宏嶽",
"枝干": "枝幹",
"枯干": "枯幹",
"某只": "某隻",
"染发": "染髮",
"柜上": "柜上",
"柜台": "柜台",
"柜子": "柜子",
"查卷": "查卷",
"查号台": "查號臺",
"校雠学": "校讎學",
"核准": "核准",
"核复": "核覆",
"格里": "格裡",
"案卷": "案卷",
"条干": "條幹",
"棉卷": "棉卷",
"棉制": "棉製",
"植发": "植髮",
"楼台": "樓臺",
"标志着": "標志著",
"标致": "標緻",
"标志": "標誌",
"模制": "模製",
"树干": "樹幹",
"横征暴敛": "橫征暴斂",
"横冲": "橫衝",
"档卷": "檔卷",
"检复": "檢覆",
"台子": "檯子",
"台布": "檯布",
"台灯": "檯燈",
"台球": "檯球",
"台面": "檯面",
"柜台": "櫃檯",
"柜台": "櫃臺",
"栏干": "欄干",
"欺蒙": "欺矇",
"歌后": "歌后",
"欧几里得": "歐幾裡得",
"正当着": "正當著",
"武后": "武后",
"武松": "武鬆",
"归并": "歸併",
"死里求生": "死裡求生",
"死里逃生": "死裡逃生",
"残卷": "殘卷",
"杀虫药": "殺虫藥",
"壳里": "殼裡",
"母后": "母后",
"每只": "每隻",
"比干": "比干",
"毛卷": "毛卷",
"毛发": "毛髮",
"毫发": "毫髮",
"气冲牛斗": "氣沖牛斗",
"气象台": "氣象臺",
"氯霉素": "氯黴素",
"水斗": "水斗",
"水里": "水裡",
"水表": "水錶",
"永历": "永曆",
"污蔑": "汙衊",
"池里": "池裡",
"污蔑": "污衊",
"沈着": "沈著",
"没事干": "沒事幹",
"没精打采": "沒精打采",
"冲着": "沖著",
"沙里淘金": "沙裡淘金",
"河里": "河裡",
"油面": "油麵",
"泡面": "泡麵",
"泰斗": "泰斗",
"洗手不干": "洗手不幹",
"洗发精": "洗髮精",
"派团参加": "派團參加",
"流荡": "流蕩",
"浩荡": "浩蕩",
"浪琴表": "浪琴錶",
"浪荡": "浪蕩",
"浮荡": "浮蕩",
"海里": "海裡",
"涂着": "涂著",
"液晶表": "液晶錶",
"凉面": "涼麵",
"淡朱": "淡硃",
"淫荡": "淫蕩",
"测验卷": "測驗卷",
"港制": "港製",
"游荡": "游蕩",
"凑合着": "湊合著",
"湖里": "湖裡",
"汤团": "湯糰",
"汤面": "湯麵",
"卤制": "滷製",
"卤面": "滷麵",
"满布": "滿佈",
"漂荡": "漂蕩",
"漏斗": "漏斗",
"演奏台": "演奏臺",
"潭里": "潭裡",
"激荡": "激蕩",
"浓郁": "濃郁",
"浓发": "濃髮",
"湿地松": "濕地鬆",
"蒙蒙": "濛濛",
"蒙雾": "濛霧",
"瀛台": "瀛臺",
"弥漫": "瀰漫",
"弥漫着": "瀰漫著",
"火并": "火併",
"灰蒙": "灰濛",
"炒面": "炒麵",
"炮制": "炮製",
"炸药": "炸葯",
"炸酱面": "炸醬麵",
"为着": "為著",
"乌干达": "烏干達",
"乌苏里江": "烏蘇裡江",
"乌发": "烏髮",
"乌龙面": "烏龍麵",
"烘制": "烘製",
"烽火台": "烽火臺",
"无干": "無干",
"无精打采": "無精打采",
"炼制": "煉製",
"烟卷儿": "煙卷兒",
"烟斗": "煙斗",
"烟斗丝": "煙斗絲",
"烟台": "煙臺",
"照准": "照准",
"熨斗": "熨斗",
"灯台": "燈臺",
"燎发": "燎髮",
"烫发": "燙髮",
"烫面": "燙麵",
"烛台": "燭臺",
"炉台": "爐臺",
"爽荡": "爽蕩",
"片言只语": "片言隻語",
"牛肉面": "牛肉麵",
"牛只": "牛隻",
"特准": "特准",
"特征": "特征",
"特里": "特裡",
"特制": "特製",
"牵系": "牽繫",
"狼借": "狼藉",
"猛冲": "猛衝",
"奖杯": "獎盃",
"获准": "獲准",
"率团参加": "率團參加",
"王侯后": "王侯后",
"王后": "王后",
"班里": "班裡",
"理发": "理髮",
"瑶台": "瑤臺",
"甚么": "甚麼",
"甜面酱": "甜麵醬",
"生力面": "生力麵",
"生锈": "生鏽",
"生发": "生髮",
"田里": "田裡",
"由馀": "由余",
"男佣": "男佣",
"男用表": "男用錶",
"留发": "留髮",
"畚斗": "畚斗",
"当着": "當著",
"疏松": "疏鬆",
"疲困": "疲睏",
"病症": "病癥",
"症候": "癥候",
"症状": "癥狀",
"症结": "癥結",
"登台": "登臺",
"发布": "發佈",
"发着": "發著",
"发面": "發麵",
"发霉": "發黴",
"白卷": "白卷",
"白干儿": "白干兒",
"白发": "白髮",
"白面": "白麵",
"百里": "百裡",
"百只": "百隻",
"皇后": "皇后",
"皇历": "皇曆",
"皓发": "皓髮",
"皮里阳秋": "皮裏陽秋",
"皮里春秋": "皮裡春秋",
"皮制": "皮製",
"皱折": "皺摺",
"盒里": "盒裡",
"监制": "監製",
"盘里": "盤裡",
"盘回": "盤迴",
"直接参与": "直接參与",
"直冲": "直衝",
"相克": "相剋",
"相干": "相干",
"相冲": "相衝",
"看台": "看臺",
"眼帘": "眼帘",
"眼眶里": "眼眶裡",
"眼里": "眼裡",
"困乏": "睏乏",
"睡着了": "睡著了",
"了如": "瞭如",
"了望": "瞭望",
"了然": "瞭然",
"了若指掌": "瞭若指掌",
"了解": "瞭解",
"蒙住": "矇住",
"蒙昧无知": "矇昧無知",
"蒙混": "矇混",
"蒙蒙": "矇矇",
"蒙眬": "矇矓",
"蒙蔽": "矇蔽",
"蒙骗": "矇騙",
"短发": "短髮",
"石英表": "石英錶",
"研制": "研製",
"砰当": "砰噹",
"砲台": "砲臺",
"朱唇皓齿": "硃唇皓齒",
"朱批": "硃批",
"朱砂": "硃砂",
"朱笔": "硃筆",
"朱红色": "硃紅色",
"朱色": "硃色",
"硬干": "硬幹",
"砚台": "硯臺",
"碑志": "碑誌",
"磁制": "磁製",
"磨制": "磨製",
"示复": "示覆",
"社里": "社裡",
"神采": "神采",
"御侮": "禦侮",
"御寇": "禦寇",
"御寒": "禦寒",
"御敌": "禦敵",
"秃发": "禿髮",
"秀发": "秀髮",
"私下里": "私下裡",
"秋天里": "秋天裡",
"秋裤": "秋褲",
"秒表": "秒錶",
"稀松": "稀鬆",
"禀复": "稟覆",
"稻谷": "稻穀",
"稽征": "稽征",
"谷仓": "穀倉",
"谷场": "穀場",
"谷子": "穀子",
"谷壳": "穀殼",
"谷物": "穀物",
"谷皮": "穀皮",
"谷神": "穀神",
"谷粒": "穀粒",
"谷舱": "穀艙",
"谷苗": "穀苗",
"谷草": "穀草",
"谷贱伤农": "穀賤傷農",
"谷道": "穀道",
"谷雨": "穀雨",
"谷类": "穀類",
"积极参与": "積极參与",
"积极参加": "積极參加",
"空荡": "空蕩",
"窗帘": "窗帘",
"窗明几净": "窗明几淨",
"窗台": "窗檯",
"窗台": "窗臺",
"窝里": "窩裡",
"窝阔台": "窩闊臺",
"穷追不舍": "窮追不捨",
"笆斗": "笆斗",
"笑里藏刀": "笑裡藏刀",
"第一卷": "第一卷",
"筋斗": "筋斗",
"答卷": "答卷",
"答复": "答複",
"答复": "答覆",
"筵几": "筵几",
"箕斗": "箕斗",
"签着": "簽著",
"吁求": "籲求",
"吁请": "籲請",
"粗制": "粗製",
"粗卤": "粗鹵",
"精干": "精幹",
"精明强干": "精明強幹",
"精致": "精緻",
"精制": "精製",
"精辟": "精辟",
"精采": "精采",
"糊里糊涂": "糊裡糊塗",
"团子": "糰子",
"系着": "系著",
"纪历": "紀曆",
"红发": "紅髮",
"红霉素": "紅黴素",
"纡回": "紆迴",
"纳采": "納采",
"素食面": "素食麵",
"素面": "素麵",
"紫微斗数": "紫微斗數",
"细致": "細緻",
"组里": "組裡",
"结发": "結髮",
"绝对参照": "絕對參照",
"丝来线去": "絲來線去",
"丝布": "絲布",
"丝板": "絲板",
"丝瓜布": "絲瓜布",
"丝绒布": "絲絨布",
"丝线": "絲線",
"丝织厂": "絲織廠",
"丝虫": "絲蟲",
"綑吊": "綑吊",
"经卷": "經卷",
"绿霉素": "綠黴素",
"维系": "維繫",
"绾发": "綰髮",
"网里": "網裡",
"紧绷": "緊繃",
"紧绷着": "緊繃著",
"紧追不舍": "緊追不捨",
"编制": "編製",
"编发": "編髮",
"缓冲": "緩衝",
"致密": "緻密",
"萦回": "縈迴",
"县里": "縣裡",
"县志": "縣誌",
"缝里": "縫裡",
"缝制": "縫製",
"纤夫": "縴夫",
"繁复": "繁複",
"绷住": "繃住",
"绷子": "繃子",
"绷带": "繃帶",
"绷紧": "繃緊",
"绷脸": "繃臉",
"绷着": "繃著",
"绷着脸": "繃著臉",
"绷着脸儿": "繃著臉兒",
"绷开": "繃開",
"绘制": "繪製",
"系上": "繫上",
"系到": "繫到",
"系囚": "繫囚",
"系心": "繫心",
"系念": "繫念",
"系怀": "繫懷",
"系数": "繫數",
"系于": "繫於",
"系系": "繫系",
"系紧": "繫緊",
"系绳": "繫繩",
"系着": "繫著",
"系辞": "繫辭",
"缴卷": "繳卷",
"累囚": "纍囚",
"累累": "纍纍",
"坛子": "罈子",
"坛坛罐罐": "罈罈罐罐",
"骂着": "罵著",
"美制": "美製",
"美发": "美髮",
"翻来复去": "翻來覆去",
"翻天复地": "翻天覆地",
"翻复": "翻覆",
"翻云复雨": "翻雲覆雨",
"老么": "老么",
"老板": "老闆",
"考卷": "考卷",
"耕获": "耕穫",
"聊斋志异": "聊齋誌異",
"联系": "聯係",
"联系": "聯繫",
"肉丝面": "肉絲麵",
"肉羹面": "肉羹麵",
"肉松": "肉鬆",
"肢体": "肢体",
"背向着": "背向著",
"背地里": "背地裡",
"胡里胡涂": "胡裡胡塗",
"能干": "能幹",
"脉冲": "脈衝",
"脱发": "脫髮",
"腊味": "腊味",
"腊笔": "腊筆",
"腊肉": "腊肉",
"脑子里": "腦子裡",
"腰里": "腰裡",
"胶卷": "膠卷",
"自制": "自製",
"自觉自愿": "自覺自愿",
"台上": "臺上",
"台下": "臺下",
"台中": "臺中",
"台北": "臺北",
"台南": "臺南",
"台地": "臺地",
"台塑": "臺塑",
"台大": "臺大",
"台币": "臺幣",
"台座": "臺座",
"台东": "臺東",
"台柱": "臺柱",
"台榭": "臺榭",
"台汽": "臺汽",
"台海": "臺海",
"台澎金马": "臺澎金馬",
"台湾": "臺灣",
"台灯": "臺燈",
"台球": "臺球",
"台省": "臺省",
"台端": "臺端",
"台糖": "臺糖",
"台肥": "臺肥",
"台航": "臺航",
"台视": "臺視",
"台词": "臺詞",
"台车": "臺車",
"台铁": "臺鐵",
"台阶": "臺階",
"台电": "臺電",
"台面": "臺面",
"舂谷": "舂穀",
"兴致": "興緻",
"兴高采烈": "興高采烈",
"旧历": "舊曆",
"舒卷": "舒卷",
"舞台": "舞臺",
"航海历": "航海曆",
"船只": "船隻",
"舰只": "艦隻",
"芬郁": "芬郁",
"花卷": "花卷",
"花盆里": "花盆裡",
"花采": "花采",
"苑里": "苑裡",
"若干": "若干",
"苦干": "苦幹",
"苦里": "苦裏",
"苦卤": "苦鹵",
"范仲淹": "范仲淹",
"范蠡": "范蠡",
"范阳": "范陽",
"茅台": "茅臺",
"茶几": "茶几",
"草丛里": "草叢裡",
"庄里": "莊裡",
"茎干": "莖幹",
"莽荡": "莽蕩",
"菌丝体": "菌絲体",
"菌丝体": "菌絲體",
"华里": "華裡",
"华发": "華髮",
"万卷": "萬卷",
"万历": "萬曆",
"万只": "萬隻",
"落发": "落髮",
"着儿": "著兒",
"着书立说": "著書立說",
"着色软体": "著色軟體",
"着重指出": "著重指出",
"着录": "著錄",
"着录规则": "著錄規則",
"蓄发": "蓄髮",
"蓄须": "蓄鬚",
"蓬发": "蓬髮",
"蓬松": "蓬鬆",
"莲台": "蓮臺",
"荡来荡去": "蕩來蕩去",
"荡女": "蕩女",
"荡妇": "蕩婦",
"荡寇": "蕩寇",
"荡平": "蕩平",
"荡涤": "蕩滌",
"荡漾": "蕩漾",
"荡然": "蕩然",
"荡舟": "蕩舟",
"荡船": "蕩船",
"荡荡": "蕩蕩",
"薑丝": "薑絲",
"薙发": "薙髮",
"借以": "藉以",
"借口": "藉口",
"借故": "藉故",
"借机": "藉機",
"借此": "藉此",
"借由": "藉由",
"借端": "藉端",
"借着": "藉著",
"借借": "藉藉",
"借词": "藉詞",
"借资": "藉資",
"借酒浇愁": "藉酒澆愁",
"藤制": "藤製",
"蕴含着": "蘊含著",
"蕴涵着": "蘊涵著",
"蕴借": "蘊藉",
"萝卜": "蘿蔔",
"虎须": "虎鬚",
"号志": "號誌",
"蜂后": "蜂后",
"蛮干": "蠻幹",
"行事历": "行事曆",
"胡同": "衚衕",
"冲上": "衝上",
"冲下": "衝下",
"冲来": "衝來",
"冲倒": "衝倒",
"冲出": "衝出",
"冲到": "衝到",
"冲刺": "衝刺",
"冲克": "衝剋",
"冲力": "衝力",
"冲劲": "衝勁",
"冲动": "衝動",
"冲去": "衝去",
"冲口": "衝口",
"冲垮": "衝垮",
"冲堂": "衝堂",
"冲压": "衝壓",
"冲天": "衝天",
"冲掉": "衝掉",
"冲撞": "衝撞",
"冲击": "衝擊",
"冲散": "衝散",
"冲决": "衝決",
"冲浪": "衝浪",
"冲激": "衝激",
"冲破": "衝破",
"冲程": "衝程",
"冲突": "衝突",
"冲线": "衝線",
"冲着": "衝著",
"冲冲": "衝衝",
"冲要": "衝要",
"冲起": "衝起",
"冲进": "衝進",
"冲过": "衝過",
"冲锋": "衝鋒",
"表里": "表裡",
"袖里": "袖裡",
"被里": "被裡",
"被复": "被複",
"被复": "被覆",
"被复着": "被覆著",
"被发": "被髮",
"裁并": "裁併",
"裁制": "裁製",
"里面": "裏面",
"里人": "裡人",
"里加": "裡加",
"里外": "裡外",
"里子": "裡子",
"里屋": "裡屋",
"里层": "裡層",
"里布": "裡布",
"里带": "裡帶",
"里弦": "裡弦",
"里应外合": "裡應外合",
"里拉": "裡拉",
"里斯": "裡斯",
"里海": "裡海",
"里脊": "裡脊",
"里衣": "裡衣",
"里里": "裡裡",
"里通外国": "裡通外國",
"里通外敌": "裡通外敵",
"里边": "裡邊",
"里间": "裡間",
"里面": "裡面",
"里头": "裡頭",
"制件": "製件",
"制作": "製作",
"制做": "製做",
"制备": "製備",
"制冰": "製冰",
"制冷": "製冷",
"制剂": "製劑",
"制品": "製品",
"制图": "製圖",
"制成": "製成",
"制法": "製法",
"制为": "製為",
"制片": "製片",
"制版": "製版",
"制程": "製程",
"制糖": "製糖",
"制纸": "製紙",
"制药": "製藥",
"制表": "製表",
"制裁": "製裁",
"制造": "製造",
"制革": "製革",
"制鞋": "製鞋",
"制盐": "製鹽",
"复仞年如": "複仞年如",
"复以百万": "複以百萬",
"复位": "複位",
"复信": "複信",
"复分数": "複分數",
"复列": "複列",
"复利": "複利",
"复印": "複印",
"复原": "複原",
"复句": "複句",
"复合": "複合",
"复名": "複名",
"复员": "複員",
"复壁": "複壁",
"复壮": "複壯",
"复姓": "複姓",
"复字键": "複字鍵",
"复审": "複審",
"复写": "複寫",
"复式": "複式",
"复复": "複復",
"复数": "複數",
"复本": "複本",
"复查": "複查",
"复核": "複核",
"复检": "複檢",
"复次": "複次",
"复比": "複比",
"复决": "複決",
"复活": "複活",
"复测": "複測",
"复亩珍": "複畝珍",
"复发": "複發",
"复目": "複目",
"复眼": "複眼",
"复种": "複種",
"复线": "複線",
"复习": "複習",
"复兴社": "複興社",
"复旧": "複舊",
"复色": "複色",
"复叶": "複葉",
"复盖": "複蓋",
"复苏": "複蘇",
"复制": "複製",
"复诊": "複診",
"复词": "複詞",
"复试": "複試",
"复课": "複課",
"复议": "複議",
"复变函数": "複變函數",
"复赛": "複賽",
"复述": "複述",
"复选": "複選",
"复钱": "複錢",
"复杂": "複雜",
"复电": "複電",
"复音": "複音",
"复韵": "複韻",
"衬里": "襯裡",
"西岳": "西嶽",
"西征": "西征",
"西历": "西曆",
"要冲": "要衝",
"要么": "要麼",
"复上": "覆上",
"复亡": "覆亡",
"复住": "覆住",
"复信": "覆信",
"复命": "覆命",
"复在": "覆在",
"复审": "覆審",
"复巢之下": "覆巢之下",
"复成": "覆成",
"复败": "覆敗",
"复文": "覆文",
"复校": "覆校",
"复核": "覆核",
"复水难收": "覆水難收",
"复没": "覆沒",
"复灭": "覆滅",
"复盆": "覆盆",
"复舟": "覆舟",
"复着": "覆著",
"复盖": "覆蓋",
"复盖着": "覆蓋著",
"复试": "覆試",
"复议": "覆議",
"复车": "覆車",
"复载": "覆載",
"复辙": "覆轍",
"复电": "覆電",
"见复": "見覆",
"亲征": "親征",
"观众台": "觀眾臺",
"观台": "觀臺",
"观象台": "觀象臺",
"角落里": "角落裡",
"觔斗": "觔斗",
"触须": "觸鬚",
"订制": "訂製",
"诉说着": "訴說著",
"词汇": "詞彙",
"试卷": "試卷",
"诗卷": "詩卷",
"话里有话": "話裡有話",
"志哀": "誌哀",
"志喜": "誌喜",
"志庆": "誌慶",
"语云": "語云",
"语汇": "語彙",
"诬蔑": "誣衊",
"诵经台": "誦經臺",
"说着": "說著",
"课征": "課征",
"调制": "調製",
"调频台": "調頻臺",
"请参阅": "請參閱",
"讲台": "講臺",
"谢绝参观": "謝絕參觀",
"护发": "護髮",
"雠隙": "讎隙",
"豆腐干": "豆腐干",
"竖着": "豎著",
"丰富多采": "豐富多采",
"丰滨": "豐濱",
"丰滨乡": "豐濱鄉",
"丰采": "豐采",
"象征着": "象徵著",
"贵干": "貴幹",
"贾后": "賈后",
"赈饥": "賑饑",
"贤后": "賢后",
"质朴": "質朴",
"赌台": "賭檯",
"购并": "購併",
"赤松": "赤鬆",
"起吊": "起吊",
"起复": "起複",
"赶制": "趕製",
"跌荡": "跌蕩",
"跟斗": "跟斗",
"跳荡": "跳蕩",
"跳表": "跳錶",
"踬仆": "躓仆",
"躯干": "軀幹",
"车库里": "車庫裡",
"车站里": "車站裡",
"车里": "車裡",
"轻松": "輕鬆",
"轮回": "輪迴",
"转台": "轉檯",
"辛丑": "辛丑",
"辟邪": "辟邪",
"办伙": "辦伙",
"办公台": "辦公檯",
"辞汇": "辭彙",
"农历": "農曆",
"迂回": "迂迴",
"近日里": "近日裡",
"迥然回异": "迥然迴異",
"回光返照": "迴光返照",
"回向": "迴向",
"回圈": "迴圈",
"回廊": "迴廊",
"回形夹": "迴形夾",
"回文": "迴文",
"回旋": "迴旋",
"回流": "迴流",
"回环": "迴環",
"回荡": "迴盪",
"回纹针": "迴紋針",
"回绕": "迴繞",
"回肠": "迴腸",
"回荡": "迴蕩",
"回诵": "迴誦",
"回路": "迴路",
"回转": "迴轉",
"回递性": "迴遞性",
"回避": "迴避",
"回响": "迴響",
"回风": "迴風",
"回首": "迴首",
"迷蒙": "迷濛",
"退伙": "退伙",
"这么着": "這么著",
"这里": "這裏",
"这里": "這裡",
"这只": "這隻",
"这么": "這麼",
"这么着": "這麼著",
"通心面": "通心麵",
"速食面": "速食麵",
"连系": "連繫",
"连台好戏": "連臺好戲",
"游荡": "遊蕩",
"遍布": "遍佈",
"递回": "遞迴",
"远征": "遠征",
"适才": "適纔",
"遮复": "遮覆",
"还冲": "還衝",
"邋里邋遢": "邋裡邋遢",
"那里": "那裡",
"那只": "那隻",
"那么": "那麼",
"那么着": "那麼著",
"邪辟": "邪辟",
"郁烈": "郁烈",
"郁穆": "郁穆",
"郁郁": "郁郁",
"郁闭": "郁閉",
"郁馥": "郁馥",
"乡愿": "鄉愿",
"乡里": "鄉裡",
"邻里": "鄰裡",
"配合着": "配合著",
"配制": "配製",
"酒杯": "酒盃",
"酒坛": "酒罈",
"酥松": "酥鬆",
"醋坛": "醋罈",
"酝借": "醞藉",
"酝酿着": "醞釀著",
"医药": "醫葯",
"醲郁": "醲郁",
"酿制": "釀製",
"采地": "采地",
"采女": "采女",
"采声": "采聲",
"采色": "采色",
"采邑": "采邑",
"里程表": "里程錶",
"重折": "重摺",
"重复": "重複",
"重复": "重覆",
"重锤": "重鎚",
"野台戏": "野臺戲",
"金斗": "金斗",
"金表": "金錶",
"金发": "金髮",
"金霉素": "金黴素",
"钉锤": "釘鎚",
"银朱": "銀硃",
"银发": "銀髮",
"铜制": "銅製",
"铝制": "鋁製",
"钢制": "鋼製",
"录着": "錄著",
"录制": "錄製",
"表带": "錶帶",
"表店": "錶店",
"表厂": "錶廠",
"表壳": "錶殼",
"表链": "錶鏈",
"表面": "錶面",
"锅台": "鍋臺",
"锻鍊出": "鍛鍊出",
"锻鍊身体": "鍛鍊身体",
"锲而不舍": "鍥而不捨",
"锤儿": "鎚兒",
"锤子": "鎚子",
"锤头": "鎚頭",
"链霉素": "鏈黴素",
"镜台": "鏡臺",
"锈病": "鏽病",
"锈菌": "鏽菌",
"锈蚀": "鏽蝕",
"钟表": "鐘錶",
"铁锤": "鐵鎚",
"铁锈": "鐵鏽",
"长征": "長征",
"长发": "長髮",
"长须鲸": "長鬚鯨",
"门帘": "門帘",
"门斗": "門斗",
"门里": "門裡",
"开伙": "開伙",
"开卷": "開卷",
"开诚布公": "開誠佈公",
"开采": "開采",
"閒情逸致": "閒情逸緻",
"閒荡": "閒蕩",
"间不容发": "間不容髮",
"闵采尔": "閔采爾",
"阅卷": "閱卷",
"阑干": "闌干",
"关系": "關係",
"关系着": "關係著",
"防御": "防禦",
"防锈": "防鏽",
"防台": "防颱",
"阿斗": "阿斗",
"阿里": "阿裡",
"除旧布新": "除舊佈新",
"阴干": "陰干",
"阴历": "陰曆",
"阴郁": "陰郁",
"陆征祥": "陸征祥",
"阳春面": "陽春麵",
"阳历": "陽曆",
"阳台": "陽臺",
"只字": "隻字",
"只影": "隻影",
"只手遮天": "隻手遮天",
"只眼": "隻眼",
"只言片语": "隻言片語",
"只身": "隻身",
"雅致": "雅緻",
"雇佣": "雇佣",
"双折": "雙摺",
"杂志": "雜誌",
"鸡丝": "雞絲",
"鸡丝面": "雞絲麵",
"鸡腿面": "雞腿麵",
"鸡只": "雞隻",
"难舍": "難捨",
"雪里": "雪裡",
"云须": "雲鬚",
"电子表": "電子錶",
"电台": "電臺",
"电冲": "電衝",
"电复": "電覆",
"电视台": "電視臺",
"电表": "電錶",
"震荡": "震蕩",
"雾里": "霧裡",
"露台": "露臺",
"灵台": "靈臺",
"青瓦台": "青瓦臺",
"青霉": "青黴",
"面朝着": "面朝著",
"面临着": "面臨著",
"鞋里": "鞋裡",
"鞣制": "鞣製",
"秋千": "鞦韆",
"鞭辟入里": "鞭辟入裡",
"韩国制": "韓國製",
"韩制": "韓製",
"预制": "預製",
"颁布": "頒佈",
"头里": "頭裡",
"头发": "頭髮",
"颊须": "頰鬚",
"颠仆": "顛仆",
"颠复": "顛複",
"颠复": "顛覆",
"显着标志": "顯著標志",
"风土志": "風土誌",
"风斗": "風斗",
"风物志": "風物誌",
"风里": "風裡",
"风采": "風采",
"台风": "颱風",
"刮了": "颳了",
"刮倒": "颳倒",
"刮去": "颳去",
"刮得": "颳得",
"刮着": "颳著",
"刮走": "颳走",
"刮起": "颳起",
"刮风": "颳風",
"飘荡": "飄蕩",
"饭团": "飯糰",
"饼干": "餅干",
"馄饨面": "餛飩麵",
"饥不择食": "饑不擇食",
"饥寒": "饑寒",
"饥民": "饑民",
"饥渴": "饑渴",
"饥溺": "饑溺",
"饥荒": "饑荒",
"饥饱": "饑飽",
"饥饿": "饑餓",
"饥馑": "饑饉",
"首当其冲": "首當其衝",
"香郁": "香郁",
"馥郁": "馥郁",
"马里": "馬裡",
"马表": "馬錶",
"骀荡": "駘蕩",
"腾冲": "騰衝",
"骨子里": "骨子裡",
"骨干": "骨幹",
"骨灰坛": "骨灰罈",
"肮脏": "骯髒",
"脏乱": "髒亂",
"脏兮兮": "髒兮兮",
"脏字": "髒字",
"脏得": "髒得",
"脏东西": "髒東西",
"脏水": "髒水",
"脏的": "髒的",
"脏话": "髒話",
"脏钱": "髒錢",
"高干": "高幹",
"高台": "高臺",
"髭须": "髭鬚",
"发型": "髮型",
"发夹": "髮夾",
"发妻": "髮妻",
"发姐": "髮姐",
"发带": "髮帶",
"发廊": "髮廊",
"发式": "髮式",
"发指": "髮指",
"发捲": "髮捲",
"发根": "髮根",
"发毛": "髮毛",
"发油": "髮油",
"发状": "髮狀",
"发短心长": "髮短心長",
"发端": "髮端",
"发结": "髮結",
"发丝": "髮絲",
"发网": "髮網",
"发肤": "髮膚",
"发胶": "髮膠",
"发菜": "髮菜",
"发蜡": "髮蠟",
"发辫": "髮辮",
"发针": "髮針",
"发长": "髮長",
"发际": "髮際",
"发霜": "髮霜",
"发髻": "髮髻",
"发鬓": "髮鬢",
"鬅松": "鬅鬆",
"松了": "鬆了",
"松些": "鬆些",
"松劲": "鬆勁",
"松动": "鬆動",
"松口": "鬆口",
"松土": "鬆土",
"松弛": "鬆弛",
"松快": "鬆快",
"松懈": "鬆懈",
"松手": "鬆手",
"松散": "鬆散",
"松林": "鬆林",
"松柔": "鬆柔",
"松毛虫": "鬆毛蟲",
"松浮": "鬆浮",
"松涛": "鬆濤",
"松科": "鬆科",
"松节油": "鬆節油",
"松绑": "鬆綁",
"松紧": "鬆緊",
"松缓": "鬆緩",
"松脆": "鬆脆",
"松脱": "鬆脫",
"松起": "鬆起",
"松软": "鬆軟",
"松通": "鬆通",
"松开": "鬆開",
"松饼": "鬆餅",
"松松": "鬆鬆",
"鬈发": "鬈髮",
"胡子": "鬍子",
"胡梢": "鬍梢",
"胡渣": "鬍渣",
"胡髭": "鬍髭",
"胡须": "鬍鬚",
"须根": "鬚根",
"须毛": "鬚毛",
"须生": "鬚生",
"须眉": "鬚眉",
"须发": "鬚髮",
"须须": "鬚鬚",
"鬓发": "鬢髮",
"斗着": "鬥著",
"闹着玩儿": "鬧著玩儿",
"闹着玩儿": "鬧著玩兒",
"郁郁": "鬱郁",
"鱼松": "魚鬆",
"鲸须": "鯨鬚",
"鲇鱼": "鯰魚",
"鹤发": "鶴髮",
"卤化": "鹵化",
"卤味": "鹵味",
"卤族": "鹵族",
"卤水": "鹵水",
"卤汁": "鹵汁",
"卤簿": "鹵簿",
"卤素": "鹵素",
"卤莽": "鹵莽",
"卤钝": "鹵鈍",
"咸味": "鹹味",
"咸土": "鹹土",
"咸度": "鹹度",
"咸得": "鹹得",
"咸水": "鹹水",
"咸海": "鹹海",
"咸淡": "鹹淡",
"咸湖": "鹹湖",
"咸汤": "鹹湯",
"咸的": "鹹的",
"咸肉": "鹹肉",
"咸菜": "鹹菜",
"咸蛋": "鹹蛋",
"咸猪肉": "鹹豬肉",
"咸类": "鹹類",
"咸鱼": "鹹魚",
"咸鸭蛋": "鹹鴨蛋",
"咸卤": "鹹鹵",
"咸咸": "鹹鹹",
"盐卤": "鹽鹵",
"面价": "麵價",
"面包": "麵包",
"面团": "麵團",
"面店": "麵店",
"面厂": "麵廠",
"面杖": "麵杖",
"面条": "麵條",
"面灰": "麵灰",
"面皮": "麵皮",
"面筋": "麵筋",
"面粉": "麵粉",
"面糊": "麵糊",
"面线": "麵線",
"面茶": "麵茶",
"面食": "麵食",
"面饺": "麵餃",
"面饼": "麵餅",
"麻酱面": "麻醬麵",
"黄历": "黃曆",
"黄发垂髫": "黃髮垂髫",
"黑发": "黑髮",
"黑松": "黑鬆",
"霉毒": "黴毒",
"霉菌": "黴菌",
"鼓里": "鼓裡",
"冬冬": "鼕鼕",
"龙卷": "龍卷",
"龙须": "龍鬚",
}
zh2Hans = {
'顯著': '显著',
'土著': '土著',
'印表機': '打印机',
'說明檔案': '帮助文件',
"瀋": "沈",
"畫": "划",
"鍾": "钟",
"靦": "腼",
"餘": "余",
"鯰": "鲇",
"鹼": "碱",
"㠏": "㟆",
"ð¡µ": "㛟",
"万": "万",
"与": "与",
"丑": "丑",
"丟": "丢",
"並": "并",
"丰": "丰",
"么": "么",
"乾": "干",
"乾坤": "乾坤",
"乾隆": "乾隆",
"亂": "乱",
"云": "云",
"亙": "亘",
"亞": "亚",
"仆": "仆",
"价": "价",
"伙": "伙",
"佇": "伫",
"佈": "布",
"体": "体",
"余": "余",
"余": "馀",
"佣": "佣",
"併": "并",
"來": "来",
"侖": "仑",
"侶": "侣",
"俁": "俣",
"係": "系",
"俔": "伣",
"俠": "侠",
"倀": "伥",
"倆": "俩",
"倈": "俫",
"倉": "仓",
"個": "个",
"們": "们",
"倫": "伦",
"偉": "伟",
"側": "侧",
"偵": "侦",
"偽": "伪",
"傑": "杰",
"傖": "伧",
"傘": "伞",
"備": "备",
"傢": "家",
"傭": "佣",
"傯": "偬",
"傳": "传",
"傴": "伛",
"債": "债",
"傷": "伤",
"傾": "倾",
"僂": "偻",
"僅": "仅",
"僉": "佥",
"僑": "侨",
"僕": "仆",
"僞": "伪",
"僥": "侥",
"僨": "偾",
"價": "价",
"儀": "仪",
"儂": "侬",
"億": "亿",
"儈": "侩",
"儉": "俭",
"儐": "傧",
"儔": "俦",
"儕": "侪",
"儘": "尽",
"償": "偿",
"優": "优",
"儲": "储",
"儷": "俪",
"儸": "㑩",
"儺": "傩",
"儻": "傥",
"儼": "俨",
"儿": "儿",
"兇": "凶",
"兌": "兑",
"兒": "儿",
"兗": "兖",
"党": "党",
"內": "内",
"兩": "两",
"冊": "册",
"冪": "幂",
"准": "准",
"凈": "净",
"凍": "冻",
"凜": "凛",
"几": "几",
"凱": "凯",
"划": "划",
"別": "别",
"刪": "删",
"剄": "刭",
"則": "则",
"剋": "克",
"剎": "刹",
"剗": "刬",
"剛": "刚",
"剝": "剥",
"剮": "剐",
"剴": "剀",
"創": "创",
"劃": "划",
"劇": "剧",
"劉": "刘",
"劊": "刽",
"劌": "刿",
"劍": "剑",
"劏": "㓥",
"劑": "剂",
"劚": "㔉",
"勁": "劲",
"動": "动",
"務": "务",
"勛": "勋",
"勝": "胜",
"勞": "劳",
"勢": "势",
"勩": "勚",
"勱": "劢",
"勵": "励",
"勸": "劝",
"勻": "匀",
"匭": "匦",
"匯": "汇",
"匱": "匮",
"區": "区",
"協": "协",
"卷": "卷",
"卻": "却",
"厂": "厂",
"厙": "厍",
"厠": "厕",
"厭": "厌",
"厲": "厉",
"厴": "厣",
"參": "参",
"叄": "叁",
"叢": "丛",
"台": "台",
"叶": "叶",
"吊": "吊",
"后": "后",
"吒": "咤",
"吳": "吴",
"吶": "呐",
"呂": "吕",
"獃": "呆",
"咼": "呙",
"員": "员",
"唄": "呗",
"唚": "吣",
"問": "问",
"啓": "启",
"啞": "哑",
"啟": "启",
"啢": "唡",
"喎": "㖞",
"喚": "唤",
"喪": "丧",
"喬": "乔",
"單": "单",
"喲": "哟",
"嗆": "呛",
"嗇": "啬",
"嗊": "唝",
"嗎": "吗",
"嗚": "呜",
"嗩": "唢",
"嗶": "哔",
"嘆": "叹",
"嘍": "喽",
"嘔": "呕",
"嘖": "啧",
"嘗": "尝",
"嘜": "唛",
"嘩": "哗",
"嘮": "唠",
"嘯": "啸",
"嘰": "叽",
"嘵": "哓",
"嘸": "呒",
"嘽": "啴",
"噁": "恶",
"噓": "嘘",
"噚": "㖊",
"噝": "咝",
"噠": "哒",
"噥": "哝",
"噦": "哕",
"噯": "嗳",
"噲": "哙",
"噴": "喷",
"噸": "吨",
"噹": "当",
"嚀": "咛",
"嚇": "吓",
"嚌": "哜",
"嚕": "噜",
"嚙": "啮",
"嚥": "咽",
"嚦": "呖",
"嚨": "咙",
"嚮": "向",
"嚲": "亸",
"嚳": "喾",
"嚴": "严",
"嚶": "嘤",
"囀": "啭",
"囁": "嗫",
"囂": "嚣",
"囅": "冁",
"囈": "呓",
"囌": "苏",
"囑": "嘱",
"囪": "囱",
"圇": "囵",
"國": "国",
"圍": "围",
"園": "园",
"圓": "圆",
"圖": "图",
"團": "团",
"坏": "坏",
"垵": "埯",
"埡": "垭",
"埰": "采",
"執": "执",
"堅": "坚",
"堊": "垩",
"堖": "垴",
"堝": "埚",
"堯": "尧",
"報": "报",
"場": "场",
"塊": "块",
"塋": "茔",
"塏": "垲",
"塒": "埘",
"塗": "涂",
"塚": "冢",
"塢": "坞",
"塤": "埙",
"塵": "尘",
"塹": "堑",
"墊": "垫",
"墜": "坠",
"墮": "堕",
"墳": "坟",
"墻": "墙",
"墾": "垦",
"壇": "坛",
"壈": "ð¡",
"壋": "垱",
"壓": "压",
"壘": "垒",
"壙": "圹",
"壚": "垆",
"壞": "坏",
"壟": "垄",
"壠": "垅",
"壢": "坜",
"壩": "坝",
"壯": "壮",
"壺": "壶",
"壼": "壸",
"壽": "寿",
"夠": "够",
"夢": "梦",
"夾": "夹",
"奐": "奂",
"奧": "奥",
"奩": "奁",
"奪": "夺",
"奬": "奖",
"奮": "奋",
"奼": "姹",
"妝": "妆",
"姍": "姗",
"姜": "姜",
"姦": "奸",
"娛": "娱",
"婁": "娄",
"婦": "妇",
"婭": "娅",
"媧": "娲",
"媯": "妫",
"媼": "媪",
"媽": "妈",
"嫗": "妪",
"嫵": "妩",
"嫻": "娴",
"嫿": "婳",
"嬀": "妫",
"嬈": "娆",
"嬋": "婵",
"嬌": "娇",
"嬙": "嫱",
"嬡": "嫒",
"嬤": "嬷",
"嬪": "嫔",
"嬰": "婴",
"嬸": "婶",
"孌": "娈",
"孫": "孙",
"學": "学",
"孿": "孪",
"宁": "宁",
"宮": "宫",
"寢": "寝",
"實": "实",
"寧": "宁",
"審": "审",
"寫": "写",
"寬": "宽",
"寵": "宠",
"寶": "宝",
"將": "将",
"專": "专",
"尋": "寻",
"對": "对",
"導": "导",
"尷": "尴",
"屆": "届",
"屍": "尸",
"屓": "屃",
"屜": "屉",
"屢": "屡",
"層": "层",
"屨": "屦",
"屬": "属",
"岡": "冈",
"峴": "岘",
"島": "岛",
"峽": "峡",
"崍": "崃",
"崗": "岗",
"崢": "峥",
"崬": "岽",
"嵐": "岚",
"嶁": "嵝",
"嶄": "崭",
"嶇": "岖",
"嶔": "嵚",
"嶗": "崂",
"嶠": "峤",
"嶢": "峣",
"嶧": "峄",
"嶮": "崄",
"嶴": "岙",
"嶸": "嵘",
"嶺": "岭",
"嶼": "屿",
"嶽": "岳",
"巋": "岿",
"巒": "峦",
"巔": "巅",
"巰": "巯",
"帘": "帘",
"帥": "帅",
"師": "师",
"帳": "帐",
"帶": "带",
"幀": "帧",
"幃": "帏",
"幗": "帼",
"幘": "帻",
"幟": "帜",
"幣": "币",
"幫": "帮",
"幬": "帱",
"幹": "干",
"幺": "么",
"幾": "几",
"广": "广",
"庫": "库",
"廁": "厕",
"廂": "厢",
"廄": "厩",
"廈": "厦",
"廚": "厨",
"廝": "厮",
"廟": "庙",
"廠": "厂",
"廡": "庑",
"廢": "废",
"廣": "广",
"廩": "廪",
"廬": "庐",
"廳": "厅",
"弒": "弑",
"弳": "弪",
"張": "张",
"強": "强",
"彆": "别",
"彈": "弹",
"彌": "弥",
"彎": "弯",
"彙": "汇",
"彞": "彝",
"彥": "彦",
"征": "征",
"後": "后",
"徑": "径",
"從": "从",
"徠": "徕",
"復": "复",
"徵": "征",
"徹": "彻",
"志": "志",
"恆": "恒",
"恥": "耻",
"悅": "悦",
"悞": "悮",
"悵": "怅",
"悶": "闷",
"惡": "恶",
"惱": "恼",
"惲": "恽",
"惻": "恻",
"愛": "爱",
"愜": "惬",
"愨": "悫",
"愴": "怆",
"愷": "恺",
"愾": "忾",
"愿": "愿",
"慄": "栗",
"態": "态",
"慍": "愠",
"慘": "惨",
"慚": "惭",
"慟": "恸",
"慣": "惯",
"慤": "悫",
"慪": "怄",
"慫": "怂",
"慮": "虑",
"慳": "悭",
"慶": "庆",
"憂": "忧",
"憊": "惫",
"憐": "怜",
"憑": "凭",
"憒": "愦",
"憚": "惮",
"憤": "愤",
"憫": "悯",
"憮": "怃",
"憲": "宪",
"憶": "忆",
"懇": "恳",
"應": "应",
"懌": "怿",
"懍": "懔",
"懞": "蒙",
"懟": "怼",
"懣": "懑",
"懨": "恹",
"懲": "惩",
"懶": "懒",
"懷": "怀",
"懸": "悬",
"懺": "忏",
"懼": "惧",
"懾": "慑",
"戀": "恋",
"戇": "戆",
"戔": "戋",
"戧": "戗",
"戩": "戬",
"戰": "战",
"戱": "戯",
"戲": "戏",
"戶": "户",
"担": "担",
"拋": "抛",
"挩": "捝",
"挾": "挟",
"捨": "舍",
"捫": "扪",
"据": "据",
"掃": "扫",
"掄": "抡",
"掗": "挜",
"掙": "挣",
"掛": "挂",
"採": "采",
"揀": "拣",
"揚": "扬",
"換": "换",
"揮": "挥",
"損": "损",
"搖": "摇",
"搗": "捣",
"搵": "揾",
"搶": "抢",
"摑": "掴",
"摜": "掼",
"摟": "搂",
"摯": "挚",
"摳": "抠",
"摶": "抟",
"摺": "折",
"摻": "掺",
"撈": "捞",
"撏": "挦",
"撐": "撑",
"撓": "挠",
"撝": "㧑",
"撟": "挢",
"撣": "掸",
"撥": "拨",
"撫": "抚",
"撲": "扑",
"撳": "揿",
"撻": "挞",
"撾": "挝",
"撿": "捡",
"擁": "拥",
"擄": "掳",
"擇": "择",
"擊": "击",
"擋": "挡",
"擓": "㧟",
"擔": "担",
"據": "据",
"擠": "挤",
"擬": "拟",
"擯": "摈",
"擰": "拧",
"擱": "搁",
"擲": "掷",
"擴": "扩",
"擷": "撷",
"擺": "摆",
"擻": "擞",
"擼": "撸",
"擾": "扰",
"攄": "摅",
"攆": "撵",
"攏": "拢",
"攔": "拦",
"攖": "撄",
"攙": "搀",
"攛": "撺",
"攜": "携",
"攝": "摄",
"攢": "攒",
"攣": "挛",
"攤": "摊",
"攪": "搅",
"攬": "揽",
"敗": "败",
"敘": "叙",
"敵": "敌",
"數": "数",
"斂": "敛",
"斃": "毙",
"斕": "斓",
"斗": "斗",
"斬": "斩",
"斷": "断",
"於": "于",
"時": "时",
"晉": "晋",
"晝": "昼",
"暈": "晕",
"暉": "晖",
"暘": "旸",
"暢": "畅",
"暫": "暂",
"曄": "晔",
"曆": "历",
"曇": "昙",
"曉": "晓",
"曏": "向",
"曖": "暧",
"曠": "旷",
"曨": "昽",
"曬": "晒",
"書": "书",
"會": "会",
"朧": "胧",
"朮": "术",
"术": "术",
"朴": "朴",
"東": "东",
"杴": "锨",
"极": "极",
"柜": "柜",
"柵": "栅",
"桿": "杆",
"梔": "栀",
"梘": "枧",
"條": "条",
"梟": "枭",
"梲": "棁",
"棄": "弃",
"棖": "枨",
"棗": "枣",
"棟": "栋",
"棧": "栈",
"棲": "栖",
"棶": "梾",
"椏": "桠",
"楊": "杨",
"楓": "枫",
"楨": "桢",
"業": "业",
"極": "极",
"榪": "杩",
"榮": "荣",
"榲": "榅",
"榿": "桤",
"構": "构",
"槍": "枪",
"槤": "梿",
"槧": "椠",
"槨": "椁",
"槳": "桨",
"樁": "桩",
"樂": "乐",
"樅": "枞",
"樓": "楼",
"標": "标",
"樞": "枢",
"樣": "样",
"樸": "朴",
"樹": "树",
"樺": "桦",
"橈": "桡",
"橋": "桥",
"機": "机",
"橢": "椭",
"橫": "横",
"檁": "檩",
"檉": "柽",
"檔": "档",
"檜": "桧",
"檟": "槚",
"檢": "检",
"檣": "樯",
"檮": "梼",
"檯": "台",
"檳": "槟",
"檸": "柠",
"檻": "槛",
"櫃": "柜",
"櫓": "橹",
"櫚": "榈",
"櫛": "栉",
"櫝": "椟",
"櫞": "橼",
"櫟": "栎",
"櫥": "橱",
"櫧": "槠",
"櫨": "栌",
"櫪": "枥",
"櫫": "橥",
"櫬": "榇",
"櫱": "蘖",
"櫳": "栊",
"櫸": "榉",
"櫻": "樱",
"欄": "栏",
"權": "权",
"欏": "椤",
"欒": "栾",
"欖": "榄",
"欞": "棂",
"欽": "钦",
"歐": "欧",
"歟": "欤",
"歡": "欢",
"歲": "岁",
"歷": "历",
"歸": "归",
"歿": "殁",
"殘": "残",
"殞": "殒",
"殤": "殇",
"殨": "㱮",
"殫": "殚",
"殮": "殓",
"殯": "殡",
"殰": "㱩",
"殲": "歼",
"殺": "杀",
"殻": "壳",
"殼": "壳",
"毀": "毁",
"毆": "殴",
"毿": "毵",
"氂": "牦",
"氈": "毡",
"氌": "氇",
"氣": "气",
"氫": "氢",
"氬": "氩",
"氳": "氲",
"汙": "污",
"決": "决",
"沒": "没",
"沖": "冲",
"況": "况",
"洶": "汹",
"浹": "浃",
"涂": "涂",
"涇": "泾",
"涼": "凉",
"淀": "淀",
"淒": "凄",
"淚": "泪",
"淥": "渌",
"淨": "净",
"淩": "凌",
"淪": "沦",
"淵": "渊",
"淶": "涞",
"淺": "浅",
"渙": "涣",
"減": "减",
"渦": "涡",
"測": "测",
"渾": "浑",
"湊": "凑",
"湞": "浈",
"湯": "汤",
"溈": "沩",
"準": "准",
"溝": "沟",
"溫": "温",
"滄": "沧",
"滅": "灭",
"滌": "涤",
"滎": "荥",
"滬": "沪",
"滯": "滞",
"滲": "渗",
"滷": "卤",
"滸": "浒",
"滻": "浐",
"滾": "滚",
"滿": "满",
"漁": "渔",
"漚": "沤",
"漢": "汉",
"漣": "涟",
"漬": "渍",
"漲": "涨",
"漵": "溆",
"漸": "渐",
"漿": "浆",
"潁": "颍",
"潑": "泼",
"潔": "洁",
"潙": "沩",
"潛": "潜",
"潤": "润",
"潯": "浔",
"潰": "溃",
"潷": "滗",
"潿": "涠",
"澀": "涩",
"澆": "浇",
"澇": "涝",
"澐": "沄",
"澗": "涧",
"澠": "渑",
"澤": "泽",
"澦": "滪",
"澩": "泶",
"澮": "浍",
"澱": "淀",
"濁": "浊",
"濃": "浓",
"濕": "湿",
"濘": "泞",
"濛": "蒙",
"濟": "济",
"濤": "涛",
"濫": "滥",
"濰": "潍",
"濱": "滨",
"濺": "溅",
"濼": "泺",
"濾": "滤",
"瀅": "滢",
"瀆": "渎",
"瀇": "㲿",
"瀉": "泻",
"瀋": "沈",
"瀏": "浏",
"瀕": "濒",
"瀘": "泸",
"瀝": "沥",
"瀟": "潇",
"瀠": "潆",
"瀦": "潴",
"瀧": "泷",
"瀨": "濑",
"瀰": "弥",
"瀲": "潋",
"瀾": "澜",
"灃": "沣",
"灄": "滠",
"灑": "洒",
"灕": "漓",
"灘": "滩",
"灝": "灏",
"灠": "漤",
"灣": "湾",
"灤": "滦",
"灧": "滟",
"災": "灾",
"為": "为",
"烏": "乌",
"烴": "烃",
"無": "无",
"煉": "炼",
"煒": "炜",
"煙": "烟",
"煢": "茕",
"煥": "焕",
"煩": "烦",
"煬": "炀",
"煱": "㶽",
"熅": "煴",
"熒": "荧",
"熗": "炝",
"熱": "热",
"熲": "颎",
"熾": "炽",
"燁": "烨",
"燈": "灯",
"燉": "炖",
"燒": "烧",
"燙": "烫",
"燜": "焖",
"營": "营",
"燦": "灿",
"燭": "烛",
"燴": "烩",
"燶": "㶶",
"燼": "烬",
"燾": "焘",
"爍": "烁",
"爐": "炉",
"爛": "烂",
"爭": "争",
"爲": "为",
"爺": "爷",
"爾": "尔",
"牆": "墙",
"牘": "牍",
"牽": "牵",
"犖": "荦",
"犢": "犊",
"犧": "牺",
"狀": "状",
"狹": "狭",
"狽": "狈",
"猙": "狰",
"猶": "犹",
"猻": "狲",
"獁": "犸",
"獄": "狱",
"獅": "狮",
"獎": "奖",
"獨": "独",
"獪": "狯",
"獫": "猃",
"獮": "狝",
"獰": "狞",
"獱": "㺍",
"獲": "获",
"獵": "猎",
"獷": "犷",
"獸": "兽",
"獺": "獭",
"獻": "献",
"獼": "猕",
"玀": "猡",
"現": "现",
"琺": "珐",
"琿": "珲",
"瑋": "玮",
"瑒": "玚",
"瑣": "琐",
"瑤": "瑶",
"瑩": "莹",
"瑪": "玛",
"瑲": "玱",
"璉": "琏",
"璣": "玑",
"璦": "瑷",
"璫": "珰",
"環": "环",
"璽": "玺",
"瓊": "琼",
"瓏": "珑",
"瓔": "璎",
"瓚": "瓒",
"甌": "瓯",
"產": "产",
"産": "产",
"畝": "亩",
"畢": "毕",
"異": "异",
"畵": "画",
"當": "当",
"疇": "畴",
"疊": "叠",
"痙": "痉",
"痾": "疴",
"瘂": "痖",
"瘋": "疯",
"瘍": "疡",
"瘓": "痪",
"瘞": "瘗",
"瘡": "疮",
"瘧": "疟",
"瘮": "瘆",
"瘲": "疭",
"瘺": "瘘",
"瘻": "瘘",
"療": "疗",
"癆": "痨",
"癇": "痫",
"癉": "瘅",
"癘": "疠",
"癟": "瘪",
"癢": "痒",
"癤": "疖",
"癥": "症",
"癧": "疬",
"癩": "癞",
"癬": "癣",
"癭": "瘿",
"癮": "瘾",
"癰": "痈",
"癱": "瘫",
"癲": "癫",
"發": "发",
"皚": "皑",
"皰": "疱",
"皸": "皲",
"皺": "皱",
"盃": "杯",
"盜": "盗",
"盞": "盏",
"盡": "尽",
"監": "监",
"盤": "盘",
"盧": "卢",
"盪": "荡",
"眥": "眦",
"眾": "众",
"睏": "困",
"睜": "睁",
"睞": "睐",
"瞘": "眍",
"瞜": "䁖",
"瞞": "瞒",
"瞭": "了",
"瞶": "瞆",
"瞼": "睑",
"矇": "蒙",
"矓": "眬",
"矚": "瞩",
"矯": "矫",
"硃": "朱",
"硜": "硁",
"硤": "硖",
"硨": "砗",
"确": "确",
"硯": "砚",
"碩": "硕",
"碭": "砀",
"碸": "砜",
"確": "确",
"碼": "码",
"磑": "硙",
"磚": "砖",
"磣": "碜",
"磧": "碛",
"磯": "矶",
"磽": "硗",
"礆": "硷",
"礎": "础",
"礙": "碍",
"礦": "矿",
"礪": "砺",
"礫": "砾",
"礬": "矾",
"礱": "砻",
"祿": "禄",
"禍": "祸",
"禎": "祯",
"禕": "祎",
"禡": "祃",
"禦": "御",
"禪": "禅",
"禮": "礼",
"禰": "祢",
"禱": "祷",
"禿": "秃",
"秈": "籼",
"种": "种",
"稅": "税",
"稈": "秆",
"稏": "䅉",
"稟": "禀",
"種": "种",
"稱": "称",
"穀": "谷",
"穌": "稣",
"積": "积",
"穎": "颖",
"穠": "秾",
"穡": "穑",
"穢": "秽",
"穩": "稳",
"穫": "获",
"穭": "稆",
"窩": "窝",
"窪": "洼",
"窮": "穷",
"窯": "窑",
"窵": "窎",
"窶": "窭",
"窺": "窥",
"竄": "窜",
"竅": "窍",
"竇": "窦",
"竈": "灶",
"竊": "窃",
"竪": "竖",
"競": "竞",
"筆": "笔",
"筍": "笋",
"筑": "筑",
"筧": "笕",
"筴": "䇲",
"箋": "笺",
"箏": "筝",
"節": "节",
"範": "范",
"築": "筑",
"篋": "箧",
"篔": "筼",
"篤": "笃",
"篩": "筛",
"篳": "筚",
"簀": "箦",
"簍": "篓",
"簞": "箪",
"簡": "简",
"簣": "篑",
"簫": "箫",
"簹": "筜",
"簽": "签",
"簾": "帘",
"籃": "篮",
"籌": "筹",
"籖": "签",
"籙": "箓",
"籜": "箨",
"籟": "籁",
"籠": "笼",
"籩": "笾",
"籪": "簖",
"籬": "篱",
"籮": "箩",
"籲": "吁",
"粵": "粤",
"糝": "糁",
"糞": "粪",
"糧": "粮",
"糰": "团",
"糲": "粝",
"糴": "籴",
"糶": "粜",
"糹": "纟",
"糾": "纠",
"紀": "纪",
"紂": "纣",
"約": "约",
"紅": "红",
"紆": "纡",
"紇": "纥",
"紈": "纨",
"紉": "纫",
"紋": "纹",
"納": "纳",
"紐": "纽",
"紓": "纾",
"純": "纯",
"紕": "纰",
"紖": "纼",
"紗": "纱",
"紘": "纮",
"紙": "纸",
"級": "级",
"紛": "纷",
"紜": "纭",
"紝": "纴",
"紡": "纺",
"紬": "䌷",
"細": "细",
"紱": "绂",
"紲": "绁",
"紳": "绅",
"紵": "纻",
"紹": "绍",
"紺": "绀",
"紼": "绋",
"紿": "绐",
"絀": "绌",
"終": "终",
"組": "组",
"絅": "䌹",
"絆": "绊",
"絎": "绗",
"結": "结",
"絕": "绝",
"絛": "绦",
"絝": "绔",
"絞": "绞",
"絡": "络",
"絢": "绚",
"給": "给",
"絨": "绒",
"絰": "绖",
"統": "统",
"絲": "丝",
"絳": "绛",
"絶": "绝",
"絹": "绢",
"綁": "绑",
"綃": "绡",
"綆": "绠",
"綈": "绨",
"綉": "绣",
"綌": "绤",
"綏": "绥",
"綐": "䌼",
"經": "经",
"綜": "综",
"綞": "缍",
"綠": "绿",
"綢": "绸",
"綣": "绻",
"綫": "线",
"綬": "绶",
"維": "维",
"綯": "绹",
"綰": "绾",
"綱": "纲",
"網": "网",
"綳": "绷",
"綴": "缀",
"綵": "䌽",
"綸": "纶",
"綹": "绺",
"綺": "绮",
"綻": "绽",
"綽": "绰",
"綾": "绫",
"綿": "绵",
"緄": "绲",
"緇": "缁",
"緊": "紧",
"緋": "绯",
"緑": "绿",
"緒": "绪",
"緓": "绬",
"緔": "绱",
"緗": "缃",
"緘": "缄",
"緙": "缂",
"線": "线",
"緝": "缉",
"緞": "缎",
"締": "缔",
"緡": "缗",
"緣": "缘",
"緦": "缌",
"編": "编",
"緩": "缓",
"緬": "缅",
"緯": "纬",
"緱": "缑",
"緲": "缈",
"練": "练",
"緶": "缏",
"緹": "缇",
"緻": "致",
"縈": "萦",
"縉": "缙",
"縊": "缢",
"縋": "缒",
"縐": "绉",
"縑": "缣",
"縕": "缊",
"縗": "缞",
"縛": "缚",
"縝": "缜",
"縞": "缟",
"縟": "缛",
"縣": "县",
"縧": "绦",
"縫": "缝",
"縭": "缡",
"縮": "缩",
"縱": "纵",
"縲": "缧",
"縳": "䌸",
"縴": "纤",
"縵": "缦",
"縶": "絷",
"縷": "缕",
"縹": "缥",
"總": "总",
"績": "绩",
"繃": "绷",
"繅": "缫",
"繆": "缪",
"繒": "缯",
"織": "织",
"繕": "缮",
"繚": "缭",
"繞": "绕",
"繡": "绣",
"繢": "缋",
"繩": "绳",
"繪": "绘",
"繫": "系",
"繭": "茧",
"繮": "缰",
"繯": "缳",
"繰": "缲",
"繳": "缴",
"繸": "䍁",
"繹": "绎",
"繼": "继",
"繽": "缤",
"繾": "缱",
"繿": "䍀",
"纈": "缬",
"纊": "纩",
"續": "续",
"纍": "累",
"纏": "缠",
"纓": "缨",
"纔": "才",
"纖": "纤",
"纘": "缵",
"纜": "缆",
"缽": "钵",
"罈": "坛",
"罌": "罂",
"罰": "罚",
"罵": "骂",
"罷": "罢",
"羅": "罗",
"羆": "罴",
"羈": "羁",
"羋": "芈",
"羥": "羟",
"義": "义",
"習": "习",
"翹": "翘",
"耬": "耧",
"耮": "耢",
"聖": "圣",
"聞": "闻",
"聯": "联",
"聰": "聪",
"聲": "声",
"聳": "耸",
"聵": "聩",
"聶": "聂",
"職": "职",
"聹": "聍",
"聽": "听",
"聾": "聋",
"肅": "肃",
"胜": "胜",
"脅": "胁",
"脈": "脉",
"脛": "胫",
"脫": "脱",
"脹": "胀",
"腊": "腊",
"腎": "肾",
"腖": "胨",
"腡": "脶",
"腦": "脑",
"腫": "肿",
"腳": "脚",
"腸": "肠",
"膃": "腽",
"膚": "肤",
"膠": "胶",
"膩": "腻",
"膽": "胆",
"膾": "脍",
"膿": "脓",
"臉": "脸",
"臍": "脐",
"臏": "膑",
"臘": "腊",
"臚": "胪",
"臟": "脏",
"臠": "脔",
"臢": "臜",
"臥": "卧",
"臨": "临",
"臺": "台",
"與": "与",
"興": "兴",
"舉": "举",
"舊": "旧",
"艙": "舱",
"艤": "舣",
"艦": "舰",
"艫": "舻",
"艱": "艰",
"艷": "艳",
"芻": "刍",
"苧": "苎",
"苹": "苹",
"范": "范",
"茲": "兹",
"荊": "荆",
"莊": "庄",
"莖": "茎",
"莢": "荚",
"莧": "苋",
"華": "华",
"萇": "苌",
"萊": "莱",
"萬": "万",
"萵": "莴",
"葉": "叶",
"葒": "荭",
"著": "着",
"著名": "著名",
"葤": "荮",
"葦": "苇",
"葯": "药",
"葷": "荤",
"蒓": "莼",
"蒔": "莳",
"蒞": "莅",
"蒼": "苍",
"蓀": "荪",
"蓋": "盖",
"蓮": "莲",
"蓯": "苁",
"蓴": "莼",
"蓽": "荜",
"蔔": "卜",
"蔞": "蒌",
"蔣": "蒋",
"蔥": "葱",
"蔦": "茑",
"蔭": "荫",
"蕁": "荨",
"蕆": "蒇",
"蕎": "荞",
"蕒": "荬",
"蕓": "芸",
"蕕": "莸",
"蕘": "荛",
"蕢": "蒉",
"蕩": "荡",
"蕪": "芜",
"蕭": "萧",
"蕷": "蓣",
"薀": "蕰",
"薈": "荟",
"薊": "蓟",
"薌": "芗",
"薔": "蔷",
"薘": "荙",
"薟": "莶",
"薦": "荐",
"薩": "萨",
"薳": "䓕",
"薴": "苧",
"薺": "荠",
"藉": "借",
"藍": "蓝",
"藎": "荩",
"藝": "艺",
"藥": "药",
"藪": "薮",
"藴": "蕴",
"藶": "苈",
"藹": "蔼",
"藺": "蔺",
"蘄": "蕲",
"蘆": "芦",
"蘇": "苏",
"蘊": "蕴",
"蘋": "苹",
"蘚": "藓",
"蘞": "蔹",
"蘢": "茏",
"蘭": "兰",
"蘺": "蓠",
"蘿": "萝",
"虆": "蔂",
"處": "处",
"虛": "虚",
"虜": "虏",
"號": "号",
"虧": "亏",
"虫": "虫",
"虯": "虬",
"蛺": "蛱",
"蛻": "蜕",
"蜆": "蚬",
"蜡": "蜡",
"蝕": "蚀",
"蝟": "猬",
"蝦": "虾",
"蝸": "蜗",
"螄": "蛳",
"螞": "蚂",
"螢": "萤",
"螮": "䗖",
"螻": "蝼",
"螿": "螀",
"蟄": "蛰",
"蟈": "蝈",
"蟎": "螨",
"蟣": "虮",
"蟬": "蝉",
"蟯": "蛲",
"蟲": "虫",
"蟶": "蛏",
"蟻": "蚁",
"蠅": "蝇",
"蠆": "虿",
"蠐": "蛴",
"蠑": "蝾",
"蠟": "蜡",
"蠣": "蛎",
"蠨": "蟏",
"蠱": "蛊",
"蠶": "蚕",
"蠻": "蛮",
"衆": "众",
"衊": "蔑",
"術": "术",
"衕": "同",
"衚": "胡",
"衛": "卫",
"衝": "冲",
"衹": "只",
"袞": "衮",
"裊": "袅",
"裏": "里",
"補": "补",
"裝": "装",
"裡": "里",
"製": "制",
"複": "复",
"褌": "裈",
"褘": "袆",
"褲": "裤",
"褳": "裢",
"褸": "褛",
"褻": "亵",
"襇": "裥",
"襏": "袯",
"襖": "袄",
"襝": "裣",
"襠": "裆",
"襤": "褴",
"襪": "袜",
"襬": "䙓",
"襯": "衬",
"襲": "袭",
"覆": "复",
"覆蓋": "覆盖",
"翻來覆去": "翻来覆去",
"見": "见",
"覎": "觃",
"規": "规",
"覓": "觅",
"視": "视",
"覘": "觇",
"覡": "觋",
"覥": "觍",
"覦": "觎",
"親": "亲",
"覬": "觊",
"覯": "觏",
"覲": "觐",
"覷": "觑",
"覺": "觉",
"覽": "览",
"覿": "觌",
"觀": "观",
"觴": "觞",
"觶": "觯",
"觸": "触",
"訁": "讠",
"訂": "订",
"訃": "讣",
"計": "计",
"訊": "讯",
"訌": "讧",
"討": "讨",
"訐": "讦",
"訒": "讱",
"訓": "训",
"訕": "讪",
"訖": "讫",
"託": "讬",
"記": "记",
"訛": "讹",
"訝": "讶",
"訟": "讼",
"訢": "䜣",
"訣": "诀",
"訥": "讷",
"訩": "讻",
"訪": "访",
"設": "设",
"許": "许",
"訴": "诉",
"訶": "诃",
"診": "诊",
"註": "注",
"詁": "诂",
"詆": "诋",
"詎": "讵",
"詐": "诈",
"詒": "诒",
"詔": "诏",
"評": "评",
"詖": "诐",
"詗": "诇",
"詘": "诎",
"詛": "诅",
"詞": "词",
"詠": "咏",
"詡": "诩",
"詢": "询",
"詣": "诣",
"試": "试",
"詩": "诗",
"詫": "诧",
"詬": "诟",
"詭": "诡",
"詮": "诠",
"詰": "诘",
"話": "话",
"該": "该",
"詳": "详",
"詵": "诜",
"詼": "诙",
"詿": "诖",
"誄": "诔",
"誅": "诛",
"誆": "诓",
"誇": "夸",
"誌": "志",
"認": "认",
"誑": "诳",
"誒": "诶",
"誕": "诞",
"誘": "诱",
"誚": "诮",
"語": "语",
"誠": "诚",
"誡": "诫",
"誣": "诬",
"誤": "误",
"誥": "诰",
"誦": "诵",
"誨": "诲",
"說": "说",
"説": "说",
"誰": "谁",
"課": "课",
"誶": "谇",
"誹": "诽",
"誼": "谊",
"誾": "訚",
"調": "调",
"諂": "谄",
"諄": "谆",
"談": "谈",
"諉": "诿",
"請": "请",
"諍": "诤",
"諏": "诹",
"諑": "诼",
"諒": "谅",
"論": "论",
"諗": "谂",
"諛": "谀",
"諜": "谍",
"諝": "谞",
"諞": "谝",
"諢": "诨",
"諤": "谔",
"諦": "谛",
"諧": "谐",
"諫": "谏",
"諭": "谕",
"諮": "谘",
"諱": "讳",
"諳": "谙",
"諶": "谌",
"諷": "讽",
"諸": "诸",
"諺": "谚",
"諼": "谖",
"諾": "诺",
"謀": "谋",
"謁": "谒",
"謂": "谓",
"謄": "誊",
"謅": "诌",
"謊": "谎",
"謎": "谜",
"謐": "谧",
"謔": "谑",
"謖": "谡",
"謗": "谤",
"謙": "谦",
"謚": "谥",
"講": "讲",
"謝": "谢",
"謠": "谣",
"謡": "谣",
"謨": "谟",
"謫": "谪",
"謬": "谬",
"謭": "谫",
"謳": "讴",
"謹": "谨",
"謾": "谩",
"譅": "䜧",
"證": "证",
"譎": "谲",
"譏": "讥",
"譖": "谮",
"識": "识",
"譙": "谯",
"譚": "谭",
"譜": "谱",
"譫": "谵",
"譯": "译",
"議": "议",
"譴": "谴",
"護": "护",
"譸": "诪",
"譽": "誉",
"譾": "谫",
"讀": "读",
"變": "变",
"讎": "仇",
"讎": "雠",
"讒": "谗",
"讓": "让",
"讕": "谰",
"讖": "谶",
"讜": "谠",
"讞": "谳",
"豈": "岂",
"豎": "竖",
"豐": "丰",
"豬": "猪",
"豶": "豮",
"貓": "猫",
"貙": "䝙",
"貝": "贝",
"貞": "贞",
"貟": "贠",
"負": "负",
"財": "财",
"貢": "贡",
"貧": "贫",
"貨": "货",
"販": "贩",
"貪": "贪",
"貫": "贯",
"責": "责",
"貯": "贮",
"貰": "贳",
"貲": "赀",
"貳": "贰",
"貴": "贵",
"貶": "贬",
"買": "买",
"貸": "贷",
"貺": "贶",
"費": "费",
"貼": "贴",
"貽": "贻",
"貿": "贸",
"賀": "贺",
"賁": "贲",
"賂": "赂",
"賃": "赁",
"賄": "贿",
"賅": "赅",
"資": "资",
"賈": "贾",
"賊": "贼",
"賑": "赈",
"賒": "赊",
"賓": "宾",
"賕": "赇",
"賙": "赒",
"賚": "赉",
"賜": "赐",
"賞": "赏",
"賠": "赔",
"賡": "赓",
"賢": "贤",
"賣": "卖",
"賤": "贱",
"賦": "赋",
"賧": "赕",
"質": "质",
"賫": "赍",
"賬": "账",
"賭": "赌",
"賰": "䞐",
"賴": "赖",
"賵": "赗",
"賺": "赚",
"賻": "赙",
"購": "购",
"賽": "赛",
"賾": "赜",
"贄": "贽",
"贅": "赘",
"贇": "赟",
"贈": "赠",
"贊": "赞",
"贋": "赝",
"贍": "赡",
"贏": "赢",
"贐": "赆",
"贓": "赃",
"贔": "赑",
"贖": "赎",
"贗": "赝",
"贛": "赣",
"贜": "赃",
"赬": "赪",
"趕": "赶",
"趙": "赵",
"趨": "趋",
"趲": "趱",
"跡": "迹",
"踐": "践",
"踴": "踊",
"蹌": "跄",
"蹕": "跸",
"蹣": "蹒",
"蹤": "踪",
"蹺": "跷",
"躂": "跶",
"躉": "趸",
"躊": "踌",
"躋": "跻",
"躍": "跃",
"躑": "踯",
"躒": "跞",
"躓": "踬",
"躕": "蹰",
"躚": "跹",
"躡": "蹑",
"躥": "蹿",
"躦": "躜",
"躪": "躏",
"軀": "躯",
"車": "车",
"軋": "轧",
"軌": "轨",
"軍": "军",
"軑": "轪",
"軒": "轩",
"軔": "轫",
"軛": "轭",
"軟": "软",
"軤": "轷",
"軫": "轸",
"軲": "轱",
"軸": "轴",
"軹": "轵",
"軺": "轺",
"軻": "轲",
"軼": "轶",
"軾": "轼",
"較": "较",
"輅": "辂",
"輇": "辁",
"輈": "辀",
"載": "载",
"輊": "轾",
"輒": "辄",
"輓": "挽",
"輔": "辅",
"輕": "轻",
"輛": "辆",
"輜": "辎",
"輝": "辉",
"輞": "辋",
"輟": "辍",
"輥": "辊",
"輦": "辇",
"輩": "辈",
"輪": "轮",
"輬": "辌",
"輯": "辑",
"輳": "辏",
"輸": "输",
"輻": "辐",
"輾": "辗",
"輿": "舆",
"轀": "辒",
"轂": "毂",
"轄": "辖",
"轅": "辕",
"轆": "辘",
"轉": "转",
"轍": "辙",
"轎": "轿",
"轔": "辚",
"轟": "轰",
"轡": "辔",
"轢": "轹",
"轤": "轳",
"辟": "辟",
"辦": "办",
"辭": "辞",
"辮": "辫",
"辯": "辩",
"農": "农",
"迴": "回",
"适": "适",
"逕": "迳",
"這": "这",
"連": "连",
"週": "周",
"進": "进",
"遊": "游",
"運": "运",
"過": "过",
"達": "达",
"違": "违",
"遙": "遥",
"遜": "逊",
"遞": "递",
"遠": "远",
"適": "适",
"遲": "迟",
"遷": "迁",
"選": "选",
"遺": "遗",
"遼": "辽",
"邁": "迈",
"還": "还",
"邇": "迩",
"邊": "边",
"邏": "逻",
"邐": "逦",
"郁": "郁",
"郟": "郏",
"郵": "邮",
"鄆": "郓",
"鄉": "乡",
"鄒": "邹",
"鄔": "邬",
"鄖": "郧",
"鄧": "邓",
"鄭": "郑",
"鄰": "邻",
"鄲": "郸",
"鄴": "邺",
"鄶": "郐",
"鄺": "邝",
"酇": "酂",
"酈": "郦",
"醖": "酝",
"醜": "丑",
"醞": "酝",
"醫": "医",
"醬": "酱",
"醱": "酦",
"釀": "酿",
"釁": "衅",
"釃": "酾",
"釅": "酽",
"采": "采",
"釋": "释",
"釐": "厘",
"釒": "钅",
"釓": "钆",
"釔": "钇",
"釕": "钌",
"釗": "钊",
"釘": "钉",
"釙": "钋",
"針": "针",
"釣": "钓",
"釤": "钐",
"釧": "钏",
"釩": "钒",
"釵": "钗",
"釷": "钍",
"釹": "钕",
"釺": "钎",
"鈀": "钯",
"鈁": "钫",
"鈃": "钘",
"鈄": "钭",
"鈈": "钚",
"鈉": "钠",
"鈍": "钝",
"鈎": "钩",
"鈐": "钤",
"鈑": "钣",
"鈒": "钑",
"鈔": "钞",
"鈕": "钮",
"鈞": "钧",
"鈣": "钙",
"鈥": "钬",
"鈦": "钛",
"鈧": "钪",
"鈮": "铌",
"鈰": "铈",
"鈳": "钶",
"鈴": "铃",
"鈷": "钴",
"鈸": "钹",
"鈹": "铍",
"鈺": "钰",
"鈽": "钸",
"鈾": "铀",
"鈿": "钿",
"鉀": "钾",
"鉅": "钜",
"鉈": "铊",
"鉉": "铉",
"鉋": "铇",
"鉍": "铋",
"鉑": "铂",
"鉕": "钷",
"鉗": "钳",
"鉚": "铆",
"鉛": "铅",
"鉞": "钺",
"鉢": "钵",
"鉤": "钩",
"鉦": "钲",
"鉬": "钼",
"鉭": "钽",
"鉶": "铏",
"鉸": "铰",
"鉺": "铒",
"鉻": "铬",
"鉿": "铪",
"銀": "银",
"銃": "铳",
"銅": "铜",
"銍": "铚",
"銑": "铣",
"銓": "铨",
"銖": "铢",
"銘": "铭",
"銚": "铫",
"銛": "铦",
"銜": "衔",
"銠": "铑",
"銣": "铷",
"銥": "铱",
"銦": "铟",
"銨": "铵",
"銩": "铥",
"銪": "铕",
"銫": "铯",
"銬": "铐",
"銱": "铞",
"銳": "锐",
"銷": "销",
"銹": "锈",
"銻": "锑",
"銼": "锉",
"鋁": "铝",
"鋃": "锒",
"鋅": "锌",
"鋇": "钡",
"鋌": "铤",
"鋏": "铗",
"鋒": "锋",
"鋙": "铻",
"鋝": "锊",
"鋟": "锓",
"鋣": "铘",
"鋤": "锄",
"鋥": "锃",
"鋦": "锔",
"鋨": "锇",
"鋩": "铓",
"鋪": "铺",
"鋭": "锐",
"鋮": "铖",
"鋯": "锆",
"鋰": "锂",
"鋱": "铽",
"鋶": "锍",
"鋸": "锯",
"鋼": "钢",
"錁": "锞",
"錄": "录",
"錆": "锖",
"錇": "锫",
"錈": "锩",
"錏": "铔",
"錐": "锥",
"錒": "锕",
"錕": "锟",
"錘": "锤",
"錙": "锱",
"錚": "铮",
"錛": "锛",
"錟": "锬",
"錠": "锭",
"錡": "锜",
"錢": "钱",
"錦": "锦",
"錨": "锚",
"錩": "锠",
"錫": "锡",
"錮": "锢",
"錯": "错",
"録": "录",
"錳": "锰",
"錶": "表",
"錸": "铼",
"鍀": "锝",
"鍁": "锨",
"鍃": "锪",
"鍆": "钔",
"鍇": "锴",
"鍈": "锳",
"鍋": "锅",
"鍍": "镀",
"鍔": "锷",
"鍘": "铡",
"鍚": "钖",
"鍛": "锻",
"鍠": "锽",
"鍤": "锸",
"鍥": "锲",
"鍩": "锘",
"鍬": "锹",
"鍰": "锾",
"鍵": "键",
"鍶": "锶",
"鍺": "锗",
"鍾": "钟",
"鎂": "镁",
"鎄": "锿",
"鎇": "镅",
"鎊": "镑",
"鎔": "镕",
"鎖": "锁",
"鎘": "镉",
"鎚": "锤",
"鎛": "镈",
"鎝": "ð¨±",
"鎡": "镃",
"鎢": "钨",
"鎣": "蓥",
"鎦": "镏",
"鎧": "铠",
"鎩": "铩",
"鎪": "锼",
"鎬": "镐",
"鎮": "镇",
"鎰": "镒",
"鎲": "镋",
"鎳": "镍",
"鎵": "镓",
"鎸": "镌",
"鎿": "镎",
"鏃": "镞",
"鏇": "镟",
"鏈": "链",
"鏌": "镆",
"鏍": "镙",
"鏐": "镠",
"鏑": "镝",
"鏗": "铿",
"鏘": "锵",
"鏜": "镗",
"鏝": "镘",
"鏞": "镛",
"鏟": "铲",
"鏡": "镜",
"鏢": "镖",
"鏤": "镂",
"鏨": "錾",
"鏰": "镚",
"鏵": "铧",
"鏷": "镤",
"鏹": "镪",
"鏽": "锈",
"鐃": "铙",
"鐋": "铴",
"鐐": "镣",
"鐒": "铹",
"鐓": "镦",
"鐔": "镡",
"鐘": "钟",
"鐙": "镫",
"鐝": "镢",
"鐠": "镨",
"鐦": "锎",
"鐧": "锏",
"鐨": "镄",
"鐫": "镌",
"鐮": "镰",
"鐲": "镯",
"鐳": "镭",
"鐵": "铁",
"鐶": "镮",
"鐸": "铎",
"鐺": "铛",
"鐿": "镱",
"鑄": "铸",
"鑊": "镬",
"鑌": "镔",
"鑒": "鉴",
"鑔": "镲",
"鑕": "锧",
"鑞": "镴",
"鑠": "铄",
"鑣": "镳",
"鑥": "镥",
"鑭": "镧",
"鑰": "钥",
"鑱": "镵",
"鑲": "镶",
"鑷": "镊",
"鑹": "镩",
"鑼": "锣",
"鑽": "钻",
"鑾": "銮",
"鑿": "凿",
"钁": "镢",
"镟": "旋",
"長": "长",
"門": "门",
"閂": "闩",
"閃": "闪",
"閆": "闫",
"閈": "闬",
"閉": "闭",
"開": "开",
"閌": "闶",
"閎": "闳",
"閏": "闰",
"閑": "闲",
"間": "间",
"閔": "闵",
"閘": "闸",
"閡": "阂",
"閣": "阁",
"閤": "合",
"閥": "阀",
"閨": "闺",
"閩": "闽",
"閫": "阃",
"閬": "阆",
"閭": "闾",
"閱": "阅",
"閲": "阅",
"閶": "阊",
"閹": "阉",
"閻": "阎",
"閼": "阏",
"閽": "阍",
"閾": "阈",
"閿": "阌",
"闃": "阒",
"闆": "板",
"闈": "闱",
"闊": "阔",
"闋": "阕",
"闌": "阑",
"闍": "阇",
"闐": "阗",
"闒": "阘",
"闓": "闿",
"闔": "阖",
"闕": "阙",
"闖": "闯",
"關": "关",
"闞": "阚",
"闠": "阓",
"闡": "阐",
"闤": "阛",
"闥": "闼",
"阪": "坂",
"陘": "陉",
"陝": "陕",
"陣": "阵",
"陰": "阴",
"陳": "陈",
"陸": "陆",
"陽": "阳",
"隉": "陧",
"隊": "队",
"階": "阶",
"隕": "陨",
"際": "际",
"隨": "随",
"險": "险",
"隱": "隐",
"隴": "陇",
"隸": "隶",
"隻": "只",
"雋": "隽",
"雖": "虽",
"雙": "双",
"雛": "雏",
"雜": "杂",
"雞": "鸡",
"離": "离",
"難": "难",
"雲": "云",
"電": "电",
"霢": "霡",
"霧": "雾",
"霽": "霁",
"靂": "雳",
"靄": "霭",
"靈": "灵",
"靚": "靓",
"靜": "静",
"靨": "靥",
"鞀": "鼗",
"鞏": "巩",
"鞝": "绱",
"鞦": "秋",
"鞽": "鞒",
"韁": "缰",
"韃": "鞑",
"韆": "千",
"韉": "鞯",
"韋": "韦",
"韌": "韧",
"韍": "韨",
"韓": "韩",
"韙": "韪",
"韜": "韬",
"韞": "韫",
"韻": "韵",
"響": "响",
"頁": "页",
"頂": "顶",
"頃": "顷",
"項": "项",
"順": "顺",
"頇": "顸",
"須": "须",
"頊": "顼",
"頌": "颂",
"頎": "颀",
"頏": "颃",
"預": "预",
"頑": "顽",
"頒": "颁",
"頓": "顿",
"頗": "颇",
"領": "领",
"頜": "颌",
"頡": "颉",
"頤": "颐",
"頦": "颏",
"頭": "头",
"頮": "颒",
"頰": "颊",
"頲": "颋",
"頴": "颕",
"頷": "颔",
"頸": "颈",
"頹": "颓",
"頻": "频",
"頽": "颓",
"顆": "颗",
"題": "题",
"額": "额",
"顎": "颚",
"顏": "颜",
"顒": "颙",
"顓": "颛",
"顔": "颜",
"願": "愿",
"顙": "颡",
"顛": "颠",
"類": "类",
"顢": "颟",
"顥": "颢",
"顧": "顾",
"顫": "颤",
"顬": "颥",
"顯": "显",
"顰": "颦",
"顱": "颅",
"顳": "颞",
"顴": "颧",
"風": "风",
"颭": "飐",
"颮": "飑",
"颯": "飒",
"颱": "台",
"颳": "刮",
"颶": "飓",
"颸": "飔",
"颺": "飏",
"颻": "飖",
"颼": "飕",
"飀": "飗",
"飄": "飘",
"飆": "飙",
"飈": "飚",
"飛": "飞",
"飠": "饣",
"飢": "饥",
"飣": "饤",
"飥": "饦",
"飩": "饨",
"飪": "饪",
"飫": "饫",
"飭": "饬",
"飯": "饭",
"飲": "饮",
"飴": "饴",
"飼": "饲",
"飽": "饱",
"飾": "饰",
"飿": "饳",
"餃": "饺",
"餄": "饸",
"餅": "饼",
"餉": "饷",
"養": "养",
"餌": "饵",
"餎": "饹",
"餏": "饻",
"餑": "饽",
"餒": "馁",
"餓": "饿",
"餕": "馂",
"餖": "饾",
"餚": "肴",
"餛": "馄",
"餜": "馃",
"餞": "饯",
"餡": "馅",
"館": "馆",
"餱": "糇",
"餳": "饧",
"餶": "馉",
"餷": "馇",
"餺": "馎",
"餼": "饩",
"餾": "馏",
"餿": "馊",
"饁": "馌",
"饃": "馍",
"饅": "馒",
"饈": "馐",
"饉": "馑",
"饊": "馓",
"饋": "馈",
"饌": "馔",
"饑": "饥",
"饒": "饶",
"饗": "飨",
"饜": "餍",
"饞": "馋",
"饢": "馕",
"馬": "马",
"馭": "驭",
"馮": "冯",
"馱": "驮",
"馳": "驰",
"馴": "驯",
"馹": "驲",
"駁": "驳",
"駐": "驻",
"駑": "驽",
"駒": "驹",
"駔": "驵",
"駕": "驾",
"駘": "骀",
"駙": "驸",
"駛": "驶",
"駝": "驼",
"駟": "驷",
"駡": "骂",
"駢": "骈",
"駭": "骇",
"駰": "骃",
"駱": "骆",
"駸": "骎",
"駿": "骏",
"騁": "骋",
"騂": "骍",
"騅": "骓",
"騌": "骔",
"騍": "骒",
"騎": "骑",
"騏": "骐",
"騖": "骛",
"騙": "骗",
"騤": "骙",
"騧": "䯄",
"騫": "骞",
"騭": "骘",
"騮": "骝",
"騰": "腾",
"騶": "驺",
"騷": "骚",
"騸": "骟",
"騾": "骡",
"驀": "蓦",
"驁": "骜",
"驂": "骖",
"驃": "骠",
"驄": "骢",
"驅": "驱",
"驊": "骅",
"驌": "骕",
"驍": "骁",
"驏": "骣",
"驕": "骄",
"驗": "验",
"驚": "惊",
"驛": "驿",
"驟": "骤",
"驢": "驴",
"驤": "骧",
"驥": "骥",
"驦": "骦",
"驪": "骊",
"驫": "骉",
"骯": "肮",
"髏": "髅",
"髒": "脏",
"體": "体",
"髕": "髌",
"髖": "髋",
"髮": "发",
"鬆": "松",
"鬍": "胡",
"鬚": "须",
"鬢": "鬓",
"鬥": "斗",
"鬧": "闹",
"鬩": "阋",
"鬮": "阄",
"鬱": "郁",
"魎": "魉",
"魘": "魇",
"魚": "鱼",
"魛": "鱽",
"魢": "鱾",
"魨": "鲀",
"魯": "鲁",
"魴": "鲂",
"魷": "鱿",
"魺": "鲄",
"鮁": "鲅",
"鮃": "鲆",
"鮊": "鲌",
"鮋": "鲉",
"鮍": "鲏",
"鮎": "鲇",
"鮐": "鲐",
"鮑": "鲍",
"鮒": "鲋",
"鮓": "鲊",
"鮚": "鲒",
"鮜": "鲘",
"鮝": "鲞",
"鮞": "鲕",
"鮦": "鲖",
"鮪": "鲔",
"鮫": "鲛",
"鮭": "鲑",
"鮮": "鲜",
"鮳": "鲓",
"鮶": "鲪",
"鮺": "鲝",
"鯀": "鲧",
"鯁": "鲠",
"鯇": "鲩",
"鯉": "鲤",
"鯊": "鲨",
"鯒": "鲬",
"鯔": "鲻",
"鯕": "鲯",
"鯖": "鲭",
"鯗": "鲞",
"鯛": "鲷",
"鯝": "鲴",
"鯡": "鲱",
"鯢": "鲵",
"鯤": "鲲",
"鯧": "鲳",
"鯨": "鲸",
"鯪": "鲮",
"鯫": "鲰",
"鯴": "鲺",
"鯷": "鳀",
"鯽": "鲫",
"鯿": "鳊",
"鰁": "鳈",
"鰂": "鲗",
"鰃": "鳂",
"鰈": "鲽",
"鰉": "鳇",
"鰍": "鳅",
"鰏": "鲾",
"鰐": "鳄",
"鰒": "鳆",
"鰓": "鳃",
"鰜": "鳒",
"鰟": "鳑",
"鰠": "鳋",
"鰣": "鲥",
"鰥": "鳏",
"鰨": "鳎",
"鰩": "鳐",
"鰭": "鳍",
"鰮": "鳁",
"鰱": "鲢",
"鰲": "鳌",
"鰳": "鳓",
"鰵": "鳘",
"鰷": "鲦",
"鰹": "鲣",
"鰺": "鲹",
"鰻": "鳗",
"鰼": "鳛",
"鰾": "鳔",
"鱂": "鳉",
"鱅": "鳙",
"鱈": "鳕",
"鱉": "鳖",
"鱒": "鳟",
"鱔": "鳝",
"鱖": "鳜",
"鱗": "鳞",
"鱘": "鲟",
"鱝": "鲼",
"鱟": "鲎",
"鱠": "鲙",
"鱣": "鳣",
"鱤": "鳡",
"鱧": "鳢",
"鱨": "鲿",
"鱭": "鲚",
"鱯": "鳠",
"鱷": "鳄",
"鱸": "鲈",
"鱺": "鲡",
"䰾": "鲃",
"䲁": "鳚",
"鳥": "鸟",
"鳧": "凫",
"鳩": "鸠",
"鳬": "凫",
"鳲": "鸤",
"鳳": "凤",
"鳴": "鸣",
"鳶": "鸢",
"鳾": "䴓",
"鴆": "鸩",
"鴇": "鸨",
"鴉": "鸦",
"鴒": "鸰",
"鴕": "鸵",
"鴛": "鸳",
"鴝": "鸲",
"鴞": "鸮",
"鴟": "鸱",
"鴣": "鸪",
"鴦": "鸯",
"鴨": "鸭",
"鴯": "鸸",
"鴰": "鸹",
"鴴": "鸻",
"鴷": "䴕",
"鴻": "鸿",
"鴿": "鸽",
"鵁": "䴔",
"鵂": "鸺",
"鵃": "鸼",
"鵐": "鹀",
"鵑": "鹃",
"鵒": "鹆",
"鵓": "鹁",
"鵜": "鹈",
"鵝": "鹅",
"鵠": "鹄",
"鵡": "鹉",
"鵪": "鹌",
"鵬": "鹏",
"鵮": "鹐",
"鵯": "鹎",
"鵲": "鹊",
"鵷": "鹓",
"鵾": "鹍",
"鶄": "䴖",
"鶇": "鸫",
"鶉": "鹑",
"鶊": "鹒",
"鶓": "鹋",
"鶖": "鹙",
"鶘": "鹕",
"鶚": "鹗",
"鶡": "鹖",
"鶥": "鹛",
"鶩": "鹜",
"鶪": "䴗",
"鶬": "鸧",
"鶯": "莺",
"鶲": "鹟",
"鶴": "鹤",
"鶹": "鹠",
"鶺": "鹡",
"鶻": "鹘",
"鶼": "鹣",
"鶿": "鹚",
"鷀": "鹚",
"鷁": "鹢",
"鷂": "鹞",
"鷄": "鸡",
"鷈": "䴘",
"鷊": "鹝",
"鷓": "鹧",
"鷖": "鹥",
"鷗": "鸥",
"鷙": "鸷",
"鷚": "鹨",
"鷥": "鸶",
"鷦": "鹪",
"鷫": "鹔",
"鷯": "鹩",
"鷲": "鹫",
"鷳": "鹇",
"鷸": "鹬",
"鷹": "鹰",
"鷺": "鹭",
"鷽": "鸴",
"鷿": "䴙",
"鸂": "㶉",
"鸇": "鹯",
"鸌": "鹱",
"鸏": "鹲",
"鸕": "鸬",
"鸘": "鹴",
"鸚": "鹦",
"鸛": "鹳",
"鸝": "鹂",
"鸞": "鸾",
"鹵": "卤",
"鹹": "咸",
"鹺": "鹾",
"鹽": "盐",
"麗": "丽",
"麥": "麦",
"麩": "麸",
"麯": "曲",
"麵": "面",
"麼": "么",
"麽": "么",
"黃": "黄",
"黌": "黉",
"點": "点",
"黨": "党",
"黲": "黪",
"黴": "霉",
"黶": "黡",
"黷": "黩",
"黽": "黾",
"黿": "鼋",
"鼉": "鼍",
"鼕": "冬",
"鼴": "鼹",
"齊": "齐",
"齋": "斋",
"齎": "赍",
"齏": "齑",
"齒": "齿",
"齔": "龀",
"齕": "龁",
"齗": "龂",
"齙": "龅",
"齜": "龇",
"齟": "龃",
"齠": "龆",
"齡": "龄",
"齣": "出",
"齦": "龈",
"齪": "龊",
"齬": "龉",
"齲": "龋",
"齶": "腭",
"齷": "龌",
"龍": "龙",
"龎": "厐",
"龐": "庞",
"龔": "龚",
"龕": "龛",
"龜": "龟",
"幾畫": "几画",
"賣畫": "卖画",
"滷鹼": "卤碱",
"原畫": "原画",
"口鹼": "口碱",
"古畫": "古画",
"名畫": "名画",
"奇畫": "奇画",
"如畫": "如画",
"弱鹼": "弱碱",
"彩畫": "彩画",
"所畫": "所画",
"扉畫": "扉画",
"教畫": "教画",
"水鹼": "水碱",
"洋鹼": "洋碱",
"炭畫": "炭画",
"畫一": "画一",
"畫上": "画上",
"畫下": "画下",
"畫中": "画中",
"畫供": "画供",
"畫兒": "画儿",
"畫具": "画具",
"畫出": "画出",
"畫史": "画史",
"畫品": "画品",
"畫商": "画商",
"畫圈": "画圈",
"畫境": "画境",
"畫工": "画工",
"畫帖": "画帖",
"畫幅": "画幅",
"畫意": "画意",
"畫成": "画成",
"畫景": "画景",
"畫本": "画本",
"畫架": "画架",
"畫框": "画框",
"畫法": "画法",
"畫王": "画王",
"畫界": "画界",
"畫符": "画符",
"畫紙": "画纸",
"畫線": "画线",
"畫航": "画航",
"畫舫": "画舫",
"畫虎": "画虎",
"畫論": "画论",
"畫譜": "画谱",
"畫象": "画象",
"畫質": "画质",
"畫貼": "画贴",
"畫軸": "画轴",
"畫頁": "画页",
"鹽鹼": "盐碱",
"鹼": "碱",
"鹼基": "碱基",
"鹼度": "碱度",
"鹼水": "碱水",
"鹼熔": "碱熔",
"磁畫": "磁画",
"策畫": "策画",
"組畫": "组画",
"絹畫": "绢画",
"耐鹼": "耐碱",
"肉鹼": "肉碱",
"膠畫": "胶画",
"茶鹼": "茶碱",
"西畫": "西画",
"貼畫": "贴画",
"返鹼": "返碱",
"鍾鍛": "锺锻",
"鍛鍾": "锻锺",
"雕畫": "雕画",
"鯰": "鲶",
"三聯畫": "三联画",
"中國畫": "中国画",
"書畫": "书画",
"書畫社": "书画社",
"五筆畫": "五笔画",
"作畫": "作画",
"入畫": "入画",
"寫生畫": "写生画",
"刻畫": "刻画",
"動畫": "动画",
"勾畫": "勾画",
"單色畫": "单色画",
"卡通畫": "卡通画",
"國畫": "国画",
"圖畫": "图画",
"壁畫": "壁画",
"字畫": "字画",
"宣傳畫": "宣传画",
"工筆畫": "工笔画",
"年畫": "年画",
"幽默畫": "幽默画",
"指畫": "指画",
"描畫": "描画",
"插畫": "插画",
"擘畫": "擘画",
"春畫": "春画",
"木刻畫": "木刻画",
"機械畫": "机械画",
"比畫": "比画",
"毛筆畫": "毛笔画",
"水粉畫": "水粉画",
"油畫": "油画",
"海景畫": "海景画",
"漫畫": "漫画",
"點畫": "点画",
"版畫": "版画",
"畫": "画",
"畫像": "画像",
"畫冊": "画册",
"畫刊": "画刊",
"畫匠": "画匠",
"畫捲": "画卷",
"畫圖": "画图",
"畫壇": "画坛",
"畫室": "画室",
"畫家": "画家",
"畫屏": "画屏",
"畫展": "画展",
"畫布": "画布",
"畫師": "画师",
"畫廊": "画廊",
"畫報": "画报",
"畫押": "画押",
"畫板": "画板",
"畫片": "画片",
"畫畫": "画画",
"畫皮": "画皮",
"畫眉鳥": "画眉鸟",
"畫稿": "画稿",
"畫筆": "画笔",
"畫院": "画院",
"畫集": "画集",
"畫面": "画面",
"筆畫": "笔画",
"細密畫": "细密画",
"繪畫": "绘画",
"自畫像": "自画像",
"蠟筆畫": "蜡笔画",
"裸體畫": "裸体画",
"西洋畫": "西洋画",
"透視畫": "透视画",
"銅版畫": "铜版画",
"鍾": "锺",
"靜物畫": "静物画",
"餘": "馀",
}
zh2TW = {
"缺省": "預設",
"串行": "串列",
"以太网": "乙太網",
"位图": "點陣圖",
"例程": "常式",
"信道": "通道",
"光标": "游標",
"光盘": "光碟",
"光驱": "光碟機",
"全角": "全形",
"加载": "載入",
"半角": "半形",
"变量": "變數",
"噪声": "雜訊",
"脱机": "離線",
"声卡": "音效卡",
"老字号": "老字號",
"字号": "字型大小",
"字库": "字型檔",
"字段": "欄位",
"字符": "字元",
"存盘": "存檔",
"寻址": "定址",
"尾注": "章節附註",
"异步": "非同步",
"总线": "匯流排",
"括号": "括弧",
"接口": "介面",
"控件": "控制項",
"权限": "許可權",
"盘片": "碟片",
"硅片": "矽片",
"硅谷": "矽谷",
"硬盘": "硬碟",
"磁盘": "磁碟",
"磁道": "磁軌",
"程控": "程式控制",
"端口": "埠",
"算子": "運算元",
"算法": "演算法",
"芯片": "晶片",
"芯片": "晶元",
"词组": "片語",
"译码": "解碼",
"软驱": "軟碟機",
"快闪存储器": "快閃記憶體",
"闪存": "快閃記憶體",
"鼠标": "滑鼠",
"进制": "進位",
"交互式": "互動式",
"仿真": "模擬",
"优先级": "優先順序",
"传感": "感測",
"便携式": "攜帶型",
"信息论": "資訊理論",
"写保护": "防寫",
"分布式": "分散式",
"分辨率": "解析度",
"服务器": "伺服器",
"等于": "等於",
"局域网": "區域網",
"计算机": "電腦",
"扫瞄仪": "掃瞄器",
"宽带": "寬頻",
"数据库": "資料庫",
"奶酪": "乳酪",
"巨商": "鉅賈",
"手电": "手電筒",
"万历": "萬曆",
"永历": "永曆",
"词汇": "辭彙",
"习用": "慣用",
"元音": "母音",
"任意球": "自由球",
"头球": "頭槌",
"入球": "進球",
"粒入球": "顆進球",
"打门": "射門",
"火锅盖帽": "蓋火鍋",
"打印机": "印表機",
"打印機": "印表機",
"字节": "位元組",
"字節": "位元組",
"打印": "列印",
"打印": "列印",
"硬件": "硬體",
"硬件": "硬體",
"二极管": "二極體",
"二極管": "二極體",
"三极管": "三極體",
"三極管": "三極體",
"软件": "軟體",
"軟件": "軟體",
"网络": "網路",
"網絡": "網路",
"人工智能": "人工智慧",
"航天飞机": "太空梭",
"穿梭機": "太空梭",
"因特网": "網際網路",
"互聯網": "網際網路",
"机器人": "機器人",
"機械人": "機器人",
"移动电话": "行動電話",
"流動電話": "行動電話",
"调制解调器": "數據機",
"調制解調器": "數據機",
"短信": "簡訊",
"短訊": "簡訊",
"乌兹别克斯坦": "烏茲別克",
"乍得": "查德",
"乍得": "查德",
"也门": "葉門",
"也門": "葉門",
"伯利兹": "貝里斯",
"伯利茲": "貝里斯",
"佛得角": "維德角",
"佛得角": "維德角",
"克罗地亚": "克羅埃西亞",
"克羅地亞": "克羅埃西亞",
"冈比亚": "甘比亞",
"岡比亞": "甘比亞",
"几内亚比绍": "幾內亞比索",
"幾內亞比紹": "幾內亞比索",
"列支敦士登": "列支敦斯登",
"列支敦士登": "列支敦斯登",
"利比里亚": "賴比瑞亞",
"利比里亞": "賴比瑞亞",
"加纳": "迦納",
"加納": "迦納",
"加蓬": "加彭",
"加蓬": "加彭",
"博茨瓦纳": "波札那",
"博茨瓦納": "波札那",
"卡塔尔": "卡達",
"卡塔爾": "卡達",
"卢旺达": "盧安達",
"盧旺達": "盧安達",
"危地马拉": "瓜地馬拉",
"危地馬拉": "瓜地馬拉",
"厄瓜多尔": "厄瓜多",
"厄瓜多爾": "厄瓜多",
"厄立特里亚": "厄利垂亞",
"厄立特里亞": "厄利垂亞",
"吉布提": "吉布地",
"吉布堤": "吉布地",
"哈萨克斯坦": "哈薩克",
"哥斯达黎加": "哥斯大黎加",
"哥斯達黎加": "哥斯大黎加",
"图瓦卢": "吐瓦魯",
"圖瓦盧": "吐瓦魯",
"土库曼斯坦": "土庫曼",
"圣卢西亚": "聖露西亞",
"聖盧西亞": "聖露西亞",
"圣基茨和尼维斯": "聖克里斯多福及尼維斯",
"聖吉斯納域斯": "聖克里斯多福及尼維斯",
"圣文森特和格林纳丁斯": "聖文森及格瑞那丁",
"聖文森特和格林納丁斯": "聖文森及格瑞那丁",
"圣马力诺": "聖馬利諾",
"聖馬力諾": "聖馬利諾",
"圭亚那": "蓋亞那",
"圭亞那": "蓋亞那",
"坦桑尼亚": "坦尚尼亞",
"坦桑尼亞": "坦尚尼亞",
"埃塞俄比亚": "衣索比亞",
"埃塞俄比亞": "衣索比亞",
"基里巴斯": "吉里巴斯",
"基里巴斯": "吉里巴斯",
"塔吉克斯坦": "塔吉克",
"塞拉利昂": "獅子山",
"塞拉利昂": "獅子山",
"塞浦路斯": "塞普勒斯",
"塞浦路斯": "塞普勒斯",
"塞舌尔": "塞席爾",
"塞舌爾": "塞席爾",
"多米尼加": "多明尼加",
"多明尼加共和國": "多明尼加",
"多米尼加联邦": "多米尼克",
"多明尼加聯邦": "多米尼克",
"安提瓜和巴布达": "安地卡及巴布達",
"安提瓜和巴布達": "安地卡及巴布達",
"尼日利亚": "奈及利亞",
"尼日利亞": "奈及利亞",
"尼日尔": "尼日",
"尼日爾": "尼日",
"巴巴多斯": "巴貝多",
"巴巴多斯": "巴貝多",
"巴布亚新几内亚": "巴布亞紐幾內亞",
"巴布亞新畿內亞": "巴布亞紐幾內亞",
"布基纳法索": "布吉納法索",
"布基納法索": "布吉納法索",
"布隆迪": "蒲隆地",
"布隆迪": "蒲隆地",
"希腊": "希臘",
"帕劳": "帛琉",
"意大利": "義大利",
"意大利": "義大利",
"所罗门群岛": "索羅門群島",
"所羅門群島": "索羅門群島",
"文莱": "汶萊",
"斯威士兰": "史瓦濟蘭",
"斯威士蘭": "史瓦濟蘭",
"斯洛文尼亚": "斯洛維尼亞",
"斯洛文尼亞": "斯洛維尼亞",
"新西兰": "紐西蘭",
"新西蘭": "紐西蘭",
"格林纳达": "格瑞那達",
"格林納達": "格瑞那達",
"格鲁吉亚": "喬治亞",
"格魯吉亞": "喬治亞",
"佐治亚": "喬治亞",
"佐治亞": "喬治亞",
"毛里塔尼亚": "茅利塔尼亞",
"毛里塔尼亞": "茅利塔尼亞",
"毛里求斯": "模里西斯",
"毛里裘斯": "模里西斯",
"沙特阿拉伯": "沙烏地阿拉伯",
"沙地阿拉伯": "沙烏地阿拉伯",
"波斯尼亚和黑塞哥维那": "波士尼亞赫塞哥維納",
"波斯尼亞黑塞哥維那": "波士尼亞赫塞哥維納",
"津巴布韦": "辛巴威",
"津巴布韋": "辛巴威",
"洪都拉斯": "宏都拉斯",
"洪都拉斯": "宏都拉斯",
"特立尼达和托巴哥": "千里達托貝哥",
"特立尼達和多巴哥": "千里達托貝哥",
"瑙鲁": "諾魯",
"瑙魯": "諾魯",
"瓦努阿图": "萬那杜",
"瓦努阿圖": "萬那杜",
"溫納圖": "萬那杜",
"科摩罗": "葛摩",
"科摩羅": "葛摩",
"科特迪瓦": "象牙海岸",
"突尼斯": "突尼西亞",
"索马里": "索馬利亞",
"索馬里": "索馬利亞",
"老挝": "寮國",
"老撾": "寮國",
"肯尼亚": "肯亞",
"肯雅": "肯亞",
"苏里南": "蘇利南",
"莫桑比克": "莫三比克",
"莱索托": "賴索托",
"萊索托": "賴索托",
"贝宁": "貝南",
"貝寧": "貝南",
"赞比亚": "尚比亞",
"贊比亞": "尚比亞",
"阿塞拜疆": "亞塞拜然",
"阿塞拜疆": "亞塞拜然",
"阿拉伯联合酋长国": "阿拉伯聯合大公國",
"阿拉伯聯合酋長國": "阿拉伯聯合大公國",
"马尔代夫": "馬爾地夫",
"馬爾代夫": "馬爾地夫",
"马耳他": "馬爾他",
"马里共和国": "馬利共和國",
"馬里共和國": "馬利共和國",
"方便面": "速食麵",
"快速面": "速食麵",
"即食麵": "速食麵",
"薯仔": "土豆",
"蹦极跳": "笨豬跳",
"绑紧跳": "笨豬跳",
"冷菜": "冷盤",
"凉菜": "冷盤",
"出租车": "計程車",
"台球": "撞球",
"桌球": "撞球",
"雪糕": "冰淇淋",
"卫生": "衛生",
"衞生": "衛生",
"平治": "賓士",
"奔驰": "賓士",
"積架": "捷豹",
"福士": "福斯",
"雪铁龙": "雪鐵龍",
"马自达": "馬自達",
"萬事得": "馬自達",
"拿破仑": "拿破崙",
"拿破侖": "拿破崙",
"布什": "布希",
"布殊": "布希",
"克林顿": "柯林頓",
"克林頓": "柯林頓",
"侯赛因": "海珊",
"侯賽因": "海珊",
"凡高": "梵谷",
"狄安娜": "黛安娜",
"戴安娜": "黛安娜",
"赫拉": "希拉",
}
zh2HK = {
"打印机": "打印機",
"印表機": "打印機",
"字节": "位元組",
"字節": "位元組",
"打印": "打印",
"列印": "打印",
"硬件": "硬件",
"硬體": "硬件",
"二极管": "二極管",
"二極體": "二極管",
"三极管": "三極管",
"三極體": "三極管",
"数码": "數碼",
"數位": "數碼",
"软件": "軟件",
"軟體": "軟件",
"网络": "網絡",
"網路": "網絡",
"人工智能": "人工智能",
"人工智慧": "人工智能",
"航天飞机": "穿梭機",
"太空梭": "穿梭機",
"因特网": "互聯網",
"網際網路": "互聯網",
"机器人": "機械人",
"機器人": "機械人",
"移动电话": "流動電話",
"行動電話": "流動電話",
"调制解调器": "調制解調器",
"數據機": "調制解調器",
"短信": "短訊",
"簡訊": "短訊",
"乍得": "乍得",
"查德": "乍得",
"也门": "也門",
"葉門": "也門",
"伯利兹": "伯利茲",
"貝里斯": "伯利茲",
"佛得角": "佛得角",
"維德角": "佛得角",
"克罗地亚": "克羅地亞",
"克羅埃西亞": "克羅地亞",
"冈比亚": "岡比亞",
"甘比亞": "岡比亞",
"几内亚比绍": "幾內亞比紹",
"幾內亞比索": "幾內亞比紹",
"列支敦士登": "列支敦士登",
"列支敦斯登": "列支敦士登",
"利比里亚": "利比里亞",
"賴比瑞亞": "利比里亞",
"加纳": "加納",
"迦納": "加納",
"加蓬": "加蓬",
"加彭": "加蓬",
"博茨瓦纳": "博茨瓦納",
"波札那": "博茨瓦納",
"卡塔尔": "卡塔爾",
"卡達": "卡塔爾",
"卢旺达": "盧旺達",
"盧安達": "盧旺達",
"危地马拉": "危地馬拉",
"瓜地馬拉": "危地馬拉",
"厄瓜多尔": "厄瓜多爾",
"厄瓜多": "厄瓜多爾",
"厄立特里亚": "厄立特里亞",
"厄利垂亞": "厄立特里亞",
"吉布提": "吉布堤",
"吉布地": "吉布堤",
"哥斯达黎加": "哥斯達黎加",
"哥斯大黎加": "哥斯達黎加",
"图瓦卢": "圖瓦盧",
"吐瓦魯": "圖瓦盧",
"圣卢西亚": "聖盧西亞",
"聖露西亞": "聖盧西亞",
"圣基茨和尼维斯": "聖吉斯納域斯",
"聖克里斯多福及尼維斯": "聖吉斯納域斯",
"圣文森特和格林纳丁斯": "聖文森特和格林納丁斯",
"聖文森及格瑞那丁": "聖文森特和格林納丁斯",
"圣马力诺": "聖馬力諾",
"聖馬利諾": "聖馬力諾",
"圭亚那": "圭亞那",
"蓋亞那": "圭亞那",
"坦桑尼亚": "坦桑尼亞",
"坦尚尼亞": "坦桑尼亞",
"埃塞俄比亚": "埃塞俄比亞",
"衣索匹亞": "埃塞俄比亞",
"衣索比亞": "埃塞俄比亞",
"基里巴斯": "基里巴斯",
"吉里巴斯": "基里巴斯",
"狮子山": "獅子山",
"塞普勒斯": "塞浦路斯",
"塞舌尔": "塞舌爾",
"塞席爾": "塞舌爾",
"多米尼加": "多明尼加共和國",
"多明尼加": "多明尼加共和國",
"多米尼加联邦": "多明尼加聯邦",
"多米尼克": "多明尼加聯邦",
"安提瓜和巴布达": "安提瓜和巴布達",
"安地卡及巴布達": "安提瓜和巴布達",
"尼日利亚": "尼日利亞",
"奈及利亞": "尼日利亞",
"尼日尔": "尼日爾",
"尼日": "尼日爾",
"巴巴多斯": "巴巴多斯",
"巴貝多": "巴巴多斯",
"巴布亚新几内亚": "巴布亞新畿內亞",
"巴布亞紐幾內亞": "巴布亞新畿內亞",
"布基纳法索": "布基納法索",
"布吉納法索": "布基納法索",
"布隆迪": "布隆迪",
"蒲隆地": "布隆迪",
"義大利": "意大利",
"所罗门群岛": "所羅門群島",
"索羅門群島": "所羅門群島",
"斯威士兰": "斯威士蘭",
"史瓦濟蘭": "斯威士蘭",
"斯洛文尼亚": "斯洛文尼亞",
"斯洛維尼亞": "斯洛文尼亞",
"新西兰": "新西蘭",
"紐西蘭": "新西蘭",
"格林纳达": "格林納達",
"格瑞那達": "格林納達",
"格鲁吉亚": "喬治亞",
"格魯吉亞": "喬治亞",
"梵蒂冈": "梵蒂岡",
"毛里塔尼亚": "毛里塔尼亞",
"茅利塔尼亞": "毛里塔尼亞",
"毛里求斯": "毛里裘斯",
"模里西斯": "毛里裘斯",
"沙烏地阿拉伯": "沙特阿拉伯",
"波斯尼亚和黑塞哥维那": "波斯尼亞黑塞哥維那",
"波士尼亞赫塞哥維納": "波斯尼亞黑塞哥維那",
"津巴布韦": "津巴布韋",
"辛巴威": "津巴布韋",
"洪都拉斯": "洪都拉斯",
"宏都拉斯": "洪都拉斯",
"特立尼达和托巴哥": "特立尼達和多巴哥",
"千里達托貝哥": "特立尼達和多巴哥",
"瑙鲁": "瑙魯",
"諾魯": "瑙魯",
"瓦努阿图": "瓦努阿圖",
"萬那杜": "瓦努阿圖",
"科摩罗": "科摩羅",
"葛摩": "科摩羅",
"索马里": "索馬里",
"索馬利亞": "索馬里",
"老挝": "老撾",
"寮國": "老撾",
"肯尼亚": "肯雅",
"肯亞": "肯雅",
"莫桑比克": "莫桑比克",
"莫三比克": "莫桑比克",
"莱索托": "萊索托",
"賴索托": "萊索托",
"贝宁": "貝寧",
"貝南": "貝寧",
"赞比亚": "贊比亞",
"尚比亞": "贊比亞",
"阿塞拜疆": "阿塞拜疆",
"亞塞拜然": "阿塞拜疆",
"阿拉伯联合酋长国": "阿拉伯聯合酋長國",
"阿拉伯聯合大公國": "阿拉伯聯合酋長國",
"马尔代夫": "馬爾代夫",
"馬爾地夫": "馬爾代夫",
"馬利共和國": "馬里共和國",
"方便面": "即食麵",
"快速面": "即食麵",
"速食麵": "即食麵",
"泡麵": "即食麵",
"土豆": "馬鈴薯",
"华乐": "中樂",
"民乐": "中樂",
"計程車": "的士",
"出租车": "的士",
"公車": "巴士",
"自行车": "單車",
"犬只": "狗隻",
"台球": "桌球",
"撞球": "桌球",
"冰淇淋": "雪糕",
"賓士": "平治",
"捷豹": "積架",
"福斯": "福士",
"雪铁龙": "先進",
"雪鐵龍": "先進",
"沃尓沃": "富豪",
"马自达": "萬事得",
"馬自達": "萬事得",
"寶獅": "標致",
"拿破崙": "拿破侖",
"布什": "布殊",
"布希": "布殊",
"克林顿": "克林頓",
"柯林頓": "克林頓",
"萨达姆": "薩達姆",
"海珊": "侯賽因",
"侯赛因": "侯賽因",
"大卫·贝克汉姆": "大衛碧咸",
"迈克尔·欧文": "米高奧雲",
"珍妮弗·卡普里亚蒂": "卡佩雅蒂",
"马拉特·萨芬": "沙芬",
"迈克尔·舒马赫": "舒麥加",
"希特勒": "希特拉",
"狄安娜": "戴安娜",
"黛安娜": "戴安娜",
}
zh2CN = {
"記憶體": "内存",
"預設": "默认",
"串列": "串行",
"乙太網": "以太网",
"點陣圖": "位图",
"常式": "例程",
"游標": "光标",
"光碟": "光盘",
"光碟機": "光驱",
"全形": "全角",
"共用": "共享",
"載入": "加载",
"半形": "半角",
"變數": "变量",
"雜訊": "噪声",
"因數": "因子",
"功能變數名稱": "域名",
"音效卡": "声卡",
"字型大小": "字号",
"字型檔": "字库",
"欄位": "字段",
"字元": "字符",
"存檔": "存盘",
"定址": "寻址",
"章節附註": "尾注",
"非同步": "异步",
"匯流排": "总线",
"括弧": "括号",
"介面": "接口",
"控制項": "控件",
"許可權": "权限",
"碟片": "盘片",
"矽片": "硅片",
"矽谷": "硅谷",
"硬碟": "硬盘",
"磁碟": "磁盘",
"磁軌": "磁道",
"程式控制": "程控",
"運算元": "算子",
"演算法": "算法",
"晶片": "芯片",
"晶元": "芯片",
"片語": "词组",
"軟碟機": "软驱",
"快閃記憶體": "快闪存储器",
"滑鼠": "鼠标",
"進位": "进制",
"互動式": "交互式",
"優先順序": "优先级",
"感測": "传感",
"攜帶型": "便携式",
"資訊理論": "信息论",
"迴圈": "循环",
"防寫": "写保护",
"分散式": "分布式",
"解析度": "分辨率",
"伺服器": "服务器",
"等於": "等于",
"區域網": "局域网",
"巨集": "宏",
"掃瞄器": "扫瞄仪",
"寬頻": "宽带",
"資料庫": "数据库",
"乳酪": "奶酪",
"鉅賈": "巨商",
"手電筒": "手电",
"萬曆": "万历",
"永曆": "永历",
"辭彙": "词汇",
"母音": "元音",
"自由球": "任意球",
"頭槌": "头球",
"進球": "入球",
"顆進球": "粒入球",
"射門": "打门",
"蓋火鍋": "火锅盖帽",
"印表機": "打印机",
"打印機": "打印机",
"位元組": "字节",
"字節": "字节",
"列印": "打印",
"打印": "打印",
"硬體": "硬件",
"二極體": "二极管",
"二極管": "二极管",
"三極體": "三极管",
"三極管": "三极管",
"數位": "数码",
"數碼": "数码",
"軟體": "软件",
"軟件": "软件",
"網路": "网络",
"網絡": "网络",
"人工智慧": "人工智能",
"太空梭": "航天飞机",
"穿梭機": "航天飞机",
"網際網路": "因特网",
"互聯網": "因特网",
"機械人": "机器人",
"機器人": "机器人",
"行動電話": "移动电话",
"流動電話": "移动电话",
"調制解調器": "调制解调器",
"數據機": "调制解调器",
"短訊": "短信",
"簡訊": "短信",
"烏茲別克": "乌兹别克斯坦",
"查德": "乍得",
"乍得": "乍得",
"也門": "也门",
"葉門": "也门",
"伯利茲": "伯利兹",
"貝里斯": "伯利兹",
"維德角": "佛得角",
"佛得角": "佛得角",
"克羅地亞": "克罗地亚",
"克羅埃西亞": "克罗地亚",
"岡比亞": "冈比亚",
"甘比亞": "冈比亚",
"幾內亞比紹": "几内亚比绍",
"幾內亞比索": "几内亚比绍",
"列支敦斯登": "列支敦士登",
"列支敦士登": "列支敦士登",
"利比里亞": "利比里亚",
"賴比瑞亞": "利比里亚",
"加納": "加纳",
"迦納": "加纳",
"加彭": "加蓬",
"加蓬": "加蓬",
"博茨瓦納": "博茨瓦纳",
"波札那": "博茨瓦纳",
"卡塔爾": "卡塔尔",
"卡達": "卡塔尔",
"盧旺達": "卢旺达",
"盧安達": "卢旺达",
"危地馬拉": "危地马拉",
"瓜地馬拉": "危地马拉",
"厄瓜多爾": "厄瓜多尔",
"厄瓜多": "厄瓜多尔",
"厄立特里亞": "厄立特里亚",
"厄利垂亞": "厄立特里亚",
"吉布堤": "吉布提",
"吉布地": "吉布提",
"哈薩克": "哈萨克斯坦",
"哥斯達黎加": "哥斯达黎加",
"哥斯大黎加": "哥斯达黎加",
"圖瓦盧": "图瓦卢",
"吐瓦魯": "图瓦卢",
"土庫曼": "土库曼斯坦",
"聖盧西亞": "圣卢西亚",
"聖露西亞": "圣卢西亚",
"聖吉斯納域斯": "圣基茨和尼维斯",
"聖克里斯多福及尼維斯": "圣基茨和尼维斯",
"聖文森特和格林納丁斯": "圣文森特和格林纳丁斯",
"聖文森及格瑞那丁": "圣文森特和格林纳丁斯",
"聖馬力諾": "圣马力诺",
"聖馬利諾": "圣马力诺",
"圭亞那": "圭亚那",
"蓋亞那": "圭亚那",
"坦桑尼亞": "坦桑尼亚",
"坦尚尼亞": "坦桑尼亚",
"埃塞俄比亞": "埃塞俄比亚",
"衣索匹亞": "埃塞俄比亚",
"衣索比亞": "埃塞俄比亚",
"吉里巴斯": "基里巴斯",
"基里巴斯": "基里巴斯",
"塔吉克": "塔吉克斯坦",
"塞拉利昂": "塞拉利昂",
"塞普勒斯": "塞浦路斯",
"塞浦路斯": "塞浦路斯",
"塞舌爾": "塞舌尔",
"塞席爾": "塞舌尔",
"多明尼加共和國": "多米尼加",
"多明尼加": "多米尼加",
"多明尼加聯邦": "多米尼加联邦",
"多米尼克": "多米尼加联邦",
"安提瓜和巴布達": "安提瓜和巴布达",
"安地卡及巴布達": "安提瓜和巴布达",
"尼日利亞": "尼日利亚",
"奈及利亞": "尼日利亚",
"尼日爾": "尼日尔",
"尼日": "尼日尔",
"巴貝多": "巴巴多斯",
"巴巴多斯": "巴巴多斯",
"巴布亞新畿內亞": "巴布亚新几内亚",
"巴布亞紐幾內亞": "巴布亚新几内亚",
"布基納法索": "布基纳法索",
"布吉納法索": "布基纳法索",
"蒲隆地": "布隆迪",
"布隆迪": "布隆迪",
"希臘": "希腊",
"帛琉": "帕劳",
"義大利": "意大利",
"意大利": "意大利",
"所羅門群島": "所罗门群岛",
"索羅門群島": "所罗门群岛",
"汶萊": "文莱",
"斯威士蘭": "斯威士兰",
"史瓦濟蘭": "斯威士兰",
"斯洛文尼亞": "斯洛文尼亚",
"斯洛維尼亞": "斯洛文尼亚",
"新西蘭": "新西兰",
"紐西蘭": "新西兰",
"格林納達": "格林纳达",
"格瑞那達": "格林纳达",
"格魯吉亞": "乔治亚",
"喬治亞": "乔治亚",
"梵蒂岡": "梵蒂冈",
"毛里塔尼亞": "毛里塔尼亚",
"茅利塔尼亞": "毛里塔尼亚",
"毛里裘斯": "毛里求斯",
"模里西斯": "毛里求斯",
"沙地阿拉伯": "沙特阿拉伯",
"沙烏地阿拉伯": "沙特阿拉伯",
"波斯尼亞黑塞哥維那": "波斯尼亚和黑塞哥维那",
"波士尼亞赫塞哥維納": "波斯尼亚和黑塞哥维那",
"津巴布韋": "津巴布韦",
"辛巴威": "津巴布韦",
"宏都拉斯": "洪都拉斯",
"洪都拉斯": "洪都拉斯",
"特立尼達和多巴哥": "特立尼达和托巴哥",
"千里達托貝哥": "特立尼达和托巴哥",
"瑙魯": "瑙鲁",
"諾魯": "瑙鲁",
"瓦努阿圖": "瓦努阿图",
"萬那杜": "瓦努阿图",
"溫納圖": "瓦努阿图",
"科摩羅": "科摩罗",
"葛摩": "科摩罗",
"象牙海岸": "科特迪瓦",
"突尼西亞": "突尼斯",
"索馬里": "索马里",
"索馬利亞": "索马里",
"老撾": "老挝",
"寮國": "老挝",
"肯雅": "肯尼亚",
"肯亞": "肯尼亚",
"蘇利南": "苏里南",
"莫三比克": "莫桑比克",
"莫桑比克": "莫桑比克",
"萊索托": "莱索托",
"賴索托": "莱索托",
"貝寧": "贝宁",
"貝南": "贝宁",
"贊比亞": "赞比亚",
"尚比亞": "赞比亚",
"亞塞拜然": "阿塞拜疆",
"阿塞拜疆": "阿塞拜疆",
"阿拉伯聯合酋長國": "阿拉伯联合酋长国",
"阿拉伯聯合大公國": "阿拉伯联合酋长国",
"南韓": "韩国",
"馬爾代夫": "马尔代夫",
"馬爾地夫": "马尔代夫",
"馬爾他": "马耳他",
"馬利共和國": "马里共和国",
"即食麵": "方便面",
"快速面": "方便面",
"速食麵": "方便面",
"泡麵": "方便面",
"笨豬跳": "蹦极跳",
"绑紧跳": "蹦极跳",
"冷盤": "凉菜",
"冷菜": "凉菜",
"散钱": "零钱",
"谐星": "笑星",
"夜学": "夜校",
"华乐": "民乐",
"中樂": "民乐",
"屋价": "房价",
"的士": "出租车",
"計程車": "出租车",
"公車": "公共汽车",
"單車": "自行车",
"節慶": "节日",
"芝士": "干酪",
"狗隻": "犬只",
"士多啤梨": "草莓",
"忌廉": "奶油",
"桌球": "台球",
"撞球": "台球",
"雪糕": "冰淇淋",
"衞生": "卫生",
"衛生": "卫生",
"賓士": "奔驰",
"平治": "奔驰",
"積架": "捷豹",
"福斯": "大众",
"福士": "大众",
"雪鐵龍": "雪铁龙",
"萬事得": "马自达",
"馬自達": "马自达",
"寶獅": "标致",
"拿破崙": "拿破仑",
"布殊": "布什",
"布希": "布什",
"柯林頓": "克林顿",
"克林頓": "克林顿",
"薩達姆": "萨达姆",
"海珊": "萨达姆",
"梵谷": "凡高",
"大衛碧咸": "大卫·贝克汉姆",
"米高奧雲": "迈克尔·欧文",
"卡佩雅蒂": "珍妮弗·卡普里亚蒂",
"沙芬": "马拉特·萨芬",
"舒麥加": "迈克尔·舒马赫",
"希特拉": "希特勒",
"黛安娜": "戴安娜",
"希拉": "赫拉",
}
zh2SG = {
"方便面": "快速面",
"速食麵": "快速面",
"即食麵": "快速面",
"蹦极跳": "绑紧跳",
"笨豬跳": "绑紧跳",
"凉菜": "冷菜",
"冷盤": "冷菜",
"零钱": "散钱",
"散紙": "散钱",
"笑星": "谐星",
"夜校": "夜学",
"民乐": "华乐",
"住房": "住屋",
"房价": "屋价",
"泡麵": "快速面",
} | PypiClean |
/ITMO_FS-0.3.3.tar.gz/ITMO_FS-0.3.3/ITMO_FS/filters/unsupervised/trace_ratio_laplacian.py | import numpy as np
from sklearn.metrics.pairwise import pairwise_distances
from scipy.sparse import *
# TODO requests changes for MultivariateFilter to be used there
class TraceRatioLaplacian(object):
"""
Creates TraceRatio(similarity based) feature selection filter
performed in unsupervised way, i.e laplacian version
Parameters
----------
n_selected_features : int
Amount of features to filter
k : int
number of neighbours to use for knn
t : int
constant for kernel function calculation
- Note: in laplacian case only. In fisher it uses label similarity, i.e if both samples belong to same class
Notes
-----
For more details see `this paper <https://aaai.org/Papers/AAAI/2008/AAAI08-107.pdf/>`_.
Examples
--------
>>> from ITMO_FS.filters.unsupervised.trace_ratio_laplacian import TraceRatioLaplacian
>>> from sklearn.datasets import make_classification
>>> x, y = make_classification(1000, 100, n_informative = 10, \
n_redundant = 30, n_repeated = 10, shuffle = False)
>>> tracer = TraceRatioLaplacian(10)
>>> print(tracer.run(x, y)[0])
"""
def __init__(self, n_selected_features, k=5, t=1):
self.n_selected_features = n_selected_features
self.k = k
self.t = t
def run(self, X, y):
"""
Fits filter
Parameters
----------
X : numpy array, shape (n_samples, n_features)
The training input samples
y : numpy array, shape (n_samples, )
The target values
Returns
----------
feature_indices : numpy array
array of feature indices in X
See Also
--------
Examples
--------
"""
n_samples = X.shape[0]
Distances = pairwise_distances(X)
Distances **= 2
Distances_NN = np.sort(Distances, axis=1)[:, 0:self.k + 1]
Indices_NN = np.argsort(Distances, axis=1)[:, 0:self.k + 1]
Kernel = np.exp(-Distances_NN / self.t)
joined_distances = np.ravel(Kernel)
indices_axis_one = np.ravel(Indices_NN)
indices_axis_zero = np.repeat(np.arange(n_samples), self.k + 1)
A_within = csc_matrix((joined_distances, (indices_axis_zero, indices_axis_one)), shape=(n_samples, n_samples))
A_within = A_within - A_within.multiply(A_within.T > A_within) + A_within.T.multiply(A_within.T > A_within)
D_within = np.diag(np.ravel(A_within.sum(1))) # check correctness
L_within = D_within - A_within
A_between = D_within.dot(np.ones((n_samples, n_samples))).dot(D_within) / np.sum(D_within)
D_between = np.diag(A_between.sum(1))
L_between = D_between - A_between
L_within = (L_within.T + L_within) / 2
L_between = (L_between.T + L_between) / 2
E = X.T.dot(L_within).dot(X)
B = X.T.dot(L_between).dot(X)
E = (E.T + E) / 2
B = (B.T + B) / 2
# we need only diagonal elements for trace calculation
e = np.absolute(np.diag(E))
b = np.absolute(np.diag(B))
b[b == 0] = 1e-14
features_indices = np.argsort(np.divide(b, e))[::-1][0:self.n_selected_features]
lam = np.sum(b[features_indices]) / np.sum(e[features_indices])
prev_lam = 0
while (lam - prev_lam >= 1e-3):
score = b - lam * e
features_indices = np.argsort(score)[::-1][0:self.n_selected_features]
prev_lam = lam
lam = np.sum(b[features_indices]) / np.sum(e[features_indices])
return features_indices, score, lam | PypiClean |
class KernelUpgraderError(RuntimeError):
    """Base class for every error raised by kernel-upgrader.

    Each original exception class duplicated the same boilerplate (store
    ``message`` on the instance and forward it to ``RuntimeError``) with an
    inconsistent mix of py2- and py3-style ``super()`` calls.  That shared
    behaviour now lives here once.  ``message`` defaults to ``None``, which
    also generalises the two classes that previously required it.
    """

    def __init__(self, message=None):
        self.message = message  # kept because callers read exc.message
        super().__init__(message)


class ModuleNeededNotFound(KernelUpgraderError):
    """A Python module required by the application is missing (see raiserModuleNotFound)."""


class ContentNotAvailable(KernelUpgraderError):
    """The requested content could not be downloaded (see raiserContentNotAvailable)."""


class ExtractionError(KernelUpgraderError):
    """Error type for archive-extraction failures."""


class CopyConfigError(KernelUpgraderError):
    """Error type for kernel-configuration copy failures."""


class OldConfigAdaptationError(KernelUpgraderError):
    """Error type for failures adapting the old kernel configuration."""


class CompilationError(KernelUpgraderError):
    """Error type for kernel compilation failures."""


class RPMNotSupported(KernelUpgraderError):
    """Error type for operations unsupported on RPM systems."""


class InstallationError(KernelUpgraderError):
    """Error type for kernel installation failures."""


class LinuxSystemNotFound(KernelUpgraderError):
    """Error type raised when no Linux system is detected."""


class RootPrivilegesNotGiven(KernelUpgraderError):
    """Error type raised when root privileges are missing."""


class UnableToInstallDependencies(KernelUpgraderError):
    """Error type for dependency installation failures."""


class NotEnoughFreeSpaceAvailable(KernelUpgraderError):
    """Error type raised when free disk space is insufficient."""


def raiserModuleNotFound(exception):
    """Wrap ``exception`` in a terminal-coloured ModuleNeededNotFound and raise it."""
    # Imported lazily so this module stays importable on its own.
    from kernel_upgrader.utils.colors import OutputColors as Colors
    raise ModuleNeededNotFound(Colors.FAIL + "This app requires some modules that were not found on this device."
                                             " More info: " + str(exception) + Colors.ENDC)


def raiserContentNotAvailable(exception):
    """Wrap ``exception`` in a terminal-coloured ContentNotAvailable and raise it."""
    # Imported lazily so this module stays importable on its own.
    from kernel_upgrader.utils.colors import OutputColors as Colors
    raise ContentNotAvailable(Colors.FAIL + "The content is not available to download. Please, try again later or "
                                            "check your Internet connection. More info: "
                              + str(exception) + Colors.ENDC)
/ImSwitchUC2-2.1.0.tar.gz/ImSwitchUC2-2.1.0/imswitch/imreconstruct/view/ImRecMainView.py | import numpy as np
import pyqtgraph as pg
from pyqtgraph.dockarea import Dock, DockArea
from pyqtgraph.parametertree import Parameter, ParameterTree
from qtpy import QtCore, QtWidgets
from imswitch.imcommon.view import PickDatasetsDialog
from .DataFrame import DataFrame
from .MultiDataFrame import MultiDataFrame
from .WatcherFrame import WatcherFrame
from .ReconstructionView import ReconstructionView
from .ScanParamsDialog import ScanParamsDialog
from .guitools import BetterPushButton
class ImRecMainView(QtWidgets.QMainWindow):
sigSaveReconstruction = QtCore.Signal()
sigSaveReconstructionAll = QtCore.Signal()
sigSaveCoeffs = QtCore.Signal()
sigSaveCoeffsAll = QtCore.Signal()
sigSetDataFolder = QtCore.Signal()
sigSetSaveFolder = QtCore.Signal()
sigReconstuctCurrent = QtCore.Signal()
sigReconstructMultiConsolidated = QtCore.Signal()
sigReconstructMultiIndividual = QtCore.Signal()
sigQuickLoadData = QtCore.Signal()
sigUpdate = QtCore.Signal()
sigShowPatternChanged = QtCore.Signal(bool)
sigFindPattern = QtCore.Signal()
sigShowScanParamsClicked = QtCore.Signal()
sigPatternParamsChanged = QtCore.Signal()
sigClosing = QtCore.Signal()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setWindowTitle('Image Reconstruction')
# self parameters
self.r_l_text = 'Right/Left'
self.u_d_text = 'Up/Down'
self.b_f_text = 'Back/Forth'
self.timepoints_text = 'Timepoints'
self.p_text = 'pos'
self.n_text = 'neg'
# Actions in menubar
menuBar = self.menuBar()
file = menuBar.addMenu('&File')
quickLoadAction = QtWidgets.QAction('Quick load data…', self)
quickLoadAction.setShortcut('Ctrl+T')
quickLoadAction.triggered.connect(self.sigQuickLoadData)
file.addAction(quickLoadAction)
file.addSeparator()
saveReconAction = QtWidgets.QAction('Save reconstruction…', self)
saveReconAction.setShortcut('Ctrl+D')
saveReconAction.triggered.connect(self.sigSaveReconstruction)
file.addAction(saveReconAction)
saveReconAllAction = QtWidgets.QAction('Save all reconstructions…', self)
saveReconAllAction.setShortcut('Ctrl+Shift+D')
saveReconAllAction.triggered.connect(self.sigSaveReconstructionAll)
file.addAction(saveReconAllAction)
saveCoeffsAction = QtWidgets.QAction('Save coefficients of reconstruction…', self)
saveCoeffsAction.setShortcut('Ctrl+A')
saveCoeffsAction.triggered.connect(self.sigSaveCoeffs)
file.addAction(saveCoeffsAction)
saveCoeffsAllAction = QtWidgets.QAction('Save all coefficients…', self)
saveCoeffsAllAction.setShortcut('Ctrl+Shift+A')
saveCoeffsAllAction.triggered.connect(self.sigSaveCoeffsAll)
file.addAction(saveCoeffsAllAction)
file.addSeparator()
setDataFolder = QtWidgets.QAction('Set default data folder…', self)
setDataFolder.triggered.connect(self.sigSetDataFolder)
file.addAction(setDataFolder)
setSaveFolder = QtWidgets.QAction('Set default save folder…', self)
setSaveFolder.triggered.connect(self.sigSetSaveFolder)
file.addAction(setSaveFolder)
self.dataFrame = DataFrame()
self.multiDataFrame = MultiDataFrame()
self.watcherFrame = WatcherFrame()
btnFrame = BtnFrame()
btnFrame.sigReconstuctCurrent.connect(self.sigReconstuctCurrent)
btnFrame.sigReconstructMultiConsolidated.connect(self.sigReconstructMultiConsolidated)
btnFrame.sigReconstructMultiIndividual.connect(self.sigReconstructMultiIndividual)
btnFrame.sigQuickLoadData.connect(self.sigQuickLoadData)
btnFrame.sigUpdate.connect(self.sigUpdate)
self.reconstructionWidget = ReconstructionView()
self.parTree = ReconParTree()
self.showPatBool = self.parTree.p.param('Show pattern')
self.showPatBool.sigValueChanged.connect(lambda _, v: self.sigShowPatternChanged.emit(v))
self.bleachBool = self.parTree.p.param('Bleaching correction')
self.extension = self.parTree.p.param('File extension')
self.findPatBtn = self.parTree.p.param('Pattern').param('Find pattern')
self.findPatBtn.sigActivated.connect(self.sigFindPattern)
self.scanParWinBtn = self.parTree.p.param('Scanning parameters')
self.scanParWinBtn.sigActivated.connect(self.sigShowScanParamsClicked)
self.parTree.p.param('Pattern').sigTreeStateChanged.connect(self.sigPatternParamsChanged)
self.scanParamsDialog = ScanParamsDialog(
self, self.r_l_text, self.u_d_text, self.b_f_text,
self.timepoints_text, self.p_text, self.n_text
)
self.pickDatasetsDialog = PickDatasetsDialog(self, allowMultiSelect=True)
parameterFrame = QtWidgets.QFrame()
parameterGrid = QtWidgets.QGridLayout()
parameterFrame.setLayout(parameterGrid)
parameterGrid.addWidget(self.parTree, 0, 0)
DataDock = DockArea()
self.watcherDock = Dock('File watcher')
self.watcherDock.addWidget(self.watcherFrame)
DataDock.addDock(self.watcherDock)
self.multiDataDock = Dock('Multidata management')
self.multiDataDock.addWidget(self.multiDataFrame)
DataDock.addDock(self.multiDataDock, 'above', self.watcherDock)
self.currentDataDock = Dock('Current data')
self.currentDataDock.addWidget(self.dataFrame)
DataDock.addDock(self.currentDataDock, 'above', self.multiDataDock)
layout = QtWidgets.QHBoxLayout()
self.cwidget = QtWidgets.QWidget()
self.setCentralWidget(self.cwidget)
self.cwidget.setLayout(layout)
leftContainer = QtWidgets.QVBoxLayout()
leftContainer.setContentsMargins(0, 0, 0, 0)
rightContainer = QtWidgets.QVBoxLayout()
rightContainer.setContentsMargins(0, 0, 0, 0)
leftContainer.addWidget(parameterFrame, 1)
leftContainer.addWidget(btnFrame, 0)
leftContainer.addWidget(DataDock, 1)
rightContainer.addWidget(self.reconstructionWidget)
layout.addLayout(leftContainer, 1)
layout.addLayout(rightContainer, 3)
pg.setConfigOption('imageAxisOrder', 'row-major')
def requestFilePathFromUser(self, caption=None, defaultFolder=None, nameFilter=None,
isSaving=False):
func = (QtWidgets.QFileDialog().getOpenFileName if not isSaving
else QtWidgets.QFileDialog().getSaveFileName)
return func(self, caption=caption, directory=defaultFolder, filter=nameFilter)[0]
def requestFolderPathFromUser(self, caption=None, defaultFolder=None):
return QtWidgets.QFileDialog.getExistingDirectory(caption=caption, directory=defaultFolder)
def raiseCurrentDataDock(self):
self.currentDataDock.raiseDock()
def raiseMultiDataDock(self):
self.multiDataDock.raiseDock()
def addNewData(self, reconObj, name):
self.reconstructionWidget.addNewData(reconObj, name)
def getMultiDatas(self):
dataList = self.multiDataFrame.dataList
for i in range(dataList.count()):
yield dataList.item(i).data(1)
def showScanParamsDialog(self, blocking=False):
if blocking:
result = self.scanParamsDialog.exec_()
return result == QtWidgets.QDialog.Accepted
else:
self.scanParamsDialog.show()
def showPickDatasetsDialog(self, blocking=False):
if blocking:
result = self.pickDatasetsDialog.exec_()
return result == QtWidgets.QDialog.Accepted
else:
self.pickDatasetsDialog.show()
def getPatternParams(self):
patternPars = self.parTree.p.param('Pattern')
return (np.mod(patternPars.param('Row-offset').value(),
patternPars.param('Row-period').value()),
np.mod(patternPars.param('Col-offset').value(),
patternPars.param('Col-period').value()),
patternPars.param('Row-period').value(),
patternPars.param('Col-period').value())
def setPatternParams(self, rowOffset, colOffset, rowPeriod, colPeriod):
patternPars = self.parTree.p.param('Pattern')
patternPars.param('Row-offset').setValue(rowOffset)
patternPars.param('Col-offset').setValue(colOffset)
patternPars.param('Row-period').setValue(rowPeriod)
patternPars.param('Col-period').setValue(colPeriod)
def getComputeDevice(self):
return self.parTree.p.param('CPU/GPU').value()
def getPixelSizeNm(self):
return self.parTree.p.param('Pixel size').value()
def getFwhmNm(self):
return self.parTree.p.param('Reconstruction options').param('PSF FWHM').value()
def getBgModelling(self):
return self.parTree.p.param('Reconstruction options').param('BG modelling').value()
def getBgGaussianSize(self):
return self.parTree.p.param('Reconstruction options').param('BG modelling') \
.param('BG Gaussian size').value()
    def closeEvent(self, event):
        """Emit sigClosing so listeners can clean up, then accept the close event."""
        self.sigClosing.emit()
        event.accept()
class ReconParTree(ParameterTree):
    """Parameter tree holding all reconstruction settings of the widget."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Parameter tree for the reconstruction.
        # NOTE(review): units/defaults below assumed from usage elsewhere in
        # this file (getPatternParams etc.) -- confirm against controller code.
        params = [
            {'name': 'Pixel size', 'type': 'float', 'value': 65, 'suffix': 'nm'},
            {'name': 'CPU/GPU', 'type': 'list', 'values': ['GPU', 'CPU']},
            # Scanning-pattern geometry (offsets wrapped modulo period by
            # getPatternParams()).
            {'name': 'Pattern', 'type': 'group', 'children': [
                {'name': 'Row-offset', 'type': 'float', 'value': 9.89, 'limits': (0, 9999)},
                {'name': 'Col-offset', 'type': 'float', 'value': 10.4, 'limits': (0, 9999)},
                {'name': 'Row-period', 'type': 'float', 'value': 11.05, 'limits': (0, 9999)},
                {'name': 'Col-period', 'type': 'float', 'value': 11.05, 'limits': (0, 9999)},
                {'name': 'Find pattern', 'type': 'action'}]},
            {'name': 'Reconstruction options', 'type': 'group', 'children': [
                {'name': 'PSF FWHM', 'type': 'float', 'value': 220, 'limits': (0, 9999),
                 'suffix': 'nm'},
                # 'BG Gaussian size' is only meaningful for the 'Gaussian' mode.
                {'name': 'BG modelling', 'type': 'list',
                 'values': ['Constant', 'Gaussian', 'No background'], 'children': [
                    {'name': 'BG Gaussian size', 'type': 'float', 'value': 500, 'suffix': 'nm'}]}]},
            {'name': 'Scanning parameters', 'type': 'action'},
            {'name': 'Show pattern', 'type': 'bool'},
            {'name': 'Bleaching correction', 'type': 'bool'},
            {'name': 'File extension', 'type': 'list', 'values': ['hdf5', 'zarr']}]
        self.p = Parameter.create(name='params', type='group', children=params)
        self.setParameters(self.p, showTop=False)
        self._writable = True
class BtnFrame(QtWidgets.QFrame):
    """Frame containing the action buttons of the reconstruction widget."""

    # NOTE(review): 'Reconstuct' is misspelled, but the signal name is part of
    # the public interface (connected from outside), so it is kept as-is.
    sigReconstuctCurrent = QtCore.Signal()
    sigReconstructMultiConsolidated = QtCore.Signal()
    sigReconstructMultiIndividual = QtCore.Signal()
    sigQuickLoadData = QtCore.Signal()
    sigUpdate = QtCore.Signal()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Each button click is forwarded directly to the matching signal.
        self.reconCurrBtn = BetterPushButton('Reconstruct current')
        self.reconCurrBtn.clicked.connect(self.sigReconstuctCurrent)
        self.quickLoadDataBtn = BetterPushButton('Quick load data')
        self.quickLoadDataBtn.clicked.connect(self.sigQuickLoadData)
        self.updateBtn = BetterPushButton('Update reconstruction')
        self.updateBtn.clicked.connect(self.sigUpdate)
        # Tool button whose instant-popup menu offers the two multi-data modes.
        self.reconMultiBtn = QtWidgets.QToolButton()
        self.reconMultiBtn.setSizePolicy(
            QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        )
        self.reconMultiBtn.setText('Reconstruct multidata')
        self.reconMultiBtn.setPopupMode(QtWidgets.QToolButton.ToolButtonPopupMode.InstantPopup)
        self.reconMultiConsolidated = QtWidgets.QAction('Consolidate into a single reconstruction')
        self.reconMultiConsolidated.triggered.connect(self.sigReconstructMultiConsolidated)
        self.reconMultiBtn.addAction(self.reconMultiConsolidated)
        self.reconMultiIndividual = QtWidgets.QAction('Reconstruct data items individually')
        self.reconMultiIndividual.triggered.connect(self.sigReconstructMultiIndividual)
        self.reconMultiBtn.addAction(self.reconMultiIndividual)
        # Layout: quick-load on top (spanning both columns), the two
        # reconstruct buttons side by side, update spanning the bottom row.
        layout = QtWidgets.QGridLayout()
        self.setLayout(layout)
        layout.addWidget(self.quickLoadDataBtn, 0, 0, 1, 2)
        layout.addWidget(self.reconCurrBtn, 1, 0)
        layout.addWidget(self.reconMultiBtn, 1, 1)
        layout.addWidget(self.updateBtn, 2, 0, 1, 2)
# Copyright (C) 2020-2021 ImSwitch developers
# This file is part of ImSwitch.
#
# ImSwitch is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ImSwitch is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>. | PypiClean |
/Jalapeno-0.1.4.tar.gz/Jalapeno-0.1.4/Jalapeno_data/theme/default/static/ijs/ihome.js | (function()
{
  // Slide-in sidebar for the home page: a menu rail (#sidebar-menu), a
  // content panel (#sidebar-content) that slides out next to it, and a mask
  // over the main page (#main-page) while a panel is open.
  var mask = $('.mask');

  // Park the content panel so only the sliver next to the menu rail shows.
  // NOTE(review): defined but never called from live code.
  function hidesidecontent(sidecont,maincont,sideme)
  {
    sidecont.animate({'right':sideme.width()-sidecont.width()},{duration:500,queue:false});
  }

  // Collapse the whole sidebar (content panel, menu rail, main-page margin)
  // and switch the toggle handle to its "show" style.
  function hidesidemenu(sidecont,maincont,sideme,closeBarEl)
  {
    sidecont.animate({'right':-sidecont.width()},{duration:0});
    maincont.animate({'margin-right':0},{duration:500,queue:false});
    sideme.animate({'right':-sideme.width()},{duration:500,queue:false});
    closeBarEl.className= 'showbar';
  }

  // Menu-rail controller with a two-state machine:
  // 'allClosed' (no content panel shown) <-> 'oneOpened' (one panel visible).
  var Sidemenu = function()
  {
    this.el = document.querySelector('#sidebar-menu ul');
    this.state = 'allClosed';
    this.el.addEventListener('click',function(evt){
      evt.stopPropagation();
    });
    /*animation*/
    this.sidecont = $('#sidebar-content');
    this.sideme = $('#sidebar-menu');
    this.menuList = document.querySelectorAll('#sidebar-menu ul>li');
    var self = this;
    for(var i = 0;i<this.menuList.length;i++)
    {
      // Each menu item '#foo' toggles its matching panel '#foo-content'.
      this.menuList[i].addEventListener('click',function(evt)
      {
        var sidecontel = '#'+evt.currentTarget.id+'-content',
          menuContEl = $('#'+evt.currentTarget.id+'-content');
        if(self.state === 'allClosed'){
          self.sideme.animate({'right':-50},{duration:500});
          mask.fadeIn(500);
          self.sidecont.animate({'right':0},{duration:500});
          self.state = 'oneOpened';
          self.MenuContNow = menuContEl;
        }
        // NOTE(review): intentionally not an else-if -- after the branch
        // above runs, this one fires as well, so the panel also fades in on
        // the very first click.
        if(self.state === 'oneOpened')
        {
          menuContEl.delay(500).fadeIn(500);
          console.log('open'+menuContEl.id);
          self.state ='oneOpened';
          self.MenuContNow = menuContEl;
        }
      });
    }
    // Close button inside the content panel: hide panel + mask, restore rail.
    $('.nav-con-close').on('click',function(){
      self.MenuContNow.fadeOut(500);
      mask.fadeOut(500);
      self.sidecont.animate({'right':-250},{duration:500,queue:false});
      self.sideme.animate({'right':0},{duration:500});
      self.state = 'allClosed';
    });
  };

  // Whole-sidebar controller: owns a Sidemenu plus the open/close toggle
  // handle (#closeBar). All ids fall back to the defaults shown below.
  var Sidebar = function(eventId,closeBarId,sidebarContId)
  {
    this.state = 'opened';
    this.el = document.getElementById(eventId||'sidebar-menu');
    this.closeBarEl = document.getElementById(closeBarId||'closeBar');
    this.contel = document.getElementById(sidebarContId||'sidebar-content');
    this.sidemenu = new Sidemenu();
    var self = this;
    this.sideme = $('#sidebar-menu');
    this.sidecont = $('#sidebar-content');
    this.sideclose = $('#closeBar');
    this.maincont = $('#main-page');
    this.closeBarEl.addEventListener('click',function(event){
      if(event.target !== self.closeBarEl){
        self.switchTrigger();
      }
    });
  };

  // Collapse the sidebar; additionally fade out the mask when a content
  // panel was open at the time.
  Sidebar.prototype.close = function(){
    this.state = 'closed';
    if(this.sidemenu.state === 'oneOpened')
    {
      mask.fadeOut(500);
      hidesidemenu(this.sidecont,this.maincont,this.sideme,this.closeBarEl);
      this.sidemenu.state = 'allClosed';
    }
    else if(this.sidemenu.state === 'allClosed')
    {
      hidesidemenu(this.sidecont,this.maincont,this.sideme,this.closeBarEl);
    }
  };

  // Expand the sidebar and restore the main-page margin.
  Sidebar.prototype.open = function(){
    this.state = 'opened';
    this.sideme.animate({'right':0},{duration:300,queue:false});
    this.maincont.animate({'margin-right':50},{duration:500,queue:false});
    this.sidecont.delay(1000).animate({'right':this.sideme.width()-this.sidecont.width()},{duration:0});
    this.closeBarEl.className= 'closebar';
  };

  // Toggle between the open and closed states.
  Sidebar.prototype.switchTrigger = function(){
    if(this.state === 'opened'){
      this.close();
    }
    else{
      this.open();
    }
  };

  // NOTE(review): `mask` is re-declared here; `var` re-declaration just
  // re-queries the same element as the declaration at the top of the IIFE.
  var menu = $('.menu'),
    backbutton=$('.back-to-top'),
    menu_trigger = $('.menu_trigger'),
    mask = $('.mask');

  // Smooth-scroll back to the top of the page.
  function backback()
  {
    $('html,body').animate({
      scrollTop:0
    },800)
  }

  // Show the back-to-top button only after scrolling half a viewport down.
  function hidebutton()
  {
    if($(window).scrollTop() > $(window).height()/2)
      backbutton.fadeIn();
    else
      backbutton.fadeOut();
  }

  // Wire everything up on DOM ready -- but only for viewports below 1024px.
  $(function(){
    if($(window).width()<1024){
      var sidebar = new Sidebar('sidebar-menu','closeBar','sidebar-content');
      setTimeout(function(){sidebar.close()},600);
      menu_trigger.click(function(){menu.slideToggle()});
      backbutton.on('click',backback);
      $(window).on('scroll',hidebutton);
      $(window).trigger('scroll');
    }
  })
})(); | PypiClean
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/drawing/ui/dom/Pan.js | if(!dojo._hasResource["dojox.drawing.ui.dom.Pan"]){
// Minified Dojo build artifact: the (deprecated) Pan plugin for
// dojox.drawing. Locals _1.._10 are build-mangled names; only comments have
// been added below.
dojo._hasResource["dojox.drawing.ui.dom.Pan"]=true;
dojo.provide("dojox.drawing.ui.dom.Pan");
dojo.require("dojox.drawing.plugins._Plugin");
dojo.deprecated("dojox.drawing.ui.dom.Pan","It may not even make it to the 1.4 release.",1.4);
// Constructor: hooks up the toolbar node click, spacebar handling, and
// re-checks canvas bounds on anchor/stencil/resize/zoom changes. Scroll
// events are debounced into checkBounds() via a 200 ms timer (_2).
dojox.drawing.ui.dom.Pan=dojox.drawing.util.oo.declare(dojox.drawing.plugins._Plugin,function(_1){
this.domNode=_1.node;
var _2;
dojo.connect(this.domNode,"click",this,"onSetPan");
dojo.connect(this.keys,"onKeyUp",this,"onKeyUp");
dojo.connect(this.keys,"onKeyDown",this,"onKeyDown");
dojo.connect(this.anchors,"onAnchorUp",this,"checkBounds");
dojo.connect(this.stencils,"register",this,"checkBounds");
dojo.connect(this.canvas,"resize",this,"checkBounds");
dojo.connect(this.canvas,"setZoom",this,"checkBounds");
dojo.connect(this.canvas,"onScroll",this,function(){
// _blockScroll suppresses the scroll event that our own setDimensions()
// call at the end of checkBounds() fires.
if(this._blockScroll){
this._blockScroll=false;
return;
}
_2&&clearTimeout(_2);
_2=setTimeout(dojo.hitch(this,"checkBounds"),200);
});
this._mouseHandle=this.mouse.register(this);
},{selected:false,type:"dojox.drawing.ui.dom.Pan",onKeyUp:function(_3){
// Spacebar (keyCode 32) released: leave pan mode.
if(_3.keyCode==32){
this.onSetPan(false);
}
},onKeyDown:function(_4){
// Spacebar pressed: temporarily enter pan mode.
if(_4.keyCode==32){
this.onSetPan(true);
}
},onSetPan:function(_5){
// Toggle pan mode, or force it when passed an explicit boolean. The boolean
// is inverted first so the shared toggle logic below flips it back.
if(_5===true||_5===false){
this.selected=!_5;
}
if(this.selected){
this.selected=false;
dojo.removeClass(this.domNode,"selected");
}else{
this.selected=true;
dojo.addClass(this.domNode,"selected");
}
this.mouse.setEventMode(this.selected?"pan":"");
},onPanDrag:function(_6){
// NOTE(review): x/y are computed but unused; scrolling uses _6.move instead.
var x=_6.x-_6.last.x;
var y=_6.y-_6.last.y;
this.canvas.domNode.parentNode.scrollTop-=_6.move.y;
this.canvas.domNode.parentNode.scrollLeft-=_6.move.x;
this.canvas.onScroll();
},onStencilUp:function(_7){
this.checkBounds();
},onStencilDrag:function(_8){
},checkBounds:function(){
// Recompute the union of all stencil bounding boxes (selected stencils are
// offset by the current group transform) and grow/shrink the canvas so
// everything stays reachable by scrolling. _9/_a are debug loggers that the
// build stripped to no-ops.
var _9=function(){
};
var _a=function(){
};
var t=Infinity,r=-Infinity,b=-Infinity,l=Infinity,sx=0,sy=0,dy=0,dx=0,mx=this.stencils.group?this.stencils.group.getTransform():{dx:0,dy:0},sc=this.mouse.scrollOffset(),_b=sc.left?10:0,_c=sc.top?10:0,ch=this.canvas.height,cw=this.canvas.width,z=this.canvas.zoom,_d=this.canvas.parentHeight,_e=this.canvas.parentWidth;
this.stencils.withSelected(function(m){
var o=m.getBounds();
_a("SEL BOUNDS:",o);
t=Math.min(o.y1+mx.dy,t);
r=Math.max(o.x2+mx.dx,r);
b=Math.max(o.y2+mx.dy,b);
l=Math.min(o.x1+mx.dx,l);
});
this.stencils.withUnselected(function(m){
var o=m.getBounds();
_a("UN BOUNDS:",o);
t=Math.min(o.y1,t);
r=Math.max(o.x2,r);
b=Math.max(o.y2,b);
l=Math.min(o.x1,l);
});
b*=z;
var _f=0,_10=0;
_9("Bottom test","b:",b,"z:",z,"ch:",ch,"pch:",_d,"top:",sc.top,"sy:",sy);
if(b>_d||sc.top){
_9("*bottom scroll*");
ch=Math.max(b,_d+sc.top);
sy=sc.top;
_f+=this.canvas.getScrollWidth();
}else{
if(!sy&&ch>_d){
_9("*bottom remove*");
ch=_d;
}
}
r*=z;
if(r>_e||sc.left){
cw=Math.max(r,_e+sc.left);
sx=sc.left;
_10+=this.canvas.getScrollWidth();
}else{
if(!sx&&cw>_e){
cw=_e;
}
}
cw+=_f*2;
ch+=_10*2;
// Suppress the onScroll that the resize below will trigger.
this._blockScroll=true;
this.stencils.group&&this.stencils.group.applyTransform({dx:dx,dy:dy});
this.stencils.withUnselected(function(m){
m.transformPoints({dx:dx,dy:dy});
});
this.canvas.setDimensions(cw,ch,sx,sy);
}});
dojox.drawing.ui.dom.Pan.setup={name:"dojox.drawing.ui.dom.Pan",tooltip:"Pan Tool",iconClass:"iconPan"};
// Register the tool with the drawing framework.
dojox.drawing.register(dojox.drawing.ui.dom.Pan.setup,"plugin");
}
/EasyPlot-1.0.0b2.zip/EasyPlot-1.0.0b2/easyplot/easyplot.py | import matplotlib.pyplot as plt
import matplotlib as mpl
# Warn early when Matplotlib interactive mode is off, since
# EasyPlot.redraw() only works with an interactive backend.
if not plt.isinteractive():
    print("Warning! Matplotlib interactive mode is currently OFF. It is \
recommended to use a suitable matplotlib backend and turn it \
on by calling matplotlib.pyplot.ion()")
class EasyPlot(object):
"""
Class that implements thin matplotlib wrapper for easy, reusable plotting
"""
    def __init__(self, *args, **kwargs):
        """
        Create an EasyPlot instance and (optionally) draw a first plot.

        Arguments
        =========
        *args : Passed through to matplotlib's ``plot`` -- supports
            ``plot(y)``, ``plot(x, y)`` and ``plot(x, y, 'b-o')``.
        **kwargs : Optional plot parameters, grouped as:
            figure/axes : fig, ax (must be supplied together), figsize, dpi
            line style : label, color/c, linewidth/lw, linestyle/ls, marker,
                markerfacecolor/mfc, markeredgewidth/mew, markeredgecolor/mec,
                markersize/ms, markevery/mev, alpha
            axes decoration : title, xlabel, ylabel, xlim, ylim, xscale,
                yscale, grid, colorcycle, fontsize
            legend : showlegend, fancybox, framealpha, loc, numpoints, ncol,
                markerscale, mode, bbox_to_anchor
        """
        # Defaults restored by _reset(reset=True); copied so instances never
        # mutate the shared template.
        self._default_kwargs = {'fig': None,
                                'ax': None,
                                'figsize': None,
                                'dpi': mpl.rcParams['figure.dpi'],
                                'showlegend': False,
                                'fancybox': True,
                                'loc': 'best',
                                'numpoints': 1
                                }
        # Dictionary of plot parameter aliases (short name -> full name).
        # NOTE(review): the class docstring mentions a 'cs' alias for
        # colorcycle, but it is not present here -- confirm intended set.
        self.alias_dict = {'lw': 'linewidth', 'ls': 'linestyle',
                           'mfc': 'markerfacecolor', 'mew': 'markeredgewidth',
                           'mec': 'markeredgecolor', 'ms': 'markersize',
                           'mev': 'markevery', 'c': 'color', 'fs': 'fontsize'}
        # Named parameters that are forwarded to Axes.plot().
        self.plot_kwargs = ['label', 'linewidth', 'linestyle', 'marker',
                            'markerfacecolor', 'markeredgewidth', 'markersize',
                            'markeredgecolor', 'markevery', 'alpha', 'color']
        # Named parameters that are forwarded to Axes.legend().
        self.legend_kwargs = ['fancybox', 'loc', 'framealpha', 'numpoints',
                              'ncol', 'markerscale', 'mode', 'bbox_to_anchor']
        # Parameters that apply to a single plot only and are cleared again by
        # _delete_uniqueparams() after each add_plot().
        self._uniqueparams = ['color', 'label', 'marker', 'linestyle',
                              'colorcycle']
        self._colorcycle = []
        # Mapping between plot parameter and corresponding axes method name.
        self._ax_funcs = {'xlabel': 'set_xlabel',
                          'ylabel': 'set_ylabel',
                          'xlim': 'set_xlim',
                          'ylim': 'set_ylim',
                          'title': 'set_title',
                          'colorcycle': 'set_color_cycle',
                          'grid': 'grid',
                          'xscale': 'set_xscale',
                          'yscale': 'set_yscale'}
        self.kwargs = self._default_kwargs.copy()  # Prevent mutating dictionary
        self.args = []
        self.line_list = []  # List of all Line2D items that are plotted
        self.add_plot(*args, **kwargs)
    def add_plot(self, *args, **kwargs):
        """
        Add a plot using supplied parameters and existing instance parameters.

        Creates new Figure and Axes objects if the 'fig' and 'ax' parameters
        are not supplied. Stores references to all Line2D objects plotted in
        self.line_list.

        Arguments
        =========
        *args : Supports format plot(y), plot(x, y), plot(x, y, 'b-'). x, y
            and format string are passed through for plotting
        **kwargs : Plot parameters. Refer to __init__ docstring for details
        """
        self._update(*args, **kwargs)

        # Create figure and axes if needed
        if self.kwargs['fig'] is None:  # TODO: and self.isnewargs:
            if not self.isnewargs:
                return  # Don't create fig, ax yet if no x, y data provided
            self.kwargs['fig'] = plt.figure(figsize=self.kwargs['figsize'],
                                            dpi=self.kwargs['dpi'])
            self.kwargs['ax'] = self.kwargs['fig'].gca()
            self.kwargs['fig'].add_axes(self.kwargs['ax'])

        ax, fig = self.kwargs['ax'], self.kwargs['fig']
        ax.ticklabel_format(useOffset=False)  # Prevent offset notation in plots

        # Apply axes functions if present in kwargs
        for kwarg in self.kwargs:
            if kwarg in self._ax_funcs:
                # eg: f = getattr(ax,'set_title'); f('new title')
                func = getattr(ax, self._ax_funcs[kwarg])
                func(self.kwargs[kwarg])

        # Add plot only if new args passed to this instance
        if self.isnewargs:
            # Filter instance kwargs down to those accepted by Axes.plot()
            plot_kwargs = {kwarg: self.kwargs[kwarg] for kwarg
                           in self.plot_kwargs if kwarg in self.kwargs}
            line, = ax.plot(*self.args, **plot_kwargs)
            self.line_list.append(line)

        # Display legend if required
        if self.kwargs['showlegend']:
            legend_kwargs = {kwarg: self.kwargs[kwarg] for kwarg
                             in self.legend_kwargs if kwarg in self.kwargs}
            leg = ax.legend(**legend_kwargs)
            if leg is not None:
                # NOTE(review): Legend.draggable() was removed in newer
                # Matplotlib (use set_draggable) -- confirm target version.
                leg.draggable(state=True)

        if 'fontsize' in self.kwargs:
            self.set_fontsize(self.kwargs['fontsize'])

        self._delete_uniqueparams()  # Clear unique parameters from kwargs list

        if plt.isinteractive():  # Only redraw canvas in interactive mode
            self.redraw()
def update_plot(self, **kwargs):
""""Update plot parameters (keyword arguments) and replot figure
Usage:
a = EasyPlot([1,2,3], [2,4,8], 'r-o', label='label 1')
# Update title and xlabel string and redraw plot
a.update_plot(title='Title', xlabel='xlabel')
"""
self.add_plot(**kwargs)
def new_plot(self, *args, **kwargs):
"""
Plot new plot using EasyPlot object and default plot parameters
Pass a named argument reset=True if all plotting parameters should
be reset to original defaults
"""
reset = kwargs['reset'] if 'reset' in kwargs else False
self._reset(reset=reset)
if self._colorcycle:
self.kwargs['colorcycle'] = self._colorcycle
self.add_plot(*args, **kwargs)
def iter_plot(self, x, y, mode='dict', **kwargs):
"""
Plot multiple plots by iterating through x, y and parameter lists
Arguments:
==========
x : x values. 1D List/Array, Dictionary or Numpy 2D Array
y : y values. Dictionary or 2D Python array (List of Lists where each
sub-list is one set of y-data) or Numpy 2D Array
mode : y, labels and other parameters should either be a Dictionary
or a 2D Numpy array/2D List where each row corresponds to a
single plot ['dict'|'array']
**kwargs : Plot params as defined in __init__ documentation.
Params can either be:
scalars (same value applied to all plots),
dictionaries (mode='dict', key[val] value applies to each plot)
1D Lists/Numpy Arrays (mode='array', param[index] applies to each
plot)
"""
if mode.lower() == 'dict':
for key in y:
loop_kwargs={}
for kwarg in kwargs:
try: # Check if parameter is a dictionary
loop_kwargs[kwarg] = kwargs[kwarg][key]
except:
loop_kwargs[kwarg] = kwargs[kwarg]
try:
x_loop = x[key]
except:
x_loop = x
self.add_plot(x_loop, y[key], **loop_kwargs)
elif mode.lower() == 'array':
for ind in range(len(y)):
loop_kwargs={}
for kwarg in kwargs:
# Do not iterate through tuple/string plot parameters
if isinstance(kwargs[kwarg], (basestring, tuple)):
loop_kwargs[kwarg] = kwargs[kwarg]
else:
try: # Check if parameter is a 1-D List/Array
loop_kwargs[kwarg] = kwargs[kwarg][ind]
except:
loop_kwargs[kwarg] = kwargs[kwarg]
try:
x_loop = x[ind][:]
except:
x_loop = x
self.add_plot(x_loop, y[ind], **loop_kwargs)
else:
print('Error! Incorrect mode specification. Ignoring method call')
def autoscale(self, enable=True, axis='both', tight=None):
"""Autoscale the axis view to the data (toggle).
Convenience method for simple axis view autoscaling. It turns
autoscaling on or off, and then, if autoscaling for either axis is on,
it performs the autoscaling on the specified axis or axes.
Arguments
=========
enable: [True | False | None]
axis: ['x' | 'y' | 'both']
tight: [True | False | None]
"""
ax = self.get_axes()
ax.autoscale(enable=enable, axis=axis, tight=tight)
# Reset xlim and ylim parameters to None if previously set to some value
if 'xlim' in self.kwargs and (axis=='x' or axis=='both'):
self.kwargs.pop('xlim')
if 'ylim' in self.kwargs and (axis=='y' or axis=='both'):
self.kwargs.pop('ylim')
self.redraw()
def grid(self, **kwargs):
"""Turn axes grid on or off
Call signature: grid(self, b=None, which='major', axis='both', **kwargs)
**kwargs are passed to linespec of grid lines (eg: linewidth=2)
"""
self.get_axes().grid(**kwargs)
self.redraw()
    def get_figure(self):
        """Return the figure instance of the current plot (None before first plot)."""
        return self.kwargs['fig']
    def get_axes(self):
        """Return the axes instance of the current plot (None before first plot)."""
        return self.kwargs['ax']
    def redraw(self):
        """
        Redraw plot. Use after custom user modifications of axes & fig objects.

        Only works when matplotlib interactive mode is on; otherwise a warning
        is printed and nothing is drawn.
        """
        if plt.isinteractive():
            fig = self.kwargs['fig']
            # Redraw figure if it was previously closed prior to updating it
            if not plt.fignum_exists(fig.number):
                fig.show()
            fig.canvas.draw()
        else:
            print('redraw() is unsupported in non-interactive plotting mode!')
    def set_fontsize(self, font_size):
        """Update the global font size for all plot elements.

        Note: this writes to matplotlib's global rcParams, so it affects every
        figure in the process, not just this instance.
        """
        mpl.rcParams['font.size'] = font_size
        self.redraw()
    # TODO: Implement individual (per-element) font size and font family
    # setting (axes.labelsize, legend.fontsize, tick labelsizes, ...) instead
    # of mutating the single global 'font.size' rcParam.
def _delete_uniqueparams(self):
"""Delete plot parameters that are unique per plot
Prevents unique parameters (eg: label) carrying over to future plots"""
# Store colorcycle list prior to deleting from this instance
if 'colorcycle' in self.kwargs:
self._colorcycle = self.kwargs['colorcycle']
for param in self._uniqueparams:
self.kwargs.pop(param, None)
def _update(self, *args, **kwargs):
"""Update instance variables args and kwargs with supplied values """
if args:
self.args = args # Args to be directly passed to plot command
self.isnewargs = True
else:
self.isnewargs = False
# Update self.kwargs with full parameter name of aliased plot parameter
for alias in self.alias_dict:
if alias in kwargs:
self.kwargs[self.alias_dict[alias]] = kwargs.pop(alias)
# Update kwargs dictionary
for key in kwargs:
self.kwargs[key] = kwargs[key]
def _reset(self, reset=False):
"""Reset instance variables in preparation for new plots
reset: True if current instance defaults for plotting parameters should
be reset to Class defaults"""
self.args = []
self.line_list = []
self.kwargs['fig'] = None
self.kwargs['ax'] = None
if reset:
self.kwargs = self._default_kwargs.copy() | PypiClean |
/aleksis_core-3.1.5-py3-none-any.whl/aleksis/core/migrations/0013_pdf_file.py |
import aleksis.core.models
import django.contrib.sites.managers
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Adds the PDFFile model (rendered HTML + generated PDF with an expiry
    # timestamp, owned by a Person and scoped to a Site) and registers the new
    # 'test_pdf' global permission.

    dependencies = [
        ('sites', '0002_alter_domain_unique'),
        ('core', '0012_valid_from_announcement'),
    ]

    operations = [
        # Re-declare the unmanaged GlobalPermissions options so the
        # permission tuple gains ('test_pdf', 'Can test PDF generation').
        migrations.AlterModelOptions(
            name='globalpermissions',
            options={'default_permissions': (), 'managed': False, 'permissions': (
                ('view_system_status', 'Can view system status'), ('link_persons_accounts', 'Can link persons to accounts'),
                ('manage_data', 'Can manage data'), ('impersonate', 'Can impersonate'), ('search', 'Can use search'),
                ('change_site_preferences', 'Can change site preferences'),
                ('change_person_preferences', 'Can change person preferences'),
                ('change_group_preferences', 'Can change group preferences'), ('test_pdf', 'Can test PDF generation'))},
        ),
        migrations.CreateModel(
            name='PDFFile',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('extended_data', models.JSONField(default=dict, editable=False)),
                ('expires_at', models.DateTimeField(default=aleksis.core.models.PDFFile._get_default_expiration, verbose_name='File expires at')),
                ('html', models.TextField(verbose_name='Rendered HTML')),
                ('file', models.FileField(blank=True, null=True, upload_to='pdfs/', verbose_name='Generated PDF file')),
                ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pdf_files', to='core.person', verbose_name='Owner')),
                ('site', models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, to='sites.site')),
            ],
            options={
                'verbose_name': 'PDF file',
                'verbose_name_plural': 'PDF files',
            },
            managers=[
                # Default manager filters by the current site.
                ('objects', django.contrib.sites.managers.CurrentSiteManager()),
            ],
        ),
    ]
/21cmFAST-3.3.1.tar.gz/21cmFAST-3.3.1/src/py21cmfast/plotting.py | from __future__ import annotations
import matplotlib.pyplot as plt
import numpy as np
from astropy import units as un
from astropy.cosmology import z_at_value
from matplotlib import colors
from matplotlib.ticker import AutoLocator
from typing import Optional, Union
from . import outputs
from .outputs import Coeval, LightCone
# Colormap used for 21-cm brightness-temperature slices, registered globally
# under the name "EoR" so it can be selected with cmap="EoR".
eor_colour = colors.LinearSegmentedColormap.from_list(
    "EoR",
    [
        (0, "white"),
        (0.21, "yellow"),
        (0.42, "orange"),
        (0.63, "red"),
        (0.86, "black"),
        (0.9, "blue"),
        (1, "cyan"),
    ],
)
# NOTE(review): plt.register_cmap is deprecated since Matplotlib 3.7 in
# favour of matplotlib.colormaps.register -- confirm supported versions.
plt.register_cmap(cmap=eor_colour)
def _imshow_slice(
    cube,
    slice_axis=-1,
    slice_index=0,
    fig=None,
    ax=None,
    fig_kw=None,
    cbar=True,
    cbar_horizontal=False,
    rotate=False,
    cmap="EoR",
    log: bool = False,
    **imshow_kw,
):
    """
    Plot a slice of some kind of cube.

    Parameters
    ----------
    cube : nd-array
        A 3D array of some quantity.
    slice_axis : int, optional
        The axis over which to take a slice, in order to plot.
    slice_index :
        The index of the slice.
    fig : Figure object
        An optional matplotlib figure object on which to plot
    ax : Axis object
        The matplotlib axis object on which to plot (created by default).
    fig_kw :
        Optional arguments passed to the figure construction.
    cbar : bool
        Whether to plot the colorbar
    cbar_horizontal : bool
        Whether the colorbar should be horizontal underneath the plot.
    rotate : bool
        Whether to rotate the plot vertically.
    cmap : str, optional
        Name of the colormap; the default "EoR" also forces vmin/vmax to
        (-150, 30).
    log : bool, optional
        Use a logarithmic colour normalisation (ignored if a 'norm' is passed
        in ``imshow_kw``).
    imshow_kw :
        Optional keywords to pass to :func:`maplotlib.imshow`.

    Returns
    -------
    fig, ax :
        The figure and axis objects from matplotlib.
    """
    # If no axis is passed, create a new one
    # This allows the user to add this plot into an existing grid, or alter it afterwards.
    if fig_kw is None:
        fig_kw = {}
    if ax is None and fig is None:
        fig, ax = plt.subplots(1, 1, **fig_kw)
    elif ax is None:
        ax = plt.gca()
    elif fig is None:
        fig = plt.gcf()

    plt.sca(ax)

    if slice_index >= cube.shape[slice_axis]:
        raise IndexError(
            f"slice_index is too large for that axis "
            f"(slice_index={slice_index} >= {cube.shape[slice_axis]})"
        )

    slc = np.take(cube, slice_index, axis=slice_axis)
    if not rotate:
        slc = slc.T

    if cmap == "EoR":
        imshow_kw["vmin"] = -150
        imshow_kw["vmax"] = 30

    norm_kw = {k: imshow_kw.pop(k) for k in ["vmin", "vmax"] if k in imshow_kw}
    # Pop "norm" out of imshow_kw: previously a caller-supplied norm was both
    # used here *and* left in imshow_kw, making imshow() fail with a duplicate
    # 'norm' keyword argument.
    norm = imshow_kw.pop("norm", None)
    if norm is None:
        norm = colors.LogNorm(**norm_kw) if log else colors.Normalize(**norm_kw)

    plt.imshow(slc, origin="lower", cmap=cmap, norm=norm, **imshow_kw)

    if cbar:
        cb = plt.colorbar(
            orientation="horizontal" if cbar_horizontal else "vertical", aspect=40
        )
        cb.outline.set_edgecolor(None)

    return fig, ax
def coeval_sliceplot(
    struct: outputs._OutputStruct | Coeval,
    kind: str | None = None,
    cbar_label: str | None = None,
    **kwargs,
):
    """
    Show a slice of a given coeval box.

    Parameters
    ----------
    struct : :class:`~outputs._OutputStruct` or :class:`~wrapper.Coeval` instance
        The output of a function such as `ionize_box` (a class containing several
        quantities), or `run_coeval`.
    kind : str
        The quantity within the structure to be shown. A full list of available
        options can be obtained by running ``Coeval.get_fields()``.
    cbar_label : str, optional
        A label for the colorbar. Some values of `kind` will have automatically
        chosen labels, but these can be turned off by setting ``cbar_label=''``.

    Returns
    -------
    fig, ax :
        figure and axis objects from matplotlib

    Other Parameters
    ----------------
    All other parameters are passed directly to :func:`_imshow_slice`. These
    include `slice_axis` and `slice_index`, which choose the actual slice to
    plot, optional `fig` and `ax` keywords which enable over-plotting previous
    figures, and the `imshow_kw` argument, which allows arbitrary styling of
    the plot.
    """
    # Default quantity: first field of an OutputStruct, else brightness_temp.
    if kind is None:
        if isinstance(struct, outputs._OutputStruct):
            kind = struct.fieldnames[0]
        elif isinstance(struct, Coeval):
            kind = "brightness_temp"

    try:
        cube = getattr(struct, kind)
    except AttributeError:
        raise AttributeError(
            f"The given OutputStruct does not have the quantity {kind}"
        )

    # Only brightness_temp uses the special "EoR" default colormap.
    if kind != "brightness_temp" and "cmap" not in kwargs:
        kwargs["cmap"] = "viridis"

    # Both plot axes share the box's physical extent in Mpc.
    fig, ax = _imshow_slice(cube, extent=(0, struct.user_params.BOX_LEN) * 2, **kwargs)

    slice_axis = kwargs.get("slice_axis", -1)

    # Determine which axes are being plotted.
    if slice_axis in (2, -1):
        xax = "x"
        yax = "y"
    elif slice_axis == 1:
        xax = "x"
        yax = "z"
    elif slice_axis == 0:
        xax = "y"
        yax = "z"
    else:
        raise ValueError("slice_axis should be between -1 and 2")

    # Now put on the decorations.
    ax.set_xlabel(f"{xax}-axis [Mpc]")
    ax.set_ylabel(f"{yax}-axis [Mpc]")

    # NOTE(review): relies on the private Figure._gci() to fetch the current
    # image's colorbar -- fragile across Matplotlib versions; confirm.
    cbar = fig._gci().colorbar
    if cbar is not None:
        if cbar_label is None:
            if kind == "brightness_temp":
                cbar_label = r"Brightness Temperature, $\delta T_B$ [mK]"
            elif kind == "xH_box":
                cbar_label = r"Neutral fraction"
        cbar.ax.set_ylabel(cbar_label)

    return fig, ax
def lightcone_sliceplot(
    lightcone: LightCone,
    kind: str = "brightness_temp",
    lightcone2: LightCone = None,
    vertical: bool = False,
    xlabel: str | None = None,
    ylabel: str | None = None,
    cbar_label: str | None = None,
    zticks: str = "redshift",
    fig: plt.Figure | None = None,
    ax: plt.Axes | None = None,
    **kwargs,
):
    """Create a 2D plot of a slice through a lightcone.

    Parameters
    ----------
    lightcone : :class:`~py21cmfast.wrapper.Lightcone`
        The lightcone object to plot
    kind : str, optional
        The attribute of the lightcone to plot. Must be an array.
    lightcone2 : str, optional
        If provided, plot the _difference_ of the selected attribute between the two
        lightcones.
    vertical : bool, optional
        Whether to plot the redshift in the vertical direction.
    xlabel, ylabel : str, optional
        Labels for the plot axes. If None, sensible defaults are derived from the
        slice orientation; pass an empty string to suppress a label.
    cbar_label : str, optional
        A label for the colorbar. Some quantities have automatically chosen labels, but
        these can be removed by setting `cbar_label=''`.
    zticks : str, optional
        Defines the co-ordinates of the ticks along the redshift axis.
        Can be "redshift" (default), "frequency", "distance" (which starts at zero
        for the lowest redshift) or the name of any function in an astropy cosmology
        that is purely a function of redshift.
    fig, ax : optional
        Existing matplotlib figure/axis to draw onto.
    kwargs :
        Passed through to ``imshow()``.

    Returns
    -------
    fig :
        The matplotlib Figure object
    ax :
        The matplotlib Axis object onto which the plot was drawn.
    """
    slice_axis = kwargs.pop("slice_axis", 0)
    if slice_axis <= -2 or slice_axis >= 3:
        raise ValueError(f"slice_axis should be between -1 and 2 (got {slice_axis})")
    # The line-of-sight (redshift) axis is only shown when slicing along a
    # transverse dimension (slice_axis 0 or 1).
    z_axis = ("y" if vertical else "x") if slice_axis in (0, 1) else None
    # Dictionary mapping each plot axis ('x'/'y') to a lightcone array dimension.
    axis_dct = {
        "x": 2 if z_axis == "x" else [1, 0, 0][slice_axis],
        "y": 2 if z_axis == "y" else [1, 0, 1][slice_axis],
    }
    if fig is None and ax is None:
        # Scale the figure to the lightcone dimensions, leaving room for a
        # horizontal colorbar when one will be drawn.
        fig, ax = plt.subplots(
            1,
            1,
            figsize=(
                lightcone.shape[axis_dct["x"]] * 0.015 + 0.5,
                lightcone.shape[axis_dct["y"]] * 0.015
                + (2.5 if kwargs.get("cbar", True) else 0.05),
            ),
        )
    elif fig is None:
        # BUGFIX: Axes has no _gci(); the owning figure is the .figure attribute.
        fig = ax.figure
    elif ax is None:
        # BUGFIX: fig.get_axes() returns a *list*; use the current axis instead.
        ax = fig.gca()
    # Get x,y labels if they're not the redshift axis.
    if xlabel is None:
        xlabel = (
            None if axis_dct["x"] == 2 else "{}-axis [Mpc]".format("xy"[axis_dct["x"]])
        )
    if ylabel is None:
        ylabel = (
            None if axis_dct["y"] == 2 else "{}-axis [Mpc]".format("xy"[axis_dct["y"]])
        )
    extent = (
        0,
        lightcone.lightcone_dimensions[axis_dct["x"]],
        0,
        lightcone.lightcone_dimensions[axis_dct["y"]],
    )
    if lightcone2 is None:
        fig, ax = _imshow_slice(
            getattr(lightcone, kind),
            extent=extent,
            slice_axis=slice_axis,
            rotate=not vertical,
            cbar_horizontal=not vertical,
            # BUGFIX: must *pop* cmap, otherwise it is passed twice (here and
            # via **kwargs), raising TypeError whenever the caller supplies it.
            cmap=kwargs.pop("cmap", "EoR" if kind == "brightness_temp" else "viridis"),
            fig=fig,
            ax=ax,
            **kwargs,
        )
    else:
        # Plot the difference between the two lightcones on a symmetric,
        # diverging color scale centred on zero.
        d = getattr(lightcone, kind) - getattr(lightcone2, kind)
        fig, ax = _imshow_slice(
            d,
            extent=extent,
            slice_axis=slice_axis,
            rotate=not vertical,
            cbar_horizontal=not vertical,
            cmap=kwargs.pop("cmap", "bwr"),
            vmin=-np.abs(d.max()),
            vmax=np.abs(d.max()),
            fig=fig,
            ax=ax,
            **kwargs,
        )
    if z_axis:
        zlabel = _set_zaxis_ticks(ax, lightcone, zticks, z_axis)
    if ylabel != "":
        ax.set_ylabel(ylabel or zlabel)
    if xlabel != "":
        ax.set_xlabel(xlabel or zlabel)
    # The colorbar is attached to the most recent image; guard against plots
    # drawn without one (e.g. cbar=False).
    img = fig._gci()
    cbar = img.colorbar if img is not None else None
    if cbar is not None:
        if cbar_label is None:
            if kind == "brightness_temp":
                cbar_label = r"Brightness Temperature, $\delta T_B$ [mK]"
            elif kind == "xH":
                cbar_label = r"Neutral fraction"
        if vertical:
            cbar.ax.set_ylabel(cbar_label)
        else:
            cbar.ax.set_xlabel(cbar_label)
    return fig, ax
def _set_zaxis_ticks(ax, lightcone, zticks, z_axis):
    """Place ticks along the line-of-sight axis of a lightcone plot.

    The axis itself is drawn in comoving-distance units, so "nice" tick values
    chosen in the requested coordinate (redshift, frequency, or any cosmology
    function of redshift) are converted back to distances before being placed.

    Parameters
    ----------
    ax : matplotlib Axes on which to set the ticks.
    lightcone : LightCone providing redshifts, distances and the cosmology.
    zticks : str, coordinate in which tick values are chosen (see caller docs).
    z_axis : 'x' or 'y', which plot axis is the line-of-sight axis.

    Returns
    -------
    zlabel : str
        The label to use for the line-of-sight axis.
    """
    if zticks != "distance":
        # Let matplotlib choose round tick values in the requested coordinate.
        loc = AutoLocator()
        # Get redshift ticks.
        lc_z = lightcone.lightcone_redshifts
        if zticks == "redshift":
            coords = lc_z
        elif zticks == "frequency":
            # 21-cm rest-frame frequency (~1420 MHz), redshifted by (1 + z).
            coords = 1420 / (1 + lc_z) * un.MHz
        else:
            # Any other name is looked up as a method of the astropy cosmology,
            # e.g. "comoving_distance" or "lookback_time".
            try:
                coords = getattr(lightcone.cosmo_params.cosmo, zticks)(lc_z)
            except AttributeError:
                raise AttributeError(f"zticks '{zticks}' is not a cosmology function.")
        zlabel = " ".join(z.capitalize() for z in zticks.split("_"))
        units = getattr(coords, "unit", None)
        if units:
            zlabel += f" [{str(coords.unit)}]"
            coords = coords.value
        ticks = loc.tick_values(coords.min(), coords.max())
        # Drop ticks falling (just) outside the data range; the small factors
        # allow for floating-point slop at the boundaries.
        if ticks.min() < coords.min() / 1.00001:
            ticks = ticks[1:]
        if ticks.max() > coords.max() * 1.00001:
            ticks = ticks[:-1]
        if coords[1] < coords[0]:
            # Coordinate decreases along the axis (e.g. frequency vs distance):
            # reverse the tick order to match.
            ticks = ticks[::-1]
        # Convert tick values back to redshift ...
        if zticks == "redshift":
            z_ticks = ticks
        elif zticks == "frequency":
            # Invert f = 1420 / (1 + z).
            z_ticks = 1420 / ticks - 1
        else:
            # Numerically invert the cosmology function at each tick value.
            z_ticks = [
                z_at_value(getattr(lightcone.cosmo_params.cosmo, zticks), z * units)
                for z in ticks
            ]
        # ... and then to comoving distance, measured from the near edge of
        # the lightcone.
        d_ticks = (
            lightcone.cosmo_params.cosmo.comoving_distance(z_ticks).value
            - lightcone.lightcone_distances[0]
        )
        getattr(ax, f"set_{z_axis}ticks")(d_ticks)
        getattr(ax, f"set_{z_axis}ticklabels")(ticks)
    else:
        zlabel = "Line-of-Sight Distance [Mpc]"
    return zlabel
def plot_global_history(
    lightcone: LightCone,
    kind: str | None = None,
    ylabel: str | None = None,
    ylog: bool = False,
    ax: plt.Axes | None = None,
):
    """
    Plot the global history of a given quantity from a lightcone.

    Parameters
    ----------
    lightcone : :class:`~LightCone` instance
        The lightcone containing the quantity to plot.
    kind : str, optional
        The quantity to plot. Must be in the `global_quantities` dict in the lightcone.
        By default, will choose the first entry in the dict.
    ylabel : str, optional
        A y-label for the plot. If None, will use ``kind``.
    ylog : bool, optional
        Whether to use a logarithmic y-axis.
    ax : Axes, optional
        The matplotlib Axes object on which to plot. Otherwise, created.

    Returns
    -------
    fig, ax :
        The matplotlib figure and axis on which the history was drawn.
    """
    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=(7, 4))
    else:
        # BUGFIX: Axes has no _gci(); the owning figure is the .figure attribute.
        fig = ax.figure
    if kind is None:
        # Default to the first stored global quantity.
        kind = list(lightcone.global_quantities.keys())[0]
    assert (
        kind in lightcone.global_quantities
        or hasattr(lightcone, "global_" + kind)
        or (kind.startswith("global_") and hasattr(lightcone, kind))
    ), f"{kind} is not a known global quantity of the lightcone"
    if kind in lightcone.global_quantities:
        value = lightcone.global_quantities[kind]
    elif kind.startswith("global_"):
        # BUGFIX: was ``kind.startswith("global)")`` — a typo that made this
        # branch unreachable for names like "global_xH".
        value = getattr(lightcone, kind)
    else:
        value = getattr(lightcone, "global_" + kind)
    ax.plot(lightcone.node_redshifts, value)
    ax.set_xlabel("Redshift")
    if ylabel is None:
        ylabel = kind
    if ylabel:
        ax.set_ylabel(ylabel)
    if ylog:
        ax.set_yscale("log")
    return fig, ax
from __future__ import division
import os
import sys
from os.path import getmtime
from agouti_pkg.six import PY2, PY3, string_types, integer_types
from agouti_pkg.six.moves import zip_longest
try:
from collections import OrderedDict
except ImportError: #python 2.6
from ordereddict import OrderedDict
from collections import namedtuple
import re
import string
import warnings
from math import ceil
from threading import Lock
if sys.version_info > (3, ):
    buffer = memoryview  # Python 3 removed the ``buffer`` builtin; memoryview is its stand-in here.
# Runs of nucleotide characters, including IUPAC ambiguity codes, upper or lower case.
dna_bases = re.compile(r'([ACTGNactgnYRWSKMDVHBXyrwskmdvhbx]+)')
__version__ = '0.5.5.2'  # version of this vendored pyfaidx package
class KeyFunctionError(ValueError):
    """Raised if the ``key_function`` argument is invalid (e.g. does not return a string)."""
class FastaIndexingError(Exception):
    """Raised if we encounter malformed FASTA (e.g. inconsistent line lengths) that prevents indexing."""
class IndexNotFoundError(IOError):
    """Raised if read_fai cannot open the index (.fai) file."""
class FastaNotFoundError(IOError):
    """Raised if the FASTA file itself cannot be opened for reading."""
class FetchError(IndexError):
    """Raised if a request to fetch a FASTA sequence cannot be fulfilled (unknown name or bad coordinates)."""
class BedError(ValueError):
    """Raised to indicate a malformed BED entry."""
class RegionError(Exception):
    # This exception class is currently unused, but has been retained for
    # backwards compatibility with callers that may still catch it.
    """A region error occurred."""
class UnsupportedCompressionFormat(IOError):
    """
    Raised when a FASTA file is given with a recognized but unsupported
    compression extension (only BGZF-compressed FASTA is supported).
    """
class Sequence(object):
    """
    A named sequence, optionally carrying its genomic coordinates.

    name = FASTA entry name
    seq = FASTA sequence
    start, end = coordinates of subsequence (optional)
    comp = boolean switch for complement property
    """

    def __init__(self, name='', seq='', start=None, end=None, comp=False):
        self.name = name
        self.seq = seq
        self.start = start
        self.end = end
        self.comp = comp
        assert isinstance(name, string_types)
        assert isinstance(seq, string_types)

    def __getitem__(self, n):
        """ Returns a sliced version of Sequence
        >>> x = Sequence(name='chr1', seq='ATCGTA', start=1, end=6)
        >>> x
        >chr1:1-6
        ATCGTA
        >>> x[:3]
        >chr1:1-3
        ATC
        >>> x[3:]
        >chr1:4-6
        GTA
        >>> x[1:-1]
        >chr1:2-5
        TCGT
        >>> x[::-1]
        >chr1:6-1
        ATGCTA
        >>> x[::-3]
        >chr1
        AC
        >>> x = Sequence(name='chr1', seq='ATCGTA', start=0, end=6)
        >>> x
        >chr1:0-6
        ATCGTA
        >>> x[:3]
        >chr1:0-3
        ATC
        >>> x[3:]
        >chr1:3-6
        GTA
        >>> x[1:-1]
        >chr1:1-5
        TCGT
        >>> x[::-1]
        >chr1:6-0
        ATGCTA
        >>> x[::-3]
        >chr1
        AC
        """
        # Detect the coordinate system from the relationship between the stored
        # coordinates and the sequence length: len == |end - start| + 1 means
        # one-based inclusive, len == |end - start| means zero-based half-open.
        if self.start is None or self.end is None:
            correction_factor = 0
        elif len(
                self.seq
        ) == abs(self.end - self.start) + 1:  # one-based, end-inclusive
            correction_factor = -1
        elif len(self.seq) == abs(self.end - self.start):  # zero-based, end-exclusive
            correction_factor = 0
        elif len(self.seq) != abs(self.end - self.start):
            raise ValueError(
                "Coordinates (Sequence.start=%s and Sequence.end=%s) imply a different length than Sequence.seq (len=%s). Did you modify Sequence.seq?"
                % (self.start, self.end, len(self.seq)))
        if isinstance(n, slice):
            slice_start, slice_stop, slice_step = n.indices(len(self))
            if self.start is None or self.end is None:  # there should never be self.start != self.end == None
                start = None
                end = None
                return self.__class__(self.name, self.seq[n], start, end,
                                      self.comp)
            self_end, self_start = (self.end, self.start)
            if abs(slice_step) > 1:
                # Coordinates are meaningless for strided slices.
                start = None
                end = None
            elif slice_step == -1:  # flip the coordinates when we reverse
                if slice_stop == -1:
                    slice_stop = 0
                start = self_end - slice_stop
                end = self_start + slice_stop
            else:
                start = self_start + slice_start
                end = self_start + slice_stop + correction_factor
            return self.__class__(self.name, self.seq[n], start, end,
                                  self.comp)
        elif isinstance(n, integer_types):
            if n < 0:
                n = len(self) + n
            if self.start:
                return self.__class__(self.name, self.seq[n], self.start + n,
                                      self.start + n, self.comp)
            else:
                # BUGFIX: ``self.comp`` was previously passed positionally,
                # landing in the ``start`` parameter; pass it by keyword.
                return self.__class__(self.name, self.seq[n], comp=self.comp)

    def __str__(self):
        return self.seq

    def __neg__(self):
        """ Returns the reverse compliment of sequence
        >>> x = Sequence(name='chr1', seq='ATCGTA', start=1, end=6)
        >>> x
        >chr1:1-6
        ATCGTA
        >>> y = -x
        >>> y
        >chr1:6-1 (complement)
        TACGAT
        >>> -y
        >chr1:1-6
        ATCGTA
        """
        return self[::-1].complement

    def __repr__(self):
        return '\n'.join([''.join(['>', self.fancy_name]), self.seq])

    def __len__(self):
        """
        >>> len(Sequence('chr1', 'ACT'))
        3
        """
        return len(self.seq)

    @property
    def fancy_name(self):
        """ Return the fancy name for the sequence, including start, end, and complementation.
        >>> x = Sequence(name='chr1', seq='ATCGTA', start=1, end=6, comp=True)
        >>> x.fancy_name
        'chr1:1-6 (complement)'
        """
        name = self.name
        if self.start is not None and self.end is not None:
            name = ':'.join([name, '-'.join([str(self.start), str(self.end)])])
        if self.comp:
            name += ' (complement)'
        return name

    @property
    def long_name(self):
        """ DEPRECATED: Use fancy_name instead.
        Return the fancy name for the sequence, including start, end, and complementation.
        >>> x = Sequence(name='chr1', seq='ATCGTA', start=1, end=6, comp=True)
        >>> x.long_name
        'chr1:1-6 (complement)'
        """
        msg = "The `Sequence.long_name` property is deprecated, and will be removed in future versions. Please use `Sequence.fancy_name` instead."
        warnings.warn(msg, DeprecationWarning, stacklevel=2)
        return self.fancy_name

    @property
    def complement(self):
        """ Returns the compliment of self.
        >>> x = Sequence(name='chr1', seq='ATCGTA')
        >>> x.complement
        >chr1 (complement)
        TAGCAT
        """
        comp = self.__class__(
            self.name, complement(self.seq), start=self.start, end=self.end)
        comp.comp = not self.comp  # complementing twice restores the original
        return comp

    @property
    def reverse(self):
        """ Returns the reverse of self.
        >>> x = Sequence(name='chr1', seq='ATCGTA')
        >>> x.reverse
        >chr1
        ATGCTA
        """
        return self[::-1]

    @property
    def orientation(self):
        """ get the orientation forward=1, reverse=-1
        >>> x = Sequence(name='chr1', seq='ATCGTA', start=1, end=6)
        >>> x.orientation
        1
        >>> x.complement.orientation is None
        True
        >>> x[::-1].orientation is None
        True
        >>> x = -x
        >>> x.orientation
        -1
        """
        # NOTE(review): assumes start/end are set; comparing None raises
        # TypeError on Python 3 — callers appear to only use this with
        # coordinate-carrying sequences.
        if self.start < self.end and not self.comp:
            return 1
        elif self.start > self.end and self.comp:
            return -1
        else:
            return None

    @property
    def gc(self):
        """ Return the GC content of seq as a float
        >>> x = Sequence(name='chr1', seq='ATCGTA')
        >>> y = round(x.gc, 2)
        >>> y == 0.33
        True
        """
        g = self.seq.count('G')
        g += self.seq.count('g')
        c = self.seq.count('C')
        c += self.seq.count('c')
        return (g + c) / len(self.seq)
class IndexRecord(
        namedtuple('IndexRecord',
                   ['rlen', 'offset', 'lenc', 'lenb', 'bend', 'prev_bend'])):
    """One .fai index record: sequence length, byte offset and line layout."""
    __slots__ = ()

    def __getitem__(self, key):
        # Support attribute-style string keys in addition to positional access.
        if type(key) == str:
            return getattr(self, key)
        return tuple.__getitem__(self, key)

    def __str__(self):
        # Only the first four fields belong on an on-disk .fai line.
        fields = (self.rlen, self.offset, self.lenc, self.lenb)
        return "{0:n}\t{1:n}\t{2:n}\t{3:n}\n".format(*fields)

    def __len__(self):
        # The "length" of a record is the length of the sequence it describes.
        return self.rlen
class Faidx(object):
    """ A python implementation of samtools faidx FASTA indexing """

    def __init__(self,
                 filename,
                 default_seq=None,
                 key_function=lambda x: x,
                 as_raw=False,
                 strict_bounds=False,
                 read_ahead=None,
                 mutable=False,
                 split_char=None,
                 duplicate_action="stop",
                 filt_function=lambda x: True,
                 one_based_attributes=True,
                 read_long_names=False,
                 sequence_always_upper=False,
                 rebuild=True,
                 build_index=True):
        """
        filename: name of fasta file
        key_function: optional callback function which should return a unique
          key for the self.index dictionary when given rname.
        as_raw: optional parameter to specify whether to return sequences as a
          Sequence() object or as a raw string.
          Default: False (i.e. return a Sequence() object).
        """
        self.filename = filename
        # Choose the file opener: block-gzip (.bgz/.gz) requires BioPython's
        # bgzf reader; .bz2/.zip are not seekable and are rejected outright.
        if filename.lower().endswith('.bgz') or filename.lower().endswith(
                '.gz'):
            # Only try to import Bio if we actually need the bgzf reader.
            try:
                from Bio import bgzf
                from Bio import __version__ as bgzf_version
                from distutils.version import LooseVersion
                if LooseVersion(bgzf_version) < LooseVersion('1.73'):
                    raise ImportError
            except ImportError:
                raise ImportError(
                    "BioPython >= 1.73 must be installed to read block gzip files.")
            else:
                self._fasta_opener = bgzf.open
                self._bgzf = True
        elif filename.lower().endswith('.bz2') or filename.lower().endswith(
                '.zip'):
            raise UnsupportedCompressionFormat(
                "Compressed FASTA is only supported in BGZF format. Use "
                "bgzip to compresss your FASTA.")
        else:
            self._fasta_opener = open
            self._bgzf = False
        try:
            self.file = self._fasta_opener(filename, 'r+b'
                                           if mutable else 'rb')
        except (ValueError, IOError) as e:
            if str(e).find('BGZF') > -1:
                raise UnsupportedCompressionFormat(
                    "Compressed FASTA is only supported in BGZF format. Use "
                    "the samtools bgzip utility (instead of gzip) to "
                    "compress your FASTA.")
            else:
                raise FastaNotFoundError(
                    "Cannot read FASTA file %s" % filename)
        self.indexname = filename + '.fai'
        self.read_long_names = read_long_names
        self.key_function = key_function
        # Best-effort sanity check that key_function returns a string; any
        # exception (including one raised by the callback on this synthetic
        # input) is deliberately swallowed so a quirky callback is not fatal.
        try:
            key_fn_test = self.key_function(
                "TestingReturnType of_key_function")
            if not isinstance(key_fn_test, string_types):
                raise KeyFunctionError(
                    "key_function argument should return a string, not {0}".
                    format(type(key_fn_test)))
        except Exception as e:
            pass
        self.filt_function = filt_function
        assert duplicate_action in ("stop", "first", "last", "longest",
                                    "shortest", "drop")
        self.duplicate_action = duplicate_action
        self.as_raw = as_raw
        self.default_seq = default_seq
        if self._bgzf and self.default_seq is not None:
            raise FetchError(
                "The default_seq argument is not supported with using BGZF compression. Please decompress your FASTA file and try again."
            )
        if self._bgzf:
            # BGZF offsets are virtual, so out-of-bounds clamping is unsafe.
            self.strict_bounds = True
        else:
            self.strict_bounds = strict_bounds
        self.split_char = split_char
        self.one_based_attributes = one_based_attributes
        self.sequence_always_upper = sequence_always_upper
        self.index = OrderedDict()
        self.lock = Lock()
        # Read-ahead buffer caching the most recently fetched region.
        self.buffer = dict((('seq', None), ('name', None), ('start', None),
                            ('end', None)))
        if not read_ahead or isinstance(read_ahead, integer_types):
            self.read_ahead = read_ahead
        elif not isinstance(read_ahead, integer_types):
            raise ValueError("read_ahead value must be int, not {0}".format(
                type(read_ahead)))
        self.mutable = mutable
        with self.lock:  # lock around index generation so only one thread calls method
            try:
                if os.path.exists(self.indexname) and getmtime(
                        self.indexname) >= getmtime(self.filename):
                    # Index exists and is up to date.
                    self.read_fai()
                elif os.path.exists(self.indexname) and getmtime(
                        self.indexname) < getmtime(
                            self.filename) and not rebuild:
                    # Stale index, but rebuilding was disabled: use it, loudly.
                    self.read_fai()
                    warnings.warn(
                        "Index file {0} is older than FASTA file {1}.".format(
                            self.indexname, self.filename), RuntimeWarning)
                elif build_index:
                    self.build_index()
                    self.read_fai()
                else:
                    self.read_fai()
            except FastaIndexingError:
                # A partially-written index is useless; remove it.
                os.remove(self.indexname)
                self.file.close()
                raise
            except Exception:
                # Handle potential exceptions other than 'FastaIndexingError'
                self.file.close()
                raise

    def __contains__(self, region):
        """True when (name, start, end) lies entirely within the read-ahead buffer."""
        if not self.buffer['name']:
            return False
        name, start, end = region
        if self.buffer['name'] == name and self.buffer['start'] <= start and self.buffer['end'] >= end:
            return True
        else:
            return False

    def __repr__(self):
        return 'Faidx("%s")' % (self.filename)

    def _index_as_string(self):
        """ Returns the string representation of the index as iterable """
        for k, v in self.index.items():
            yield '\t'.join([k, str(v)])

    def read_fai(self):
        """Populate self.index from the on-disk .fai file."""
        try:
            with open(self.indexname) as index:
                prev_bend = 0
                drop_keys = []
                for line in index:
                    line = line.rstrip()
                    rname, rlen, offset, lenc, lenb = line.split('\t')
                    rlen, offset, lenc, lenb = map(int,
                                                   (rlen, offset, lenc, lenb))
                    # Total newline bytes inside the record's sequence lines
                    # (true division via ``from __future__ import division``).
                    newlines = int(ceil(rlen / lenc) * (lenb - lenc))
                    bend = offset + newlines + rlen
                    rec = IndexRecord(rlen, offset, lenc, lenb, bend,
                                      prev_bend)
                    if self.read_long_names:
                        rname = self._long_name_from_index_record(rec)
                    if self.split_char:
                        rname = filter(self.filt_function,
                                       self.key_function(rname).split(
                                           self.split_char))
                    else:
                        # filter must act on an iterable
                        rname = filter(self.filt_function,
                                       [self.key_function(rname)])
                    for key in rname:  # mdshw5/pyfaidx/issues/64
                        if key in self.index:
                            # Resolve duplicate names per duplicate_action.
                            if self.duplicate_action == "stop":
                                raise ValueError('Duplicate key "%s"' % key)
                            elif self.duplicate_action == "first":
                                continue
                            elif self.duplicate_action == "last":
                                self.index[key] = rec
                            elif self.duplicate_action == "longest":
                                if len(rec) > len(self.index[key]):
                                    self.index[key] = rec
                            elif self.duplicate_action == "shortest":
                                if len(rec) < len(self.index[key]):
                                    self.index[key] = rec
                            elif self.duplicate_action == "drop":
                                if key not in drop_keys:
                                    drop_keys.append(key)
                        else:
                            self.index[key] = rec
                    prev_bend = bend
                # "drop" removes every record whose name was duplicated.
                for dup in drop_keys:
                    self.index.pop(dup, None)
        except IOError:
            raise IndexNotFoundError(
                "Could not read index file %s" % self.indexname)

    def build_index(self):
        """Scan the FASTA file and write a .fai index for it to disk."""
        try:
            with self._fasta_opener(self.filename, 'rb') as fastafile:
                with open(self.indexname, 'w') as indexfile:
                    rname = None  # reference sequence name
                    offset = 0  # binary offset of end of current line
                    rlen = 0  # reference character length
                    blen = None  # binary line length (includes newline)
                    clen = None  # character line length
                    bad_lines = []  # lines > || < than blen
                    thisoffset = offset
                    valid_entry = False
                    lastline = None
                    for i, line in enumerate(fastafile):
                        line_blen = len(line)
                        line = line.decode()
                        line_clen = len(line.rstrip('\n\r'))
                        lastline = i
                        # write an index line
                        if line[0] == '>':
                            valid_entry = check_bad_lines(
                                rname, bad_lines, i - 1)
                            if valid_entry and i > 0:
                                indexfile.write(
                                    "{0}\t{1:d}\t{2:d}\t{3:d}\t{4:d}\n".format(
                                        rname, rlen, thisoffset, clen, blen))
                            elif not valid_entry:
                                raise FastaIndexingError(
                                    "Line length of fasta"
                                    " file is not "
                                    "consistent! "
                                    "Inconsistent line found in >{0} at "
                                    "line {1:n}.".format(
                                        rname, bad_lines[0][0] + 1))
                            blen = None
                            rlen = 0
                            clen = None
                            bad_lines = []
                            try:  # must catch empty deflines (actually these might be okay: https://github.com/samtools/htslib/pull/258)
                                rname = line.rstrip('\n\r')[1:].split()[
                                    0]  # duplicates are detected with read_fai
                            except IndexError:
                                raise FastaIndexingError(
                                    "Bad sequence name %s at line %s." %
                                    (line.rstrip('\n\r'), str(i)))
                            offset += line_blen
                            thisoffset = fastafile.tell(
                            ) if self._bgzf else offset
                        else:  # check line and advance offset
                            if not blen:
                                blen = line_blen
                            if not clen:
                                clen = line_clen
                            # only one short line should be allowed
                            # before we hit the next header, and it
                            # should be the last line in the entry
                            if line_blen != blen or line_blen == 1:
                                bad_lines.append((i, line_blen))
                            offset += line_blen
                            rlen += line_clen
                    # check that we find at least 1 valid FASTA record
                    if not valid_entry:
                        raise FastaIndexingError(
                            "The FASTA file %s does not contain a valid sequence. "
                            "Check that sequence definition lines start with '>'." % self.filename)
                    # write the final index line, if there is one.
                    if lastline is not None:
                        valid_entry = check_bad_lines(
                            rname, bad_lines, lastline
                        )  # advance index since we're at the end of the file
                        if valid_entry:
                            indexfile.write(
                                "{0:s}\t{1:d}\t{2:d}\t{3:d}\t{4:d}\n".format(
                                    rname, rlen, thisoffset, clen, blen))
                        else:
                            raise FastaIndexingError(
                                "Line length of fasta"
                                " file is not "
                                "consistent! "
                                "Inconsistent line found in >{0} at "
                                "line {1:n}.".format(rname,
                                                     bad_lines[0][0] + 1))
        except (IOError, FastaIndexingError) as e:
            if isinstance(e, IOError):
                raise IOError(
                    "%s may not be writable. Please use Fasta(rebuild=False), Faidx(rebuild=False) or faidx --no-rebuild."
                    % self.indexname)
            elif isinstance(e, FastaIndexingError):
                raise e

    def write_fai(self):
        """Write the current in-memory index back out to the .fai file."""
        with self.lock:
            with open(self.indexname, 'w') as outfile:
                # BUGFIX: _index_as_string is a generator *method* and must be
                # called; iterating the bound method object raises TypeError.
                for line in self._index_as_string():
                    outfile.write(line)

    def from_buffer(self, start, end):
        """Slice the requested region out of the read-ahead buffer."""
        i_start = start - self.buffer['start']  # want [0, 1) coordinates from [1, 1] coordinates
        i_end = end - self.buffer['start'] + 1
        return self.buffer['seq'][i_start:i_end]

    def fill_buffer(self, name, start, end):
        """Populate the read-ahead buffer; fetch errors leave it untouched."""
        try:
            seq = self.from_file(name, start, end)
            self.buffer['seq'] = seq
            self.buffer['start'] = start
            self.buffer['end'] = end
            self.buffer['name'] = name
        except FetchError:
            pass

    def fetch(self, name, start, end):
        """Fetch [start, end] (1-based, inclusive), via the buffer if possible."""
        if self.read_ahead and not (name, start, end) in self:
            self.fill_buffer(name, start, end + self.read_ahead)
        if (name, start, end) in self:
            seq = self.from_buffer(start, end)
        else:
            seq = self.from_file(name, start, end)
        return self.format_seq(seq, name, start, end)

    def from_file(self, rname, start, end, internals=False):
        """ Fetch the sequence ``[start:end]`` from ``rname`` using 1-based coordinates
        1. Count newlines before start
        2. Count newlines to end
        3. Difference of 1 and 2 is number of newlines in [start:end]
        4. Seek to start position, taking newlines into account
        5. Read to end position, return sequence
        """
        assert start == int(start)
        assert end == int(end)
        try:
            i = self.index[rname]
        except KeyError:
            raise FetchError("Requested rname {0} does not exist! "
                             "Please check your FASTA file.".format(rname))
        start0 = start - 1  # make coordinates [0,1)
        if start0 < 0:
            raise FetchError(
                "Requested start coordinate must be greater than 1.")
        seq_len = end - start0
        # Calculate offset (https://github.com/samtools/htslib/blob/20238f354894775ed22156cdd077bc0d544fa933/faidx.c#L398)
        newlines_before = int(
            (start0 - 1) / i.lenc * (i.lenb - i.lenc)) if start0 > 0 else 0
        newlines_to_end = int(end / i.lenc * (i.lenb - i.lenc))
        newlines_inside = newlines_to_end - newlines_before
        seq_blen = newlines_inside + seq_len
        bstart = i.offset + newlines_before + start0
        if seq_blen < 0 and self.strict_bounds:
            raise FetchError("Requested coordinates start={0:n} end={1:n} are "
                             "invalid.\n".format(start, end))
        elif end > i.rlen and self.strict_bounds:
            raise FetchError("Requested end coordinate {0:n} outside of {1}. "
                             "\n".format(end, rname))
        with self.lock:
            if self._bgzf:  # We can't add to virtual offsets, so we need to read from the beginning of the record and trim the beginning if needed
                self.file.seek(i.offset)
                chunk = start0 + newlines_before + newlines_inside + seq_len
                chunk_seq = self.file.read(chunk).decode()
                seq = chunk_seq[start0 + newlines_before:]
            else:
                self.file.seek(bstart)
                # Clamp a range running past the record when bounds are lax.
                if bstart + seq_blen > i.bend and not self.strict_bounds:
                    seq_blen = i.bend - bstart
                if seq_blen > 0:
                    seq = self.file.read(seq_blen).decode()
                else:
                    # BUGFIX: zero-length (or fully clamped) requests return
                    # the empty string; previously ``seq`` was left unbound
                    # when seq_blen == 0 with strict_bounds=True.
                    seq = ''
        if not internals:
            return seq.replace('\n', '').replace('\r', '')
        else:
            return (seq, locals())

    def format_seq(self, seq, rname, start, end):
        """Wrap a raw fetched string as a Sequence (or raw str), applying padding/case options."""
        start0 = start - 1
        if len(
                seq
        ) < end - start0 and self.default_seq:  # Pad missing positions with default_seq
            pad_len = end - start0 - len(seq)
            seq = ''.join([seq, pad_len * self.default_seq])
        else:  # Return less than requested range
            end = start0 + len(seq)
        if self.sequence_always_upper:
            seq = seq.upper()
        if not self.one_based_attributes:
            start = start0
        if self.as_raw:
            return seq
        else:
            return Sequence(
                name=rname, start=int(start), end=int(end), seq=seq)

    def to_file(self, rname, start, end, seq):
        """ Write sequence in region from start-end, overwriting current
        contents of the FASTA file. """
        if not self.mutable:
            raise IOError(
                "Write attempted for immutable Faidx instance. Set mutable=True to modify original FASTA."
            )
        file_seq, internals = self.from_file(rname, start, end, internals=True)
        with self.lock:
            if len(seq) != len(file_seq) - internals['newlines_inside']:
                raise IOError(
                    "Specified replacement sequence needs to have the same length as original."
                )
            elif len(seq) == len(file_seq) - internals['newlines_inside']:
                line_len = internals['i'].lenc
                # Preserve whatever newline convention the file already uses.
                if '\r\n' in file_seq:
                    newline_char = '\r\n'
                elif '\r' in file_seq:
                    newline_char = '\r'
                else:
                    newline_char = '\n'
                self.file.seek(internals['bstart'])
                if internals['newlines_inside'] == 0:
                    self.file.write(seq.encode())
                elif internals['newlines_inside'] > 0:
                    # Re-insert newlines at the original line boundaries.
                    n = 0
                    m = file_seq.index(newline_char)
                    while m < len(seq):
                        self.file.write(''.join([seq[n:m], newline_char]).encode())
                        n = m
                        m += line_len
                    self.file.write(seq[n:].encode())
                self.file.flush()

    def get_long_name(self, rname):
        """ Return the full sequence defline and description. External method using the self.index """
        index_record = self.index[rname]
        if self._bgzf:
            return self._long_name_from_bgzf(index_record)
        else:
            return self._long_name_from_index_record(index_record)

    def _long_name_from_index_record(self, index_record):
        """ Return the full sequence defline and description. Internal method passing IndexRecord """
        # The defline occupies [prev_bend, offset); strip '>' and the newline.
        prev_bend = index_record.prev_bend
        defline_end = index_record.offset
        self.file.seek(prev_bend)
        return self.file.read(defline_end - prev_bend).decode()[1:-1]

    def _long_name_from_bgzf(self, index_record):
        """ Return the full sequence defline and description. Internal method passing IndexRecord
        This method is present for compatibility with BGZF files, since we cannot subtract their offsets.
        It may be possible to implement a more efficient method. """
        raise NotImplementedError(
            "FastaRecord.long_name and Fasta(read_long_names=True) "
            "are not supported currently for BGZF compressed files.")

    def close(self):
        self.__exit__()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.file.close()
class FastaRecord(object):
    """Read-only view of one named sequence in a Fasta file.

    Holds no sequence data itself; all access is delegated to the parent
    ``Fasta`` instance (``self._fa``), which fetches from disk on demand.
    """
    __slots__ = ['name', '_fa']
    def __init__(self, name, fa):
        # name: record name (a key of the parent's index).
        # fa: parent Fasta instance used for all sequence retrieval.
        self.name = name
        self._fa = fa
    def __getitem__(self, n):
        """Return sequence from region [start, end)
        Coordinates are 0-based, end-exclusive."""
        try:
            if isinstance(n, slice):
                start, stop, step = n.start, n.stop, n.step
                if start is None:
                    start = 0
                if stop is None:
                    stop = len(self)
                if stop < 0:
                    stop = len(self) + stop
                if start < 0:
                    start = len(self) + start
                # get_seq takes 1-based coordinates, so [start, stop) becomes
                # (start + 1, stop); any step is applied to the result after.
                return self._fa.get_seq(self.name, start + 1, stop)[::step]
            elif isinstance(n, integer_types):
                if n < 0:
                    n = len(self) + n
                return self._fa.get_seq(self.name, n + 1, n + 1)
        except FetchError:
            raise
    def __iter__(self):
        """ Construct a line-based generator that respects the original line lengths. """
        line_len = self._fa.faidx.index[self.name].lenc
        start = 0
        while True:
            end = start + line_len
            if end < len(self):
                yield self[start:end]
            else:
                # Final (possibly short) line ends the iteration.
                yield self[start:]
                return
            start += line_len
    def __reversed__(self):
        """ Reverse line-based generator """
        line_len = self._fa.faidx.index[self.name].lenc
        # have to determine last line length
        last_line = len(self) % line_len
        if last_line == 0:
            last_line = line_len
        end = len(self)
        start = end - last_line
        while True:
            if start > 0:
                # Each emitted line is itself reversed.
                yield self[start:end][::-1]
            else:
                yield self[:end][::-1]
                return
            if end == len(self):  # first iteration
                end -= last_line
            else:
                end -= line_len
            start = end - line_len
    def __repr__(self):
        return 'FastaRecord("%s")' % (self.name)
    def __len__(self):
        # Length comes straight from the index; no disk access needed.
        return self._fa.faidx.index[self.name].rlen
    @property
    def unpadded_len(self):
        """ Returns the length of the contig without 5' and 3' N padding.
        Functions the same as contigNonNSize in Fasta.cpp at
        https://github.com/Illumina/hap.py/blob/master/src/c%2B%2B/lib/tools/Fasta.cpp#L284
        """
        length = len(self)
        stop = False
        # Walk forward from the 5' end, subtracting leading Ns.
        for line in iter(self):
            if stop:
                break
            if isinstance(line, Sequence):
                line = line.seq
            for base in line.upper():
                if base == 'N':
                    length -= 1
                else:
                    stop = True
                    break
        stop = False
        # Then walk backward from the 3' end, subtracting trailing Ns.
        for line in reversed(self):
            if stop:
                break
            if isinstance(line, Sequence):
                line = line.seq
            for base in line.upper():
                if base == 'N':
                    length -= 1
                else:
                    stop = True
                    break
        return length
    def __str__(self):
        # Full sequence, fetched via __getitem__.
        return str(self[:])
    @property
    def variant_sites(self):
        if isinstance(self._fa, FastaVariant):
            pos = []
            var = self._fa.vcf.fetch(self.name, 0, len(self))
            for site in var:
                if site.is_snp:
                    sample = site.genotype(self._fa.sample)
                    # NOTE(review): ``eval`` executes the filter string as
                    # code — the filter is assumed to be a trusted expression
                    # set up by FastaVariant, not external input.
                    if sample.gt_type in self._fa.gt_type and eval(
                            self._fa.filter):
                        pos.append(site.POS)
            return tuple(pos)
        else:
            raise NotImplementedError(
                "variant_sites() only valid for FastaVariant.")
    @property
    def long_name(self):
        """ Read the actual defline from self._fa.faidx mdshw5/pyfaidx#54 """
        return self._fa.faidx.get_long_name(self.name)
    @property
    def __array_interface__(self):
        """ Implement numpy array interface for issue #139"""
        # ``buffer`` is aliased to memoryview on Python 3 (see module top).
        return {
            'shape': (len(self), ),
            'typestr': '|S1',
            'version': 3,
            'data': buffer(str(self).encode('ascii'))
        }
class MutableFastaRecord(FastaRecord):
    """A FastaRecord that additionally supports in-place mutation of the backing file."""
    def __init__(self, name, fa):
        super(MutableFastaRecord, self).__init__(name, fa)
        # Mutation seeks and writes raw bytes, which is incompatible with the
        # BGZF reader — only plain (uncompressed) FASTA may be mutated.
        if self._fa.faidx._fasta_opener != open:
            raise UnsupportedCompressionFormat(
                "BGZF compressed FASTA is not supported for MutableFastaRecord. "
                "Please decompress your FASTA file.")
    def __setitem__(self, n, value):
        """Mutate sequence in region [start, end)
        to value.
        Coordinates are 0-based, end-exclusive."""
        try:
            if isinstance(n, slice):
                start, stop, step = n.start, n.stop, n.step
                if step:
                    raise IndexError("Step operator is not implemented.")
                if not start:
                    start = 0
                if not stop:
                    stop = len(self)
                if stop < 0:
                    stop = len(self) + stop
                if start < 0:
                    start = len(self) + start
                # to_file takes 1-based coordinates, hence the +1 shift.
                self._fa.faidx.to_file(self.name, start + 1, stop, value)
            elif isinstance(n, integer_types):
                if n < 0:
                    n = len(self) + n
                return self._fa.faidx.to_file(self.name, n + 1, n + 1, value)
        except (FetchError, IOError):
            raise
class Fasta(object):
    """Dict-like random access to the records of an (indexed) FASTA file."""
    def __init__(self,
                 filename,
                 default_seq=None,
                 key_function=lambda x: x,
                 as_raw=False,
                 strict_bounds=False,
                 read_ahead=None,
                 mutable=False,
                 split_char=None,
                 filt_function=lambda x: True,
                 one_based_attributes=True,
                 read_long_names=False,
                 duplicate_action="stop",
                 sequence_always_upper=False,
                 rebuild=True,
                 build_index=True):
        """
        An object that provides a pygr compatible interface.
        filename: name of fasta file

        All other arguments are passed through to the underlying Faidx
        instance; see Faidx.__init__ for their meaning.
        """
        self.filename = filename
        self.mutable = mutable
        # The Faidx handles indexing and all disk access.
        self.faidx = Faidx(
            filename,
            key_function=key_function,
            as_raw=as_raw,
            default_seq=default_seq,
            strict_bounds=strict_bounds,
            read_ahead=read_ahead,
            mutable=mutable,
            split_char=split_char,
            filt_function=filt_function,
            one_based_attributes=one_based_attributes,
            read_long_names=read_long_names,
            duplicate_action=duplicate_action,
            sequence_always_upper=sequence_always_upper,
            rebuild=rebuild,
            build_index=build_index)
        self.keys = self.faidx.index.keys
        # One record view per indexed name; mutable files get writable views.
        if not self.mutable:
            self.records = dict(
                [(rname, FastaRecord(rname, self)) for rname in self.keys()])
        elif self.mutable:
            self.records = dict([(rname, MutableFastaRecord(rname, self))
                                 for rname in self.keys()])
    def __contains__(self, rname):
        """Return True if genome contains record."""
        return rname in self.faidx.index
    def __getitem__(self, rname):
        """Return a chromosome by its name, or its numerical index."""
        if isinstance(rname, integer_types):
            rname = tuple(self.keys())[rname]
        try:
            return self.records[rname]
        except KeyError:
            raise KeyError("{0} not in {1}.".format(rname, self.filename))
    def __repr__(self):
        return 'Fasta("%s")' % (self.filename)
    def __iter__(self):
        for rname in self.keys():
            yield self[rname]
    def get_seq(self, name, start, end, rc=False):
        """Return a sequence by record name and interval [start, end].
        Coordinates are 1-based and end-inclusive (the underlying
        Faidx.fetch returns ``end - start + 1`` characters).
        If rc is set, reverse complement will be returned.
        """
        # Get sequence from real genome object and save result.
        seq = self.faidx.fetch(name, start, end)
        if rc:
            return -seq
        else:
            return seq
    def get_spliced_seq(self, name, intervals, rc=False):
        """Return a sequence by record name and list of intervals
        Interval list is an iterable of [start, end].
        Coordinates are 1-based and end-inclusive.
        If rc is set, reverse complement will be returned.
        """
        # Get sequence for all intervals
        chunks = [self.faidx.fetch(name, s, e) for s, e in intervals]
        start = chunks[0].start
        end = chunks[-1].end
        # reverse complement
        if rc:
            seq = "".join([(-chunk).seq for chunk in chunks[::-1]])
        else:
            seq = "".join([chunk.seq for chunk in chunks])
        # Sequence coordinate validation wont work since
        # len(Sequence.seq) != end - start
        return Sequence(name=name, seq=seq, start=None, end=None)
    def close(self):
        self.__exit__()
    def __enter__(self):
        return self
    def __exit__(self, *args):
        self.faidx.__exit__(*args)
class FastaVariant(Fasta):
    """ Return consensus sequence from FASTA and VCF inputs

    Fetches sequence from the FASTA file and substitutes SNP alleles
    (indels are skipped) found in the VCF for the selected sample.
    Requires the third-party ``pysam`` and ``PyVCF`` packages.
    """
    # Characters allowed in the comparison operator of call_filter.
    expr = set(('>', '<', '=', '!'))

    def __init__(self,
                 filename,
                 vcf_file,
                 sample=None,
                 het=True,
                 hom=True,
                 call_filter=None,
                 **kwargs):
        """
        filename: FASTA file name (forwarded to Fasta)
        vcf_file: path to the VCF file with variant calls
        sample: sample name in the VCF; defaults to the first sample
        het / hom: include heterozygous / homozygous-alt genotypes
        call_filter: optional per-call filter such as 'GQ > 30'
        kwargs: forwarded to Fasta.__init__

        Raises ImportError if pysam or PyVCF is missing, ValueError for
        a malformed call_filter, and IOError if vcf_file does not exist.
        """
        super(FastaVariant, self).__init__(filename, **kwargs)
        try:
            import pysam
        except ImportError:
            raise ImportError("pysam must be installed for FastaVariant.")
        try:
            import vcf
        except ImportError:
            raise ImportError("PyVCF must be installed for FastaVariant.")
        if call_filter is not None:
            try:
                key, expr, value = call_filter.split()  # 'GQ > 30'
            except (ValueError, IndexError):
                # BUGFIX: unpacking str.split() raises ValueError (not
                # IndexError) when the filter does not have exactly three
                # whitespace-separated fields; catch both so the friendly
                # message below is actually produced.
                raise ValueError(
                    "call_filter must be a string in the format 'XX <>!= NN'")
            # Sanity-check the pieces before building an eval() string.
            assert all([x in self.expr for x in list(expr)])
            assert all([x in string.ascii_uppercase for x in list(key)])
            assert all([x in string.printable for x in list(value)])
            # Evaluated with eval() against each sample call in get_seq.
            self.filter = "sample['{key}'] {expr} {value}".format(**locals())
        else:
            self.filter = 'True'
        if os.path.exists(vcf_file):
            self.vcf = vcf.Reader(filename=vcf_file)
        else:
            raise IOError("File {0} does not exist.".format(vcf_file))
        if sample is not None:
            self.sample = sample
        else:
            self.sample = self.vcf.samples[0]
        if len(self.vcf.samples) > 1 and sample is None:
            warnings.warn("Using sample {0} genotypes.".format(
                self.sample), RuntimeWarning)
        # gt_type codes accepted when substituting variants
        # (1 and 2 correspond to the het/hom flags above).
        if het and hom:
            self.gt_type = set((1, 2))
        elif het:
            self.gt_type = set((1, ))
        elif hom:
            self.gt_type = set((2, ))
        else:
            self.gt_type = set()

    def __repr__(self):
        return 'FastaVariant("%s", "%s", gt="%s")' % (self.filename,
                                                      self.vcf.filename,
                                                      str(self.gt_type))

    def get_seq(self, name, start, end):
        """Return a sequence by record name and interval [start, end).
        Replace positions with polymorphism with variant.
        Coordinates are 0-based, end-exclusive.

        NOTE(review): the docstring says 0-based, but faidx.fetch is
        documented as 1-based elsewhere in this file and the VCF window
        below is queried with start - 1 -- confirm the intended basis.
        """
        seq = self.faidx.fetch(name, start, end)
        # Work on a mutable list of characters; as_raw returns a plain
        # string, otherwise a Sequence object carrying .seq.
        if self.faidx.as_raw:
            seq_mut = list(seq)
            del seq
        else:
            seq_mut = list(seq.seq)
            del seq.seq
        var = self.vcf.fetch(name, start - 1, end)
        for record in var:
            if record.is_snp:  # skip indels
                sample = record.genotype(self.sample)
                if sample.gt_type in self.gt_type and eval(self.filter):
                    alt = record.ALT[0]
                    # Offset of the variant within the fetched window.
                    i = (record.POS - 1) - (start - 1)
                    seq_mut[i:i + len(alt)] = str(alt)
        # slice the list in case we added an MNP in last position
        if self.faidx.as_raw:
            return ''.join(seq_mut[:end - start + 1])
        else:
            seq.seq = ''.join(seq_mut[:end - start + 1])
            return seq
def wrap_sequence(n, sequence, fillvalue=''):
    """Yield ``sequence`` in newline-terminated chunks of length ``n``.

    The final chunk is padded with ``fillvalue`` (empty by default, so a
    short tail is simply emitted as-is with a trailing newline).
    """
    iterators = [iter(sequence)] * n
    for chunk in zip_longest(*iterators, fillvalue=fillvalue):
        yield ''.join(chunk) + "\n"
# To take a complement, we map each character in the first string in this pair
# to the corresponding character in the second string.
complement_map = ('ACTGNactgnYRWSKMDVHBXyrwskmdvhbx',
                  'TGACNtgacnRYWSMKHBDVXrywsmkhbdvx')
# Every byte value that is NOT a recognized sequence character above.
invalid_characters_set = set(
    chr(x) for x in range(256) if chr(x) not in complement_map[0])
invalid_characters_string = ''.join(invalid_characters_set)
# Build the translation table once at import time.  On Python 3,
# str.maketrans takes a third "delete" argument; Python 2's
# string.maketrans does not, so the delete string is passed to
# translate() as a second argument instead.  PY2/PY3 are module-level
# version flags defined elsewhere in this file.
if PY3:
    complement_table = str.maketrans(complement_map[0], complement_map[1],
                                     invalid_characters_string)
    translate_arguments = (complement_table, )
elif PY2:
    complement_table = string.maketrans(complement_map[0], complement_map[1])
    translate_arguments = (complement_table, invalid_characters_string)
def complement(seq):
    """Return the DNA complement of seq (same orientation, not reversed).

    Raises ValueError naming the first non-DNA character encountered.

    >>> seq = 'ATCGTA'
    >>> complement(seq)
    'TAGCAT'
    """
    sequence = str(seq)
    result = sequence.translate(*translate_arguments)
    if len(result) == len(sequence):
        return result
    # translate() silently deleted at least one invalid character; scan
    # for the first offender to produce a helpful (1-based) error.
    for position, base in enumerate(sequence):
        if base in invalid_characters_set:
            raise ValueError(
                "Sequence contains non-DNA character '{0}' at position {1:n}\n".
                format(base, position + 1))
def translate_chr_name(from_name, to_name):
    """Build a renaming function from two parallel name sequences.

    The returned callable maps each element of ``from_name`` to the
    element of ``to_name`` at the same position, raising KeyError for
    unknown names.
    """
    name_lookup = dict(zip(from_name, to_name))

    def map_to_function(rname):
        # KeyError if rname was not present in from_name.
        return name_lookup[rname]

    return map_to_function
def bed_split(bed_entry):
    """Split one BED line into (rname, start, end).

    Only the first three whitespace-separated fields are used; start and
    end are returned as ints.  Raises BedError for malformed entries --
    too few fields OR non-integer coordinates (the int() conversion was
    previously outside the try block, so bad coordinates leaked a bare
    ValueError instead of BedError).
    """
    try:
        rname, start, end = bed_entry.rstrip().split()[:3]
        start, end = (int(start), int(end))
    except (IndexError, ValueError):
        raise BedError('Malformed BED entry! {0}\n'.format(bed_entry.rstrip()))
    return (rname, start, end)
def ucsc_split(region):
    """Parse a UCSC-style region string 'rname:start-end'.

    Returns (rname, start, end) with start converted to a 0-based
    coordinate.  When the interval part is missing or malformed, start
    and end come back as None and the whole string (or its name part)
    is used as rname.
    """
    try:
        rname, interval = region.split(':')
    except ValueError:
        # No single ':' separator: the entire string is the record name.
        rname = region
        interval = None
    try:
        raw_start, raw_end = interval.split('-')
        start = int(raw_start) - 1
        end = int(raw_end)
    except (AttributeError, ValueError):
        # interval was None, had no '-', or was not numeric.
        start = None
        end = None
    return (rname, start, end)
def check_bad_lines(rname, bad_lines, i):
    """ Find inconsistent line lengths in the middle of an
    entry. Allow blank lines between entries, and short lines
    occurring at the last line of an entry. Returns boolean
    validating the entry.

    rname: record name (used only in the failure message)
    bad_lines: list of (line_number, line_length) tuples that deviated
        from the expected width
    i: line number of the entry's final line

    >>> check_bad_lines('chr0', [(10, 79)], 10)
    True
    >>> check_bad_lines('chr0', [(9, 79)], 10)
    False
    >>> check_bad_lines('chr0', [(9, 79), (10, 1)], 10)
    True
    """
    if len(bad_lines) == 0:
        return True
    elif len(bad_lines) == 1:
        if bad_lines[0][0] == i:  # must be last line
            return True
        else:
            return False
    elif len(bad_lines) == 2:
        if bad_lines[0][0] == i:  # must not be last line
            return False
        elif bad_lines[1][0] == i and bad_lines[1][1] == 1:  # blank last line
            if bad_lines[0][0] + 1 == i and bad_lines[0][1] > 1:  # non-blank line
                return True
            else:
                return False
    if len(bad_lines) > 2:
        return False
    # Two bad lines that match none of the accepted shapes above: this
    # indicates an indexing bug rather than a malformed file.
    # BUGFIX: the original message ran rname and "Please report" together
    # with no separator.
    raise RuntimeError("Unhandled exception during fasta indexing at entry " + rname +
                       ". Please report this issue at https://github.com/mdshw5/pyfaidx/issues " +
                       str(bad_lines))
if __name__ == "__main__":
import doctest
doctest.testmod() | PypiClean |
/Gnosis-0.1.1.tar.gz/Gnosis-0.1.1/gnosis/xml/pickle/parsers/_dom.py | from gnosis.xml.pickle.util import subnodes, _EmptyClass, unsafe_string, \
unsafe_content, safe_eval, obj_from_name, unpickle_function, \
get_class_from_name
from gnosis.util.introspect import attr_update
from types import *
import gnosis.xml.pickle.ext as mutate
from xml.dom import minidom
from gnosis.util.XtoY import to_number
import gnosis.pyconfig as pyconfig
# Get appropriate array type.
# Prefer the (ancient) Numeric package; fall back to the stdlib array
# module.  array_type records which one is in use.
try:
    from Numeric import *
    array_type = 'NumPy_array'
except ImportError:
    from array import *
    array_type = 'array'
# Define exceptions and flags.
# These are string exceptions -- legacy Python 1.5/2.x style, raised
# with the `raise Name, message` statement throughout this module.
XMLPicklingError = "gnosis.xml.pickle.XMLPicklingError"
XMLUnpicklingError = "gnosis.xml.pickle.XMLUnpicklingError"
# Define our own TRUE/FALSE syms, based on Python version.
if pyconfig.Have_TrueFalse():
    # Python 2.2 and up have a True/False (even though it's
    # a completely different value between 2.2 & 2.3)
    TRUE_VALUE = True
    FALSE_VALUE = False
else:
    # Below 2.2 has no True/False, so define them as they
    # are in 2.2 (this allows Python < 2.2 to read pickles
    # with bools created by Python 2.2+. Of course, if those
    # pickles are then rewritten, they'll lose their true/false
    # meaning, but hey, there's only so much we can do! :-)
    TRUE_VALUE = 1
    FALSE_VALUE = 0
# entry point expected by XML_Pickle
def thing_from_dom(fh, paranoia=1):
    """Parse an xml_pickle document from file handle fh and rebuild the
    Python object it encodes.  paranoia is passed through to control how
    much trust is placed in class/module names found in the XML."""
    global visited
    # Reset the id -> object table used to resolve internal references
    # (refid= attributes) within this document.
    visited = {}
    return _thing_from_dom(minidom.parse(fh),None,paranoia)
def _save_obj_with_id(node, obj):
    # Record obj in the global visited{} table under the node's id=
    # attribute so later refid= references can be resolved back to it.
    id = node.getAttribute('id')
    if len(id):     # might be None, or empty - shouldn't use as key
        visited[id] = obj
def unpickle_instance(node, paranoia):
    """Take a <PyObject> or <.. type="PyObject"> DOM node and unpickle the object."""
    # we must first create an empty obj of the correct type and place
    # it in visited{} (so we can handle self-refs within the object)
    pyobj = obj_from_node(node, paranoia)
    _save_obj_with_id(node, pyobj)
    # slurp raw thing into an empty carrier object; its attributes are
    # the unpickled child nodes.
    raw = _thing_from_dom(node, _EmptyClass(), paranoia)
    # code below has same ordering as pickle.py
    # pass initargs, if defined
    try:
        # Note: this is plain attribute access -- __getinitargs__ was
        # stored as a data attribute by the unpickling pass above.
        args = raw.__getinitargs__
        delattr(raw,'__getinitargs__')  # don't want this in pyobj (below)
        apply(pyobj.__init__,args)
    except:
        # NOTE(review): bare except also swallows real __init__ errors,
        # not just a missing __getinitargs__ -- intentional best-effort?
        pass
    # next, decide what "stuff" is supposed to go into pyobj
    if hasattr(raw,'__getstate__'):
        stuff = raw.__getstate__
    else:
        stuff = raw.__dict__
    # finally, decide how to get the stuff into pyobj
    if hasattr(pyobj,'__setstate__'):
        pyobj.__setstate__(stuff)
    else:
        if type(stuff) is DictType:  # must be a Dict if no __setstate__
            # see note in pickle.py/load_build() about restricted
            # execution -- do the same thing here
            #try:
            #    pyobj.__dict__.update(stuff)
            #except RuntimeError:
            #    for k,v in stuff.items():
            #        setattr(pyobj, k, v)
            attr_update(pyobj, stuff)
        else:
            # subtle -- this can happen either because the class really
            # does violate the pickle protocol, or because PARANOIA was
            # set too high, and we couldn't create the real class, so
            # __setstate__ is missing (and __stateinfo__ isn't a dict)
            raise XMLUnpicklingError, \
                "Non-DictType without setstate violates pickle protocol."+\
                "(PARANOIA setting may be too high)"
    return pyobj
def obj_from_node(node, paranoia=1):
    """Given a <PyObject> node, return an object of that type.
    __init__ is NOT called on the new object, since the caller may want
    to do some additional work first.
    """
    classname = node.getAttribute('class')
    # allow <PyObject> nodes w/out module name
    # (possibly handwritten XML, XML containing "from-air" classes,
    # or classes placed in the CLASS_STORE)
    try:
        modname = node.getAttribute('module')
    except:
        modname = None  # must exist in xml_pickle namespace, or thin-air
    # obj_from_name applies the paranoia policy when resolving the class.
    return obj_from_name(classname, modname, paranoia)
def get_node_valuetext(node):
    "Get text from node, whether in value=, or in element body."
    # we know where the text is, based on whether there is
    # a value= attribute. ie. pickler can place it in either
    # place (based on user preference) and unpickler doesn't care
    if node._attrs.has_key('value'):
        # text in tag
        ttext = node.getAttribute('value')
        return unsafe_string(ttext)
    else:
        # text in body
        # normalize() merges adjacent text nodes into one child.
        node.normalize()
        try:
            btext = node.childNodes[0].nodeValue
        except:
            # no child text node at all -> empty string
            btext = ''
        return unsafe_content(btext)
def _fix_family(family,typename):
    """
    If family is None or empty, guess family based on typename.
    (We can only guess for builtins, of course.)

    Families group types by how they are unpickled: none/atom/seq/map/
    obj/lang/uniq (see _thing_from_dom).
    """
    if family and len(family):
        return family  # sometimes it's None, sometimes it's empty ...
    if typename == 'None':
        return 'none'
    if typename == 'dict':
        return 'map'
    elif typename == 'list':
        return 'seq'
    elif typename == 'tuple':
        return 'seq'
    elif typename == 'numeric':
        return 'atom'
    elif typename == 'string':
        return 'atom'
    elif typename == 'PyObject':
        return 'obj'
    elif typename == 'function':
        return 'lang'
    elif typename == 'class':
        return 'lang'
    elif typename == 'True':
        return 'uniq'
    elif typename == 'False':
        return 'uniq'
    else:
        raise XMLUnpicklingError, \
            "family= must be given for unknown type %s" % typename
def _thing_from_dom(dom_node, container=None, paranoia=1):
    "Converts an [xml_pickle] DOM tree to a 'native' Python object"
    # Walk the element children of dom_node.  Depending on the node name,
    # either the whole container is replaced (<PyObject>), a value is
    # attached to it (<attr>/<item>/<key>/<val>), or a dict entry is
    # added (<entry>).
    for node in subnodes(dom_node):
        if node.nodeName == "PyObject":
            container = unpickle_instance(node, paranoia)
            # do we need to unmutate it? (check for type= being set --
            # will only be set for mutated objects)
            if node.getAttribute('type'):
                # get unmutator by type=
                klass = node.getAttribute('type')
                if mutate.can_unmutate(klass,container):
                    # note -- 'extra' isn't handled (yet) at the toplevel
                    container = mutate.unmutate(klass,container,paranoia,None)
            try:
                id = node.getAttribute('id')
                visited[id] = container
            except KeyError:
                pass
        elif node.nodeName in ['attr','item','key','val']:
            node_family = node.getAttribute('family')
            node_type = node.getAttribute('type')
            node_name = node.getAttribute('name')
            # check refid first (if present, type is type of referenced object)
            ref_id = node.getAttribute('refid')
            if len(ref_id):  # might be empty or None
                if node.nodeName == 'attr':
                    setattr(container, node_name, visited[ref_id])
                else:
                    container.append(visited[ref_id])
                # done, skip rest of block
                continue
            # if we didn't find a family tag, guess (do after refid check --
            # old pickles will set type="ref" which _fix_family can't handle)
            node_family = _fix_family(node_family,node_type)
            node_valuetext = get_node_valuetext(node)
            # step 1 - set node_val to basic thing
            #if node_name == '__parent__' and getExcludeParentAttr():
            #    continue # Do not pickle xml_objectify bookkeeping attribute
            if node_family == 'none':
                node_val = None
            elif node_family == 'atom':
                node_val = node_valuetext
            elif node_family == 'seq':
                # seq must exist in visited{} before we unpickle subitems,
                # in order to handle self-references
                seq = []
                _save_obj_with_id(node,seq)
                node_val = _thing_from_dom(node,seq,paranoia)
            elif node_family == 'map':
                # map must exist in visited{} before we unpickle subitems,
                # in order to handle self-references
                map = {}
                _save_obj_with_id(node,map)
                node_val = _thing_from_dom(node,map,paranoia)
            elif node_family == 'obj':
                node_val = unpickle_instance(node, paranoia)
            elif node_family == 'lang':
                # lang is a special type - we don't know how to unpickle
                # a generic 'lang' object, so we have to handle the specific
                # types here instead of in the block below
                #
                # In the future, this might be expanded to include
                # other languages, but for now, only Python is supported.
                if node_type == 'function':
                    node_val = unpickle_function(node.getAttribute('module'),
                                                 node.getAttribute('class'),
                                                 paranoia)
                elif node_type == 'class':
                    node_val = get_class_from_name(node.getAttribute('class'),
                                                   node.getAttribute('module'),
                                                   paranoia)
                else:
                    raise XMLUnpicklingError, "Unknown lang type %s" % node_type
            elif node_family == 'uniq':
                # uniq is another special type that is handled here instead
                # of below.
                # Gnosis-1.0.6 encoded functions/classes as 'uniq' instead
                # of 'lang' -- accept these for backward compatibility.
                if node_type == 'function':
                    node_val = unpickle_function(node.getAttribute('module'),
                                                 node.getAttribute('class'),
                                                 paranoia)
                elif node_type == 'class':
                    node_val = get_class_from_name(node.getAttribute('class'),
                                                   node.getAttribute('module'),
                                                   paranoia)
                elif node_type == 'True':
                    node_val = TRUE_VALUE
                elif node_type == 'False':
                    node_val = FALSE_VALUE
                else:
                    raise XMLUnpicklingError, "Unknown uniq type %s" % node_type
            else:
                raise XMLUnpicklingError, "UNKNOWN family %s,%s,%s" % (node_family,node_type,node_name)
            # step 2 - take basic thing and make exact thing
            # Note there are several NOPs here since node_val has been decided
            # above for certain types. However, I left them in since I think it's
            # clearer to show all cases being handled (easier to see the pattern
            # when doing later maintenance).
            if node_type == 'None':
                node_val = None
            elif node_type == 'numeric':
                #node_val = safe_eval(node_val)
                node_val = to_number(node_val)
            elif node_type == 'string':
                node_val = node_val
            elif node_type == 'list':
                node_val = node_val
            elif node_type == 'tuple':
                # subtlety - if tuples could self-reference, this would be wrong
                # since the self ref points to a list, yet we're making it into
                # a tuple. it appears however that self-referencing tuples aren't
                # really all that legal (regular pickle can't handle them), so
                # this shouldn't be a practical problem.
                node_val = tuple(node_val)
            elif node_type == 'dict':
                node_val = node_val
            elif node_type == 'function':
                node_val = node_val
            elif node_type == 'class':
                node_val = node_val
            elif node_type == 'True':
                node_val = node_val
            elif node_type == 'False':
                node_val = node_val
            elif mutate.can_unmutate(node_type,node_val):
                # custom (mutated) type -- hand back to its unmutator,
                # passing along the optional extra= payload.
                mextra = node.getAttribute('extra')
                node_val = mutate.unmutate(node_type,node_val,paranoia,
                                           mextra)
            elif node_type == 'PyObject':
                node_val = node_val
            #elif ext.can_handle_xml(node_type,node_valuetext):
            #    node_val = ext.xml_to_obj(node_type, node_valuetext, paranoia)
            else:
                raise XMLUnpicklingError, "Unknown type %s,%s" % (node,node_type)
            # Attach the finished value to the container: named attribute
            # for <attr>, positional append otherwise.
            if node.nodeName == 'attr':
                setattr(container,node_name,node_val)
            else:
                container.append(node_val)
            _save_obj_with_id(node,node_val)
        elif node.nodeName == 'entry':
            # A dict entry is a (key, value) pair of child nodes.
            keyval = _thing_from_dom(node, [], paranoia)
            key, val = keyval[0], keyval[1]
            container[key] = val
            # <entry> has no id for refchecking
        else:
            raise XMLUnpicklingError, \
                "element %s is not in PyObjects.dtd" % node.nodeName
    return container
/DEME-0.2.8.tar.gz/DEME-0.2.8/thirdparty/pybind11/noxfile.py | import os
import nox
nox.needs_version = ">=2022.1.7"
nox.options.sessions = ["lint", "tests", "tests_packaging"]
PYTHON_VERSIONS = [
"3.6",
"3.7",
"3.8",
"3.9",
"3.10",
"3.11",
"pypy3.7",
"pypy3.8",
"pypy3.9",
]
if os.environ.get("CI", None):
nox.options.error_on_missing_interpreters = True
@nox.session(reuse_venv=True)
def lint(session: nox.Session) -> None:
    """
    Lint the codebase (except for clang-format/tidy).
    """
    session.install("pre-commit")
    # Run every configured pre-commit hook over the whole tree; extra
    # CLI arguments are forwarded to pre-commit.
    session.run("pre-commit", "run", "-a", *session.posargs)
@nox.session(python=PYTHON_VERSIONS)
def tests(session: nox.Session) -> None:
    """
    Run the tests (requires a compiler).
    """
    tmpdir = session.create_tmp()
    session.install("cmake")
    session.install("-r", "tests/requirements.txt")
    # Configure an out-of-tree build: warnings are errors, and CMake is
    # allowed to download Catch and Eigen.  Extra CLI args are forwarded
    # to the configure step.
    session.run(
        "cmake",
        "-S.",
        f"-B{tmpdir}",
        "-DPYBIND11_WERROR=ON",
        "-DDOWNLOAD_CATCH=ON",
        "-DDOWNLOAD_EIGEN=ON",
        *session.posargs,
    )
    session.run("cmake", "--build", tmpdir)
    # The "check" target builds and runs the test suite.
    session.run("cmake", "--build", tmpdir, "--config=Release", "--target", "check")
@nox.session
def tests_packaging(session: nox.Session) -> None:
    """
    Run the packaging tests.
    """
    # --prefer-binary avoids building wheels from source where possible.
    session.install("-r", "tests/requirements.txt", "--prefer-binary")
    session.run("pytest", "tests/extra_python_package", *session.posargs)
@nox.session(reuse_venv=True)
def docs(session: nox.Session) -> None:
    """
    Build the docs. Pass "serve" to serve.
    """
    session.install("-r", "docs/requirements.txt")
    session.chdir("docs")
    # "pdf" builds the LaTeX/PDF docs and stops there.
    if "pdf" in session.posargs:
        session.run("sphinx-build", "-M", "latexpdf", ".", "_build")
        return
    session.run("sphinx-build", "-M", "html", ".", "_build")
    if "serve" in session.posargs:
        # Serve the built HTML locally for previewing.
        session.log("Launching docs at http://localhost:8000/ - use Ctrl-C to quit")
        session.run("python", "-m", "http.server", "8000", "-d", "_build/html")
    elif session.posargs:
        # Anything other than "pdf"/"serve" is a usage error.
        session.error("Unsupported argument to docs")
@nox.session(reuse_venv=True)
def make_changelog(session: nox.Session) -> None:
    """
    Inspect the closed issues and make entries for a changelog.
    """
    # ghapi talks to the GitHub API; rich is used for console output.
    session.install("ghapi", "rich")
    session.run("python", "tools/make_changelog.py")
@nox.session(reuse_venv=True)
def build(session: nox.Session) -> None:
    """
    Build SDists and wheels.
    """
    session.install("build")
    session.log("Building normal files")
    session.run("python", "-m", "build", *session.posargs)
    # A second build with PYBIND11_GLOBAL_SDIST=1 produces the
    # pybind11-global variant of the package.
    session.log("Building pybind11-global files (PYBIND11_GLOBAL_SDIST=1)")
    session.run(
        "python", "-m", "build", *session.posargs, env={"PYBIND11_GLOBAL_SDIST": "1"}
    )
/Muphoten-0.1.0.tar.gz/Muphoten-0.1.0/muphoten/ps1_survey.py |
import subprocess
import shutil
import os
import numpy as np
from astropy.io import fits
from astropy import wcs
from astropy.table import Table, vstack
from shapely.geometry import Polygon
from muphoten.utils import (rm_p, load_config, getpath)
from muphoten.astrometry import scamp
def get_crpix(proj_crpix1, proj_crpix2, Xcell, Ycell, x, y):
    """Return (CRPIX1, CRPIX2) for skycell (x, y) of a PS1 projection cell.

    (proj_crpix1, proj_crpix2) is the reference pixel of the central
    skycell (5, 5); neighbouring cells are shifted by the cell dimension
    minus 480 pixels per step in x/y (presumably the inter-cell overlap
    -- TODO confirm against the PS1 sky tessellation docs).
    """
    center = 5
    crpix1 = proj_crpix1 + (center - x) * (Xcell - 480)
    crpix2 = proj_crpix2 + (center - y) * (Ycell - 480)
    return crpix1, crpix2
def get_RADEC_coord(proj_crpix1, proj_crpix2, Xcell, Ycell, x, y, RA, Dec):
    """Return the sky coordinates of skycell (x, y)'s corners and its WCS.

    Builds a gnomonic (TAN) WCS for the skycell at the PS1 plate scale
    and converts the four corner pixels to (RA, Dec).  Returns a tuple
    (world, w) where world is a 2 x 4 array (RA row, Dec row) and w the
    astropy WCS object.
    """
    pixscale = 0.25 / 3600  # 0.25 arcsec/pixel, expressed in degrees
    crpix1, crpix2 = get_crpix(proj_crpix1, proj_crpix2, Xcell, Ycell, x, y)
    # Create a new WCS object; the number of axes must be set up front.
    cell_wcs = wcs.WCS(naxis=2)
    cell_wcs.wcs.crpix = [float(crpix1), float(crpix2)]
    # RA axis decreases with pixel x, hence the negative scale.
    cell_wcs.wcs.cdelt = np.array([-pixscale, pixscale])
    cell_wcs.wcs.crval = [RA, Dec]
    cell_wcs.wcs.ctype = ["RA---TAN", "DEC--TAN"]
    # Corner pixels of the cell, as [X, Y] pairs in the 1-based FITS
    # convention (origin argument below is 1).
    corner_pixels = np.array(
        [[0, 0], [Xcell, 0], [Xcell, Ycell], [0, Ycell]], dtype=np.float64
    )
    world = cell_wcs.wcs_pix2world(corner_pixels, 1)
    world = np.array(world).T
    return world, cell_wcs
def ps1_cell_coord(im_corner_coords, projcell_id, Xcell, Ycell,
                   projcell_ra_center, projcell_dec_center,
                   proj_crpix1, proj_crpix2):
    """
    Template for computing the 10x10 cells composing a PS1 projcell.

    Returns an astropy Table listing every skycell of this projection
    cell whose footprint intersects the image footprint given by
    im_corner_coords ([RAs, Decs] of the image corners).
    """
    # cell size, 0yx
    ny = 10  # bottom to top of projell (ascending dec)
    nx = 10  # left to right of projcell (ascending ra)
    # Need to append a 0 when < 1000 otherwise download crashes
    if projcell_id < 1000:
        projcell_id = "0" + str(projcell_id)
    else:
        projcell_id = str(projcell_id)
    id_list = []
    RA_min = []
    RA_max = []
    dec_min = []
    dec_max = []
    projcell_id_list = []
    for y in range(ny):
        for x in range(nx):
            # Estimate corner coordinates for each cell
            corner_coords, w = get_RADEC_coord(
                proj_crpix1,
                proj_crpix2,
                Xcell,
                Ycell,
                x,
                y,
                projcell_ra_center,
                projcell_dec_center,
            )
            # Check whether one cell is contained in the input image.
            # Both footprints become N x 2 arrays of (RA, Dec) points.
            pix_im_coord = np.array(
                [im_corner_coords[0], im_corner_coords[1]]).T
            pix_cell_coord = np.array([corner_coords[0], corner_coords[1]]).T
            # Check for cells having RA around below and above 0 degrees
            cell_RAs = pix_cell_coord[:, 0]
            cell_RA_max = np.max(cell_RAs)
            flag = False
            # if one corner has distance more than 300 degrees
            # to highest RA, add 360 to it
            # and to the image RA corners
            # So that intersect with shapely still make sense
            # Might need to change for very large field of view.
            for i, cell_RA in enumerate(cell_RAs):
                if cell_RA_max - cell_RA > 300:
                    flag = True
                    pix_cell_coord[i, 0] += 360
            if flag:
                pix_im_coord[:, 0] += 360
            im_poly = Polygon([tuple(co) for co in pix_im_coord])
            cell_poly = Polygon([tuple(co) for co in pix_cell_coord])
            # print (polygon_im.contains(cell_corner_coords, w))
            if im_poly.intersects(cell_poly):
                # Keep this skycell: record its id and sky bounding box.
                projcell_id_list.append(projcell_id)
                id_list.append("0%d%d" % (y, x))
                RA_min.append(np.min(corner_coords[0], axis=0))
                RA_max.append(np.max(corner_coords[0], axis=0))
                dec_min.append(np.min(corner_coords[1], axis=0))
                dec_max.append(np.max(corner_coords[1], axis=0))
    overlap_cells = Table(
        [projcell_id_list, id_list, RA_min, RA_max, dec_min, dec_max],
        names=(
            "projcell_id",
            "cell_id",
            "RA_min",
            "RA_max",
            "dec_min",
            "dec_max"),
    )
    return overlap_cells
def zone_PS1(ps1grid, cellID):
    """Returns the ZONE in which a cellID is."""
    first_cell = ps1grid["PROJCELL"][0]
    last_cell = ps1grid["PROJCELL"][-1]
    if cellID < first_cell or cellID > last_cell:
        errmsg = 'Cell ID %d is outside the expected range: [%d-%d]' % \
                 (cellID, first_cell, last_cell)
        raise BaseException(errmsg)
    # The final zone (45) holds the single polar cell 2643, which no
    # [PROJCELL[i], PROJCELL[i+1]) interval below covers.
    if cellID == 2643:
        zone = 45
    for i in range(len(ps1grid) - 1):
        lower = ps1grid["PROJCELL"][i]
        upper = ps1grid["PROJCELL"][i + 1]
        # Zone i spans cell IDs in the half-open interval [lower, upper).
        if lower <= cellID < upper:
            zone = ps1grid["ZONE"][i]
    return zone
def ps1_grid(im_corner_coords):
    """
    Return the ps1 cell IDs for a given image dimension
    Skycell images have names like skycell.nnnn.0yx where nnnn
    is the projection cell number (which ranges from 635 to 2643)
    and 0yx gives the skycell location in the image, with y and x
    ranging from 0 to 9 indicating the respective y and x section
    of the projection cell. The 000 skycell is in the bottom left
    corner of the projection cell, 010 is just above it, and 099
    is in the upper right corner.
    RA = n *360 deg / M

    Returns an astropy Table of every skycell overlapping the image
    footprint given by im_corner_coords ([RAs, Decs] of the corners).
    """
    path_muphoten = getpath()
    # RA, dec min and max of input image
    ra_min = np.min(im_corner_coords[0], axis=0)
    ra_max = np.max(im_corner_coords[0], axis=0)
    dec_min = np.min(im_corner_coords[1], axis=0)
    dec_max = np.max(im_corner_coords[1], axis=0)
    # Static description of the PS1 tessellation, shipped with Muphoten.
    ps1grid = Table.read(path_muphoten + "/ps1_survey/ps1grid.fits", hdu=1)
    # Get the min and max declination zones
    mask = ((ps1grid["DEC_MIN"] < dec_min) & (ps1grid["DEC_MAX"] > dec_min)) | (
        (ps1grid["DEC_MIN"] < dec_max) & (ps1grid["DEC_MAX"] > dec_max)
    )
    # Get all declinations zones
    all_zones_id = np.arange(
        ps1grid[mask]["ZONE"][0], ps1grid[mask]["ZONE"][-1] + 1)
    all_cells = []
    # Loop over the different zones
    for zone in all_zones_id:
        mask = ps1grid["ZONE"] == zone
        idx_bkp = -1
        projcell_idx_list = []
        for ra in [ra_min, ra_max]:
            # Get the cells covering the input ra
            closet_projcell_idx = (
                float(ps1grid[mask]["PROJCELL"])
                + ra * float(ps1grid[mask]["NBAND"]) / 360
            )
            projcell_idx = int(np.floor(closet_projcell_idx))
            # De-duplicate when ra_min and ra_max land in the same cell.
            if projcell_idx != idx_bkp:
                projcell_idx_list.append(projcell_idx)
                idx_bkp = projcell_idx
        # Check the 0 - 360 transition
        # Assume that an image can not have a FoV larger than 60 degrees
        # Need to do it depending on the image in the future
        if ra_max - ra_min < 300:
            total_proj_cell_idx = np.arange(
                projcell_idx_list[0], projcell_idx_list[-1] + 1
            )
        else:
            # Image overlapping the 0 - 360 region: stitch together the
            # cells from the zone start up to the low-RA side and from
            # the high-RA side to the end of the zone.
            total_proj_cell_idx = np.arange(
                ps1grid[mask]["PROJCELL"], projcell_idx_list[0] + 1
            )
            list_proj_cell_end = np.arange(
                projcell_idx_list[-1], ps1grid[ps1grid["ZONE"]
                                               == zone + 1]["PROJCELL"]
            )
            total_proj_cell_idx = np.append(
                total_proj_cell_idx, list_proj_cell_end)
        for cell_id in total_proj_cell_idx:
            mask = ps1grid["ZONE"] == zone_PS1(ps1grid, cell_id)
            # RA of this projection cell's center within its zone.
            diff_projcell_idx = cell_id - float(ps1grid[mask]["PROJCELL"])
            ra_center_projcell = diff_projcell_idx * \
                360 / float(ps1grid[mask]["NBAND"])
            cell_query = ps1_cell_coord(
                im_corner_coords,
                cell_id,
                ps1grid[mask]["XCELL"],
                ps1grid[mask]["YCELL"],
                ra_center_projcell,
                ps1grid[mask]["DEC"],
                ps1grid[mask]["CRPIX1"],
                ps1grid[mask]["CRPIX2"],
            )
            if len(cell_query) > 0:
                all_cells.append(cell_query)
    # Stack the per-projcell tables into one result table.
    if len(all_cells) == 1:
        all_cells = all_cells[0]
    else:
        all_cells = vstack([tab for tab in all_cells])
    return all_cells
def download_ps1_cells(cell_table, band, config, ps1Dir,
                       ps1RescaledDir, verbose="QUIET"):
    """Download the required cell from PS1 DR1.

    For each skycell in cell_table, fetch the stacked image and its mask
    from the STScI server (if not already cached in ps1Dir), rescale the
    image to linear flux / 1 s exposure, resample both to the telescope's
    pixel grid in ps1RescaledDir, and run scamp astrometry on the image.
    Returns the list of rescaled file paths (image, mask, image, mask, ...).
    """
    file_list = []
    # extension to get auxiliary images
    # See https://outerspace.stsci.edu/display/PANSTARRS/PS1+Stack+images
    # auxiliaryFiles = ['.mask', '.wt', '.num', '.exp', '.expwt', '']
    auxiliaryFiles = ["", ".mask"]
    BaseURL = "http://ps1images.stsci.edu/"
    # cell_table = [cell_table[7]]
    for cell in cell_table:
        cell_url_path = "rings.v3.skycell/%s/%s/" % (
            cell["projcell_id"],
            cell["cell_id"],
        )
        for aux in auxiliaryFiles:
            cell_file = "rings.v3.skycell.%s.%s.stk.%s.unconv%s.fits" % (
                cell["projcell_id"],
                cell["cell_id"],
                band,
                aux,
            )
            Link = BaseURL + cell_url_path + cell_file
            # Local name: dots replaced with underscores except extension.
            local_cell_file = cell_file.replace(
                ".", "_").replace("_fits", ".fits")
            FileNameFitsPath = ps1Dir + local_cell_file
            if os.path.isfile(FileNameFitsPath):
                print("File %s already downloaded" % FileNameFitsPath)
            else:
                # wget_command = "wget %s -O %s"%(Link,FileNameFitsPath)
                # 2-hour timeout, follow redirects.
                wget_command = "curl -m 7200 -L -o %s %s" % (
                    FileNameFitsPath, Link)
                os.system(wget_command)
                if os.path.isfile(FileNameFitsPath):
                    # Round-trip through fpack/funpack to normalize the
                    # FITS compression of the downloaded file.
                    funpack_command = "fpack %s; rm %s; funpack %s.fz" % (
                        FileNameFitsPath,
                        FileNameFitsPath,
                        FileNameFitsPath,
                    )
                    os.system(funpack_command)
                    rm_command = "rm %s.fz" % (FileNameFitsPath)
                    os.system(rm_command)
                else:
                    print(
                        "File %s was not downloaded or found on the server." %
                        Link)
            # Rescaled/resampled output is tagged with the telescope name.
            new_filename = local_cell_file.replace(
                '.fits', '_%s.fits' % config['telescope']
            )
            if aux == "":
                # Check if targeted file was downloaded to continue
                if os.path.isfile(FileNameFitsPath):
                    if os.path.isfile(ps1RescaledDir + new_filename):
                        pass
                    else:
                        # Rescale to physical flux
                        linear_rescale_ps1(
                            local_cell_file, ps1Dir, ps1RescaledDir, band
                        )
                        resample_ps1(local_cell_file,
                                     ps1RescaledDir,
                                     config)
                        # Perform astrometric calibration on each cell
                        config_ps1 = load_config("PS1", "default")
                        scamp(
                            ps1RescaledDir + new_filename,
                            config_ps1,
                            accuracy=0.1,
                            itermax=3,
                            band=band,
                            verbose="QUIET",
                        )
            else:
                if os.path.isfile(ps1RescaledDir + new_filename):
                    pass
                else:
                    resample_ps1(local_cell_file,
                                 ps1RescaledDir,
                                 config)
                    # No need to run scamp, pixels are already aligned with
                    # PS1 image which had correct wcs.
            # new_filename contains the telescope alias.
            file_list.append(ps1RescaledDir + new_filename)
    return file_list
def prepare_PS1_sub(ps1_cell_table, band, inputimage,
                    config, verbose="QUIET", method="individual"):
    """Prepare the download and formatting of PS1 images to be used for image
    substraction.

    Returns a list of [science_image, reference, mask] triplets:
    one triplet per PS1 skycell for method="individual", or a single
    triplet built from a mosaic of all cells for method="mosaic".
    """
    path_muphoten = getpath()
    path, filenameInput = os.path.split(inputimage)
    if path:
        folder = path + "/"
    else:
        folder = ""
    # Get pixelscale from input science image
    # And add it to config dictionary
    header = fits.getheader(inputimage)
    if "CDELT1" in header:
        pixScale = [abs(float(header["CDELT1"])) * 3600,
                    abs(float(header["CDELT2"])) * 3600]
    else:
        # Fall back to the CD matrix; column norms give the scale in
        # arcsec/pixel along each axis.
        pixScale = [np.sqrt(float(header["CD1_1"])**2 +
                            float(header["CD2_1"]**2)) * 3600,
                    np.sqrt(float(header["CD1_2"])**2 +
                            float(header["CD2_2"]**2)) * 3600]
    config['pixScale'] = pixScale
    # Cache directories for raw and rescaled PS1 cells.
    ps1Dir = path_muphoten + "/ps1Dir/"
    if not os.path.isdir(ps1Dir):
        os.makedirs(ps1Dir)
    ps1RescaledDir = path_muphoten + "/ps1RescaledDir/"
    if not os.path.isdir(ps1RescaledDir):
        os.makedirs(ps1RescaledDir)
    # Download PS1 files if not present, and reformat them
    ps1files = download_ps1_cells(
        ps1_cell_table, band, config, ps1Dir, ps1RescaledDir, verbose=verbose
    )
    subfiles = []
    if method == "mosaic":
        # Create mosaic file if it does not exist
        mosaicfile = folder + os.path.splitext(filenameInput)[0] + "_ps1_mosaic.fits"
        if os.path.isfile(mosaicfile):
            print(
                "PS1 mosaic image already exists in this location: %s."
                "If you want to recompute it, delete it."
                % mosaicfile
            )
        else:
            fileref_names = create_ps1_mosaic(
                ps1files, inputimage, folder, config, band, verbose=verbose
            )
            subfiles.append([inputimage, fileref_names[0], fileref_names[1]])
    elif method == "individual":
        # ps1files alternates image, mask, image, mask, ...
        for i in range(0, len(ps1files), 2):
            ref = ps1files[i]
            mask = ps1files[i + 1]
            subfiles.append([inputimage, ref, mask])
    return subfiles
def linear_rescale_ps1(filename, inputDir, outputDir,
                       band, normalise=True, method="headers"):
    """Rescale the PS1 DR1 fits file.

    PS1 stacks are stored with an asinh ("luptitude") scaling; this
    converts the data back to a linear flux scale, optionally normalises
    to a 1 s exposure, writes the rescaled image into ``outputDir`` and
    creates a companion ``*_mask.fits`` flagging the blank (NaN) pixels.

    Parameters
    ----------
    filename : str, name of the fits file (relative to ``inputDir``).
    inputDir : str, directory containing the original file.
    outputDir : str, directory where the rescaled file and mask are written.
    band : str, photometric band (currently unused, kept for API
        compatibility).
    normalise : bool, whether to normalise to a 1 s exposure.
    method : str, one of ``"exptime"``, ``"headers"`` or ``"exposure_map"``.

    Returns
    -------
    True on completion.
    """
    # Transform into linear flux scale: inverse of the asinh scaling,
    # using the BOFFSET/BSOFTEN keywords written by the PS1 pipeline.
    hdulist = fits.open(inputDir + filename)
    boffset = hdulist[0].header["BOFFSET"]
    bsoften = hdulist[0].header["BSOFTEN"]
    a = 2.5 / np.log(10)
    hdulist[0].data = boffset + 2 * bsoften * np.sinh(hdulist[0].data / a)
    # Normalise to 1s exposure time
    if normalise:
        if method == "exptime":
            print("Use exptime in header to rescale to an exposure of 1s.")
            exptime = float(hdulist[0].header["EXPTIME"])
            hdulist[0].data /= exptime
            try:
                hdulist[0].header["SATURATE"] /= exptime
            except KeyError:
                # No SATURATE keyword in this file; nothing to rescale.
                # (Previously `except BaseException` also swallowed
                # KeyboardInterrupt/SystemExit.)
                pass
        elif method == "headers":
            print("Use header information to rescale to an exposure of 1s.")
            # Check for SCL_* keywords. They correspond to the scaling
            # factor applied to each individual exposure. If 0, the
            # exposure is not taken into account in the stacked image,
            # so each exposure time is weighted by:
            #     0 if SCL_* = 0
            #     1 if SCL_* > 0
            hdr = hdulist[0].header
            SCLlist = hdr["SCL_*"]
            scale_flux = []
            for SCL in SCLlist:
                if float(hdr[SCL]) > 0:
                    scale_flux.append(1)
                else:
                    scale_flux.append(0)
            explist = hdr["EXP_*"]
            exptime_tot = 0
            for i, exp in enumerate(explist):
                exptime_tot += float(scale_flux[i]) * float(hdr[exp])
            hdulist[0].data /= exptime_tot
            try:
                hdulist[0].header["SATURATE"] /= exptime_tot
            except KeyError:
                pass
        elif method == "exposure_map":
            print("Use exposure map to rescale to an exposure of 1s.")
            # Normalise by the exact exposure time in each pixel using the
            # exposure-weight companion file.
            hdulist_expwt = fits.open(
                inputDir + os.path.splitext(filename)[0] + "_expwt.fits")
            hdulist[0].data = hdulist[0].data / hdulist_expwt[0].data
        hdulist[0].header["EXPTIME"] = 1
    # Keywords once considered for photometric calibration with scamp
    # (intentionally disabled):
    #   FILTER = band, PHOT_C = 25, PHOT_K = 0, PHOTFLAG = 'T'
    hdulist.writeto(outputDir + filename, overwrite=True)
    hdulist.close()
    # Replace pixels == 0 with NaNs: mostly the border and saturated
    # pixels.
    hdulist = fits.open(outputDir + filename)
    hdulist[0].data[hdulist[0].data == 0] = np.nan
    hdulist.writeto(outputDir + filename, overwrite=True)
    # Create a mask propagating the NaN pixels: 1 where the image is NaN,
    # 0 elsewhere.
    hdulist = fits.open(outputDir + filename)
    hdulist[0].data[np.isfinite(hdulist[0].data)] = 0
    hdulist[0].data[np.isnan(hdulist[0].data)] = 1
    hdulist.writeto(
        outputDir +
        os.path.splitext(filename)[0] +
        "_mask.fits",
        overwrite=True)
    return True
def resample_ps1(filename, inputDir, config, useweight=False,
                 verbose="NORMAL"):
    """Resample PS1 image to the telescope resolution.

    Runs ``swarp`` so that the PS1 image pixel scale matches
    ``config['pixScale']``. The resampled file is written next to the
    input with a ``_<telescope>`` suffix; the original file is removed
    afterwards. For mask images, the blank pixels created on the edges by
    the resampling are added back into the mask.

    Parameters
    ----------
    filename : str, name of the file to resample (relative to ``inputDir``).
    inputDir : str, directory containing the file.
    config : dict, must provide ``'pixScale'`` and ``'telescope'``.
    useweight : bool, whether to produce a weight map with swarp.
    verbose : str, swarp ``VERBOSE_TYPE`` value.

    Returns
    -------
    True on completion.
    """
    pixScale = config['pixScale'][0]
    basename = os.path.splitext(filename)[0]
    if useweight:
        subprocess.call(
            [
                "swarp",
                inputDir + filename,
                "-IMAGEOUT_NAME", inputDir + basename +
                '_%s' % config['telescope'] + ".fits",
                # Bug fix: the weight output previously referenced an
                # undefined variable `epoch` (NameError); use the file
                # basename like the image output does.
                "-WEIGHTOUT_NAME", inputDir + basename +
                '_%s' % config['telescope'] + ".weight.fits",
                "-VERBOSE_TYPE", verbose,
            ]
        )
    else:
        # Use bilinear to avoid artefacts, but it is worse for noise, so
        # this would need to be checked in more detail.
        subprocess.call(
            [
                "swarp",
                inputDir + filename,
                "-IMAGEOUT_NAME", inputDir + basename +
                '_%s' % config['telescope'] + ".fits",
                # '-GAIN_DEFAULT', str(gain),
                "-FSCALE_KEYWORD", "NONE",
                "-FSCALE_DEFAULT", "1, 1",
                "-SUBTRACT_BACK", "N",
                "-COMBINE", "N",
                "-BACK_SIZE", "128",
                "-BACK_FILTERSIZE", "3",
                "-RESAMPLE", "Y",
                "-RESAMPLE_DIR", inputDir,
                "-RESAMPLE_SUFFIX", '_%s.fits' % config['telescope'],
                "-PIXELSCALE_TYPE", "MANUAL",
                "-PIXEL_SCALE", str(pixScale),
                # '-CENTER', '%s, %s' % (header['CRVAL1'],
                #                        header['CRVAL2']),
                # '-RESAMPLING_TYPE', 'LANCZOS3',
                "-RESAMPLING_TYPE", "BILINEAR",
                # '-RESAMPLING_TYPE', 'NEAREST',
                "-OVERSAMPLING", "0",
                "-COMBINE_TYPE", "MEDIAN",
                "-VERBOSE_TYPE", verbose,
                "-COPY_KEYWORDS", "FILTER, EXPTIME, SATURATE",
            ]
        )
    # Clean up swarp by-products and the now-obsolete input file.
    rm_p('swarp.xml')
    rm_p("coadd.weight.fits")
    rm_p(inputDir + filename)
    if 'mask' in basename:
        # Add the new pixels created on the edge after resampling to the
        # mask. NOTE(review): assumes swarp produced a matching
        # `.weight.fits` for the resampled mask — confirm for the swarp
        # configuration used above.
        hdulist1 = fits.open(inputDir + basename +
                             '_%s' % config['telescope'] + ".weight.fits")
        zero_pix = hdulist1[0].data == 0
        hdulist1.close()
        outName = inputDir + basename + \
            '_%s' % config['telescope'] + ".fits"
        hdulist2 = fits.open(outName)
        # Flag the edge pixels with a high value so they are masked out.
        hdulist2[0].data[zero_pix] = 1e8
        hdulist2.writeto(outName, overwrite=True)
    return True
def create_ps1_mosaic(file_list, inputimage, outputDir, config,
                      band, useweight=False, verbose="NORMAL"):
    """Create a single mosaic of PS1 images using swarp.

    Builds two mosaics on the science image astrometry — one from the PS1
    images, one from their masks — then astrometrically calibrates the
    image mosaic with scamp and normalises its headers.

    Parameters
    ----------
    file_list : list of PS1 files, images and ``_mask`` companions mixed.
    inputimage : str, path to the science image providing the target WCS.
    outputDir : str, directory where the mosaics are written.
    config : dict, pipeline configuration (forwarded to scamp).
    band : str, photometric band (currently unused, kept for API
        compatibility).
    useweight : bool, whether to let swarp produce a weight map.
    verbose : str, verbosity level for swarp/scamp.

    Returns
    -------
    list ``[mosaic, mosaic_mask]`` file paths, with '.fits' extension.
    """
    _, filenameInput = os.path.split(inputimage)
    # Separate the PS1 images from their mask companions and write the
    # list files swarp will read.
    ima_list = [ima for ima in file_list if "_mask" not in ima]
    mask_list = [ima for ima in file_list if "_mask" in ima]
    np.savetxt("mosaic.list", ima_list, fmt="%s")
    np.savetxt("mask.list", mask_list, fmt="%s")
    imagefiles = [
        outputDir + os.path.splitext(filenameInput)[0] + "_ps1_mosaic",
        outputDir + os.path.splitext(filenameInput)[0] + "_ps1_mosaic_mask",
    ]
    # Get pixel scale (deg/pixel) from the input image header.
    header = fits.getheader(inputimage)
    try:
        pixScale = abs(header["CDELT1"])
    except Exception:
        try:
            pixScale = abs(header["CD1_1"])
        except Exception:
            # Bug fix: previously only a warning was printed and execution
            # continued, crashing later with an unrelated NameError on
            # `pixScale`. Fail here with an explicit message instead.
            raise KeyError(
                "Pixel scale could not be found in fits header.\n"
                "Expected keyword: CDELT1 or CD1_1"
            )
    pixScale = pixScale * 3600
    # Kept for the commented-out CENTER / IMAGE_SIZE options below.
    crval1 = header["CRVAL1"]
    crval2 = header["CRVAL2"]
    imagesize = [header["NAXIS1"], header["NAXIS2"]]
    # File name storing the common header that will be shared by all
    # images in the file lists.
    point = "registration"
    # Delete if it already exists.
    rm_p(point + ".head")
    # First run swarp to create a .head file containing the shared header.
    subprocess.call(
        [
            "swarp",
            "-HEADER_ONLY", "Y",
            "-IMAGEOUT_NAME", point + ".head",
            "-VERBOSE_TYPE", verbose,
        ]
        + [inputimage]
    )
    # Comment out some keywords with sed so they are not propagated to
    # the mosaic.
    subprocess.call(
        [
            "sed",
            "-i",
            "s/MJD-OBS/COMMENT/; s/EXPTIME/COMMENT/; s/GAIN /COMMENT/; s/SATURATE/COMMENT /",
            point + ".head",
        ]
    )
    imalists = [["@" + "mosaic.list"], ["@" + "mask.list"]]
    for i, imagefile in enumerate(imagefiles):
        # Remove the mosaic if it already exists.
        rm_p(imagefile + ".fits")
        # Never background-subtract a mask mosaic.
        if "mask" in imagefile:
            subBackground = "N"
        else:
            subBackground = "Y"
        # Copy the common header into the .head file so that it is read
        # by swarp for each image.
        shutil.copy(point + ".head", imagefile + ".head")
        if useweight:
            subprocess.call(
                [
                    "swarp",
                    "-IMAGEOUT_NAME", imagefile + ".fits",
                    "-WEIGHTOUT_NAME", imagefile + ".weight.fits",
                    "-VERBOSE_TYPE", verbose,
                ]
                + imalists[i]
            )
        else:
            subprocess.call(
                [
                    "swarp",
                    "-IMAGEOUT_NAME", imagefile + ".fits",
                    "-SUBTRACT_BACK", subBackground,
                    "-COMBINE", "Y",
                    "-BACK_SIZE", "128",
                    "-BACK_FILTERSIZE", "3",
                    # '-CENTER_TYPE', 'MANUAL',
                    # '-CENTER', '%s, %s' % (crval1, crval2),
                    "-RESAMPLE", "Y",
                    # '-RESAMPLING_TYPE', 'LANCZOS3',
                    "-RESAMPLING_TYPE", "BILINEAR",
                    "-PIXELSCALE_TYPE", "MANUAL",
                    "-PIXEL_SCALE", str(pixScale),
                    # '-IMAGE_SIZE', '%s, %s' % (imagesize[0], imagesize[1]),
                    "-OVERSAMPLING", "0",
                    "-COMBINE_TYPE", "MEDIAN",
                    "-COPY_KEYWORDS", " PIXEL_SCALE",
                    "-VERBOSE_TYPE", verbose,
                ]
                + imalists[i]
            )
        rm_p(imagefile + ".head")
    # Perform astrometric calibration of the mosaic with scamp.
    scamp(
        imagefiles[0] + ".fits",
        config,
        useweight=False,
        CheckPlot=False,
        verbose=verbose,
    )
    # Replace pixels == 0 with NaNs: mostly the border and saturated
    # pixels.
    hdulist = fits.open(imagefiles[0] + ".fits")
    hdulist[0].data[hdulist[0].data == 0] = np.nan
    # Keywords once considered for photometric calibration
    # (intentionally disabled):
    #   FILTER = band, PHOT_C = 25, PHOT_K = 0, PHOTFLAG = 'T'
    # Normalise the mosaic headers: unit gain and 1 s exposure; SATURATE
    # no longer applies after the rescaling, so drop it.
    hdulist[0].header["GAIN"] = 1
    hdulist[0].header["EXPTIME"] = 1
    hdulist[0].header.remove("SATURATE")
    hdulist.writeto(imagefiles[0] + ".fits", overwrite=True)
    # Create a mask propagating the flagged and NaN pixels.
    hdulist = fits.open(imagefiles[1] + ".fits")
    hdulist[0].data[hdulist[0].data > 0] = 1
    hdulist[0].data[np.isnan(hdulist[0].data)] = 1
    hdulist.writeto(imagefiles[1] + ".fits", overwrite=True)
    # Clean up the temporary swarp inputs and by-products.
    rm_p("mosaic.list")
    rm_p("mask.list")
    rm_p("swarp.xml")
    rm_p(point + ".head")
    # Add the fits extension to the returned file names.
    imagefiles = [i + ".fits" for i in imagefiles]
    return imagefiles
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/widget/Calendar.js | if(!dojo._hasResource["dojox.widget.Calendar"]){
dojo._hasResource["dojox.widget.Calendar"]=true;
dojo.provide("dojox.widget.Calendar");
dojo.experimental("dojox.widget.Calendar");
dojo.require("dijit.Calendar");
dojo.require("dijit._Container");
dojo.declare("dojox.widget._CalendarBase",[dijit._Widget,dijit._Templated,dijit._Container],{templateString:dojo.cache("dojox.widget","Calendar/Calendar.html","<div class=\"dojoxCalendar\">\n <div tabindex=\"0\" class=\"dojoxCalendarContainer\" style=\"visibility: visible;\" dojoAttachPoint=\"container\">\n\t\t<div style=\"display:none\">\n\t\t\t<div dojoAttachPoint=\"previousYearLabelNode\"></div>\n\t\t\t<div dojoAttachPoint=\"nextYearLabelNode\"></div>\n\t\t\t<div dojoAttachPoint=\"monthLabelSpacer\"></div>\n\t\t</div>\n <div class=\"dojoxCalendarHeader\">\n <div>\n <div class=\"dojoxCalendarDecrease\" dojoAttachPoint=\"decrementMonth\"></div>\n </div>\n <div class=\"\">\n <div class=\"dojoxCalendarIncrease\" dojoAttachPoint=\"incrementMonth\"></div>\n </div>\n <div class=\"dojoxCalendarTitle\" dojoAttachPoint=\"header\" dojoAttachEvent=\"onclick: onHeaderClick\">\n </div>\n </div>\n <div class=\"dojoxCalendarBody\" dojoAttachPoint=\"containerNode\"></div>\n <div class=\"\">\n <div class=\"dojoxCalendarFooter\" dojoAttachPoint=\"footer\"> \n </div>\n </div>\n </div>\n</div>\n"),_views:null,useFx:true,widgetsInTemplate:true,value:new Date(),constraints:null,footerFormat:"medium",constructor:function(){
this._views=[];
},postMixInProperties:function(){
var c=this.constraints;
if(c){
var _1=dojo.date.stamp.fromISOString;
if(typeof c.min=="string"){
c.min=_1(c.min);
}
if(typeof c.max=="string"){
c.max=_1(c.max);
}
}
},postCreate:function(){
this.displayMonth=new Date(this.attr("value"));
var _2={parent:this,_getValueAttr:dojo.hitch(this,function(){
return new Date(this._internalValue||this.value);
}),_getDisplayMonthAttr:dojo.hitch(this,function(){
return new Date(this.displayMonth);
}),_getConstraintsAttr:dojo.hitch(this,function(){
return this.constraints;
}),getLang:dojo.hitch(this,function(){
return this.lang;
}),isDisabledDate:dojo.hitch(this,this.isDisabledDate),getClassForDate:dojo.hitch(this,this.getClassForDate),addFx:this.useFx?dojo.hitch(this,this.addFx):function(){
}};
dojo.forEach(this._views,function(_3){
var _4=new _3(_2,dojo.create("div"));
this.addChild(_4);
var _5=_4.getHeader();
if(_5){
this.header.appendChild(_5);
dojo.style(_5,"display","none");
}
dojo.style(_4.domNode,"visibility","hidden");
dojo.connect(_4,"onValueSelected",this,"_onDateSelected");
_4.attr("value",this.attr("value"));
},this);
if(this._views.length<2){
dojo.style(this.header,"cursor","auto");
}
this.inherited(arguments);
this._children=this.getChildren();
this._currentChild=0;
var _6=new Date();
this.footer.innerHTML="Today: "+dojo.date.locale.format(_6,{formatLength:this.footerFormat,selector:"date",locale:this.lang});
dojo.connect(this.footer,"onclick",this,"goToToday");
var _7=this._children[0];
dojo.style(_7.domNode,"top","0px");
dojo.style(_7.domNode,"visibility","visible");
var _8=_7.getHeader();
if(_8){
dojo.style(_7.getHeader(),"display","");
}
dojo[_7.useHeader?"removeClass":"addClass"](this.container,"no-header");
_7.onDisplay();
var _9=this;
var _a=function(_b,_c,_d){
dijit.typematic.addMouseListener(_9[_b],_9,function(_e){
if(_e>=0){
_9._adjustDisplay(_c,_d);
}
},0.8,500);
};
_a("incrementMonth","month",1);
_a("decrementMonth","month",-1);
this._updateTitleStyle();
},addFx:function(_f,_10){
},_setValueAttr:function(_11){
if(!_11["getFullYear"]){
_11=dojo.date.stamp.fromISOString(_11+"");
}
if(!this.value||dojo.date.compare(_11,this.value)){
_11=new Date(_11);
this.displayMonth=new Date(_11);
this._internalValue=_11;
if(!this.isDisabledDate(_11,this.lang)&&this._currentChild==0){
this.value=_11;
this.onChange(_11);
}
this._children[this._currentChild].attr("value",this.value);
return true;
}
return false;
},isDisabledDate:function(_12,_13){
var c=this.constraints;
var _14=dojo.date.compare;
return c&&(c.min&&(_14(c.min,_12,"date")>0)||(c.max&&_14(c.max,_12,"date")<0));
},onValueSelected:function(_15){
},_onDateSelected:function(_16,_17,_18){
this.displayMonth=_16;
this.attr("value",_16);
if(!this._transitionVert(-1)){
if(!_17&&_17!==0){
_17=this.attr("value");
}
this.onValueSelected(_17);
}
},onChange:function(_19){
},onHeaderClick:function(e){
this._transitionVert(1);
},goToToday:function(){
this.attr("value",new Date());
this.onValueSelected(this.attr("value"));
},_transitionVert:function(_1a){
var _1b=this._children[this._currentChild];
var _1c=this._children[this._currentChild+_1a];
if(!_1c){
return false;
}
dojo.style(_1c.domNode,"visibility","visible");
var _1d=dojo.style(this.containerNode,"height");
_1c.attr("value",this.displayMonth);
if(_1b.header){
dojo.style(_1b.header,"display","none");
}
if(_1c.header){
dojo.style(_1c.header,"display","");
}
dojo.style(_1c.domNode,"top",(_1d*-1)+"px");
dojo.style(_1c.domNode,"visibility","visible");
this._currentChild+=_1a;
var _1e=_1d*_1a;
var _1f=0;
dojo.style(_1c.domNode,"top",(_1e*-1)+"px");
var _20=dojo.animateProperty({node:_1b.domNode,properties:{top:_1e},onEnd:function(){
dojo.style(_1b.domNode,"visibility","hidden");
}});
var _21=dojo.animateProperty({node:_1c.domNode,properties:{top:_1f},onEnd:function(){
_1c.onDisplay();
}});
dojo[_1c.useHeader?"removeClass":"addClass"](this.container,"no-header");
_20.play();
_21.play();
_1b.onBeforeUnDisplay();
_1c.onBeforeDisplay();
this._updateTitleStyle();
return true;
},_updateTitleStyle:function(){
dojo[this._currentChild<this._children.length-1?"addClass":"removeClass"](this.header,"navToPanel");
},_slideTable:function(_22,_23,_24){
var _25=_22.domNode;
var _26=_25.cloneNode(true);
var _27=dojo.style(_25,"width");
_25.parentNode.appendChild(_26);
dojo.style(_25,"left",(_27*_23)+"px");
_24();
var _28=dojo.animateProperty({node:_26,properties:{left:_27*_23*-1},duration:500,onEnd:function(){
_26.parentNode.removeChild(_26);
}});
var _29=dojo.animateProperty({node:_25,properties:{left:0},duration:500});
_28.play();
_29.play();
},_addView:function(_2a){
this._views.push(_2a);
},getClassForDate:function(_2b,_2c){
},_adjustDisplay:function(_2d,_2e,_2f){
var _30=this._children[this._currentChild];
var _31=this.displayMonth=_30.adjustDate(this.displayMonth,_2e);
this._slideTable(_30,_2e,function(){
_30.attr("value",_31);
});
}});
dojo.declare("dojox.widget._CalendarView",dijit._Widget,{headerClass:"",useHeader:true,cloneClass:function(_32,n,_33){
var _34=dojo.query(_32,this.domNode)[0];
var i;
if(!_33){
for(i=0;i<n;i++){
_34.parentNode.appendChild(_34.cloneNode(true));
}
}else{
var _35=dojo.query(_32,this.domNode)[0];
for(i=0;i<n;i++){
_34.parentNode.insertBefore(_34.cloneNode(true),_35);
}
}
},_setText:function(_36,_37){
if(_36.innerHTML!=_37){
dojo.empty(_36);
_36.appendChild(dojo.doc.createTextNode(_37));
}
},getHeader:function(){
return this.header||(this.header=this.header=dojo.create("span",{"class":this.headerClass}));
},onValueSelected:function(_38){
},adjustDate:function(_39,_3a){
return dojo.date.add(_39,this.datePart,_3a);
},onDisplay:function(){
},onBeforeDisplay:function(){
},onBeforeUnDisplay:function(){
}});
dojo.declare("dojox.widget._CalendarDay",null,{parent:null,constructor:function(){
this._addView(dojox.widget._CalendarDayView);
}});
dojo.declare("dojox.widget._CalendarDayView",[dojox.widget._CalendarView,dijit._Templated],{templateString:dojo.cache("dojox.widget","Calendar/CalendarDay.html","<div class=\"dijitCalendarDayLabels\" style=\"left: 0px;\" dojoAttachPoint=\"dayContainer\">\n\t<div dojoAttachPoint=\"header\">\n\t\t<div dojoAttachPoint=\"monthAndYearHeader\">\n\t\t\t<span dojoAttachPoint=\"monthLabelNode\" class=\"dojoxCalendarMonthLabelNode\"></span>\n\t\t\t<span dojoAttachPoint=\"headerComma\" class=\"dojoxCalendarComma\">,</span>\n\t\t\t<span dojoAttachPoint=\"yearLabelNode\" class=\"dojoxCalendarDayYearLabel\"></span>\n\t\t</div>\n\t</div>\n\t<table cellspacing=\"0\" cellpadding=\"0\" border=\"0\" style=\"margin: auto;\">\n\t\t<thead>\n\t\t\t<tr>\n\t\t\t\t<td class=\"dijitCalendarDayLabelTemplate\"><div class=\"dijitCalendarDayLabel\"></div></td>\n\t\t\t</tr>\n\t\t</thead>\n\t\t<tbody dojoAttachEvent=\"onclick: _onDayClick\">\n\t\t\t<tr class=\"dijitCalendarWeekTemplate\">\n\t\t\t\t<td class=\"dojoxCalendarNextMonth dijitCalendarDateTemplate\">\n\t\t\t\t\t<div class=\"dijitCalendarDateLabel\"></div>\n\t\t\t\t</td>\n\t\t\t</tr>\n\t\t</tbody>\n\t</table>\n</div>\n"),datePart:"month",dayWidth:"narrow",postCreate:function(){
this.cloneClass(".dijitCalendarDayLabelTemplate",6);
this.cloneClass(".dijitCalendarDateTemplate",6);
this.cloneClass(".dijitCalendarWeekTemplate",5);
var _3b=dojo.date.locale.getNames("days",this.dayWidth,"standAlone",this.getLang());
var _3c=dojo.cldr.supplemental.getFirstDayOfWeek(this.getLang());
dojo.query(".dijitCalendarDayLabel",this.domNode).forEach(function(_3d,i){
this._setText(_3d,_3b[(i+_3c)%7]);
},this);
},onDisplay:function(){
if(!this._addedFx){
this._addedFx=true;
this.addFx(".dijitCalendarDateTemplate div",this.domNode);
}
},_onDayClick:function(e){
if(typeof (e.target._date)=="undefined"){
return;
}
var _3e=new Date(this.attr("displayMonth"));
var p=e.target.parentNode;
var c="dijitCalendar";
var d=dojo.hasClass(p,c+"PreviousMonth")?-1:(dojo.hasClass(p,c+"NextMonth")?1:0);
if(d){
_3e=dojo.date.add(_3e,"month",d);
}
_3e.setDate(e.target._date);
if(this.isDisabledDate(_3e)){
dojo.stopEvent(e);
return;
}
this.parent._onDateSelected(_3e);
},_setValueAttr:function(_3f){
this._populateDays();
},_populateDays:function(){
var _40=new Date(this.attr("displayMonth"));
_40.setDate(1);
var _41=_40.getDay();
var _42=dojo.date.getDaysInMonth(_40);
var _43=dojo.date.getDaysInMonth(dojo.date.add(_40,"month",-1));
var _44=new Date();
var _45=this.attr("value");
var _46=dojo.cldr.supplemental.getFirstDayOfWeek(this.getLang());
if(_46>_41){
_46-=7;
}
var _47=dojo.date.compare;
var _48=".dijitCalendarDateTemplate";
var _49="dijitCalendarSelectedDate";
var _4a=this._lastDate;
var _4b=_4a==null||_4a.getMonth()!=_40.getMonth()||_4a.getFullYear()!=_40.getFullYear();
this._lastDate=_40;
if(!_4b){
dojo.query(_48,this.domNode).removeClass(_49).filter(function(_4c){
return _4c.className.indexOf("dijitCalendarCurrent")>-1&&_4c._date==_45.getDate();
}).addClass(_49);
return;
}
dojo.query(_48,this.domNode).forEach(function(_4d,i){
i+=_46;
var _4e=new Date(_40);
var _4f,_50="dijitCalendar",adj=0;
if(i<_41){
_4f=_43-_41+i+1;
adj=-1;
_50+="Previous";
}else{
if(i>=(_41+_42)){
_4f=i-_41-_42+1;
adj=1;
_50+="Next";
}else{
_4f=i-_41+1;
_50+="Current";
}
}
if(adj){
_4e=dojo.date.add(_4e,"month",adj);
}
_4e.setDate(_4f);
if(!_47(_4e,_44,"date")){
_50="dijitCalendarCurrentDate "+_50;
}
if(!_47(_4e,_45,"date")&&!_47(_4e,_45,"month")&&!_47(_4e,_45,"year")){
_50=_49+" "+_50;
}
if(this.isDisabledDate(_4e,this.getLang())){
_50=" dijitCalendarDisabledDate "+_50;
}
var _51=this.getClassForDate(_4e,this.getLang());
if(_51){
_50+=_51+" "+_50;
}
_4d.className=_50+"Month dijitCalendarDateTemplate";
_4d.dijitDateValue=_4e.valueOf();
var _52=dojo.query(".dijitCalendarDateLabel",_4d)[0];
this._setText(_52,_4e.getDate());
_52._date=_52.parentNode._date=_4e.getDate();
},this);
var _53=dojo.date.locale.getNames("months","wide","standAlone",this.getLang());
this._setText(this.monthLabelNode,_53[_40.getMonth()]);
this._setText(this.yearLabelNode,_40.getFullYear());
}});
dojo.declare("dojox.widget._CalendarMonthYear",null,{constructor:function(){
this._addView(dojox.widget._CalendarMonthYearView);
}});
dojo.declare("dojox.widget._CalendarMonthYearView",[dojox.widget._CalendarView,dijit._Templated],{templateString:dojo.cache("dojox.widget","Calendar/CalendarMonthYear.html","<div class=\"dojoxCal-MY-labels\" style=\"left: 0px;\"\t\n\tdojoAttachPoint=\"myContainer\" dojoAttachEvent=\"onclick: onClick\">\n\t\t<table cellspacing=\"0\" cellpadding=\"0\" border=\"0\" style=\"margin: auto;\">\n\t\t\t\t<tbody>\n\t\t\t\t\t\t<tr class=\"dojoxCal-MY-G-Template\">\n\t\t\t\t\t\t\t\t<td class=\"dojoxCal-MY-M-Template\">\n\t\t\t\t\t\t\t\t\t\t<div class=\"dojoxCalendarMonthLabel\"></div>\n\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t<td class=\"dojoxCal-MY-M-Template\">\n\t\t\t\t\t\t\t\t\t\t<div class=\"dojoxCalendarMonthLabel\"></div>\n\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t<td class=\"dojoxCal-MY-Y-Template\">\n\t\t\t\t\t\t\t\t\t\t<div class=\"dojoxCalendarYearLabel\"></div>\n\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t<td class=\"dojoxCal-MY-Y-Template\">\n\t\t\t\t\t\t\t\t\t\t<div class=\"dojoxCalendarYearLabel\"></div>\n\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t </tr>\n\t\t\t\t\t\t <tr class=\"dojoxCal-MY-btns\">\n\t\t\t\t\t\t \t <td class=\"dojoxCal-MY-btns\" colspan=\"4\">\n\t\t\t\t\t\t \t\t <span class=\"dijitReset dijitInline dijitButtonNode ok-btn\" dojoAttachEvent=\"onclick: onOk\" dojoAttachPoint=\"okBtn\">\n\t\t\t\t\t\t \t \t \t <button\tclass=\"dijitReset dijitStretch dijitButtonContents\">OK</button>\n\t\t\t\t\t\t\t\t </span>\n\t\t\t\t\t\t\t\t <span class=\"dijitReset dijitInline dijitButtonNode cancel-btn\" dojoAttachEvent=\"onclick: onCancel\" dojoAttachPoint=\"cancelBtn\">\n\t\t\t\t\t\t \t \t\t <button\tclass=\"dijitReset dijitStretch dijitButtonContents\">Cancel</button>\n\t\t\t\t\t\t\t\t </span>\n\t\t\t\t\t\t \t </td>\n\t\t\t\t\t\t </tr>\n\t\t\t\t</tbody>\n\t\t</table>\n</div>\n"),datePart:"year",displayedYears:10,useHeader:false,postCreate:function(){
this.cloneClass(".dojoxCal-MY-G-Template",5,".dojoxCal-MY-btns");
this.monthContainer=this.yearContainer=this.myContainer;
var _54="dojoxCalendarYearLabel";
var _55="dojoxCalendarDecrease";
var _56="dojoxCalendarIncrease";
dojo.query("."+_54,this.myContainer).forEach(function(_57,idx){
var _58=_56;
switch(idx){
case 0:
_58=_55;
case 1:
dojo.removeClass(_57,_54);
dojo.addClass(_57,_58);
break;
}
});
this._decBtn=dojo.query("."+_55,this.myContainer)[0];
this._incBtn=dojo.query("."+_56,this.myContainer)[0];
dojo.query(".dojoxCal-MY-M-Template",this.domNode).filter(function(_59){
return _59.cellIndex==1;
}).addClass("dojoxCal-MY-M-last");
dojo.connect(this,"onBeforeDisplay",dojo.hitch(this,function(){
this._cachedDate=new Date(this.attr("value").getTime());
this._populateYears(this._cachedDate.getFullYear());
this._populateMonths();
this._updateSelectedMonth();
this._updateSelectedYear();
}));
dojo.connect(this,"_populateYears",dojo.hitch(this,function(){
this._updateSelectedYear();
}));
dojo.connect(this,"_populateMonths",dojo.hitch(this,function(){
this._updateSelectedMonth();
}));
this._cachedDate=this.attr("value");
this._populateYears();
this._populateMonths();
this.addFx(".dojoxCalendarMonthLabel,.dojoxCalendarYearLabel ",this.myContainer);
},_setValueAttr:function(_5a){
this._populateYears(_5a.getFullYear());
},getHeader:function(){
return null;
},_getMonthNames:function(_5b){
this._monthNames=this._monthNames||dojo.date.locale.getNames("months",_5b,"standAlone",this.getLang());
return this._monthNames;
},_populateMonths:function(){
var _5c=this._getMonthNames("abbr");
dojo.query(".dojoxCalendarMonthLabel",this.monthContainer).forEach(dojo.hitch(this,function(_5d,cnt){
this._setText(_5d,_5c[cnt]);
}));
var _5e=this.attr("constraints");
if(_5e){
var _5f=new Date();
_5f.setFullYear(this._year);
var min=-1,max=12;
if(_5e.min){
var _60=_5e.min.getFullYear();
if(_60>this._year){
min=12;
}else{
if(_60==this._year){
min=_5e.min.getMonth();
}
}
}
if(_5e.max){
var _61=_5e.max.getFullYear();
if(_61<this._year){
max=-1;
}else{
if(_61==this._year){
max=_5e.max.getMonth();
}
}
}
dojo.query(".dojoxCalendarMonthLabel",this.monthContainer).forEach(dojo.hitch(this,function(_62,cnt){
dojo[(cnt<min||cnt>max)?"addClass":"removeClass"](_62,"dijitCalendarDisabledDate");
}));
}
var h=this.getHeader();
if(h){
this._setText(this.getHeader(),this.attr("value").getFullYear());
}
},_populateYears:function(_63){
var _64=this.attr("constraints");
var _65=_63||this.attr("value").getFullYear();
var _66=_65-Math.floor(this.displayedYears/2);
var min=_64&&_64.min?_64.min.getFullYear():_66-10000;
_66=Math.max(min,_66);
this._displayedYear=_65;
var _67=dojo.query(".dojoxCalendarYearLabel",this.yearContainer);
var max=_64&&_64.max?_64.max.getFullYear()-_66:_67.length;
var _68="dijitCalendarDisabledDate";
_67.forEach(dojo.hitch(this,function(_69,cnt){
if(cnt<=max){
this._setText(_69,_66+cnt);
dojo.removeClass(_69,_68);
}else{
dojo.addClass(_69,_68);
}
}));
if(this._incBtn){
dojo[max<_67.length?"addClass":"removeClass"](this._incBtn,_68);
}
if(this._decBtn){
dojo[min>=_66?"addClass":"removeClass"](this._decBtn,_68);
}
var h=this.getHeader();
if(h){
this._setText(this.getHeader(),_66+" - "+(_66+11));
}
},_updateSelectedYear:function(){
this._year=String((this._cachedDate||this.attr("value")).getFullYear());
this._updateSelectedNode(".dojoxCalendarYearLabel",dojo.hitch(this,function(_6a,idx){
return this._year!==null&&_6a.innerHTML==this._year;
}));
},_updateSelectedMonth:function(){
var _6b=(this._cachedDate||this.attr("value")).getMonth();
this._month=_6b;
this._updateSelectedNode(".dojoxCalendarMonthLabel",function(_6c,idx){
return idx==_6b;
});
},_updateSelectedNode:function(_6d,_6e){
var sel="dijitCalendarSelectedDate";
dojo.query(_6d,this.domNode).forEach(function(_6f,idx,_70){
dojo[_6e(_6f,idx,_70)?"addClass":"removeClass"](_6f.parentNode,sel);
});
var _71=dojo.query(".dojoxCal-MY-M-Template div",this.myContainer).filter(function(_72){
return dojo.hasClass(_72.parentNode,sel);
})[0];
if(!_71){
return;
}
var _73=dojo.hasClass(_71,"dijitCalendarDisabledDate");
dojo[_73?"addClass":"removeClass"](this.okBtn,"dijitDisabled");
},onClick:function(evt){
var _74;
var _75=this;
var sel="dijitCalendarSelectedDate";
function hc(c){
return dojo.hasClass(evt.target,c);
};
if(hc("dijitCalendarDisabledDate")){
dojo.stopEvent(evt);
return false;
}
if(hc("dojoxCalendarMonthLabel")){
_74="dojoxCal-MY-M-Template";
this._month=evt.target.parentNode.cellIndex+(evt.target.parentNode.parentNode.rowIndex*2);
this._cachedDate.setMonth(this._month);
this._updateSelectedMonth();
}else{
if(hc("dojoxCalendarYearLabel")){
_74="dojoxCal-MY-Y-Template";
this._year=Number(evt.target.innerHTML);
this._cachedDate.setYear(this._year);
this._populateMonths();
this._updateSelectedYear();
}else{
if(hc("dojoxCalendarDecrease")){
this._populateYears(this._displayedYear-10);
return true;
}else{
if(hc("dojoxCalendarIncrease")){
this._populateYears(this._displayedYear+10);
return true;
}else{
return true;
}
}
}
}
dojo.stopEvent(evt);
return false;
},onOk:function(evt){
dojo.stopEvent(evt);
if(dojo.hasClass(this.okBtn,"dijitDisabled")){
return false;
}
this.onValueSelected(this._cachedDate);
return false;
},onCancel:function(evt){
dojo.stopEvent(evt);
this.onValueSelected(this.attr("value"));
return false;
}});
dojo.declare("dojox.widget.Calendar2Pane",[dojox.widget._CalendarBase,dojox.widget._CalendarDay,dojox.widget._CalendarMonthYear],{});
dojo.declare("dojox.widget.Calendar",[dojox.widget._CalendarBase,dojox.widget._CalendarDay,dojox.widget._CalendarMonthYear],{});
dojo.declare("dojox.widget.DailyCalendar",[dojox.widget._CalendarBase,dojox.widget._CalendarDay],{});
dojo.declare("dojox.widget.MonthAndYearlyCalendar",[dojox.widget._CalendarBase,dojox.widget._CalendarMonthYear],{});
} | PypiClean |
/ASPER_Messenger_Server-0.11.1.tar.gz/ASPER_Messenger_Server-0.11.1/server/server/config_window.py | from PyQt5.QtWidgets import QDialog, QLabel, QLineEdit, QPushButton, QFileDialog, QMessageBox
from PyQt5.QtCore import Qt
import os
class ConfigWindow(QDialog):
'''Класс окно настроек.'''
def __init__(self, config):
super().__init__()
self.config = config
self.initUI()
def initUI(self):
'''Настройки окна'''
self.setFixedSize(365, 260)
self.setWindowTitle('Настройки сервера')
self.setAttribute(Qt.WA_DeleteOnClose)
self.setModal(True)
# Надпись о файле базы данных:
self.db_path_label = QLabel('Путь до файла базы данных: ', self)
self.db_path_label.move(10, 10)
self.db_path_label.setFixedSize(240, 15)
# Строка с путём базы
self.db_path = QLineEdit(self)
self.db_path.setFixedSize(250, 20)
self.db_path.move(10, 30)
self.db_path.setReadOnly(True)
# Кнопка выбора пути.
self.db_path_select = QPushButton('Обзор...', self)
self.db_path_select.move(275, 28)
# Метка с именем поля файла базы данных
self.db_file_label = QLabel('Имя файла базы данных: ', self)
self.db_file_label.move(10, 68)
self.db_file_label.setFixedSize(180, 15)
# Поле для ввода имени файла
self.db_file = QLineEdit(self)
self.db_file.move(200, 66)
self.db_file.setFixedSize(150, 20)
# Метка с номером порта
self.port_label = QLabel('Номер порта для соединений:', self)
self.port_label.move(10, 108)
self.port_label.setFixedSize(180, 15)
# Поле для ввода номера порта
self.port = QLineEdit(self)
self.port.move(200, 108)
self.port.setFixedSize(150, 20)
# Метка с адресом для соединений
self.ip_label = QLabel('С какого IP принимаем соединения:', self)
self.ip_label.move(10, 148)
self.ip_label.setFixedSize(180, 15)
# Метка с напоминанием о пустом поле.
self.ip_label_note = QLabel(
' оставьте это поле пустым, чтобы\n принимать соединения с любых адресов.',
self)
self.ip_label_note.move(10, 168)
self.ip_label_note.setFixedSize(500, 30)
# Поле для ввода ip
self.ip = QLineEdit(self)
self.ip.move(200, 148)
self.ip.setFixedSize(150, 20)
# Кнопка сохранения настроек
self.save_btn = QPushButton('Сохранить', self)
self.save_btn.move(190, 220)
# Кнапка закрытия окна
self.close_button = QPushButton('Закрыть', self)
self.close_button.move(275, 220)
self.close_button.clicked.connect(self.close)
self.db_path_select.clicked.connect(self.open_file_dialog)
self.show()
self.db_path.insert(self.config['SETTINGS']['Database_path'])
self.db_file.insert(self.config['SETTINGS']['Database_file'])
self.port.insert(self.config['SETTINGS']['Default_port'])
self.ip.insert(self.config['SETTINGS']['Listen_Address'])
self.save_btn.clicked.connect(self.save_server_config)
def open_file_dialog(self):
'''Метод обработчик открытия окна выбора папки.'''
global dialog
dialog = QFileDialog(self)
path = dialog.getExistingDirectory()
path = path.replace('/', '\\')
self.db_path.clear()
self.db_path.insert(path)
def save_server_config(self):
    '''
    Сохранение настроек сервера.

    Считывает значения из полей формы, проверяет корректность
    номера порта (целое число в диапазоне 1024..65535) и, если всё
    верно, записывает конфигурацию в файл ``server.ini`` рядом с
    пакетом. При ошибках показывает предупреждение QMessageBox.
    '''
    message = QMessageBox()
    self.config['SETTINGS']['Database_path'] = self.db_path.text()
    self.config['SETTINGS']['Database_file'] = self.db_file.text()
    try:
        port = int(self.port.text())
    except ValueError:
        message.warning(self, 'Ошибка', 'Порт должен быть числом')
    else:
        self.config['SETTINGS']['Listen_Address'] = self.ip.text()
        # Допустимые непривилегированные порты: 1024..65535 включительно.
        if 1023 < port < 65536:
            self.config['SETTINGS']['Default_port'] = str(port)
            # ini-файл лежит на уровень выше каталога этого модуля.
            dir_path = os.path.dirname(os.path.realpath(__file__))
            dir_path = os.path.join(dir_path, '..')
            with open(os.path.join(dir_path, 'server.ini'), 'w') as conf:
                self.config.write(conf)
            message.information(
                self, 'OK', 'Настройки успешно сохранены!')
        else:
            # Верхняя граница — 65535 (условие port < 65536),
            # прежнее сообщение ошибочно называло 65536.
            message.warning(
                self, 'Ошибка', 'Порт должен быть от 1024 до 65535')
/AyiinXd-0.0.8-cp311-cp311-macosx_10_9_universal2.whl/fipper/emoji.py |
GRINNING_FACE = "\U0001f600"
GRINNING_FACE_WITH_BIG_EYES = "\U0001f603"
GRINNING_FACE_WITH_SMILING_EYES = "\U0001f604"
BEAMING_FACE_WITH_SMILING_EYES = "\U0001f601"
GRINNING_SQUINTING_FACE = "\U0001f606"
GRINNING_FACE_WITH_SWEAT = "\U0001f605"
ROLLING_ON_THE_FLOOR_LAUGHING = "\U0001f923"
FACE_WITH_TEARS_OF_JOY = "\U0001f602"
SLIGHTLY_SMILING_FACE = "\U0001f642"
UPSIDE_DOWN_FACE = "\U0001f643"
MELTING_FACE = "\U0001fae0"
WINKING_FACE = "\U0001f609"
SMILING_FACE_WITH_SMILING_EYES = "\U0001f60a"
SMILING_FACE_WITH_HALO = "\U0001f607"
SMILING_FACE_WITH_HEARTS = "\U0001f970"
SMILING_FACE_WITH_HEART_EYES = "\U0001f60d"
STAR_STRUCK = "\U0001f929"
FACE_BLOWING_A_KISS = "\U0001f618"
KISSING_FACE = "\U0001f617"
SMILING_FACE = "\u263a\ufe0f"
KISSING_FACE_WITH_CLOSED_EYES = "\U0001f61a"
KISSING_FACE_WITH_SMILING_EYES = "\U0001f619"
SMILING_FACE_WITH_TEAR = "\U0001f972"
FACE_SAVORING_FOOD = "\U0001f60b"
FACE_WITH_TONGUE = "\U0001f61b"
WINKING_FACE_WITH_TONGUE = "\U0001f61c"
ZANY_FACE = "\U0001f92a"
SQUINTING_FACE_WITH_TONGUE = "\U0001f61d"
MONEY_MOUTH_FACE = "\U0001f911"
SMILING_FACE_WITH_OPEN_HANDS = "\U0001f917"
FACE_WITH_HAND_OVER_MOUTH = "\U0001f92d"
FACE_WITH_OPEN_EYES_AND_HAND_OVER_MOUTH = "\U0001fae2"
FACE_WITH_PEEKING_EYE = "\U0001fae3"
SHUSHING_FACE = "\U0001f92b"
THINKING_FACE = "\U0001f914"
SALUTING_FACE = "\U0001fae1"
ZIPPER_MOUTH_FACE = "\U0001f910"
FACE_WITH_RAISED_EYEBROW = "\U0001f928"
NEUTRAL_FACE = "\U0001f610"
EXPRESSIONLESS_FACE = "\U0001f611"
FACE_WITHOUT_MOUTH = "\U0001f636"
DOTTED_LINE_FACE = "\U0001fae5"
FACE_IN_CLOUDS = "\U0001f636\u200d\U0001f32b\ufe0f"
SMIRKING_FACE = "\U0001f60f"
UNAMUSED_FACE = "\U0001f612"
FACE_WITH_ROLLING_EYES = "\U0001f644"
GRIMACING_FACE = "\U0001f62c"
FACE_EXHALING = "\U0001f62e\u200d\U0001f4a8"
LYING_FACE = "\U0001f925"
RELIEVED_FACE = "\U0001f60c"
PENSIVE_FACE = "\U0001f614"
SLEEPY_FACE = "\U0001f62a"
DROOLING_FACE = "\U0001f924"
SLEEPING_FACE = "\U0001f634"
FACE_WITH_MEDICAL_MASK = "\U0001f637"
FACE_WITH_THERMOMETER = "\U0001f912"
FACE_WITH_HEAD_BANDAGE = "\U0001f915"
NAUSEATED_FACE = "\U0001f922"
FACE_VOMITING = "\U0001f92e"
SNEEZING_FACE = "\U0001f927"
HOT_FACE = "\U0001f975"
COLD_FACE = "\U0001f976"
WOOZY_FACE = "\U0001f974"
FACE_WITH_CROSSED_OUT_EYES = "\U0001f635"
FACE_WITH_SPIRAL_EYES = "\U0001f635\u200d\U0001f4ab"
EXPLODING_HEAD = "\U0001f92f"
COWBOY_HAT_FACE = "\U0001f920"
PARTYING_FACE = "\U0001f973"
DISGUISED_FACE = "\U0001f978"
SMILING_FACE_WITH_SUNGLASSES = "\U0001f60e"
NERD_FACE = "\U0001f913"
FACE_WITH_MONOCLE = "\U0001f9d0"
CONFUSED_FACE = "\U0001f615"
FACE_WITH_DIAGONAL_MOUTH = "\U0001fae4"
WORRIED_FACE = "\U0001f61f"
SLIGHTLY_FROWNING_FACE = "\U0001f641"
FROWNING_FACE = "\u2639\ufe0f"
FACE_WITH_OPEN_MOUTH = "\U0001f62e"
HUSHED_FACE = "\U0001f62f"
ASTONISHED_FACE = "\U0001f632"
FLUSHED_FACE = "\U0001f633"
PLEADING_FACE = "\U0001f97a"
FACE_HOLDING_BACK_TEARS = "\U0001f979"
FROWNING_FACE_WITH_OPEN_MOUTH = "\U0001f626"
ANGUISHED_FACE = "\U0001f627"
FEARFUL_FACE = "\U0001f628"
ANXIOUS_FACE_WITH_SWEAT = "\U0001f630"
SAD_BUT_RELIEVED_FACE = "\U0001f625"
CRYING_FACE = "\U0001f622"
LOUDLY_CRYING_FACE = "\U0001f62d"
FACE_SCREAMING_IN_FEAR = "\U0001f631"
CONFOUNDED_FACE = "\U0001f616"
PERSEVERING_FACE = "\U0001f623"
DISAPPOINTED_FACE = "\U0001f61e"
DOWNCAST_FACE_WITH_SWEAT = "\U0001f613"
WEARY_FACE = "\U0001f629"
TIRED_FACE = "\U0001f62b"
YAWNING_FACE = "\U0001f971"
FACE_WITH_STEAM_FROM_NOSE = "\U0001f624"
ENRAGED_FACE = "\U0001f621"
ANGRY_FACE = "\U0001f620"
FACE_WITH_SYMBOLS_ON_MOUTH = "\U0001f92c"
SMILING_FACE_WITH_HORNS = "\U0001f608"
ANGRY_FACE_WITH_HORNS = "\U0001f47f"
SKULL = "\U0001f480"
SKULL_AND_CROSSBONES = "\u2620\ufe0f"
PILE_OF_POO = "\U0001f4a9"
CLOWN_FACE = "\U0001f921"
OGRE = "\U0001f479"
GOBLIN = "\U0001f47a"
GHOST = "\U0001f47b"
ALIEN = "\U0001f47d"
ALIEN_MONSTER = "\U0001f47e"
ROBOT = "\U0001f916"
GRINNING_CAT = "\U0001f63a"
GRINNING_CAT_WITH_SMILING_EYES = "\U0001f638"
CAT_WITH_TEARS_OF_JOY = "\U0001f639"
SMILING_CAT_WITH_HEART_EYES = "\U0001f63b"
CAT_WITH_WRY_SMILE = "\U0001f63c"
KISSING_CAT = "\U0001f63d"
WEARY_CAT = "\U0001f640"
CRYING_CAT = "\U0001f63f"
POUTING_CAT = "\U0001f63e"
SEE_NO_EVIL_MONKEY = "\U0001f648"
HEAR_NO_EVIL_MONKEY = "\U0001f649"
SPEAK_NO_EVIL_MONKEY = "\U0001f64a"
KISS_MARK = "\U0001f48b"
LOVE_LETTER = "\U0001f48c"
HEART_WITH_ARROW = "\U0001f498"
HEART_WITH_RIBBON = "\U0001f49d"
SPARKLING_HEART = "\U0001f496"
GROWING_HEART = "\U0001f497"
BEATING_HEART = "\U0001f493"
REVOLVING_HEARTS = "\U0001f49e"
TWO_HEARTS = "\U0001f495"
HEART_DECORATION = "\U0001f49f"
HEART_EXCLAMATION = "\u2763\ufe0f"
BROKEN_HEART = "\U0001f494"
HEART_ON_FIRE = "\u2764\ufe0f\u200d\U0001f525"
MENDING_HEART = "\u2764\ufe0f\u200d\U0001fa79"
RED_HEART = "\u2764\ufe0f"
ORANGE_HEART = "\U0001f9e1"
YELLOW_HEART = "\U0001f49b"
GREEN_HEART = "\U0001f49a"
BLUE_HEART = "\U0001f499"
PURPLE_HEART = "\U0001f49c"
BROWN_HEART = "\U0001f90e"
BLACK_HEART = "\U0001f5a4"
WHITE_HEART = "\U0001f90d"
HUNDRED_POINTS = "\U0001f4af"
ANGER_SYMBOL = "\U0001f4a2"
COLLISION = "\U0001f4a5"
DIZZY = "\U0001f4ab"
SWEAT_DROPLETS = "\U0001f4a6"
DASHING_AWAY = "\U0001f4a8"
HOLE = "\U0001f573\ufe0f"
BOMB = "\U0001f4a3"
SPEECH_BALLOON = "\U0001f4ac"
EYE_IN_SPEECH_BUBBLE = "\U0001f441\ufe0f\u200d\U0001f5e8\ufe0f"
LEFT_SPEECH_BUBBLE = "\U0001f5e8\ufe0f"
RIGHT_ANGER_BUBBLE = "\U0001f5ef\ufe0f"
THOUGHT_BALLOON = "\U0001f4ad"
ZZZ = "\U0001f4a4"
WAVING_HAND = "\U0001f44b"
WAVING_HAND_LIGHT_SKIN_TONE = "\U0001f44b\U0001f3fb"
WAVING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f44b\U0001f3fc"
WAVING_HAND_MEDIUM_SKIN_TONE = "\U0001f44b\U0001f3fd"
WAVING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f44b\U0001f3fe"
WAVING_HAND_DARK_SKIN_TONE = "\U0001f44b\U0001f3ff"
RAISED_BACK_OF_HAND = "\U0001f91a"
RAISED_BACK_OF_HAND_LIGHT_SKIN_TONE = "\U0001f91a\U0001f3fb"
RAISED_BACK_OF_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f91a\U0001f3fc"
RAISED_BACK_OF_HAND_MEDIUM_SKIN_TONE = "\U0001f91a\U0001f3fd"
RAISED_BACK_OF_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f91a\U0001f3fe"
RAISED_BACK_OF_HAND_DARK_SKIN_TONE = "\U0001f91a\U0001f3ff"
HAND_WITH_FINGERS_SPLAYED = "\U0001f590\ufe0f"
HAND_WITH_FINGERS_SPLAYED_LIGHT_SKIN_TONE = "\U0001f590\U0001f3fb"
HAND_WITH_FINGERS_SPLAYED_MEDIUM_LIGHT_SKIN_TONE = "\U0001f590\U0001f3fc"
HAND_WITH_FINGERS_SPLAYED_MEDIUM_SKIN_TONE = "\U0001f590\U0001f3fd"
HAND_WITH_FINGERS_SPLAYED_MEDIUM_DARK_SKIN_TONE = "\U0001f590\U0001f3fe"
HAND_WITH_FINGERS_SPLAYED_DARK_SKIN_TONE = "\U0001f590\U0001f3ff"
RAISED_HAND = "\u270b"
RAISED_HAND_LIGHT_SKIN_TONE = "\u270b\U0001f3fb"
RAISED_HAND_MEDIUM_LIGHT_SKIN_TONE = "\u270b\U0001f3fc"
RAISED_HAND_MEDIUM_SKIN_TONE = "\u270b\U0001f3fd"
RAISED_HAND_MEDIUM_DARK_SKIN_TONE = "\u270b\U0001f3fe"
RAISED_HAND_DARK_SKIN_TONE = "\u270b\U0001f3ff"
VULCAN_SALUTE = "\U0001f596"
VULCAN_SALUTE_LIGHT_SKIN_TONE = "\U0001f596\U0001f3fb"
VULCAN_SALUTE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f596\U0001f3fc"
VULCAN_SALUTE_MEDIUM_SKIN_TONE = "\U0001f596\U0001f3fd"
VULCAN_SALUTE_MEDIUM_DARK_SKIN_TONE = "\U0001f596\U0001f3fe"
VULCAN_SALUTE_DARK_SKIN_TONE = "\U0001f596\U0001f3ff"
RIGHTWARDS_HAND = "\U0001faf1"
RIGHTWARDS_HAND_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fb"
RIGHTWARDS_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fc"
RIGHTWARDS_HAND_MEDIUM_SKIN_TONE = "\U0001faf1\U0001f3fd"
RIGHTWARDS_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001faf1\U0001f3fe"
RIGHTWARDS_HAND_DARK_SKIN_TONE = "\U0001faf1\U0001f3ff"
LEFTWARDS_HAND = "\U0001faf2"
LEFTWARDS_HAND_LIGHT_SKIN_TONE = "\U0001faf2\U0001f3fb"
LEFTWARDS_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf2\U0001f3fc"
LEFTWARDS_HAND_MEDIUM_SKIN_TONE = "\U0001faf2\U0001f3fd"
LEFTWARDS_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001faf2\U0001f3fe"
LEFTWARDS_HAND_DARK_SKIN_TONE = "\U0001faf2\U0001f3ff"
PALM_DOWN_HAND = "\U0001faf3"
PALM_DOWN_HAND_LIGHT_SKIN_TONE = "\U0001faf3\U0001f3fb"
PALM_DOWN_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf3\U0001f3fc"
PALM_DOWN_HAND_MEDIUM_SKIN_TONE = "\U0001faf3\U0001f3fd"
PALM_DOWN_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001faf3\U0001f3fe"
PALM_DOWN_HAND_DARK_SKIN_TONE = "\U0001faf3\U0001f3ff"
PALM_UP_HAND = "\U0001faf4"
PALM_UP_HAND_LIGHT_SKIN_TONE = "\U0001faf4\U0001f3fb"
PALM_UP_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf4\U0001f3fc"
PALM_UP_HAND_MEDIUM_SKIN_TONE = "\U0001faf4\U0001f3fd"
PALM_UP_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001faf4\U0001f3fe"
PALM_UP_HAND_DARK_SKIN_TONE = "\U0001faf4\U0001f3ff"
OK_HAND = "\U0001f44c"
OK_HAND_LIGHT_SKIN_TONE = "\U0001f44c\U0001f3fb"
OK_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f44c\U0001f3fc"
OK_HAND_MEDIUM_SKIN_TONE = "\U0001f44c\U0001f3fd"
OK_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f44c\U0001f3fe"
OK_HAND_DARK_SKIN_TONE = "\U0001f44c\U0001f3ff"
PINCHED_FINGERS = "\U0001f90c"
PINCHED_FINGERS_LIGHT_SKIN_TONE = "\U0001f90c\U0001f3fb"
PINCHED_FINGERS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f90c\U0001f3fc"
PINCHED_FINGERS_MEDIUM_SKIN_TONE = "\U0001f90c\U0001f3fd"
PINCHED_FINGERS_MEDIUM_DARK_SKIN_TONE = "\U0001f90c\U0001f3fe"
PINCHED_FINGERS_DARK_SKIN_TONE = "\U0001f90c\U0001f3ff"
PINCHING_HAND = "\U0001f90f"
PINCHING_HAND_LIGHT_SKIN_TONE = "\U0001f90f\U0001f3fb"
PINCHING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f90f\U0001f3fc"
PINCHING_HAND_MEDIUM_SKIN_TONE = "\U0001f90f\U0001f3fd"
PINCHING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f90f\U0001f3fe"
PINCHING_HAND_DARK_SKIN_TONE = "\U0001f90f\U0001f3ff"
VICTORY_HAND = "\u270c\ufe0f"
VICTORY_HAND_LIGHT_SKIN_TONE = "\u270c\U0001f3fb"
VICTORY_HAND_MEDIUM_LIGHT_SKIN_TONE = "\u270c\U0001f3fc"
VICTORY_HAND_MEDIUM_SKIN_TONE = "\u270c\U0001f3fd"
VICTORY_HAND_MEDIUM_DARK_SKIN_TONE = "\u270c\U0001f3fe"
VICTORY_HAND_DARK_SKIN_TONE = "\u270c\U0001f3ff"
CROSSED_FINGERS = "\U0001f91e"
CROSSED_FINGERS_LIGHT_SKIN_TONE = "\U0001f91e\U0001f3fb"
CROSSED_FINGERS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f91e\U0001f3fc"
CROSSED_FINGERS_MEDIUM_SKIN_TONE = "\U0001f91e\U0001f3fd"
CROSSED_FINGERS_MEDIUM_DARK_SKIN_TONE = "\U0001f91e\U0001f3fe"
CROSSED_FINGERS_DARK_SKIN_TONE = "\U0001f91e\U0001f3ff"
HAND_WITH_INDEX_FINGER_AND_THUMB_CROSSED = "\U0001faf0"
HAND_WITH_INDEX_FINGER_AND_THUMB_CROSSED_LIGHT_SKIN_TONE = "\U0001faf0\U0001f3fb"
HAND_WITH_INDEX_FINGER_AND_THUMB_CROSSED_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf0\U0001f3fc"
HAND_WITH_INDEX_FINGER_AND_THUMB_CROSSED_MEDIUM_SKIN_TONE = "\U0001faf0\U0001f3fd"
HAND_WITH_INDEX_FINGER_AND_THUMB_CROSSED_MEDIUM_DARK_SKIN_TONE = "\U0001faf0\U0001f3fe"
HAND_WITH_INDEX_FINGER_AND_THUMB_CROSSED_DARK_SKIN_TONE = "\U0001faf0\U0001f3ff"
LOVE_YOU_GESTURE = "\U0001f91f"
LOVE_YOU_GESTURE_LIGHT_SKIN_TONE = "\U0001f91f\U0001f3fb"
LOVE_YOU_GESTURE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f91f\U0001f3fc"
LOVE_YOU_GESTURE_MEDIUM_SKIN_TONE = "\U0001f91f\U0001f3fd"
LOVE_YOU_GESTURE_MEDIUM_DARK_SKIN_TONE = "\U0001f91f\U0001f3fe"
LOVE_YOU_GESTURE_DARK_SKIN_TONE = "\U0001f91f\U0001f3ff"
SIGN_OF_THE_HORNS = "\U0001f918"
SIGN_OF_THE_HORNS_LIGHT_SKIN_TONE = "\U0001f918\U0001f3fb"
SIGN_OF_THE_HORNS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f918\U0001f3fc"
SIGN_OF_THE_HORNS_MEDIUM_SKIN_TONE = "\U0001f918\U0001f3fd"
SIGN_OF_THE_HORNS_MEDIUM_DARK_SKIN_TONE = "\U0001f918\U0001f3fe"
SIGN_OF_THE_HORNS_DARK_SKIN_TONE = "\U0001f918\U0001f3ff"
CALL_ME_HAND = "\U0001f919"
CALL_ME_HAND_LIGHT_SKIN_TONE = "\U0001f919\U0001f3fb"
CALL_ME_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f919\U0001f3fc"
CALL_ME_HAND_MEDIUM_SKIN_TONE = "\U0001f919\U0001f3fd"
CALL_ME_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f919\U0001f3fe"
CALL_ME_HAND_DARK_SKIN_TONE = "\U0001f919\U0001f3ff"
BACKHAND_INDEX_POINTING_LEFT = "\U0001f448"
BACKHAND_INDEX_POINTING_LEFT_LIGHT_SKIN_TONE = "\U0001f448\U0001f3fb"
BACKHAND_INDEX_POINTING_LEFT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f448\U0001f3fc"
BACKHAND_INDEX_POINTING_LEFT_MEDIUM_SKIN_TONE = "\U0001f448\U0001f3fd"
BACKHAND_INDEX_POINTING_LEFT_MEDIUM_DARK_SKIN_TONE = "\U0001f448\U0001f3fe"
BACKHAND_INDEX_POINTING_LEFT_DARK_SKIN_TONE = "\U0001f448\U0001f3ff"
BACKHAND_INDEX_POINTING_RIGHT = "\U0001f449"
BACKHAND_INDEX_POINTING_RIGHT_LIGHT_SKIN_TONE = "\U0001f449\U0001f3fb"
BACKHAND_INDEX_POINTING_RIGHT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f449\U0001f3fc"
BACKHAND_INDEX_POINTING_RIGHT_MEDIUM_SKIN_TONE = "\U0001f449\U0001f3fd"
BACKHAND_INDEX_POINTING_RIGHT_MEDIUM_DARK_SKIN_TONE = "\U0001f449\U0001f3fe"
BACKHAND_INDEX_POINTING_RIGHT_DARK_SKIN_TONE = "\U0001f449\U0001f3ff"
BACKHAND_INDEX_POINTING_UP = "\U0001f446"
BACKHAND_INDEX_POINTING_UP_LIGHT_SKIN_TONE = "\U0001f446\U0001f3fb"
BACKHAND_INDEX_POINTING_UP_MEDIUM_LIGHT_SKIN_TONE = "\U0001f446\U0001f3fc"
BACKHAND_INDEX_POINTING_UP_MEDIUM_SKIN_TONE = "\U0001f446\U0001f3fd"
BACKHAND_INDEX_POINTING_UP_MEDIUM_DARK_SKIN_TONE = "\U0001f446\U0001f3fe"
BACKHAND_INDEX_POINTING_UP_DARK_SKIN_TONE = "\U0001f446\U0001f3ff"
MIDDLE_FINGER = "\U0001f595"
MIDDLE_FINGER_LIGHT_SKIN_TONE = "\U0001f595\U0001f3fb"
MIDDLE_FINGER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f595\U0001f3fc"
MIDDLE_FINGER_MEDIUM_SKIN_TONE = "\U0001f595\U0001f3fd"
MIDDLE_FINGER_MEDIUM_DARK_SKIN_TONE = "\U0001f595\U0001f3fe"
MIDDLE_FINGER_DARK_SKIN_TONE = "\U0001f595\U0001f3ff"
BACKHAND_INDEX_POINTING_DOWN = "\U0001f447"
BACKHAND_INDEX_POINTING_DOWN_LIGHT_SKIN_TONE = "\U0001f447\U0001f3fb"
BACKHAND_INDEX_POINTING_DOWN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f447\U0001f3fc"
BACKHAND_INDEX_POINTING_DOWN_MEDIUM_SKIN_TONE = "\U0001f447\U0001f3fd"
BACKHAND_INDEX_POINTING_DOWN_MEDIUM_DARK_SKIN_TONE = "\U0001f447\U0001f3fe"
BACKHAND_INDEX_POINTING_DOWN_DARK_SKIN_TONE = "\U0001f447\U0001f3ff"
INDEX_POINTING_UP = "\u261d\ufe0f"
INDEX_POINTING_UP_LIGHT_SKIN_TONE = "\u261d\U0001f3fb"
INDEX_POINTING_UP_MEDIUM_LIGHT_SKIN_TONE = "\u261d\U0001f3fc"
INDEX_POINTING_UP_MEDIUM_SKIN_TONE = "\u261d\U0001f3fd"
INDEX_POINTING_UP_MEDIUM_DARK_SKIN_TONE = "\u261d\U0001f3fe"
INDEX_POINTING_UP_DARK_SKIN_TONE = "\u261d\U0001f3ff"
INDEX_POINTING_AT_THE_VIEWER = "\U0001faf5"
INDEX_POINTING_AT_THE_VIEWER_LIGHT_SKIN_TONE = "\U0001faf5\U0001f3fb"
INDEX_POINTING_AT_THE_VIEWER_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf5\U0001f3fc"
INDEX_POINTING_AT_THE_VIEWER_MEDIUM_SKIN_TONE = "\U0001faf5\U0001f3fd"
INDEX_POINTING_AT_THE_VIEWER_MEDIUM_DARK_SKIN_TONE = "\U0001faf5\U0001f3fe"
INDEX_POINTING_AT_THE_VIEWER_DARK_SKIN_TONE = "\U0001faf5\U0001f3ff"
THUMBS_UP = "\U0001f44d"
THUMBS_UP_LIGHT_SKIN_TONE = "\U0001f44d\U0001f3fb"
THUMBS_UP_MEDIUM_LIGHT_SKIN_TONE = "\U0001f44d\U0001f3fc"
THUMBS_UP_MEDIUM_SKIN_TONE = "\U0001f44d\U0001f3fd"
THUMBS_UP_MEDIUM_DARK_SKIN_TONE = "\U0001f44d\U0001f3fe"
THUMBS_UP_DARK_SKIN_TONE = "\U0001f44d\U0001f3ff"
THUMBS_DOWN = "\U0001f44e"
THUMBS_DOWN_LIGHT_SKIN_TONE = "\U0001f44e\U0001f3fb"
THUMBS_DOWN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f44e\U0001f3fc"
THUMBS_DOWN_MEDIUM_SKIN_TONE = "\U0001f44e\U0001f3fd"
THUMBS_DOWN_MEDIUM_DARK_SKIN_TONE = "\U0001f44e\U0001f3fe"
THUMBS_DOWN_DARK_SKIN_TONE = "\U0001f44e\U0001f3ff"
RAISED_FIST = "\u270a"
RAISED_FIST_LIGHT_SKIN_TONE = "\u270a\U0001f3fb"
RAISED_FIST_MEDIUM_LIGHT_SKIN_TONE = "\u270a\U0001f3fc"
RAISED_FIST_MEDIUM_SKIN_TONE = "\u270a\U0001f3fd"
RAISED_FIST_MEDIUM_DARK_SKIN_TONE = "\u270a\U0001f3fe"
RAISED_FIST_DARK_SKIN_TONE = "\u270a\U0001f3ff"
ONCOMING_FIST = "\U0001f44a"
ONCOMING_FIST_LIGHT_SKIN_TONE = "\U0001f44a\U0001f3fb"
ONCOMING_FIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f44a\U0001f3fc"
ONCOMING_FIST_MEDIUM_SKIN_TONE = "\U0001f44a\U0001f3fd"
ONCOMING_FIST_MEDIUM_DARK_SKIN_TONE = "\U0001f44a\U0001f3fe"
ONCOMING_FIST_DARK_SKIN_TONE = "\U0001f44a\U0001f3ff"
LEFT_FACING_FIST = "\U0001f91b"
LEFT_FACING_FIST_LIGHT_SKIN_TONE = "\U0001f91b\U0001f3fb"
LEFT_FACING_FIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f91b\U0001f3fc"
LEFT_FACING_FIST_MEDIUM_SKIN_TONE = "\U0001f91b\U0001f3fd"
LEFT_FACING_FIST_MEDIUM_DARK_SKIN_TONE = "\U0001f91b\U0001f3fe"
LEFT_FACING_FIST_DARK_SKIN_TONE = "\U0001f91b\U0001f3ff"
RIGHT_FACING_FIST = "\U0001f91c"
RIGHT_FACING_FIST_LIGHT_SKIN_TONE = "\U0001f91c\U0001f3fb"
RIGHT_FACING_FIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f91c\U0001f3fc"
RIGHT_FACING_FIST_MEDIUM_SKIN_TONE = "\U0001f91c\U0001f3fd"
RIGHT_FACING_FIST_MEDIUM_DARK_SKIN_TONE = "\U0001f91c\U0001f3fe"
RIGHT_FACING_FIST_DARK_SKIN_TONE = "\U0001f91c\U0001f3ff"
CLAPPING_HANDS = "\U0001f44f"
CLAPPING_HANDS_LIGHT_SKIN_TONE = "\U0001f44f\U0001f3fb"
CLAPPING_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f44f\U0001f3fc"
CLAPPING_HANDS_MEDIUM_SKIN_TONE = "\U0001f44f\U0001f3fd"
CLAPPING_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f44f\U0001f3fe"
CLAPPING_HANDS_DARK_SKIN_TONE = "\U0001f44f\U0001f3ff"
RAISING_HANDS = "\U0001f64c"
RAISING_HANDS_LIGHT_SKIN_TONE = "\U0001f64c\U0001f3fb"
RAISING_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64c\U0001f3fc"
RAISING_HANDS_MEDIUM_SKIN_TONE = "\U0001f64c\U0001f3fd"
RAISING_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f64c\U0001f3fe"
RAISING_HANDS_DARK_SKIN_TONE = "\U0001f64c\U0001f3ff"
HEART_HANDS = "\U0001faf6"
HEART_HANDS_LIGHT_SKIN_TONE = "\U0001faf6\U0001f3fb"
HEART_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf6\U0001f3fc"
HEART_HANDS_MEDIUM_SKIN_TONE = "\U0001faf6\U0001f3fd"
HEART_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001faf6\U0001f3fe"
HEART_HANDS_DARK_SKIN_TONE = "\U0001faf6\U0001f3ff"
OPEN_HANDS = "\U0001f450"
OPEN_HANDS_LIGHT_SKIN_TONE = "\U0001f450\U0001f3fb"
OPEN_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f450\U0001f3fc"
OPEN_HANDS_MEDIUM_SKIN_TONE = "\U0001f450\U0001f3fd"
OPEN_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f450\U0001f3fe"
OPEN_HANDS_DARK_SKIN_TONE = "\U0001f450\U0001f3ff"
PALMS_UP_TOGETHER = "\U0001f932"
PALMS_UP_TOGETHER_LIGHT_SKIN_TONE = "\U0001f932\U0001f3fb"
PALMS_UP_TOGETHER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f932\U0001f3fc"
PALMS_UP_TOGETHER_MEDIUM_SKIN_TONE = "\U0001f932\U0001f3fd"
PALMS_UP_TOGETHER_MEDIUM_DARK_SKIN_TONE = "\U0001f932\U0001f3fe"
PALMS_UP_TOGETHER_DARK_SKIN_TONE = "\U0001f932\U0001f3ff"
HANDSHAKE = "\U0001f91d"
HANDSHAKE_LIGHT_SKIN_TONE = "\U0001f91d\U0001f3fb"
HANDSHAKE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f91d\U0001f3fc"
HANDSHAKE_MEDIUM_SKIN_TONE = "\U0001f91d\U0001f3fd"
HANDSHAKE_MEDIUM_DARK_SKIN_TONE = "\U0001f91d\U0001f3fe"
HANDSHAKE_DARK_SKIN_TONE = "\U0001f91d\U0001f3ff"
HANDSHAKE_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fb\u200d\U0001faf2\U0001f3fc"
HANDSHAKE_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001faf1\U0001f3fb\u200d\U0001faf2\U0001f3fd"
HANDSHAKE_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = "\U0001faf1\U0001f3fb\u200d\U0001faf2\U0001f3fe"
HANDSHAKE_LIGHT_SKIN_TONE_DARK_SKIN_TONE = "\U0001faf1\U0001f3fb\u200d\U0001faf2\U0001f3ff"
HANDSHAKE_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fc\u200d\U0001faf2\U0001f3fb"
HANDSHAKE_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001faf1\U0001f3fc\u200d\U0001faf2\U0001f3fd"
HANDSHAKE_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = "\U0001faf1\U0001f3fc\u200d\U0001faf2\U0001f3fe"
HANDSHAKE_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = "\U0001faf1\U0001f3fc\u200d\U0001faf2\U0001f3ff"
HANDSHAKE_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fd\u200d\U0001faf2\U0001f3fb"
HANDSHAKE_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fd\u200d\U0001faf2\U0001f3fc"
HANDSHAKE_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = "\U0001faf1\U0001f3fd\u200d\U0001faf2\U0001f3fe"
HANDSHAKE_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = "\U0001faf1\U0001f3fd\u200d\U0001faf2\U0001f3ff"
HANDSHAKE_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fe\u200d\U0001faf2\U0001f3fb"
HANDSHAKE_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3fe\u200d\U0001faf2\U0001f3fc"
HANDSHAKE_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001faf1\U0001f3fe\u200d\U0001faf2\U0001f3fd"
HANDSHAKE_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = "\U0001faf1\U0001f3fe\u200d\U0001faf2\U0001f3ff"
HANDSHAKE_DARK_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3ff\u200d\U0001faf2\U0001f3fb"
HANDSHAKE_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = "\U0001faf1\U0001f3ff\u200d\U0001faf2\U0001f3fc"
HANDSHAKE_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001faf1\U0001f3ff\u200d\U0001faf2\U0001f3fd"
HANDSHAKE_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = "\U0001faf1\U0001f3ff\u200d\U0001faf2\U0001f3fe"
FOLDED_HANDS = "\U0001f64f"
FOLDED_HANDS_LIGHT_SKIN_TONE = "\U0001f64f\U0001f3fb"
FOLDED_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64f\U0001f3fc"
FOLDED_HANDS_MEDIUM_SKIN_TONE = "\U0001f64f\U0001f3fd"
FOLDED_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f64f\U0001f3fe"
FOLDED_HANDS_DARK_SKIN_TONE = "\U0001f64f\U0001f3ff"
WRITING_HAND = "\u270d\ufe0f"
WRITING_HAND_LIGHT_SKIN_TONE = "\u270d\U0001f3fb"
WRITING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\u270d\U0001f3fc"
WRITING_HAND_MEDIUM_SKIN_TONE = "\u270d\U0001f3fd"
WRITING_HAND_MEDIUM_DARK_SKIN_TONE = "\u270d\U0001f3fe"
WRITING_HAND_DARK_SKIN_TONE = "\u270d\U0001f3ff"
NAIL_POLISH = "\U0001f485"
NAIL_POLISH_LIGHT_SKIN_TONE = "\U0001f485\U0001f3fb"
NAIL_POLISH_MEDIUM_LIGHT_SKIN_TONE = "\U0001f485\U0001f3fc"
NAIL_POLISH_MEDIUM_SKIN_TONE = "\U0001f485\U0001f3fd"
NAIL_POLISH_MEDIUM_DARK_SKIN_TONE = "\U0001f485\U0001f3fe"
NAIL_POLISH_DARK_SKIN_TONE = "\U0001f485\U0001f3ff"
SELFIE = "\U0001f933"
SELFIE_LIGHT_SKIN_TONE = "\U0001f933\U0001f3fb"
SELFIE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f933\U0001f3fc"
SELFIE_MEDIUM_SKIN_TONE = "\U0001f933\U0001f3fd"
SELFIE_MEDIUM_DARK_SKIN_TONE = "\U0001f933\U0001f3fe"
SELFIE_DARK_SKIN_TONE = "\U0001f933\U0001f3ff"
FLEXED_BICEPS = "\U0001f4aa"
FLEXED_BICEPS_LIGHT_SKIN_TONE = "\U0001f4aa\U0001f3fb"
FLEXED_BICEPS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f4aa\U0001f3fc"
FLEXED_BICEPS_MEDIUM_SKIN_TONE = "\U0001f4aa\U0001f3fd"
FLEXED_BICEPS_MEDIUM_DARK_SKIN_TONE = "\U0001f4aa\U0001f3fe"
FLEXED_BICEPS_DARK_SKIN_TONE = "\U0001f4aa\U0001f3ff"
MECHANICAL_ARM = "\U0001f9be"
MECHANICAL_LEG = "\U0001f9bf"
LEG = "\U0001f9b5"
LEG_LIGHT_SKIN_TONE = "\U0001f9b5\U0001f3fb"
LEG_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b5\U0001f3fc"
LEG_MEDIUM_SKIN_TONE = "\U0001f9b5\U0001f3fd"
LEG_MEDIUM_DARK_SKIN_TONE = "\U0001f9b5\U0001f3fe"
LEG_DARK_SKIN_TONE = "\U0001f9b5\U0001f3ff"
FOOT = "\U0001f9b6"
FOOT_LIGHT_SKIN_TONE = "\U0001f9b6\U0001f3fb"
FOOT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b6\U0001f3fc"
FOOT_MEDIUM_SKIN_TONE = "\U0001f9b6\U0001f3fd"
FOOT_MEDIUM_DARK_SKIN_TONE = "\U0001f9b6\U0001f3fe"
FOOT_DARK_SKIN_TONE = "\U0001f9b6\U0001f3ff"
EAR = "\U0001f442"
EAR_LIGHT_SKIN_TONE = "\U0001f442\U0001f3fb"
EAR_MEDIUM_LIGHT_SKIN_TONE = "\U0001f442\U0001f3fc"
EAR_MEDIUM_SKIN_TONE = "\U0001f442\U0001f3fd"
EAR_MEDIUM_DARK_SKIN_TONE = "\U0001f442\U0001f3fe"
EAR_DARK_SKIN_TONE = "\U0001f442\U0001f3ff"
EAR_WITH_HEARING_AID = "\U0001f9bb"
EAR_WITH_HEARING_AID_LIGHT_SKIN_TONE = "\U0001f9bb\U0001f3fb"
EAR_WITH_HEARING_AID_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9bb\U0001f3fc"
EAR_WITH_HEARING_AID_MEDIUM_SKIN_TONE = "\U0001f9bb\U0001f3fd"
EAR_WITH_HEARING_AID_MEDIUM_DARK_SKIN_TONE = "\U0001f9bb\U0001f3fe"
EAR_WITH_HEARING_AID_DARK_SKIN_TONE = "\U0001f9bb\U0001f3ff"
NOSE = "\U0001f443"
NOSE_LIGHT_SKIN_TONE = "\U0001f443\U0001f3fb"
NOSE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f443\U0001f3fc"
NOSE_MEDIUM_SKIN_TONE = "\U0001f443\U0001f3fd"
NOSE_MEDIUM_DARK_SKIN_TONE = "\U0001f443\U0001f3fe"
NOSE_DARK_SKIN_TONE = "\U0001f443\U0001f3ff"
BRAIN = "\U0001f9e0"
ANATOMICAL_HEART = "\U0001fac0"
LUNGS = "\U0001fac1"
TOOTH = "\U0001f9b7"
BONE = "\U0001f9b4"
EYES = "\U0001f440"
EYE = "\U0001f441\ufe0f"
TONGUE = "\U0001f445"
MOUTH = "\U0001f444"
BITING_LIP = "\U0001fae6"
BABY = "\U0001f476"
BABY_LIGHT_SKIN_TONE = "\U0001f476\U0001f3fb"
BABY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f476\U0001f3fc"
BABY_MEDIUM_SKIN_TONE = "\U0001f476\U0001f3fd"
BABY_MEDIUM_DARK_SKIN_TONE = "\U0001f476\U0001f3fe"
BABY_DARK_SKIN_TONE = "\U0001f476\U0001f3ff"
CHILD = "\U0001f9d2"
CHILD_LIGHT_SKIN_TONE = "\U0001f9d2\U0001f3fb"
CHILD_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d2\U0001f3fc"
CHILD_MEDIUM_SKIN_TONE = "\U0001f9d2\U0001f3fd"
CHILD_MEDIUM_DARK_SKIN_TONE = "\U0001f9d2\U0001f3fe"
CHILD_DARK_SKIN_TONE = "\U0001f9d2\U0001f3ff"
BOY = "\U0001f466"
BOY_LIGHT_SKIN_TONE = "\U0001f466\U0001f3fb"
BOY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f466\U0001f3fc"
BOY_MEDIUM_SKIN_TONE = "\U0001f466\U0001f3fd"
BOY_MEDIUM_DARK_SKIN_TONE = "\U0001f466\U0001f3fe"
BOY_DARK_SKIN_TONE = "\U0001f466\U0001f3ff"
GIRL = "\U0001f467"
GIRL_LIGHT_SKIN_TONE = "\U0001f467\U0001f3fb"
GIRL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f467\U0001f3fc"
GIRL_MEDIUM_SKIN_TONE = "\U0001f467\U0001f3fd"
GIRL_MEDIUM_DARK_SKIN_TONE = "\U0001f467\U0001f3fe"
GIRL_DARK_SKIN_TONE = "\U0001f467\U0001f3ff"
PERSON = "\U0001f9d1"
PERSON_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb"
PERSON_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc"
PERSON_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd"
PERSON_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe"
PERSON_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff"
PERSON_BLOND_HAIR = "\U0001f471"
PERSON_LIGHT_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fb"
PERSON_MEDIUM_LIGHT_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fc"
PERSON_MEDIUM_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fd"
PERSON_MEDIUM_DARK_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fe"
PERSON_DARK_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3ff"
MAN = "\U0001f468"
MAN_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb"
MAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc"
MAN_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd"
MAN_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe"
MAN_DARK_SKIN_TONE = "\U0001f468\U0001f3ff"
PERSON_BEARD = "\U0001f9d4"
PERSON_LIGHT_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fb"
PERSON_MEDIUM_LIGHT_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fc"
PERSON_MEDIUM_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fd"
PERSON_MEDIUM_DARK_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fe"
PERSON_DARK_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3ff"
MAN_BEARD = "\U0001f9d4\u200d\u2642\ufe0f"
MAN_LIGHT_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fb\u200d\u2642\ufe0f"
MAN_MEDIUM_LIGHT_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fc\u200d\u2642\ufe0f"
MAN_MEDIUM_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fd\u200d\u2642\ufe0f"
MAN_MEDIUM_DARK_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fe\u200d\u2642\ufe0f"
MAN_DARK_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_BEARD = "\U0001f9d4\u200d\u2640\ufe0f"
WOMAN_LIGHT_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_MEDIUM_LIGHT_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_MEDIUM_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_MEDIUM_DARK_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_DARK_SKIN_TONE_BEARD = "\U0001f9d4\U0001f3ff\u200d\u2640\ufe0f"
MAN_RED_HAIR = "\U0001f468\u200d\U0001f9b0"
MAN_LIGHT_SKIN_TONE_RED_HAIR = "\U0001f468\U0001f3fb\u200d\U0001f9b0"
MAN_MEDIUM_LIGHT_SKIN_TONE_RED_HAIR = "\U0001f468\U0001f3fc\u200d\U0001f9b0"
MAN_MEDIUM_SKIN_TONE_RED_HAIR = "\U0001f468\U0001f3fd\u200d\U0001f9b0"
MAN_MEDIUM_DARK_SKIN_TONE_RED_HAIR = "\U0001f468\U0001f3fe\u200d\U0001f9b0"
MAN_DARK_SKIN_TONE_RED_HAIR = "\U0001f468\U0001f3ff\u200d\U0001f9b0"
MAN_CURLY_HAIR = "\U0001f468\u200d\U0001f9b1"
MAN_LIGHT_SKIN_TONE_CURLY_HAIR = "\U0001f468\U0001f3fb\u200d\U0001f9b1"
MAN_MEDIUM_LIGHT_SKIN_TONE_CURLY_HAIR = "\U0001f468\U0001f3fc\u200d\U0001f9b1"
MAN_MEDIUM_SKIN_TONE_CURLY_HAIR = "\U0001f468\U0001f3fd\u200d\U0001f9b1"
MAN_MEDIUM_DARK_SKIN_TONE_CURLY_HAIR = "\U0001f468\U0001f3fe\u200d\U0001f9b1"
MAN_DARK_SKIN_TONE_CURLY_HAIR = "\U0001f468\U0001f3ff\u200d\U0001f9b1"
MAN_WHITE_HAIR = "\U0001f468\u200d\U0001f9b3"
MAN_LIGHT_SKIN_TONE_WHITE_HAIR = "\U0001f468\U0001f3fb\u200d\U0001f9b3"
MAN_MEDIUM_LIGHT_SKIN_TONE_WHITE_HAIR = "\U0001f468\U0001f3fc\u200d\U0001f9b3"
MAN_MEDIUM_SKIN_TONE_WHITE_HAIR = "\U0001f468\U0001f3fd\u200d\U0001f9b3"
MAN_MEDIUM_DARK_SKIN_TONE_WHITE_HAIR = "\U0001f468\U0001f3fe\u200d\U0001f9b3"
MAN_DARK_SKIN_TONE_WHITE_HAIR = "\U0001f468\U0001f3ff\u200d\U0001f9b3"
MAN_BALD = "\U0001f468\u200d\U0001f9b2"
MAN_LIGHT_SKIN_TONE_BALD = "\U0001f468\U0001f3fb\u200d\U0001f9b2"
MAN_MEDIUM_LIGHT_SKIN_TONE_BALD = "\U0001f468\U0001f3fc\u200d\U0001f9b2"
MAN_MEDIUM_SKIN_TONE_BALD = "\U0001f468\U0001f3fd\u200d\U0001f9b2"
MAN_MEDIUM_DARK_SKIN_TONE_BALD = "\U0001f468\U0001f3fe\u200d\U0001f9b2"
MAN_DARK_SKIN_TONE_BALD = "\U0001f468\U0001f3ff\u200d\U0001f9b2"
WOMAN = "\U0001f469"
WOMAN_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb"
WOMAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc"
WOMAN_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd"
WOMAN_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe"
WOMAN_DARK_SKIN_TONE = "\U0001f469\U0001f3ff"
WOMAN_RED_HAIR = "\U0001f469\u200d\U0001f9b0"
WOMAN_LIGHT_SKIN_TONE_RED_HAIR = "\U0001f469\U0001f3fb\u200d\U0001f9b0"
WOMAN_MEDIUM_LIGHT_SKIN_TONE_RED_HAIR = "\U0001f469\U0001f3fc\u200d\U0001f9b0"
WOMAN_MEDIUM_SKIN_TONE_RED_HAIR = "\U0001f469\U0001f3fd\u200d\U0001f9b0"
WOMAN_MEDIUM_DARK_SKIN_TONE_RED_HAIR = "\U0001f469\U0001f3fe\u200d\U0001f9b0"
WOMAN_DARK_SKIN_TONE_RED_HAIR = "\U0001f469\U0001f3ff\u200d\U0001f9b0"
PERSON_RED_HAIR = "\U0001f9d1\u200d\U0001f9b0"
PERSON_LIGHT_SKIN_TONE_RED_HAIR = "\U0001f9d1\U0001f3fb\u200d\U0001f9b0"
PERSON_MEDIUM_LIGHT_SKIN_TONE_RED_HAIR = "\U0001f9d1\U0001f3fc\u200d\U0001f9b0"
PERSON_MEDIUM_SKIN_TONE_RED_HAIR = "\U0001f9d1\U0001f3fd\u200d\U0001f9b0"
PERSON_MEDIUM_DARK_SKIN_TONE_RED_HAIR = "\U0001f9d1\U0001f3fe\u200d\U0001f9b0"
PERSON_DARK_SKIN_TONE_RED_HAIR = "\U0001f9d1\U0001f3ff\u200d\U0001f9b0"
WOMAN_CURLY_HAIR = "\U0001f469\u200d\U0001f9b1"
WOMAN_LIGHT_SKIN_TONE_CURLY_HAIR = "\U0001f469\U0001f3fb\u200d\U0001f9b1"
WOMAN_MEDIUM_LIGHT_SKIN_TONE_CURLY_HAIR = "\U0001f469\U0001f3fc\u200d\U0001f9b1"
WOMAN_MEDIUM_SKIN_TONE_CURLY_HAIR = "\U0001f469\U0001f3fd\u200d\U0001f9b1"
WOMAN_MEDIUM_DARK_SKIN_TONE_CURLY_HAIR = "\U0001f469\U0001f3fe\u200d\U0001f9b1"
# Emoji constants: hair-style variants. Each value is a Unicode ZWJ sequence:
# base person codepoint [+ Fitzpatrick skin-tone modifier U+1F3FB..U+1F3FF,
# light -> dark] + ZWJ (U+200D) + hair component or gender sign.
# Tail of the "woman: curly hair" group (woman U+1F469 + curly hair U+1F9B1);
# the preceding tone variants are above this chunk.
WOMAN_DARK_SKIN_TONE_CURLY_HAIR = "\U0001f469\U0001f3ff\u200d\U0001f9b1"
# Gender-neutral person (U+1F9D1) + curly hair component (U+1F9B1).
PERSON_CURLY_HAIR = "\U0001f9d1\u200d\U0001f9b1"
PERSON_LIGHT_SKIN_TONE_CURLY_HAIR = "\U0001f9d1\U0001f3fb\u200d\U0001f9b1"
PERSON_MEDIUM_LIGHT_SKIN_TONE_CURLY_HAIR = "\U0001f9d1\U0001f3fc\u200d\U0001f9b1"
PERSON_MEDIUM_SKIN_TONE_CURLY_HAIR = "\U0001f9d1\U0001f3fd\u200d\U0001f9b1"
PERSON_MEDIUM_DARK_SKIN_TONE_CURLY_HAIR = "\U0001f9d1\U0001f3fe\u200d\U0001f9b1"
PERSON_DARK_SKIN_TONE_CURLY_HAIR = "\U0001f9d1\U0001f3ff\u200d\U0001f9b1"
# Woman (U+1F469) + white hair component (U+1F9B3).
WOMAN_WHITE_HAIR = "\U0001f469\u200d\U0001f9b3"
WOMAN_LIGHT_SKIN_TONE_WHITE_HAIR = "\U0001f469\U0001f3fb\u200d\U0001f9b3"
WOMAN_MEDIUM_LIGHT_SKIN_TONE_WHITE_HAIR = "\U0001f469\U0001f3fc\u200d\U0001f9b3"
WOMAN_MEDIUM_SKIN_TONE_WHITE_HAIR = "\U0001f469\U0001f3fd\u200d\U0001f9b3"
WOMAN_MEDIUM_DARK_SKIN_TONE_WHITE_HAIR = "\U0001f469\U0001f3fe\u200d\U0001f9b3"
WOMAN_DARK_SKIN_TONE_WHITE_HAIR = "\U0001f469\U0001f3ff\u200d\U0001f9b3"
# Person (U+1F9D1) + white hair component (U+1F9B3).
PERSON_WHITE_HAIR = "\U0001f9d1\u200d\U0001f9b3"
PERSON_LIGHT_SKIN_TONE_WHITE_HAIR = "\U0001f9d1\U0001f3fb\u200d\U0001f9b3"
PERSON_MEDIUM_LIGHT_SKIN_TONE_WHITE_HAIR = "\U0001f9d1\U0001f3fc\u200d\U0001f9b3"
PERSON_MEDIUM_SKIN_TONE_WHITE_HAIR = "\U0001f9d1\U0001f3fd\u200d\U0001f9b3"
PERSON_MEDIUM_DARK_SKIN_TONE_WHITE_HAIR = "\U0001f9d1\U0001f3fe\u200d\U0001f9b3"
PERSON_DARK_SKIN_TONE_WHITE_HAIR = "\U0001f9d1\U0001f3ff\u200d\U0001f9b3"
# Woman (U+1F469) + bald component (U+1F9B2).
WOMAN_BALD = "\U0001f469\u200d\U0001f9b2"
WOMAN_LIGHT_SKIN_TONE_BALD = "\U0001f469\U0001f3fb\u200d\U0001f9b2"
WOMAN_MEDIUM_LIGHT_SKIN_TONE_BALD = "\U0001f469\U0001f3fc\u200d\U0001f9b2"
WOMAN_MEDIUM_SKIN_TONE_BALD = "\U0001f469\U0001f3fd\u200d\U0001f9b2"
WOMAN_MEDIUM_DARK_SKIN_TONE_BALD = "\U0001f469\U0001f3fe\u200d\U0001f9b2"
WOMAN_DARK_SKIN_TONE_BALD = "\U0001f469\U0001f3ff\u200d\U0001f9b2"
# Person (U+1F9D1) + bald component (U+1F9B2).
PERSON_BALD = "\U0001f9d1\u200d\U0001f9b2"
PERSON_LIGHT_SKIN_TONE_BALD = "\U0001f9d1\U0001f3fb\u200d\U0001f9b2"
PERSON_MEDIUM_LIGHT_SKIN_TONE_BALD = "\U0001f9d1\U0001f3fc\u200d\U0001f9b2"
PERSON_MEDIUM_SKIN_TONE_BALD = "\U0001f9d1\U0001f3fd\u200d\U0001f9b2"
PERSON_MEDIUM_DARK_SKIN_TONE_BALD = "\U0001f9d1\U0001f3fe\u200d\U0001f9b2"
PERSON_DARK_SKIN_TONE_BALD = "\U0001f9d1\U0001f3ff\u200d\U0001f9b2"
# Blond-haired person base (U+1F471) gendered with female/male sign + VS-16
# (U+FE0F, forces emoji presentation on the text-default gender symbols).
WOMAN_BLOND_HAIR = "\U0001f471\u200d\u2640\ufe0f"
WOMAN_LIGHT_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_MEDIUM_LIGHT_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_MEDIUM_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_MEDIUM_DARK_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_DARK_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3ff\u200d\u2640\ufe0f"
MAN_BLOND_HAIR = "\U0001f471\u200d\u2642\ufe0f"
MAN_LIGHT_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fb\u200d\u2642\ufe0f"
MAN_MEDIUM_LIGHT_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fc\u200d\u2642\ufe0f"
MAN_MEDIUM_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fd\u200d\u2642\ufe0f"
MAN_MEDIUM_DARK_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3fe\u200d\u2642\ufe0f"
MAN_DARK_SKIN_TONE_BLOND_HAIR = "\U0001f471\U0001f3ff\u200d\u2642\ufe0f"
# Emoji constants: older people. Single base codepoint, optionally followed
# by a Fitzpatrick skin-tone modifier (U+1F3FB..U+1F3FF, light -> dark).
# Older person (gender-neutral), U+1F9D3.
OLDER_PERSON = "\U0001f9d3"
OLDER_PERSON_LIGHT_SKIN_TONE = "\U0001f9d3\U0001f3fb"
OLDER_PERSON_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d3\U0001f3fc"
OLDER_PERSON_MEDIUM_SKIN_TONE = "\U0001f9d3\U0001f3fd"
OLDER_PERSON_MEDIUM_DARK_SKIN_TONE = "\U0001f9d3\U0001f3fe"
OLDER_PERSON_DARK_SKIN_TONE = "\U0001f9d3\U0001f3ff"
# Old man, U+1F474.
OLD_MAN = "\U0001f474"
OLD_MAN_LIGHT_SKIN_TONE = "\U0001f474\U0001f3fb"
OLD_MAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f474\U0001f3fc"
OLD_MAN_MEDIUM_SKIN_TONE = "\U0001f474\U0001f3fd"
OLD_MAN_MEDIUM_DARK_SKIN_TONE = "\U0001f474\U0001f3fe"
OLD_MAN_DARK_SKIN_TONE = "\U0001f474\U0001f3ff"
# Old woman, U+1F475.
OLD_WOMAN = "\U0001f475"
OLD_WOMAN_LIGHT_SKIN_TONE = "\U0001f475\U0001f3fb"
OLD_WOMAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f475\U0001f3fc"
OLD_WOMAN_MEDIUM_SKIN_TONE = "\U0001f475\U0001f3fd"
OLD_WOMAN_MEDIUM_DARK_SKIN_TONE = "\U0001f475\U0001f3fe"
OLD_WOMAN_DARK_SKIN_TONE = "\U0001f475\U0001f3ff"
# Emoji constants: person gestures. Each family follows the same scheme:
# gender-neutral base [+ Fitzpatrick skin tone U+1F3FB..U+1F3FF], then the
# MAN_/WOMAN_ variants append ZWJ (U+200D) + male/female sign + VS-16
# (U+FE0F, emoji presentation selector).
# Person frowning, base U+1F64D.
PERSON_FROWNING = "\U0001f64d"
PERSON_FROWNING_LIGHT_SKIN_TONE = "\U0001f64d\U0001f3fb"
PERSON_FROWNING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64d\U0001f3fc"
PERSON_FROWNING_MEDIUM_SKIN_TONE = "\U0001f64d\U0001f3fd"
PERSON_FROWNING_MEDIUM_DARK_SKIN_TONE = "\U0001f64d\U0001f3fe"
PERSON_FROWNING_DARK_SKIN_TONE = "\U0001f64d\U0001f3ff"
MAN_FROWNING = "\U0001f64d\u200d\u2642\ufe0f"
MAN_FROWNING_LIGHT_SKIN_TONE = "\U0001f64d\U0001f3fb\u200d\u2642\ufe0f"
MAN_FROWNING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64d\U0001f3fc\u200d\u2642\ufe0f"
MAN_FROWNING_MEDIUM_SKIN_TONE = "\U0001f64d\U0001f3fd\u200d\u2642\ufe0f"
MAN_FROWNING_MEDIUM_DARK_SKIN_TONE = "\U0001f64d\U0001f3fe\u200d\u2642\ufe0f"
MAN_FROWNING_DARK_SKIN_TONE = "\U0001f64d\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_FROWNING = "\U0001f64d\u200d\u2640\ufe0f"
WOMAN_FROWNING_LIGHT_SKIN_TONE = "\U0001f64d\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_FROWNING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64d\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_FROWNING_MEDIUM_SKIN_TONE = "\U0001f64d\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_FROWNING_MEDIUM_DARK_SKIN_TONE = "\U0001f64d\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_FROWNING_DARK_SKIN_TONE = "\U0001f64d\U0001f3ff\u200d\u2640\ufe0f"
# Person pouting, base U+1F64E.
PERSON_POUTING = "\U0001f64e"
PERSON_POUTING_LIGHT_SKIN_TONE = "\U0001f64e\U0001f3fb"
PERSON_POUTING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64e\U0001f3fc"
PERSON_POUTING_MEDIUM_SKIN_TONE = "\U0001f64e\U0001f3fd"
PERSON_POUTING_MEDIUM_DARK_SKIN_TONE = "\U0001f64e\U0001f3fe"
PERSON_POUTING_DARK_SKIN_TONE = "\U0001f64e\U0001f3ff"
MAN_POUTING = "\U0001f64e\u200d\u2642\ufe0f"
MAN_POUTING_LIGHT_SKIN_TONE = "\U0001f64e\U0001f3fb\u200d\u2642\ufe0f"
MAN_POUTING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64e\U0001f3fc\u200d\u2642\ufe0f"
MAN_POUTING_MEDIUM_SKIN_TONE = "\U0001f64e\U0001f3fd\u200d\u2642\ufe0f"
MAN_POUTING_MEDIUM_DARK_SKIN_TONE = "\U0001f64e\U0001f3fe\u200d\u2642\ufe0f"
MAN_POUTING_DARK_SKIN_TONE = "\U0001f64e\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_POUTING = "\U0001f64e\u200d\u2640\ufe0f"
WOMAN_POUTING_LIGHT_SKIN_TONE = "\U0001f64e\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_POUTING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64e\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_POUTING_MEDIUM_SKIN_TONE = "\U0001f64e\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_POUTING_MEDIUM_DARK_SKIN_TONE = "\U0001f64e\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_POUTING_DARK_SKIN_TONE = "\U0001f64e\U0001f3ff\u200d\u2640\ufe0f"
# Person gesturing NO, base U+1F645.
PERSON_GESTURING_NO = "\U0001f645"
PERSON_GESTURING_NO_LIGHT_SKIN_TONE = "\U0001f645\U0001f3fb"
PERSON_GESTURING_NO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f645\U0001f3fc"
PERSON_GESTURING_NO_MEDIUM_SKIN_TONE = "\U0001f645\U0001f3fd"
PERSON_GESTURING_NO_MEDIUM_DARK_SKIN_TONE = "\U0001f645\U0001f3fe"
PERSON_GESTURING_NO_DARK_SKIN_TONE = "\U0001f645\U0001f3ff"
MAN_GESTURING_NO = "\U0001f645\u200d\u2642\ufe0f"
MAN_GESTURING_NO_LIGHT_SKIN_TONE = "\U0001f645\U0001f3fb\u200d\u2642\ufe0f"
MAN_GESTURING_NO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f645\U0001f3fc\u200d\u2642\ufe0f"
MAN_GESTURING_NO_MEDIUM_SKIN_TONE = "\U0001f645\U0001f3fd\u200d\u2642\ufe0f"
MAN_GESTURING_NO_MEDIUM_DARK_SKIN_TONE = "\U0001f645\U0001f3fe\u200d\u2642\ufe0f"
MAN_GESTURING_NO_DARK_SKIN_TONE = "\U0001f645\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_GESTURING_NO = "\U0001f645\u200d\u2640\ufe0f"
WOMAN_GESTURING_NO_LIGHT_SKIN_TONE = "\U0001f645\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_GESTURING_NO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f645\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_GESTURING_NO_MEDIUM_SKIN_TONE = "\U0001f645\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_GESTURING_NO_MEDIUM_DARK_SKIN_TONE = "\U0001f645\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_GESTURING_NO_DARK_SKIN_TONE = "\U0001f645\U0001f3ff\u200d\u2640\ufe0f"
# Person gesturing OK, base U+1F646.
PERSON_GESTURING_OK = "\U0001f646"
PERSON_GESTURING_OK_LIGHT_SKIN_TONE = "\U0001f646\U0001f3fb"
PERSON_GESTURING_OK_MEDIUM_LIGHT_SKIN_TONE = "\U0001f646\U0001f3fc"
PERSON_GESTURING_OK_MEDIUM_SKIN_TONE = "\U0001f646\U0001f3fd"
PERSON_GESTURING_OK_MEDIUM_DARK_SKIN_TONE = "\U0001f646\U0001f3fe"
PERSON_GESTURING_OK_DARK_SKIN_TONE = "\U0001f646\U0001f3ff"
MAN_GESTURING_OK = "\U0001f646\u200d\u2642\ufe0f"
MAN_GESTURING_OK_LIGHT_SKIN_TONE = "\U0001f646\U0001f3fb\u200d\u2642\ufe0f"
MAN_GESTURING_OK_MEDIUM_LIGHT_SKIN_TONE = "\U0001f646\U0001f3fc\u200d\u2642\ufe0f"
MAN_GESTURING_OK_MEDIUM_SKIN_TONE = "\U0001f646\U0001f3fd\u200d\u2642\ufe0f"
MAN_GESTURING_OK_MEDIUM_DARK_SKIN_TONE = "\U0001f646\U0001f3fe\u200d\u2642\ufe0f"
MAN_GESTURING_OK_DARK_SKIN_TONE = "\U0001f646\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_GESTURING_OK = "\U0001f646\u200d\u2640\ufe0f"
WOMAN_GESTURING_OK_LIGHT_SKIN_TONE = "\U0001f646\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_GESTURING_OK_MEDIUM_LIGHT_SKIN_TONE = "\U0001f646\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_GESTURING_OK_MEDIUM_SKIN_TONE = "\U0001f646\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_GESTURING_OK_MEDIUM_DARK_SKIN_TONE = "\U0001f646\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_GESTURING_OK_DARK_SKIN_TONE = "\U0001f646\U0001f3ff\u200d\u2640\ufe0f"
# Person tipping hand, base U+1F481.
PERSON_TIPPING_HAND = "\U0001f481"
PERSON_TIPPING_HAND_LIGHT_SKIN_TONE = "\U0001f481\U0001f3fb"
PERSON_TIPPING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f481\U0001f3fc"
PERSON_TIPPING_HAND_MEDIUM_SKIN_TONE = "\U0001f481\U0001f3fd"
PERSON_TIPPING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f481\U0001f3fe"
PERSON_TIPPING_HAND_DARK_SKIN_TONE = "\U0001f481\U0001f3ff"
MAN_TIPPING_HAND = "\U0001f481\u200d\u2642\ufe0f"
MAN_TIPPING_HAND_LIGHT_SKIN_TONE = "\U0001f481\U0001f3fb\u200d\u2642\ufe0f"
MAN_TIPPING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f481\U0001f3fc\u200d\u2642\ufe0f"
MAN_TIPPING_HAND_MEDIUM_SKIN_TONE = "\U0001f481\U0001f3fd\u200d\u2642\ufe0f"
MAN_TIPPING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f481\U0001f3fe\u200d\u2642\ufe0f"
MAN_TIPPING_HAND_DARK_SKIN_TONE = "\U0001f481\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_TIPPING_HAND = "\U0001f481\u200d\u2640\ufe0f"
WOMAN_TIPPING_HAND_LIGHT_SKIN_TONE = "\U0001f481\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_TIPPING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f481\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_TIPPING_HAND_MEDIUM_SKIN_TONE = "\U0001f481\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_TIPPING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f481\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_TIPPING_HAND_DARK_SKIN_TONE = "\U0001f481\U0001f3ff\u200d\u2640\ufe0f"
# Person raising hand, base U+1F64B.
PERSON_RAISING_HAND = "\U0001f64b"
PERSON_RAISING_HAND_LIGHT_SKIN_TONE = "\U0001f64b\U0001f3fb"
PERSON_RAISING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64b\U0001f3fc"
PERSON_RAISING_HAND_MEDIUM_SKIN_TONE = "\U0001f64b\U0001f3fd"
PERSON_RAISING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f64b\U0001f3fe"
PERSON_RAISING_HAND_DARK_SKIN_TONE = "\U0001f64b\U0001f3ff"
MAN_RAISING_HAND = "\U0001f64b\u200d\u2642\ufe0f"
MAN_RAISING_HAND_LIGHT_SKIN_TONE = "\U0001f64b\U0001f3fb\u200d\u2642\ufe0f"
MAN_RAISING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64b\U0001f3fc\u200d\u2642\ufe0f"
MAN_RAISING_HAND_MEDIUM_SKIN_TONE = "\U0001f64b\U0001f3fd\u200d\u2642\ufe0f"
MAN_RAISING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f64b\U0001f3fe\u200d\u2642\ufe0f"
MAN_RAISING_HAND_DARK_SKIN_TONE = "\U0001f64b\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_RAISING_HAND = "\U0001f64b\u200d\u2640\ufe0f"
WOMAN_RAISING_HAND_LIGHT_SKIN_TONE = "\U0001f64b\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_RAISING_HAND_MEDIUM_LIGHT_SKIN_TONE = "\U0001f64b\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_RAISING_HAND_MEDIUM_SKIN_TONE = "\U0001f64b\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_RAISING_HAND_MEDIUM_DARK_SKIN_TONE = "\U0001f64b\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_RAISING_HAND_DARK_SKIN_TONE = "\U0001f64b\U0001f3ff\u200d\u2640\ufe0f"
# Emoji constants: deaf person, base U+1F9CF. Gender-neutral base with
# optional skin tone; MAN_/WOMAN_ variants append ZWJ + gender sign + VS-16.
DEAF_PERSON = "\U0001f9cf"
DEAF_PERSON_LIGHT_SKIN_TONE = "\U0001f9cf\U0001f3fb"
DEAF_PERSON_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9cf\U0001f3fc"
DEAF_PERSON_MEDIUM_SKIN_TONE = "\U0001f9cf\U0001f3fd"
DEAF_PERSON_MEDIUM_DARK_SKIN_TONE = "\U0001f9cf\U0001f3fe"
DEAF_PERSON_DARK_SKIN_TONE = "\U0001f9cf\U0001f3ff"
DEAF_MAN = "\U0001f9cf\u200d\u2642\ufe0f"
DEAF_MAN_LIGHT_SKIN_TONE = "\U0001f9cf\U0001f3fb\u200d\u2642\ufe0f"
DEAF_MAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9cf\U0001f3fc\u200d\u2642\ufe0f"
DEAF_MAN_MEDIUM_SKIN_TONE = "\U0001f9cf\U0001f3fd\u200d\u2642\ufe0f"
DEAF_MAN_MEDIUM_DARK_SKIN_TONE = "\U0001f9cf\U0001f3fe\u200d\u2642\ufe0f"
DEAF_MAN_DARK_SKIN_TONE = "\U0001f9cf\U0001f3ff\u200d\u2642\ufe0f"
DEAF_WOMAN = "\U0001f9cf\u200d\u2640\ufe0f"
DEAF_WOMAN_LIGHT_SKIN_TONE = "\U0001f9cf\U0001f3fb\u200d\u2640\ufe0f"
DEAF_WOMAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9cf\U0001f3fc\u200d\u2640\ufe0f"
DEAF_WOMAN_MEDIUM_SKIN_TONE = "\U0001f9cf\U0001f3fd\u200d\u2640\ufe0f"
DEAF_WOMAN_MEDIUM_DARK_SKIN_TONE = "\U0001f9cf\U0001f3fe\u200d\u2640\ufe0f"
DEAF_WOMAN_DARK_SKIN_TONE = "\U0001f9cf\U0001f3ff\u200d\u2640\ufe0f"
# Emoji constants: bowing, facepalming, and shrugging. Same scheme as the
# other gesture families: gender-neutral base [+ skin tone]; MAN_/WOMAN_
# variants append ZWJ (U+200D) + gender sign + VS-16 (U+FE0F).
# Person bowing, base U+1F647.
PERSON_BOWING = "\U0001f647"
PERSON_BOWING_LIGHT_SKIN_TONE = "\U0001f647\U0001f3fb"
PERSON_BOWING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f647\U0001f3fc"
PERSON_BOWING_MEDIUM_SKIN_TONE = "\U0001f647\U0001f3fd"
PERSON_BOWING_MEDIUM_DARK_SKIN_TONE = "\U0001f647\U0001f3fe"
PERSON_BOWING_DARK_SKIN_TONE = "\U0001f647\U0001f3ff"
MAN_BOWING = "\U0001f647\u200d\u2642\ufe0f"
MAN_BOWING_LIGHT_SKIN_TONE = "\U0001f647\U0001f3fb\u200d\u2642\ufe0f"
MAN_BOWING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f647\U0001f3fc\u200d\u2642\ufe0f"
MAN_BOWING_MEDIUM_SKIN_TONE = "\U0001f647\U0001f3fd\u200d\u2642\ufe0f"
MAN_BOWING_MEDIUM_DARK_SKIN_TONE = "\U0001f647\U0001f3fe\u200d\u2642\ufe0f"
MAN_BOWING_DARK_SKIN_TONE = "\U0001f647\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_BOWING = "\U0001f647\u200d\u2640\ufe0f"
WOMAN_BOWING_LIGHT_SKIN_TONE = "\U0001f647\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_BOWING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f647\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_BOWING_MEDIUM_SKIN_TONE = "\U0001f647\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_BOWING_MEDIUM_DARK_SKIN_TONE = "\U0001f647\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_BOWING_DARK_SKIN_TONE = "\U0001f647\U0001f3ff\u200d\u2640\ufe0f"
# Person facepalming, base U+1F926.
PERSON_FACEPALMING = "\U0001f926"
PERSON_FACEPALMING_LIGHT_SKIN_TONE = "\U0001f926\U0001f3fb"
PERSON_FACEPALMING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f926\U0001f3fc"
PERSON_FACEPALMING_MEDIUM_SKIN_TONE = "\U0001f926\U0001f3fd"
PERSON_FACEPALMING_MEDIUM_DARK_SKIN_TONE = "\U0001f926\U0001f3fe"
PERSON_FACEPALMING_DARK_SKIN_TONE = "\U0001f926\U0001f3ff"
MAN_FACEPALMING = "\U0001f926\u200d\u2642\ufe0f"
MAN_FACEPALMING_LIGHT_SKIN_TONE = "\U0001f926\U0001f3fb\u200d\u2642\ufe0f"
MAN_FACEPALMING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f926\U0001f3fc\u200d\u2642\ufe0f"
MAN_FACEPALMING_MEDIUM_SKIN_TONE = "\U0001f926\U0001f3fd\u200d\u2642\ufe0f"
MAN_FACEPALMING_MEDIUM_DARK_SKIN_TONE = "\U0001f926\U0001f3fe\u200d\u2642\ufe0f"
MAN_FACEPALMING_DARK_SKIN_TONE = "\U0001f926\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_FACEPALMING = "\U0001f926\u200d\u2640\ufe0f"
WOMAN_FACEPALMING_LIGHT_SKIN_TONE = "\U0001f926\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_FACEPALMING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f926\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_FACEPALMING_MEDIUM_SKIN_TONE = "\U0001f926\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_FACEPALMING_MEDIUM_DARK_SKIN_TONE = "\U0001f926\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_FACEPALMING_DARK_SKIN_TONE = "\U0001f926\U0001f3ff\u200d\u2640\ufe0f"
# Person shrugging, base U+1F937.
PERSON_SHRUGGING = "\U0001f937"
PERSON_SHRUGGING_LIGHT_SKIN_TONE = "\U0001f937\U0001f3fb"
PERSON_SHRUGGING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f937\U0001f3fc"
PERSON_SHRUGGING_MEDIUM_SKIN_TONE = "\U0001f937\U0001f3fd"
PERSON_SHRUGGING_MEDIUM_DARK_SKIN_TONE = "\U0001f937\U0001f3fe"
PERSON_SHRUGGING_DARK_SKIN_TONE = "\U0001f937\U0001f3ff"
MAN_SHRUGGING = "\U0001f937\u200d\u2642\ufe0f"
MAN_SHRUGGING_LIGHT_SKIN_TONE = "\U0001f937\U0001f3fb\u200d\u2642\ufe0f"
MAN_SHRUGGING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f937\U0001f3fc\u200d\u2642\ufe0f"
MAN_SHRUGGING_MEDIUM_SKIN_TONE = "\U0001f937\U0001f3fd\u200d\u2642\ufe0f"
MAN_SHRUGGING_MEDIUM_DARK_SKIN_TONE = "\U0001f937\U0001f3fe\u200d\u2642\ufe0f"
MAN_SHRUGGING_DARK_SKIN_TONE = "\U0001f937\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_SHRUGGING = "\U0001f937\u200d\u2640\ufe0f"
WOMAN_SHRUGGING_LIGHT_SKIN_TONE = "\U0001f937\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_SHRUGGING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f937\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_SHRUGGING_MEDIUM_SKIN_TONE = "\U0001f937\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_SHRUGGING_MEDIUM_DARK_SKIN_TONE = "\U0001f937\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_SHRUGGING_DARK_SKIN_TONE = "\U0001f937\U0001f3ff\u200d\u2640\ufe0f"
# Emoji constants: health worker. Profession scheme: person U+1F9D1 /
# man U+1F468 / woman U+1F469 [+ skin tone] + ZWJ + staff of aesculapius
# (U+2695) + VS-16 (emoji presentation on the text-default symbol).
HEALTH_WORKER = "\U0001f9d1\u200d\u2695\ufe0f"
HEALTH_WORKER_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\u2695\ufe0f"
HEALTH_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\u2695\ufe0f"
HEALTH_WORKER_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\u2695\ufe0f"
HEALTH_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\u2695\ufe0f"
HEALTH_WORKER_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\u2695\ufe0f"
MAN_HEALTH_WORKER = "\U0001f468\u200d\u2695\ufe0f"
MAN_HEALTH_WORKER_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\u2695\ufe0f"
MAN_HEALTH_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\u2695\ufe0f"
MAN_HEALTH_WORKER_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\u2695\ufe0f"
MAN_HEALTH_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\u2695\ufe0f"
MAN_HEALTH_WORKER_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\u2695\ufe0f"
WOMAN_HEALTH_WORKER = "\U0001f469\u200d\u2695\ufe0f"
WOMAN_HEALTH_WORKER_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\u2695\ufe0f"
WOMAN_HEALTH_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\u2695\ufe0f"
WOMAN_HEALTH_WORKER_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\u2695\ufe0f"
WOMAN_HEALTH_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\u2695\ufe0f"
WOMAN_HEALTH_WORKER_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\u2695\ufe0f"
# Emoji constants: student and teacher. Profession scheme: person U+1F9D1 /
# man U+1F468 / woman U+1F469 [+ skin tone] + ZWJ + object codepoint.
# Student: + graduation cap (U+1F393).
STUDENT = "\U0001f9d1\u200d\U0001f393"
STUDENT_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f393"
STUDENT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f393"
STUDENT_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f393"
STUDENT_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f393"
STUDENT_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f393"
MAN_STUDENT = "\U0001f468\u200d\U0001f393"
MAN_STUDENT_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f393"
MAN_STUDENT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f393"
MAN_STUDENT_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f393"
MAN_STUDENT_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f393"
MAN_STUDENT_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f393"
WOMAN_STUDENT = "\U0001f469\u200d\U0001f393"
WOMAN_STUDENT_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f393"
WOMAN_STUDENT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f393"
WOMAN_STUDENT_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f393"
WOMAN_STUDENT_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f393"
WOMAN_STUDENT_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f393"
# Teacher: + school building (U+1F3EB).
TEACHER = "\U0001f9d1\u200d\U0001f3eb"
TEACHER_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f3eb"
TEACHER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f3eb"
TEACHER_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f3eb"
TEACHER_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f3eb"
TEACHER_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f3eb"
MAN_TEACHER = "\U0001f468\u200d\U0001f3eb"
MAN_TEACHER_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f3eb"
MAN_TEACHER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f3eb"
MAN_TEACHER_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f3eb"
MAN_TEACHER_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f3eb"
MAN_TEACHER_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f3eb"
WOMAN_TEACHER = "\U0001f469\u200d\U0001f3eb"
WOMAN_TEACHER_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f3eb"
WOMAN_TEACHER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f3eb"
WOMAN_TEACHER_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f3eb"
WOMAN_TEACHER_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f3eb"
WOMAN_TEACHER_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f3eb"
# Emoji constants: judge. Person/man/woman [+ skin tone] + ZWJ + scales
# (U+2696) + VS-16 (emoji presentation on the text-default symbol).
JUDGE = "\U0001f9d1\u200d\u2696\ufe0f"
JUDGE_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\u2696\ufe0f"
JUDGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\u2696\ufe0f"
JUDGE_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\u2696\ufe0f"
JUDGE_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\u2696\ufe0f"
JUDGE_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\u2696\ufe0f"
MAN_JUDGE = "\U0001f468\u200d\u2696\ufe0f"
MAN_JUDGE_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\u2696\ufe0f"
MAN_JUDGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\u2696\ufe0f"
MAN_JUDGE_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\u2696\ufe0f"
MAN_JUDGE_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\u2696\ufe0f"
MAN_JUDGE_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\u2696\ufe0f"
WOMAN_JUDGE = "\U0001f469\u200d\u2696\ufe0f"
WOMAN_JUDGE_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\u2696\ufe0f"
WOMAN_JUDGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\u2696\ufe0f"
WOMAN_JUDGE_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\u2696\ufe0f"
WOMAN_JUDGE_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\u2696\ufe0f"
WOMAN_JUDGE_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\u2696\ufe0f"
# Emoji constants: farmer and cook. Person/man/woman [+ skin tone] + ZWJ +
# object codepoint.
# Farmer: + sheaf of rice (U+1F33E).
FARMER = "\U0001f9d1\u200d\U0001f33e"
FARMER_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f33e"
FARMER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f33e"
FARMER_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f33e"
FARMER_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f33e"
FARMER_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f33e"
MAN_FARMER = "\U0001f468\u200d\U0001f33e"
MAN_FARMER_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f33e"
MAN_FARMER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f33e"
MAN_FARMER_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f33e"
MAN_FARMER_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f33e"
MAN_FARMER_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f33e"
WOMAN_FARMER = "\U0001f469\u200d\U0001f33e"
WOMAN_FARMER_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f33e"
WOMAN_FARMER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f33e"
WOMAN_FARMER_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f33e"
WOMAN_FARMER_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f33e"
WOMAN_FARMER_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f33e"
# Cook: + cooking/frying pan (U+1F373).
COOK = "\U0001f9d1\u200d\U0001f373"
COOK_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f373"
COOK_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f373"
COOK_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f373"
COOK_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f373"
COOK_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f373"
MAN_COOK = "\U0001f468\u200d\U0001f373"
MAN_COOK_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f373"
MAN_COOK_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f373"
MAN_COOK_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f373"
MAN_COOK_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f373"
MAN_COOK_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f373"
WOMAN_COOK = "\U0001f469\u200d\U0001f373"
WOMAN_COOK_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f373"
WOMAN_COOK_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f373"
WOMAN_COOK_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f373"
WOMAN_COOK_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f373"
WOMAN_COOK_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f373"
# Emoji constants: mechanic and factory worker. Person/man/woman
# [+ skin tone] + ZWJ + object codepoint.
# Mechanic: + wrench (U+1F527).
MECHANIC = "\U0001f9d1\u200d\U0001f527"
MECHANIC_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f527"
MECHANIC_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f527"
MECHANIC_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f527"
MECHANIC_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f527"
MECHANIC_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f527"
MAN_MECHANIC = "\U0001f468\u200d\U0001f527"
MAN_MECHANIC_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f527"
MAN_MECHANIC_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f527"
MAN_MECHANIC_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f527"
MAN_MECHANIC_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f527"
MAN_MECHANIC_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f527"
WOMAN_MECHANIC = "\U0001f469\u200d\U0001f527"
WOMAN_MECHANIC_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f527"
WOMAN_MECHANIC_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f527"
WOMAN_MECHANIC_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f527"
WOMAN_MECHANIC_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f527"
WOMAN_MECHANIC_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f527"
# Factory worker: + factory (U+1F3ED).
FACTORY_WORKER = "\U0001f9d1\u200d\U0001f3ed"
FACTORY_WORKER_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f3ed"
FACTORY_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f3ed"
FACTORY_WORKER_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f3ed"
FACTORY_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f3ed"
FACTORY_WORKER_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f3ed"
MAN_FACTORY_WORKER = "\U0001f468\u200d\U0001f3ed"
MAN_FACTORY_WORKER_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f3ed"
MAN_FACTORY_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f3ed"
MAN_FACTORY_WORKER_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f3ed"
MAN_FACTORY_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f3ed"
MAN_FACTORY_WORKER_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f3ed"
WOMAN_FACTORY_WORKER = "\U0001f469\u200d\U0001f3ed"
WOMAN_FACTORY_WORKER_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f3ed"
WOMAN_FACTORY_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f3ed"
WOMAN_FACTORY_WORKER_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f3ed"
WOMAN_FACTORY_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f3ed"
WOMAN_FACTORY_WORKER_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f3ed"
# Emoji constants: office worker and scientist. Person/man/woman
# [+ skin tone] + ZWJ + object codepoint.
# Office worker: + briefcase (U+1F4BC).
OFFICE_WORKER = "\U0001f9d1\u200d\U0001f4bc"
OFFICE_WORKER_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f4bc"
OFFICE_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f4bc"
OFFICE_WORKER_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f4bc"
OFFICE_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f4bc"
OFFICE_WORKER_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f4bc"
MAN_OFFICE_WORKER = "\U0001f468\u200d\U0001f4bc"
MAN_OFFICE_WORKER_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f4bc"
MAN_OFFICE_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f4bc"
MAN_OFFICE_WORKER_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f4bc"
MAN_OFFICE_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f4bc"
MAN_OFFICE_WORKER_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f4bc"
WOMAN_OFFICE_WORKER = "\U0001f469\u200d\U0001f4bc"
WOMAN_OFFICE_WORKER_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f4bc"
WOMAN_OFFICE_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f4bc"
WOMAN_OFFICE_WORKER_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f4bc"
WOMAN_OFFICE_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f4bc"
WOMAN_OFFICE_WORKER_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f4bc"
# Scientist: + microscope (U+1F52C).
SCIENTIST = "\U0001f9d1\u200d\U0001f52c"
SCIENTIST_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f52c"
SCIENTIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f52c"
SCIENTIST_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f52c"
SCIENTIST_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f52c"
SCIENTIST_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f52c"
MAN_SCIENTIST = "\U0001f468\u200d\U0001f52c"
MAN_SCIENTIST_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f52c"
MAN_SCIENTIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f52c"
MAN_SCIENTIST_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f52c"
MAN_SCIENTIST_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f52c"
MAN_SCIENTIST_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f52c"
WOMAN_SCIENTIST = "\U0001f469\u200d\U0001f52c"
WOMAN_SCIENTIST_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f52c"
WOMAN_SCIENTIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f52c"
WOMAN_SCIENTIST_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f52c"
WOMAN_SCIENTIST_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f52c"
WOMAN_SCIENTIST_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f52c"
# Emoji constants: technologist and singer. Person/man/woman [+ skin tone]
# + ZWJ + object codepoint.
# Technologist: + laptop (U+1F4BB).
TECHNOLOGIST = "\U0001f9d1\u200d\U0001f4bb"
TECHNOLOGIST_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f4bb"
TECHNOLOGIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f4bb"
TECHNOLOGIST_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f4bb"
TECHNOLOGIST_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f4bb"
TECHNOLOGIST_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f4bb"
MAN_TECHNOLOGIST = "\U0001f468\u200d\U0001f4bb"
MAN_TECHNOLOGIST_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f4bb"
MAN_TECHNOLOGIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f4bb"
MAN_TECHNOLOGIST_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f4bb"
MAN_TECHNOLOGIST_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f4bb"
MAN_TECHNOLOGIST_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f4bb"
WOMAN_TECHNOLOGIST = "\U0001f469\u200d\U0001f4bb"
WOMAN_TECHNOLOGIST_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f4bb"
WOMAN_TECHNOLOGIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f4bb"
WOMAN_TECHNOLOGIST_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f4bb"
WOMAN_TECHNOLOGIST_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f4bb"
WOMAN_TECHNOLOGIST_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f4bb"
# Singer: + microphone (U+1F3A4).
SINGER = "\U0001f9d1\u200d\U0001f3a4"
SINGER_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f3a4"
SINGER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f3a4"
SINGER_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f3a4"
SINGER_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f3a4"
SINGER_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f3a4"
MAN_SINGER = "\U0001f468\u200d\U0001f3a4"
MAN_SINGER_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f3a4"
MAN_SINGER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f3a4"
MAN_SINGER_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f3a4"
MAN_SINGER_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f3a4"
MAN_SINGER_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f3a4"
WOMAN_SINGER = "\U0001f469\u200d\U0001f3a4"
WOMAN_SINGER_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f3a4"
WOMAN_SINGER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f3a4"
WOMAN_SINGER_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f3a4"
WOMAN_SINGER_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f3a4"
WOMAN_SINGER_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f3a4"
# Emoji constants: artist and pilot. Person/man/woman [+ skin tone] + ZWJ +
# object codepoint (pilot's airplane U+2708 is text-default, so VS-16
# U+FE0F is appended).
# Artist: + artist palette (U+1F3A8).
ARTIST = "\U0001f9d1\u200d\U0001f3a8"
ARTIST_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f3a8"
ARTIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f3a8"
ARTIST_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f3a8"
ARTIST_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f3a8"
ARTIST_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f3a8"
MAN_ARTIST = "\U0001f468\u200d\U0001f3a8"
MAN_ARTIST_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f3a8"
MAN_ARTIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f3a8"
MAN_ARTIST_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f3a8"
MAN_ARTIST_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f3a8"
MAN_ARTIST_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f3a8"
WOMAN_ARTIST = "\U0001f469\u200d\U0001f3a8"
WOMAN_ARTIST_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f3a8"
WOMAN_ARTIST_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f3a8"
WOMAN_ARTIST_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f3a8"
WOMAN_ARTIST_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f3a8"
WOMAN_ARTIST_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f3a8"
# Pilot: + airplane (U+2708) + VS-16.
PILOT = "\U0001f9d1\u200d\u2708\ufe0f"
PILOT_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\u2708\ufe0f"
PILOT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\u2708\ufe0f"
PILOT_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\u2708\ufe0f"
PILOT_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\u2708\ufe0f"
PILOT_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\u2708\ufe0f"
MAN_PILOT = "\U0001f468\u200d\u2708\ufe0f"
MAN_PILOT_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\u2708\ufe0f"
MAN_PILOT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\u2708\ufe0f"
MAN_PILOT_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\u2708\ufe0f"
MAN_PILOT_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\u2708\ufe0f"
MAN_PILOT_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\u2708\ufe0f"
WOMAN_PILOT = "\U0001f469\u200d\u2708\ufe0f"
WOMAN_PILOT_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\u2708\ufe0f"
WOMAN_PILOT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\u2708\ufe0f"
WOMAN_PILOT_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\u2708\ufe0f"
WOMAN_PILOT_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\u2708\ufe0f"
WOMAN_PILOT_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\u2708\ufe0f"
# Emoji constants: astronaut and firefighter. Person/man/woman [+ skin tone]
# + ZWJ + object codepoint.
# Astronaut: + rocket (U+1F680).
ASTRONAUT = "\U0001f9d1\u200d\U0001f680"
ASTRONAUT_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f680"
ASTRONAUT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f680"
ASTRONAUT_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f680"
ASTRONAUT_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f680"
ASTRONAUT_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f680"
MAN_ASTRONAUT = "\U0001f468\u200d\U0001f680"
MAN_ASTRONAUT_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f680"
MAN_ASTRONAUT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f680"
MAN_ASTRONAUT_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f680"
MAN_ASTRONAUT_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f680"
MAN_ASTRONAUT_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f680"
WOMAN_ASTRONAUT = "\U0001f469\u200d\U0001f680"
WOMAN_ASTRONAUT_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f680"
WOMAN_ASTRONAUT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f680"
WOMAN_ASTRONAUT_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f680"
WOMAN_ASTRONAUT_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f680"
WOMAN_ASTRONAUT_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f680"
# Firefighter: + fire engine (U+1F692).
FIREFIGHTER = "\U0001f9d1\u200d\U0001f692"
FIREFIGHTER_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f692"
FIREFIGHTER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f692"
FIREFIGHTER_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f692"
FIREFIGHTER_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f692"
FIREFIGHTER_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f692"
MAN_FIREFIGHTER = "\U0001f468\u200d\U0001f692"
MAN_FIREFIGHTER_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f692"
MAN_FIREFIGHTER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f692"
MAN_FIREFIGHTER_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f692"
MAN_FIREFIGHTER_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f692"
MAN_FIREFIGHTER_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f692"
WOMAN_FIREFIGHTER = "\U0001f469\u200d\U0001f692"
WOMAN_FIREFIGHTER_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f692"
WOMAN_FIREFIGHTER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f692"
WOMAN_FIREFIGHTER_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f692"
WOMAN_FIREFIGHTER_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f692"
WOMAN_FIREFIGHTER_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f692"
# Gendered role emoji built on a dedicated base character: the base (optionally
# skin-toned) is joined via ZWJ (U+200D) to a gender sign — U+2642 male /
# U+2640 female — followed by VS16 (U+FE0F) for emoji presentation.
# Police officer: base U+1F46E.
POLICE_OFFICER = "\U0001f46e"
POLICE_OFFICER_LIGHT_SKIN_TONE = "\U0001f46e\U0001f3fb"
POLICE_OFFICER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f46e\U0001f3fc"
POLICE_OFFICER_MEDIUM_SKIN_TONE = "\U0001f46e\U0001f3fd"
POLICE_OFFICER_MEDIUM_DARK_SKIN_TONE = "\U0001f46e\U0001f3fe"
POLICE_OFFICER_DARK_SKIN_TONE = "\U0001f46e\U0001f3ff"
MAN_POLICE_OFFICER = "\U0001f46e\u200d\u2642\ufe0f"
MAN_POLICE_OFFICER_LIGHT_SKIN_TONE = "\U0001f46e\U0001f3fb\u200d\u2642\ufe0f"
MAN_POLICE_OFFICER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f46e\U0001f3fc\u200d\u2642\ufe0f"
MAN_POLICE_OFFICER_MEDIUM_SKIN_TONE = "\U0001f46e\U0001f3fd\u200d\u2642\ufe0f"
MAN_POLICE_OFFICER_MEDIUM_DARK_SKIN_TONE = "\U0001f46e\U0001f3fe\u200d\u2642\ufe0f"
MAN_POLICE_OFFICER_DARK_SKIN_TONE = "\U0001f46e\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_POLICE_OFFICER = "\U0001f46e\u200d\u2640\ufe0f"
WOMAN_POLICE_OFFICER_LIGHT_SKIN_TONE = "\U0001f46e\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_POLICE_OFFICER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f46e\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_POLICE_OFFICER_MEDIUM_SKIN_TONE = "\U0001f46e\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_POLICE_OFFICER_MEDIUM_DARK_SKIN_TONE = "\U0001f46e\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_POLICE_OFFICER_DARK_SKIN_TONE = "\U0001f46e\U0001f3ff\u200d\u2640\ufe0f"
# Detective: base U+1F575 is text-default, so the un-toned forms carry VS16
# (U+FE0F) right after it; the skin-tone modifier replaces VS16 in toned forms.
DETECTIVE = "\U0001f575\ufe0f"
DETECTIVE_LIGHT_SKIN_TONE = "\U0001f575\U0001f3fb"
DETECTIVE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f575\U0001f3fc"
DETECTIVE_MEDIUM_SKIN_TONE = "\U0001f575\U0001f3fd"
DETECTIVE_MEDIUM_DARK_SKIN_TONE = "\U0001f575\U0001f3fe"
DETECTIVE_DARK_SKIN_TONE = "\U0001f575\U0001f3ff"
MAN_DETECTIVE = "\U0001f575\ufe0f\u200d\u2642\ufe0f"
MAN_DETECTIVE_LIGHT_SKIN_TONE = "\U0001f575\U0001f3fb\u200d\u2642\ufe0f"
MAN_DETECTIVE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f575\U0001f3fc\u200d\u2642\ufe0f"
MAN_DETECTIVE_MEDIUM_SKIN_TONE = "\U0001f575\U0001f3fd\u200d\u2642\ufe0f"
MAN_DETECTIVE_MEDIUM_DARK_SKIN_TONE = "\U0001f575\U0001f3fe\u200d\u2642\ufe0f"
MAN_DETECTIVE_DARK_SKIN_TONE = "\U0001f575\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_DETECTIVE = "\U0001f575\ufe0f\u200d\u2640\ufe0f"
WOMAN_DETECTIVE_LIGHT_SKIN_TONE = "\U0001f575\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_DETECTIVE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f575\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_DETECTIVE_MEDIUM_SKIN_TONE = "\U0001f575\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_DETECTIVE_MEDIUM_DARK_SKIN_TONE = "\U0001f575\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_DETECTIVE_DARK_SKIN_TONE = "\U0001f575\U0001f3ff\u200d\u2640\ufe0f"
# Guard: base U+1F482.
GUARD = "\U0001f482"
GUARD_LIGHT_SKIN_TONE = "\U0001f482\U0001f3fb"
GUARD_MEDIUM_LIGHT_SKIN_TONE = "\U0001f482\U0001f3fc"
GUARD_MEDIUM_SKIN_TONE = "\U0001f482\U0001f3fd"
GUARD_MEDIUM_DARK_SKIN_TONE = "\U0001f482\U0001f3fe"
GUARD_DARK_SKIN_TONE = "\U0001f482\U0001f3ff"
MAN_GUARD = "\U0001f482\u200d\u2642\ufe0f"
MAN_GUARD_LIGHT_SKIN_TONE = "\U0001f482\U0001f3fb\u200d\u2642\ufe0f"
MAN_GUARD_MEDIUM_LIGHT_SKIN_TONE = "\U0001f482\U0001f3fc\u200d\u2642\ufe0f"
MAN_GUARD_MEDIUM_SKIN_TONE = "\U0001f482\U0001f3fd\u200d\u2642\ufe0f"
MAN_GUARD_MEDIUM_DARK_SKIN_TONE = "\U0001f482\U0001f3fe\u200d\u2642\ufe0f"
MAN_GUARD_DARK_SKIN_TONE = "\U0001f482\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_GUARD = "\U0001f482\u200d\u2640\ufe0f"
WOMAN_GUARD_LIGHT_SKIN_TONE = "\U0001f482\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_GUARD_MEDIUM_LIGHT_SKIN_TONE = "\U0001f482\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_GUARD_MEDIUM_SKIN_TONE = "\U0001f482\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_GUARD_MEDIUM_DARK_SKIN_TONE = "\U0001f482\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_GUARD_DARK_SKIN_TONE = "\U0001f482\U0001f3ff\u200d\u2640\ufe0f"
# Ninja (U+1F977): skin-tone variants only — no gendered ZWJ sequences exist.
NINJA = "\U0001f977"
NINJA_LIGHT_SKIN_TONE = "\U0001f977\U0001f3fb"
NINJA_MEDIUM_LIGHT_SKIN_TONE = "\U0001f977\U0001f3fc"
NINJA_MEDIUM_SKIN_TONE = "\U0001f977\U0001f3fd"
NINJA_MEDIUM_DARK_SKIN_TONE = "\U0001f977\U0001f3fe"
NINJA_DARK_SKIN_TONE = "\U0001f977\U0001f3ff"
# Construction worker: base U+1F477, with skin-tone modifiers and
# ZWJ + gender-sign (U+2642/U+2640 + VS16) variants.
CONSTRUCTION_WORKER = "\U0001f477"
CONSTRUCTION_WORKER_LIGHT_SKIN_TONE = "\U0001f477\U0001f3fb"
CONSTRUCTION_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f477\U0001f3fc"
CONSTRUCTION_WORKER_MEDIUM_SKIN_TONE = "\U0001f477\U0001f3fd"
CONSTRUCTION_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f477\U0001f3fe"
CONSTRUCTION_WORKER_DARK_SKIN_TONE = "\U0001f477\U0001f3ff"
MAN_CONSTRUCTION_WORKER = "\U0001f477\u200d\u2642\ufe0f"
MAN_CONSTRUCTION_WORKER_LIGHT_SKIN_TONE = "\U0001f477\U0001f3fb\u200d\u2642\ufe0f"
MAN_CONSTRUCTION_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f477\U0001f3fc\u200d\u2642\ufe0f"
MAN_CONSTRUCTION_WORKER_MEDIUM_SKIN_TONE = "\U0001f477\U0001f3fd\u200d\u2642\ufe0f"
MAN_CONSTRUCTION_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f477\U0001f3fe\u200d\u2642\ufe0f"
MAN_CONSTRUCTION_WORKER_DARK_SKIN_TONE = "\U0001f477\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_CONSTRUCTION_WORKER = "\U0001f477\u200d\u2640\ufe0f"
WOMAN_CONSTRUCTION_WORKER_LIGHT_SKIN_TONE = "\U0001f477\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_CONSTRUCTION_WORKER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f477\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_CONSTRUCTION_WORKER_MEDIUM_SKIN_TONE = "\U0001f477\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_CONSTRUCTION_WORKER_MEDIUM_DARK_SKIN_TONE = "\U0001f477\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_CONSTRUCTION_WORKER_DARK_SKIN_TONE = "\U0001f477\U0001f3ff\u200d\u2640\ufe0f"
# Royalty: person with crown (U+1FAC5), prince (U+1F934), princess (U+1F478) —
# single code points plus skin-tone modifiers, no ZWJ variants.
PERSON_WITH_CROWN = "\U0001fac5"
PERSON_WITH_CROWN_LIGHT_SKIN_TONE = "\U0001fac5\U0001f3fb"
PERSON_WITH_CROWN_MEDIUM_LIGHT_SKIN_TONE = "\U0001fac5\U0001f3fc"
PERSON_WITH_CROWN_MEDIUM_SKIN_TONE = "\U0001fac5\U0001f3fd"
PERSON_WITH_CROWN_MEDIUM_DARK_SKIN_TONE = "\U0001fac5\U0001f3fe"
PERSON_WITH_CROWN_DARK_SKIN_TONE = "\U0001fac5\U0001f3ff"
PRINCE = "\U0001f934"
PRINCE_LIGHT_SKIN_TONE = "\U0001f934\U0001f3fb"
PRINCE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f934\U0001f3fc"
PRINCE_MEDIUM_SKIN_TONE = "\U0001f934\U0001f3fd"
PRINCE_MEDIUM_DARK_SKIN_TONE = "\U0001f934\U0001f3fe"
PRINCE_DARK_SKIN_TONE = "\U0001f934\U0001f3ff"
PRINCESS = "\U0001f478"
PRINCESS_LIGHT_SKIN_TONE = "\U0001f478\U0001f3fb"
PRINCESS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f478\U0001f3fc"
PRINCESS_MEDIUM_SKIN_TONE = "\U0001f478\U0001f3fd"
PRINCESS_MEDIUM_DARK_SKIN_TONE = "\U0001f478\U0001f3fe"
PRINCESS_DARK_SKIN_TONE = "\U0001f478\U0001f3ff"
# Person wearing turban: base U+1F473, with gendered ZWJ variants.
PERSON_WEARING_TURBAN = "\U0001f473"
PERSON_WEARING_TURBAN_LIGHT_SKIN_TONE = "\U0001f473\U0001f3fb"
PERSON_WEARING_TURBAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f473\U0001f3fc"
PERSON_WEARING_TURBAN_MEDIUM_SKIN_TONE = "\U0001f473\U0001f3fd"
PERSON_WEARING_TURBAN_MEDIUM_DARK_SKIN_TONE = "\U0001f473\U0001f3fe"
PERSON_WEARING_TURBAN_DARK_SKIN_TONE = "\U0001f473\U0001f3ff"
MAN_WEARING_TURBAN = "\U0001f473\u200d\u2642\ufe0f"
MAN_WEARING_TURBAN_LIGHT_SKIN_TONE = "\U0001f473\U0001f3fb\u200d\u2642\ufe0f"
MAN_WEARING_TURBAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f473\U0001f3fc\u200d\u2642\ufe0f"
MAN_WEARING_TURBAN_MEDIUM_SKIN_TONE = "\U0001f473\U0001f3fd\u200d\u2642\ufe0f"
MAN_WEARING_TURBAN_MEDIUM_DARK_SKIN_TONE = "\U0001f473\U0001f3fe\u200d\u2642\ufe0f"
MAN_WEARING_TURBAN_DARK_SKIN_TONE = "\U0001f473\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_WEARING_TURBAN = "\U0001f473\u200d\u2640\ufe0f"
WOMAN_WEARING_TURBAN_LIGHT_SKIN_TONE = "\U0001f473\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_WEARING_TURBAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f473\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_WEARING_TURBAN_MEDIUM_SKIN_TONE = "\U0001f473\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_WEARING_TURBAN_MEDIUM_DARK_SKIN_TONE = "\U0001f473\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_WEARING_TURBAN_DARK_SKIN_TONE = "\U0001f473\U0001f3ff\u200d\u2640\ufe0f"
# Person with skullcap (U+1F472) and woman with headscarf (U+1F9D5):
# single code points plus skin-tone modifiers only.
PERSON_WITH_SKULLCAP = "\U0001f472"
PERSON_WITH_SKULLCAP_LIGHT_SKIN_TONE = "\U0001f472\U0001f3fb"
PERSON_WITH_SKULLCAP_MEDIUM_LIGHT_SKIN_TONE = "\U0001f472\U0001f3fc"
PERSON_WITH_SKULLCAP_MEDIUM_SKIN_TONE = "\U0001f472\U0001f3fd"
PERSON_WITH_SKULLCAP_MEDIUM_DARK_SKIN_TONE = "\U0001f472\U0001f3fe"
PERSON_WITH_SKULLCAP_DARK_SKIN_TONE = "\U0001f472\U0001f3ff"
WOMAN_WITH_HEADSCARF = "\U0001f9d5"
WOMAN_WITH_HEADSCARF_LIGHT_SKIN_TONE = "\U0001f9d5\U0001f3fb"
WOMAN_WITH_HEADSCARF_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d5\U0001f3fc"
WOMAN_WITH_HEADSCARF_MEDIUM_SKIN_TONE = "\U0001f9d5\U0001f3fd"
WOMAN_WITH_HEADSCARF_MEDIUM_DARK_SKIN_TONE = "\U0001f9d5\U0001f3fe"
WOMAN_WITH_HEADSCARF_DARK_SKIN_TONE = "\U0001f9d5\U0001f3ff"
# Person in tuxedo: base U+1F935, with gendered ZWJ (U+200D + gender sign +
# VS16) variants.
PERSON_IN_TUXEDO = "\U0001f935"
PERSON_IN_TUXEDO_LIGHT_SKIN_TONE = "\U0001f935\U0001f3fb"
PERSON_IN_TUXEDO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f935\U0001f3fc"
PERSON_IN_TUXEDO_MEDIUM_SKIN_TONE = "\U0001f935\U0001f3fd"
PERSON_IN_TUXEDO_MEDIUM_DARK_SKIN_TONE = "\U0001f935\U0001f3fe"
PERSON_IN_TUXEDO_DARK_SKIN_TONE = "\U0001f935\U0001f3ff"
MAN_IN_TUXEDO = "\U0001f935\u200d\u2642\ufe0f"
MAN_IN_TUXEDO_LIGHT_SKIN_TONE = "\U0001f935\U0001f3fb\u200d\u2642\ufe0f"
MAN_IN_TUXEDO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f935\U0001f3fc\u200d\u2642\ufe0f"
MAN_IN_TUXEDO_MEDIUM_SKIN_TONE = "\U0001f935\U0001f3fd\u200d\u2642\ufe0f"
MAN_IN_TUXEDO_MEDIUM_DARK_SKIN_TONE = "\U0001f935\U0001f3fe\u200d\u2642\ufe0f"
MAN_IN_TUXEDO_DARK_SKIN_TONE = "\U0001f935\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_IN_TUXEDO = "\U0001f935\u200d\u2640\ufe0f"
WOMAN_IN_TUXEDO_LIGHT_SKIN_TONE = "\U0001f935\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_IN_TUXEDO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f935\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_IN_TUXEDO_MEDIUM_SKIN_TONE = "\U0001f935\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_IN_TUXEDO_MEDIUM_DARK_SKIN_TONE = "\U0001f935\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_IN_TUXEDO_DARK_SKIN_TONE = "\U0001f935\U0001f3ff\u200d\u2640\ufe0f"
# Person with veil: base U+1F470, with gendered ZWJ variants.
PERSON_WITH_VEIL = "\U0001f470"
PERSON_WITH_VEIL_LIGHT_SKIN_TONE = "\U0001f470\U0001f3fb"
PERSON_WITH_VEIL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f470\U0001f3fc"
PERSON_WITH_VEIL_MEDIUM_SKIN_TONE = "\U0001f470\U0001f3fd"
PERSON_WITH_VEIL_MEDIUM_DARK_SKIN_TONE = "\U0001f470\U0001f3fe"
PERSON_WITH_VEIL_DARK_SKIN_TONE = "\U0001f470\U0001f3ff"
MAN_WITH_VEIL = "\U0001f470\u200d\u2642\ufe0f"
MAN_WITH_VEIL_LIGHT_SKIN_TONE = "\U0001f470\U0001f3fb\u200d\u2642\ufe0f"
MAN_WITH_VEIL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f470\U0001f3fc\u200d\u2642\ufe0f"
MAN_WITH_VEIL_MEDIUM_SKIN_TONE = "\U0001f470\U0001f3fd\u200d\u2642\ufe0f"
MAN_WITH_VEIL_MEDIUM_DARK_SKIN_TONE = "\U0001f470\U0001f3fe\u200d\u2642\ufe0f"
MAN_WITH_VEIL_DARK_SKIN_TONE = "\U0001f470\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_WITH_VEIL = "\U0001f470\u200d\u2640\ufe0f"
WOMAN_WITH_VEIL_LIGHT_SKIN_TONE = "\U0001f470\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_WITH_VEIL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f470\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_WITH_VEIL_MEDIUM_SKIN_TONE = "\U0001f470\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_WITH_VEIL_MEDIUM_DARK_SKIN_TONE = "\U0001f470\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_WITH_VEIL_DARK_SKIN_TONE = "\U0001f470\U0001f3ff\u200d\u2640\ufe0f"
# Pregnancy / nursing: pregnant woman (U+1F930), pregnant man (U+1FAC3),
# pregnant person (U+1FAC4), breast-feeding (U+1F931) — skin-tone variants only.
PREGNANT_WOMAN = "\U0001f930"
PREGNANT_WOMAN_LIGHT_SKIN_TONE = "\U0001f930\U0001f3fb"
PREGNANT_WOMAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f930\U0001f3fc"
PREGNANT_WOMAN_MEDIUM_SKIN_TONE = "\U0001f930\U0001f3fd"
PREGNANT_WOMAN_MEDIUM_DARK_SKIN_TONE = "\U0001f930\U0001f3fe"
PREGNANT_WOMAN_DARK_SKIN_TONE = "\U0001f930\U0001f3ff"
PREGNANT_MAN = "\U0001fac3"
PREGNANT_MAN_LIGHT_SKIN_TONE = "\U0001fac3\U0001f3fb"
PREGNANT_MAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001fac3\U0001f3fc"
PREGNANT_MAN_MEDIUM_SKIN_TONE = "\U0001fac3\U0001f3fd"
PREGNANT_MAN_MEDIUM_DARK_SKIN_TONE = "\U0001fac3\U0001f3fe"
PREGNANT_MAN_DARK_SKIN_TONE = "\U0001fac3\U0001f3ff"
PREGNANT_PERSON = "\U0001fac4"
PREGNANT_PERSON_LIGHT_SKIN_TONE = "\U0001fac4\U0001f3fb"
PREGNANT_PERSON_MEDIUM_LIGHT_SKIN_TONE = "\U0001fac4\U0001f3fc"
PREGNANT_PERSON_MEDIUM_SKIN_TONE = "\U0001fac4\U0001f3fd"
PREGNANT_PERSON_MEDIUM_DARK_SKIN_TONE = "\U0001fac4\U0001f3fe"
PREGNANT_PERSON_DARK_SKIN_TONE = "\U0001fac4\U0001f3ff"
BREAST_FEEDING = "\U0001f931"
BREAST_FEEDING_LIGHT_SKIN_TONE = "\U0001f931\U0001f3fb"
BREAST_FEEDING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f931\U0001f3fc"
BREAST_FEEDING_MEDIUM_SKIN_TONE = "\U0001f931\U0001f3fd"
BREAST_FEEDING_MEDIUM_DARK_SKIN_TONE = "\U0001f931\U0001f3fe"
BREAST_FEEDING_DARK_SKIN_TONE = "\U0001f931\U0001f3ff"
# Feeding baby: woman/man/person base + ZWJ (U+200D) + baby bottle (U+1F37C).
WOMAN_FEEDING_BABY = "\U0001f469\u200d\U0001f37c"
WOMAN_FEEDING_BABY_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f37c"
WOMAN_FEEDING_BABY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f37c"
WOMAN_FEEDING_BABY_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f37c"
WOMAN_FEEDING_BABY_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f37c"
WOMAN_FEEDING_BABY_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f37c"
MAN_FEEDING_BABY = "\U0001f468\u200d\U0001f37c"
MAN_FEEDING_BABY_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f37c"
MAN_FEEDING_BABY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f37c"
MAN_FEEDING_BABY_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f37c"
MAN_FEEDING_BABY_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f37c"
MAN_FEEDING_BABY_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f37c"
PERSON_FEEDING_BABY = "\U0001f9d1\u200d\U0001f37c"
PERSON_FEEDING_BABY_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f37c"
PERSON_FEEDING_BABY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f37c"
PERSON_FEEDING_BABY_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f37c"
PERSON_FEEDING_BABY_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f37c"
PERSON_FEEDING_BABY_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f37c"
# Baby angel (U+1F47C), Santa (U+1F385), Mrs. Claus (U+1F936): single code
# points with skin-tone variants.
BABY_ANGEL = "\U0001f47c"
BABY_ANGEL_LIGHT_SKIN_TONE = "\U0001f47c\U0001f3fb"
BABY_ANGEL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f47c\U0001f3fc"
BABY_ANGEL_MEDIUM_SKIN_TONE = "\U0001f47c\U0001f3fd"
BABY_ANGEL_MEDIUM_DARK_SKIN_TONE = "\U0001f47c\U0001f3fe"
BABY_ANGEL_DARK_SKIN_TONE = "\U0001f47c\U0001f3ff"
SANTA_CLAUS = "\U0001f385"
SANTA_CLAUS_LIGHT_SKIN_TONE = "\U0001f385\U0001f3fb"
SANTA_CLAUS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f385\U0001f3fc"
SANTA_CLAUS_MEDIUM_SKIN_TONE = "\U0001f385\U0001f3fd"
SANTA_CLAUS_MEDIUM_DARK_SKIN_TONE = "\U0001f385\U0001f3fe"
SANTA_CLAUS_DARK_SKIN_TONE = "\U0001f385\U0001f3ff"
MRS_CLAUS = "\U0001f936"
MRS_CLAUS_LIGHT_SKIN_TONE = "\U0001f936\U0001f3fb"
MRS_CLAUS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f936\U0001f3fc"
MRS_CLAUS_MEDIUM_SKIN_TONE = "\U0001f936\U0001f3fd"
MRS_CLAUS_MEDIUM_DARK_SKIN_TONE = "\U0001f936\U0001f3fe"
MRS_CLAUS_DARK_SKIN_TONE = "\U0001f936\U0001f3ff"
# Mx. Claus (gender-neutral): person U+1F9D1 + ZWJ + Christmas tree U+1F384.
MX_CLAUS = "\U0001f9d1\u200d\U0001f384"
MX_CLAUS_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f384"
MX_CLAUS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f384"
MX_CLAUS_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f384"
MX_CLAUS_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f384"
MX_CLAUS_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f384"
# Fantasy persons. Each family has a dedicated base code point, skin-tone
# variants (modifier U+1F3FB..U+1F3FF appended), and gendered ZWJ sequences
# (base [+tone] + ZWJ U+200D + U+2642/U+2640 + VS16 U+FE0F).
# Superhero: base U+1F9B8.
SUPERHERO = "\U0001f9b8"
SUPERHERO_LIGHT_SKIN_TONE = "\U0001f9b8\U0001f3fb"
SUPERHERO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b8\U0001f3fc"
SUPERHERO_MEDIUM_SKIN_TONE = "\U0001f9b8\U0001f3fd"
SUPERHERO_MEDIUM_DARK_SKIN_TONE = "\U0001f9b8\U0001f3fe"
SUPERHERO_DARK_SKIN_TONE = "\U0001f9b8\U0001f3ff"
MAN_SUPERHERO = "\U0001f9b8\u200d\u2642\ufe0f"
MAN_SUPERHERO_LIGHT_SKIN_TONE = "\U0001f9b8\U0001f3fb\u200d\u2642\ufe0f"
MAN_SUPERHERO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b8\U0001f3fc\u200d\u2642\ufe0f"
MAN_SUPERHERO_MEDIUM_SKIN_TONE = "\U0001f9b8\U0001f3fd\u200d\u2642\ufe0f"
MAN_SUPERHERO_MEDIUM_DARK_SKIN_TONE = "\U0001f9b8\U0001f3fe\u200d\u2642\ufe0f"
MAN_SUPERHERO_DARK_SKIN_TONE = "\U0001f9b8\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_SUPERHERO = "\U0001f9b8\u200d\u2640\ufe0f"
WOMAN_SUPERHERO_LIGHT_SKIN_TONE = "\U0001f9b8\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_SUPERHERO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b8\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_SUPERHERO_MEDIUM_SKIN_TONE = "\U0001f9b8\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_SUPERHERO_MEDIUM_DARK_SKIN_TONE = "\U0001f9b8\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_SUPERHERO_DARK_SKIN_TONE = "\U0001f9b8\U0001f3ff\u200d\u2640\ufe0f"
# Supervillain: base U+1F9B9.
SUPERVILLAIN = "\U0001f9b9"
SUPERVILLAIN_LIGHT_SKIN_TONE = "\U0001f9b9\U0001f3fb"
SUPERVILLAIN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b9\U0001f3fc"
SUPERVILLAIN_MEDIUM_SKIN_TONE = "\U0001f9b9\U0001f3fd"
SUPERVILLAIN_MEDIUM_DARK_SKIN_TONE = "\U0001f9b9\U0001f3fe"
SUPERVILLAIN_DARK_SKIN_TONE = "\U0001f9b9\U0001f3ff"
MAN_SUPERVILLAIN = "\U0001f9b9\u200d\u2642\ufe0f"
MAN_SUPERVILLAIN_LIGHT_SKIN_TONE = "\U0001f9b9\U0001f3fb\u200d\u2642\ufe0f"
MAN_SUPERVILLAIN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b9\U0001f3fc\u200d\u2642\ufe0f"
MAN_SUPERVILLAIN_MEDIUM_SKIN_TONE = "\U0001f9b9\U0001f3fd\u200d\u2642\ufe0f"
MAN_SUPERVILLAIN_MEDIUM_DARK_SKIN_TONE = "\U0001f9b9\U0001f3fe\u200d\u2642\ufe0f"
MAN_SUPERVILLAIN_DARK_SKIN_TONE = "\U0001f9b9\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_SUPERVILLAIN = "\U0001f9b9\u200d\u2640\ufe0f"
WOMAN_SUPERVILLAIN_LIGHT_SKIN_TONE = "\U0001f9b9\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_SUPERVILLAIN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9b9\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_SUPERVILLAIN_MEDIUM_SKIN_TONE = "\U0001f9b9\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_SUPERVILLAIN_MEDIUM_DARK_SKIN_TONE = "\U0001f9b9\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_SUPERVILLAIN_DARK_SKIN_TONE = "\U0001f9b9\U0001f3ff\u200d\u2640\ufe0f"
# Mage: base U+1F9D9.
MAGE = "\U0001f9d9"
MAGE_LIGHT_SKIN_TONE = "\U0001f9d9\U0001f3fb"
MAGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d9\U0001f3fc"
MAGE_MEDIUM_SKIN_TONE = "\U0001f9d9\U0001f3fd"
MAGE_MEDIUM_DARK_SKIN_TONE = "\U0001f9d9\U0001f3fe"
MAGE_DARK_SKIN_TONE = "\U0001f9d9\U0001f3ff"
MAN_MAGE = "\U0001f9d9\u200d\u2642\ufe0f"
MAN_MAGE_LIGHT_SKIN_TONE = "\U0001f9d9\U0001f3fb\u200d\u2642\ufe0f"
MAN_MAGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d9\U0001f3fc\u200d\u2642\ufe0f"
MAN_MAGE_MEDIUM_SKIN_TONE = "\U0001f9d9\U0001f3fd\u200d\u2642\ufe0f"
MAN_MAGE_MEDIUM_DARK_SKIN_TONE = "\U0001f9d9\U0001f3fe\u200d\u2642\ufe0f"
MAN_MAGE_DARK_SKIN_TONE = "\U0001f9d9\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_MAGE = "\U0001f9d9\u200d\u2640\ufe0f"
WOMAN_MAGE_LIGHT_SKIN_TONE = "\U0001f9d9\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_MAGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d9\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_MAGE_MEDIUM_SKIN_TONE = "\U0001f9d9\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_MAGE_MEDIUM_DARK_SKIN_TONE = "\U0001f9d9\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_MAGE_DARK_SKIN_TONE = "\U0001f9d9\U0001f3ff\u200d\u2640\ufe0f"
# Fairy: base U+1F9DA.
FAIRY = "\U0001f9da"
FAIRY_LIGHT_SKIN_TONE = "\U0001f9da\U0001f3fb"
FAIRY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9da\U0001f3fc"
FAIRY_MEDIUM_SKIN_TONE = "\U0001f9da\U0001f3fd"
FAIRY_MEDIUM_DARK_SKIN_TONE = "\U0001f9da\U0001f3fe"
FAIRY_DARK_SKIN_TONE = "\U0001f9da\U0001f3ff"
MAN_FAIRY = "\U0001f9da\u200d\u2642\ufe0f"
MAN_FAIRY_LIGHT_SKIN_TONE = "\U0001f9da\U0001f3fb\u200d\u2642\ufe0f"
MAN_FAIRY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9da\U0001f3fc\u200d\u2642\ufe0f"
MAN_FAIRY_MEDIUM_SKIN_TONE = "\U0001f9da\U0001f3fd\u200d\u2642\ufe0f"
MAN_FAIRY_MEDIUM_DARK_SKIN_TONE = "\U0001f9da\U0001f3fe\u200d\u2642\ufe0f"
MAN_FAIRY_DARK_SKIN_TONE = "\U0001f9da\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_FAIRY = "\U0001f9da\u200d\u2640\ufe0f"
WOMAN_FAIRY_LIGHT_SKIN_TONE = "\U0001f9da\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_FAIRY_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9da\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_FAIRY_MEDIUM_SKIN_TONE = "\U0001f9da\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_FAIRY_MEDIUM_DARK_SKIN_TONE = "\U0001f9da\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_FAIRY_DARK_SKIN_TONE = "\U0001f9da\U0001f3ff\u200d\u2640\ufe0f"
# Vampire: base U+1F9DB.
VAMPIRE = "\U0001f9db"
VAMPIRE_LIGHT_SKIN_TONE = "\U0001f9db\U0001f3fb"
VAMPIRE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9db\U0001f3fc"
VAMPIRE_MEDIUM_SKIN_TONE = "\U0001f9db\U0001f3fd"
VAMPIRE_MEDIUM_DARK_SKIN_TONE = "\U0001f9db\U0001f3fe"
VAMPIRE_DARK_SKIN_TONE = "\U0001f9db\U0001f3ff"
MAN_VAMPIRE = "\U0001f9db\u200d\u2642\ufe0f"
MAN_VAMPIRE_LIGHT_SKIN_TONE = "\U0001f9db\U0001f3fb\u200d\u2642\ufe0f"
MAN_VAMPIRE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9db\U0001f3fc\u200d\u2642\ufe0f"
MAN_VAMPIRE_MEDIUM_SKIN_TONE = "\U0001f9db\U0001f3fd\u200d\u2642\ufe0f"
MAN_VAMPIRE_MEDIUM_DARK_SKIN_TONE = "\U0001f9db\U0001f3fe\u200d\u2642\ufe0f"
MAN_VAMPIRE_DARK_SKIN_TONE = "\U0001f9db\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_VAMPIRE = "\U0001f9db\u200d\u2640\ufe0f"
WOMAN_VAMPIRE_LIGHT_SKIN_TONE = "\U0001f9db\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_VAMPIRE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9db\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_VAMPIRE_MEDIUM_SKIN_TONE = "\U0001f9db\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_VAMPIRE_MEDIUM_DARK_SKIN_TONE = "\U0001f9db\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_VAMPIRE_DARK_SKIN_TONE = "\U0001f9db\U0001f3ff\u200d\u2640\ufe0f"
# Merperson: base U+1F9DC; the gendered forms are named MERMAN / MERMAID.
MERPERSON = "\U0001f9dc"
MERPERSON_LIGHT_SKIN_TONE = "\U0001f9dc\U0001f3fb"
MERPERSON_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9dc\U0001f3fc"
MERPERSON_MEDIUM_SKIN_TONE = "\U0001f9dc\U0001f3fd"
MERPERSON_MEDIUM_DARK_SKIN_TONE = "\U0001f9dc\U0001f3fe"
MERPERSON_DARK_SKIN_TONE = "\U0001f9dc\U0001f3ff"
MERMAN = "\U0001f9dc\u200d\u2642\ufe0f"
MERMAN_LIGHT_SKIN_TONE = "\U0001f9dc\U0001f3fb\u200d\u2642\ufe0f"
MERMAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9dc\U0001f3fc\u200d\u2642\ufe0f"
MERMAN_MEDIUM_SKIN_TONE = "\U0001f9dc\U0001f3fd\u200d\u2642\ufe0f"
MERMAN_MEDIUM_DARK_SKIN_TONE = "\U0001f9dc\U0001f3fe\u200d\u2642\ufe0f"
MERMAN_DARK_SKIN_TONE = "\U0001f9dc\U0001f3ff\u200d\u2642\ufe0f"
MERMAID = "\U0001f9dc\u200d\u2640\ufe0f"
MERMAID_LIGHT_SKIN_TONE = "\U0001f9dc\U0001f3fb\u200d\u2640\ufe0f"
MERMAID_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9dc\U0001f3fc\u200d\u2640\ufe0f"
MERMAID_MEDIUM_SKIN_TONE = "\U0001f9dc\U0001f3fd\u200d\u2640\ufe0f"
MERMAID_MEDIUM_DARK_SKIN_TONE = "\U0001f9dc\U0001f3fe\u200d\u2640\ufe0f"
MERMAID_DARK_SKIN_TONE = "\U0001f9dc\U0001f3ff\u200d\u2640\ufe0f"
# Elf: base U+1F9DD.
ELF = "\U0001f9dd"
ELF_LIGHT_SKIN_TONE = "\U0001f9dd\U0001f3fb"
ELF_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9dd\U0001f3fc"
ELF_MEDIUM_SKIN_TONE = "\U0001f9dd\U0001f3fd"
ELF_MEDIUM_DARK_SKIN_TONE = "\U0001f9dd\U0001f3fe"
ELF_DARK_SKIN_TONE = "\U0001f9dd\U0001f3ff"
MAN_ELF = "\U0001f9dd\u200d\u2642\ufe0f"
MAN_ELF_LIGHT_SKIN_TONE = "\U0001f9dd\U0001f3fb\u200d\u2642\ufe0f"
MAN_ELF_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9dd\U0001f3fc\u200d\u2642\ufe0f"
MAN_ELF_MEDIUM_SKIN_TONE = "\U0001f9dd\U0001f3fd\u200d\u2642\ufe0f"
MAN_ELF_MEDIUM_DARK_SKIN_TONE = "\U0001f9dd\U0001f3fe\u200d\u2642\ufe0f"
MAN_ELF_DARK_SKIN_TONE = "\U0001f9dd\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_ELF = "\U0001f9dd\u200d\u2640\ufe0f"
WOMAN_ELF_LIGHT_SKIN_TONE = "\U0001f9dd\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_ELF_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9dd\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_ELF_MEDIUM_SKIN_TONE = "\U0001f9dd\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_ELF_MEDIUM_DARK_SKIN_TONE = "\U0001f9dd\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_ELF_DARK_SKIN_TONE = "\U0001f9dd\U0001f3ff\u200d\u2640\ufe0f"
# Genie (U+1F9DE), zombie (U+1F9DF), troll (U+1F9CC): non-human appearance,
# so no skin-tone variants are defined for them.
GENIE = "\U0001f9de"
MAN_GENIE = "\U0001f9de\u200d\u2642\ufe0f"
WOMAN_GENIE = "\U0001f9de\u200d\u2640\ufe0f"
ZOMBIE = "\U0001f9df"
MAN_ZOMBIE = "\U0001f9df\u200d\u2642\ufe0f"
WOMAN_ZOMBIE = "\U0001f9df\u200d\u2640\ufe0f"
TROLL = "\U0001f9cc"
# Person activity/pose emoji: dedicated base code point, skin-tone variants,
# and gendered ZWJ sequences (base [+tone] + U+200D + U+2642/U+2640 + U+FE0F).
# Person getting massage: base U+1F486.
PERSON_GETTING_MASSAGE = "\U0001f486"
PERSON_GETTING_MASSAGE_LIGHT_SKIN_TONE = "\U0001f486\U0001f3fb"
PERSON_GETTING_MASSAGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f486\U0001f3fc"
PERSON_GETTING_MASSAGE_MEDIUM_SKIN_TONE = "\U0001f486\U0001f3fd"
PERSON_GETTING_MASSAGE_MEDIUM_DARK_SKIN_TONE = "\U0001f486\U0001f3fe"
PERSON_GETTING_MASSAGE_DARK_SKIN_TONE = "\U0001f486\U0001f3ff"
MAN_GETTING_MASSAGE = "\U0001f486\u200d\u2642\ufe0f"
MAN_GETTING_MASSAGE_LIGHT_SKIN_TONE = "\U0001f486\U0001f3fb\u200d\u2642\ufe0f"
MAN_GETTING_MASSAGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f486\U0001f3fc\u200d\u2642\ufe0f"
MAN_GETTING_MASSAGE_MEDIUM_SKIN_TONE = "\U0001f486\U0001f3fd\u200d\u2642\ufe0f"
MAN_GETTING_MASSAGE_MEDIUM_DARK_SKIN_TONE = "\U0001f486\U0001f3fe\u200d\u2642\ufe0f"
MAN_GETTING_MASSAGE_DARK_SKIN_TONE = "\U0001f486\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_GETTING_MASSAGE = "\U0001f486\u200d\u2640\ufe0f"
WOMAN_GETTING_MASSAGE_LIGHT_SKIN_TONE = "\U0001f486\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_GETTING_MASSAGE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f486\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_GETTING_MASSAGE_MEDIUM_SKIN_TONE = "\U0001f486\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_GETTING_MASSAGE_MEDIUM_DARK_SKIN_TONE = "\U0001f486\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_GETTING_MASSAGE_DARK_SKIN_TONE = "\U0001f486\U0001f3ff\u200d\u2640\ufe0f"
# Person getting haircut: base U+1F487.
PERSON_GETTING_HAIRCUT = "\U0001f487"
PERSON_GETTING_HAIRCUT_LIGHT_SKIN_TONE = "\U0001f487\U0001f3fb"
PERSON_GETTING_HAIRCUT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f487\U0001f3fc"
PERSON_GETTING_HAIRCUT_MEDIUM_SKIN_TONE = "\U0001f487\U0001f3fd"
PERSON_GETTING_HAIRCUT_MEDIUM_DARK_SKIN_TONE = "\U0001f487\U0001f3fe"
PERSON_GETTING_HAIRCUT_DARK_SKIN_TONE = "\U0001f487\U0001f3ff"
MAN_GETTING_HAIRCUT = "\U0001f487\u200d\u2642\ufe0f"
MAN_GETTING_HAIRCUT_LIGHT_SKIN_TONE = "\U0001f487\U0001f3fb\u200d\u2642\ufe0f"
MAN_GETTING_HAIRCUT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f487\U0001f3fc\u200d\u2642\ufe0f"
MAN_GETTING_HAIRCUT_MEDIUM_SKIN_TONE = "\U0001f487\U0001f3fd\u200d\u2642\ufe0f"
MAN_GETTING_HAIRCUT_MEDIUM_DARK_SKIN_TONE = "\U0001f487\U0001f3fe\u200d\u2642\ufe0f"
MAN_GETTING_HAIRCUT_DARK_SKIN_TONE = "\U0001f487\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_GETTING_HAIRCUT = "\U0001f487\u200d\u2640\ufe0f"
WOMAN_GETTING_HAIRCUT_LIGHT_SKIN_TONE = "\U0001f487\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_GETTING_HAIRCUT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f487\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_GETTING_HAIRCUT_MEDIUM_SKIN_TONE = "\U0001f487\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_GETTING_HAIRCUT_MEDIUM_DARK_SKIN_TONE = "\U0001f487\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_GETTING_HAIRCUT_DARK_SKIN_TONE = "\U0001f487\U0001f3ff\u200d\u2640\ufe0f"
# Person walking: base U+1F6B6.
PERSON_WALKING = "\U0001f6b6"
PERSON_WALKING_LIGHT_SKIN_TONE = "\U0001f6b6\U0001f3fb"
PERSON_WALKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b6\U0001f3fc"
PERSON_WALKING_MEDIUM_SKIN_TONE = "\U0001f6b6\U0001f3fd"
PERSON_WALKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b6\U0001f3fe"
PERSON_WALKING_DARK_SKIN_TONE = "\U0001f6b6\U0001f3ff"
MAN_WALKING = "\U0001f6b6\u200d\u2642\ufe0f"
MAN_WALKING_LIGHT_SKIN_TONE = "\U0001f6b6\U0001f3fb\u200d\u2642\ufe0f"
MAN_WALKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b6\U0001f3fc\u200d\u2642\ufe0f"
MAN_WALKING_MEDIUM_SKIN_TONE = "\U0001f6b6\U0001f3fd\u200d\u2642\ufe0f"
MAN_WALKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b6\U0001f3fe\u200d\u2642\ufe0f"
MAN_WALKING_DARK_SKIN_TONE = "\U0001f6b6\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_WALKING = "\U0001f6b6\u200d\u2640\ufe0f"
WOMAN_WALKING_LIGHT_SKIN_TONE = "\U0001f6b6\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_WALKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b6\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_WALKING_MEDIUM_SKIN_TONE = "\U0001f6b6\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_WALKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b6\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_WALKING_DARK_SKIN_TONE = "\U0001f6b6\U0001f3ff\u200d\u2640\ufe0f"
# Person standing: base U+1F9CD.
PERSON_STANDING = "\U0001f9cd"
PERSON_STANDING_LIGHT_SKIN_TONE = "\U0001f9cd\U0001f3fb"
PERSON_STANDING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9cd\U0001f3fc"
PERSON_STANDING_MEDIUM_SKIN_TONE = "\U0001f9cd\U0001f3fd"
PERSON_STANDING_MEDIUM_DARK_SKIN_TONE = "\U0001f9cd\U0001f3fe"
PERSON_STANDING_DARK_SKIN_TONE = "\U0001f9cd\U0001f3ff"
MAN_STANDING = "\U0001f9cd\u200d\u2642\ufe0f"
MAN_STANDING_LIGHT_SKIN_TONE = "\U0001f9cd\U0001f3fb\u200d\u2642\ufe0f"
MAN_STANDING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9cd\U0001f3fc\u200d\u2642\ufe0f"
MAN_STANDING_MEDIUM_SKIN_TONE = "\U0001f9cd\U0001f3fd\u200d\u2642\ufe0f"
MAN_STANDING_MEDIUM_DARK_SKIN_TONE = "\U0001f9cd\U0001f3fe\u200d\u2642\ufe0f"
MAN_STANDING_DARK_SKIN_TONE = "\U0001f9cd\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_STANDING = "\U0001f9cd\u200d\u2640\ufe0f"
WOMAN_STANDING_LIGHT_SKIN_TONE = "\U0001f9cd\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_STANDING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9cd\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_STANDING_MEDIUM_SKIN_TONE = "\U0001f9cd\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_STANDING_MEDIUM_DARK_SKIN_TONE = "\U0001f9cd\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_STANDING_DARK_SKIN_TONE = "\U0001f9cd\U0001f3ff\u200d\u2640\ufe0f"
# Person kneeling: base U+1F9CE.
PERSON_KNEELING = "\U0001f9ce"
PERSON_KNEELING_LIGHT_SKIN_TONE = "\U0001f9ce\U0001f3fb"
PERSON_KNEELING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9ce\U0001f3fc"
PERSON_KNEELING_MEDIUM_SKIN_TONE = "\U0001f9ce\U0001f3fd"
PERSON_KNEELING_MEDIUM_DARK_SKIN_TONE = "\U0001f9ce\U0001f3fe"
PERSON_KNEELING_DARK_SKIN_TONE = "\U0001f9ce\U0001f3ff"
MAN_KNEELING = "\U0001f9ce\u200d\u2642\ufe0f"
MAN_KNEELING_LIGHT_SKIN_TONE = "\U0001f9ce\U0001f3fb\u200d\u2642\ufe0f"
MAN_KNEELING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9ce\U0001f3fc\u200d\u2642\ufe0f"
MAN_KNEELING_MEDIUM_SKIN_TONE = "\U0001f9ce\U0001f3fd\u200d\u2642\ufe0f"
MAN_KNEELING_MEDIUM_DARK_SKIN_TONE = "\U0001f9ce\U0001f3fe\u200d\u2642\ufe0f"
MAN_KNEELING_DARK_SKIN_TONE = "\U0001f9ce\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_KNEELING = "\U0001f9ce\u200d\u2640\ufe0f"
WOMAN_KNEELING_LIGHT_SKIN_TONE = "\U0001f9ce\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_KNEELING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9ce\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_KNEELING_MEDIUM_SKIN_TONE = "\U0001f9ce\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_KNEELING_MEDIUM_DARK_SKIN_TONE = "\U0001f9ce\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_KNEELING_DARK_SKIN_TONE = "\U0001f9ce\U0001f3ff\u200d\u2640\ufe0f"
# Accessibility emoji: person/man/woman base (U+1F9D1 / U+1F468 / U+1F469,
# optionally skin-toned) + ZWJ (U+200D) + accessibility object.
# With white cane: joined with probing cane (U+1F9AF).
PERSON_WITH_WHITE_CANE = "\U0001f9d1\u200d\U0001f9af"
PERSON_WITH_WHITE_CANE_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f9af"
PERSON_WITH_WHITE_CANE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f9af"
PERSON_WITH_WHITE_CANE_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f9af"
PERSON_WITH_WHITE_CANE_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f9af"
PERSON_WITH_WHITE_CANE_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f9af"
MAN_WITH_WHITE_CANE = "\U0001f468\u200d\U0001f9af"
MAN_WITH_WHITE_CANE_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f9af"
MAN_WITH_WHITE_CANE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f9af"
MAN_WITH_WHITE_CANE_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f9af"
MAN_WITH_WHITE_CANE_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f9af"
MAN_WITH_WHITE_CANE_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f9af"
WOMAN_WITH_WHITE_CANE = "\U0001f469\u200d\U0001f9af"
WOMAN_WITH_WHITE_CANE_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f9af"
WOMAN_WITH_WHITE_CANE_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f9af"
WOMAN_WITH_WHITE_CANE_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f9af"
WOMAN_WITH_WHITE_CANE_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f9af"
WOMAN_WITH_WHITE_CANE_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f9af"
# In motorized wheelchair: joined with motorized wheelchair (U+1F9BC).
PERSON_IN_MOTORIZED_WHEELCHAIR = "\U0001f9d1\u200d\U0001f9bc"
PERSON_IN_MOTORIZED_WHEELCHAIR_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f9bc"
PERSON_IN_MOTORIZED_WHEELCHAIR_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f9bc"
PERSON_IN_MOTORIZED_WHEELCHAIR_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f9bc"
PERSON_IN_MOTORIZED_WHEELCHAIR_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f9bc"
PERSON_IN_MOTORIZED_WHEELCHAIR_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f9bc"
MAN_IN_MOTORIZED_WHEELCHAIR = "\U0001f468\u200d\U0001f9bc"
MAN_IN_MOTORIZED_WHEELCHAIR_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f9bc"
MAN_IN_MOTORIZED_WHEELCHAIR_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f9bc"
MAN_IN_MOTORIZED_WHEELCHAIR_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f9bc"
MAN_IN_MOTORIZED_WHEELCHAIR_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f9bc"
MAN_IN_MOTORIZED_WHEELCHAIR_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f9bc"
WOMAN_IN_MOTORIZED_WHEELCHAIR = "\U0001f469\u200d\U0001f9bc"
WOMAN_IN_MOTORIZED_WHEELCHAIR_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f9bc"
WOMAN_IN_MOTORIZED_WHEELCHAIR_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f9bc"
WOMAN_IN_MOTORIZED_WHEELCHAIR_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f9bc"
WOMAN_IN_MOTORIZED_WHEELCHAIR_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f9bc"
WOMAN_IN_MOTORIZED_WHEELCHAIR_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f9bc"
# In manual wheelchair: joined with manual wheelchair (U+1F9BD).
PERSON_IN_MANUAL_WHEELCHAIR = "\U0001f9d1\u200d\U0001f9bd"
PERSON_IN_MANUAL_WHEELCHAIR_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f9bd"
PERSON_IN_MANUAL_WHEELCHAIR_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f9bd"
PERSON_IN_MANUAL_WHEELCHAIR_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f9bd"
PERSON_IN_MANUAL_WHEELCHAIR_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f9bd"
PERSON_IN_MANUAL_WHEELCHAIR_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f9bd"
MAN_IN_MANUAL_WHEELCHAIR = "\U0001f468\u200d\U0001f9bd"
MAN_IN_MANUAL_WHEELCHAIR_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f9bd"
MAN_IN_MANUAL_WHEELCHAIR_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\U0001f9bd"
MAN_IN_MANUAL_WHEELCHAIR_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f9bd"
MAN_IN_MANUAL_WHEELCHAIR_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\U0001f9bd"
MAN_IN_MANUAL_WHEELCHAIR_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f9bd"
WOMAN_IN_MANUAL_WHEELCHAIR = "\U0001f469\u200d\U0001f9bd"
WOMAN_IN_MANUAL_WHEELCHAIR_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f9bd"
WOMAN_IN_MANUAL_WHEELCHAIR_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\U0001f9bd"
WOMAN_IN_MANUAL_WHEELCHAIR_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f9bd"
WOMAN_IN_MANUAL_WHEELCHAIR_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\U0001f9bd"
WOMAN_IN_MANUAL_WHEELCHAIR_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f9bd"
PERSON_RUNNING = "\U0001f3c3"
PERSON_RUNNING_LIGHT_SKIN_TONE = "\U0001f3c3\U0001f3fb"
PERSON_RUNNING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c3\U0001f3fc"
PERSON_RUNNING_MEDIUM_SKIN_TONE = "\U0001f3c3\U0001f3fd"
PERSON_RUNNING_MEDIUM_DARK_SKIN_TONE = "\U0001f3c3\U0001f3fe"
PERSON_RUNNING_DARK_SKIN_TONE = "\U0001f3c3\U0001f3ff"
MAN_RUNNING = "\U0001f3c3\u200d\u2642\ufe0f"
MAN_RUNNING_LIGHT_SKIN_TONE = "\U0001f3c3\U0001f3fb\u200d\u2642\ufe0f"
MAN_RUNNING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c3\U0001f3fc\u200d\u2642\ufe0f"
MAN_RUNNING_MEDIUM_SKIN_TONE = "\U0001f3c3\U0001f3fd\u200d\u2642\ufe0f"
MAN_RUNNING_MEDIUM_DARK_SKIN_TONE = "\U0001f3c3\U0001f3fe\u200d\u2642\ufe0f"
MAN_RUNNING_DARK_SKIN_TONE = "\U0001f3c3\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_RUNNING = "\U0001f3c3\u200d\u2640\ufe0f"
WOMAN_RUNNING_LIGHT_SKIN_TONE = "\U0001f3c3\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_RUNNING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c3\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_RUNNING_MEDIUM_SKIN_TONE = "\U0001f3c3\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_RUNNING_MEDIUM_DARK_SKIN_TONE = "\U0001f3c3\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_RUNNING_DARK_SKIN_TONE = "\U0001f3c3\U0001f3ff\u200d\u2640\ufe0f"
WOMAN_DANCING = "\U0001f483"
WOMAN_DANCING_LIGHT_SKIN_TONE = "\U0001f483\U0001f3fb"
WOMAN_DANCING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f483\U0001f3fc"
WOMAN_DANCING_MEDIUM_SKIN_TONE = "\U0001f483\U0001f3fd"
WOMAN_DANCING_MEDIUM_DARK_SKIN_TONE = "\U0001f483\U0001f3fe"
WOMAN_DANCING_DARK_SKIN_TONE = "\U0001f483\U0001f3ff"
MAN_DANCING = "\U0001f57a"
MAN_DANCING_LIGHT_SKIN_TONE = "\U0001f57a\U0001f3fb"
MAN_DANCING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f57a\U0001f3fc"
MAN_DANCING_MEDIUM_SKIN_TONE = "\U0001f57a\U0001f3fd"
MAN_DANCING_MEDIUM_DARK_SKIN_TONE = "\U0001f57a\U0001f3fe"
MAN_DANCING_DARK_SKIN_TONE = "\U0001f57a\U0001f3ff"
PERSON_IN_SUIT_LEVITATING = "\U0001f574\ufe0f"
PERSON_IN_SUIT_LEVITATING_LIGHT_SKIN_TONE = "\U0001f574\U0001f3fb"
PERSON_IN_SUIT_LEVITATING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f574\U0001f3fc"
PERSON_IN_SUIT_LEVITATING_MEDIUM_SKIN_TONE = "\U0001f574\U0001f3fd"
PERSON_IN_SUIT_LEVITATING_MEDIUM_DARK_SKIN_TONE = "\U0001f574\U0001f3fe"
PERSON_IN_SUIT_LEVITATING_DARK_SKIN_TONE = "\U0001f574\U0001f3ff"
PEOPLE_WITH_BUNNY_EARS = "\U0001f46f"
MEN_WITH_BUNNY_EARS = "\U0001f46f\u200d\u2642\ufe0f"
WOMEN_WITH_BUNNY_EARS = "\U0001f46f\u200d\u2640\ufe0f"
PERSON_IN_STEAMY_ROOM = "\U0001f9d6"
PERSON_IN_STEAMY_ROOM_LIGHT_SKIN_TONE = "\U0001f9d6\U0001f3fb"
PERSON_IN_STEAMY_ROOM_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d6\U0001f3fc"
PERSON_IN_STEAMY_ROOM_MEDIUM_SKIN_TONE = "\U0001f9d6\U0001f3fd"
PERSON_IN_STEAMY_ROOM_MEDIUM_DARK_SKIN_TONE = "\U0001f9d6\U0001f3fe"
PERSON_IN_STEAMY_ROOM_DARK_SKIN_TONE = "\U0001f9d6\U0001f3ff"
MAN_IN_STEAMY_ROOM = "\U0001f9d6\u200d\u2642\ufe0f"
MAN_IN_STEAMY_ROOM_LIGHT_SKIN_TONE = "\U0001f9d6\U0001f3fb\u200d\u2642\ufe0f"
MAN_IN_STEAMY_ROOM_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d6\U0001f3fc\u200d\u2642\ufe0f"
MAN_IN_STEAMY_ROOM_MEDIUM_SKIN_TONE = "\U0001f9d6\U0001f3fd\u200d\u2642\ufe0f"
MAN_IN_STEAMY_ROOM_MEDIUM_DARK_SKIN_TONE = "\U0001f9d6\U0001f3fe\u200d\u2642\ufe0f"
MAN_IN_STEAMY_ROOM_DARK_SKIN_TONE = "\U0001f9d6\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_IN_STEAMY_ROOM = "\U0001f9d6\u200d\u2640\ufe0f"
WOMAN_IN_STEAMY_ROOM_LIGHT_SKIN_TONE = "\U0001f9d6\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_IN_STEAMY_ROOM_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d6\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_IN_STEAMY_ROOM_MEDIUM_SKIN_TONE = "\U0001f9d6\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_IN_STEAMY_ROOM_MEDIUM_DARK_SKIN_TONE = "\U0001f9d6\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_IN_STEAMY_ROOM_DARK_SKIN_TONE = "\U0001f9d6\U0001f3ff\u200d\u2640\ufe0f"
PERSON_CLIMBING = "\U0001f9d7"
PERSON_CLIMBING_LIGHT_SKIN_TONE = "\U0001f9d7\U0001f3fb"
PERSON_CLIMBING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d7\U0001f3fc"
PERSON_CLIMBING_MEDIUM_SKIN_TONE = "\U0001f9d7\U0001f3fd"
PERSON_CLIMBING_MEDIUM_DARK_SKIN_TONE = "\U0001f9d7\U0001f3fe"
PERSON_CLIMBING_DARK_SKIN_TONE = "\U0001f9d7\U0001f3ff"
MAN_CLIMBING = "\U0001f9d7\u200d\u2642\ufe0f"
MAN_CLIMBING_LIGHT_SKIN_TONE = "\U0001f9d7\U0001f3fb\u200d\u2642\ufe0f"
MAN_CLIMBING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d7\U0001f3fc\u200d\u2642\ufe0f"
MAN_CLIMBING_MEDIUM_SKIN_TONE = "\U0001f9d7\U0001f3fd\u200d\u2642\ufe0f"
MAN_CLIMBING_MEDIUM_DARK_SKIN_TONE = "\U0001f9d7\U0001f3fe\u200d\u2642\ufe0f"
MAN_CLIMBING_DARK_SKIN_TONE = "\U0001f9d7\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_CLIMBING = "\U0001f9d7\u200d\u2640\ufe0f"
WOMAN_CLIMBING_LIGHT_SKIN_TONE = "\U0001f9d7\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_CLIMBING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d7\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_CLIMBING_MEDIUM_SKIN_TONE = "\U0001f9d7\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_CLIMBING_MEDIUM_DARK_SKIN_TONE = "\U0001f9d7\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_CLIMBING_DARK_SKIN_TONE = "\U0001f9d7\U0001f3ff\u200d\u2640\ufe0f"
PERSON_FENCING = "\U0001f93a"
HORSE_RACING = "\U0001f3c7"
HORSE_RACING_LIGHT_SKIN_TONE = "\U0001f3c7\U0001f3fb"
HORSE_RACING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c7\U0001f3fc"
HORSE_RACING_MEDIUM_SKIN_TONE = "\U0001f3c7\U0001f3fd"
HORSE_RACING_MEDIUM_DARK_SKIN_TONE = "\U0001f3c7\U0001f3fe"
HORSE_RACING_DARK_SKIN_TONE = "\U0001f3c7\U0001f3ff"
SKIER = "\u26f7\ufe0f"
SNOWBOARDER = "\U0001f3c2"
SNOWBOARDER_LIGHT_SKIN_TONE = "\U0001f3c2\U0001f3fb"
SNOWBOARDER_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c2\U0001f3fc"
SNOWBOARDER_MEDIUM_SKIN_TONE = "\U0001f3c2\U0001f3fd"
SNOWBOARDER_MEDIUM_DARK_SKIN_TONE = "\U0001f3c2\U0001f3fe"
SNOWBOARDER_DARK_SKIN_TONE = "\U0001f3c2\U0001f3ff"
PERSON_GOLFING = "\U0001f3cc\ufe0f"
PERSON_GOLFING_LIGHT_SKIN_TONE = "\U0001f3cc\U0001f3fb"
PERSON_GOLFING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3cc\U0001f3fc"
PERSON_GOLFING_MEDIUM_SKIN_TONE = "\U0001f3cc\U0001f3fd"
PERSON_GOLFING_MEDIUM_DARK_SKIN_TONE = "\U0001f3cc\U0001f3fe"
PERSON_GOLFING_DARK_SKIN_TONE = "\U0001f3cc\U0001f3ff"
MAN_GOLFING = "\U0001f3cc\ufe0f\u200d\u2642\ufe0f"
MAN_GOLFING_LIGHT_SKIN_TONE = "\U0001f3cc\U0001f3fb\u200d\u2642\ufe0f"
MAN_GOLFING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3cc\U0001f3fc\u200d\u2642\ufe0f"
MAN_GOLFING_MEDIUM_SKIN_TONE = "\U0001f3cc\U0001f3fd\u200d\u2642\ufe0f"
MAN_GOLFING_MEDIUM_DARK_SKIN_TONE = "\U0001f3cc\U0001f3fe\u200d\u2642\ufe0f"
MAN_GOLFING_DARK_SKIN_TONE = "\U0001f3cc\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_GOLFING = "\U0001f3cc\ufe0f\u200d\u2640\ufe0f"
WOMAN_GOLFING_LIGHT_SKIN_TONE = "\U0001f3cc\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_GOLFING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3cc\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_GOLFING_MEDIUM_SKIN_TONE = "\U0001f3cc\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_GOLFING_MEDIUM_DARK_SKIN_TONE = "\U0001f3cc\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_GOLFING_DARK_SKIN_TONE = "\U0001f3cc\U0001f3ff\u200d\u2640\ufe0f"
PERSON_SURFING = "\U0001f3c4"
PERSON_SURFING_LIGHT_SKIN_TONE = "\U0001f3c4\U0001f3fb"
PERSON_SURFING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c4\U0001f3fc"
PERSON_SURFING_MEDIUM_SKIN_TONE = "\U0001f3c4\U0001f3fd"
PERSON_SURFING_MEDIUM_DARK_SKIN_TONE = "\U0001f3c4\U0001f3fe"
PERSON_SURFING_DARK_SKIN_TONE = "\U0001f3c4\U0001f3ff"
MAN_SURFING = "\U0001f3c4\u200d\u2642\ufe0f"
MAN_SURFING_LIGHT_SKIN_TONE = "\U0001f3c4\U0001f3fb\u200d\u2642\ufe0f"
MAN_SURFING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c4\U0001f3fc\u200d\u2642\ufe0f"
MAN_SURFING_MEDIUM_SKIN_TONE = "\U0001f3c4\U0001f3fd\u200d\u2642\ufe0f"
MAN_SURFING_MEDIUM_DARK_SKIN_TONE = "\U0001f3c4\U0001f3fe\u200d\u2642\ufe0f"
MAN_SURFING_DARK_SKIN_TONE = "\U0001f3c4\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_SURFING = "\U0001f3c4\u200d\u2640\ufe0f"
WOMAN_SURFING_LIGHT_SKIN_TONE = "\U0001f3c4\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_SURFING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3c4\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_SURFING_MEDIUM_SKIN_TONE = "\U0001f3c4\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_SURFING_MEDIUM_DARK_SKIN_TONE = "\U0001f3c4\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_SURFING_DARK_SKIN_TONE = "\U0001f3c4\U0001f3ff\u200d\u2640\ufe0f"
PERSON_ROWING_BOAT = "\U0001f6a3"
PERSON_ROWING_BOAT_LIGHT_SKIN_TONE = "\U0001f6a3\U0001f3fb"
PERSON_ROWING_BOAT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6a3\U0001f3fc"
PERSON_ROWING_BOAT_MEDIUM_SKIN_TONE = "\U0001f6a3\U0001f3fd"
PERSON_ROWING_BOAT_MEDIUM_DARK_SKIN_TONE = "\U0001f6a3\U0001f3fe"
PERSON_ROWING_BOAT_DARK_SKIN_TONE = "\U0001f6a3\U0001f3ff"
MAN_ROWING_BOAT = "\U0001f6a3\u200d\u2642\ufe0f"
MAN_ROWING_BOAT_LIGHT_SKIN_TONE = "\U0001f6a3\U0001f3fb\u200d\u2642\ufe0f"
MAN_ROWING_BOAT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6a3\U0001f3fc\u200d\u2642\ufe0f"
MAN_ROWING_BOAT_MEDIUM_SKIN_TONE = "\U0001f6a3\U0001f3fd\u200d\u2642\ufe0f"
MAN_ROWING_BOAT_MEDIUM_DARK_SKIN_TONE = "\U0001f6a3\U0001f3fe\u200d\u2642\ufe0f"
MAN_ROWING_BOAT_DARK_SKIN_TONE = "\U0001f6a3\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_ROWING_BOAT = "\U0001f6a3\u200d\u2640\ufe0f"
WOMAN_ROWING_BOAT_LIGHT_SKIN_TONE = "\U0001f6a3\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_ROWING_BOAT_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6a3\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_ROWING_BOAT_MEDIUM_SKIN_TONE = "\U0001f6a3\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_ROWING_BOAT_MEDIUM_DARK_SKIN_TONE = "\U0001f6a3\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_ROWING_BOAT_DARK_SKIN_TONE = "\U0001f6a3\U0001f3ff\u200d\u2640\ufe0f"
PERSON_SWIMMING = "\U0001f3ca"
PERSON_SWIMMING_LIGHT_SKIN_TONE = "\U0001f3ca\U0001f3fb"
PERSON_SWIMMING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3ca\U0001f3fc"
PERSON_SWIMMING_MEDIUM_SKIN_TONE = "\U0001f3ca\U0001f3fd"
PERSON_SWIMMING_MEDIUM_DARK_SKIN_TONE = "\U0001f3ca\U0001f3fe"
PERSON_SWIMMING_DARK_SKIN_TONE = "\U0001f3ca\U0001f3ff"
MAN_SWIMMING = "\U0001f3ca\u200d\u2642\ufe0f"
MAN_SWIMMING_LIGHT_SKIN_TONE = "\U0001f3ca\U0001f3fb\u200d\u2642\ufe0f"
MAN_SWIMMING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3ca\U0001f3fc\u200d\u2642\ufe0f"
MAN_SWIMMING_MEDIUM_SKIN_TONE = "\U0001f3ca\U0001f3fd\u200d\u2642\ufe0f"
MAN_SWIMMING_MEDIUM_DARK_SKIN_TONE = "\U0001f3ca\U0001f3fe\u200d\u2642\ufe0f"
MAN_SWIMMING_DARK_SKIN_TONE = "\U0001f3ca\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_SWIMMING = "\U0001f3ca\u200d\u2640\ufe0f"
WOMAN_SWIMMING_LIGHT_SKIN_TONE = "\U0001f3ca\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_SWIMMING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3ca\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_SWIMMING_MEDIUM_SKIN_TONE = "\U0001f3ca\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_SWIMMING_MEDIUM_DARK_SKIN_TONE = "\U0001f3ca\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_SWIMMING_DARK_SKIN_TONE = "\U0001f3ca\U0001f3ff\u200d\u2640\ufe0f"
PERSON_BOUNCING_BALL = "\u26f9\ufe0f"
PERSON_BOUNCING_BALL_LIGHT_SKIN_TONE = "\u26f9\U0001f3fb"
PERSON_BOUNCING_BALL_MEDIUM_LIGHT_SKIN_TONE = "\u26f9\U0001f3fc"
PERSON_BOUNCING_BALL_MEDIUM_SKIN_TONE = "\u26f9\U0001f3fd"
PERSON_BOUNCING_BALL_MEDIUM_DARK_SKIN_TONE = "\u26f9\U0001f3fe"
PERSON_BOUNCING_BALL_DARK_SKIN_TONE = "\u26f9\U0001f3ff"
MAN_BOUNCING_BALL = "\u26f9\ufe0f\u200d\u2642\ufe0f"
MAN_BOUNCING_BALL_LIGHT_SKIN_TONE = "\u26f9\U0001f3fb\u200d\u2642\ufe0f"
MAN_BOUNCING_BALL_MEDIUM_LIGHT_SKIN_TONE = "\u26f9\U0001f3fc\u200d\u2642\ufe0f"
MAN_BOUNCING_BALL_MEDIUM_SKIN_TONE = "\u26f9\U0001f3fd\u200d\u2642\ufe0f"
MAN_BOUNCING_BALL_MEDIUM_DARK_SKIN_TONE = "\u26f9\U0001f3fe\u200d\u2642\ufe0f"
MAN_BOUNCING_BALL_DARK_SKIN_TONE = "\u26f9\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_BOUNCING_BALL = "\u26f9\ufe0f\u200d\u2640\ufe0f"
WOMAN_BOUNCING_BALL_LIGHT_SKIN_TONE = "\u26f9\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_BOUNCING_BALL_MEDIUM_LIGHT_SKIN_TONE = "\u26f9\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_BOUNCING_BALL_MEDIUM_SKIN_TONE = "\u26f9\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_BOUNCING_BALL_MEDIUM_DARK_SKIN_TONE = "\u26f9\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_BOUNCING_BALL_DARK_SKIN_TONE = "\u26f9\U0001f3ff\u200d\u2640\ufe0f"
PERSON_LIFTING_WEIGHTS = "\U0001f3cb\ufe0f"
PERSON_LIFTING_WEIGHTS_LIGHT_SKIN_TONE = "\U0001f3cb\U0001f3fb"
PERSON_LIFTING_WEIGHTS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3cb\U0001f3fc"
PERSON_LIFTING_WEIGHTS_MEDIUM_SKIN_TONE = "\U0001f3cb\U0001f3fd"
PERSON_LIFTING_WEIGHTS_MEDIUM_DARK_SKIN_TONE = "\U0001f3cb\U0001f3fe"
PERSON_LIFTING_WEIGHTS_DARK_SKIN_TONE = "\U0001f3cb\U0001f3ff"
MAN_LIFTING_WEIGHTS = "\U0001f3cb\ufe0f\u200d\u2642\ufe0f"
MAN_LIFTING_WEIGHTS_LIGHT_SKIN_TONE = "\U0001f3cb\U0001f3fb\u200d\u2642\ufe0f"
MAN_LIFTING_WEIGHTS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3cb\U0001f3fc\u200d\u2642\ufe0f"
MAN_LIFTING_WEIGHTS_MEDIUM_SKIN_TONE = "\U0001f3cb\U0001f3fd\u200d\u2642\ufe0f"
MAN_LIFTING_WEIGHTS_MEDIUM_DARK_SKIN_TONE = "\U0001f3cb\U0001f3fe\u200d\u2642\ufe0f"
MAN_LIFTING_WEIGHTS_DARK_SKIN_TONE = "\U0001f3cb\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_LIFTING_WEIGHTS = "\U0001f3cb\ufe0f\u200d\u2640\ufe0f"
WOMAN_LIFTING_WEIGHTS_LIGHT_SKIN_TONE = "\U0001f3cb\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_LIFTING_WEIGHTS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f3cb\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_LIFTING_WEIGHTS_MEDIUM_SKIN_TONE = "\U0001f3cb\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_LIFTING_WEIGHTS_MEDIUM_DARK_SKIN_TONE = "\U0001f3cb\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_LIFTING_WEIGHTS_DARK_SKIN_TONE = "\U0001f3cb\U0001f3ff\u200d\u2640\ufe0f"
PERSON_BIKING = "\U0001f6b4"
PERSON_BIKING_LIGHT_SKIN_TONE = "\U0001f6b4\U0001f3fb"
PERSON_BIKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b4\U0001f3fc"
PERSON_BIKING_MEDIUM_SKIN_TONE = "\U0001f6b4\U0001f3fd"
PERSON_BIKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b4\U0001f3fe"
PERSON_BIKING_DARK_SKIN_TONE = "\U0001f6b4\U0001f3ff"
MAN_BIKING = "\U0001f6b4\u200d\u2642\ufe0f"
MAN_BIKING_LIGHT_SKIN_TONE = "\U0001f6b4\U0001f3fb\u200d\u2642\ufe0f"
MAN_BIKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b4\U0001f3fc\u200d\u2642\ufe0f"
MAN_BIKING_MEDIUM_SKIN_TONE = "\U0001f6b4\U0001f3fd\u200d\u2642\ufe0f"
MAN_BIKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b4\U0001f3fe\u200d\u2642\ufe0f"
MAN_BIKING_DARK_SKIN_TONE = "\U0001f6b4\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_BIKING = "\U0001f6b4\u200d\u2640\ufe0f"
WOMAN_BIKING_LIGHT_SKIN_TONE = "\U0001f6b4\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_BIKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b4\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_BIKING_MEDIUM_SKIN_TONE = "\U0001f6b4\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_BIKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b4\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_BIKING_DARK_SKIN_TONE = "\U0001f6b4\U0001f3ff\u200d\u2640\ufe0f"
PERSON_MOUNTAIN_BIKING = "\U0001f6b5"
PERSON_MOUNTAIN_BIKING_LIGHT_SKIN_TONE = "\U0001f6b5\U0001f3fb"
PERSON_MOUNTAIN_BIKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b5\U0001f3fc"
PERSON_MOUNTAIN_BIKING_MEDIUM_SKIN_TONE = "\U0001f6b5\U0001f3fd"
PERSON_MOUNTAIN_BIKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b5\U0001f3fe"
PERSON_MOUNTAIN_BIKING_DARK_SKIN_TONE = "\U0001f6b5\U0001f3ff"
MAN_MOUNTAIN_BIKING = "\U0001f6b5\u200d\u2642\ufe0f"
MAN_MOUNTAIN_BIKING_LIGHT_SKIN_TONE = "\U0001f6b5\U0001f3fb\u200d\u2642\ufe0f"
MAN_MOUNTAIN_BIKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b5\U0001f3fc\u200d\u2642\ufe0f"
MAN_MOUNTAIN_BIKING_MEDIUM_SKIN_TONE = "\U0001f6b5\U0001f3fd\u200d\u2642\ufe0f"
MAN_MOUNTAIN_BIKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b5\U0001f3fe\u200d\u2642\ufe0f"
MAN_MOUNTAIN_BIKING_DARK_SKIN_TONE = "\U0001f6b5\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_MOUNTAIN_BIKING = "\U0001f6b5\u200d\u2640\ufe0f"
WOMAN_MOUNTAIN_BIKING_LIGHT_SKIN_TONE = "\U0001f6b5\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_MOUNTAIN_BIKING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6b5\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_MOUNTAIN_BIKING_MEDIUM_SKIN_TONE = "\U0001f6b5\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_MOUNTAIN_BIKING_MEDIUM_DARK_SKIN_TONE = "\U0001f6b5\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_MOUNTAIN_BIKING_DARK_SKIN_TONE = "\U0001f6b5\U0001f3ff\u200d\u2640\ufe0f"
PERSON_CARTWHEELING = "\U0001f938"
PERSON_CARTWHEELING_LIGHT_SKIN_TONE = "\U0001f938\U0001f3fb"
PERSON_CARTWHEELING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f938\U0001f3fc"
PERSON_CARTWHEELING_MEDIUM_SKIN_TONE = "\U0001f938\U0001f3fd"
PERSON_CARTWHEELING_MEDIUM_DARK_SKIN_TONE = "\U0001f938\U0001f3fe"
PERSON_CARTWHEELING_DARK_SKIN_TONE = "\U0001f938\U0001f3ff"
MAN_CARTWHEELING = "\U0001f938\u200d\u2642\ufe0f"
MAN_CARTWHEELING_LIGHT_SKIN_TONE = "\U0001f938\U0001f3fb\u200d\u2642\ufe0f"
MAN_CARTWHEELING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f938\U0001f3fc\u200d\u2642\ufe0f"
MAN_CARTWHEELING_MEDIUM_SKIN_TONE = "\U0001f938\U0001f3fd\u200d\u2642\ufe0f"
MAN_CARTWHEELING_MEDIUM_DARK_SKIN_TONE = "\U0001f938\U0001f3fe\u200d\u2642\ufe0f"
MAN_CARTWHEELING_DARK_SKIN_TONE = "\U0001f938\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_CARTWHEELING = "\U0001f938\u200d\u2640\ufe0f"
WOMAN_CARTWHEELING_LIGHT_SKIN_TONE = "\U0001f938\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_CARTWHEELING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f938\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_CARTWHEELING_MEDIUM_SKIN_TONE = "\U0001f938\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_CARTWHEELING_MEDIUM_DARK_SKIN_TONE = "\U0001f938\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_CARTWHEELING_DARK_SKIN_TONE = "\U0001f938\U0001f3ff\u200d\u2640\ufe0f"
PEOPLE_WRESTLING = "\U0001f93c"
MEN_WRESTLING = "\U0001f93c\u200d\u2642\ufe0f"
WOMEN_WRESTLING = "\U0001f93c\u200d\u2640\ufe0f"
PERSON_PLAYING_WATER_POLO = "\U0001f93d"
PERSON_PLAYING_WATER_POLO_LIGHT_SKIN_TONE = "\U0001f93d\U0001f3fb"
PERSON_PLAYING_WATER_POLO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f93d\U0001f3fc"
PERSON_PLAYING_WATER_POLO_MEDIUM_SKIN_TONE = "\U0001f93d\U0001f3fd"
PERSON_PLAYING_WATER_POLO_MEDIUM_DARK_SKIN_TONE = "\U0001f93d\U0001f3fe"
PERSON_PLAYING_WATER_POLO_DARK_SKIN_TONE = "\U0001f93d\U0001f3ff"
MAN_PLAYING_WATER_POLO = "\U0001f93d\u200d\u2642\ufe0f"
MAN_PLAYING_WATER_POLO_LIGHT_SKIN_TONE = "\U0001f93d\U0001f3fb\u200d\u2642\ufe0f"
MAN_PLAYING_WATER_POLO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f93d\U0001f3fc\u200d\u2642\ufe0f"
MAN_PLAYING_WATER_POLO_MEDIUM_SKIN_TONE = "\U0001f93d\U0001f3fd\u200d\u2642\ufe0f"
MAN_PLAYING_WATER_POLO_MEDIUM_DARK_SKIN_TONE = "\U0001f93d\U0001f3fe\u200d\u2642\ufe0f"
MAN_PLAYING_WATER_POLO_DARK_SKIN_TONE = "\U0001f93d\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_PLAYING_WATER_POLO = "\U0001f93d\u200d\u2640\ufe0f"
WOMAN_PLAYING_WATER_POLO_LIGHT_SKIN_TONE = "\U0001f93d\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_PLAYING_WATER_POLO_MEDIUM_LIGHT_SKIN_TONE = "\U0001f93d\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_PLAYING_WATER_POLO_MEDIUM_SKIN_TONE = "\U0001f93d\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_PLAYING_WATER_POLO_MEDIUM_DARK_SKIN_TONE = "\U0001f93d\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_PLAYING_WATER_POLO_DARK_SKIN_TONE = "\U0001f93d\U0001f3ff\u200d\u2640\ufe0f"
PERSON_PLAYING_HANDBALL = "\U0001f93e"
PERSON_PLAYING_HANDBALL_LIGHT_SKIN_TONE = "\U0001f93e\U0001f3fb"
PERSON_PLAYING_HANDBALL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f93e\U0001f3fc"
PERSON_PLAYING_HANDBALL_MEDIUM_SKIN_TONE = "\U0001f93e\U0001f3fd"
PERSON_PLAYING_HANDBALL_MEDIUM_DARK_SKIN_TONE = "\U0001f93e\U0001f3fe"
PERSON_PLAYING_HANDBALL_DARK_SKIN_TONE = "\U0001f93e\U0001f3ff"
MAN_PLAYING_HANDBALL = "\U0001f93e\u200d\u2642\ufe0f"
MAN_PLAYING_HANDBALL_LIGHT_SKIN_TONE = "\U0001f93e\U0001f3fb\u200d\u2642\ufe0f"
MAN_PLAYING_HANDBALL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f93e\U0001f3fc\u200d\u2642\ufe0f"
MAN_PLAYING_HANDBALL_MEDIUM_SKIN_TONE = "\U0001f93e\U0001f3fd\u200d\u2642\ufe0f"
MAN_PLAYING_HANDBALL_MEDIUM_DARK_SKIN_TONE = "\U0001f93e\U0001f3fe\u200d\u2642\ufe0f"
MAN_PLAYING_HANDBALL_DARK_SKIN_TONE = "\U0001f93e\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_PLAYING_HANDBALL = "\U0001f93e\u200d\u2640\ufe0f"
WOMAN_PLAYING_HANDBALL_LIGHT_SKIN_TONE = "\U0001f93e\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_PLAYING_HANDBALL_MEDIUM_LIGHT_SKIN_TONE = "\U0001f93e\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_PLAYING_HANDBALL_MEDIUM_SKIN_TONE = "\U0001f93e\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_PLAYING_HANDBALL_MEDIUM_DARK_SKIN_TONE = "\U0001f93e\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_PLAYING_HANDBALL_DARK_SKIN_TONE = "\U0001f93e\U0001f3ff\u200d\u2640\ufe0f"
PERSON_JUGGLING = "\U0001f939"
PERSON_JUGGLING_LIGHT_SKIN_TONE = "\U0001f939\U0001f3fb"
PERSON_JUGGLING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f939\U0001f3fc"
PERSON_JUGGLING_MEDIUM_SKIN_TONE = "\U0001f939\U0001f3fd"
PERSON_JUGGLING_MEDIUM_DARK_SKIN_TONE = "\U0001f939\U0001f3fe"
PERSON_JUGGLING_DARK_SKIN_TONE = "\U0001f939\U0001f3ff"
MAN_JUGGLING = "\U0001f939\u200d\u2642\ufe0f"
MAN_JUGGLING_LIGHT_SKIN_TONE = "\U0001f939\U0001f3fb\u200d\u2642\ufe0f"
MAN_JUGGLING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f939\U0001f3fc\u200d\u2642\ufe0f"
MAN_JUGGLING_MEDIUM_SKIN_TONE = "\U0001f939\U0001f3fd\u200d\u2642\ufe0f"
MAN_JUGGLING_MEDIUM_DARK_SKIN_TONE = "\U0001f939\U0001f3fe\u200d\u2642\ufe0f"
MAN_JUGGLING_DARK_SKIN_TONE = "\U0001f939\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_JUGGLING = "\U0001f939\u200d\u2640\ufe0f"
WOMAN_JUGGLING_LIGHT_SKIN_TONE = "\U0001f939\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_JUGGLING_MEDIUM_LIGHT_SKIN_TONE = "\U0001f939\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_JUGGLING_MEDIUM_SKIN_TONE = "\U0001f939\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_JUGGLING_MEDIUM_DARK_SKIN_TONE = "\U0001f939\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_JUGGLING_DARK_SKIN_TONE = "\U0001f939\U0001f3ff\u200d\u2640\ufe0f"
PERSON_IN_LOTUS_POSITION = "\U0001f9d8"
PERSON_IN_LOTUS_POSITION_LIGHT_SKIN_TONE = "\U0001f9d8\U0001f3fb"
PERSON_IN_LOTUS_POSITION_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d8\U0001f3fc"
PERSON_IN_LOTUS_POSITION_MEDIUM_SKIN_TONE = "\U0001f9d8\U0001f3fd"
PERSON_IN_LOTUS_POSITION_MEDIUM_DARK_SKIN_TONE = "\U0001f9d8\U0001f3fe"
PERSON_IN_LOTUS_POSITION_DARK_SKIN_TONE = "\U0001f9d8\U0001f3ff"
MAN_IN_LOTUS_POSITION = "\U0001f9d8\u200d\u2642\ufe0f"
MAN_IN_LOTUS_POSITION_LIGHT_SKIN_TONE = "\U0001f9d8\U0001f3fb\u200d\u2642\ufe0f"
MAN_IN_LOTUS_POSITION_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d8\U0001f3fc\u200d\u2642\ufe0f"
MAN_IN_LOTUS_POSITION_MEDIUM_SKIN_TONE = "\U0001f9d8\U0001f3fd\u200d\u2642\ufe0f"
MAN_IN_LOTUS_POSITION_MEDIUM_DARK_SKIN_TONE = "\U0001f9d8\U0001f3fe\u200d\u2642\ufe0f"
MAN_IN_LOTUS_POSITION_DARK_SKIN_TONE = "\U0001f9d8\U0001f3ff\u200d\u2642\ufe0f"
WOMAN_IN_LOTUS_POSITION = "\U0001f9d8\u200d\u2640\ufe0f"
WOMAN_IN_LOTUS_POSITION_LIGHT_SKIN_TONE = "\U0001f9d8\U0001f3fb\u200d\u2640\ufe0f"
WOMAN_IN_LOTUS_POSITION_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d8\U0001f3fc\u200d\u2640\ufe0f"
WOMAN_IN_LOTUS_POSITION_MEDIUM_SKIN_TONE = "\U0001f9d8\U0001f3fd\u200d\u2640\ufe0f"
WOMAN_IN_LOTUS_POSITION_MEDIUM_DARK_SKIN_TONE = "\U0001f9d8\U0001f3fe\u200d\u2640\ufe0f"
WOMAN_IN_LOTUS_POSITION_DARK_SKIN_TONE = "\U0001f9d8\U0001f3ff\u200d\u2640\ufe0f"
PERSON_TAKING_BATH = "\U0001f6c0"
PERSON_TAKING_BATH_LIGHT_SKIN_TONE = "\U0001f6c0\U0001f3fb"
PERSON_TAKING_BATH_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6c0\U0001f3fc"
PERSON_TAKING_BATH_MEDIUM_SKIN_TONE = "\U0001f6c0\U0001f3fd"
PERSON_TAKING_BATH_MEDIUM_DARK_SKIN_TONE = "\U0001f6c0\U0001f3fe"
PERSON_TAKING_BATH_DARK_SKIN_TONE = "\U0001f6c0\U0001f3ff"
PERSON_IN_BED = "\U0001f6cc"
PERSON_IN_BED_LIGHT_SKIN_TONE = "\U0001f6cc\U0001f3fb"
PERSON_IN_BED_MEDIUM_LIGHT_SKIN_TONE = "\U0001f6cc\U0001f3fc"
PERSON_IN_BED_MEDIUM_SKIN_TONE = "\U0001f6cc\U0001f3fd"
PERSON_IN_BED_MEDIUM_DARK_SKIN_TONE = "\U0001f6cc\U0001f3fe"
PERSON_IN_BED_DARK_SKIN_TONE = "\U0001f6cc\U0001f3ff"
PEOPLE_HOLDING_HANDS = "\U0001f9d1\u200d\U0001f91d\u200d\U0001f9d1"
PEOPLE_HOLDING_HANDS_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fb"
PEOPLE_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fc"
PEOPLE_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fd"
PEOPLE_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fe"
PEOPLE_HOLDING_HANDS_LIGHT_SKIN_TONE_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fb\u200d\U0001f91d\u200d\U0001f9d1\U0001f3ff"
PEOPLE_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fb"
PEOPLE_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3fc\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fc"
PEOPLE_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fd"
PEOPLE_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fe"
PEOPLE_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\U0001f91d\u200d\U0001f9d1\U0001f3ff"
PEOPLE_HOLDING_HANDS_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fb"
PEOPLE_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fc"
PEOPLE_HOLDING_HANDS_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fd"
PEOPLE_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fe"
PEOPLE_HOLDING_HANDS_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fd\u200d\U0001f91d\u200d\U0001f9d1\U0001f3ff"
PEOPLE_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fb"
PEOPLE_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fc"
PEOPLE_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fd"
PEOPLE_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f9d1\U0001f3fe\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fe"
PEOPLE_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\U0001f91d\u200d\U0001f9d1\U0001f3ff"
PEOPLE_HOLDING_HANDS_DARK_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fb"
PEOPLE_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fc"
PEOPLE_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fd"
PEOPLE_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\U0001f91d\u200d\U0001f9d1\U0001f3fe"
PEOPLE_HOLDING_HANDS_DARK_SKIN_TONE = "\U0001f9d1\U0001f3ff\u200d\U0001f91d\u200d\U0001f9d1\U0001f3ff"
WOMEN_HOLDING_HANDS = "\U0001f46d"
WOMEN_HOLDING_HANDS_LIGHT_SKIN_TONE = "\U0001f46d\U0001f3fb"
WOMEN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f469\U0001f3fc"
WOMEN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f469\U0001f3fd"
WOMEN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f469\U0001f3fe"
WOMEN_HOLDING_HANDS_LIGHT_SKIN_TONE_DARK_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f469\U0001f3ff"
WOMEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f469\U0001f3fb"
WOMEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f46d\U0001f3fc"
WOMEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f469\U0001f3fd"
WOMEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f469\U0001f3fe"
WOMEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f469\U0001f3ff"
WOMEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f469\U0001f3fb"
WOMEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f469\U0001f3fc"
WOMEN_HOLDING_HANDS_MEDIUM_SKIN_TONE = "\U0001f46d\U0001f3fd"
WOMEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f469\U0001f3fe"
WOMEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f469\U0001f3ff"
WOMEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f469\U0001f3fb"
WOMEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f469\U0001f3fc"
WOMEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f469\U0001f3fd"
WOMEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f46d\U0001f3fe"
WOMEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f469\U0001f3ff"
WOMEN_HOLDING_HANDS_DARK_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f469\U0001f3fb"
WOMEN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f469\U0001f3fc"
WOMEN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f469\U0001f3fd"
WOMEN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f469\U0001f3fe"
WOMEN_HOLDING_HANDS_DARK_SKIN_TONE = "\U0001f46d\U0001f3ff"
WOMAN_AND_MAN_HOLDING_HANDS = "\U0001f46b"
WOMAN_AND_MAN_HOLDING_HANDS_LIGHT_SKIN_TONE = "\U0001f46b\U0001f3fb"
WOMAN_AND_MAN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
WOMAN_AND_MAN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
WOMAN_AND_MAN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
WOMAN_AND_MAN_HOLDING_HANDS_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f46b\U0001f3fc"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_SKIN_TONE = "\U0001f46b\U0001f3fd"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f46b\U0001f3fe"
WOMAN_AND_MAN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
WOMAN_AND_MAN_HOLDING_HANDS_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
WOMAN_AND_MAN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
WOMAN_AND_MAN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
WOMAN_AND_MAN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
WOMAN_AND_MAN_HOLDING_HANDS_DARK_SKIN_TONE = "\U0001f46b\U0001f3ff"
MEN_HOLDING_HANDS = "\U0001f46c"
MEN_HOLDING_HANDS_LIGHT_SKIN_TONE = "\U0001f46c\U0001f3fb"
MEN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
MEN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
MEN_HOLDING_HANDS_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
MEN_HOLDING_HANDS_LIGHT_SKIN_TONE_DARK_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
MEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
MEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f46c\U0001f3fc"
MEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
MEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
MEN_HOLDING_HANDS_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
MEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
MEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
MEN_HOLDING_HANDS_MEDIUM_SKIN_TONE = "\U0001f46c\U0001f3fd"
MEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
MEN_HOLDING_HANDS_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
MEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
MEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
MEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
MEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE = "\U0001f46c\U0001f3fe"
MEN_HOLDING_HANDS_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\U0001f91d\u200d\U0001f468\U0001f3ff"
MEN_HOLDING_HANDS_DARK_SKIN_TONE_LIGHT_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fb"
MEN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fc"
MEN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fd"
MEN_HOLDING_HANDS_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\U0001f91d\u200d\U0001f468\U0001f3fe"
MEN_HOLDING_HANDS_DARK_SKIN_TONE = "\U0001f46c\U0001f3ff"
KISS = "\U0001f48f"
KISS_LIGHT_SKIN_TONE = "\U0001f48f\U0001f3fb"
KISS_MEDIUM_LIGHT_SKIN_TONE = "\U0001f48f\U0001f3fc"
KISS_MEDIUM_SKIN_TONE = "\U0001f48f\U0001f3fd"
KISS_MEDIUM_DARK_SKIN_TONE = "\U0001f48f\U0001f3fe"
KISS_DARK_SKIN_TONE = "\U0001f48f\U0001f3ff"
KISS_PERSON_PERSON_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fc"
KISS_PERSON_PERSON_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fd"
KISS_PERSON_PERSON_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fe"
KISS_PERSON_PERSON_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3ff"
KISS_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fb"
KISS_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fd"
KISS_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fe"
KISS_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3ff"
KISS_PERSON_PERSON_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fb"
KISS_PERSON_PERSON_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fc"
KISS_PERSON_PERSON_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fe"
KISS_PERSON_PERSON_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3ff"
KISS_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fb"
KISS_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fc"
KISS_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fd"
KISS_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3ff"
KISS_PERSON_PERSON_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fb"
KISS_PERSON_PERSON_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fc"
KISS_PERSON_PERSON_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fd"
KISS_PERSON_PERSON_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f9d1\U0001f3fe"
KISS_WOMAN_MAN = "\U0001f469\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468"
KISS_WOMAN_MAN_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_WOMAN_MAN_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_WOMAN_MAN_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_WOMAN_MAN_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_WOMAN_MAN_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_WOMAN_MAN_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_WOMAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_WOMAN_MAN_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_WOMAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_WOMAN_MAN_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_WOMAN_MAN_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_WOMAN_MAN_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_WOMAN_MAN_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_WOMAN_MAN_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_WOMAN_MAN_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_MAN_MAN = "\U0001f468\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468"
KISS_MAN_MAN_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_MAN_MAN_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_MAN_MAN_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_MAN_MAN_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_MAN_MAN_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_MAN_MAN_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_MAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_MAN_MAN_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_MAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_MAN_MAN_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_MAN_MAN_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_MAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_MAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_MAN_MAN_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_MAN_MAN_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_MAN_MAN_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fb"
KISS_MAN_MAN_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fc"
KISS_MAN_MAN_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fd"
KISS_MAN_MAN_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3fe"
KISS_MAN_MAN_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f468\U0001f3ff"
KISS_WOMAN_WOMAN = "\U0001f469\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469"
KISS_WOMAN_WOMAN_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fb"
KISS_WOMAN_WOMAN_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fc"
KISS_WOMAN_WOMAN_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fd"
KISS_WOMAN_WOMAN_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fe"
KISS_WOMAN_WOMAN_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3ff"
KISS_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fb"
KISS_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fc"
KISS_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fd"
KISS_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fe"
KISS_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3ff"
KISS_WOMAN_WOMAN_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fb"
KISS_WOMAN_WOMAN_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fc"
KISS_WOMAN_WOMAN_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fd"
KISS_WOMAN_WOMAN_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fe"
KISS_WOMAN_WOMAN_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3ff"
KISS_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fb"
KISS_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fc"
KISS_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fd"
KISS_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fe"
KISS_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3ff"
KISS_WOMAN_WOMAN_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fb"
KISS_WOMAN_WOMAN_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fc"
KISS_WOMAN_WOMAN_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fd"
KISS_WOMAN_WOMAN_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3fe"
KISS_WOMAN_WOMAN_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f48b\u200d\U0001f469\U0001f3ff"
COUPLE_WITH_HEART = "\U0001f491"
COUPLE_WITH_HEART_LIGHT_SKIN_TONE = "\U0001f491\U0001f3fb"
COUPLE_WITH_HEART_MEDIUM_LIGHT_SKIN_TONE = "\U0001f491\U0001f3fc"
COUPLE_WITH_HEART_MEDIUM_SKIN_TONE = "\U0001f491\U0001f3fd"
COUPLE_WITH_HEART_MEDIUM_DARK_SKIN_TONE = "\U0001f491\U0001f3fe"
COUPLE_WITH_HEART_DARK_SKIN_TONE = "\U0001f491\U0001f3ff"
COUPLE_WITH_HEART_PERSON_PERSON_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fc"
COUPLE_WITH_HEART_PERSON_PERSON_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fd"
COUPLE_WITH_HEART_PERSON_PERSON_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fe"
COUPLE_WITH_HEART_PERSON_PERSON_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3ff"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fb"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fd"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fe"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3ff"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fb"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fc"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fe"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3ff"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fb"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fc"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fd"
COUPLE_WITH_HEART_PERSON_PERSON_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3ff"
COUPLE_WITH_HEART_PERSON_PERSON_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fb"
COUPLE_WITH_HEART_PERSON_PERSON_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fc"
COUPLE_WITH_HEART_PERSON_PERSON_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fd"
COUPLE_WITH_HEART_PERSON_PERSON_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f9d1\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f9d1\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_MAN = "\U0001f469\u200d\u2764\ufe0f\u200d\U0001f468"
COUPLE_WITH_HEART_WOMAN_MAN_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_MAN_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_MAN_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_MAN_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_MAN_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE = "\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_MAN_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_MAN_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_MAN_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_MAN_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_MAN_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_MAN_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_MAN_MAN = "\U0001f468\u200d\u2764\ufe0f\u200d\U0001f468"
COUPLE_WITH_HEART_MAN_MAN_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_MAN_MAN_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_MAN_MAN_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_MAN_MAN_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_MAN_MAN_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE = "\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_SKIN_TONE = "\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_DARK_SKIN_TONE = "\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_MAN_MAN_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f468\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_MAN_MAN_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fb"
COUPLE_WITH_HEART_MAN_MAN_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fc"
COUPLE_WITH_HEART_MAN_MAN_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fd"
COUPLE_WITH_HEART_MAN_MAN_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3fe"
COUPLE_WITH_HEART_MAN_MAN_DARK_SKIN_TONE = "\U0001f468\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f468\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_WOMAN = "\U0001f469\u200d\u2764\ufe0f\u200d\U0001f469"
COUPLE_WITH_HEART_WOMAN_WOMAN_LIGHT_SKIN_TONE = "\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_WOMAN_LIGHT_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_WOMAN_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_WOMAN_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_WOMAN_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fb\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_LIGHT_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fc\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_SKIN_TONE = "\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fd\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_WOMAN_MEDIUM_DARK_SKIN_TONE_DARK_SKIN_TONE = \
"\U0001f469\U0001f3fe\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3ff"
COUPLE_WITH_HEART_WOMAN_WOMAN_DARK_SKIN_TONE_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fb"
COUPLE_WITH_HEART_WOMAN_WOMAN_DARK_SKIN_TONE_MEDIUM_LIGHT_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fc"
COUPLE_WITH_HEART_WOMAN_WOMAN_DARK_SKIN_TONE_MEDIUM_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fd"
COUPLE_WITH_HEART_WOMAN_WOMAN_DARK_SKIN_TONE_MEDIUM_DARK_SKIN_TONE = \
"\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3fe"
COUPLE_WITH_HEART_WOMAN_WOMAN_DARK_SKIN_TONE = "\U0001f469\U0001f3ff\u200d\u2764\ufe0f\u200d\U0001f469\U0001f3ff"
# Family ZWJ sequences (adults + children joined with U+200D) and person
# symbols, followed by the five skin-tone modifiers (U+1F3FB..U+1F3FF)
# and the hair-style components (U+1F9B0..U+1F9B3).
FAMILY = "\U0001f46a"
FAMILY_MAN_WOMAN_BOY = "\U0001f468\u200d\U0001f469\u200d\U0001f466"
FAMILY_MAN_WOMAN_GIRL = "\U0001f468\u200d\U0001f469\u200d\U0001f467"
FAMILY_MAN_WOMAN_GIRL_BOY = "\U0001f468\u200d\U0001f469\u200d\U0001f467\u200d\U0001f466"
FAMILY_MAN_WOMAN_BOY_BOY = "\U0001f468\u200d\U0001f469\u200d\U0001f466\u200d\U0001f466"
FAMILY_MAN_WOMAN_GIRL_GIRL = "\U0001f468\u200d\U0001f469\u200d\U0001f467\u200d\U0001f467"
FAMILY_MAN_MAN_BOY = "\U0001f468\u200d\U0001f468\u200d\U0001f466"
FAMILY_MAN_MAN_GIRL = "\U0001f468\u200d\U0001f468\u200d\U0001f467"
FAMILY_MAN_MAN_GIRL_BOY = "\U0001f468\u200d\U0001f468\u200d\U0001f467\u200d\U0001f466"
FAMILY_MAN_MAN_BOY_BOY = "\U0001f468\u200d\U0001f468\u200d\U0001f466\u200d\U0001f466"
FAMILY_MAN_MAN_GIRL_GIRL = "\U0001f468\u200d\U0001f468\u200d\U0001f467\u200d\U0001f467"
FAMILY_WOMAN_WOMAN_BOY = "\U0001f469\u200d\U0001f469\u200d\U0001f466"
FAMILY_WOMAN_WOMAN_GIRL = "\U0001f469\u200d\U0001f469\u200d\U0001f467"
FAMILY_WOMAN_WOMAN_GIRL_BOY = "\U0001f469\u200d\U0001f469\u200d\U0001f467\u200d\U0001f466"
FAMILY_WOMAN_WOMAN_BOY_BOY = "\U0001f469\u200d\U0001f469\u200d\U0001f466\u200d\U0001f466"
FAMILY_WOMAN_WOMAN_GIRL_GIRL = "\U0001f469\u200d\U0001f469\u200d\U0001f467\u200d\U0001f467"
FAMILY_MAN_BOY = "\U0001f468\u200d\U0001f466"
FAMILY_MAN_BOY_BOY = "\U0001f468\u200d\U0001f466\u200d\U0001f466"
FAMILY_MAN_GIRL = "\U0001f468\u200d\U0001f467"
FAMILY_MAN_GIRL_BOY = "\U0001f468\u200d\U0001f467\u200d\U0001f466"
FAMILY_MAN_GIRL_GIRL = "\U0001f468\u200d\U0001f467\u200d\U0001f467"
FAMILY_WOMAN_BOY = "\U0001f469\u200d\U0001f466"
FAMILY_WOMAN_BOY_BOY = "\U0001f469\u200d\U0001f466\u200d\U0001f466"
FAMILY_WOMAN_GIRL = "\U0001f469\u200d\U0001f467"
FAMILY_WOMAN_GIRL_BOY = "\U0001f469\u200d\U0001f467\u200d\U0001f466"
FAMILY_WOMAN_GIRL_GIRL = "\U0001f469\u200d\U0001f467\u200d\U0001f467"
SPEAKING_HEAD = "\U0001f5e3\ufe0f"
BUST_IN_SILHOUETTE = "\U0001f464"
BUSTS_IN_SILHOUETTE = "\U0001f465"
PEOPLE_HUGGING = "\U0001fac2"
FOOTPRINTS = "\U0001f463"
# Skin-tone modifiers (Fitzpatrick scale); combine with base emoji.
LIGHT_SKIN_TONE = "\U0001f3fb"
MEDIUM_LIGHT_SKIN_TONE = "\U0001f3fc"
MEDIUM_SKIN_TONE = "\U0001f3fd"
MEDIUM_DARK_SKIN_TONE = "\U0001f3fe"
DARK_SKIN_TONE = "\U0001f3ff"
# Hair-style components used in person ZWJ sequences.
RED_HAIR = "\U0001f9b0"
CURLY_HAIR = "\U0001f9b1"
WHITE_HAIR = "\U0001f9b3"
BALD = "\U0001f9b2"
# Animals & nature. Trailing "\ufe0f" (VS16) forces emoji presentation on
# characters that default to text style; ZWJ variants (e.g. SERVICE_DOG,
# BLACK_CAT, POLAR_BEAR) compose a base animal with a modifier glyph.
MONKEY_FACE = "\U0001f435"
MONKEY = "\U0001f412"
GORILLA = "\U0001f98d"
ORANGUTAN = "\U0001f9a7"
DOG_FACE = "\U0001f436"
DOG = "\U0001f415"
GUIDE_DOG = "\U0001f9ae"
SERVICE_DOG = "\U0001f415\u200d\U0001f9ba"
POODLE = "\U0001f429"
WOLF = "\U0001f43a"
FOX = "\U0001f98a"
RACCOON = "\U0001f99d"
CAT_FACE = "\U0001f431"
CAT = "\U0001f408"
BLACK_CAT = "\U0001f408\u200d\u2b1b"
LION = "\U0001f981"
TIGER_FACE = "\U0001f42f"
TIGER = "\U0001f405"
LEOPARD = "\U0001f406"
HORSE_FACE = "\U0001f434"
HORSE = "\U0001f40e"
UNICORN = "\U0001f984"
ZEBRA = "\U0001f993"
DEER = "\U0001f98c"
BISON = "\U0001f9ac"
COW_FACE = "\U0001f42e"
OX = "\U0001f402"
WATER_BUFFALO = "\U0001f403"
COW = "\U0001f404"
PIG_FACE = "\U0001f437"
PIG = "\U0001f416"
BOAR = "\U0001f417"
PIG_NOSE = "\U0001f43d"
RAM = "\U0001f40f"
EWE = "\U0001f411"
GOAT = "\U0001f410"
CAMEL = "\U0001f42a"
TWO_HUMP_CAMEL = "\U0001f42b"
LLAMA = "\U0001f999"
GIRAFFE = "\U0001f992"
ELEPHANT = "\U0001f418"
MAMMOTH = "\U0001f9a3"
RHINOCEROS = "\U0001f98f"
HIPPOPOTAMUS = "\U0001f99b"
MOUSE_FACE = "\U0001f42d"
MOUSE = "\U0001f401"
RAT = "\U0001f400"
HAMSTER = "\U0001f439"
RABBIT_FACE = "\U0001f430"
RABBIT = "\U0001f407"
CHIPMUNK = "\U0001f43f\ufe0f"
BEAVER = "\U0001f9ab"
HEDGEHOG = "\U0001f994"
BAT = "\U0001f987"
BEAR = "\U0001f43b"
POLAR_BEAR = "\U0001f43b\u200d\u2744\ufe0f"
KOALA = "\U0001f428"
PANDA = "\U0001f43c"
SLOTH = "\U0001f9a5"
OTTER = "\U0001f9a6"
SKUNK = "\U0001f9a8"
KANGAROO = "\U0001f998"
BADGER = "\U0001f9a1"
PAW_PRINTS = "\U0001f43e"
TURKEY = "\U0001f983"
CHICKEN = "\U0001f414"
ROOSTER = "\U0001f413"
HATCHING_CHICK = "\U0001f423"
BABY_CHICK = "\U0001f424"
FRONT_FACING_BABY_CHICK = "\U0001f425"
BIRD = "\U0001f426"
PENGUIN = "\U0001f427"
DOVE = "\U0001f54a\ufe0f"
EAGLE = "\U0001f985"
DUCK = "\U0001f986"
SWAN = "\U0001f9a2"
OWL = "\U0001f989"
DODO = "\U0001f9a4"
FEATHER = "\U0001fab6"
FLAMINGO = "\U0001f9a9"
PEACOCK = "\U0001f99a"
PARROT = "\U0001f99c"
FROG = "\U0001f438"
CROCODILE = "\U0001f40a"
TURTLE = "\U0001f422"
LIZARD = "\U0001f98e"
SNAKE = "\U0001f40d"
DRAGON_FACE = "\U0001f432"
DRAGON = "\U0001f409"
SAUROPOD = "\U0001f995"
T_REX = "\U0001f996"
SPOUTING_WHALE = "\U0001f433"
WHALE = "\U0001f40b"
DOLPHIN = "\U0001f42c"
SEAL = "\U0001f9ad"
FISH = "\U0001f41f"
TROPICAL_FISH = "\U0001f420"
BLOWFISH = "\U0001f421"
SHARK = "\U0001f988"
OCTOPUS = "\U0001f419"
SPIRAL_SHELL = "\U0001f41a"
CORAL = "\U0001fab8"
SNAIL = "\U0001f40c"
BUTTERFLY = "\U0001f98b"
BUG = "\U0001f41b"
ANT = "\U0001f41c"
HONEYBEE = "\U0001f41d"
BEETLE = "\U0001fab2"
LADY_BEETLE = "\U0001f41e"
CRICKET = "\U0001f997"
COCKROACH = "\U0001fab3"
SPIDER = "\U0001f577\ufe0f"
SPIDER_WEB = "\U0001f578\ufe0f"
SCORPION = "\U0001f982"
MOSQUITO = "\U0001f99f"
FLY = "\U0001fab0"
WORM = "\U0001fab1"
MICROBE = "\U0001f9a0"
# Plants and flowers.
BOUQUET = "\U0001f490"
CHERRY_BLOSSOM = "\U0001f338"
WHITE_FLOWER = "\U0001f4ae"
LOTUS = "\U0001fab7"
ROSETTE = "\U0001f3f5\ufe0f"
ROSE = "\U0001f339"
WILTED_FLOWER = "\U0001f940"
HIBISCUS = "\U0001f33a"
SUNFLOWER = "\U0001f33b"
BLOSSOM = "\U0001f33c"
TULIP = "\U0001f337"
SEEDLING = "\U0001f331"
POTTED_PLANT = "\U0001fab4"
EVERGREEN_TREE = "\U0001f332"
DECIDUOUS_TREE = "\U0001f333"
PALM_TREE = "\U0001f334"
CACTUS = "\U0001f335"
SHEAF_OF_RICE = "\U0001f33e"
HERB = "\U0001f33f"
SHAMROCK = "\u2618\ufe0f"
FOUR_LEAF_CLOVER = "\U0001f340"
MAPLE_LEAF = "\U0001f341"
FALLEN_LEAF = "\U0001f342"
LEAF_FLUTTERING_IN_WIND = "\U0001f343"
EMPTY_NEST = "\U0001fab9"
NEST_WITH_EGGS = "\U0001faba"
# Food & drink: fruit, vegetables, prepared foods, Asian foods, seafood,
# sweets, beverages and tableware.
GRAPES = "\U0001f347"
MELON = "\U0001f348"
WATERMELON = "\U0001f349"
TANGERINE = "\U0001f34a"
LEMON = "\U0001f34b"
BANANA = "\U0001f34c"
PINEAPPLE = "\U0001f34d"
MANGO = "\U0001f96d"
RED_APPLE = "\U0001f34e"
GREEN_APPLE = "\U0001f34f"
PEAR = "\U0001f350"
PEACH = "\U0001f351"
CHERRIES = "\U0001f352"
STRAWBERRY = "\U0001f353"
BLUEBERRIES = "\U0001fad0"
KIWI_FRUIT = "\U0001f95d"
TOMATO = "\U0001f345"
OLIVE = "\U0001fad2"
COCONUT = "\U0001f965"
AVOCADO = "\U0001f951"
EGGPLANT = "\U0001f346"
POTATO = "\U0001f954"
CARROT = "\U0001f955"
EAR_OF_CORN = "\U0001f33d"
HOT_PEPPER = "\U0001f336\ufe0f"
BELL_PEPPER = "\U0001fad1"
CUCUMBER = "\U0001f952"
LEAFY_GREEN = "\U0001f96c"
BROCCOLI = "\U0001f966"
GARLIC = "\U0001f9c4"
ONION = "\U0001f9c5"
MUSHROOM = "\U0001f344"
PEANUTS = "\U0001f95c"
BEANS = "\U0001fad8"
CHESTNUT = "\U0001f330"
BREAD = "\U0001f35e"
CROISSANT = "\U0001f950"
BAGUETTE_BREAD = "\U0001f956"
FLATBREAD = "\U0001fad3"
PRETZEL = "\U0001f968"
BAGEL = "\U0001f96f"
PANCAKES = "\U0001f95e"
WAFFLE = "\U0001f9c7"
CHEESE_WEDGE = "\U0001f9c0"
MEAT_ON_BONE = "\U0001f356"
POULTRY_LEG = "\U0001f357"
CUT_OF_MEAT = "\U0001f969"
BACON = "\U0001f953"
HAMBURGER = "\U0001f354"
FRENCH_FRIES = "\U0001f35f"
PIZZA = "\U0001f355"
HOT_DOG = "\U0001f32d"
SANDWICH = "\U0001f96a"
TACO = "\U0001f32e"
BURRITO = "\U0001f32f"
TAMALE = "\U0001fad4"
STUFFED_FLATBREAD = "\U0001f959"
FALAFEL = "\U0001f9c6"
EGG = "\U0001f95a"
COOKING = "\U0001f373"
SHALLOW_PAN_OF_FOOD = "\U0001f958"
POT_OF_FOOD = "\U0001f372"
FONDUE = "\U0001fad5"
BOWL_WITH_SPOON = "\U0001f963"
GREEN_SALAD = "\U0001f957"
POPCORN = "\U0001f37f"
BUTTER = "\U0001f9c8"
SALT = "\U0001f9c2"
CANNED_FOOD = "\U0001f96b"
BENTO_BOX = "\U0001f371"
RICE_CRACKER = "\U0001f358"
RICE_BALL = "\U0001f359"
COOKED_RICE = "\U0001f35a"
CURRY_RICE = "\U0001f35b"
STEAMING_BOWL = "\U0001f35c"
SPAGHETTI = "\U0001f35d"
ROASTED_SWEET_POTATO = "\U0001f360"
ODEN = "\U0001f362"
SUSHI = "\U0001f363"
FRIED_SHRIMP = "\U0001f364"
FISH_CAKE_WITH_SWIRL = "\U0001f365"
MOON_CAKE = "\U0001f96e"
DANGO = "\U0001f361"
DUMPLING = "\U0001f95f"
FORTUNE_COOKIE = "\U0001f960"
TAKEOUT_BOX = "\U0001f961"
CRAB = "\U0001f980"
LOBSTER = "\U0001f99e"
SHRIMP = "\U0001f990"
SQUID = "\U0001f991"
OYSTER = "\U0001f9aa"
SOFT_ICE_CREAM = "\U0001f366"
SHAVED_ICE = "\U0001f367"
ICE_CREAM = "\U0001f368"
DOUGHNUT = "\U0001f369"
COOKIE = "\U0001f36a"
BIRTHDAY_CAKE = "\U0001f382"
SHORTCAKE = "\U0001f370"
CUPCAKE = "\U0001f9c1"
PIE = "\U0001f967"
CHOCOLATE_BAR = "\U0001f36b"
CANDY = "\U0001f36c"
LOLLIPOP = "\U0001f36d"
CUSTARD = "\U0001f36e"
HONEY_POT = "\U0001f36f"
BABY_BOTTLE = "\U0001f37c"
GLASS_OF_MILK = "\U0001f95b"
HOT_BEVERAGE = "\u2615"
TEAPOT = "\U0001fad6"
TEACUP_WITHOUT_HANDLE = "\U0001f375"
SAKE = "\U0001f376"
BOTTLE_WITH_POPPING_CORK = "\U0001f37e"
WINE_GLASS = "\U0001f377"
COCKTAIL_GLASS = "\U0001f378"
TROPICAL_DRINK = "\U0001f379"
BEER_MUG = "\U0001f37a"
CLINKING_BEER_MUGS = "\U0001f37b"
CLINKING_GLASSES = "\U0001f942"
TUMBLER_GLASS = "\U0001f943"
POURING_LIQUID = "\U0001fad7"
CUP_WITH_STRAW = "\U0001f964"
BUBBLE_TEA = "\U0001f9cb"
BEVERAGE_BOX = "\U0001f9c3"
MATE = "\U0001f9c9"
ICE = "\U0001f9ca"
CHOPSTICKS = "\U0001f962"
FORK_AND_KNIFE_WITH_PLATE = "\U0001f37d\ufe0f"
FORK_AND_KNIFE = "\U0001f374"
SPOON = "\U0001f944"
KITCHEN_KNIFE = "\U0001f52a"
JAR = "\U0001fad9"
AMPHORA = "\U0001f3fa"
# Geography, buildings and places of interest.
GLOBE_SHOWING_EUROPE_AFRICA = "\U0001f30d"
GLOBE_SHOWING_AMERICAS = "\U0001f30e"
GLOBE_SHOWING_ASIA_AUSTRALIA = "\U0001f30f"
GLOBE_WITH_MERIDIANS = "\U0001f310"
WORLD_MAP = "\U0001f5fa\ufe0f"
MAP_OF_JAPAN = "\U0001f5fe"
COMPASS = "\U0001f9ed"
SNOW_CAPPED_MOUNTAIN = "\U0001f3d4\ufe0f"
MOUNTAIN = "\u26f0\ufe0f"
VOLCANO = "\U0001f30b"
MOUNT_FUJI = "\U0001f5fb"
CAMPING = "\U0001f3d5\ufe0f"
BEACH_WITH_UMBRELLA = "\U0001f3d6\ufe0f"
DESERT = "\U0001f3dc\ufe0f"
DESERT_ISLAND = "\U0001f3dd\ufe0f"
NATIONAL_PARK = "\U0001f3de\ufe0f"
STADIUM = "\U0001f3df\ufe0f"
CLASSICAL_BUILDING = "\U0001f3db\ufe0f"
BUILDING_CONSTRUCTION = "\U0001f3d7\ufe0f"
BRICK = "\U0001f9f1"
ROCK = "\U0001faa8"
WOOD = "\U0001fab5"
HUT = "\U0001f6d6"
HOUSES = "\U0001f3d8\ufe0f"
DERELICT_HOUSE = "\U0001f3da\ufe0f"
HOUSE = "\U0001f3e0"
HOUSE_WITH_GARDEN = "\U0001f3e1"
OFFICE_BUILDING = "\U0001f3e2"
JAPANESE_POST_OFFICE = "\U0001f3e3"
POST_OFFICE = "\U0001f3e4"
HOSPITAL = "\U0001f3e5"
BANK = "\U0001f3e6"
HOTEL = "\U0001f3e8"
LOVE_HOTEL = "\U0001f3e9"
CONVENIENCE_STORE = "\U0001f3ea"
SCHOOL = "\U0001f3eb"
DEPARTMENT_STORE = "\U0001f3ec"
FACTORY = "\U0001f3ed"
JAPANESE_CASTLE = "\U0001f3ef"
CASTLE = "\U0001f3f0"
WEDDING = "\U0001f492"
TOKYO_TOWER = "\U0001f5fc"
STATUE_OF_LIBERTY = "\U0001f5fd"
CHURCH = "\u26ea"
MOSQUE = "\U0001f54c"
HINDU_TEMPLE = "\U0001f6d5"
SYNAGOGUE = "\U0001f54d"
SHINTO_SHRINE = "\u26e9\ufe0f"
KAABA = "\U0001f54b"
FOUNTAIN = "\u26f2"
TENT = "\u26fa"
FOGGY = "\U0001f301"
NIGHT_WITH_STARS = "\U0001f303"
CITYSCAPE = "\U0001f3d9\ufe0f"
SUNRISE_OVER_MOUNTAINS = "\U0001f304"
SUNRISE = "\U0001f305"
CITYSCAPE_AT_DUSK = "\U0001f306"
SUNSET = "\U0001f307"
BRIDGE_AT_NIGHT = "\U0001f309"
HOT_SPRINGS = "\u2668\ufe0f"
CAROUSEL_HORSE = "\U0001f3a0"
PLAYGROUND_SLIDE = "\U0001f6dd"
FERRIS_WHEEL = "\U0001f3a1"
ROLLER_COASTER = "\U0001f3a2"
BARBER_POLE = "\U0001f488"
CIRCUS_TENT = "\U0001f3aa"
# Transport: rail, road, personal mobility, water and air travel.
LOCOMOTIVE = "\U0001f682"
RAILWAY_CAR = "\U0001f683"
HIGH_SPEED_TRAIN = "\U0001f684"
BULLET_TRAIN = "\U0001f685"
TRAIN = "\U0001f686"
METRO = "\U0001f687"
LIGHT_RAIL = "\U0001f688"
STATION = "\U0001f689"
TRAM = "\U0001f68a"
MONORAIL = "\U0001f69d"
MOUNTAIN_RAILWAY = "\U0001f69e"
TRAM_CAR = "\U0001f68b"
BUS = "\U0001f68c"
ONCOMING_BUS = "\U0001f68d"
TROLLEYBUS = "\U0001f68e"
MINIBUS = "\U0001f690"
AMBULANCE = "\U0001f691"
FIRE_ENGINE = "\U0001f692"
POLICE_CAR = "\U0001f693"
ONCOMING_POLICE_CAR = "\U0001f694"
TAXI = "\U0001f695"
ONCOMING_TAXI = "\U0001f696"
AUTOMOBILE = "\U0001f697"
ONCOMING_AUTOMOBILE = "\U0001f698"
SPORT_UTILITY_VEHICLE = "\U0001f699"
PICKUP_TRUCK = "\U0001f6fb"
DELIVERY_TRUCK = "\U0001f69a"
ARTICULATED_LORRY = "\U0001f69b"
TRACTOR = "\U0001f69c"
RACING_CAR = "\U0001f3ce\ufe0f"
MOTORCYCLE = "\U0001f3cd\ufe0f"
MOTOR_SCOOTER = "\U0001f6f5"
MANUAL_WHEELCHAIR = "\U0001f9bd"
MOTORIZED_WHEELCHAIR = "\U0001f9bc"
AUTO_RICKSHAW = "\U0001f6fa"
BICYCLE = "\U0001f6b2"
KICK_SCOOTER = "\U0001f6f4"
SKATEBOARD = "\U0001f6f9"
ROLLER_SKATE = "\U0001f6fc"
BUS_STOP = "\U0001f68f"
MOTORWAY = "\U0001f6e3\ufe0f"
RAILWAY_TRACK = "\U0001f6e4\ufe0f"
OIL_DRUM = "\U0001f6e2\ufe0f"
FUEL_PUMP = "\u26fd"
WHEEL = "\U0001f6de"
POLICE_CAR_LIGHT = "\U0001f6a8"
HORIZONTAL_TRAFFIC_LIGHT = "\U0001f6a5"
VERTICAL_TRAFFIC_LIGHT = "\U0001f6a6"
STOP_SIGN = "\U0001f6d1"
CONSTRUCTION = "\U0001f6a7"
ANCHOR = "\u2693"
RING_BUOY = "\U0001f6df"
SAILBOAT = "\u26f5"
CANOE = "\U0001f6f6"
SPEEDBOAT = "\U0001f6a4"
PASSENGER_SHIP = "\U0001f6f3\ufe0f"
FERRY = "\u26f4\ufe0f"
MOTOR_BOAT = "\U0001f6e5\ufe0f"
SHIP = "\U0001f6a2"
AIRPLANE = "\u2708\ufe0f"
SMALL_AIRPLANE = "\U0001f6e9\ufe0f"
AIRPLANE_DEPARTURE = "\U0001f6eb"
AIRPLANE_ARRIVAL = "\U0001f6ec"
PARACHUTE = "\U0001fa82"
SEAT = "\U0001f4ba"
HELICOPTER = "\U0001f681"
SUSPENSION_RAILWAY = "\U0001f69f"
MOUNTAIN_CABLEWAY = "\U0001f6a0"
AERIAL_TRAMWAY = "\U0001f6a1"
SATELLITE = "\U0001f6f0\ufe0f"
ROCKET = "\U0001f680"
FLYING_SAUCER = "\U0001f6f8"
BELLHOP_BELL = "\U0001f6ce\ufe0f"
LUGGAGE = "\U0001f9f3"
# Time (clock faces U+1F550..U+1F567), moon phases, sky and weather.
HOURGLASS_DONE = "\u231b"
HOURGLASS_NOT_DONE = "\u23f3"
WATCH = "\u231a"
ALARM_CLOCK = "\u23f0"
STOPWATCH = "\u23f1\ufe0f"
TIMER_CLOCK = "\u23f2\ufe0f"
MANTELPIECE_CLOCK = "\U0001f570\ufe0f"
TWELVE_O_CLOCK = "\U0001f55b"
TWELVE_THIRTY = "\U0001f567"
ONE_O_CLOCK = "\U0001f550"
ONE_THIRTY = "\U0001f55c"
TWO_O_CLOCK = "\U0001f551"
TWO_THIRTY = "\U0001f55d"
THREE_O_CLOCK = "\U0001f552"
THREE_THIRTY = "\U0001f55e"
FOUR_O_CLOCK = "\U0001f553"
FOUR_THIRTY = "\U0001f55f"
FIVE_O_CLOCK = "\U0001f554"
FIVE_THIRTY = "\U0001f560"
SIX_O_CLOCK = "\U0001f555"
SIX_THIRTY = "\U0001f561"
SEVEN_O_CLOCK = "\U0001f556"
SEVEN_THIRTY = "\U0001f562"
EIGHT_O_CLOCK = "\U0001f557"
EIGHT_THIRTY = "\U0001f563"
NINE_O_CLOCK = "\U0001f558"
NINE_THIRTY = "\U0001f564"
TEN_O_CLOCK = "\U0001f559"
TEN_THIRTY = "\U0001f565"
ELEVEN_O_CLOCK = "\U0001f55a"
ELEVEN_THIRTY = "\U0001f566"
# Moon phases in calendar order (new -> waning crescent).
NEW_MOON = "\U0001f311"
WAXING_CRESCENT_MOON = "\U0001f312"
FIRST_QUARTER_MOON = "\U0001f313"
WAXING_GIBBOUS_MOON = "\U0001f314"
FULL_MOON = "\U0001f315"
WANING_GIBBOUS_MOON = "\U0001f316"
LAST_QUARTER_MOON = "\U0001f317"
WANING_CRESCENT_MOON = "\U0001f318"
CRESCENT_MOON = "\U0001f319"
NEW_MOON_FACE = "\U0001f31a"
FIRST_QUARTER_MOON_FACE = "\U0001f31b"
LAST_QUARTER_MOON_FACE = "\U0001f31c"
THERMOMETER = "\U0001f321\ufe0f"
SUN = "\u2600\ufe0f"
FULL_MOON_FACE = "\U0001f31d"
SUN_WITH_FACE = "\U0001f31e"
RINGED_PLANET = "\U0001fa90"
STAR = "\u2b50"
GLOWING_STAR = "\U0001f31f"
SHOOTING_STAR = "\U0001f320"
MILKY_WAY = "\U0001f30c"
CLOUD = "\u2601\ufe0f"
SUN_BEHIND_CLOUD = "\u26c5"
CLOUD_WITH_LIGHTNING_AND_RAIN = "\u26c8\ufe0f"
SUN_BEHIND_SMALL_CLOUD = "\U0001f324\ufe0f"
SUN_BEHIND_LARGE_CLOUD = "\U0001f325\ufe0f"
SUN_BEHIND_RAIN_CLOUD = "\U0001f326\ufe0f"
CLOUD_WITH_RAIN = "\U0001f327\ufe0f"
CLOUD_WITH_SNOW = "\U0001f328\ufe0f"
CLOUD_WITH_LIGHTNING = "\U0001f329\ufe0f"
TORNADO = "\U0001f32a\ufe0f"
FOG = "\U0001f32b\ufe0f"
WIND_FACE = "\U0001f32c\ufe0f"
CYCLONE = "\U0001f300"
RAINBOW = "\U0001f308"
CLOSED_UMBRELLA = "\U0001f302"
UMBRELLA = "\u2602\ufe0f"
UMBRELLA_WITH_RAIN_DROPS = "\u2614"
UMBRELLA_ON_GROUND = "\u26f1\ufe0f"
HIGH_VOLTAGE = "\u26a1"
SNOWFLAKE = "\u2744\ufe0f"
SNOWMAN = "\u2603\ufe0f"
SNOWMAN_WITHOUT_SNOW = "\u26c4"
COMET = "\u2604\ufe0f"
FIRE = "\U0001f525"
DROPLET = "\U0001f4a7"
WATER_WAVE = "\U0001f30a"
# Events & celebrations, awards, sport equipment, games and arts & crafts.
JACK_O_LANTERN = "\U0001f383"
CHRISTMAS_TREE = "\U0001f384"
FIREWORKS = "\U0001f386"
SPARKLER = "\U0001f387"
FIRECRACKER = "\U0001f9e8"
SPARKLES = "\u2728"
BALLOON = "\U0001f388"
PARTY_POPPER = "\U0001f389"
CONFETTI_BALL = "\U0001f38a"
TANABATA_TREE = "\U0001f38b"
PINE_DECORATION = "\U0001f38d"
JAPANESE_DOLLS = "\U0001f38e"
CARP_STREAMER = "\U0001f38f"
WIND_CHIME = "\U0001f390"
MOON_VIEWING_CEREMONY = "\U0001f391"
RED_ENVELOPE = "\U0001f9e7"
RIBBON = "\U0001f380"
WRAPPED_GIFT = "\U0001f381"
REMINDER_RIBBON = "\U0001f397\ufe0f"
ADMISSION_TICKETS = "\U0001f39f\ufe0f"
TICKET = "\U0001f3ab"
MILITARY_MEDAL = "\U0001f396\ufe0f"
TROPHY = "\U0001f3c6"
SPORTS_MEDAL = "\U0001f3c5"
FIRST_PLACE_MEDAL = "\U0001f947"
SECOND_PLACE_MEDAL = "\U0001f948"
THIRD_PLACE_MEDAL = "\U0001f949"
SOCCER_BALL = "\u26bd"
BASEBALL = "\u26be"
SOFTBALL = "\U0001f94e"
BASKETBALL = "\U0001f3c0"
VOLLEYBALL = "\U0001f3d0"
AMERICAN_FOOTBALL = "\U0001f3c8"
RUGBY_FOOTBALL = "\U0001f3c9"
TENNIS = "\U0001f3be"
FLYING_DISC = "\U0001f94f"
BOWLING = "\U0001f3b3"
CRICKET_GAME = "\U0001f3cf"
FIELD_HOCKEY = "\U0001f3d1"
ICE_HOCKEY = "\U0001f3d2"
LACROSSE = "\U0001f94d"
PING_PONG = "\U0001f3d3"
BADMINTON = "\U0001f3f8"
BOXING_GLOVE = "\U0001f94a"
MARTIAL_ARTS_UNIFORM = "\U0001f94b"
GOAL_NET = "\U0001f945"
FLAG_IN_HOLE = "\u26f3"
ICE_SKATE = "\u26f8\ufe0f"
FISHING_POLE = "\U0001f3a3"
DIVING_MASK = "\U0001f93f"
RUNNING_SHIRT = "\U0001f3bd"
SKIS = "\U0001f3bf"
SLED = "\U0001f6f7"
CURLING_STONE = "\U0001f94c"
BULLSEYE = "\U0001f3af"
YO_YO = "\U0001fa80"
KITE = "\U0001fa81"
POOL_8_BALL = "\U0001f3b1"
CRYSTAL_BALL = "\U0001f52e"
MAGIC_WAND = "\U0001fa84"
NAZAR_AMULET = "\U0001f9ff"
HAMSA = "\U0001faac"
VIDEO_GAME = "\U0001f3ae"
JOYSTICK = "\U0001f579\ufe0f"
SLOT_MACHINE = "\U0001f3b0"
GAME_DIE = "\U0001f3b2"
PUZZLE_PIECE = "\U0001f9e9"
TEDDY_BEAR = "\U0001f9f8"
PINATA = "\U0001fa85"
MIRROR_BALL = "\U0001faa9"
NESTING_DOLLS = "\U0001fa86"
SPADE_SUIT = "\u2660\ufe0f"
HEART_SUIT = "\u2665\ufe0f"
DIAMOND_SUIT = "\u2666\ufe0f"
CLUB_SUIT = "\u2663\ufe0f"
CHESS_PAWN = "\u265f\ufe0f"
JOKER = "\U0001f0cf"
MAHJONG_RED_DRAGON = "\U0001f004"
FLOWER_PLAYING_CARDS = "\U0001f3b4"
PERFORMING_ARTS = "\U0001f3ad"
FRAMED_PICTURE = "\U0001f5bc\ufe0f"
ARTIST_PALETTE = "\U0001f3a8"
THREAD = "\U0001f9f5"
SEWING_NEEDLE = "\U0001faa1"
YARN = "\U0001f9f6"
KNOT = "\U0001faa2"
# Clothing, footwear, headwear and accessories.
GLASSES = "\U0001f453"
SUNGLASSES = "\U0001f576\ufe0f"
GOGGLES = "\U0001f97d"
LAB_COAT = "\U0001f97c"
SAFETY_VEST = "\U0001f9ba"
NECKTIE = "\U0001f454"
T_SHIRT = "\U0001f455"
JEANS = "\U0001f456"
SCARF = "\U0001f9e3"
GLOVES = "\U0001f9e4"
COAT = "\U0001f9e5"
SOCKS = "\U0001f9e6"
DRESS = "\U0001f457"
KIMONO = "\U0001f458"
SARI = "\U0001f97b"
ONE_PIECE_SWIMSUIT = "\U0001fa71"
BRIEFS = "\U0001fa72"
SHORTS = "\U0001fa73"
BIKINI = "\U0001f459"
WOMAN_S_CLOTHES = "\U0001f45a"
PURSE = "\U0001f45b"
HANDBAG = "\U0001f45c"
CLUTCH_BAG = "\U0001f45d"
SHOPPING_BAGS = "\U0001f6cd\ufe0f"
BACKPACK = "\U0001f392"
THONG_SANDAL = "\U0001fa74"
MAN_S_SHOE = "\U0001f45e"
RUNNING_SHOE = "\U0001f45f"
HIKING_BOOT = "\U0001f97e"
FLAT_SHOE = "\U0001f97f"
HIGH_HEELED_SHOE = "\U0001f460"
WOMAN_S_SANDAL = "\U0001f461"
BALLET_SHOES = "\U0001fa70"
WOMAN_S_BOOT = "\U0001f462"
CROWN = "\U0001f451"
WOMAN_S_HAT = "\U0001f452"
TOP_HAT = "\U0001f3a9"
GRADUATION_CAP = "\U0001f393"
BILLED_CAP = "\U0001f9e2"
MILITARY_HELMET = "\U0001fa96"
RESCUE_WORKER_S_HELMET = "\u26d1\ufe0f"
PRAYER_BEADS = "\U0001f4ff"
LIPSTICK = "\U0001f484"
RING = "\U0001f48d"
GEM_STONE = "\U0001f48e"
# Sound, music & instruments, telephony, computing, audio/video and light.
MUTED_SPEAKER = "\U0001f507"
SPEAKER_LOW_VOLUME = "\U0001f508"
SPEAKER_MEDIUM_VOLUME = "\U0001f509"
SPEAKER_HIGH_VOLUME = "\U0001f50a"
LOUDSPEAKER = "\U0001f4e2"
MEGAPHONE = "\U0001f4e3"
POSTAL_HORN = "\U0001f4ef"
BELL = "\U0001f514"
BELL_WITH_SLASH = "\U0001f515"
MUSICAL_SCORE = "\U0001f3bc"
MUSICAL_NOTE = "\U0001f3b5"
MUSICAL_NOTES = "\U0001f3b6"
STUDIO_MICROPHONE = "\U0001f399\ufe0f"
LEVEL_SLIDER = "\U0001f39a\ufe0f"
CONTROL_KNOBS = "\U0001f39b\ufe0f"
MICROPHONE = "\U0001f3a4"
HEADPHONE = "\U0001f3a7"
RADIO = "\U0001f4fb"
SAXOPHONE = "\U0001f3b7"
ACCORDION = "\U0001fa97"
GUITAR = "\U0001f3b8"
MUSICAL_KEYBOARD = "\U0001f3b9"
TRUMPET = "\U0001f3ba"
VIOLIN = "\U0001f3bb"
BANJO = "\U0001fa95"
DRUM = "\U0001f941"
LONG_DRUM = "\U0001fa98"
MOBILE_PHONE = "\U0001f4f1"
MOBILE_PHONE_WITH_ARROW = "\U0001f4f2"
TELEPHONE = "\u260e\ufe0f"
TELEPHONE_RECEIVER = "\U0001f4de"
PAGER = "\U0001f4df"
FAX_MACHINE = "\U0001f4e0"
BATTERY = "\U0001f50b"
LOW_BATTERY = "\U0001faab"
ELECTRIC_PLUG = "\U0001f50c"
LAPTOP = "\U0001f4bb"
DESKTOP_COMPUTER = "\U0001f5a5\ufe0f"
PRINTER = "\U0001f5a8\ufe0f"
KEYBOARD = "\u2328\ufe0f"
COMPUTER_MOUSE = "\U0001f5b1\ufe0f"
TRACKBALL = "\U0001f5b2\ufe0f"
COMPUTER_DISK = "\U0001f4bd"
FLOPPY_DISK = "\U0001f4be"
OPTICAL_DISK = "\U0001f4bf"
DVD = "\U0001f4c0"
ABACUS = "\U0001f9ee"
MOVIE_CAMERA = "\U0001f3a5"
FILM_FRAMES = "\U0001f39e\ufe0f"
FILM_PROJECTOR = "\U0001f4fd\ufe0f"
CLAPPER_BOARD = "\U0001f3ac"
TELEVISION = "\U0001f4fa"
CAMERA = "\U0001f4f7"
CAMERA_WITH_FLASH = "\U0001f4f8"
VIDEO_CAMERA = "\U0001f4f9"
VIDEOCASSETTE = "\U0001f4fc"
MAGNIFYING_GLASS_TILTED_LEFT = "\U0001f50d"
MAGNIFYING_GLASS_TILTED_RIGHT = "\U0001f50e"
CANDLE = "\U0001f56f\ufe0f"
LIGHT_BULB = "\U0001f4a1"
FLASHLIGHT = "\U0001f526"
RED_PAPER_LANTERN = "\U0001f3ee"
DIYA_LAMP = "\U0001fa94"
NOTEBOOK_WITH_DECORATIVE_COVER = "\U0001f4d4"
CLOSED_BOOK = "\U0001f4d5"
OPEN_BOOK = "\U0001f4d6"
GREEN_BOOK = "\U0001f4d7"
BLUE_BOOK = "\U0001f4d8"
ORANGE_BOOK = "\U0001f4d9"
BOOKS = "\U0001f4da"
NOTEBOOK = "\U0001f4d3"
LEDGER = "\U0001f4d2"
PAGE_WITH_CURL = "\U0001f4c3"
SCROLL = "\U0001f4dc"
PAGE_FACING_UP = "\U0001f4c4"
NEWSPAPER = "\U0001f4f0"
ROLLED_UP_NEWSPAPER = "\U0001f5de\ufe0f"
BOOKMARK_TABS = "\U0001f4d1"
BOOKMARK = "\U0001f516"
LABEL = "\U0001f3f7\ufe0f"
MONEY_BAG = "\U0001f4b0"
COIN = "\U0001fa99"
YEN_BANKNOTE = "\U0001f4b4"
DOLLAR_BANKNOTE = "\U0001f4b5"
EURO_BANKNOTE = "\U0001f4b6"
POUND_BANKNOTE = "\U0001f4b7"
MONEY_WITH_WINGS = "\U0001f4b8"
CREDIT_CARD = "\U0001f4b3"
RECEIPT = "\U0001f9fe"
CHART_INCREASING_WITH_YEN = "\U0001f4b9"
ENVELOPE = "\u2709\ufe0f"
E_MAIL = "\U0001f4e7"
INCOMING_ENVELOPE = "\U0001f4e8"
ENVELOPE_WITH_ARROW = "\U0001f4e9"
OUTBOX_TRAY = "\U0001f4e4"
INBOX_TRAY = "\U0001f4e5"
PACKAGE = "\U0001f4e6"
CLOSED_MAILBOX_WITH_RAISED_FLAG = "\U0001f4eb"
CLOSED_MAILBOX_WITH_LOWERED_FLAG = "\U0001f4ea"
OPEN_MAILBOX_WITH_RAISED_FLAG = "\U0001f4ec"
OPEN_MAILBOX_WITH_LOWERED_FLAG = "\U0001f4ed"
POSTBOX = "\U0001f4ee"
BALLOT_BOX_WITH_BALLOT = "\U0001f5f3\ufe0f"
PENCIL = "\u270f\ufe0f"
BLACK_NIB = "\u2712\ufe0f"
FOUNTAIN_PEN = "\U0001f58b\ufe0f"
PEN = "\U0001f58a\ufe0f"
PAINTBRUSH = "\U0001f58c\ufe0f"
CRAYON = "\U0001f58d\ufe0f"
MEMO = "\U0001f4dd"
BRIEFCASE = "\U0001f4bc"
FILE_FOLDER = "\U0001f4c1"
OPEN_FILE_FOLDER = "\U0001f4c2"
CARD_INDEX_DIVIDERS = "\U0001f5c2\ufe0f"
CALENDAR = "\U0001f4c5"
TEAR_OFF_CALENDAR = "\U0001f4c6"
SPIRAL_NOTEPAD = "\U0001f5d2\ufe0f"
SPIRAL_CALENDAR = "\U0001f5d3\ufe0f"
CARD_INDEX = "\U0001f4c7"
CHART_INCREASING = "\U0001f4c8"
CHART_DECREASING = "\U0001f4c9"
BAR_CHART = "\U0001f4ca"
CLIPBOARD = "\U0001f4cb"
PUSHPIN = "\U0001f4cc"
ROUND_PUSHPIN = "\U0001f4cd"
PAPERCLIP = "\U0001f4ce"
LINKED_PAPERCLIPS = "\U0001f587\ufe0f"
STRAIGHT_RULER = "\U0001f4cf"
TRIANGULAR_RULER = "\U0001f4d0"
SCISSORS = "\u2702\ufe0f"
CARD_FILE_BOX = "\U0001f5c3\ufe0f"
FILE_CABINET = "\U0001f5c4\ufe0f"
WASTEBASKET = "\U0001f5d1\ufe0f"
LOCKED = "\U0001f512"
UNLOCKED = "\U0001f513"
LOCKED_WITH_PEN = "\U0001f50f"
LOCKED_WITH_KEY = "\U0001f510"
KEY = "\U0001f511"
OLD_KEY = "\U0001f5dd\ufe0f"
HAMMER = "\U0001f528"
AXE = "\U0001fa93"
PICK = "\u26cf\ufe0f"
HAMMER_AND_PICK = "\u2692\ufe0f"
HAMMER_AND_WRENCH = "\U0001f6e0\ufe0f"
DAGGER = "\U0001f5e1\ufe0f"
CROSSED_SWORDS = "\u2694\ufe0f"
WATER_PISTOL = "\U0001f52b"
BOOMERANG = "\U0001fa83"
BOW_AND_ARROW = "\U0001f3f9"
SHIELD = "\U0001f6e1\ufe0f"
CARPENTRY_SAW = "\U0001fa9a"
WRENCH = "\U0001f527"
SCREWDRIVER = "\U0001fa9b"
NUT_AND_BOLT = "\U0001f529"
GEAR = "\u2699\ufe0f"
CLAMP = "\U0001f5dc\ufe0f"
BALANCE_SCALE = "\u2696\ufe0f"
WHITE_CANE = "\U0001f9af"
LINK = "\U0001f517"
CHAINS = "\u26d3\ufe0f"
HOOK = "\U0001fa9d"
TOOLBOX = "\U0001f9f0"
MAGNET = "\U0001f9f2"
LADDER = "\U0001fa9c"
ALEMBIC = "\u2697\ufe0f"
TEST_TUBE = "\U0001f9ea"
PETRI_DISH = "\U0001f9eb"
DNA = "\U0001f9ec"
MICROSCOPE = "\U0001f52c"
TELESCOPE = "\U0001f52d"
SATELLITE_ANTENNA = "\U0001f4e1"
SYRINGE = "\U0001f489"
DROP_OF_BLOOD = "\U0001fa78"
PILL = "\U0001f48a"
ADHESIVE_BANDAGE = "\U0001fa79"
CRUTCH = "\U0001fa7c"
STETHOSCOPE = "\U0001fa7a"
X_RAY = "\U0001fa7b"
DOOR = "\U0001f6aa"
ELEVATOR = "\U0001f6d7"
MIRROR = "\U0001fa9e"
WINDOW = "\U0001fa9f"
BED = "\U0001f6cf\ufe0f"
COUCH_AND_LAMP = "\U0001f6cb\ufe0f"
CHAIR = "\U0001fa91"
TOILET = "\U0001f6bd"
PLUNGER = "\U0001faa0"
SHOWER = "\U0001f6bf"
BATHTUB = "\U0001f6c1"
MOUSE_TRAP = "\U0001faa4"
RAZOR = "\U0001fa92"
LOTION_BOTTLE = "\U0001f9f4"
SAFETY_PIN = "\U0001f9f7"
BROOM = "\U0001f9f9"
BASKET = "\U0001f9fa"
ROLL_OF_PAPER = "\U0001f9fb"
BUCKET = "\U0001faa3"
SOAP = "\U0001f9fc"
BUBBLES = "\U0001fae7"
TOOTHBRUSH = "\U0001faa5"
SPONGE = "\U0001f9fd"
FIRE_EXTINGUISHER = "\U0001f9ef"
SHOPPING_CART = "\U0001f6d2"
CIGARETTE = "\U0001f6ac"
COFFIN = "\u26b0\ufe0f"
HEADSTONE = "\U0001faa6"
FUNERAL_URN = "\u26b1\ufe0f"
MOAI = "\U0001f5ff"
PLACARD = "\U0001faa7"
IDENTIFICATION_CARD = "\U0001faaa"
ATM_SIGN = "\U0001f3e7"
LITTER_IN_BIN_SIGN = "\U0001f6ae"
POTABLE_WATER = "\U0001f6b0"
WHEELCHAIR_SYMBOL = "\u267f"
MEN_S_ROOM = "\U0001f6b9"
WOMEN_S_ROOM = "\U0001f6ba"
RESTROOM = "\U0001f6bb"
BABY_SYMBOL = "\U0001f6bc"
WATER_CLOSET = "\U0001f6be"
PASSPORT_CONTROL = "\U0001f6c2"
CUSTOMS = "\U0001f6c3"
BAGGAGE_CLAIM = "\U0001f6c4"
LEFT_LUGGAGE = "\U0001f6c5"
# Warning & prohibition signs, arrows, religious symbols and the zodiac.
WARNING = "\u26a0\ufe0f"
CHILDREN_CROSSING = "\U0001f6b8"
NO_ENTRY = "\u26d4"
PROHIBITED = "\U0001f6ab"
NO_BICYCLES = "\U0001f6b3"
NO_SMOKING = "\U0001f6ad"
NO_LITTERING = "\U0001f6af"
NON_POTABLE_WATER = "\U0001f6b1"
NO_PEDESTRIANS = "\U0001f6b7"
NO_MOBILE_PHONES = "\U0001f4f5"
NO_ONE_UNDER_EIGHTEEN = "\U0001f51e"
RADIOACTIVE = "\u2622\ufe0f"
BIOHAZARD = "\u2623\ufe0f"
UP_ARROW = "\u2b06\ufe0f"
UP_RIGHT_ARROW = "\u2197\ufe0f"
RIGHT_ARROW = "\u27a1\ufe0f"
DOWN_RIGHT_ARROW = "\u2198\ufe0f"
DOWN_ARROW = "\u2b07\ufe0f"
DOWN_LEFT_ARROW = "\u2199\ufe0f"
LEFT_ARROW = "\u2b05\ufe0f"
UP_LEFT_ARROW = "\u2196\ufe0f"
UP_DOWN_ARROW = "\u2195\ufe0f"
LEFT_RIGHT_ARROW = "\u2194\ufe0f"
RIGHT_ARROW_CURVING_LEFT = "\u21a9\ufe0f"
LEFT_ARROW_CURVING_RIGHT = "\u21aa\ufe0f"
RIGHT_ARROW_CURVING_UP = "\u2934\ufe0f"
RIGHT_ARROW_CURVING_DOWN = "\u2935\ufe0f"
CLOCKWISE_VERTICAL_ARROWS = "\U0001f503"
COUNTERCLOCKWISE_ARROWS_BUTTON = "\U0001f504"
BACK_ARROW = "\U0001f519"
END_ARROW = "\U0001f51a"
ON_ARROW = "\U0001f51b"
SOON_ARROW = "\U0001f51c"
TOP_ARROW = "\U0001f51d"
PLACE_OF_WORSHIP = "\U0001f6d0"
ATOM_SYMBOL = "\u269b\ufe0f"
OM = "\U0001f549\ufe0f"
STAR_OF_DAVID = "\u2721\ufe0f"
WHEEL_OF_DHARMA = "\u2638\ufe0f"
YIN_YANG = "\u262f\ufe0f"
LATIN_CROSS = "\u271d\ufe0f"
ORTHODOX_CROSS = "\u2626\ufe0f"
STAR_AND_CRESCENT = "\u262a\ufe0f"
PEACE_SYMBOL = "\u262e\ufe0f"
MENORAH = "\U0001f54e"
DOTTED_SIX_POINTED_STAR = "\U0001f52f"
# Zodiac signs U+2648..U+2653 plus Ophiuchus.
ARIES = "\u2648"
TAURUS = "\u2649"
GEMINI = "\u264a"
CANCER = "\u264b"
LEO = "\u264c"
VIRGO = "\u264d"
LIBRA = "\u264e"
SCORPIO = "\u264f"
SAGITTARIUS = "\u2650"
CAPRICORN = "\u2651"
AQUARIUS = "\u2652"
PISCES = "\u2653"
OPHIUCHUS = "\u26ce"
# Audio/video transport controls, signal icons, gender & math symbols,
# punctuation marks and legal marks.
SHUFFLE_TRACKS_BUTTON = "\U0001f500"
REPEAT_BUTTON = "\U0001f501"
REPEAT_SINGLE_BUTTON = "\U0001f502"
PLAY_BUTTON = "\u25b6\ufe0f"
FAST_FORWARD_BUTTON = "\u23e9"
NEXT_TRACK_BUTTON = "\u23ed\ufe0f"
PLAY_OR_PAUSE_BUTTON = "\u23ef\ufe0f"
REVERSE_BUTTON = "\u25c0\ufe0f"
FAST_REVERSE_BUTTON = "\u23ea"
LAST_TRACK_BUTTON = "\u23ee\ufe0f"
UPWARDS_BUTTON = "\U0001f53c"
FAST_UP_BUTTON = "\u23eb"
DOWNWARDS_BUTTON = "\U0001f53d"
FAST_DOWN_BUTTON = "\u23ec"
PAUSE_BUTTON = "\u23f8\ufe0f"
STOP_BUTTON = "\u23f9\ufe0f"
RECORD_BUTTON = "\u23fa\ufe0f"
EJECT_BUTTON = "\u23cf\ufe0f"
CINEMA = "\U0001f3a6"
DIM_BUTTON = "\U0001f505"
BRIGHT_BUTTON = "\U0001f506"
ANTENNA_BARS = "\U0001f4f6"
VIBRATION_MODE = "\U0001f4f3"
MOBILE_PHONE_OFF = "\U0001f4f4"
FEMALE_SIGN = "\u2640\ufe0f"
MALE_SIGN = "\u2642\ufe0f"
TRANSGENDER_SYMBOL = "\u26a7\ufe0f"
MULTIPLY = "\u2716\ufe0f"
PLUS = "\u2795"
MINUS = "\u2796"
DIVIDE = "\u2797"
HEAVY_EQUALS_SIGN = "\U0001f7f0"
INFINITY = "\u267e\ufe0f"
DOUBLE_EXCLAMATION_MARK = "\u203c\ufe0f"
EXCLAMATION_QUESTION_MARK = "\u2049\ufe0f"
RED_QUESTION_MARK = "\u2753"
WHITE_QUESTION_MARK = "\u2754"
WHITE_EXCLAMATION_MARK = "\u2755"
RED_EXCLAMATION_MARK = "\u2757"
WAVY_DASH = "\u3030\ufe0f"
CURRENCY_EXCHANGE = "\U0001f4b1"
HEAVY_DOLLAR_SIGN = "\U0001f4b2"
MEDICAL_SYMBOL = "\u2695\ufe0f"
RECYCLING_SYMBOL = "\u267b\ufe0f"
FLEUR_DE_LIS = "\u269c\ufe0f"
TRIDENT_EMBLEM = "\U0001f531"
NAME_BADGE = "\U0001f4db"
JAPANESE_SYMBOL_FOR_BEGINNER = "\U0001f530"
HOLLOW_RED_CIRCLE = "\u2b55"
CHECK_MARK_BUTTON = "\u2705"
CHECK_BOX_WITH_CHECK = "\u2611\ufe0f"
CHECK_MARK = "\u2714\ufe0f"
CROSS_MARK = "\u274c"
CROSS_MARK_BUTTON = "\u274e"
CURLY_LOOP = "\u27b0"
DOUBLE_CURLY_LOOP = "\u27bf"
PART_ALTERNATION_MARK = "\u303d\ufe0f"
EIGHT_SPOKED_ASTERISK = "\u2733\ufe0f"
EIGHT_POINTED_STAR = "\u2734\ufe0f"
SPARKLE = "\u2747\ufe0f"
COPYRIGHT = "\xa9\ufe0f"
REGISTERED = "\xae\ufe0f"
TRADE_MARK = "\u2122\ufe0f"
# Keycap sequences (base character + VS16 + U+20E3 COMBINING ENCLOSING
# KEYCAP), alphanumeric buttons and Japanese "enclosed ideograph" buttons.
KEYCAP_NUMBER_SIGN = "#\ufe0f\u20e3"
KEYCAP_ASTERISK = "*\ufe0f\u20e3"
KEYCAP_DIGIT_ZERO = "0\ufe0f\u20e3"
KEYCAP_DIGIT_ONE = "1\ufe0f\u20e3"
KEYCAP_DIGIT_TWO = "2\ufe0f\u20e3"
KEYCAP_DIGIT_THREE = "3\ufe0f\u20e3"
KEYCAP_DIGIT_FOUR = "4\ufe0f\u20e3"
KEYCAP_DIGIT_FIVE = "5\ufe0f\u20e3"
KEYCAP_DIGIT_SIX = "6\ufe0f\u20e3"
KEYCAP_DIGIT_SEVEN = "7\ufe0f\u20e3"
KEYCAP_DIGIT_EIGHT = "8\ufe0f\u20e3"
KEYCAP_DIGIT_NINE = "9\ufe0f\u20e3"
KEYCAP_10 = "\U0001f51f"
INPUT_LATIN_UPPERCASE = "\U0001f520"
INPUT_LATIN_LOWERCASE = "\U0001f521"
INPUT_NUMBERS = "\U0001f522"
INPUT_SYMBOLS = "\U0001f523"
INPUT_LATIN_LETTERS = "\U0001f524"
A_BUTTON_BLOOD_TYPE = "\U0001f170\ufe0f"
AB_BUTTON_BLOOD_TYPE = "\U0001f18e"
B_BUTTON_BLOOD_TYPE = "\U0001f171\ufe0f"
CL_BUTTON = "\U0001f191"
COOL_BUTTON = "\U0001f192"
FREE_BUTTON = "\U0001f193"
INFORMATION = "\u2139\ufe0f"
ID_BUTTON = "\U0001f194"
CIRCLED_M = "\u24c2\ufe0f"
NEW_BUTTON = "\U0001f195"
NG_BUTTON = "\U0001f196"
O_BUTTON_BLOOD_TYPE = "\U0001f17e\ufe0f"
OK_BUTTON = "\U0001f197"
P_BUTTON = "\U0001f17f\ufe0f"
SOS_BUTTON = "\U0001f198"
UP_BUTTON = "\U0001f199"
VS_BUTTON = "\U0001f19a"
JAPANESE_HERE_BUTTON = "\U0001f201"
JAPANESE_SERVICE_CHARGE_BUTTON = "\U0001f202\ufe0f"
JAPANESE_MONTHLY_AMOUNT_BUTTON = "\U0001f237\ufe0f"
JAPANESE_NOT_FREE_OF_CHARGE_BUTTON = "\U0001f236"
JAPANESE_RESERVED_BUTTON = "\U0001f22f"
JAPANESE_BARGAIN_BUTTON = "\U0001f250"
JAPANESE_DISCOUNT_BUTTON = "\U0001f239"
JAPANESE_FREE_OF_CHARGE_BUTTON = "\U0001f21a"
JAPANESE_PROHIBITED_BUTTON = "\U0001f232"
JAPANESE_ACCEPTABLE_BUTTON = "\U0001f251"
JAPANESE_APPLICATION_BUTTON = "\U0001f238"
JAPANESE_PASSING_GRADE_BUTTON = "\U0001f234"
JAPANESE_VACANCY_BUTTON = "\U0001f233"
JAPANESE_CONGRATULATIONS_BUTTON = "\u3297\ufe0f"
JAPANESE_SECRET_BUTTON = "\u3299\ufe0f"
JAPANESE_OPEN_FOR_BUSINESS_BUTTON = "\U0001f23a"
JAPANESE_NO_VACANCY_BUTTON = "\U0001f235"
# Geometric shapes (circles, squares, diamonds, triangles) and generic
# flags, including ZWJ flag sequences (rainbow, transgender, pirate).
RED_CIRCLE = "\U0001f534"
ORANGE_CIRCLE = "\U0001f7e0"
YELLOW_CIRCLE = "\U0001f7e1"
GREEN_CIRCLE = "\U0001f7e2"
BLUE_CIRCLE = "\U0001f535"
PURPLE_CIRCLE = "\U0001f7e3"
BROWN_CIRCLE = "\U0001f7e4"
BLACK_CIRCLE = "\u26ab"
WHITE_CIRCLE = "\u26aa"
RED_SQUARE = "\U0001f7e5"
ORANGE_SQUARE = "\U0001f7e7"
YELLOW_SQUARE = "\U0001f7e8"
GREEN_SQUARE = "\U0001f7e9"
BLUE_SQUARE = "\U0001f7e6"
PURPLE_SQUARE = "\U0001f7ea"
BROWN_SQUARE = "\U0001f7eb"
BLACK_LARGE_SQUARE = "\u2b1b"
WHITE_LARGE_SQUARE = "\u2b1c"
BLACK_MEDIUM_SQUARE = "\u25fc\ufe0f"
WHITE_MEDIUM_SQUARE = "\u25fb\ufe0f"
BLACK_MEDIUM_SMALL_SQUARE = "\u25fe"
WHITE_MEDIUM_SMALL_SQUARE = "\u25fd"
BLACK_SMALL_SQUARE = "\u25aa\ufe0f"
WHITE_SMALL_SQUARE = "\u25ab\ufe0f"
LARGE_ORANGE_DIAMOND = "\U0001f536"
LARGE_BLUE_DIAMOND = "\U0001f537"
SMALL_ORANGE_DIAMOND = "\U0001f538"
SMALL_BLUE_DIAMOND = "\U0001f539"
RED_TRIANGLE_POINTED_UP = "\U0001f53a"
RED_TRIANGLE_POINTED_DOWN = "\U0001f53b"
DIAMOND_WITH_A_DOT = "\U0001f4a0"
RADIO_BUTTON = "\U0001f518"
WHITE_SQUARE_BUTTON = "\U0001f533"
BLACK_SQUARE_BUTTON = "\U0001f532"
CHEQUERED_FLAG = "\U0001f3c1"
TRIANGULAR_FLAG = "\U0001f6a9"
CROSSED_FLAGS = "\U0001f38c"
BLACK_FLAG = "\U0001f3f4"
WHITE_FLAG = "\U0001f3f3\ufe0f"
RAINBOW_FLAG = "\U0001f3f3\ufe0f\u200d\U0001f308"
TRANSGENDER_FLAG = "\U0001f3f3\ufe0f\u200d\u26a7\ufe0f"
PIRATE_FLAG = "\U0001f3f4\u200d\u2620\ufe0f"
# --- Country and region flags. ---
# Each flag is a pair of Regional Indicator Symbols (U+1F1E6..U+1F1FF)
# spelling the region's two-letter ISO 3166-1 code (e.g. "JP" for Japan).
# NOTE(review): the "_ANDAMP_" in some names looks like an HTML-scrape
# artifact of "&amp;" (i.e. "&") from the generator -- confirm upstream
# before relying on these identifiers.
FLAG_ASCENSION_ISLAND = "\U0001f1e6\U0001f1e8"
FLAG_ANDORRA = "\U0001f1e6\U0001f1e9"
FLAG_UNITED_ARAB_EMIRATES = "\U0001f1e6\U0001f1ea"
FLAG_AFGHANISTAN = "\U0001f1e6\U0001f1eb"
FLAG_ANTIGUA_ANDAMP_BARBUDA = "\U0001f1e6\U0001f1ec"
FLAG_ANGUILLA = "\U0001f1e6\U0001f1ee"
FLAG_ALBANIA = "\U0001f1e6\U0001f1f1"
FLAG_ARMENIA = "\U0001f1e6\U0001f1f2"
FLAG_ANGOLA = "\U0001f1e6\U0001f1f4"
FLAG_ANTARCTICA = "\U0001f1e6\U0001f1f6"
FLAG_ARGENTINA = "\U0001f1e6\U0001f1f7"
FLAG_AMERICAN_SAMOA = "\U0001f1e6\U0001f1f8"
FLAG_AUSTRIA = "\U0001f1e6\U0001f1f9"
FLAG_AUSTRALIA = "\U0001f1e6\U0001f1fa"
FLAG_ARUBA = "\U0001f1e6\U0001f1fc"
FLAG_ALAND_ISLANDS = "\U0001f1e6\U0001f1fd"
FLAG_AZERBAIJAN = "\U0001f1e6\U0001f1ff"
FLAG_BOSNIA_ANDAMP_HERZEGOVINA = "\U0001f1e7\U0001f1e6"
FLAG_BARBADOS = "\U0001f1e7\U0001f1e7"
FLAG_BANGLADESH = "\U0001f1e7\U0001f1e9"
FLAG_BELGIUM = "\U0001f1e7\U0001f1ea"
FLAG_BURKINA_FASO = "\U0001f1e7\U0001f1eb"
FLAG_BULGARIA = "\U0001f1e7\U0001f1ec"
FLAG_BAHRAIN = "\U0001f1e7\U0001f1ed"
FLAG_BURUNDI = "\U0001f1e7\U0001f1ee"
FLAG_BENIN = "\U0001f1e7\U0001f1ef"
FLAG_ST_BARTHELEMY = "\U0001f1e7\U0001f1f1"
FLAG_BERMUDA = "\U0001f1e7\U0001f1f2"
FLAG_BRUNEI = "\U0001f1e7\U0001f1f3"
FLAG_BOLIVIA = "\U0001f1e7\U0001f1f4"
FLAG_CARIBBEAN_NETHERLANDS = "\U0001f1e7\U0001f1f6"
FLAG_BRAZIL = "\U0001f1e7\U0001f1f7"
FLAG_BAHAMAS = "\U0001f1e7\U0001f1f8"
FLAG_BHUTAN = "\U0001f1e7\U0001f1f9"
FLAG_BOUVET_ISLAND = "\U0001f1e7\U0001f1fb"
FLAG_BOTSWANA = "\U0001f1e7\U0001f1fc"
FLAG_BELARUS = "\U0001f1e7\U0001f1fe"
FLAG_BELIZE = "\U0001f1e7\U0001f1ff"
FLAG_CANADA = "\U0001f1e8\U0001f1e6"
FLAG_COCOS_KEELING_ISLANDS = "\U0001f1e8\U0001f1e8"
FLAG_CONGO_KINSHASA = "\U0001f1e8\U0001f1e9"
FLAG_CENTRAL_AFRICAN_REPUBLIC = "\U0001f1e8\U0001f1eb"
FLAG_CONGO_BRAZZAVILLE = "\U0001f1e8\U0001f1ec"
FLAG_SWITZERLAND = "\U0001f1e8\U0001f1ed"
FLAG_COTE_D_IVOIRE = "\U0001f1e8\U0001f1ee"
FLAG_COOK_ISLANDS = "\U0001f1e8\U0001f1f0"
FLAG_CHILE = "\U0001f1e8\U0001f1f1"
FLAG_CAMEROON = "\U0001f1e8\U0001f1f2"
FLAG_CHINA = "\U0001f1e8\U0001f1f3"
FLAG_COLOMBIA = "\U0001f1e8\U0001f1f4"
FLAG_CLIPPERTON_ISLAND = "\U0001f1e8\U0001f1f5"
FLAG_COSTA_RICA = "\U0001f1e8\U0001f1f7"
FLAG_CUBA = "\U0001f1e8\U0001f1fa"
FLAG_CAPE_VERDE = "\U0001f1e8\U0001f1fb"
FLAG_CURACAO = "\U0001f1e8\U0001f1fc"
FLAG_CHRISTMAS_ISLAND = "\U0001f1e8\U0001f1fd"
FLAG_CYPRUS = "\U0001f1e8\U0001f1fe"
FLAG_CZECHIA = "\U0001f1e8\U0001f1ff"
FLAG_GERMANY = "\U0001f1e9\U0001f1ea"
FLAG_DIEGO_GARCIA = "\U0001f1e9\U0001f1ec"
FLAG_DJIBOUTI = "\U0001f1e9\U0001f1ef"
FLAG_DENMARK = "\U0001f1e9\U0001f1f0"
FLAG_DOMINICA = "\U0001f1e9\U0001f1f2"
FLAG_DOMINICAN_REPUBLIC = "\U0001f1e9\U0001f1f4"
FLAG_ALGERIA = "\U0001f1e9\U0001f1ff"
FLAG_CEUTA_ANDAMP_MELILLA = "\U0001f1ea\U0001f1e6"
FLAG_ECUADOR = "\U0001f1ea\U0001f1e8"
FLAG_ESTONIA = "\U0001f1ea\U0001f1ea"
FLAG_EGYPT = "\U0001f1ea\U0001f1ec"
FLAG_WESTERN_SAHARA = "\U0001f1ea\U0001f1ed"
FLAG_ERITREA = "\U0001f1ea\U0001f1f7"
FLAG_SPAIN = "\U0001f1ea\U0001f1f8"
FLAG_ETHIOPIA = "\U0001f1ea\U0001f1f9"
FLAG_EUROPEAN_UNION = "\U0001f1ea\U0001f1fa"
FLAG_FINLAND = "\U0001f1eb\U0001f1ee"
FLAG_FIJI = "\U0001f1eb\U0001f1ef"
FLAG_FALKLAND_ISLANDS = "\U0001f1eb\U0001f1f0"
FLAG_MICRONESIA = "\U0001f1eb\U0001f1f2"
FLAG_FAROE_ISLANDS = "\U0001f1eb\U0001f1f4"
FLAG_FRANCE = "\U0001f1eb\U0001f1f7"
FLAG_GABON = "\U0001f1ec\U0001f1e6"
FLAG_UNITED_KINGDOM = "\U0001f1ec\U0001f1e7"
FLAG_GRENADA = "\U0001f1ec\U0001f1e9"
FLAG_GEORGIA = "\U0001f1ec\U0001f1ea"
FLAG_FRENCH_GUIANA = "\U0001f1ec\U0001f1eb"
FLAG_GUERNSEY = "\U0001f1ec\U0001f1ec"
FLAG_GHANA = "\U0001f1ec\U0001f1ed"
FLAG_GIBRALTAR = "\U0001f1ec\U0001f1ee"
FLAG_GREENLAND = "\U0001f1ec\U0001f1f1"
FLAG_GAMBIA = "\U0001f1ec\U0001f1f2"
FLAG_GUINEA = "\U0001f1ec\U0001f1f3"
FLAG_GUADELOUPE = "\U0001f1ec\U0001f1f5"
FLAG_EQUATORIAL_GUINEA = "\U0001f1ec\U0001f1f6"
FLAG_GREECE = "\U0001f1ec\U0001f1f7"
FLAG_SOUTH_GEORGIA_ANDAMP_SOUTH_SANDWICH_ISLANDS = "\U0001f1ec\U0001f1f8"
FLAG_GUATEMALA = "\U0001f1ec\U0001f1f9"
FLAG_GUAM = "\U0001f1ec\U0001f1fa"
FLAG_GUINEA_BISSAU = "\U0001f1ec\U0001f1fc"
FLAG_GUYANA = "\U0001f1ec\U0001f1fe"
FLAG_HONG_KONG_SAR_CHINA = "\U0001f1ed\U0001f1f0"
FLAG_HEARD_ANDAMP_MCDONALD_ISLANDS = "\U0001f1ed\U0001f1f2"
FLAG_HONDURAS = "\U0001f1ed\U0001f1f3"
FLAG_CROATIA = "\U0001f1ed\U0001f1f7"
FLAG_HAITI = "\U0001f1ed\U0001f1f9"
FLAG_HUNGARY = "\U0001f1ed\U0001f1fa"
FLAG_CANARY_ISLANDS = "\U0001f1ee\U0001f1e8"
FLAG_INDONESIA = "\U0001f1ee\U0001f1e9"
FLAG_IRELAND = "\U0001f1ee\U0001f1ea"
FLAG_ISRAEL = "\U0001f1ee\U0001f1f1"
FLAG_ISLE_OF_MAN = "\U0001f1ee\U0001f1f2"
FLAG_INDIA = "\U0001f1ee\U0001f1f3"
FLAG_BRITISH_INDIAN_OCEAN_TERRITORY = "\U0001f1ee\U0001f1f4"
FLAG_IRAQ = "\U0001f1ee\U0001f1f6"
FLAG_IRAN = "\U0001f1ee\U0001f1f7"
FLAG_ICELAND = "\U0001f1ee\U0001f1f8"
FLAG_ITALY = "\U0001f1ee\U0001f1f9"
FLAG_JERSEY = "\U0001f1ef\U0001f1ea"
FLAG_JAMAICA = "\U0001f1ef\U0001f1f2"
FLAG_JORDAN = "\U0001f1ef\U0001f1f4"
FLAG_JAPAN = "\U0001f1ef\U0001f1f5"
FLAG_KENYA = "\U0001f1f0\U0001f1ea"
FLAG_KYRGYZSTAN = "\U0001f1f0\U0001f1ec"
FLAG_CAMBODIA = "\U0001f1f0\U0001f1ed"
FLAG_KIRIBATI = "\U0001f1f0\U0001f1ee"
FLAG_COMOROS = "\U0001f1f0\U0001f1f2"
FLAG_ST_KITTS_ANDAMP_NEVIS = "\U0001f1f0\U0001f1f3"
FLAG_NORTH_KOREA = "\U0001f1f0\U0001f1f5"
FLAG_SOUTH_KOREA = "\U0001f1f0\U0001f1f7"
FLAG_KUWAIT = "\U0001f1f0\U0001f1fc"
FLAG_CAYMAN_ISLANDS = "\U0001f1f0\U0001f1fe"
FLAG_KAZAKHSTAN = "\U0001f1f0\U0001f1ff"
FLAG_LAOS = "\U0001f1f1\U0001f1e6"
FLAG_LEBANON = "\U0001f1f1\U0001f1e7"
FLAG_ST_LUCIA = "\U0001f1f1\U0001f1e8"
FLAG_LIECHTENSTEIN = "\U0001f1f1\U0001f1ee"
FLAG_SRI_LANKA = "\U0001f1f1\U0001f1f0"
FLAG_LIBERIA = "\U0001f1f1\U0001f1f7"
FLAG_LESOTHO = "\U0001f1f1\U0001f1f8"
FLAG_LITHUANIA = "\U0001f1f1\U0001f1f9"
FLAG_LUXEMBOURG = "\U0001f1f1\U0001f1fa"
FLAG_LATVIA = "\U0001f1f1\U0001f1fb"
FLAG_LIBYA = "\U0001f1f1\U0001f1fe"
FLAG_MOROCCO = "\U0001f1f2\U0001f1e6"
FLAG_MONACO = "\U0001f1f2\U0001f1e8"
FLAG_MOLDOVA = "\U0001f1f2\U0001f1e9"
FLAG_MONTENEGRO = "\U0001f1f2\U0001f1ea"
FLAG_ST_MARTIN = "\U0001f1f2\U0001f1eb"
FLAG_MADAGASCAR = "\U0001f1f2\U0001f1ec"
FLAG_MARSHALL_ISLANDS = "\U0001f1f2\U0001f1ed"
FLAG_NORTH_MACEDONIA = "\U0001f1f2\U0001f1f0"
FLAG_MALI = "\U0001f1f2\U0001f1f1"
FLAG_MYANMAR_BURMA = "\U0001f1f2\U0001f1f2"
FLAG_MONGOLIA = "\U0001f1f2\U0001f1f3"
FLAG_MACAO_SAR_CHINA = "\U0001f1f2\U0001f1f4"
FLAG_NORTHERN_MARIANA_ISLANDS = "\U0001f1f2\U0001f1f5"
FLAG_MARTINIQUE = "\U0001f1f2\U0001f1f6"
FLAG_MAURITANIA = "\U0001f1f2\U0001f1f7"
FLAG_MONTSERRAT = "\U0001f1f2\U0001f1f8"
FLAG_MALTA = "\U0001f1f2\U0001f1f9"
FLAG_MAURITIUS = "\U0001f1f2\U0001f1fa"
FLAG_MALDIVES = "\U0001f1f2\U0001f1fb"
FLAG_MALAWI = "\U0001f1f2\U0001f1fc"
FLAG_MEXICO = "\U0001f1f2\U0001f1fd"
FLAG_MALAYSIA = "\U0001f1f2\U0001f1fe"
FLAG_MOZAMBIQUE = "\U0001f1f2\U0001f1ff"
FLAG_NAMIBIA = "\U0001f1f3\U0001f1e6"
FLAG_NEW_CALEDONIA = "\U0001f1f3\U0001f1e8"
FLAG_NIGER = "\U0001f1f3\U0001f1ea"
FLAG_NORFOLK_ISLAND = "\U0001f1f3\U0001f1eb"
FLAG_NIGERIA = "\U0001f1f3\U0001f1ec"
FLAG_NICARAGUA = "\U0001f1f3\U0001f1ee"
FLAG_NETHERLANDS = "\U0001f1f3\U0001f1f1"
FLAG_NORWAY = "\U0001f1f3\U0001f1f4"
FLAG_NEPAL = "\U0001f1f3\U0001f1f5"
FLAG_NAURU = "\U0001f1f3\U0001f1f7"
FLAG_NIUE = "\U0001f1f3\U0001f1fa"
FLAG_NEW_ZEALAND = "\U0001f1f3\U0001f1ff"
FLAG_OMAN = "\U0001f1f4\U0001f1f2"
FLAG_PANAMA = "\U0001f1f5\U0001f1e6"
FLAG_PERU = "\U0001f1f5\U0001f1ea"
FLAG_FRENCH_POLYNESIA = "\U0001f1f5\U0001f1eb"
FLAG_PAPUA_NEW_GUINEA = "\U0001f1f5\U0001f1ec"
FLAG_PHILIPPINES = "\U0001f1f5\U0001f1ed"
FLAG_PAKISTAN = "\U0001f1f5\U0001f1f0"
FLAG_POLAND = "\U0001f1f5\U0001f1f1"
FLAG_ST_PIERRE_ANDAMP_MIQUELON = "\U0001f1f5\U0001f1f2"
FLAG_PITCAIRN_ISLANDS = "\U0001f1f5\U0001f1f3"
FLAG_PUERTO_RICO = "\U0001f1f5\U0001f1f7"
FLAG_PALESTINIAN_TERRITORIES = "\U0001f1f5\U0001f1f8"
FLAG_PORTUGAL = "\U0001f1f5\U0001f1f9"
FLAG_PALAU = "\U0001f1f5\U0001f1fc"
FLAG_PARAGUAY = "\U0001f1f5\U0001f1fe"
FLAG_QATAR = "\U0001f1f6\U0001f1e6"
FLAG_REUNION = "\U0001f1f7\U0001f1ea"
FLAG_ROMANIA = "\U0001f1f7\U0001f1f4"
FLAG_SERBIA = "\U0001f1f7\U0001f1f8"
FLAG_RUSSIA = "\U0001f1f7\U0001f1fa"
FLAG_RWANDA = "\U0001f1f7\U0001f1fc"
FLAG_SAUDI_ARABIA = "\U0001f1f8\U0001f1e6"
FLAG_SOLOMON_ISLANDS = "\U0001f1f8\U0001f1e7"
FLAG_SEYCHELLES = "\U0001f1f8\U0001f1e8"
FLAG_SUDAN = "\U0001f1f8\U0001f1e9"
FLAG_SWEDEN = "\U0001f1f8\U0001f1ea"
FLAG_SINGAPORE = "\U0001f1f8\U0001f1ec"
FLAG_ST_HELENA = "\U0001f1f8\U0001f1ed"
FLAG_SLOVENIA = "\U0001f1f8\U0001f1ee"
FLAG_SVALBARD_ANDAMP_JAN_MAYEN = "\U0001f1f8\U0001f1ef"
FLAG_SLOVAKIA = "\U0001f1f8\U0001f1f0"
FLAG_SIERRA_LEONE = "\U0001f1f8\U0001f1f1"
FLAG_SAN_MARINO = "\U0001f1f8\U0001f1f2"
FLAG_SENEGAL = "\U0001f1f8\U0001f1f3"
FLAG_SOMALIA = "\U0001f1f8\U0001f1f4"
FLAG_SURINAME = "\U0001f1f8\U0001f1f7"
FLAG_SOUTH_SUDAN = "\U0001f1f8\U0001f1f8"
FLAG_SAO_TOME_ANDAMP_PRINCIPE = "\U0001f1f8\U0001f1f9"
FLAG_EL_SALVADOR = "\U0001f1f8\U0001f1fb"
FLAG_SINT_MAARTEN = "\U0001f1f8\U0001f1fd"
FLAG_SYRIA = "\U0001f1f8\U0001f1fe"
FLAG_ESWATINI = "\U0001f1f8\U0001f1ff"
FLAG_TRISTAN_DA_CUNHA = "\U0001f1f9\U0001f1e6"
FLAG_TURKS_ANDAMP_CAICOS_ISLANDS = "\U0001f1f9\U0001f1e8"
FLAG_CHAD = "\U0001f1f9\U0001f1e9"
FLAG_FRENCH_SOUTHERN_TERRITORIES = "\U0001f1f9\U0001f1eb"
FLAG_TOGO = "\U0001f1f9\U0001f1ec"
FLAG_THAILAND = "\U0001f1f9\U0001f1ed"
FLAG_TAJIKISTAN = "\U0001f1f9\U0001f1ef"
FLAG_TOKELAU = "\U0001f1f9\U0001f1f0"
FLAG_TIMOR_LESTE = "\U0001f1f9\U0001f1f1"
FLAG_TURKMENISTAN = "\U0001f1f9\U0001f1f2"
FLAG_TUNISIA = "\U0001f1f9\U0001f1f3"
FLAG_TONGA = "\U0001f1f9\U0001f1f4"
FLAG_TURKEY = "\U0001f1f9\U0001f1f7"
FLAG_TRINIDAD_ANDAMP_TOBAGO = "\U0001f1f9\U0001f1f9"
FLAG_TUVALU = "\U0001f1f9\U0001f1fb"
FLAG_TAIWAN = "\U0001f1f9\U0001f1fc"
FLAG_TANZANIA = "\U0001f1f9\U0001f1ff"
FLAG_UKRAINE = "\U0001f1fa\U0001f1e6"
FLAG_UGANDA = "\U0001f1fa\U0001f1ec"
FLAG_U_S_OUTLYING_ISLANDS = "\U0001f1fa\U0001f1f2"
FLAG_UNITED_NATIONS = "\U0001f1fa\U0001f1f3"
FLAG_UNITED_STATES = "\U0001f1fa\U0001f1f8"
FLAG_URUGUAY = "\U0001f1fa\U0001f1fe"
FLAG_UZBEKISTAN = "\U0001f1fa\U0001f1ff"
FLAG_VATICAN_CITY = "\U0001f1fb\U0001f1e6"
FLAG_ST_VINCENT_ANDAMP_GRENADINES = "\U0001f1fb\U0001f1e8"
FLAG_VENEZUELA = "\U0001f1fb\U0001f1ea"
FLAG_BRITISH_VIRGIN_ISLANDS = "\U0001f1fb\U0001f1ec"
FLAG_U_S_VIRGIN_ISLANDS = "\U0001f1fb\U0001f1ee"
FLAG_VIETNAM = "\U0001f1fb\U0001f1f3"
FLAG_VANUATU = "\U0001f1fb\U0001f1fa"
FLAG_WALLIS_ANDAMP_FUTUNA = "\U0001f1fc\U0001f1eb"
FLAG_SAMOA = "\U0001f1fc\U0001f1f8"
FLAG_KOSOVO = "\U0001f1fd\U0001f1f0"
FLAG_YEMEN = "\U0001f1fe\U0001f1ea"
FLAG_MAYOTTE = "\U0001f1fe\U0001f1f9"
FLAG_SOUTH_AFRICA = "\U0001f1ff\U0001f1e6"
FLAG_ZAMBIA = "\U0001f1ff\U0001f1f2"
FLAG_ZIMBABWE = "\U0001f1ff\U0001f1fc"
# Subdivision flags: BLACK FLAG + a tag-character sequence spelling the
# region code (e.g. "gbeng") + CANCEL TAG terminator (U+E007F).
FLAG_ENGLAND = "\U0001f3f4\U000e0067\U000e0062\U000e0065\U000e006e\U000e0067\U000e007f"
FLAG_SCOTLAND = "\U0001f3f4\U000e0067\U000e0062\U000e0073\U000e0063\U000e0074\U000e007f"
FLAG_WALES = "\U0001f3f4\U000e0067\U000e0062\U000e0077\U000e006c\U000e0073\U000e007f"
# --- Regional Indicator Symbols A..Z (U+1F1E6..U+1F1FF). ---
# Two of these in sequence render as the flag for that ISO country code.
REGIONAL_INDICATOR_SYMBOL_LETTER_A = "\U0001f1e6"
REGIONAL_INDICATOR_SYMBOL_LETTER_B = "\U0001f1e7"
REGIONAL_INDICATOR_SYMBOL_LETTER_C = "\U0001f1e8"
REGIONAL_INDICATOR_SYMBOL_LETTER_D = "\U0001f1e9"
REGIONAL_INDICATOR_SYMBOL_LETTER_E = "\U0001f1ea"
REGIONAL_INDICATOR_SYMBOL_LETTER_F = "\U0001f1eb"
REGIONAL_INDICATOR_SYMBOL_LETTER_G = "\U0001f1ec"
REGIONAL_INDICATOR_SYMBOL_LETTER_H = "\U0001f1ed"
REGIONAL_INDICATOR_SYMBOL_LETTER_I = "\U0001f1ee"
REGIONAL_INDICATOR_SYMBOL_LETTER_J = "\U0001f1ef"
REGIONAL_INDICATOR_SYMBOL_LETTER_K = "\U0001f1f0"
REGIONAL_INDICATOR_SYMBOL_LETTER_L = "\U0001f1f1"
REGIONAL_INDICATOR_SYMBOL_LETTER_M = "\U0001f1f2"
REGIONAL_INDICATOR_SYMBOL_LETTER_N = "\U0001f1f3"
REGIONAL_INDICATOR_SYMBOL_LETTER_O = "\U0001f1f4"
REGIONAL_INDICATOR_SYMBOL_LETTER_P = "\U0001f1f5"
REGIONAL_INDICATOR_SYMBOL_LETTER_Q = "\U0001f1f6"
REGIONAL_INDICATOR_SYMBOL_LETTER_R = "\U0001f1f7"
REGIONAL_INDICATOR_SYMBOL_LETTER_S = "\U0001f1f8"
REGIONAL_INDICATOR_SYMBOL_LETTER_T = "\U0001f1f9"
REGIONAL_INDICATOR_SYMBOL_LETTER_U = "\U0001f1fa"
REGIONAL_INDICATOR_SYMBOL_LETTER_V = "\U0001f1fb"
REGIONAL_INDICATOR_SYMBOL_LETTER_W = "\U0001f1fc"
REGIONAL_INDICATOR_SYMBOL_LETTER_X = "\U0001f1fd"
REGIONAL_INDICATOR_SYMBOL_LETTER_Y = "\U0001f1fe"
REGIONAL_INDICATOR_SYMBOL_LETTER_Z = "\U0001f1ff"
# --- Emoji building-block characters. ---
# TAG_* constants are the invisible Unicode "tag" characters (U+E00xx)
# used inside tag sequences such as the subdivision flags above.
# DIGIT_*/HASH_SIGN/ASTERISK are the ASCII character plus VS-16; add
# COMBINING_ENCLOSING_KEYCAP (U+20E3) to form a full keycap emoji.
# ZERO_WIDTH_JOINER and VARIATION_SELECTOR_16 are the glue characters
# used by ZWJ sequences elsewhere in this module.
TAG_RIGHT_CURLY_BRACKET = "\U000e007d"
DIGIT_FIVE = "5\ufe0f"
TAG_LATIN_CAPITAL_LETTER_U = "\U000e0055"
TAG_LATIN_CAPITAL_LETTER_Q = "\U000e0051"
TAG_LATIN_CAPITAL_LETTER_K = "\U000e004b"
COMBINING_ENCLOSING_KEYCAP = "\u20e3"
TAG_LATIN_CAPITAL_LETTER_C = "\U000e0043"
TAG_ASTERISK = "\U000e002a"
TAG_FULL_STOP = "\U000e002e"
TAG_CIRCUMFLEX_ACCENT = "\U000e005e"
DIGIT_ONE = "1\ufe0f"
TAG_COMMA = "\U000e002c"
DIGIT_ZERO = "0\ufe0f"
TAG_EQUALS_SIGN = "\U000e003d"
TAG_LATIN_CAPITAL_LETTER_O = "\U000e004f"
TAG_COMMERCIAL_AT = "\U000e0040"
DIGIT_EIGHT = "8\ufe0f"
TAG_NUMBER_SIGN = "\U000e0023"
TAG_LATIN_CAPITAL_LETTER_T = "\U000e0054"
TAG_LATIN_CAPITAL_LETTER_N = "\U000e004e"
DIGIT_SIX = "6\ufe0f"
TAG_PERCENT_SIGN = "\U000e0025"
VARIATION_SELECTOR_16 = "\ufe0f"
TAG_LATIN_CAPITAL_LETTER_W = "\U000e0057"
TAG_DOLLAR_SIGN = "\U000e0024"
TAG_LOW_LINE = "\U000e005f"
TAG_DIGIT_EIGHT = "\U000e0038"
TAG_LATIN_CAPITAL_LETTER_M = "\U000e004d"
TAG_LATIN_CAPITAL_LETTER_A = "\U000e0041"
TAG_REVERSE_SOLIDUS = "\U000e005c"
TAG_SOLIDUS = "\U000e002f"
TAG_LATIN_CAPITAL_LETTER_H = "\U000e0048"
TAG_DIGIT_NINE = "\U000e0039"
TAG_LEFT_CURLY_BRACKET = "\U000e007b"
TAG_LATIN_CAPITAL_LETTER_E = "\U000e0045"
TAG_LATIN_SMALL_LETTER_W = "\U000e0077"
TAG_DIGIT_ZERO = "\U000e0030"
TAG_LATIN_CAPITAL_LETTER_B = "\U000e0042"
TAG_LATIN_CAPITAL_LETTER_F = "\U000e0046"
TAG_LATIN_CAPITAL_LETTER_Y = "\U000e0059"
TAG_TILDE = "\U000e007e"
TAG_LATIN_SMALL_LETTER_P = "\U000e0070"
TAG_LATIN_CAPITAL_LETTER_Z = "\U000e005a"
TAG_GREATER_THAN_SIGN = "\U000e003e"
TAG_LATIN_SMALL_LETTER_S = "\U000e0073"
TAG_LATIN_SMALL_LETTER_G = "\U000e0067"
TAG_APOSTROPHE = "\U000e0027"
TAG_RIGHT_PARENTHESIS = "\U000e0029"
TAG_DIGIT_THREE = "\U000e0033"
TAG_LEFT_PARENTHESIS = "\U000e0028"
TAG_DIGIT_SEVEN = "\U000e0037"
TAG_LATIN_SMALL_LETTER_O = "\U000e006f"
TAG_DIGIT_SIX = "\U000e0036"
TAG_DIGIT_TWO = "\U000e0032"
TAG_LATIN_SMALL_LETTER_F = "\U000e0066"
TAG_LATIN_SMALL_LETTER_K = "\U000e006b"
TAG_LATIN_SMALL_LETTER_Y = "\U000e0079"
TAG_SPACE = "\U000e0020"
TAG_LATIN_SMALL_LETTER_I = "\U000e0069"
DIGIT_TWO = "2\ufe0f"
TAG_DIGIT_ONE = "\U000e0031"
TAG_RIGHT_SQUARE_BRACKET = "\U000e005d"
TAG_LATIN_SMALL_LETTER_R = "\U000e0072"
HASH_SIGN = "#\ufe0f"
TAG_SEMICOLON = "\U000e003b"
TAG_LATIN_CAPITAL_LETTER_L = "\U000e004c"
TAG_HYPHEN_MINUS = "\U000e002d"
ASTERISK = "*\ufe0f"
TAG_LATIN_SMALL_LETTER_A = "\U000e0061"
TAG_EXCLAMATION_MARK = "\U000e0021"
TAG_LATIN_CAPITAL_LETTER_V = "\U000e0056"
TAG_LATIN_SMALL_LETTER_C = "\U000e0063"
TAG_GRAVE_ACCENT = "\U000e0060"
ZERO_WIDTH_JOINER = "\u200d"
TAG_LATIN_CAPITAL_LETTER_G = "\U000e0047"
DIGIT_NINE = "9\ufe0f"
TAG_VERTICAL_LINE = "\U000e007c"
TAG_LATIN_SMALL_LETTER_Z = "\U000e007a"
TAG_LATIN_CAPITAL_LETTER_X = "\U000e0058"
TAG_LATIN_SMALL_LETTER_J = "\U000e006a"
TAG_LATIN_CAPITAL_LETTER_P = "\U000e0050"
TAG_AMPERSAND = "\U000e0026"
TAG_LATIN_SMALL_LETTER_L = "\U000e006c"
TAG_LATIN_SMALL_LETTER_X = "\U000e0078"
DIGIT_SEVEN = "7\ufe0f"
TAG_LATIN_CAPITAL_LETTER_J = "\U000e004a"
TAG_LATIN_SMALL_LETTER_T = "\U000e0074"
TAG_QUESTION_MARK = "\U000e003f"
TAG_LATIN_SMALL_LETTER_B = "\U000e0062"
TAG_LEFT_SQUARE_BRACKET = "\U000e005b"
TAG_LATIN_SMALL_LETTER_D = "\U000e0064"
TAG_LATIN_SMALL_LETTER_E = "\U000e0065"
TAG_LATIN_SMALL_LETTER_M = "\U000e006d"
TAG_LESS_THAN_SIGN = "\U000e003c"
TAG_DIGIT_FIVE = "\U000e0035"
TAG_LATIN_CAPITAL_LETTER_D = "\U000e0044"
TAG_LATIN_SMALL_LETTER_N = "\U000e006e"
TAG_PLUS_SIGN = "\U000e002b"
TAG_COLON = "\U000e003a"
DIGIT_THREE = "3\ufe0f"
TAG_LATIN_SMALL_LETTER_Q = "\U000e0071"
TAG_LATIN_CAPITAL_LETTER_R = "\U000e0052"
TAG_LATIN_CAPITAL_LETTER_S = "\U000e0053"
DIGIT_FOUR = "4\ufe0f"
TAG_LATIN_CAPITAL_LETTER_I = "\U000e0049"
TAG_QUOTATION_MARK = "\U000e0022"
CANCEL_TAG = "\U000e007f"
TAG_LATIN_SMALL_LETTER_V = "\U000e0076"
TAG_LATIN_SMALL_LETTER_H = "\U000e0068"
TAG_LATIN_SMALL_LETTER_U = "\U000e0075"
TAG_DIGIT_FOUR = "\U000e0034"
/AsynCluster-0.3.tar.gz/AsynCluster-0.3/doc/example.py | import time, os.path
# Standard library
from random import sample as sampleWOR

# Third-party
import numpy as np
import scipy as s
from scipy import stats
from twisted.internet import defer, reactor, threads

# Project
from asynqueue import ThreadQueue
from asyncluster.master import jobs
from twisted_goodies.pybywire import pack
def sample(W, N=None, logWeights=False):
    """
    Returns an index array that samples I{N} values from some external
    array. Each element of the array will have a probability of being
    sampled (with replacement) that is proportional to its corresponding
    weight in the 1-D array I{W}.

    Uses Walker's alias algorithm as set forth in L. Devroye, Non-Uniform
    Random Variate Generation, p. 109,
    http://cg.scs.carleton.ca/~luc/rnbookindex.html.

    If N is not specified, it defaults to the number of usable weights,
    i.e., importance resampling.

    If I{logWeights} is set C{True}, the weights are transformed from log
    to linear.

    The caller's array I{W} is never modified.
    """
    W = np.asarray(W, dtype=float)
    if logWeights:
        # Shift by the max before exponentiating for numerical stability
        W = np.exp(W - W.max())
    total = W.sum()
    if not np.isfinite(total) or total <= 0:
        # No usable probability mass at all
        return []
    # Normalize a *copy* (bug fix: "W /= sum(W)" mutated the caller's array)
    W = W / total
    # Indices of usable (finite, non-negligible) weights
    I0 = np.nonzero(np.isfinite(W) & (W > 1e-7))[0]
    K = len(I0)
    if N is None:
        N = K
    if K == 0:
        return []
    if K == 1:
        # Bug fix: the single usable index is I0[0], not necessarily 0
        return [I0[0]] * N
    # Build the alias table T: column 0 is the cut-off probability for
    # each bucket, column 1 the alias bucket used on rejection.
    greater, smaller = [], []
    T = np.zeros((K, 2))
    for m, w in enumerate(W[I0]):
        q = T[m, 0] = K * w
        if q < 1:
            smaller.append(m)
        else:
            greater.append(m)
    while smaller and greater:
        k = greater[-1]
        m = smaller.pop()
        T[m, 1] = k
        T[k, 0] -= (1 - T[m, 0])
        if T[k, 0] < 1:
            greater.pop()
            smaller.append(k)
    # Draw N buckets uniformly over all K entries.  Bug fix: randint's
    # upper bound is exclusive, so the old randint(0, K-1, N) never drew
    # the last bucket directly.
    V = np.random.rand(N)
    RI = np.random.randint(0, K, N)
    # Keep the drawn bucket if V <= its cut-off, otherwise use its alias
    I1 = np.greater(V, T[RI, 0]).choose(RI, T[RI, 1].astype(int))
    return I0[I1]
class Proposer(object):
    """
    I provide random variates and probabilities for jumps to be made in
    proposing each new parameter vector. The jumps have a normal (Gaussian)
    distribution, as is typical. It doesn't matter that we're trying to model a
    somewhat different distribution.
    """
    def __init__(self):
        # Frozen standard-normal distribution used for all jump proposals
        self._jumpDist = stats.distributions.norm()

    def r(self, N, wiggle):
        """
        Returns an I{N} x 2 array of parameter offsets drawn from the
        standard-normal jump distribution, scaled by a I{wiggle} value
        between 0 and 1.
        """
        return wiggle * self._jumpDist.rvs((N, 2))

    def p(self, X, wiggle):
        """
        Returns an array of probability densities for each parameter offset
        in I{X}, or rows of I{X}, under the assumption that they were
        generated from my L{r} method with the specified I{wiggle}.
        """
        # Promote a single offset vector to a one-row array
        if len(X.shape) == 1:
            X = X.reshape(1, X.shape[0])
        # Change of variables: density of wiggle*Z is pdf(x/wiggle)/wiggle
        return self._jumpDist.pdf(X/wiggle) / wiggle
class NodeCaller(object):
    """
    I call on nodes to compute likelihoods of data given vectors of parameters.
    """
    # Source code shipped to each cluster node and exec'd there; it must be
    # valid top-level Python.  The node-side likelihood() computes the total
    # log-likelihood of the shared data under a Cauchy(loc=X[0], scale=X[1])
    # model, using the pybywire pack module for argument/result transport.
    nodecode = """
###########################################################################
import scipy as s
from scipy import stats
from twisted_goodies.pybywire import pack

G = {}

def newData(data):
    G['data'] = pack.Unpacker(data)()

def likelihood(paramVector):
    X = pack.Unpacker(paramVector)()
    # The next two lines are where all the real computing work gets done
    distObj = stats.distributions.cauchy(loc=X[0], scale=X[1])
    L = s.log(distObj.pdf(G['data'])).sum()
    return pack.Packer(L)()
###########################################################################
"""
    def __init__(self, socket, data):
        # socket: path of the UNIX domain socket of the AsynCluster master;
        # data: 1-D array of observations whose likelihood is evaluated.
        self.socket, self.data = socket, data

    def _oops(self, failure):
        # Errback: dump the remote traceback (Python 2 print statement).
        print "FAILURE:\n%s" % failure.getTraceback()

    def startup(self):
        """
        Starts me up to run the named model remotely on the asyncluster nodes
        via the supplied UNIX domain I{socket}. Returns a deferred that fires
        when remotely-run operations can commence.
        """
        def maybeStarted(status):
            if not status:
                raise Exception("Client couldn't connect!")
            # Make sure the job client is shut down with the reactor
            reactor.addSystemEventTrigger(
                'before', 'shutdown', self.client.shutdown)
            # Push the observation data to every node before any likelihood
            # calls are made
            d = self.client.update('newData', pack.Packer(self.data)())
            d.addErrback(self._oops)
            return d
        self.client = jobs.JobClient(self.socket, codeString=self.nodecode)
        return self.client.startup().addCallback(maybeStarted)

    def likelihood(self, X, runLocally=False):
        """
        Returns the log-likelihood of my data given the Cauchy distribution of
        my model, after application of the location and scale parameters from
        the supplied 2-element parameter vector I{X}.

        This method returns the likelihood of the data given the parameters. It
        does not consider any prior probability of the parameters; in this
        example a 'non-informative uniform prior' is used.
        """
        def localLikelihood():
            distObj = stats.distributions.cauchy(loc=X[0], scale=X[1])
            return s.log(distObj.pdf(self.data)).sum()

        def gotResult(L_packed):
            return pack.Unpacker(L_packed)()

        if runLocally:
            # Compute in a thread so the reactor isn't blocked
            d = threads.deferToThread(localLikelihood)
        else:
            X_packed = pack.Packer(X)()
            d = self.client.run('likelihood', X_packed, **{'timeout':20})
        # NOTE(review): gotResult unpacks a packed result, but the
        # runLocally branch yields a raw float -- verify that
        # pack.Unpacker tolerates unpacked values.
        d.addCallbacks(gotResult, self._oops)
        return d
class Population_Monte_Carlo(object):
    """
    Population MCMC per Cappe et al.

    I fit the location (median and mode) and scale (half-width at half-maximum)
    of a Cauchy distribution to a set of observations from a Cauchy random
    process. Basically, the Cauchy distribution accounts better for rare,
    significant events that would be vanishingly unlikely under a Gaussian
    model.

    This example could fit a normal distribution to Gaussian-distributed data
    instead, but that would be boring. You'd just be computing the mean and
    variance of the data set.
    """
    # If True, likelihoods are computed in local threads instead of on
    # cluster nodes
    runLocally = False
    # UNIX domain socket of the AsynCluster master
    socket = "/tmp/.ndm"
    # Candidate jump "variances" (actually standard deviations; see the
    # NOTE in run()), one population subset per value
    V = [0.10, 0.02, 0.005, 0.001]

    def __init__(self, data):
        self.proposer = Proposer()
        self.caller = NodeCaller(self.socket, data)
        # Output file receiving one (loc, scale) row per chain per iteration
        self.fh = open('params.csv', 'w')
        self.fh.write("loc, scale\n")

    def subsetIndex(self, k):
        """
        Returns a subset index for the samples in my I{X} attribute that
        correspond to the jump variance for the supplied index I{k}.
        """
        # Draw R[k] members without replacement from the not-yet-assigned
        # pool, then remove them from the pool
        I = sampleWOR(self.Is, self.R[k])
        self.Is = s.setdiff1d(self.Is, I)
        return I

    def weightedProposals(self, X, v):
        """
        Returns a deferred that fires with a 1-D array of valid proposals on
        the supplied parameter array I{X}, given the specified proposal
        variance I{v}, along with log-importance weights for those proposals.

        Valid proposals are those with non-zero likelihood. The censoring to
        only valid proposals means that fewer proposals may be returned than
        supplied parameters, possibly no proposals at all.

        The importance weights are to be combined with those from other calls
        to this method with different variance settings. The weights will be
        used to resample the returned proposals so that the probability of any
        given proposal being included in the final result for this iteration is
        proportional to its likelihood, and inversely proportional to the
        probability density of the proposal offset.
        """
        def gotLikelihoods(results):
            XP = X + XD
            L = s.array(results)
            if len(L):
                # Keep only proposals with a finite log-likelihood
                I = s.isfinite(L).nonzero()[0]
                logProbJumps = s.log(self.proposer.p(XD[I], v)).sum(1)
                return XP[I], L[I] + logProbJumps
            return XP, L
        # Have the nodes evaluate the likelihood of each proposal, which is
        # the most time-consuming step in real-life usage.
        XD = self.proposer.r(len(X), v)
        dList = [self.caller.likelihood(Xp, self.runLocally) for Xp in X+XD]
        d = defer.gatherResults(dList)
        d.addCallback(gotLikelihoods)
        return d

    @defer.deferredGenerator
    def run(self, N_iter, N_chains):
        """
        Does a PMC run with I{N_chains} population members and jump variances
        in the supplied 1-D array I{V}. The number of population members for
        each jump variance will go up or down, depending on the performance for
        that setting.

        B{NOTE}: What I've been calling variances are actually computed as
        standard deviations, i.e., not squared. Should fix this either by
        relabeling or adjusting the code.

        Returns a deferred that fires when the run is done. No output value is
        provided via the deferred, however.

        @param N_iter: The number of iterations to produce after burn-in.

        @param N_chains: The number of Monte Carlo chains run at each
            iteration.
        """
        def gotWeightedProposals(results, k):
            # Stash the proposals and their weights for variance setting k
            XP[k] = results[0]
            W[k] = results[1]

        def normalize(R):
            # Rescale so that the proportions are global rather than
            # individual. Highly successful settings will have a large portion
            # of the next sample even if they last operated on only a few of
            # the samples.
            R = R.astype(float) / R.sum()
            # Turn the scaled array into an array of subsample sizes, with a
            # minimum size of two apiece.
            R = s.clip(
                s.round_(N_chains*R), rMin,
                N_chains-rMin*(P-1)).astype(int)
            # Twiddle the biggest one as needed to keep sum = N_chains
            R[s.argmax(R)] += N_chains - sum(R)
            # Replace the old list and subset index
            self.R = R
            self.Is = s.arange(N_chains)

        # Connect the node caller to the AsynCluster master server
        wfd = defer.waitForDeferred(self.caller.startup())
        yield wfd; wfd.getResult()
        # Some constants
        P = len(self.V)
        rMin = max([2, int(round(0.01*N_chains))])
        # Initialize some arrays
        normalize(s.ones(P))
        X = self.proposer.r(N_chains, self.V[0])
        # The iteration loop
        for i in xrange(N_iter):
            t0 = time.time()
            # Generate and weight some proposals
            XP, W, II = [[None]*P for k in (1,2,3)]
            dList = []
            for k, v in enumerate(self.V):
                I = II[k] = self.subsetIndex(k)
                d = self.weightedProposals(X[I], v)
                d.addCallback(gotWeightedProposals, k)
                dList.append(d)
            wfd = defer.waitForDeferred(defer.DeferredList(dList))
            yield wfd; wfd.getResult()
            # Resample everything together
            I = sample(s.concatenate(W), N_chains, logWeights=True)
            if len(I):
                X = s.row_stack(XP)[I]
            # Count how many resampled members came from each variance
            # subset.  NOTE(review): I indexes the concatenated proposal
            # array while II[k] holds pre-proposal subset indices --
            # confirm the membership test matches the intended bookkeeping.
            R = s.array([sum([x in II[k] for x in I]) for k in xrange(P)])
            # Write the parameters for this iteration to the output file
            for Xk in X:
                rowString = ", ".join(["%f" % x for x in Xk])
                self.fh.write(rowString + "\n")
            # Normalize R to maintain a total of N_chains population members
            normalize(R)
            # Provide some info about this iteration
            vInfo = ", ".join(["%3d" % r for r in self.R])
            msg = "%04d, %5.2f sec, VR={%s} : %s" % (
                i, time.time()-t0, vInfo,
                ", ".join(["%9.3g" % x for x in X.mean(0)]))
            print msg
        # All done
        self.fh.close()
        reactor.stop()
if __name__ == '__main__':
    # Example configuration: sample count, population size, iteration
    # count, and the true Cauchy parameters the fit should recover.
    N_obs = 50000
    N_chains = 1000
    N_iter = 1000
    loc, scale = 0.1, 0.4
    # Fake observations of a Cauchy random process, dumped to 'data.csv'
    # (one value per line) for later inspection.
    data = stats.distributions.cauchy(loc=loc, scale=scale).rvs(N_obs)
    fh = open('data.csv', 'w')
    fh.writelines("%f\n" % x for x in data)
    fh.close()
    # Build the Monte Carlo runner, schedule the run, and start the
    # Twisted reactor (it blocks until run() calls reactor.stop()).
    pmc = Population_Monte_Carlo(data)
    reactor.callWhenRunning(pmc.run, N_iter, N_chains)
    reactor.run()
/First_Messenger_Server-0.8.2.tar.gz/First_Messenger_Server-0.8.2/server/common/metaclasses.py | import dis
class ServerMaker(type):
'''
Метакласс, проверяющий что в результирующем классе нет клиентских
вызовов таких как: connect. Также проверяется, что серверный
сокет является TCP и работает по IPv4 протоколу.
'''
def __init__(cls, clsname, bases, clsdict):
# Список методов, которые используются в функциях класса:
methods = []
# Атрибуты, вызываемые функциями классов
attrs = []
for func in clsdict:
# Пробуем
try:
ret = dis.get_instructions(clsdict[func])
# Если не функция то ловим исключение
except TypeError:
pass
else:
# Раз функция разбираем код, получая используемые методы и
# атрибуты.
for i in ret:
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
methods.append(i.argval)
elif i.opname == 'LOAD_ATTR':
if i.argval not in attrs:
attrs.append(i.argval)
# Если обнаружено использование недопустимого метода connect,
# генерируем исключение:
if 'connect' in methods:
raise TypeError(
'Использование метода connect недопустимо в серверном классе')
# Если сокет не инициализировался константами SOCK_STREAM(TCP)
# AF_INET(IPv4), тоже исключение.
if not ('SOCK_STREAM' in attrs and 'AF_INET' in attrs):
raise TypeError('Некорректная инициализация сокета.')
super().__init__(clsname, bases, clsdict)
class ClientMaker(type):
'''
Метакласс, проверяющий что в результирующем классе нет серверных
вызовов таких как: accept, listen. Также проверяется, что сокет не
создаётся внутри конструктора класса.
'''
def __init__(cls, clsname, bases, clsdict):
# Список методов, которые используются в функциях класса:
methods = []
for func in clsdict:
# Пробуем
try:
ret = dis.get_instructions(clsdict[func])
# Если не функция то ловим исключение
except TypeError:
pass
else:
# Раз функция разбираем код, получая используемые методы.
for i in ret:
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
methods.append(i.argval)
# Если обнаружено использование недопустимого метода accept, listen,
# socket бросаем исключение:
for command in ('accept', 'listen', 'socket'):
if command in methods:
raise TypeError(
'В классе обнаружено использование запрещённого метода')
# Вызов get_message или send_message из utils считаем корректным
# использованием сокетов
if 'get_message' in methods or 'send_message' in methods:
pass
else:
raise TypeError(
'Отсутствуют вызовы функций, работающих с сокетами.')
super().__init__(clsname, bases, clsdict) | PypiClean |
/Fraunhofer-1.0.3.tar.gz/Fraunhofer-1.0.3/fraunhofer/specfit_mod.py | from __future__ import print_function
__authors__ = 'David Nidever <dnidever@montana.edu>'
__version__ = '20200711' # yyyymmdd
import os
import shutil
import contextlib, io, sys
import numpy as np
import warnings
from astropy.io import fits
from astropy.table import Table
from dlnpyutils.minpack import curve_fit
from dlnpyutils.least_squares import least_squares
from scipy.interpolate import interp1d
from dlnpyutils import utils as dln, bindata, astro
import doppler
from doppler.spec1d import Spec1D
from doppler import (cannon,utils,reader)
import copy
import logging
import time
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.legend import Legend
import tempfile
from . import models
from synple import synple
# Suppress harmless binary-compatibility warnings emitted by compiled
# extensions built against a different numpy ABI (known upstream bug).
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

cspeed = 2.99792458e5  # speed of light in km/s
class SpecFitter:
    def __init__(self, spec, params, fitparams=None, norm=True, verbose=False,
                 alinefile=None, mlinefile=None):
        """
        Set up a spectral fitter for the observed spectrum ``spec``.

        Parameters
        ----------
        spec : Spec1D
            Observed spectrum (flux, err, wave, lsf) to be fit.
        params : dict
            Initial/fixed parameter values; keys are upper-cased on storage.
        fitparams : list of str, optional
            Names of the parameters to fit; defaults to all keys of params.
        norm : bool, optional
            Normalize the synthetic spectrum (default True).
        verbose : bool, optional
            Print diagnostic output.
        alinefile, mlinefile : str, optional
            Atomic/molecular line-list files passed through to the
            spectrum synthesis.
        """
        # Parameters
        self.params = params
        if fitparams is not None:
            self.fitparams = fitparams
        else:
            self.fitparams = list(params.keys())  # by default fit all parameters
        self.nsynfev = 0  # number of synthetic spectra made
        self.njac = 0  # number of times jacobian called
        # Save spectrum information (flattened over orders)
        self.spec = spec.copy()
        self.flux = spec.flux.flatten()
        self.err = spec.err.flatten()
        self.wave = spec.wave.flatten()
        self.lsf = spec.lsf.copy()
        self.lsf.wavevac = spec.wavevac  # need this later for synspec prep
        self.wavevac = spec.wavevac
        self.verbose = verbose
        self.norm = norm  # normalize
        self.continuum_func = spec.continuum_func
        self.alinefile = alinefile
        self.mlinefile = mlinefile
        # Convert vacuum to air wavelengths
        # synspec uses air wavelengths
        if spec.wavevac is True:
            wave = astro.vactoair(spec.wave.copy().flatten()).reshape(spec.wave.shape)
        else:
            wave = spec.wave.copy()
        # Ensure a 2-D (npix, norder) wavelength array
        if wave.ndim == 1:
            wave = np.atleast_2d(wave).T
        # Figure out the wavelength parameters
        npix = spec.npix
        norder = spec.norder
        # Sample every 20th pixel when probing the LSF width
        xp = np.arange(npix//20)*20
        wr = np.zeros((spec.lsf.norder, 2), np.float64)  # per-order wavelength range
        dw = np.zeros(spec.lsf.norder, np.float64)       # per-order median dispersion
        mindw = np.zeros(norder, np.float64)             # per-order minimum usable step
        for o in range(spec.norder):
            dw[o] = np.median(dln.slope(wave[:, o]))
            wr[o, 0] = np.min(wave[:, o])
            wr[o, 1] = np.max(wave[:, o])
            fwhm = spec.lsf.fwhm(wave[xp, o], xtype='Wave', order=o)
            # FWHM is in units of lsf.xtype, convert to wavelength/angstroms, if necessary
            if spec.lsf.xtype.lower().find('pix') > -1:
                fwhm *= np.abs(dw[o])
            # need at least ~4 pixels per LSF FWHM across the spectrum
            # using 3 affects the final profile shape
            mindw[o] = np.min(fwhm/4)
        self._dwair = np.min(mindw)  # IN AIR WAVELENGTHS!!
        self._w0air = np.min(wave)
        self._w1air = np.max(wave)
        # Per-call history of parameters/models/chisq (appended by model())
        self._all_pars = []
        self._all_model = []
        self._all_chisq = []
        self._jac_array = None
    @property
    def params(self):
        """dict: all model parameters, keyed by upper-case name."""
        return self._params
@params.setter
def params(self,params):
""" Dictionary, keys must be all CAPS."""
self._params = dict((key.upper(), value) for (key, value) in params.items()) # all CAPS
    @property
    def fitparams(self):
        """list of str: upper-case names of the parameters being fit."""
        return self._fitparams
@fitparams.setter
def fitparams(self,fitparams):
""" list, keys must be all CAPS."""
self._fitparams = [v.upper() for v in fitparams] # all CAPS
def mkinputs(self,args):
""" Make INPUTS dictionary."""
# Create INPUTS with all arguments needed to make the spectrum
inputs = self.params.copy() # initialize with initial/fixed values
for k in range(len(self.fitparams)): # this overwrites the values for the fitted values
inputs[self.fitparams[k]] = args[k]
inputs['DW'] = self._dwair # add in wavelength parameters
inputs['W0'] = self._w0air
inputs['W1'] = self._w1air
return inputs
def chisq(self,model):
return np.sqrt( np.sum( (self.flux-model)**2/self.err**2 )/len(self.flux) )
def model(self, xx, *args):
    """ Return a model spectrum flux for the given input arguments.

    The positional ``*args`` correspond one-to-one with self.fitparams.
    Returns the flattened, LSF-convolved (and air/vacuum-corrected) flux.
    """
    if self.verbose:
        print(args)
    # Arguments -> full INPUTS dictionary
    inputs = self.mkinputs(args)
    if self.verbose:
        print(inputs)
    # Generate the synthetic spectrum (always returned in air wavelengths)
    synspec = model_spectrum(inputs, verbose=self.verbose,
                             alinefile=self.alinefile, mlinefile=self.mlinefile)
    self.nsynfev += 1
    # Convolve with the LSF and handle the air/vacuum wavelength conversion
    pspec = prepare_synthspec(synspec, self.lsf, norm=self.norm,
                              continuum_func=self.continuum_func)
    flat = pspec.flux.flatten()
    # Bookkeeping: record every evaluation (pars, model, chisq)
    self._all_pars.append(list(args).copy())
    self._all_model.append(flat.copy())
    self._all_chisq.append(self.chisq(flat))
    return flat
def getstep(self, name, val, relstep=0.02):
    """
    Return the finite-difference step size for a parameter.

    Bug fix: the original had an unreachable duplicate ``return step``
    statement; it and a dead commented-out relative-step scheme were removed.

    Parameters
    ----------
    name : str
        Parameter name (all CAPS), e.g. 'TEFF', 'RV', 'MG_H'.
    val : float
        Current parameter value (unused; steps are absolute).
    relstep : float, optional
        Relative step size (unused; kept for interface compatibility).

    Returns
    -------
    step : float
        Absolute step size for the one-sided derivative.
    """
    # Fixed absolute steps per parameter type
    if name == 'TEFF':
        step = 5.0
    elif name == 'RV':
        step = 0.1
    elif name == 'VROT':
        step = 0.5
    elif name == 'VMICRO':
        step = 0.5
    elif name.endswith('_H'):
        step = 0.01
    else:
        step = 0.01
    return step
def jac(self, x, *args):
    """ Compute the Jacobian matrix (an m-by-n matrix, where element (i, j)
    is the partial derivative of f[i] with respect to x[j]) using one-sided
    finite differences.

    A new synthetic spectrum does not need to be generated for RV, vmicro
    or vsini steps; the un-broadened "origspec" is simply re-smoothed and
    re-shifted, which saves synthesis time.

    Bug fix: a stray no-op ``self`` expression statement was removed.

    Parameters
    ----------
    x : numpy array
        Observed (flattened) wavelength array; only its length is used.
    *args : floats
        Current values of the fitted parameters (ordered like self.fitparams).

    Returns
    -------
    jac : numpy array
        (npix, npar) Jacobian matrix.
    """
    # Use the attached logger if one was set, otherwise a basic one
    if hasattr(self, 'logger') is False:
        logger = dln.basiclogger()
    else:
        logger = self.logger
    logger.info(args)
    if self.verbose:
        logger.info(' ')
        logger.info('##### Calculating Jacobian Matrix #####')
        logger.info(' ')
    # Boundaries for the fitted parameters
    lbounds, ubounds = mkbounds(self.fitparams)
    relstep = 0.02
    npix = len(x)
    npar = len(args)
    # Get INPUTS dictionary and make keys all CAPS
    inputs = self.mkinputs(args)
    inputs = dict((key.upper(), value) for (key, value) in inputs.items())
    # Some important parameters
    w0 = inputs['W0']
    w1 = inputs['W1']
    dw = inputs['DW']
    rv = inputs.get('RV')
    vrot = inputs.get('VROT')
    vmicro = inputs.get('VMICRO')
    # Create synthetic spectrum at current values
    # set vrot=vmicro=rv=0, will modify later if necessary
    if self.verbose:
        logger.info('--- Current values ---')
        logger.info(args)
    tinputs = inputs.copy()
    tinputs['VMICRO'] = 0
    tinputs['VROT'] = 0
    tinputs['RV'] = 0
    origspec = model_spectrum(tinputs, keepextend=True,  # always air wavelengths
                              alinefile=self.alinefile, mlinefile=self.mlinefile)
    self.nsynfev += 1
    # Smooth and shift to the current broadening/RV
    smorigspec = smoothshift_spectrum(origspec, vrot=vrot, vmicro=vmicro, rv=rv)
    # Trim to final wavelengths
    smorigspec = trim_spectrum(smorigspec, w0, w1)
    # Convolve with the LSF and do air/vacuum wave conversion
    pspec = prepare_synthspec(smorigspec, self.lsf, norm=self.norm,
                              continuum_func=self.continuum_func)
    # Flatten the spectrum; f0 is the reference model for the derivatives
    f0 = pspec.flux.flatten()
    # Save models/pars/chisq
    self._all_pars.append(list(args).copy())
    self._all_model.append(f0.copy())
    self._all_chisq.append(self.chisq(f0))
    chisq = np.sqrt(np.sum((self.flux-f0)**2/self.err**2)/len(self.flux))
    if self.verbose:
        logger.info('chisq = '+str(chisq))
    # MASK PIXELS!?
    # Initialize jacobian matrix
    jac = np.zeros((npix, npar), np.float64)
    # Loop over parameters, one-sided step each
    for i in range(npar):
        pars = np.array(copy.deepcopy(args))
        step = self.getstep(self.fitparams[i], pars[i], relstep)
        # Check boundaries, if above upper boundary go the opposite way
        # NOTE(review): this tests the current value, not value+step --
        # confirm that a step landing just past the bound is acceptable.
        if pars[i] > ubounds[i]:
            step *= -1
        pars[i] += step
        tinputs = self.mkinputs(pars)
        if self.verbose:
            logger.info(' ')
            logger.info('--- '+str(i+1)+' '+self.fitparams[i]+' '+str(pars[i])+' ---')
            logger.info(pars)
        # VROT/VMICRO/RV, just shift/smooth original spectrum (no new synthesis)
        if self.fitparams[i] == 'VROT' or self.fitparams[i] == 'VMICRO' or self.fitparams[i] == 'RV':
            tvrot = tinputs.get('VROT')
            tvmicro = tinputs.get('VMICRO')
            trv = tinputs.get('RV')
            # Smooth and shift
            synspec = smoothshift_spectrum(origspec, vrot=tvrot, vmicro=tvmicro, rv=trv)
            # Trim to final wavelengths
            synspec = trim_spectrum(synspec, w0, w1)
        else:
            synspec = model_spectrum(tinputs, alinefile=self.alinefile,
                                     mlinefile=self.mlinefile)  # always returns air wavelengths
            self.nsynfev += 1
        # Convert to vacuum wavelengths if necessary
        if self.wavevac:
            synspec.wave = astro.airtovac(synspec.wave)
            synspec.wavevac = True
        # Convolve with the LSF and do air/vacuum wave conversion
        pspec = prepare_synthspec(synspec, self.lsf, norm=self.norm,
                                  continuum_func=self.continuum_func)
        # Flatten the spectrum
        f1 = pspec.flux.flatten()
        # Save models/pars/chisq
        self._all_pars.append(list(pars).copy())
        self._all_model.append(f1.copy())
        self._all_chisq.append(self.chisq(f1))
        if np.sum(~np.isfinite(f1)) > 0:
            print('some nans/infs')
            import pdb; pdb.set_trace()   # debugging hook, kept deliberately
        jac[:, i] = (f1-f0)/step
    if np.sum(~np.isfinite(jac)) > 0:
        print('some nans/infs')
        import pdb; pdb.set_trace()   # debugging hook, kept deliberately
    self._jac_array = jac.copy()   # keep a copy
    self.njac += 1
    return jac
def trim_spectrum(spec, w0, w1):
    """ Trim a single-order synthetic spectrum to the wavelength range [w0,w1]."""
    # Pixels closest to the requested endpoints
    wv1, ind1 = dln.closest(spec.wave, w0)
    wv2, ind2 = dln.closest(spec.wave, w1)
    # Already spans exactly the requested range -- nothing to do
    if ind1 == 0 and ind2 == (spec.npix-1):
        return spec
    # Cut every per-pixel array with the same slice
    sl = slice(ind1, ind2+1)
    outspec = spec.copy()
    outspec.flux = outspec.flux[sl]
    outspec.wave = outspec.wave[sl]
    if outspec.err is not None:
        outspec.err = outspec.err[sl]
    if outspec.mask is not None:
        outspec.mask = outspec.mask[sl]
    if getattr(outspec, 'cont', None) is not None:
        outspec.cont = outspec.cont[sl]
    outspec.npix = len(outspec.flux)
    return outspec
def getabund(inputs, verbose=False):
    """ Grab the abundances out of the input file and return array of abundances.

    Parameters
    ----------
    inputs : dict
        Dictionary of inputs (all-CAPS keys) including FEH or FE_H,
        'modelfile', and optional [X/H] and ALPHA_H values.
    verbose : bool, optional
        Verbose output.

    Returns
    -------
    abu : numpy array
        99-element abundance array in the form synple expects.

    Raises
    ------
    ValueError
        If FE_H or 'modelfile' is missing from inputs.
    """
    # Create the input 99-element abundance array
    codedir = os.path.dirname(os.path.abspath(__file__))
    pertab = Table.read(codedir+'/data/periodic_table.txt', format='ascii')
    # Metallicity: accept either FEH or FE_H
    feh = inputs.get('FEH')
    if feh is None:
        feh = inputs.get('FE_H')
    if feh is None:
        raise ValueError('FE_H missing from inputs')
    # Read model atmosphere (provides the model H/He abundances used below)
    modelfile = inputs.get('modelfile')
    if modelfile is None:
        raise ValueError('modelfile missing from inputs')
    atmostype, teff, logg, vmicro2, mabu, nd, atmos = synple.read_model(modelfile, verbose=verbose)
    mlines = dln.readlines(modelfile)
    # solar abundances
    # first two are Teff and logg
    # last two are Hydrogen and Helium
    # NOTE(review): the two comments above disagree about what the first two
    # slots hold; abu[0:2] is overwritten from the model at the end either way.
    solar_abund = np.array([ 4750., 2.5,
                             -10.99, -10.66,  -9.34,  -3.61,  -4.21,
                              -3.35,  -7.48,  -4.11,  -5.80,  -4.44,
                              -5.59,  -4.53,  -6.63,  -4.92,  -6.54,
                              -5.64,  -7.01,  -5.70,  -8.89,  -7.09,
                              -8.11,  -6.40,  -6.61,  -4.54,  -7.05,
                              -5.82,  -7.85,  -7.48,  -9.00,  -8.39,
                              -9.74,  -8.70,  -9.50,  -8.79,  -9.52,
                              -9.17,  -9.83,  -9.46, -10.58, -10.16,
                             -20.00, -10.29, -11.13, -10.47, -11.10,
                             -10.33, -11.24, -10.00, -11.03,  -9.86,
                             -10.49,  -9.80, -10.96,  -9.86, -10.94,
                             -10.46, -11.32, -10.62, -20.00, -11.08,
                             -11.52, -10.97, -11.74, -10.94, -11.56,
                             -11.12, -11.94, -11.20, -11.94, -11.19,
                             -12.16, -11.19, -11.78, -10.64, -10.66,
                             -10.42, -11.12, -10.87, -11.14, -10.29,
                             -11.39, -20.00, -20.00, -20.00, -20.00,
                             -20.00, -20.00, -12.02, -20.00, -12.58,
                             -20.00, -20.00, -20.00, -20.00, -20.00,
                             -20.00, -20.00])
    # Deal with alpha abundances
    # only add the individual alpha abundance if it's not already there
    # sometimes we might fit a single alpha element but want to use
    # ALPHA_H to set the rest of them
    if inputs.get('ALPHA_H') is not None:
        alpha = inputs['ALPHA_H']
        elem = ['O','MG','SI','S','CA','TI']
        for k in range(len(elem)):
            if inputs.get(elem[k]+'_H') is None:
                inputs[elem[k]+'_H'] = alpha
    # Scale global metallicity (skip the two leading non-abundance slots)
    abu = solar_abund.copy()
    abu[2:] += feh
    # Now offset the elements with [X/Fe], [X/Fe]=[X/H]-[Fe/H]
    g, = np.where( (np.char.array(list(inputs.keys())).find('_H') != -1) &
                   (np.char.array(list(inputs.keys())) != 'FE_H') )
    if len(g) > 0:
        # Match '<SYMBOL>_H' keys against the periodic-table symbols
        ind1, ind2 = dln.match(np.char.array(list(inputs.keys()))[g], np.char.array(pertab['symbol']).upper()+'_H')
        for k in range(len(ind1)):
            key1 = np.char.array(list(inputs.keys()))[g[ind1[k]]]
            abu[ind2[k]] += float(inputs[key1]) - feh
            if verbose:
                print('%s %f' % (key1, float(inputs[key1])))
    # convert to linear
    abu[2:] = 10**abu[2:]
    # Divide by N(H), taken from the 'ABUNDANCE SCALE' line of the model file
    g, = np.where(np.char.array(mlines).find('ABUNDANCE SCALE') != -1)
    nhtot = np.float64(mlines[g[0]].split()[6])
    abu[2:] /= nhtot
    # use model values for H and He
    abu[0:2] = mabu[0:2]
    return abu
def synple_wrapper(inputs, verbose=False, tmpbase='/tmp', alinefile=None, mlinefile=None):
    """ Wrapper around synple to generate a new synthetic spectrum.
    All wavelengths are AIR.

    Bug fix: the temporary working directory is now removed and the current
    working directory restored even when synthesis fails (previously an
    exception leaked the tmpdir and left the process chdir'ed into it).

    Parameters
    ----------
    inputs : dict
        Inputs: TEFF, LOGG, FE_H, optional [X/H] values and the wavelength
        parameters (W0, W1, DW).  Keys are case-insensitive.
    verbose : bool, optional
        Verbose output.
    tmpbase : str, optional
        Base directory for the temporary synple working directory.
    alinefile : str, optional
        Atomic linelist (synple default if None).
    mlinefile : str, optional
        Molecular linelist (synple default if None).

    Returns
    -------
    (wave, flux, cont) : tuple of numpy arrays
        The synthetic wavelength, flux and continuum arrays.
    """
    # Make temporary directory for synple to work in
    curdir = os.path.abspath(os.curdir)
    tdir = os.path.abspath(tempfile.mkdtemp(prefix="syn", dir=tmpbase))
    os.chdir(tdir)
    try:
        # Linelists to use
        linelist = ['gfallx3_bpo.19','kmol3_0.01_30.20']   # default values
        if alinefile is not None:   # atomic linelist input
            linelist[0] = alinefile
        if mlinefile is not None:   # molecular linelist input
            linelist[1] = mlinefile
        if verbose:
            print('Using linelist: ',linelist)
        # Make key names all CAPS
        inputs = dict((key.upper(), value) for (key, value) in inputs.items())
        # Make the model atmosphere file
        teff = inputs['TEFF']
        logg = inputs['LOGG']
        metal = inputs['FE_H']
        tid, modelfile = tempfile.mkstemp(prefix="mod", dir=".")
        os.close(tid)   # close the open file
        # Limit values to the model grid
        # of course the logg/feh ranges vary with Teff
        mteff = dln.limit(teff, 3500.0, 60000.0)
        mlogg = dln.limit(logg, 0.0, 5.0)
        mmetal = dln.limit(metal, -2.5, 0.5)
        model, header, tail = models.mkmodel(mteff, mlogg, mmetal, modelfile)
        inputs['modelfile'] = modelfile
        if os.path.exists(modelfile) is False or os.stat(modelfile).st_size == 0:
            print('model atmosphere file does NOT exist')
            import pdb; pdb.set_trace()   # debugging hook, kept deliberately
        # Create the synspec synthetic spectrum
        w0 = inputs['W0']
        w1 = inputs['W1']
        dw = inputs['DW']
        vmicro = inputs.get('VMICRO')
        vrot = inputs.get('VROT')
        if vrot is None:
            vrot = 0.0
        # Get the abundances
        abu = getabund(inputs, verbose=verbose)
        wave, flux, cont = synple.syn(modelfile, (w0,w1), dw, vmicro=vmicro, vrot=vrot,
                                      abu=list(abu), verbose=verbose, linelist=linelist)
    finally:
        # Always restore the cwd first, then delete the temporary files
        os.chdir(curdir)
        shutil.rmtree(tdir, ignore_errors=True)
    return (wave, flux, cont)
def smoothshift_spectrum(inpspec, vmicro=None, vrot=None, rv=None):
    """ This smoothes the spectrum by Vrot+Vmicro and
    shifts it by RV.

    Parameters
    ----------
    inpspec : Spec1D object
        Input spectrum (left unmodified; a copy is returned).
    vmicro : float, optional
        Gaussian (microturbulent) broadening velocity in km/s.
    vrot : float, optional
        Rotational broadening velocity (vsini) in km/s.
    rv : float, optional
        Doppler shift in km/s.

    Returns
    -------
    spec : Spec1D object
        Broadened/shifted copy of the input spectrum.
    """
    #vmicro = inputs.get('VMICRO')
    #vrot = inputs.get('VROT')
    #rv = inputs.get('RV')
    # Nothing to do
    if vmicro is None and vrot is None and rv is None:
        return inpspec.copy()
    # Initialize output spectrum
    spec = inpspec.copy()
    # Some broadening: Gaussian (vmicro) and/or rotational (vrot)
    if vmicro is not None or vrot is not None:
        flux = utils.broaden(spec.wave, spec.flux, vgauss=vmicro, vsini=vrot)
        spec.flux = flux
    ## Vrot/Vsini (km/s) and Vmicro (in km/s)
    #if vrot is not None or vmicro is not None:
    #    wave, flux = synple.call_rotin(wave, flux, vrot, fwhm, space, steprot, stepfwhm, clean=False, reuseinputfiles=True)
    # Doppler shift only (in km/s)
    if rv is not None:
        if rv != 0.0:
            shiftwave = spec.wave*(1+rv/cspeed)
            # gd = pixels still covered by the shifted grid, bd = pixels outside it
            gd, ngd, bd, nbd = dln.where( (spec.wave >= np.min(shiftwave)) & (spec.wave <= np.max(shiftwave)), comp=True)
            # Doppler shift and interpolate onto the original wavelength array
            if hasattr(spec, 'cont'):
                cont = synple.interp_spl(spec.wave[gd], shiftwave, spec.cont)
                spec.cont *= 0
                spec.cont[gd] = cont
                # interpolate the continuum to the missing pixels
                if nbd > 0:
                    contmissing = dln.interp(spec.wave[gd], spec.cont[gd], spec.wave[bd], kind='linear', assume_sorted=False)
                    spec.cont[bd] = contmissing
            flux = synple.interp_spl(spec.wave[gd], shiftwave, spec.flux)
            spec.flux *= 0
            spec.flux[gd] = flux
            if nbd > 0:
                # Fill in missing values with interpolated values
                # (quadratic polynomial extrapolation from the good pixels)
                if np.sum(np.isfinite(spec.flux[gd])) > 0:
                    coef = dln.poly_fit(spec.wave[gd], spec.flux[gd], 2)
                    fluxmissing = dln.poly(spec.wave[bd], coef)
                    spec.flux[bd] = fluxmissing
                # Mask these pixels
                if spec.mask is None:
                    spec.mask = np.zeros(len(spec.flux), bool)
                spec.mask[bd] = True
    return spec
def model_spectrum(inputs, verbose=False, keepextend=False, alinefile=None, mlinefile=None):
    """
    This creates a model spectrum given the inputs:
    RV, Teff, logg, vmicro, vsini, [Fe/H], [X/Fe], W0, W1, DW.
    This creates the new synthetic spectrum and then convolves with vmicro,
    vsini and shifts to velocity RV.
    The returned spectrum always uses AIR wavelengths!!!

    Change: two dead ``dln.closest`` calls whose results were never used
    have been removed.

    Parameters
    ----------
    inputs : dict
        Dictionary of model inputs (keys are case-insensitive).
    verbose : bool, optional
        Verbose output.
    keepextend : bool, optional
        Keep the extended wavelength range (do not trim back to [W0,W1]).
    alinefile : str, optional
        Atomic linelist (synple default if None).
    mlinefile : str, optional
        Molecular linelist (synple default if None).

    Returns
    -------
    synspec : Spec1D object
        The continuum-normalized synthetic spectrum (air wavelengths).
    """
    # Make key names all CAPS
    inputs = dict((key.upper(), value) for (key, value) in inputs.items())
    # Extend on the ends for RV/convolution purposes
    w0 = inputs['W0']
    w1 = inputs['W1']
    dw = inputs['DW']
    rv = inputs.get('RV')
    vrot = inputs.get('VROT')
    vmicro = inputs.get('VMICRO')
    inputsext = inputs.copy()
    if rv is not None or vrot is not None or vmicro is not None:
        # Allow for shifts of up to 1500 km/s.
        # NOTE(review): numext is computed in Angstroms but then applied as
        # a pixel count (numext*dw) -- confirm the intended units.
        numext = int(np.ceil(w1*(1.0+1500/cspeed)-w1))
        inputsext['W0'] = w0-numext*dw
        inputsext['W1'] = w1+numext*dw
        if verbose:
            print('Extending wavelength by '+str(numext)+' pixels on each end')
    # Create the synthetic spectrum
    # set vrot=vmicro=0, will convolve later if necessary
    inputsext['VMICRO'] = 0
    inputsext['VROT'] = 0
    wave1, flux1, cont1 = synple_wrapper(inputsext, verbose=verbose, alinefile=alinefile,
                                         mlinefile=mlinefile)
    # Continuum-normalize and package as a Spec1D (air wavelengths)
    synspec = Spec1D(flux1/cont1, err=flux1*0, wave=wave1, lsfpars=np.array(0.0))
    synspec.cont = cont1
    synspec.wavevac = False
    # Smooth and shift
    if rv is not None or vrot is not None or vmicro is not None:
        synspec = smoothshift_spectrum(synspec, vrot=vrot, vmicro=vmicro, rv=rv)
    # Trim to final wavelengths
    if keepextend is False:
        synspec = trim_spectrum(synspec, w0, w1)
    return synspec
def prepare_synthspec(synspec, lsf, norm=True, continuum_func=None):
    """ Prepare a synthetic spectrum to be compared to an observed spectrum.

    Converts air/vacuum wavelengths to match the LSF, rebins the model to a
    sampling the LSF can handle, convolves with the LSF, interpolates onto
    the observed wavelength grid, and optionally renormalizes.

    Parameters
    ----------
    synspec : Spec1D object
        Synthetic spectrum (assumed single order).
    lsf : LSF object
        The observed spectrum's LSF (provides wave, pars, xtype, wavevac).
    norm : bool, optional
        Renormalize the output spectrum with continuum_func.  Default True.
    continuum_func : callable, optional
        Continuum-fitting function used when norm=True.

    Returns
    -------
    pspec : Spec1D object
        The prepared spectrum on the observed wavelength grid.
    """
    # Convolve with LSF and do air<->vacuum wavelength conversion
    # Convert wavelength from air->vacuum or vice versa
    if synspec.wavevac != lsf.wavevac:
        # Air -> Vacuum
        if synspec.wavevac is False:
            synspec.wave = astro.airtovac(synspec.wave)
            synspec.wavevac = True
        # Vacuum -> Air
        else:
            # NOTE(review): this branch assigns .dispersion while the branch
            # above assigns .wave -- confirm which attribute is intended.
            synspec.dispersion = astro.vactoair(synspec.wave)
            synspec.wavevac = False
    # Initialize the output spectrum on the observed (LSF) wavelength grid
    if lsf.wave.ndim == 2:
        npix, norder = lsf.wave.shape
    else:
        npix = len(lsf.wave)
        norder = 1
    pspec = Spec1D(np.zeros((npix,norder),np.float32), err=np.zeros((npix,norder),np.float32),
                   wave=lsf.wave, lsfpars=lsf.pars, lsftype=lsf.lsftype, lsfxtype=lsf.xtype)
    pspec.cont = np.zeros((npix,norder),np.float32)
    if continuum_func is not None:
        pspec.continuum_func = continuum_func
    # Loop over orders
    if lsf.wave.ndim == 1:
        wave = np.atleast_2d(lsf.wave.copy()).T   # promote 1-D to (npix,1)
    else:
        wave = lsf.wave.copy()
    for o in range(lsf.norder):
        wobs = wave[:,o]
        dw = np.median(dln.slope(wobs))   # typical observed pixel step
        # Cut out the relevant chunk of the model (with 2-pixel padding)
        wv1, ind1 = dln.closest(synspec.wave, np.min(wobs)-2*np.abs(dw))
        wv2, ind2 = dln.closest(synspec.wave, np.max(wobs)+2*np.abs(dw))
        modelflux = synspec.flux[ind1:ind2+1]
        modelwave = synspec.wave[ind1:ind2+1]
        modelcont = synspec.cont[ind1:ind2+1]
        # Rebin, if necessary
        # get LSF FWHM (A) for a handful of positions across the spectrum
        xp = np.arange(npix//20)*20
        fwhm = lsf.fwhm(wobs[xp], xtype='Wave', order=o)
        # FWHM is in units of lsf.xtype, convert to wavelength/angstroms, if necessary
        if lsf.xtype.lower().find('pix') > -1:
            fwhm *= np.abs(dw)
        # convert FWHM (A) in number of model pixels at those positions
        dwmod = dln.slope(modelwave)
        dwmod = np.hstack((dwmod,dwmod[-1]))
        xpmod = dln.interp(modelwave, np.arange(len(modelwave)), wobs[xp], kind='cubic', assume_sorted=False, extrapolate=True)
        xpmod = np.round(xpmod).astype(int)
        fwhmpix = np.abs(fwhm/dwmod[xpmod])
        # need at least ~4 pixels per LSF FWHM across the spectrum
        # using 3 affects the final profile shape
        nbin = np.round(np.min(fwhmpix)//4).astype(int)
        if nbin > 1:
            # NOTE(review): npix2 is computed from len(synspec.flux) rather
            # than len(modelflux) -- confirm this is intended after trimming.
            npix2 = np.round(len(synspec.flux) // nbin).astype(int)
            modelflux = dln.rebin(modelflux[0:npix2*nbin],npix2)
            modelwave = dln.rebin(modelwave[0:npix2*nbin],npix2)
            modelcont = dln.rebin(modelcont[0:npix2*nbin],npix2)
        # Convolve with the (wavelength-dependent) LSF
        lsf2d = lsf.anyarray(modelwave, xtype='Wave', order=o, original=False)
        cflux = utils.convolve_sparse(modelflux,lsf2d)
        # Interpolate onto final wavelength array
        flux = synple.interp_spl(wobs, modelwave, cflux)
        cont = synple.interp_spl(wobs, modelwave, modelcont)
        pspec.flux[:,o] = flux
        pspec.cont[:,o] = cont
    pspec.normalized = True
    # Normalize
    if norm is True:
        newcont = pspec.continuum_func(pspec)
        pspec.flux /= newcont
        pspec.cont *= newcont
    return pspec
def mkbounds(params, paramlims=None):
    """ Make lower and upper boundaries for parameters.

    Bug fix: paramlims keys are now normalized to upper case before lookup.
    Previously the keys were uppercased only for matching and then used to
    index the original dictionary, so lowercase keys raised a KeyError.

    Parameters
    ----------
    params : list of str
        Parameter names (case-insensitive).
    paramlims : dict, optional
        Per-parameter [lower, upper] limits, overriding the defaults.
        If given, any named parameter missing from it keeps bounds (0, 0),
        matching the original behavior.

    Returns
    -------
    bounds : tuple of numpy arrays
        (lbounds, ubounds), each of length len(params).
    """
    params = np.char.array(params).upper()
    n = len(params)
    lbounds = np.zeros(n, np.float64)
    ubounds = np.zeros(n, np.float64)
    # Default limits for the named stellar parameters
    defaults = {'TEFF':   (3500, 60000),
                'LOGG':   (0, 5),
                'FE_H':   (-3, 1),
                'VMICRO': (0, 5),
                'VROT':   (0, 500),
                'RV':     (-1500, 1500)}
    limits = None
    if paramlims is not None:
        # Normalize user-supplied keys to all CAPS (bug fix)
        limits = dict((key.upper(), value) for (key, value) in paramlims.items())
    for name, (lo, hi) in defaults.items():
        g, = np.where(params == name)
        if len(g) > 0:
            if limits is None:
                lbounds[g[0]] = lo
                ubounds[g[0]] = hi
            elif name in limits:
                lbounds[g[0]] = limits[name][0]
                ubounds[g[0]] = limits[name][1]
    # Abundances ([X/H] except FE_H): fixed bounds, paramlims not consulted
    g, = np.where( (params.find('_H') != -1) & (params != 'FE_H') )
    if len(g) > 0:
        lbounds[g] = -3
        ubounds[g] = 10
    bounds = (lbounds, ubounds)
    return bounds
def mkdxlim(fitparams):
    """ Make array of parameter changes at which curve_fit should finish."""
    # Fixed convergence thresholds for the named parameters
    fixed = {'TEFF': 1.0, 'LOGG': 0.005, 'VMICRO': 0.1, 'VROT': 0.1, 'RV': 0.01}
    npar = len(fitparams)
    dx_lim = np.zeros(npar, float)
    for k, name in enumerate(fitparams):
        if name in fixed:
            dx_lim[k] = fixed[name]
        elif name.endswith('_H'):
            # Abundances (including FE_H)
            dx_lim[k] = 0.005
        else:
            dx_lim[k] = 0.01
    return dx_lim
def initpars(params, fitparams):
    """ Make initial set of parameters given PARAMS and
    FITPARAMS.

    Values present in PARAMS are used directly; anything missing falls back
    to a sensible default (abundances default to FE_H when available).
    """
    params = dict((key.upper(), value) for (key, value) in params.items())   # all CAPS
    fitparams = [v.upper() for v in fitparams]   # all CAPS
    npars = len(fitparams)
    pinit = np.zeros(npars, np.float64)
    # Defaults for parameters not supplied in PARAMS
    defaults = {'RV': 0.0, 'VMICRO': 2.0, 'VROT': 0.0, 'TEFF': 5000.0, 'LOGG': 3.0}
    for k, name in enumerate(fitparams):
        if name in params:
            # This parameter is in PARAMS
            pinit[k] = params[name]
        elif name in defaults:
            pinit[k] = defaults[name]
        elif name.endswith('_H'):
            # Abundances, use FE_H if possible
            pinit[k] = params.get('FE_H', 0.0)
        else:
            pinit[k] = 0.0
    return pinit
def specfigure(figfile, spec, fmodel, out, original=None, verbose=True, figsize=10):
    """ Make diagnostic figure.

    Parameters
    ----------
    figfile : str
        Output filename for the figure (overwritten if it exists).
    spec : Spec1D object
        The (masked) observed spectrum.
    fmodel : Spec1D object
        The best-fit model spectrum.
    out : structured array
        Catalog of best-fit values (only used by the commented-out annotations).
    original : Spec1D object, optional
        Original (unmasked) spectrum to overplot.
    verbose : bool, optional
        Print the figure filename when done.  Default True.
    figsize : float, optional
        Figure width in inches.  Default 10.
    """
    #import matplotlib
    matplotlib.use('Agg')   # non-interactive backend; render straight to file
    #import matplotlib.pyplot as plt
    if os.path.exists(figfile): os.remove(figfile)
    norder = spec.norder
    nlegcol = 2
    if original is not None: nlegcol = 3
    # Single-order plot
    if norder == 1:
        fig, ax = plt.subplots()
        fig.set_figheight(figsize*0.5)
        fig.set_figwidth(figsize)
        if original is not None:
            plt.plot(original.wave, original.flux, color='green', label='Original', linewidth=1)
        plt.plot(spec.wave, spec.flux, 'b', label='Masked Data', linewidth=0.5)
        plt.plot(fmodel.wave, fmodel.flux, 'r', label='Model', linewidth=0.5, alpha=0.8)
        leg = ax.legend(loc='upper left', frameon=True, framealpha=0.8, ncol=nlegcol)
        plt.xlabel('Wavelength (Angstroms)')
        plt.ylabel('Normalized Flux')
        xr = dln.minmax(spec.wave)
        yr = [np.min([spec.flux, fmodel.flux]), np.max([spec.flux, fmodel.flux])]
        if original is not None:
            yr = [np.min([original.flux, spec.flux, fmodel.flux]), np.max([spec.flux, fmodel.flux])]
        # Pad below for the legend, then clip to a sane display range
        yr = [yr[0]-dln.valrange(yr)*0.15, yr[1]+dln.valrange(yr)*0.005]
        yr = [np.max([yr[0], -0.2]), np.min([yr[1], 2.0])]
        plt.xlim(xr)
        plt.ylim(yr)
        snr = np.nanmedian(spec.flux/spec.err)   # median S/N (used only by the annotation below)
        plt.title(spec.filename)
        #ax.annotate(r'S/N=%5.1f Teff=%5.1f$\pm$%5.1f logg=%5.2f$\pm$%5.2f [Fe/H]=%5.2f$\pm$%5.2f Vrel=%5.2f$\pm$%5.2f chisq=%5.2f' %
        #            (snr, out['TEFF'], out['tefferr'], out['LOGG'], out['loggerr'], out['FE_H'], out['feherr'], out['RV'], out['vrelerr'], out['chisq']),
        #            xy=(np.mean(xr), yr[0]+dln.valrange(yr)*0.05),ha='center')
    # Multi-order plot
    else:
        fig, ax = plt.subplots(norder)
        fig.set_figheight(figsize)
        fig.set_figwidth(figsize)
        for i in range(norder):
            if original is not None:
                ax[i].plot(original.wave[:,i], original.flux[:,i], color='green', label='Original', linewidth=1)
            ax[i].plot(spec.wave[:,i], spec.flux[:,i], 'b', label='Masked Data', linewidth=0.5)
            ax[i].plot(fmodel.wave[:,i], fmodel.flux[:,i], 'r', label='Model', linewidth=0.5, alpha=0.8)
            if i == 0:
                leg = ax[i].legend(loc='upper left', frameon=True, framealpha=0.8, ncol=nlegcol)
            ax[i].set_xlabel('Wavelength (Angstroms)')
            ax[i].set_ylabel('Normalized Flux')
            xr = dln.minmax(spec.wave[:,i])
            yr = [np.min([spec.flux[:,i], fmodel.flux[:,i]]), np.max([spec.flux[:,i], fmodel.flux[:,i]])]
            if original is not None:
                yr = [np.min([original.flux[:,i], spec.flux[:,i], fmodel.flux[:,i]]), np.max([spec.flux[:,i], fmodel.flux[:,i]])]
            yr = [yr[0]-dln.valrange(yr)*0.05, yr[1]+dln.valrange(yr)*0.05]
            if i == 0:
                # Extra padding on the first panel for the legend
                yr = [yr[0]-dln.valrange(yr)*0.15, yr[1]+dln.valrange(yr)*0.05]
            yr = [np.max([yr[0], -0.2]), np.min([yr[1], 2.0])]
            ax[i].set_xlim(xr)
            ax[i].set_ylim(yr)
            # legend
            if i == 0:
                snr = np.nanmedian(spec.flux/spec.err)
                ax[i].set_title(spec.filename)
                #ax[i].annotate(r'S/N=%5.1f Teff=%5.1f$\pm$%5.1f logg=%5.2f$\pm$%5.2f [Fe/H]=%5.2f$\pm$%5.2f Vrel=%5.2f$\pm$%5.2f chisq=%5.2f' %
                #               (snr,out['teff'],out['tefferr'],out['logg'],out['loggerr'],out['feh'],out['feherr'],out['vrel'],out['vrelerr'],out['chisq']),
                #               xy=(np.mean(xr), yr[0]+dln.valrange(yr)*0.05),ha='center')
    plt.savefig(figfile, bbox_inches='tight')
    plt.close(fig)
    if verbose is True: print('Figure saved to '+figfile)
def dopvrot_lsq(spec, models=None, initpar=None, verbose=False, logger=None):
    """
    Least Squares fitting with forward modeling of the spectrum.

    Parameters
    ----------
    spec : Spec1D object
        The observed spectrum to match.
    models : list of Cannon models, optional
        A list of Cannon models to use.  The default is to load all of the Cannon
        models in the data/ directory and use those.
    initpar : numpy array, optional
        Initial estimate for [teff, logg, feh, RV, vsini], optional.
    verbose : bool, optional
        Verbose output of the various steps.  This is False by default.
    logger : logging object, optional
        Logging object.

    Returns
    -------
    out : numpy structured array
        The output structured array of the final derived RVs, stellar parameters and errors.
    bmodel : Spec1D object
        The best-fitting Cannon model spectrum (as Spec1D object).

    Example
    -------
    .. code-block:: python

         out, bmodel = dopvrot_lsq(spec)

    """
    if logger is None:
        logger = dln.basiclogger()
    # Load and prepare the Cannon models
    #-------------------------------------------
    if models is None:
        models = cannon.models.copy()
        models.prepare(spec)
    # Get initial estimates
    if initpar is None:
        initpar = np.array([6000.0, 2.5, -0.5, 0.0, 0.0])
    initpar = np.array(initpar).flatten()
    # Calculate the bounds: Teff/logg/feh from the union of the model grids,
    # RV and Vsini fixed below
    lbounds = np.zeros(5,float)+1e5
    ubounds = np.zeros(5,float)-1e5
    for p in models:
        lbounds[0:3] = np.minimum(lbounds[0:3], np.min(p.ranges,axis=1))
        ubounds[0:3] = np.maximum(ubounds[0:3], np.max(p.ranges,axis=1))
    lbounds[3] = -1000
    ubounds[3] = 1000
    lbounds[4] = 0.0
    ubounds[4] = 500.0
    bounds = (lbounds, ubounds)
    # function to use with curve_fit
    def spec_interp_vsini(x, teff, logg, feh, rv, vsini):
        """ This returns the interpolated model for a given spectrum."""
        # The "models" and "spec" must already exist outside of this function
        m = models(teff=teff, logg=logg, feh=feh, rv=rv)
        if m is None:   # there was a problem; return huge values so chisq blows up
            return np.zeros(spec.flux.shape,float).flatten()+1e30
        # Broaden to vsini, order by order
        if spec.norder > 1:
            smflux = spec.flux*0
            for k in range(spec.norder):
                smflux[:,k] = utils.broaden(m.wave[:,k], m.flux[:,k], vsini=vsini)
        else:
            smflux = utils.broaden(m.wave.flatten(), m.flux.flatten(), vsini=vsini)
        return smflux.flatten()
    def spec_interp_vsini_jac(x, *args):
        """ Compute the Jacobian matrix (an m-by-n matrix, where element (i, j)
        is the partial derivative of f[i] with respect to x[j]) with one-sided
        relative steps. """
        relstep = 0.02
        npix = len(x)
        npar = len(args)
        # Current values
        f0 = spec_interp_vsini(x, *args)
        # Initialize jacobian matrix
        jac = np.zeros((npix,npar), np.float64)
        # Loop over parameters
        for i in range(npar):
            pars = np.array(copy.deepcopy(args))
            step = relstep*pars[i]
            if step <= 0.0:
                # fallback absolute step for zero/negative values
                step = 0.02
            pars[i] += step
            f1 = spec_interp_vsini(x, *pars)
            jac[:,i] = (f1-f0)/step
        return jac
    # Use curve_fit
    lspars, lscov = curve_fit(spec_interp_vsini, spec.wave.flatten(), spec.flux.flatten(), sigma=spec.err.flatten(),
                              p0=initpar, bounds=bounds, jac=spec_interp_vsini_jac)
    # If it hits a boundary then the solution won't change much compared to initpar
    # setting absolute_sigma=True gives crazy low lsperror values
    lsperror = np.sqrt(np.diag(lscov))
    if verbose is True:
        logger.info('Least Squares RV and stellar parameters:')
        for k,n in enumerate(['Teff','logg','[Fe/H]','RV','Vsini']):
            logger.info('%s = %f' % (n,lspars[k]))
    lsmodel = spec_interp_vsini(spec.wave, teff=lspars[0], logg=lspars[1], feh=lspars[2], rv=lspars[3], vsini=lspars[4])
    lschisq = np.sqrt(np.sum(((spec.flux.flatten()-lsmodel)/spec.err.flatten())**2)/len(lsmodel))
    if verbose is True: logger.info('chisq = %5.2f' % lschisq)
    # Put it into the output structure
    npar = len(lspars)
    dtype = np.dtype([('pars',float,npar),('parerr',float,npar),('parcov',float,(npar,npar)),('chisq',float)])
    out = np.zeros(1,dtype=dtype)
    out['pars'] = lspars
    out['parerr'] = lsperror
    out['parcov'] = lscov
    out['chisq'] = lschisq
    return out, lsmodel
def fit_elem(spec, params, elem, verbose=0, alinefile=None, mlinefile=None, logger=None):
    """ Fit an individual element.

    Coarse grid scan of the abundance starting at -2.0 in steps of 1.0,
    followed by two bisection-style refinements around the minimum and a
    final quadratic interpolation.

    Parameters
    ----------
    spec : Spec1D object
        The observed spectrum to match.
    params : dict
        Initial/fixed parameter values.
    elem : list
        Abundance parameter names to fit (e.g. ['MG_H']); all entries are
        stepped together with a single abundance value.
    verbose : int, optional
        Verbosity level.
    alinefile : str, optional
        Atomic linelist filename.
    mlinefile : str, optional
        Molecular linelist filename.
    logger : logging object, optional
        Logging object.

    Returns
    -------
    out : numpy structured array
        Best-fit abundance, chisq and number of synthetic-spectrum evaluations.
    model : numpy array
        The best-fit model flux (flattened).
    """
    t0 = time.time()
    if logger is None:
        logger = dln.basiclogger()
    # Create fitparams
    #fitparams = [e+'_H' for e in elem]
    fitparams = elem.copy()
    if verbose > 0:
        logger.info('Fitting: '+', '.join(fitparams))
    # Initialize the fitter
    spfitter = SpecFitter(spec, params, fitparams=fitparams, verbose=(verbose>=2),
                          alinefile=alinefile, mlinefile=mlinefile)
    spfitter.logger = logger
    spfitter.norm = True   # normalize the synthetic spectrum
    #spfitter.verbose = True
    pinit = initpars(params, elem)
    bounds = mkbounds(elem)
    # Initalize output
    npar = len(fitparams)
    dtyp = []
    for f in fitparams:
        dtyp += [(f,float)]
    dtyp += [('pars',float,npar),('chisq',float),('nsynfev',int)]
    dtype = np.dtype(dtyp)
    out = np.zeros(1, dtype=dtype)
    # Loop over elemental abundances
    flag = 0
    abund = -2.0
    dabund = 1.0
    count = 0
    abundarr = []
    chisq = []
    modelarr = []
    # Loop from -2 to +1 or until we get through the minimum
    while (flag == 0):
        model = spfitter.model(spec.wave.flatten(), abund)
        chisq1 = spfitter.chisq(model)
        abundarr.append(abund)
        modelarr.append(model)
        chisq.append(chisq1)
        if verbose > 0:
            logger.info('%f %f' % (abund,chisq1))
        # Are we done?  Stop once past +1 AND past the chisq minimum
        if (abund >= 1) and (chisq1 != np.min(np.array(chisq))):
            flag = 1
        # Hard stop at the abundance upper bound
        if (abund >= 10):
            flag = 1
        # Increment the abundance
        abund += dabund
        count += 1
    # Best value is at the end, just return that value (no refinement possible)
    bestind = np.argmin(chisq)
    if (bestind == 0) or (bestind == len(chisq)-1):
        bestabund = abundarr[bestind]
        for k,f in enumerate(fitparams):
            out[f] = bestabund
        out['pars'] = bestabund
        out['chisq'] = np.min(chisq)
        out['nsynfev'] = spfitter.nsynfev
        model = modelarr[bestind]
        if verbose > 0:
            logger.info('%f %f' % (bestabund,np.min(chisq)))
            logger.info('nfev = %i' % spfitter.nsynfev)
            logger.info('dt = %.2f sec.' % (time.time()-t0))
            logger.info(' ')
        return out, model
    # Now refine twice
    for i in range(2):
        # Get best value
        bestind = np.argmin(np.array(chisq))
        # get values half-way to left and right
        # Left
        lftind = bestind-1
        lftabund = np.mean([abundarr[lftind],abundarr[bestind]])
        lftmodel = spfitter.model(spec.wave.flatten(), lftabund)
        lftchisq = spfitter.chisq(lftmodel)
        abundarr.append(lftabund)
        modelarr.append(lftmodel)
        chisq.append(lftchisq)
        if verbose > 0:
            logger.info('%f %f' % (lftabund,lftchisq))
        # Right
        rgtind = bestind+1
        rgtabund = np.mean([abundarr[bestind],abundarr[rgtind]])
        rgtmodel = spfitter.model(spec.wave.flatten(), rgtabund)
        rgtchisq = spfitter.chisq(rgtmodel)
        abundarr.append(rgtabund)
        modelarr.append(rgtmodel)
        chisq.append(rgtchisq)
        if verbose > 0:
            logger.info('%f %f' % (rgtabund,rgtchisq))
        # Sort arrays so the next refinement can index neighbors correctly
        si = np.argsort(abundarr)
        abundarr = [abundarr[k] for k in si]
        chisq = [chisq[k] for k in si]
        modelarr = [modelarr[k] for k in si]
    # Now interpolate to find the best value
    abundarr2 = np.linspace(np.min(abundarr),np.max(abundarr),1000)
    chisq2 = interp1d(abundarr,chisq,kind='quadratic')(abundarr2)
    bestind = np.argmin(chisq2)
    bestabund = abundarr2[bestind]
    # Get the model at the best value
    model = spfitter.model(spec.wave.flatten(), bestabund)
    bestchisq = spfitter.chisq(model)
    # Populate output structure
    for k,f in enumerate(fitparams):
        out[f] = bestabund
    out['pars'] = bestabund
    out['chisq'] = bestchisq
    out['nsynfev'] = spfitter.nsynfev
    if verbose > 0:
        logger.info('%f %f' % (bestabund,bestchisq))
        logger.info('nfev = %i' % spfitter.nsynfev)
        logger.info('dt = %.2f sec.' % (time.time()-t0))
        logger.info(' ')
    return out, model
def fit_lsq(spec, params, fitparams=None, paramlims=None, verbose=0, alinefile=None, mlinefile=None, logger=None):
    """
    Fit a spectrum with a synspec synthetic spectrum and determine stellar parameters and
    abundances using least-squares.

    Parameters
    ----------
    spec : Spec1D object
        The observed spectrum to match.
    params : dict
        Dictionary of initial values to use or parameters/elements to hold fixed.
    fitparams : list, optional
        List of parameter names to fit (e.g., TEFF, LOGG, FE_H, RV).  By default all values
        in PARAMS are fit.
    paramlims : dict, optional
        Dictionary of lower and upper limits for each of the fitparams.
    verbose : int, optional
        Verbosity level (0, 1, or 2).  The default is 0 and verbose=2 is for debugging.
    alinefile : str, optional
        The atomic linelist to use.  Default is None which means the default synple linelist is used.
    mlinefile : str, optional
        The molecular linelist to use.  Default is None which means the default synple linelist is used.
    logger : logging object, optional
        Logging object.

    Returns
    -------
    out : numpy structured array
        Catalog of best-fit values.
    model : numpy array
        The best-fit synthetic stellar spectrum.

    Example
    -------
    .. code-block:: python

         spec = doppler.read(file)
         params = {'teff':5500,'logg':3.0,'fe_h':-1.0,'rv':0.0,'ca_h':-1.0}
         fitparams = ['teff','logg','fe_h','rv','ca_h']
         out,model = specfit.fit_lsq(spec,params,fitparams=fitparams)

    """
    t0 = time.time()
    if logger is None:
        logger = dln.basiclogger()
    # Normalize the spectrum
    if spec.normalized == False:
        spec.normalize()
    # Capitalize the inputs
    # Make key names all CAPS
    params = dict((key.upper(), value) for (key, value) in params.items())
    # Fitting parameters
    if fitparams is None:
        fitparams = list(params.keys())
    fitparams = [v.upper() for v in fitparams]   # all CAPS
    npar = len(fitparams)
    # Initialize the fitter
    spfitter = SpecFitter(spec, params, fitparams=fitparams, verbose=(verbose>=2),
                          alinefile=alinefile, mlinefile=mlinefile)
    spfitter.logger = logger
    spfitter.norm = True   # normalize the synthetic spectrum
    pinit = initpars(params, fitparams)
    bounds = mkbounds(fitparams, paramlims)
    if verbose > 0:
        logger.info('Fitting: '+', '.join(fitparams))
    # Fit the spectrum using curve_fit
    # NOTE(review): dx_lim is not a scipy.optimize.curve_fit argument; this
    # relies on a custom/patched curve_fit -- confirm which one is imported.
    dx_lim = mkdxlim(fitparams)
    pars, cov = curve_fit(spfitter.model, spfitter.wave, spfitter.flux, dx_lim=dx_lim,
                          sigma=spfitter.err, p0=pinit, bounds=bounds, jac=spfitter.jac)
    error = np.sqrt(np.diag(cov))
    if verbose > 0:
        logger.info('Best values:')
        for k in range(npar):
            logger.info('%s = %.3f +/- %.3f' % (fitparams[k],pars[k],error[k]))
    model = spfitter.model(spfitter.wave, *pars)
    chisq = np.sqrt(np.sum(((spfitter.flux-model)/spfitter.err)**2)/len(model))
    if verbose > 0:
        logger.info('chisq = %.2f' % chisq)
        logger.info('nfev = %i' % spfitter.nsynfev)
        logger.info('dt = %.2f sec.' % (time.time()-t0))
    # Put it into the output structure
    dtyp = []
    for f in fitparams:
        dtyp += [(f,float),(f+'_ERR',float)]
    dtyp += [('pars',float,npar),('parerr',float,npar),('parcov',float,(npar,npar)),('chisq',float),('nsynfev',int)]
    dtype = np.dtype(dtyp)
    out = np.zeros(1, dtype=dtype)
    for k,f in enumerate(fitparams):
        out[f] = pars[k]
        out[f+'_ERR'] = error[k]
    out['pars'] = pars
    out['parerr'] = error
    out['parcov'] = cov
    out['chisq'] = chisq
    out['nsynfev'] = spfitter.nsynfev
    # Reshape final model spectrum
    model = model.reshape(spec.flux.shape)
    return out, model
def fit(spec,params=None,paramlims=None,elem=None,figfile=None,fitvsini=False,fitvmicro=False,
        verbose=1,alinefile=None,mlinefile=None,logger=None):
    """
    Fit a spectrum with a synspec synthetic spectrum and determine stellar parameters and
    abundances using a multi-step iterative method.

    Step 1: Fit Teff/logg/[Fe/H]/RV using Doppler
    Step 2: Fit Teff/logg/[Fe/H]/RV + vsini with Doppler model
    Step 3: Fit stellar parameters (Teff/logg/[Fe/H]/[alpha/H]), RV and broadening (Vrot/Vmicro)
    Step 4: Fit each element one at a time holding everything else fixed.
    Step 5: Fit everything simultaneously

    Parameters
    ----------
    spec : Spec1D object
       The observed spectrum to match.
    params : dict, optional
       Dictionary of initial values to use or parameters/elements to hold fixed.
    paramlims : dict, optional
       Dictionary of lower and upper limits for each of the params values.
       For example, if params is {'teff': 9000, 'logg': 4.00, 'rv': -16.124}, paramlims
       could be {'teff': [8000,10000], 'logg': [3.50,4.50], 'rv': [-20.124,-12.124]}.
    elem : list, optional
       List of elements to fit.  The default is:
       elem = ['C','N','O','NA','MG','AL','SI','K','CA','TI','V','CR','MN','CO','NI','CU','SR','CE','ND']
       Input an empty list [] to fit no elements.
    figfile : string, optional
       The filename for a diagnostic plot showing the observed spectrum and model spectrum.
    fitvsini : bool, optional
       Fit rotational velocity (vsini).  By default, Vsini will be fit initially with a Doppler
       model, but only included in the final fit if it improved chisq.
    fitvmicro : bool, optional
       Fit Vmicro.  Default is False.  By default, Vmicro is set (if not included in PARAMS)
       logg>=3.8: vmicro = 2.0
       logg<3.8:  vmicro = 10^(0.226-0.0228*logg+0.0297*(logg)^2-0.0113*(logg)^3)
    verbose : int, optional
       Verbosity level (0, 1, or 2).  The default is 0 and verbose=2 is for debugging.
    alinefile : str, optional
       The atomic linelist to use.  Default is None which means the default synple linelist is used.
    mlinefile : str, optional
       The molecular linelist to use.  Default is None which means the default synple linelist is used.
    logger : logging object, optional
       Logging object.

    Returns
    -------
    out : numpy structured array
       Catalog of best-fit values.
    model : numpy array
       The best-fit synthetic stellar spectrum.

    Example
    -------
    .. code-block:: python

         spec = doppler.read(file)
         out,model = specfit.fit(spec)

    """
    t0 = time.time()
    # Set up a default logger that writes timestamped messages to stdout
    if logger is None:
        logger = dln.basiclogger()
        logger.handlers[0].setFormatter(logging.Formatter("%(asctime)s [%(levelname)-5.5s] %(message)s"))
        logger.handlers[0].setStream(sys.stdout)
    # Default set of elements
    if elem is None:
        elem = ['C','N','O','NA','MG','AL','SI','K','CA','TI','V','CR','MN','CO','NI','CU','SR','CE','ND']
    # Normalize the spectrum
    if spec.normalized==False:
        spec.normalize()
    # Print out inputs
    if verbose>0:
        logger.info('Inputs:')
        if params is not None:
            logger.info('PARAMS:')
            for n in params.keys():
                logger.info('%s = %f' % (n,params[n]))
        else:
            logger.info('PARAMS: None')
        if fitvmicro:
            logger.info('Fitting VMICRO')
        if fitvsini:
            logger.info('Fitting VSINI')
        if len(elem)>0:
            logger.info('Elements to fit: '+', '.join(elem))
        else:
            logger.info('No elements to fit')
        logger.info(' ')
    # Input linelists
    # BUG FIX: the original passed the filename as a bare second argument with
    # no %s placeholder, which raises a logging format error and hides the name.
    if verbose and alinefile is not None:
        logger.info('Using input atomic linelist: %s', alinefile)
    if verbose and mlinefile is not None:
        logger.info('Using input molecular linelist: %s', mlinefile)

    # 1) Doppler (Teff, logg, feh, RV)
    #---------------------------------
    t1 = time.time()
    if verbose>0:
        logger.info('Step 1: Running Doppler')
    # Use Doppler to get initial guess of stellar parameters and RV
    dopout, dopfmodel, dopspecm = doppler.fit(spec)
    if verbose>0:
        logger.info('Teff = %.2f +/- %.2f' % (dopout['teff'][0],dopout['tefferr'][0]))
        logger.info('logg = %.3f +/- %.3f' % (dopout['logg'][0],dopout['loggerr'][0]))
        logger.info('[Fe/H] = %.3f +/- %.3f' % (dopout['feh'][0],dopout['feherr'][0]))
        logger.info('Vrel = %.4f +/- %.4f' % (dopout['vrel'][0],dopout['vrelerr'][0]))
        logger.info('chisq = %.3f' % dopout['chisq'][0])
        logger.info('dt = %.2f sec.' % (time.time()-t1))
    # typically 5 sec

    # 2) Fit vsini as well with Doppler model
    #-----------------------------------------
    t2 = time.time()
    if verbose>0:
        logger.info(' ')
        logger.info('Step 2: Fitting vsini with Doppler model')
    # For APOGEE resolution you need vsini~4 km/s or greater to see an effect
    initpar2 = [dopout['teff'][0], dopout['logg'][0], dopout['feh'][0], dopout['vrel'][0], 5.0]
    out2, model2 = dopvrot_lsq(spec,initpar=initpar2,verbose=verbose,logger=logger)
    if verbose>0:
        logger.info('Teff = %.2f +/- %.2f' % (out2['pars'][0][0],out2['parerr'][0][0]))
        logger.info('logg = %.3f +/- %.3f' % (out2['pars'][0][1],out2['parerr'][0][1]))
        logger.info('[Fe/H] = %.3f +/- %.3f' % (out2['pars'][0][2],out2['parerr'][0][2]))
        logger.info('Vrel = %.4f +/- %.4f' % (out2['pars'][0][3],out2['parerr'][0][3]))
        logger.info('Vsini = %.3f +/- %.3f' % (out2['pars'][0][4],out2['parerr'][0][4]))
        logger.info('chisq = %.3f' % out2['chisq'][0])
        logger.info('dt = %.2f sec.' % (time.time()-t2))
    # typically 5 sec
    # Keep the Vrot=0 Doppler solution if adding rotation did not improve chisq
    if out2['chisq'][0] > dopout['chisq'][0]:
        if verbose>0:
            logger.info('Doppler Vrot=0 chisq is better')
        out2['pars'][0] = [dopout['teff'][0],dopout['logg'][0],dopout['feh'][0],dopout['vrel'][0],0.0]

    # Initialize params
    if params is None:
        params = {}
    else:
        params = dict((key.upper(), value) for (key, value) in params.items())  # all CAPS
    # Using input values when possible, otherwise Doppler values
    for k,name in enumerate(['TEFF','LOGG','FE_H','RV','VROT']):
        if params.get(name) is None:
            params[name] = out2['pars'][0][k]
    # Get Vmicro using Teff/logg relation
    # APOGEE DR14 vmicro relation (Holtzman et al. 2018)
    # for stars with [M/H]>-1 and logg<3.8
    # vmicro = 10^(0.226-0.0228*logg+0.0297*(logg)^2-0.0113*(logg)^3)
    # coef = [0.226,0.0228,0.0297,-0.0113]
    # only giants, was fit in dwarfs
    if params.get('VMICRO') is None:
        vmicro = 2.0  # default
        if params['LOGG']<3.8:
            vmcoef = [0.226,0.0228,0.0297,-0.0113]
            vmicro = 10**dln.poly(params['LOGG'],vmcoef[::-1])
        params['VMICRO'] = vmicro
    # for giants
    # vmacro = 10^(0.741-0.0998*logg-0.225[M/H])
    # maximum of 15 km/s

    # 3) Fit stellar parameters (Teff, logg, feh, alpha, RV, Vsini)
    #--------------------------------------------------------------
    t3 = time.time()
    if verbose>0:
        logger.info(' ')
        logger.info('Step 3: Fitting stellar parameters, RV and broadening')
    params3 = params.copy()
    fitparams3 = ['TEFF','LOGG','FE_H','ALPHA_H','RV']
    if params3['VROT']>0 or fitvsini is True:
        fitparams3.append('VROT')
    # Fit Vmicro as well if it's a dwarf
    if params3['LOGG']>3.8 or params3['TEFF']>8000 or fitvmicro is True:
        fitparams3.append('VMICRO')
    out3, model3 = fit_lsq(spec,params3,fitparams3,paramlims,verbose=verbose,
                           alinefile=alinefile,mlinefile=mlinefile,logger=logger)
    # typically 9 min.
    # Should we fit C_H and N_H as well??

    # Tweak the continuum using the best-fit synthetic model.
    # BUG FIX: the message was gated by "verbose is not None", which is always
    # True for an integer verbose; gate only the log message on verbose>0.
    if verbose>0:
        logger.info('Tweaking continuum using best-fit synthetic model')
    tmodel = Spec1D(model3,wave=spec.wave.copy(),lsfpars=np.array(0.0))
    spec = doppler.rv.tweakcontinuum(spec,tmodel)

    # 4) Fit each element separately
    #-------------------------------
    t4 = time.time()
    if verbose>0:
        logger.info(' ')
        logger.info('Step 4: Fitting each element separately')
    # Start from the step-3 best-fit values
    params4 = params3.copy()
    for k in range(len(fitparams3)):
        params4[fitparams3[k]] = out3['pars'][0][k]
    nelem = len(elem)
    if nelem>0:
        if verbose>0:
            logger.info('Elements: '+', '.join(elem))
        # NOTE: np.str was removed from NumPy (>=1.24); the builtin str is the
        # exact equivalent here ((str,10) -> 'U10').
        elemcat = np.zeros(nelem,dtype=np.dtype([('name',str,10),('par',np.float64),('parerr',np.float64)]))
        elemcat['name'] = elem
        for k in range(nelem):
            t4b = time.time()
            parselem = params4.copy()
            # Alpha elements start from the fitted [alpha/H], others from [Fe/H]
            if elem[k] in ['O','MG','SI','S','CA','TI']:
                parselem[elem[k]+'_H'] = params4['ALPHA_H']
            else:
                parselem[elem[k]+'_H'] = params4['FE_H']
            fitparselem = [elem[k]+'_H']
            #out4, model4 = fit_lsq(spec,parselem,fitparselem,verbose=verbose,logger=logger)
            out4, model4 = fit_elem(spec,parselem,fitparselem,verbose=verbose,
                                    alinefile=alinefile,mlinefile=mlinefile,logger=logger)
            elemcat['par'][k] = out4['pars'][0]
            #elemcat['parerr'][k] = out4['parerr'][0]
            if verbose>0:
                # BUG FIX: per-element elapsed time used t4 (start of the whole
                # step) instead of t4b (start of this element).
                logger.info('dt = %f sec.' % (time.time()-t4b))
                logger.info(' ')
    else:
        if verbose>0:
            logger.info('No elements to fit')
    # about 50 min.

    # 5) Fit all parameters simultaneously
    #---------------------------------------
    # if NO elements to fit, then nothing to do
    if nelem>0:
        t5 = time.time()
        if verbose>0:
            logger.info('Step 5: Fit all parameters simultaneously')
        params5 = params4.copy()
        for k in range(nelem):
            params5[elem[k]+'_H'] = elemcat['par'][k]
        # Individual abundances now replace the global alpha abundance
        if params5.get('ALPHA_H') is not None:
            del params5['ALPHA_H']
        fitparams5 = ['TEFF','LOGG','FE_H','RV']
        if 'VROT' in fitparams3 or fitvsini is True:
            fitparams5.append('VROT')
        if 'VMICRO' in fitparams3 or fitvmicro is True:
            fitparams5.append('VMICRO')
        fitparams5 = fitparams5+list(np.char.array(elem)+'_H')
        out5, model5 = fit_lsq(spec,params5,fitparams5,paramlims,verbose=verbose,
                               alinefile=alinefile,mlinefile=mlinefile,logger=logger)
    else:
        out5 = out3
        model5 = model3
        fitparams5 = fitparams3

    # Make final structure and save the figure
    dtyp = []
    npar = len(fitparams5)
    for f in fitparams5:
        dtyp += [(f,float),(f+'_ERR',float)]
    dtyp += [('pars',float,npar),('parerr',float,npar),('parcov',float,(npar,npar)),('chisq',float),('vhelio',float)]
    dtype = np.dtype(dtyp)
    out = np.zeros(1,dtype=dtype)
    for k,f in enumerate(fitparams5):
        out[f] = out5['pars'][0][k]
        out[f+'_ERR'] = out5['parerr'][0][k]
    out['pars'] = out5['pars'][0]
    out['parerr'] = out5['parerr'][0]
    out['parcov'] = out5['parcov'][0]
    out['chisq'] = out5['chisq'][0]
    out['vhelio'] = out5['RV']+spec.barycorr()
    if verbose>0:
        # BUG FIX: index the length-1 array; formatting a numpy array with %f
        # is deprecated/raises in newer NumPy.
        logger.info('Vhelio = %.3f' % out['vhelio'][0])
    # Final model
    model = Spec1D(model5,wave=spec.wave.copy(),lsfpars=np.array(0.0))
    model.lsf = spec.lsf.copy()
    # Make figure
    if figfile is not None:
        specfigure(figfile,spec,model,out,verbose=(verbose>=2))
    if verbose>0:
        logger.info('dt = %.2f sec.' % (time.time()-t0))

    return out, model
/Django-SupportTools-3.6.0.tar.gz/Django-SupportTools-3.6.0/supporttools/static/supporttools/js/main.js |
$(function () {
    "use strict";

    // Mobile/tablet: the tool menu is collapsed behind a toggle button.
    if (window.support.is_mobile || window.support.is_tablet) {
        $('a#tool_menu_button').click(function (event) {
            event.preventDefault();
            $('#tool_menu').toggleClass('hide-div').toggleClass('show-div');
        });
    }
    // else desktop... hide the menu button and show the full menu list
    else {
        $('a#tool_menu_button').hide();
        $('#tool_menu').removeClass('hide-div');
        // get correct height for sidebar
        calculate_sidebar_height();
    }

    // jQuery helper: $.keys(obj) returns the object's enumerable keys.
    $.extend({
        keys: function (obj) {
            var a = [];
            $.each(obj, function (k) { a.push(k); });
            return a;
        }
    });

    // Map a service-relative href (student/identity API paths) onto the
    // matching proxy root found in the default sidebar links.
    // Returns the absolute proxied url, or null when no mapping applies.
    function anchor(href) {
        var service = href.match(/^(\/((sws\/)?student|(pws\/)?identity)\/v[0-9]+).*/),
            url = null;

        if (service) {
            $('.default-sidelinks a').each(function () {
                var root = $(this).attr('href'),
                    offset;

                if (root) {
                    offset = root.indexOf(service[1]);
                    if (offset > 0) {
                        url = root.substr(0, offset) + href;
                        return false; // stop iterating: first match wins
                    }
                }
                return true;
            });
        }
        return url;
    }

    // Render a single JSON property value into $container.
    // Returns false when the value should be ignored, true when it was
    // already rendered (nested object/array), or the displayable
    // value/jQuery node otherwise.
    function presentJSONPropertyValue($container, key, value) {
        var $a,
            href;

        if (key.toLowerCase() === 'href') {
            href = anchor(value);
            if (href) {
                $a = $('<a></a>');
                $a.attr('href', href);
                // Zero-width spaces after break-friendly characters let long
                // urls wrap in the table cells.
                $a.html(value ? value.replace(/([&\/,])/g, '$1\u200b') : 'NULL');
                value = $a;
            }
        } else if (key.toLowerCase() === 'name') {
            // Non-breaking spaces keep a name on a single line.
            value = value ? value.replace(/([ ])/g, '\u00a0') : 'NULL';
        } else {
            switch ($.type(value)) {
            case 'undefined':
            case 'null':
                return false; // ignore
            case 'array':
                if (value.length === 0) {
                    return false; // ignore
                }
                /* falls through */
            case 'object':
                presentJSON($container, value);
                return true; // handled
            case 'boolean':
                value = value.toString();
                break;
            case 'string':
                if (value.length === 0) {
                    return false; // ignore
                }
                /* falls through */
            default:
                break;
            }
        }
        return value;
    }

    // Render a JSON object or array into $container as nested lists and,
    // for arrays of objects, as a sortable table.
    function presentJSON($container, json_obj) {
        var $ul = $('<ul>'),
            $li,
            $span,
            $table,
            $thead,
            $tbody,
            $tr,
            $td,
            keys,
            key,
            value,
            a_obj,
            d;

        if ($.isArray(json_obj) && json_obj.length > 0) {
            if ($.type(json_obj[0]) !== 'object') {
                // Array of scalars: render as a comma-separated list.
                d = [];
                $.each(json_obj, function () {
                    value = presentJSONPropertyValue($container, '', this);
                    d.push(value);
                });
                $container.append(d.join(', '));
                return;
            } else if (json_obj.length === 1 ||
                       window.support.suppress_json_tables) {
                // Single object (or tables suppressed): one list per entry.
                $.each(json_obj, function () {
                    presentJSON($container, this);
                    $container.append($('<p></p>'));
                });
                return;
            }

            // gather unique keys and non-empty, interesting columns
            d = {};
            $.each(json_obj, function () {
                a_obj = this;
                $.each($.keys(a_obj), function () {
                    value = a_obj[this];
                    if (d.hasOwnProperty(this)) {
                        // NOTE(review): last_value is never updated past the
                        // first row, so "interesting" is judged against row
                        // one only -- confirm this is intended.
                        if (d[this].valid) {
                            d[this].valid = !(((
                                ($.type(value) === 'string' &&
                                 $.type(d[this].last_value) === 'string') ||
                                ($.type(value) === 'number' &&
                                 $.type(d[this].last_value) === 'number')) &&
                                value === d[this].last_value));
                        } else {
                            if (($.type(value) === 'string' &&
                                 $.type(d[this].last_value) === 'string' &&
                                 value.length && value !== d[this].last_value) ||
                                ($.type(value) === 'number' &&
                                 $.type(d[this].last_value) === 'number' &&
                                 value !== d[this].last_value) ||
                                (d[this].last_value === null && value !== null)) {
                                d[this].valid = true;
                            }
                        }
                    } else {
                        d[this] = {
                            valid: !(($.type(value) === 'string' && value.length === 0) || value === null),
                            last_value: value
                        };
                    }
                });
            });

            keys = $.map(d, function (o, k) { return ((o.valid) ? k : null); }).sort(function (a, b) {
                var a_lower = a.toLowerCase(),
                    b_lower = b.toLowerCase();

                // special key sort preference: 'href' sorts after 'name'/'regid'
                if (a_lower === 'href' &&
                        (b_lower === 'name' || b_lower === 'regid')) {
                    return 1;
                }
                if (b_lower === 'href' &&
                        (a_lower === 'name' || a_lower === 'regid')) {
                    return -1;
                }
                // BUG FIX: was "return a - b", which is NaN for strings and
                // left the column order effectively unsorted; compare the
                // lowercased keys lexicographically instead.
                if (a_lower < b_lower) {
                    return -1;
                }
                return (a_lower > b_lower) ? 1 : 0;
            });

            $li = $('<li>');
            $table = $('<table class="sws-array table table-striped"></table>');
            $thead = $('<thead></thead>');
            $tbody = $('<tbody></tbody>');
            $tr = $('<tr></tr>');
            $.each(keys, function () {
                $tr.append('<th>' + this + '</th>');
            });
            $thead.append($tr);
            $table.append($thead);
            $.each(json_obj, function () {
                a_obj = this;
                $tr = $('<tr></tr>');
                $.each(keys, function () {
                    key = this;
                    $td = $('<td></td>');
                    if (a_obj.hasOwnProperty(key)) {
                        value = presentJSONPropertyValue($td, key, a_obj[key]);
                        if ($.type(value) !== 'boolean') {
                            $td.append(value);
                        }
                    }
                    $tr.append($td);
                });
                $tbody.append($tr);
            });
            $table.append($tbody);
            $li.append($table);
            $ul.append($li);
        } else {
            // Plain object (or empty array): key/value pairs as a list.
            $.each(json_obj, function (k, v) {
                $li = $('<li>');
                $li.append('<span class="json-key">' + k + ' : </span>');
                value = presentJSONPropertyValue($li, k, v);
                switch (value) {
                case true: // handled (nested structure already appended)
                    break;
                case false: // ignore
                    return true;
                default: // display
                    $span = $('<span class="json-value"></span>');
                    $span.append(value);
                    $li.append($span);
                    break;
                }
                $ul.append($li);
            });
        }
        if ($ul.find('li').length) {
            $container.append($ul);
        }
    }

    // display digested JSON
    if (window.hasOwnProperty('restclients_json_data') &&
            window.restclients_json_data) {
        var $h1 = $('.restclients-response-content h1').detach(),
            $form = $('.restclients-response-content form').detach(),
            $tabs = $('#restclient-tabs').detach(),
            original_html = $('.restclients-response-content').html();

        $('.restclients-response-content').empty();
        $('.restclients-response-content').append($h1);
        $('.restclients-response-content').append($form);
        $tabs.appendTo('.restclients-response-content');
        $('.main-content').html(original_html);
        $('.jsonview-response').JSONView(window.restclients_json_data, { collapsed: true });
        presentJSON($('.restclients-digested-response'), window.restclients_json_data);
        $tabs.show();
        $('.table.sws-array').each(function () {
            $(this).dataTable({
                searching: false,
                paging: false,
                info: false,
                'aaSorting': [[ 0, "asc" ]],
                'bPaginate': false
            });
        });
    }
});
function calculate_sidebar_height() {
    // Size the sidebar list to fill the remaining page height: the header
    // area minus the fixed top bar (60px), the netid/tool block, the footer,
    // and a 20px margin allowance.
    var totalHeight = $(".header").height();
    var topBarHeight = 60;
    var netidHeight = $(".tool-app").outerHeight();
    var footerHeight = $(".footer").outerHeight();
    var sidebarHeight = totalHeight - (topBarHeight + netidHeight + footerHeight) - 20;
    $(".tool-list-inner").height(sidebarHeight);
}
$(window).resize(function () {
    // Sidebar height only tracks the viewport on desktop layouts; mobile
    // and tablet use the collapsible menu instead.
    if (window.support.is_mobile || window.support.is_tablet) {
        return;
    }
    // get correct height for sidebar
    calculate_sidebar_height();
});
/CPAT-3.0.4.tar.gz/CPAT-3.0.4/.eggs/nose-1.3.7-py3.7.egg/nose/selector.py | import logging
import os
import unittest
from nose.config import Config
from nose.util import split_test_name, src, getfilename, getpackage, ispackage, is_executable
# Module-level logger used to trace selection decisions (wantClass, wantFile, ...).
log = logging.getLogger(__name__)
__all__ = ['Selector', 'defaultSelector', 'TestAddress']
# for efficiency and easier mocking
# Local aliases of the os.path helpers used throughout this module; tests can
# patch these names without touching os.path itself.
op_join = os.path.join
op_basename = os.path.basename
op_exists = os.path.exists
op_splitext = os.path.splitext
op_isabs = os.path.isabs
op_abspath = os.path.abspath
class Selector(object):
    """Core test selector. Examines test candidates and determines whether,
    given the specified configuration, the test candidate should be selected
    as a test.
    """
    def __init__(self, config):
        if config is None:
            config = Config()
        self.configure(config)

    def configure(self, config):
        """Cache the configuration attributes that drive selection."""
        self.config = config
        self.exclude = config.exclude
        self.ignoreFiles = config.ignoreFiles
        self.include = config.include
        self.plugins = config.plugins
        self.match = config.testMatch

    def matches(self, name):
        """Does the name match my requirements?

        To match, a name must match config.testMatch OR config.include
        and it must not match config.exclude.  Returns a bool (the
        original returned whatever truthy regex-match object/list it
        found; all callers only use it in a boolean context).
        """
        wanted = bool(self.match.search(name)
                      or (self.include
                          and any(inc.search(name) for inc in self.include)))
        if wanted and self.exclude:
            wanted = not any(exc.search(name) for exc in self.exclude)
        return wanted

    def wantClass(self, cls):
        """Is the class a wanted test class?

        A class must be a unittest.TestCase subclass, or match test name
        requirements. Classes that start with _ are always excluded.
        An explicit __test__ attribute overrides everything; plugins get
        the final say.
        """
        declared = getattr(cls, '__test__', None)
        if declared is not None:
            wanted = declared
        else:
            wanted = (not cls.__name__.startswith('_')
                      and (issubclass(cls, unittest.TestCase)
                           or self.matches(cls.__name__)))

        plug_wants = self.plugins.wantClass(cls)
        if plug_wants is not None:
            log.debug("Plugin setting selection of %s to %s", cls, plug_wants)
            wanted = plug_wants
        log.debug("wantClass %s? %s", cls, wanted)
        return wanted

    def wantDirectory(self, dirname):
        """Is the directory a wanted test directory?

        All package directories match, so long as they do not match exclude.
        All other directories must match test requirements or be a
        configured source directory.  Plugins get the final say.
        """
        tail = op_basename(dirname)
        if ispackage(dirname):
            wanted = (not self.exclude
                      or not any(exc.search(tail) for exc in self.exclude))
        else:
            wanted = (self.matches(tail)
                      or (self.config.srcDirs
                          and tail in self.config.srcDirs))

        plug_wants = self.plugins.wantDirectory(dirname)
        if plug_wants is not None:
            log.debug("Plugin setting selection of %s to %s",
                      dirname, plug_wants)
            wanted = plug_wants
        log.debug("wantDirectory %s? %s", dirname, wanted)
        return wanted

    def wantFile(self, file):
        """Is the file a wanted test file?

        The file must be a python source file and match testMatch or
        include, and not match exclude. Files that match ignore are *never*
        wanted, regardless of plugin, testMatch, include or exclude settings.
        """
        # never, ever load files that match anything in ignore
        # (.* _* and *setup*.py by default)
        base = op_basename(file)
        if any(ignore_this.search(base) for ignore_this in self.ignoreFiles):
            log.debug('%s matches ignoreFiles pattern; skipped',
                      base)
            return False
        if not self.config.includeExe and is_executable(file):
            log.info('%s is executable; skipped', file)
            return False
        dummy, ext = op_splitext(base)
        pysrc = ext == '.py'

        wanted = pysrc and self.matches(base)

        plug_wants = self.plugins.wantFile(file)
        if plug_wants is not None:
            log.debug("plugin setting want %s to %s", file, plug_wants)
            wanted = plug_wants
        log.debug("wantFile %s? %s", file, wanted)
        return wanted

    def wantFunction(self, function):
        """Is the function a test function?

        Functions whose name starts with _ are never wanted; an explicit
        __test__ attribute overrides the name match.
        """
        try:
            if hasattr(function, 'compat_func_name'):
                funcname = function.compat_func_name
            else:
                funcname = function.__name__
        except AttributeError:
            # not a function
            return False
        declared = getattr(function, '__test__', None)
        if declared is not None:
            wanted = declared
        else:
            wanted = not funcname.startswith('_') and self.matches(funcname)
        plug_wants = self.plugins.wantFunction(function)
        if plug_wants is not None:
            wanted = plug_wants
        log.debug("wantFunction %s? %s", function, wanted)
        return wanted

    def wantMethod(self, method):
        """Is the method a test method?

        'Private' methods (leading underscore) are never collected; an
        explicit __test__ attribute overrides the name match.
        """
        try:
            method_name = method.__name__
        except AttributeError:
            # not a method
            return False
        if method_name.startswith('_'):
            # never collect 'private' methods
            return False
        declared = getattr(method, '__test__', None)
        if declared is not None:
            wanted = declared
        else:
            wanted = self.matches(method_name)
        plug_wants = self.plugins.wantMethod(method)
        if plug_wants is not None:
            wanted = plug_wants
        log.debug("wantMethod %s? %s", method, wanted)
        return wanted

    def wantModule(self, module):
        """Is the module a test module?

        The tail of the module name must match test requirements. One exception:
        we always want __main__.
        """
        declared = getattr(module, '__test__', None)
        if declared is not None:
            wanted = declared
        else:
            wanted = (self.matches(module.__name__.split('.')[-1])
                      or module.__name__ == '__main__')
        plug_wants = self.plugins.wantModule(module)
        if plug_wants is not None:
            wanted = plug_wants
        log.debug("wantModule %s? %s", module, wanted)
        return wanted
# Default selector class; test loaders use this name so plugins/configs can
# substitute their own Selector subclass.
defaultSelector = Selector
class TestAddress(object):
    """A test address represents a user's request to run a particular
    test. The user may specify a filename or module (or neither),
    and/or a callable (a class, function, or method). The naming
    format for test addresses is:

    filename_or_module:callable

    Filenames that are not absolute will be made absolute relative to
    the working dir.

    The filename or module part will be considered a module name if it
    doesn't look like a file, that is, if it doesn't exist on the file
    system and it doesn't contain any directory separators and it
    doesn't end in .py.

    Callables may be a class name, function name, method name, or
    class.method specification.
    """
    def __init__(self, name, workingDir=None):
        # Relative filenames are resolved against workingDir (defaults to
        # the current directory).
        if workingDir is None:
            workingDir = os.getcwd()
        self.name = name
        self.workingDir = workingDir
        # split_test_name() parses "filename_or_module:callable"; any of the
        # three parts may come back as None.
        self.filename, self.module, self.call = split_test_name(name)
        log.debug('Test name %s resolved to file %s, module %s, call %s',
                  name, self.filename, self.module, self.call)
        if self.filename is None:
            if self.module is not None:
                # No file given: try to locate the module's source file.
                self.filename = getfilename(self.module, self.workingDir)
        if self.filename:
            # src() maps compiled files back to their source equivalent
            # (presumably .pyc/.pyo -> .py; see nose.util.src).
            self.filename = src(self.filename)
            if not op_isabs(self.filename):
                self.filename = op_abspath(op_join(workingDir,
                                                   self.filename))
            if self.module is None:
                # Derive the dotted module name from the file's package path.
                self.module = getpackage(self.filename)
        log.debug(
            'Final resolution of test name %s: file %s module %s call %s',
            name, self.filename, self.module, self.call)

    def totuple(self):
        # (filename, module, call) triple consumed by the test loader.
        return (self.filename, self.module, self.call)

    def __str__(self):
        # The original, unresolved user-supplied test name.
        return self.name

    def __repr__(self):
        return "%s: (%s, %s, %s)" % (self.name, self.filename,
                                     self.module, self.call)
/FlaskCms-0.0.4.tar.gz/FlaskCms-0.0.4/flask_cms/static/js/ckeditor/plugins/a11yhelp/dialogs/lang/sv.js | /*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("a11yhelp","sv",{title:"Hjälpmedelsinstruktioner",contents:"Hjälpinnehåll. För att stänga denna dialogruta trycker du på ESC.",legend:[{name:"Allmänt",items:[{name:"Editor verktygsfält",legend:"Tryck på ${toolbarFocus} för att navigera till verktygsfältet. Flytta till nästa och föregående verktygsfältsgrupp med TAB och SHIFT-TAB. Flytta till nästa och föregående knapp i verktygsfältet med HÖGERPIL eller VÄNSTERPIL. Tryck Space eller ENTER för att aktivera knappen i verktygsfältet."},
{name:"Dialogeditor",legend:"Inuti en dialogruta, tryck TAB för att navigera till nästa fält i dialogrutan. Du trycker SKIFT + TAB för att flytta till föregående fält. Tryck ENTER för att skicka. Du avbryter och stänger dialogen med ESC. För dialogrutor som har flera flikar, tryck ALT + F10 navigera till fliklistan. Flytta sedan till nästa flik med HÖGERPIL. Flytta till föregående flik med SHIFT + TAB eller VÄNSTERPIL. Tryck Space eller ENTER för att välja fliken."},{name:"Editor för innehållsmeny",
legend:"Tryck på $ {contextMenu} eller PROGRAMTANGENTEN för att öppna snabbmenyn. Flytta sedan till nästa menyalternativ med TAB eller NEDPIL. Flytta till föregående alternativ med SHIFT + TABB eller UPPIL. Tryck Space eller ENTER för att välja menyalternativ. Öppna undermeny av nuvarande alternativ med SPACE eller ENTER eller HÖGERPIL. Gå tillbaka till överordnade menyalternativ med ESC eller VÄNSTERPIL. Stäng snabbmenyn med ESC."},{name:"Editor för List Box",legend:"Inuti en list-box, gå till nästa listobjekt med TAB eller NEDPIL. Flytta till föregående listobjekt med SHIFT + TAB eller UPPIL. Tryck Space eller ENTER för att välja listan alternativet. Tryck ESC för att stänga listan-boxen."},
{name:"Editor för elementens sökväg",legend:"Tryck på $ {elementsPathFocus} för att navigera till verktygsfältet för elementens sökvägar. Flytta till nästa elementknapp med TAB eller HÖGERPIL. Flytta till föregående knapp med SKIFT + TAB eller VÄNSTERPIL. Tryck Space eller ENTER för att välja element i redigeraren."}]},{name:"Kommandon",items:[{name:"Kommandot ångra",legend:"Tryck på ${undo}"},{name:"Kommandot gör om",legend:"Tryck på ${redo}"},{name:"Kommandot fet stil",legend:"Tryck på ${bold}"},
{name:"Kommandot kursiv",legend:"Tryck på ${italic}"},{name:"Kommandot understruken",legend:"Tryck på ${underline}"},{name:"kommandot länk",legend:"Tryck på ${link}"},{name:"Verktygsfält Dölj kommandot",legend:"Tryck på ${toolbarCollapse}"},{name:"Gå till föregående fokus plats",legend:"Tryck på ${accessPreviousSpace} för att gå till närmast onåbarara utrymme före markören, exempel: två intilliggande HR element. Repetera tangentkombinationen för att gå till nästa."},{name:"Tillgå nästa fokuskommandots utrymme",
legend:"Tryck ${accessNextSpace} på för att komma åt den närmaste onåbar fokus utrymme efter cirkumflex, till exempel: två intilliggande HR element. Upprepa tangentkombinationen för att nå avlägsna fokus utrymmen."},{name:"Hjälp om tillgänglighet",legend:"Tryck ${a11yHelp}"}]}]}); | PypiClean |
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_dyo-sn.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
  // Number of digits after the decimal point in n's default string form.
  var str = n + '';
  var dotIndex = str.indexOf('.');
  return dotIndex < 0 ? 0 : str.length - dotIndex - 1;
}

function getVF(n, opt_precision) {
  // Split n into v (count of visible fraction digits, capped at 3 unless an
  // explicit precision is supplied) and f (those fraction digits as an
  // integer), per the CLDR plural-rule operands.
  var v = (opt_precision === undefined) ? Math.min(getDecimals(n), 3)
                                        : opt_precision;
  var base = Math.pow(10, v);
  var f = ((n * base) | 0) % base;
  return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Dimas",
"Tene\u014b",
"Talata",
"Alarbay",
"Aramisay",
"Arjuma",
"Sibiti"
],
"MONTH": [
"Sanvie",
"F\u00e9birie",
"Mars",
"Aburil",
"Mee",
"Sue\u014b",
"S\u00fauyee",
"Ut",
"Settembar",
"Oktobar",
"Novembar",
"Disambar"
],
"SHORTDAY": [
"Dim",
"Ten",
"Tal",
"Ala",
"Ara",
"Arj",
"Sib"
],
"SHORTMONTH": [
"Sa",
"Fe",
"Ma",
"Ab",
"Me",
"Su",
"S\u00fa",
"Ut",
"Se",
"Ok",
"No",
"De"
],
"fullDate": "EEEE d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y HH:mm:ss",
"mediumDate": "d MMM y",
"mediumTime": "HH:mm:ss",
"short": "d/M/y HH:mm",
"shortDate": "d/M/y",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "CFA",
"DECIMAL_SEP": ",",
"GROUP_SEP": "\u00a0",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a0\u00a4",
"posPre": "",
"posSuf": "\u00a0\u00a4"
}
]
},
"id": "dyo-sn",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/LIV_robotics-0.1.5-py3-none-any.whl/liv/trainer.py | import clip
import numpy as np
from numpy.core.numeric import full
import torch
import torch.nn as nn
import torch.nn.functional as F
import random
from pathlib import Path
from torchvision.utils import save_image
import time
import copy
import torchvision.transforms as T
epsilon = 1e-8
def do_nothing(x): return x
class Trainer():
def __init__(self):
self.clip_loss_img = nn.CrossEntropyLoss()
self.clip_loss_txt = nn.CrossEntropyLoss()
# A simplified function call of CLIP.forward()
def compute_clip_loss(self, model, image_features, text_features):
image_features = image_features / image_features.norm(dim=1, keepdim=True)
text_features = text_features / text_features.norm(dim=1, keepdim=True)
logits_per_image = image_features @ text_features.t()
logits_per_text = logits_per_image.t()
ground_truth = torch.arange(len(image_features),dtype=torch.long,device=image_features.device)
clip_loss = (self.clip_loss_img(logits_per_image,ground_truth) + self.clip_loss_txt(logits_per_text,ground_truth))/2
return clip_loss
def compute_vip_loss(self, model, e0, es0_vip, es1_vip, eg, b_reward, num_negatives=0):
r = b_reward.to(e0.device)
V_0 = model.module.sim(e0, eg)
V_s = model.module.sim(es0_vip, eg)
V_s_next = model.module.sim(es1_vip, eg)
# Rescale Value
V_0 = V_0 / (1-model.module.gamma)
V_s = V_s / (1-model.module.gamma)
V_s_next = V_s_next / (1-model.module.gamma)
# Compute VIP Loss
V_loss = (1-model.module.gamma) * -V_0.mean() + torch.log(epsilon + torch.mean(torch.exp(-(r + model.module.gamma * V_s_next - V_s))))
# Optionally, add additional "negative" observations
if num_negatives > 0:
V_s_neg = []
V_s_next_neg = []
for _ in range(num_negatives):
perm = torch.randperm(es0_vip.size()[0])
es0_vip_shuf = es0_vip[perm]
es1_vip_shuf = es1_vip[perm]
V_s_neg.append(model.module.sim(es0_vip_shuf, eg))
V_s_next_neg.append(model.module.sim(es1_vip_shuf, eg))
V_s_neg = torch.cat(V_s_neg)
V_s_next_neg = torch.cat(V_s_next_neg)
r_neg = -torch.ones(V_s_neg.shape).to(V_0.device)
V_s_neg = V_s_neg / (1-model.module.gamma)
V_s_next_neg = V_s_next_neg / (1-model.module.gamma)
V_loss = V_loss + torch.log(epsilon + torch.mean(torch.exp(-(r_neg + model.module.gamma * V_s_next_neg - V_s_neg))))
return V_loss
def update(self, model, batch, step, eval=False):
t0 = time.time()
metrics = dict()
if eval:
model.eval()
else:
model.train()
## Batch
b_im, b_reward, b_lang = batch
b_im = b_im.cuda()
bs = b_im.shape[0]
img_stack_size = b_im.shape[1]
H = b_im.shape[-2]
W = b_im.shape[-1]
b_im_r = b_im.reshape(bs*img_stack_size, 3, H, W)
# Encode visual and text inputs
e_img = model(b_im_r, modality="vision")
b_token = clip.tokenize(b_lang)
e_lang = model(b_token, modality="text")
e_img = e_img.reshape(bs, img_stack_size, -1)
e0 = e_img[:, 0] # initial, o_0
eg = e_img[:, 1] # final, o_g
es0_vip = e_img[:, 2] # o_t
es1_vip = e_img[:, 3] # o_t+1
eg_img = e_img[:, -1]
full_loss = 0
## CLIP Loss
if model.module.clipweight != 0:
clip_loss = self.compute_clip_loss(model, eg_img, e_lang)
clip_loss = model.module.clipweight * clip_loss
metrics['clip_loss'] = clip_loss.item()
full_loss += clip_loss
## VIP Loss (Visual)
vip_loss_visual = self.compute_vip_loss(model, e0, es0_vip, es1_vip, eg, b_reward, model.module.num_negatives)
metrics['vip_loss_visual'] = vip_loss_visual.item()
full_loss += model.module.visionweight * vip_loss_visual
## VIP Loss (Language)
if model.module.langweight != 0:
vip_loss_lang = self.compute_vip_loss(model, e0, es0_vip, es1_vip, e_lang, b_reward, model.module.num_negatives)
metrics['vip_loss_lang'] = vip_loss_lang.item()
full_loss += model.module.langweight * vip_loss_lang
metrics['full_loss'] = full_loss.item()
if not eval:
model.module.encoder_opt.zero_grad()
full_loss.backward()
model.module.encoder_opt.step()
return metrics, None | PypiClean |
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/juliapanel.py | import wx
from aptus.gui.computepanel import MiniComputePanel
from aptus.gui.ids import *
from aptus.gui.misc import AptusToolFrame, ListeningWindowMixin
class JuliaPanel(MiniComputePanel, ListeningWindowMixin):
    """A panel that renders the Julia set for the point currently under
    the mouse in another (tracked) Aptus view window.
    """

    def __init__(self, parent, viewwin, size=wx.DefaultSize):
        """Create a JuliaPanel under `parent`, tracking window `viewwin`."""
        MiniComputePanel.__init__(self, parent, size=size)
        ListeningWindowMixin.__init__(self)
        self.viewwin = viewwin
        # Stay in sync with the tracked window's coloring and mouse position.
        self.register_listener(self.on_coloring_changed, EVT_APTUS_COLORING_CHANGED, self.viewwin)
        self.register_listener(self.draw_julia, EVT_APTUS_INDICATEPOINT, self.viewwin)
        self.compute.center, self.compute.diam = (0.0, 0.0), (3.0, 3.0)
        self.compute.mode = 'julia'
        self.on_coloring_changed(None)
        # Defer the first draw until after the window actually appears, so
        # layout-dependent state is in place before we compute anything.
        wx.CallAfter(self.draw_julia)

    def draw_julia(self, event=None):
        """Recompute the Julia set for the tracked window's current point.

        Several different events can trigger this, so the mouse position is
        taken from the event when available and queried live otherwise.
        """
        if event and hasattr(event, 'point'):
            point = event.point
        else:
            point = self.viewwin.ScreenToClient(wx.GetMousePosition())
        info = self.viewwin.get_point_info(point)
        if info:
            self.compute.rijulia = info['r'], info['i']
            self.compute.iter_limit = 1000
        else:
            self.compute.rijulia = 0, 0
        self.compute.create_mandel()
        self.computation_changed()
        # Let the main window handle the event as well.
        if event:
            event.Skip()

    def on_coloring_changed(self, event_unused):
        # Mirror the tracked window's coloring; redraw only if it changed.
        if self.compute.copy_coloring(self.viewwin.compute):
            self.coloring_changed()
class JuliaFrame(AptusToolFrame):
    """Floating tool frame that hosts a JuliaPanel for a view window."""

    def __init__(self, mainframe, viewwin):
        AptusToolFrame.__init__(self, mainframe, title='Julia Set', size=(180, 180))
        self.panel = JuliaPanel(self, viewwin)
/MagickPy-0.3.tar.gz/MagickPy-0.3/magickpy/image.py | from magickpy import lib
from magickpy.util import wrap_ptr_class
from magickpy.enums import *
from magickpy.types import (_ExceptionInfo, ExceptionInfo,
TimerInfo, ProfileInfo, ImageMagickException,
GeometryInfo, QuantizeInfo,
PixelPacket, Color, RectangleInfo, ChromaticityInfo, ErrorInfo)
import ctypes
# Public API of this module; everything else is an implementation detail
# of the ctypes bindings.
__all__ = [
    'Image',
    'ImageInfo',
]
class _ImageInfo(ctypes.Structure):
    """ctypes layout of ImageMagick's ImageInfo struct (read/write options).

    The field order and ctypes types define the C memory layout, so they
    must exactly match the ImageMagick version these bindings target — do
    not reorder, insert, or remove entries.
    """
    _fields_ = [
        ('compression', ctypes.c_int),
        ('orientation', ctypes.c_int),
        ('temporary', ctypes.c_int),
        ('adjoin', ctypes.c_int),
        ('affirm', ctypes.c_int),
        ('antialias', ctypes.c_int),
        ('size', ctypes.c_char_p),
        ('extract', ctypes.c_char_p),
        ('page', ctypes.c_char_p),
        ('scenes', ctypes.c_char_p),
        ('scene', ctypes.c_ulong),
        ('number_scenes', ctypes.c_ulong),
        ('depth', ctypes.c_ulong),
        # NOTE(review): 'inteface' looks like a typo for 'interface'; kept
        # as-is because only field *order* matters for the C layout, and
        # Python callers may already rely on this spelling.
        ('inteface', ctypes.c_int),
        ('endian', ctypes.c_int),
        ('units', ctypes.c_int),
        ('quality', ctypes.c_ulong),
        ('sampling_factor', ctypes.c_char_p),
        ('server_name', ctypes.c_char_p),
        ('font', ctypes.c_char_p),
        ('texture', ctypes.c_char_p),
        ('density', ctypes.c_char_p),
        ('pointsize', ctypes.c_double),
        ('fuzz', ctypes.c_double),
        ('background_color', PixelPacket),
        ('border_color', PixelPacket),
        ('matte_color', PixelPacket),
        ('dither', ctypes.c_int),
        # NOTE(review): 'mnochrome' looks like a typo for 'monochrome';
        # kept for the same reason as 'inteface' above.
        ('mnochrome', ctypes.c_int),
        ('colors', ctypes.c_ulong),
        ('colorspace', ctypes.c_int),
        ('type', ctypes.c_int),
        ('preview_type', ctypes.c_int),
        ('group', ctypes.c_long),
        ('ping', ctypes.c_int),
        ('verbose', ctypes.c_int),
        ('view', ctypes.c_char_p),
        ('authenticate', ctypes.c_char_p),
        ('channel', ctypes.c_int),
        # Opaque pointers owned and managed by ImageMagick itself.
        ('attributes', ctypes.c_void_p),
        ('options', ctypes.c_void_p),
        ('progress_monitor', ctypes.c_void_p),
        ('client_data', ctypes.c_void_p),
        ('cache', ctypes.c_void_p),
        ('stream', ctypes.c_void_p),
        ('file', ctypes.c_void_p),
        ('blob', ctypes.c_void_p),
        ('length', ctypes.c_ulong),
        # Fixed-size 4096-byte text buffers embedded in the struct.
        ('magick', ctypes.c_char * 4096),
        ('unique', ctypes.c_char * 4096),
        ('zero', ctypes.c_char * 4096),
        ('filename', ctypes.c_char * 4096),
        ('debug', ctypes.c_int),
        ('tile', ctypes.c_char_p),
        ('subimage', ctypes.c_ulong),
        ('subrange', ctypes.c_ulong),
        ('pen', PixelPacket),
        ('signature', ctypes.c_ulong),
        ('virtual_pixel_method', ctypes.c_void_p),
        ('transparent_color', PixelPacket),
        ('profile', ctypes.c_void_p),
    ]


# Smart-pointer wrapper: instances are allocated with AcquireImageInfo and
# released with DestroyImageInfo when garbage-collected.
ImageInfo = wrap_ptr_class(_ImageInfo, lib.AcquireImageInfo, lib.DestroyImageInfo)
class _Image(ctypes.Structure):
    """ctypes layout of ImageMagick's Image struct (a decoded image).

    Declared with an empty body first because the struct is
    self-referential ('clip_mask' and 'mask' point back to _Image);
    the fields are attached below the class statement.
    """
    pass


# Field order and types define the C memory layout — keep in exact sync
# with the targeted ImageMagick version; do not reorder or rename.
_Image._fields_ = [
    ('storage_class', ctypes.c_int),
    ('colorspace', ctypes.c_int),
    ('compression', ctypes.c_int),
    ('quality', ctypes.c_ulong),
    ('operation', ctypes.c_int),
    ('taint', ctypes.c_int),
    ('matte', ctypes.c_int),
    # Image dimensions in pixels; exposed as Image.width/.height below.
    ('columns', ctypes.c_ulong),
    ('rows', ctypes.c_ulong),
    ('depth', ctypes.c_ulong),
    ('colors', ctypes.c_ulong),
    ('colormap', ctypes.POINTER(PixelPacket)),
    ('background_color', PixelPacket),
    ('border_color', PixelPacket),
    ('matte_color', PixelPacket),
    ('gamma', ctypes.c_double),
    ('chromaticity', ChromaticityInfo),
    ('rendering_intent', ctypes.c_int),
    ('profiles', ctypes.c_void_p),
    ('units', ctypes.c_int),
    ('montage', ctypes.c_char_p),
    ('directory', ctypes.c_char_p),
    ('geometry', ctypes.c_char_p),
    ('offset', ctypes.c_long),
    ('x_resolution', ctypes.c_double),
    ('y_resolution', ctypes.c_double),
    ('page', RectangleInfo),
    ('extract_info', RectangleInfo),
    ('the_info', RectangleInfo),
    ('bias', ctypes.c_double),
    ('blur', ctypes.c_double),
    ('fuzz', ctypes.c_double),
    ('filter', ctypes.c_int),
    ('interlace', ctypes.c_int),
    ('endian', ctypes.c_int),
    ('gravity', ctypes.c_int),
    ('compose', ctypes.c_int),
    ('dispose', ctypes.c_int),
    # Self-referential pointers — the reason _fields_ is assigned late.
    ('clip_mask', ctypes.POINTER(_Image)),
    ('scene', ctypes.c_ulong),
    ('delay', ctypes.c_ulong),
    ('ticks_per_second', ctypes.c_long),
    ('iterations', ctypes.c_ulong),
    ('total_colors', ctypes.c_ulong),
    ('start_loop', ctypes.c_long),
    ('error', ErrorInfo),
    ('timer', TimerInfo),
    ('progress_monitor', ctypes.c_int),
    ('client_data', ctypes.c_void_p),
    ('cache', ctypes.c_void_p),
    ('attributes', ctypes.c_void_p),
    ('ascii85', ctypes.c_void_p),
    ('blob', ctypes.c_void_p),
    # Fixed-size 4096-byte text buffers embedded in the struct.
    ('filename', ctypes.c_char * 4096),
    ('magick_filename', ctypes.c_char * 4096),
    ('magick', ctypes.c_char * 4096),
    ('magick_columns', ctypes.c_ulong),
    ('magick_rows', ctypes.c_ulong),
    # Per-image pending exception; read by the apply_image_wrapper helpers.
    ('exception', _ExceptionInfo),
    ('debug', ctypes.c_int),
    ('reference_count', ctypes.c_long),
    ('semaphore', ctypes.c_void_p),
    ('color_profile', ProfileInfo),
    ('iptc_profile', ProfileInfo),
    ('generic_profile', ctypes.POINTER(ProfileInfo)),
    ('generic_profiles', ctypes.c_ulong),
    ('signature', ctypes.c_ulong),
    # Linked-list pointers used by ImageMagick for image sequences.
    ('previous', ctypes.c_void_p),
    ('list', ctypes.c_void_p),
    ('next', ctypes.c_void_p),
    ('interpolate', ctypes.c_int),
    ('black_point_compensation', ctypes.c_int),
    ('transparent_color', PixelPacket),
    ('mask', ctypes.POINTER(_Image)),
    ('tile_offset', RectangleInfo),
    ('properties', ctypes.c_void_p),
    ('artifacts', ctypes.c_void_p),
]
def new_image_wrapper(fun, *argtypes):
    """Build a method around a C function that returns a new image.

    Declares ``fun``'s argtypes as (image, *argtypes, exception-info) and
    returns a method that raises ImageMagickException on a NULL result,
    otherwise wraps the returned pointer in an ``Image``.
    """
    fun.argtypes = [_PImage] + list(argtypes) + [ExceptionInfo]

    def method(self, *call_args):
        exc = ExceptionInfo()
        result = fun(self, *call_args, exc)
        if not result:
            raise ImageMagickException(exc)
        return Image(result)

    return method
def apply_image_wrapper(fun, *argtypes):
    """Build a method around a C function that mutates an image in place.

    Declares ``fun``'s argtypes as (image, *argtypes) and returns a method
    that converts the C return code to bool, raising ImageMagickException
    when the call failed and the image carries a pending exception.
    """
    fun.argtypes = [_PImage] + list(argtypes)

    def method(self, *call_args):
        result = fun(self, *call_args)
        if not result and self.exception:
            raise ImageMagickException(self.exception)
        return bool(result)

    return method
_PImage = wrap_ptr_class(_Image, lambda:lib.AllocateImage(None), lib.DestroyImage, classname="_PImage")
class PixelWrapper(object):
def __init__(self, im, x, y, w, h):
self.im = im
self.x = x
self.y = y
self.w = w
self.h = h
def __enter__(self):
exc = ExceptionInfo()
px = lib.GetAuthenticPixels(self.im, self.x, self.y, self.w, self.h, exc)
if not px:
raise ImageMagickException(exc)
self.px = ctypes.cast(px, ctypes.POINTER(PixelPacket*(self.w*self.h)))
return self
def __exit__(self, A, B, C):
del self.px
exc = ExceptionInfo()
lib.SyncAuthenticPixels(self.im, exc)
del self.im
def __setitem__(self, coord, value):
x, y = coord
if x >= self.w or y >= self.h or x < 0 or y < 0:
raise ValueError("Wrong coordinates %d, %d" % (x, y))
if not isinstance(value, PixelPacket):
value = PixelPacket(*value)
self.px[0][y*self.w+x] = value
def __getitem__(self, coord):
x, y = coord
if x >= self.w or y >= self.h or x < 0 or y < 0:
raise ValueError("Wrong coordinates %d, %d" % (x, y))
return self.px[0][y*self.w+x]
class Image(_PImage):
    """High-level wrapper around an ImageMagick image pointer.

    Instances are created through the classmethods (read/tile/ping/create)
    or by wrapping a raw pointer returned from the C library.  Methods
    named ``make*`` return a new Image; methods named ``apply*``/``set*``
    mutate this image in place.
    """

    @classmethod
    def read(C, file):
        # Load an image from a file path.  Only string paths are supported.
        if isinstance(file, str):
            inf = ImageInfo()
            inf.filename = file
            exinfo = ExceptionInfo()
            res = lib.ReadImage(inf, exinfo)
            if not res:
                raise ImageMagickException(exinfo)
            return C(res)
        else:
            raise NotImplementedError

    @classmethod
    def tile(C, file, width, height):
        # Read `file` tiled to width x height via the "tile:" pseudo-format.
        if isinstance(file, str):
            inf = ImageInfo()
            file = 'tile:' + file
            inf.size = "%dx%d" % (width, height)
            inf.filename = file
            try:
                exinfo = ExceptionInfo()
                res = lib.ReadImage(inf, exinfo)
                if not res:
                    raise ImageMagickException(exinfo)
                return C(res)
            finally:
                # NOTE(review): clears the 'size' and 'file' struct fields
                # after the call; 'file' is the FILE* slot — possibly
                # 'filename' was intended here.  Confirm against the C API.
                inf.size = 0
                inf.file = 0
        else:
            raise NotImplementedError

    @classmethod
    def ping(C, file):
        # Like read(), but PingImage only reads metadata, not pixel data.
        if isinstance(file, str):
            inf = ImageInfo()
            inf.filename = file
            exinfo = ExceptionInfo()
            res = lib.PingImage(inf, exinfo)
            if not res:
                raise ImageMagickException(exinfo)
            return C(res)
        else:
            raise NotImplementedError

    @classmethod
    def create(C, width, height, color):
        # Create a new solid-color image, forced to RGB colorspace.
        inf = ImageInfo()
        res = lib.NewMagickImage(inf, width, height, ctypes.byref(color))
        if not res:
            # NOTE(review): _ImageInfo declares no 'exception' field, so
            # this attribute access looks like it would fail on the error
            # path — verify against wrap_ptr_class / the struct definition.
            raise ImageMagickException(inf.exception)
        im = C(res)
        im.setColorspace(ColorspaceType.RGB)
        im.setBackgroundColor(color)
        return im

    def write(self, file):
        # Write the image to a file path; ImageMagick infers the output
        # format from the extension.  The destination is stored in the
        # image struct's own 'filename' buffer.
        if isinstance(file, str):
            inf = ImageInfo()
            self.filename = file
            if not lib.WriteImage(inf, self):
                raise ImageMagickException(self.exception)
            return True
        else:
            raise NotImplementedError

    def display(self):
        # Show the image using ImageMagick's built-in display facility.
        inf = ImageInfo()
        res = lib.DisplayImages(inf, self)
        if not res:
            raise ImageMagickException(self.exception)

    def __bool__(self):
        # A wrapped image pointer is always truthy, even when empty.
        return True

    @property
    def width(self):
        # Width in pixels (alias for the C struct's 'columns').
        return self.columns

    @property
    def height(self):
        # Height in pixels (alias for the C struct's 'rows').
        return self.rows

    def draw(self, string):
        # Render a drawing-primitive string onto the image in place.
        inf = DrawInfo()
        data = string.encode('utf-8')
        buf = ctypes.c_buffer(data)
        inf.primitive = ctypes.cast(buf, ctypes.c_char_p)
        try:
            if not lib.DrawImage(self.value, inf):
                raise ImageMagickException(self.exception)
        finally:
            # Detach the Python-owned buffer so DrawInfo's destructor
            # does not try to free it.
            inf.primitive = 0

    def makeCrop(self, geometry_or_width, height=None, x=None, y=None):
        # Accepts either a ready-made geometry object or (width, height, x, y).
        if height is None:
            return self._makeCrop(geometry_or_width)
        geom = RectangleInfo(geometry_or_width, height, x, y)
        return self._makeCrop(geom)

    def makeColorize(self, color, opacity_r, opacity_g=None, opacity_b=None):
        # Blend `color` into the image.  Opacity may be a preformatted
        # string or 1-3 per-channel values (missing channels default to
        # the red-channel value).
        if isinstance(opacity_r, str):
            opacity = opacity_r
        else:
            if opacity_g is None:
                opacity_g = opacity_r
            if opacity_b is None:
                opacity_b = opacity_r
            opacity = "%u/%u/%u" % (opacity_r, opacity_g, opacity_b)
        return self._makeColorize(opacity.encode('ascii'), color)

    def copyPixels(self, source, sx, sy, w, h, tx, ty):
        # Copy a w x h pixel block from `source` at (sx, sy) into this
        # image at (tx, ty), one PixelPacket at a time, then flush.
        exc = ExceptionInfo()
        dest = lib.GetAuthenticPixels(self, tx, ty, w, h, exc)
        if not dest:
            raise ImageMagickException(exc)
        src = lib.GetAuthenticPixels(source, sx, sy, w, h, exc)
        if not src:
            raise ImageMagickException(exc)
        artype = ctypes.POINTER(PixelPacket*(w*h))
        artype1 = PixelPacket*(w*h)
        dest = ctypes.cast(dest, artype)
        src = ctypes.cast(src, artype)
        for i in range(w*h):
            dest[0][i] = src[0][i]
        if not lib.SyncAuthenticPixels(self, exc):
            raise ImageMagickException(exc)

    def getPixels(self, x, y, w, h):
        # Context-manager access to a rectangular region; see PixelWrapper.
        return PixelWrapper(self, x, y, w, h)

    # --- thin wrappers over the C API ----------------------------------
    # make* return a new Image; apply* mutate in place and return bool.
    _makeCrop = new_image_wrapper(lib.CropImage, ctypes.POINTER(RectangleInfo))
    makeBlur = new_image_wrapper(lib.BlurImage, ctypes.c_double, ctypes.c_double)
    makeAdaptiveBlur = new_image_wrapper(lib.AdaptiveBlurImage, ctypes.c_double, ctypes.c_double)
    makeGaussianBlur = new_image_wrapper(lib.GaussianBlurImage, ctypes.c_double, ctypes.c_double)
    makeMotionBlur = new_image_wrapper(lib.MotionBlurImage, ctypes.c_double, ctypes.c_double, ctypes.c_double)
    makeShade = new_image_wrapper(lib.ShadeImage, ctypes.c_int, ctypes.c_double, ctypes.c_double)
    _makeColorize = new_image_wrapper(lib.ColorizeImage, ctypes.c_char_p, Color)
    makeThumbnail = new_image_wrapper(lib.ThumbnailImage, ctypes.c_ulong, ctypes.c_ulong)
    makeScale = new_image_wrapper(lib.ScaleImage, ctypes.c_ulong, ctypes.c_ulong)
    makeSample = new_image_wrapper(lib.SampleImage, ctypes.c_ulong, ctypes.c_ulong)
    makeResize = new_image_wrapper(lib.ResizeImage, ctypes.c_ulong, ctypes.c_ulong, FilterTypes, ctypes.c_double)
    makeTrim = new_image_wrapper(lib.TrimImage)
    makeExtent = new_image_wrapper(lib.ExtentImage, ctypes.POINTER(RectangleInfo))
    makeBorder = new_image_wrapper(lib.BorderImage, ctypes.POINTER(RectangleInfo))
    makeSwirl = new_image_wrapper(lib.SwirlImage, ctypes.c_double)
    makeRoll = new_image_wrapper(lib.RollImage, ctypes.c_long, ctypes.c_long)
    _applyContrastStretch = apply_image_wrapper(lib.ContrastStretchImage, ctypes.c_char_p)
    applyNormalize = apply_image_wrapper(lib.NormalizeImage)
    applyComposite = apply_image_wrapper(lib.CompositeImage, CompositeOp, _PImage, ctypes.c_int, ctypes.c_int)
    _applySigmoidalContrast = apply_image_wrapper(lib.SigmoidalContrastImage, ctypes.c_int, ctypes.c_char_p)
    applySeparateChannel = apply_image_wrapper(lib.SeparateImageChannel, ChannelType)
    applyNegate = apply_image_wrapper(lib.NegateImage, ctypes.c_int)
    applyTransparent = apply_image_wrapper(lib.TransparentImage, PixelPacket, ctypes.c_uint16)

    def applyQuantize(self, number_colors, dither=DitherMethod.No,
                      colorspace=ColorspaceType.RGB, measure_error=False):
        # Reduce the image to at most `number_colors` colors.
        # NOTE(review): the `colorspace` and `measure_error` parameters are
        # accepted but never forwarded to QuantizeInfo — confirm intent.
        qi = QuantizeInfo()
        qi.dither_method = dither
        qi.dither = dither != DitherMethod.No
        qi.number_colors = number_colors
        lib.QuantizeImage(qi, self)

    def applyDissolve(self, im, x=0, y=0, percent=None, dst_percent=None):
        # Composite `im` onto self with dissolve opacity; the percentages
        # are passed through im.geometry, which is saved and restored.
        g = im.geometry
        s = dst_percent is not None and "%fx%f" % (percent, dst_percent) or "%f" % percent
        im.geometry = s
        try:
            return self.applyComposite(CompositeOp.Dissolve, im, x, y)
        finally:
            im.geometry = g

    def applyContrastStretch(self, val):
        # `val` is a string argument, ASCII-encoded before the C call.
        self._applyContrastStretch(val.encode('ascii'))

    def applySigmoidalContrast(self, a, b):
        # `b` is a string argument, ASCII-encoded before the C call.
        self._applySigmoidalContrast(a, b.encode('ascii'))

    def compare(self, other, metric):
        # Compare all channels against `other`; returns a tuple of
        # (distortion value, difference image).
        dbl = (ctypes.c_double*1)()
        exp = ExceptionInfo()
        img = CompareImageChannels(self, other, ChannelType.All, metric, dbl, exp)
        if not img:
            raise ImageMagickException(exp)
        return (dbl[0], Image(img))

    setColorspace = apply_image_wrapper(lib.SetImageColorspace, ColorspaceType)

    def copy(self):
        # Clone the image; CloneImage(0, 0) keeps the original dimensions.
        exc = ExceptionInfo()
        res = lib.CloneImage(self, 0, 0, True, exc)
        if not res:
            raise ImageMagickException(exc)
        return Image(res)

    def setBackgroundColor(self, color):
        # Store the color in the struct, then let the library apply it.
        self.background_color = color
        lib.SetImageBackgroundColor(self)

    def setMatte(self, value):
        # Toggle the matte (alpha) flag only when it would actually change.
        # NOTE(review): always passes OpaqueOpacity regardless of `value`;
        # verify this matches SetImageOpacity's intended semantics.
        if bool(value) != bool(self.matte):
            lib.SetImageOpacity(self, OpaqueOpacity)

    def setVirtualPixelMethod(self, value):
        lib.SetImageVirtualPixelMethod(self, int(value))
# Direct binding for image comparison; used by Image.compare() above.
CompareImageChannels = lib.CompareImageChannels
CompareImageChannels.argtypes = [_PImage, _PImage, ChannelType, MetricType, ctypes.POINTER(ctypes.c_double), ExceptionInfo]

## Constants
# Extremes of ImageMagick's 16-bit opacity channel.
OpaqueOpacity = 0
TransparentOpacity = 65535

from magickpy.draw import DrawInfo  # imported last to avoid a circular import
/Halocoin-0.1.0.4.tar.gz/Halocoin-0.1.0.4/halocoin/static/dashboard/js/bootstrap.min.js | if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){if(a(b.target).is(this))return b.handleObj.handler.apply(this,arguments)}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.7",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a("#"===f?[]:f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var 
e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.7",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c).prop(c,!0)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c).prop(c,!1))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var d=a(c.target).closest(".btn");b.call(d,"toggle"),a(c.target).is('input[type="radio"], input[type="checkbox"]')||(c.preventDefault(),d.is("input,button")?d.trigger("focus"):d.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api 
blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(a>this.$items.length-1||a<0))return 
this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var 
g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.7",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse 
in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function 
c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.7",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&j<i.length-1&&j++,~j||(j=0),i.eq(j).trigger("focus")}}}};var h=a.fn.dropdown;a.fn.dropdown=d,a.fn.dropdown.Constructor=g,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=h,this},a(document).on("click.bs.dropdown.data-api",c).on("click.bs.dropdown.data-api",".dropdown 
form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",f,g.prototype.toggle).on("keydown.bs.dropdown.data-api",f,g.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",g.prototype.keydown)}(jQuery),+function(a){"use strict";function b(b,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},c.DEFAULTS,e.data(),"object"==typeof b&&b);f||e.data("bs.modal",f=new c(this,g)),"string"==typeof b?f[b](d):g.show&&f.show(d)})}var c=function(b,c){this.options=c,this.$body=a(document.body),this.$element=a(b),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=300,c.BACKDROP_TRANSITION_DURATION=150,c.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},c.prototype.toggle=function(a){return this.isShown?this.hide():this.show(a)},c.prototype.show=function(b){var d=this,e=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(e),this.isShown||e.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){d.$element.one("mouseup.dismiss.bs.modal",function(b){a(b.target).is(d.$element)&&(d.ignoreBackdropClick=!0)})}),this.backdrop(function(){var e=a.support.transition&&d.$element.hasClass("fade");d.$element.parent().length||d.$element.appendTo(d.$body),d.$element.show().scrollTop(0),d.adjustDialog(),e&&d.$element[0].offsetWidth,d.$element.addClass("in"),d.enforceFocus();var 
f=a.Event("shown.bs.modal",{relatedTarget:b});e?d.$dialog.one("bsTransitionEnd",function(){d.$element.trigger("focus").trigger(f)}).emulateTransitionEnd(c.TRANSITION_DURATION):d.$element.trigger("focus").trigger(f)}))},c.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(c.TRANSITION_DURATION):this.hideModal())},c.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){document===a.target||this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.trigger("focus")},this))},c.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},c.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},c.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.$body.removeClass("modal-open"),a.resetAdjustments(),a.resetScrollbar(),a.$element.trigger("hidden.bs.modal")})},c.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},c.prototype.backdrop=function(b){var d=this,e=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var f=a.support.transition&&e;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop 
"+e).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(a){return this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),f&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;f?this.$backdrop.one("bsTransitionEnd",b).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):b()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var g=function(){d.removeBackdrop(),b&&b()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",g).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):g()}else b&&b()},c.prototype.handleUpdate=function(){this.adjustDialog()},c.prototype.adjustDialog=function(){var a=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth<a,this.scrollbarWidth=this.measureScrollbar()},c.prototype.setScrollbar=function(){var a=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",a+this.scrollbarWidth)},c.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},c.prototype.measureScrollbar=function(){var a=document.createElement("div");a.className="modal-scrollbar-measure",this.$body.append(a);var b=a.offsetWidth-a.clientWidth;return this.$body[0].removeChild(a),b};var d=a.fn.modal;a.fn.modal=b,a.fn.modal.Constructor=c,a.fn.modal.noConflict=function(){return 
a.fn.modal=d,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(c){var d=a(this),e=d.attr("href"),f=a(d.attr("data-target")||e&&e.replace(/.*(?=#[^\s]+$)/,"")),g=f.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(e)&&e},f.data(),d.data());d.is("a")&&c.preventDefault(),f.one("show.bs.modal",function(a){a.isDefaultPrevented()||f.one("hidden.bs.modal",function(){d.is(":visible")&&d.trigger("focus")})}),b.call(f,g,this)})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.tooltip",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",a,b)};c.VERSION="3.3.7",c.TRANSITION_DURATION=150,c.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var 
h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);if(c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),!c.isInStateTrue())return clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide()},c.prototype.show=function(){var 
b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-m<o.top?"bottom":"right"==h&&k.right+l>o.width?"left":"left"==h&&k.left-l<o.left?"right":h,f.removeClass(n).addClass(h)}var p=this.getCalculatedOffset(h,k,l,m);this.applyPlacement(p,h);var q=function(){var a=e.hoverState;e.$element.trigger("shown.bs."+e.type),e.hoverState=null,"out"==a&&e.leave(e)};a.support.transition&&this.$tip.hasClass("fade")?f.one("bsTransitionEnd",q).emulateTransitionEnd(c.TRANSITION_DURATION):q()}},c.prototype.applyPlacement=function(b,c){var d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),b.top+=g,b.left+=h,a.offset.setOffset(d[0],a.extend({using:function(a){d.css({top:Math.round(a.top),left:Math.round(a.left)})}},b),0),d.addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;"top"==c&&j!=f&&(b.top=b.top+f-j);var k=this.getViewportAdjustedDelta(c,b,i,j);k.left?b.left+=k.left:b.top+=k.top;var 
l=/top|bottom/.test(c),m=l?2*k.left-e+i:2*k.top-f+j,n=l?"offsetWidth":"offsetHeight";d.offset(b),this.replaceArrow(m,d[0][n],l)},c.prototype.replaceArrow=function(a,b,c){this.arrow().css(c?"left":"top",50*(1-a/b)+"%").css(c?"top":"left","")},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},c.prototype.hide=function(b){function d(){"in"!=e.hoverState&&f.detach(),e.$element&&e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),b&&b()}var e=this,f=a(this.$tip),g=a.Event("hide.bs."+this.type);if(this.$element.trigger(g),!g.isDefaultPrevented())return f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one("bsTransitionEnd",d).emulateTransitionEnd(c.TRANSITION_DURATION):d(),this.hoverState=null,this},c.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},c.prototype.hasContent=function(){return this.getTitle()},c.prototype.getPosition=function(b){b=b||this.$element;var c=b[0],d="BODY"==c.tagName,e=c.getBoundingClientRect();null==e.width&&(e=a.extend({},e,{width:e.right-e.left,height:e.bottom-e.top}));var f=window.SVGElement&&c instanceof window.SVGElement,g=d?{top:0,left:0}:f?null:b.offset(),h={scroll:d?document.documentElement.scrollTop||document.body.scrollTop:b.scrollTop()},i=d?{width:a(window).width(),height:a(window).height()}:null;return a.extend({},e,h,i,g)},c.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},c.prototype.getViewportAdjustedDelta=function(a,b,c,d){var e={top:0,left:0};if(!this.$viewport)return e;var 
f=this.options.viewport&&this.options.viewport.padding||0,g=this.getPosition(this.$viewport);if(/right|left/.test(a)){var h=b.top-f-g.scroll,i=b.top+f-g.scroll+d;h<g.top?e.top=g.top-h:i>g.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;j<g.left?e.left=g.left-j:k>g.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null,a.$element=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var 
c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.7",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof 
c&&e[c]()})}b.VERSION="3.3.7",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b<e[0])return this.activeTarget=null,this.clear();for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(void 0===e[a+1]||b<e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){
this.activeTarget=b,this.clear();var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),d.trigger("activate.bs.scrollspy")},b.prototype.clear=function(){a(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var d=a.fn.scrollspy;a.fn.scrollspy=c,a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=d,this},a(window).on("load.bs.scrollspy.data-api",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);c.call(b,b.data())})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new c(this)),"string"==typeof b&&e[b]()})}var c=function(b){this.element=a(b)};c.VERSION="3.3.7",c.TRANSITION_DURATION=150,c.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a"),f=a.Event("hide.bs.tab",{relatedTarget:b[0]}),g=a.Event("show.bs.tab",{relatedTarget:e[0]});if(e.trigger(f),b.trigger(g),!g.isDefaultPrevented()&&!f.isDefaultPrevented()){var h=a(d);this.activate(b.closest("li"),c),this.activate(h,h.parent(),function(){e.trigger({type:"hidden.bs.tab",relatedTarget:b[0]}),b.trigger({type:"shown.bs.tab",relatedTarget:e[0]})})}}},c.prototype.activate=function(b,d,e){function f(){g.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> 
.active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.7",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return e<c&&"top";if("bottom"==this.affixed)return null!=c?!(e+this.unpin<=f.top)&&"bottom":!(e+g<=a-d)&&"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&e<=c?"top":null!=d&&i+j>=a-d&&"bottom"},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var 
b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); | PypiClean |
/D-Analyst-1.0.6.tar.gz/D-Analyst-1.0.6/main/analyst/processors/processor.py | import inspect
import numpy as np
from analyst import Manager, TextVisual, get_color
import analyst
__all__ = ['EventProcessor']
class EventProcessor(object):
    """Process several related events.

    A processor registers handlers for named interaction events and
    dispatches incoming events to them.  It is owned by an
    InteractionManager and reaches the parent widget and its
    PaintManager through that manager.
    """
    def __init__(self, interaction_manager, *args, **kwargs):
        self.interaction_manager = interaction_manager
        self.parent = interaction_manager.parent
        # Maps event name -> handler callable; filled by register().
        self.handlers = {}
        # Name of the cursor this processor requests, or None.
        self.cursor = None
        self.activate()
        # Subclasses register their handlers in initialize().
        self.initialize(*args, **kwargs)
    def get_processor(self, name):
        """Return a processor in the Manager from its name."""
        return self.interaction_manager.get_processor(name)
    def _get_paint_manager(self):
        # Accessor backing the read-only `paint_manager` property below.
        return self.interaction_manager.paint_manager
    paint_manager = property(_get_paint_manager)
    def set_data(self, *args, **kwargs):
        """Forward to PaintManager.set_data."""
        return self.parent.paint_manager.set_data(*args, **kwargs)
    def get_visual(self, name):
        """Get a visual in the PaintManager from its name."""
        return self.parent.paint_manager.get_visual(name)
    def add_visual(self, *args, **kwargs):
        """Add a new visual in the paint manager.

        Does nothing if a visual with the same name already exists.
        """
        name = kwargs.get('name')
        if not self.get_visual(name):
            self.parent.paint_manager.add_visual(*args, **kwargs)
    def set_cursor(self, cursor):
        """Set the cursor requested by this processor."""
        self.cursor = cursor
    def get_cursor(self):
        """Return the current cursor."""
        return self.cursor
    def activate(self, boo=True):
        """Activate or deactivate a processor."""
        self.activated = boo
    def deactivate(self):
        """Deactivate the processor."""
        self.activated = False
    def register(self, event, method):
        """Register a handler for the event."""
        self.handlers[event] = method
    def registered(self, event):
        """Return whether the specified event has been registered by this
        processor."""
        return self.handlers.get(event, None) is not None
    def process(self, event, parameter):
        """Process an event by calling the registered handler if there's one.
        """
        method = self.handlers.get(event, None)
        if method:
            # Handlers that are bound methods of an EventProcessor or an
            # InteractionManager subclass are called directly.
            # NOTE(review): `method.im_class` is Python 2-only; on Python 3
            # it raises AttributeError (method.__self__.__class__ is the
            # portable spelling) -- confirm the supported Python version.
            if (inspect.ismethod(method) and
                (EventProcessor in inspect.getmro(method.im_class) or
                analyst.InteractionManager in inspect.getmro(method.im_class))):
                method(parameter)
            else:
                # Plain functions receive the figure as first argument; a
                # set of convenience methods is patched onto the figure the
                # first time it is used here.
                fig = self.interaction_manager.figure
                if not hasattr(fig, 'set_data'):
                    fig.set_data = self.parent.paint_manager.set_data
                    fig.copy_texture = self.parent.paint_manager.copy_texture
                    fig.set_rendering_options = self.parent.paint_manager.set_rendering_options
                    fig.get_processor = self.interaction_manager.get_processor
                    fig.get_visual = self.paint_manager.get_visual
                    fig.process_interaction = self.parent.process_interaction
                    fig.resizeGL = self.parent.paint_manager.resizeGL
                method(fig, parameter)
    def process_none(self):
        """Process the None event, occurring when there's no event, or when
        an event has just finished."""
        self.process(None, None)
    def initialize(self, *args, **kwargs):
        """Initialize the event processor by calling self.register to register
        handlers for different events."""
        pass | PypiClean |
/donatellopyzza-1.6.6.tar.gz/donatellopyzza-1.6.6/donatellopyzza/assessor.py | from random import randint
from tqdm import tqdm
import os, sys
import time
#sys.path.insert(0, os.path.abspath('donatellopyzza/'))
from .mazeGenerator import MazeGenerator
from .game import Feedback
from .game import Game
class Assessor:
    """Benchmark a maze-solving solution over many randomly generated mazes.

    For each run the assessor generates a maze, instantiates the
    registered solution class, and lets it act until the maze is won or
    an action budget (``overflow``) is exhausted, then prints aggregate
    statistics.
    """
    def __init__(self, nruns: int, minrows: int, mincolumns: int, maxrows: int, maxcolumns: int, complexity=0, overflow=-1):
        # Guard against degenerate benchmarks: at least one run and maze
        # dimensions of at least 8x8.
        assert nruns > 0 and minrows > 7 and mincolumns > 7, "Please, specify a valid number of runs, rows and columns"
        # overflow == -1 means "use the default budget": twice the cell
        # count of the largest possible maze.
        if overflow == -1:
            overflow = maxrows * maxcolumns * 2
        # Solution *class* (not an instance); set later via setSolution().
        self.clss = None
        # Maximum number of actions allowed per maze before giving up.
        self.overflow = overflow
        self.nruns = nruns
        self.minrows = minrows
        self.mincolumns = mincolumns
        self.maxrows = maxrows
        self.maxcolumns = maxcolumns
        self.complexity = complexity
        self.generator = MazeGenerator()
    def setSolution(self, clss):
        # Store the solution class; a fresh instance is created per maze.
        self.clss = clss
    def run(self):
        """Run the benchmark and print success rate and timing statistics."""
        # NOTE(review): `self.clss is not None` would be the idiomatic test.
        assert self.clss != None, "Please, set your solution before running the assessor"
        print("Running the evaluation...")
        success = 0
        failure = 0
        startTime = time.time()
        totalActions = 0
        for i in tqdm(range(self.nruns)):
            # Draw random dimensions, then pull them towards their mean so
            # the maze is not overly elongated.
            r, c = randint(self.minrows, self.maxrows), randint(self.mincolumns, self.maxcolumns)
            deviation = (abs(c-(r+c)/2)/2)
            a = int(min(r, c) + deviation)
            b = int(max(r, c) - deviation)
            maze = self.generator.create_maze(a, b, self.complexity)
            # Every run overwrites the same maze file on disk.
            filepath = "assessment_maze"
            maze.save(maze, filename=filepath)
            game = Game(filepath, False)
            # game.start() returns a turtle that executes actions on its environment
            turtle = game.start()
            feedback = Feedback.NO_FEEDBACK
            cnt = 0
            # Fresh solution instance per maze so no state leaks between runs.
            solutionInstance = self.clss()
            while not game.isWon(False) and self.overflow > cnt:
                action = solutionInstance.nextAction(feedback)
                feedback = turtle.execute(action)
                cnt += 1
            totalActions += cnt
            # NOTE(review): a maze solved on exactly the last allowed action
            # (cnt == overflow) is counted as a failure here; checking
            # game.isWon() instead would be unambiguous -- confirm intent.
            if self.overflow > cnt:
                success += 1
            else:
                failure += 1
        endTime = time.time()
        duration = endTime - startTime
        print("Total duration: ", round(duration, 2), " seconds")
        print("------------------------")
        # The ratio is rounded to 2 decimals *before* scaling, so the rate
        # is printed with whole-percent granularity.
        print("| Success rate: ", round(success / (success + failure), 2) * 100, "% |")
        print("------------------------")
        print("Number of actions per maze: ", round(totalActions/self.nruns, 2))
        print("Time per maze: ", round(duration/self.nruns, 2), "seconds")
        print("------------------------") | PypiClean |
/AN-DiscordBot-3.9.4.tar.gz/AN-DiscordBot-3.9.4/anbot/core/rpc.py | import asyncio
from typing import Optional
from aiohttp import web
from aiohttp_json_rpc import JsonRpc
from aiohttp_json_rpc.rpc import JsonRpcMethod
import logging
log = logging.getLogger("red.rpc")
__all__ = ["RPC", "RPCMixin", "get_name"]
def get_name(func, prefix=""):
class_name = prefix or func.__self__.__class__.__name__.lower()
func_name = func.__name__.strip("_")
if class_name == "redrpc":
return func_name.upper()
return f"{class_name}__{func_name}".upper()
class ANRpc(JsonRpc):
    """JSON-RPC server that registers coroutine handlers by generated name."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Expose the introspection endpoint by default.
        self.add_methods(("", self.get_method_info))

    def _add_method(self, method, name="", prefix=""):
        """Register ``method`` under ``name`` (or a name built by get_name).

        Non-coroutine callables are silently ignored.
        """
        if asyncio.iscoroutinefunction(method):
            key = name if name else get_name(method, prefix)
            self.methods[key] = JsonRpcMethod(method)

    def remove_method(self, method):
        """Unregister ``method`` if it is currently registered."""
        target = get_name(method)
        self.methods = {
            key: handler
            for key, handler in self.methods.items()
            if key != target
        }

    def remove_methods(self, prefix: str):
        """Unregister every method registered under ``prefix``."""
        def keep(key):
            parts = key.split("__")
            return len(parts) < 2 or parts[0] != prefix

        self.methods = {
            key: handler for key, handler in self.methods.items() if keep(key)
        }

    async def get_method_info(self, request):
        """Return the docstring of the method named in the request params."""
        wanted = request.params[0]
        try:
            return self.methods[wanted].__doc__
        except KeyError:
            return "No docstring available."
class RPC:
    """
    RPC server manager.

    Hosts an :class:`ANRpc` endpoint on an aiohttp application bound to
    ``127.0.0.1:6133``.
    """

    def __init__(self):
        self.app = web.Application()
        self._rpc = ANRpc()
        self.app.router.add_route("*", "/", self._rpc.handle_request)
        self._runner = web.AppRunner(self.app)
        self._site: Optional[web.TCPSite] = None

    async def initialize(self):
        """
        Finalizes the initialization of the RPC server and allows it to begin
        accepting queries.
        """
        await self._runner.setup()
        self._site = web.TCPSite(self._runner, host="127.0.0.1", port=6133)
        await self._site.start()
        log.debug("Created RPC server listener.")

    async def close(self):
        """
        Closes the RPC server.
        """
        await self._runner.cleanup()

    def add_method(self, method, prefix: Optional[str] = None):
        """Register a single coroutine method with the RPC server.

        Parameters
        ----------
        method : coroutine method
            Bound coroutine method to expose over RPC.
        prefix : Optional[str]
            Name prefix; defaults to the lowercased class name of the
            method's owner.

        Raises
        ------
        TypeError
            If ``method`` is not a coroutine function.
        """
        if prefix is None:
            prefix = method.__self__.__class__.__name__.lower()
        if not asyncio.iscoroutinefunction(method):
            raise TypeError("RPC methods must be coroutines.")
        self._rpc.add_methods((prefix, method))

    def add_multi_method(self, *methods, prefix: Optional[str] = None):
        """Register several coroutine methods under one optional prefix.

        Raises
        ------
        TypeError
            If any entry is not a coroutine function (checked up front, so
            either all methods are added or none are).
        """
        if not all(asyncio.iscoroutinefunction(m) for m in methods):
            raise TypeError("RPC methods must be coroutines.")
        for method in methods:
            self.add_method(method, prefix=prefix)

    def remove_method(self, method):
        """Unregister a previously added RPC method."""
        self._rpc.remove_method(method)

    def remove_methods(self, prefix: str):
        """Unregister every RPC method registered under ``prefix``."""
        self._rpc.remove_methods(prefix)
class RPCMixin:
    """Mixin that equips an object with an internal RPC server.

    Exposes ``self.rpc`` (the server) and ``self.rpc_handlers``, which
    tracks registered handler methods per cog so they can be cleaned up
    on cog unload.
    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.rpc = RPC()
        self.rpc_handlers = {}  # Uppercase cog name -> list of handler methods
    def register_rpc_handler(self, method):
        """
        Registers a method to act as an RPC handler if the internal RPC server is active.
        When calling this method through the RPC server, use the naming scheme
        "cogname__methodname".
        .. important::
            All parameters to RPC handler methods must be JSON serializable objects.
            The return value of handler methods must also be JSON serializable.
        Parameters
        ----------
        method : coroutine
            The method to register with the internal RPC server.
        """
        self.rpc.add_method(method)
        # Track the handler under its owning cog for cleanup on unload.
        cog_name = method.__self__.__class__.__name__.upper()
        if cog_name not in self.rpc_handlers:
            self.rpc_handlers[cog_name] = []
        self.rpc_handlers[cog_name].append(method)
    def unregister_rpc_handler(self, method):
        """
        Unregisters an RPC method handler.
        This will be called automatically for you on cog unload and will pass silently if the
        method is not previously registered.
        Parameters
        ----------
        method : coroutine
            The method to unregister from the internal RPC server.
        """
        self.rpc.remove_method(method)
        # Recover the owning cog from the registration name ("COG__METHOD").
        name = get_name(method)
        cog_name = name.split("__")[0]
        if cog_name in self.rpc_handlers:
            try:
                self.rpc_handlers[cog_name].remove(method)
            except ValueError:
                # Never tracked (or already removed); the docstring
                # promises this passes silently.
                pass | PypiClean |
/Djblets-3.3.tar.gz/Djblets-3.3/docs/djblets/guides/webapi/writing-api-resources.rst | .. _writing-api-resources:
=========================
Writing Web API Resources
=========================
.. py:currentmodule:: djblets.webapi.resources.base
Overview
--------
:py:class:`~WebAPIResource` is used to write REST API resources. A resource
lives at a specific, stable URL, can represent either an object or a list of
objects, and can respond to various HTTP methods (GET, POST, PUT, DELETE).
Subclasses of :py:class:`~WebAPIResource` are expected to override functions
and variables in order to provide specific functionality, such as modifying or
creating new objects.
This guide will go over the capabilities and responsibilities of resource
subclasses.
Representing Models
-------------------
Most resources will have :py:attr:`WebAPIResource.model` set to a
:py:class:`~django.db.models.Model` subclass, and
:py:attr:`WebAPIResource.fields` set to a dictionary defining the fields to
return in the resource payloads.
Each resource will also include a ``link`` dictionary that maps
a key (resource name or action) to a dictionary containing the URL
(``href``) and the HTTP method that's to be used for that URL
(``method``). This will include a special ``self`` key that links to
that resource's actual location.
An example of this might be:
.. code-block:: javascript
'links': {
'self': {
'method': 'GET',
'href': '/path/to/this/resource/'
},
'update': {
'method': 'PUT',
'href': '/path/to/this/resource/'
}
}
Resources associated with a model may want to override
:py:meth:`WebAPIResource.get_queryset` to return a queryset with a more
specific query.
By default, an individual object's key name in the resulting payloads
will be set to the lowercase class name of the object, and the plural
version used for lists will be the same but with "s" appended to it. This
can be overridden by setting :py:attr:`WebAPIResource.name` and
:py:attr:`WebAPIResource.name_plural`.
Non-Database Models
-------------------
Resources are not always backed by a database model. It's often useful to
work with lists of objects or data computed within the request.
In these cases, most resources will still want to set
:py:attr:`WebAPIResource.model` to some sort of class and provide a
:py:attr:`WebAPIResource.fields` dictionary. It's expected that the fields
will all exist as attributes on an instance of the model, or that a serializer
function will exist for the field.
These resources will then to define a :py:meth:`WebAPIResource.get_queryset`
that returns a :py:class:`~djblets.db.query.LocalDataQuerySet` containing the
list of items to return in the resource. This will allow standard resource
functionality like pagination to work.
Matching Objects
----------------
Objects are generally queried by their numeric object ID and mapping that to
the object's ``pk`` attribute. For this to work, the
:py:attr:`WebAPIResource.uri_object_key` attribute must be set to the name in
the regex for the URL that will be captured and passed to the handlers for
this resource. The :py:attr:`WebAPIResource.uri_object_key_regex` attribute
can be overridden to specify the regex for matching this ID (useful for
capturing names instead of numeric IDs) and
:py:attr:`WebAPIResource.model_object_key` can be overridden to specify the
model field that will be matched against.
Parents and URLs
----------------
Resources typically have a parent resource, of which the resource is a
subclass. Resources will often list their children (by setting
:py:attr:`WebAPIResource.list_child_resources` and
:py:attr:`WebAPIResource.item_child_resources` in a subclass to lists of other
WebAPIResource instances). This makes the entire tree navigatable. The URLs
are built up automatically, so long as the result of
:py:func:`get_url_patterns` from top-level resources are added to the Django
``url_patterns`` variables commonly found in :file:`urls.py`.
Child objects should set the :py:attr:`WebAPIResource.model_parent_key`
variable to the field name of the object's parent in the resource hierarchy.
This allows :py:class:`WebAPIResource` to build a URL with the right values
filled in in order to make a URL to this object.
If the parent is dynamic based on certain conditions, then the
:py:meth:`WebAPIResource.get_parent_object` function can be overridden
instead.
Object Serialization
--------------------
Objects are serialized through the :py:meth:`WebAPIResource.serialize_object`
function. This rarely needs to be overridden, but can be called from
WebAPIEncoders in order to serialize the object. By default, this will loop
through the :py:attr:`WebAPIResource.fields` variable and add each value to
the resulting dictionary.
Values can be specially serialized by creating functions in the form of
:samp:`serialize_<fieldname>_field()`. These functions take the object being
serialized and must return a value that can be fed to the encoder.
By default, resources will not necessarily serialize the objects in their own
payloads. Instead, they will look up the registered resource instance for the
model using :py:meth:`WebAPIResource.get_resource_for_object`, and serialize
with that. A resource can override that logic for its own payloads by
providing a custom :py:meth:`WebAPIResource.get_serializer_for_object` method.
Handling Requests
-----------------
WebAPIResource calls the following functions based on the type of
HTTP request:
* :py:meth:`~WebAPIResource.get` -
HTTP GET for individual objects.
* :py:meth:`~WebAPIResource.get_list` -
HTTP GET for resources representing lists of objects.
* :py:meth:`~WebAPIResource.create` -
HTTP POST on resources representing lists of objects. This is expected to
return the object and :http:`201` on success.
* :py:meth:`~WebAPIResource.update` -
HTTP PUT on individual objects to modify their state based on full or
partial data.
* :py:meth:`~WebAPIResource.delete` -
HTTP DELETE on an individual object. This is expected to return :http:`204`
on success. The default implementation just deletes the object.
Any function that is not implemented will return an :http:`405`. Functions
that have handlers provided should
set :py:attr:`WebAPIResource.allowed_methods` to a tuple of the HTTP methods
allowed. For example:
.. code-block:: python
allowed_methods = ('GET', 'POST', 'DELETE')
These functions are passed an :py:class:`~django.http.HTTPRequest` and a list
of arguments captured in the URL and are expected to return standard HTTP
response codes, along with a payload in most cases. The functions can return
any of:
* :py:class:`~django.http.HttpResponse`
* :py:class:`~djblets.webapi.responses.WebAPIResponse`
* :py:class:`~djblets.webapi.errors.WebAPIError`
* Tuple of (:py:class:`~djblets.webapi.errors.WebAPIError`, payload)
* Tuple of (:py:class:`~djblets.webapi.errors.WebAPIError`,
payload dictionary, headers dictionary)
* Tuple of (HTTP status, payload)
* Tuple of (HTTP status, payload dictionary, headers dictionary)
In general, it's best to return one of the tuples containing an HTTP
status, and not any object, but there are cases where an object is
necessary.
Commonly, a handler will need to fetch parent objects in order to make
some request. The values for all captured object IDs in the URL are passed
to the handler, but it's best to not use these directly. Instead, the
handler should accept a ``**kwargs`` parameter, and then call the parent
resource's :py:meth:`WebAPIResource.get_object` function and pass in that
``**kwargs``. For example:
.. code-block:: python
def create(self, request, *args, **kwargs):
try:
my_parent = myParentResource.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
Pagination
----------
List resources automatically handle pagination of data, when using
models and querysets. Each request will return a fixed number of
results, and clients can fetch the previous or next batches through
the generated ``prev`` and ``next`` links.
By default, pagination is handled by
:py:class:`~djblets.webapi.responses.WebAPIResponsePaginated`. This is
responsible for fetching data from the resource's queryset. It's also
responsible for interpreting the ``start`` and ``max-results`` query
parameters, which are assumed to be 0-based indexes into the queryset.
Resources can override how pagination works by setting
:py:attr:`WebAPIResource.paginated_cls` to a subclass of
:py:class:`~djblets.webapi.responses.WebAPIResponsePaginated`. Through that,
they can customize all aspects of pagination for the resource.
Expanding Resources
-------------------
The resulting data returned from a resource will by default provide
links to child resources. If a lot of aggregated data is needed, then
instead of making several queries the caller can use the ``?expand=``
parameter. This takes a comma-separated list of keys in the resource
names found in the payloads and expands them instead of linking to them.
This can result in really large downloads, if deep expansion is made
when accessing lists of resources. However, it can also result in less
strain on the server if used correctly.
Faking HTTP Methods
-------------------
There are clients that can't actually request anything but HTTP POST
and HTTP GET. An HTML form is one such example, and Flash applications
are another. For these cases, an HTTP POST can be made, with a special
``_method`` parameter passed to the URL. This can be set to the HTTP
method that's desired. For example, ``PUT`` or ``DELETE``.
Permissions
-----------
Unless overridden, an object cannot be modified, created, or deleted
if the user is not logged in and if an appropriate permission function
does not return True. These permission functions are:
* :py:meth:`~WebAPIResource.has_access_permissions` -
Used for HTTP GET calls. Returns ``True`` by default.
* :py:meth:`~WebAPIResource.has_modify_permissions` -
Used for HTTP POST or PUT calls, if called by the subclass.
Returns ``False`` by default.
* :py:meth:`~WebAPIResource.has_delete_permissions` -
Used for HTTP DELETE permissions. Returns ``False`` by default.
Browser Caching
---------------
To improve performance, resources can make use of browser-side caching.
If a resource is accessed more than once, and it hasn't changed,
the resource will return an :http:`304`.
There are two methods for caching: Last Modified headers, and ETags.
Last Modified
~~~~~~~~~~~~~
A resource can set :py:meth:`WebAPIResource.last_modified_field` to the name
of a :py:class:`~django.db.models.DateTimeField` in the model. This will be
used to determine if the resource has changed since the last request.
If a bit more work is needed, the :py:meth:`WebAPIResource.get_last_modified`
function can instead be overridden. This takes the request and object and is
expected to return a timestamp.
ETags
~~~~~
ETags are arbitrary, unique strings that represent the state of a resource.
There should only ever be one possible ETag per state of the resource.
A resource can set the :py:attr:`WebAPIResource.etag_field` to the name of a
field in the model.
If no field really works, :py:attr:`WebAPIResource.autogenerate_etags` can be
set. This will generate a suitable ETag based on all fields in the resource.
For this to work correctly, no custom data can be added to the payload, and
links cannot be dynamic.
If more work is needed, the :py:meth:`WebAPIResource.get_etag` function can
instead be overridden. It will take a request and object and is expected to
return a string.
Mimetypes
---------
Resources should list the possible mimetypes they'll accept and return in
:py:attr:`WebAPIResource.allowed_mimetypes`. Each entry in the list is a
dictionary with ``list`` containing a mimetype for resource lists, and
``item`` containing the equivalent mimetype for a resource item. In the case
of a singleton, ``item`` will contain the mimetype. If the mimetype is not
applicable to one of the resource forms, the corresponding entry should
contain ``None``.
Entries in these lists are checked against the mimetypes requested in the
HTTP ``Accept`` header, and, by default, the returned data will be sent in
that mimetype. If the requested data is a resource list, the corresponding
resource item mimetype will also be sent in the ``Item-Content-Type``
header.
By default, these lists will have entries with both ``list`` and ``item``
containing :mimetype:`application/json` and :mimetype:`application/xml`,
along with any resource-specific mimetypes, if used.
Resource-specific Mimetypes
~~~~~~~~~~~~~~~~~~~~~~~~~~~
In order to better identify resources, resources can provide their
own custom mimetypes. These are known as vendor-specific mimetypes, and
are subsets of :mimetype:`application/json` and
:mimetype:`application/xml`. An example would be
:mimetype:`application/vnd.example.com.myresource+json`.
To enable this on a resource, set :py:attr:`WebAPIResource.mimetype_vendor` to
the vendor name. This is often a domain name. For example:
.. code-block:: python
mimetype_vendor = 'djblets.org'
The resource names will then be generated based on the name of the resource
(:py:attr:`WebAPIResource.name_plural` for resource lists,
:py:attr:`WebAPIResource.name` for resource items and singletons). These can
be customized as well:
.. code-block:: python
mimetype_list_resource_name = 'myresource-list'
mimetype_item_resource_name = 'myresource'
When these are used, any client requesting either the resource-specific
mimetype or the more generic mimetype will by default receive a payload
with the resource-specific mimetype. This makes it easier to identify
the schema of resource data without hard-coding any knowledge of the
URI.
Limiting Payload Contents
-------------------------
.. versionadded:: 0.9
Often times, the client won't actually need the full contents of an
API payload. Returning a full payload would not only increase the amount
of data that needs to be transferred, but would also incur extra
processing time on both the server and client, possibly also additional
database queries.
Clients can specify a list of fields and/or links that should be returned
in the payload by including ``?only-fields=`` or ``?only-links=`` in the URL
in any GET requst. These should contain a comma-separated list of fields or
link names to include. To prevent any fields/links from being returned,
simply leave the list blank.
To limit fields/links in PUT or POST requests, you should instead send
a field in the request called ``only_fields`` or ``only_links``. The
behavior is exactly the same as for GET requests.
| PypiClean |
/COMPAS-1.17.5.tar.gz/COMPAS-1.17.5/src/compas/datastructures/mesh/contours_numpy.py | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from numpy import asarray
from numpy import meshgrid
from numpy import linspace
from numpy import amax
from numpy import amin
from scipy.interpolate import griddata
import matplotlib.pyplot as plt
from compas.numerical import scalarfield_contours_numpy
__all__ = [
"mesh_isolines_numpy",
"mesh_contours_numpy",
]
def mesh_isolines_numpy(mesh, attr_name, N=50):
    """Compute the isolines of a specified attribute of the vertices of a mesh.

    Parameters
    ----------
    mesh : :class:`~compas.datastructures.Mesh`
        A mesh object.
    attr_name : str
        The name of the vertex attribute whose scalar field is contoured.
    N : int, optional
        The density of the isolines.

    Returns
    -------
    list[float]
        A list of levels.
    list[list[float]]
        A list of isolines.

    The list of levels contains the values of the scalar field at each of
    the isolines.  Each isoline is a list of paths, and each path is a
    list of polygons.
    """
    # Gather the XY coordinates and the attribute values of every vertex
    # in one pass, in the mesh's vertex iteration order.
    points = []
    values = []
    for key in mesh.vertices():
        points.append(mesh.vertex_coordinates(key, "xy"))
        values.append(mesh.vertex[key][attr_name])
    return scalarfield_contours_numpy(points, values, N)
def mesh_contours_numpy(mesh, levels=50, density=100):
    """Compute the contours of the mesh.
    Parameters
    ----------
    mesh : :class:`~compas.datastructures.Mesh`
        The mesh object.
    levels : int, optional
        The number of contours.
    density : int, optional
        The density of the interpolation grid.
    Returns
    -------
    list[float]
        A list of levels.
    list[list[float]]
        A list of contours.
    The list of levels contains the z-values at each of the contours.
    Each contour is a list of paths, and each path is a list polygons.
    Notes
    -----
    The contours are defined as the isolines of the z-coordinates of the vertices of the mesh.
    """
    # Collect vertex XY coordinates and Z values as parallel arrays.
    xy = [mesh.vertex_attributes(key, "xy") for key in mesh.vertices()]
    z = [mesh.vertex_attribute(key, "z") for key in mesh.vertices()]
    xy = asarray(xy)
    z = asarray(z)
    x = xy[:, 0]
    y = xy[:, 1]
    # Resample the scattered vertex data onto a regular grid spanning the
    # bounding box of the mesh (2 * density samples per axis).
    X, Y = meshgrid(linspace(amin(x), amax(x), 2 * density), linspace(amin(y), amax(y), 2 * density))
    Z = griddata((x, y), z, (X, Y), method="cubic")
    # Matplotlib is used purely as a contouring engine here; the figure is
    # closed below and never shown.
    fig = plt.figure()
    ax = fig.add_subplot(111, aspect="equal")
    c = ax.contour(X, Y, Z, levels)
    contours = [0] * len(c.collections)
    # Replace the requested contour count by the z-values matplotlib
    # actually used for the levels.
    levels = c.levels
    # NOTE(review): ContourSet.collections is deprecated since Matplotlib
    # 3.8 and removed in 3.10 -- confirm the supported Matplotlib version.
    # Unpack collection -> paths -> polygons into nested pre-sized lists.
    for i, coll in enumerate(iter(c.collections)):
        paths = coll.get_paths()
        contours[i] = [0] * len(paths)
        for j, path in enumerate(iter(paths)):
            polygons = path.to_polygons()
            contours[i][j] = [0] * len(polygons)
            for k, polygon in enumerate(iter(polygons)):
                contours[i][j][k] = polygon
    plt.close(fig)
    return levels, contours | PypiClean |
/Flask-Imgur-0.1.tar.gz/Flask-Imgur-0.1/flask_imgur/flask_imgur.py |
import base64
import json
from six.moves import urllib
class Imgur(object):
    """
    Simple class for handling Imgur image upload, and deletion

    Authenticates anonymously with a client ID taken either from the
    ``client_id`` argument or from the Flask app's ``IMGUR_ID`` config key.
    """
    API_URL = "https://api.imgur.com/3/image"
    def __init__(self, app=None, client_id=None, **kwargs):
        # NOTE(review): if both ``client_id`` and ``app`` are None, the
        # check below raises AttributeError on ``app.config`` before the
        # intended "Missing client id" Exception -- confirm whether ``app``
        # may legitimately be omitted.
        if not client_id and not app.config.get("IMGUR_ID", None):
            raise Exception("Missing client id")
        self.client_id = client_id or app.config.get("IMGUR_ID")
        # An 'api' keyword overrides the class-level API_URL with an
        # instance attribute.
        if 'api' in kwargs:
            self.API_URL = kwargs["api"]
    def _get_api(self):
        # Returns the endpoint URL (instance override or class default).
        return self.API_URL
    def _add_authorization_header(self, additional = dict()):
        """
        Builds authorization headers for anonymous users

        ``additional`` entries are merged on top of the Authorization
        header (the shared default dict is never mutated here).
        """
        headers = dict(
            Authorization = "Client-ID " + self.client_id
        )
        headers.update(additional)
        return headers
    def _build_send_request(self, image=None, params=dict())_
        """
        Build request for sending an image
        """
        if not image:
            raise Exception("Missing image object")
        b64 = base64.b64encode(image.read())
        data = dict(
            image = b64,
            type = 'base64',
        )
        data.update(params)
        return urllib.parse.urlencode(data).encode("utf-8")
    def send_image(self, image, send_params=dict(), additional_headers=dict()):
        """
        Main handler for sending images
        :params image -- Image object
        :params send_params -- additional info to be sent to imgur
        :params additional_headers -- additional headers to be added to request
        """
        req = urllib.request.Request(url = self._get_api(),
            data = self._build_send_request(image, send_params),
            headers = self._add_authorization_header(additional_headers)
        )
        data = urllib.request.urlopen(req)
        # Imgur responds with a JSON document describing the upload.
        return json.loads(data.read().decode("utf-8"))
    def delete_image(self, delete_hash, additional_headers=dict()):
        """
        Delete image from imgur
        :params delete_hash -- string containing unique
            image hash optained when sending an image
        :params additional_headers -- aditional headers to be addd to request
        """
        # NOTE(review): ``opener`` is created but never used; urlopen below
        # goes through the default opener -- confirm this line can go.
        opener = urllib.request.build_opener(urllib.request.HTTPHandler)
        req = urllib.request.Request(url = self._get_api() + "/" + delete_hash,
            headers = self._add_authorization_header(additional_headers))
        # Force the HTTP verb to DELETE (urllib has no native option here).
        req.get_method = lambda: "DELETE"
        data = urllib.request.urlopen(req)
        return json.loads(data.read().decode("utf-8")) | PypiClean |
/Lokai-0.3.tar.gz/Lokai-0.3/lokai/tool_box/tb_install/environment_setup.py |
#-----------------------------------------------------------------------
"""
environment_setup takes a yaml description of any directories and
their (text based) content that need to be present in the running
environment.
The module is executable, and it provides a function
(process_setup) that can be called from other places (such as test
scripts).
When run against an environment that already exists any target
text files that are found are renamed with a date. The facility
does _not_ attempt to merge the content. Perhaps it should.
"""
#-----------------------------------------------------------------------
import sys
import yaml
import os
import datetime
#-----------------------------------------------------------------------
class AbandonExecution(Exception):
    """Raised when a requested filesystem target conflicts with what is
    already on disk (file where a directory is wanted, or vice versa)."""
    pass
#-----------------------------------------------------------------------
def process_setup(source, working_directory='.'):
    """ Read the yaml source file.

        Prepend working_directory onto paths. Use os.path.join, so it
        is possible to override relative path with absolute, but you
        have to be explicit.

        :param source: file-like object (or string) of YAML documents
        :param working_directory: base directory; ~ is expanded
    """
    pwd = os.path.expanduser(working_directory)
    for arg_set in yaml.safe_load_all(source):
        # items() works on Python 2 and 3; the original used the
        # Python-2-only iteritems().
        for name, body in arg_set.items():
            process_node(os.path.join(pwd, name), body)
def process_node(path, body):
    """ Recurse through the structure, creating things as we go.

        A mapping creates a directory whose entries are processed
        recursively; None creates an empty directory; any other value
        is written out as file content.
    """
    if body is None:
        create_directory(path)
    elif not isinstance(body, dict):
        create_file(path, body)
    else:
        create_directory(path)
        # items() works on Python 2 and 3 (original used iteritems()).
        for name, new_body in body.items():
            process_node(os.path.join(path, name), new_body)
def create_directory(path):
    """ Ensure a directory exists at path, creating parents as needed.

        :raises AbandonExecution: if path already exists as a regular file.
    """
    if os.path.exists(path):
        if os.path.isfile(path):
            # raise X(msg) is valid on Python 2 and 3; the original used
            # the Python-2-only "raise X, msg" form.
            raise AbandonExecution(
                "Directory path points to existing file: %s" % path)
    else:
        os.makedirs(path)
def create_file(path, body):
    """ Write body to a text file at path.

        If a file already exists there it is first renamed with a
        timestamp suffix; content is never merged or overwritten.

        :raises AbandonExecution: if path already exists as a directory.
    """
    time_stamp = datetime.datetime.now()
    if os.path.exists(path):
        if os.path.isdir(path):
            # Cross-version raise syntax; message fixed to say
            # "directory" (the check is isdir, the old text said "file").
            raise AbandonExecution(
                "File path points to existing directory: %s" % path)
        # 'head' instead of 'dir' to avoid shadowing the builtin.
        head, name = os.path.split(path)
        new_name = "%s.%s" % (name, time_stamp.strftime("%Y%m%d%H%M%S"))
        os.rename(path, os.path.join(head, new_name))
    # Context manager guarantees the handle is closed even on write errors.
    with open(path, 'w') as target:
        target.write(body)
#-----------------------------------------------------------------------
if __name__ == '__main__':
    import optparse
    usage = ("Set up config files and working directories based on "
             "current or given working directory. Use stdin by default")
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('-f', '--file', dest='source_file',
                      help="Name of file to process")
    parser.add_option('-w', '--working-directory',
                      dest='working_directory',
                      default='.',
                      help="Override working directory")
    (options, args) = parser.parse_args()
    if options.source_file:
        # Close the file we opened once processing is done (the original
        # leaked the handle); stdin is left untouched.
        with open(options.source_file) as source:
            process_setup(source, options.working_directory)
    else:
        process_setup(sys.__stdin__, options.working_directory)
#----------------------------------------------------------------------- | PypiClean |
/NICEx-0.2.3.tar.gz/NICEx-0.2.3/nice/__init__.py | from nice.utils.distance import*
from nice.utils.data import data_NICE
from nice.utils.optimization.heuristic import best_first
from nice.utils.optimization.reward import SparsityReward, ProximityReward, PlausibilityReward
from typing import Optional
import numpy as np
# =============================================================================
# Types and constants
# =============================================================================
# Dispatch tables mapping the option strings accepted by NICE.__init__
# (distance_metric, num_normalization, optimization) to the implementation
# classes imported from nice.utils above.
CRITERIA_DIS = {'HEOM':HEOM}
CRITERIA_NRM = {'std':StandardDistance,
                'minmax':MinMaxDistance}
CRITERIA_REW = {'sparsity':SparsityReward,
                'proximity':ProximityReward,
                'plausibility':PlausibilityReward}
class NICE:
    """Nearest Instance Counterfactual Explanations.

    Wires together the data wrapper, the chosen distance metric, the
    nearest-neighbour search and -- unless optimization is 'none' -- a
    best-first optimizer driven by the selected reward criterion.
    """

    def __init__(
            self,
            predict_fn,
            X_train: np.ndarray,
            cat_feat: list,
            num_feat='auto',
            y_train: Optional[np.ndarray] = None,
            optimization='sparsity',
            justified_cf: bool = True,
            distance_metric: str = 'HEOM',
            num_normalization: str = 'minmax',
            auto_encoder=None):
        self.optimization = optimization
        # Wrap the training data; the tiny constant is presumably a
        # numerical-stability epsilon -- TODO confirm in data_NICE.
        self.data = data_NICE(
            X_train, y_train, cat_feat, num_feat,
            predict_fn, justified_cf, 0.00000000001)
        normalizer = CRITERIA_NRM[num_normalization]
        self.distance_metric = CRITERIA_DIS[distance_metric](self.data, normalizer)
        self.nearest_neighbour = NearestNeighbour(self.data, self.distance_metric)
        if optimization == 'none':
            return
        self.reward_function = CRITERIA_REW[optimization](
            self.data,
            distance_metric=self.distance_metric,
            auto_encoder=auto_encoder,
        )
        self.optimizer = best_first(self.data, self.reward_function)

    def explain(self, X, target_class='other'):  # todo target class 'other'
        """Return a counterfactual for X, or the raw nearest unlike
        neighbour when optimization is disabled."""
        self.data.fit_to_X(X, target_class)
        neighbour = self.nearest_neighbour.find_neighbour(self.data.X)
        if self.optimization == 'none':
            return neighbour
        return self.optimizer.optimize(neighbour)
/BiliUtil-0.2.3.tar.gz/BiliUtil-0.2.3/README.md | # BiliUtil





Bilibili.com(B站)数据下载工具包。若您在使用过程中发现BUG或有可以改进之处,欢迎提交[Issue](https://github.com/wolfbolin/BiliUtil/issues)或邮件(mailto@wolfbolin.com)与我联系。如果觉得还不错,欢迎Star和Fork支持一下(两百个Star冲鸭)。
特别提醒:鉴于B站快速的发展,目前代码包中仍在使用一些新版中废弃的接口,如果某日B站关闭了旧接口服务,可能会导致本函数包不可用(虽然已经找到了新的获取方式,但是懒得改代码了,非Json接口获取成本++)
> 特性
>
> * 用户与频道内视频批量下载
> * 支持360P至4K全部画质下载
> * 开放灵活详细的API编程接口
> * 多链接分块下载与自动合成
常见问题请参考[Q&A](#qa) | BUG修复请参考[更新日志](#update) | 新的疑问请参考[反馈列表](https://github.com/wolfbolin/BiliUtil/issues)
###### TOC
一、[安装方式](#install)
二、[使用样式](#example)
三、[接口文档](#document)
四、[Q&A](#qa)
五、[关于BiliUtil](#about)
六、[更新日志](#update)
## 一、安装方式<span id="install"/>
本工具包采用pip方式发布,并需要调用本地aria2c与ffmpeg插件,工具包启动前将根据环境变量检查环境是否可用,当环境中缺少相关插件时,部分类将不会导入。
*Step 1*: 使用pip安装
```shell
pip install BiliUtil
```
*Step 2*: 安装Aria2c插件
插件官网:<https://aria2.github.io/>
*Step 3:* 安装FFmpeg插件
插件官网:<https://ffmpeg.org/>
## 二、使用示例<span id="example"/>
获取视频信息并下载视频
```python
import BiliUtil
from cookie import cookie_info
if __name__ == '__main__':
# DNA视频下载
album = BiliUtil.Album("170001")
# album = BiliUtil.Album("av170001")
# album = BiliUtil.Album("BV17x411w7KC")
# album.set_album("456025297")
# album.set_album("av456025297")
# album.set_album("BV17x411w7KC")
# album.set_by_url("https://www.bilibili.com/video/av170001")
# album.set_by_url("https://www.bilibili.com/video/BV17x411w7KC")
# 4K视频下载测试
# album = BiliUtil.Album("BV1QV411R7d1")
album_info = album.sync()
print(album_info)
video_list = album.get_video_list()
print(video_list)
for video in video_list:
video.sync(cookie=cookie_info)
task = BiliUtil.Task(video, 'D:/BiliUtil', album.aid)
task.start()
```
获取用户信息并下载所有视频
```python
import BiliUtil
if __name__ == '__main__':
cookie = "SESSDATA=abcd68fd..."
cache = "D:/BiliUtil"
user = BiliUtil.User(20165629) # 他发的太多了,建议换个人尝试
# user.set_user(user)
# user.set_by_url("https://space.bilibili.com/20165629")
user_info = user.sync()
print(user_info)
fetcher = BiliUtil.Fetcher(user)
av_list = fetcher.fetch_all(cookie, BiliUtil.Config.SET_AS_NAME)
print(av_list)
positive_list, negative_list = fetcher.load_exist(cache)
print(positive_list)
print(negative_list)
task_id = fetcher.load_task(cache, positive_list, cache)
download_list = fetcher.pull_all()
print('完成{}个视频下载:{}'.format(len(download_list), download_list))
```
高配版示例程序请见[example3.py](https://github.com/wolfbolin/BiliUtil/blob/master/example3.py)和[example4.py](https://github.com/wolfbolin/BiliUtil/blob/master/example3.py),其中example4.py是我个人自测自用程序,涉及大多数使用场景,可靠性与适用性MAX
## 三、接口文档<span id="document"/>
在第四章[Q&A](#四QA)中将讲解常见问题与逻辑结构,如有需要请移步第四章,那里可能有你想问的。本章仅讲解工具包的使用方法,简单粗暴便于理解。
### 0、数据字典与基础函数
## 常量与含义<span id="config"/>
常量中包含了文件命名方式的定义,画质信息的定义,全局代理设置的定义等内容。
| 常量 | 值 | 含义 |
| ---------------- | -------------------------------- | --------------- |
| 命名方式 | | |
| SET_AS_NAME | 1 | 以视频名称命名 |
| SET_AS_CODE | 2 | 以对象编号命名 |
| SET_AS_PAGE | 3 | 以分P文件命名 |
| 网络代理 | | |
| HTTP_PROXY | 例http://user:pass@1.2.3.4:5678 | HTTP代理设置 |
| HTTPS_PROXY | 例https://user:pass@1.2.3.4:5678 | HTTPS代理设置 |
| 视频画质 | | |
| Quality.V360P | ('16', '流畅 360P') | 360P |
| Quality.V480P | ('32', '清晰 480P') | 480P |
| Quality.V720P | ('64', '高清 720P') | 720P(登录) |
| Quality.V720P60 | ('74', '高清 720P60') | 720P60(会员) |
| Quality.V1080P | ('80', '高清 1080P') | 1080P(登录) |
| Quality.V1080Px | ('112', '高清 1080P+') | 1080P+(会员) |
| Quality.V1080P60 | ('116', '高清 1080P60') | 1080P60(会员) |
| Quality.V4K | ('120', '超清 4K') | 4K(会员) |
| | | |
## 基础函数功能
### BiliUtil.Util.av2bv(av)
该函数可将形如`av170001`,`170001`的av号转化为形如`BV17x411w7KC`的新编码方式
> 编码转换算法代码参考来源:https://blog.csdn.net/jkddf9h8xd9j646x798t/article/details/105124465
### BiliUtil.Util.bv2av(bv)
该函数可将形如`BV17x411w7KC`的bv号转化为形如`170001`的旧编码方式
> 编码转换算法代码参考来源:https://blog.csdn.net/jkddf9h8xd9j646x798t/article/details/105124465
### BiliUtil.Util.to_bv(av)
该函数可识别av、bv号并都转化为bv号的编码形式
### BiliUtil.Util.to_av(av)
该函数可识别av、bv号并都转化为av号的编码形式
### 1、用户类(BiliUtil.User)<span id="userclass"/>
#### 1.1、`__init__(uid=None)`
你可以为每一个用户声明一个对象实例,在声明时你可以指定用户uid或在同步数据前设定用户uid。
``` python
user = BiliUtil.User('20165629')
```
每个实例中将包含以下成员变量,你可以在[`sync()`](#user-sync)操作后读取这些信息。
| 成员变量 | 变量含义 |
| -------- | ------------------- |
| uid | 用户uid |
| name | 用户昵称 |
| birthday | 用户生日 |
| title | 用户身份 |
| face | 用户头像 |
| time | 创号时间(可能为0) |
| level | 用户级别 |
| sex | 用户性别 |
| sign | 用户签名 |
| vip | 大会员 |
| | |
#### 1.2、`set_user(uid)`
你可以使用该函数设定用户uid或重新指定用户uid。该操作不会重置成员变量。
```python
user.set_user('20165629')
```
#### 1.3、`set_by_url(url)`
你可以通过该函数以url解析的方式指定对象的用户uid。该操作不会重置成员变量。
```python
user.set_by_url('https://space.bilibili.com/20165629?from=search')
```
#### 1.4、`sync(cookie=None)`<span id="user-sync"/>
你可用通过该操作更新对象的成员变量,如果你感觉信息不够丰满,请与开发者联系。
```python
user_info = user.sync(cookie='SESSDATA=abcd68fd...')
```
#### 1.5、`get_channel_list(cookie=None)`
你可以通过该操作获取用户公开的全部频道,返回值中将储存本工具包中[频道类](#channelclass)的对象。
```python
channel_list = user.get_channel_list(cookie='SESSDATA=abcd68fd...')
```
#### 1.6、`get_album_list(cookie=None)`
你可以通过该操作获取用户公开的全部视频,返回值中将储存本工具包中[稿件类](#albumclass)的对象。
```python
album_list = user.get_album_list(cookie='SESSDATA=abcd68fd...')
```
### 2、频道类(BiliUtil.Channel)<span id="channelclass"/>
#### 2.1、`__init__(uid=None, cid=None)`
你可以为每一个用户声明一个对象实例,在声明时你可以指定用户uid、频道cid或在同步数据前设定用户uid、频道cid。
```python
channel = BiliUtil.Channel(uid='20165629', cid='9108')
```
每个实例中将包含以下成员变量,你可以在[`get_album_list()`](#channel-get_album_list)操作后读取这些信息。
| 成员变量 | 变量含义 | 默认值 |
| ---------- | ------------------ | ------ |
| uid | 用户uid | None |
| cid | 频道cid | None |
| name | 频道名称 | None |
| cover | 频道封面 | None |
| count | 频道内稿件数量 | None |
| | | |
#### 2.2、`set_channel(uid, cid)`
你可以使用该函数设定频道cid或重新指定频道cid,同时必须指定频道对应用户uid。该操作不会重置成员变量。
```python
channel.set_channel('20165629', '9108')
```
#### 2.3、`set_by_url(url)`
你可以通过该函数以url解析的方式指定对象的用户uid和频道cid。该操作不会重置成员变量。
```python
channel.set_by_url('https://space.bilibili.com/20165629/channel/detail?cid=9108')
```
#### 2.4、`get_album_list(cookie=None)`<span id="channel-get_album_list"/>
你可用通过该操作获取该频道中的全部稿件对象,返回值中将储存本工具包中[稿件类](#albumclass)的对象。
```python
channel_info = channel.get_album_list(cookie='SESSDATA=abcd68fd...')
```
### 3、稿件类(BiliUtil.Album)<span id="albumclass"/>
#### 3.1、`__init__(aid=None)`
你可以为每一个稿件声明一个对象实例,在声明时你可以指定稿件aid(av号),或者同步数据前设定稿件aid(av号)。关于稿件与视频的区别请参考[Q&A](#qa)加强对名词的理解。
```python
album = BiliUtil.Album('3947271')
```
每个实例中将包含以下成员变量,你可以在[`sync()`](#album-sync)操作后读取这些信息。
| 成员变量 | 变量含义 | 默认值 |
| -------- | --------------- | ------ |
| aid | 稿件aid(av号) | None |
| num | 包含视频数量 | None |
| type | 分区名称 | None |
| cover | 封面链接 | None |
| name | 视频名称 | None |
| time | 发布时间 | None |
| desc | 稿件描述 | None |
| view | 观看人数 | None |
| danmu | 弹幕数量 | None |
| reply | 回复数量 | None |
| favorite | 收藏数量 | None |
| coin | 硬币数量 | None |
| share | 分享数量 | None |
| like | 点赞数量 | None |
| cid_list | 视频cid编号列表 | None |
| | | |
#### 3.2、`set_album(aid)`
你可以使用该函数设定稿件aid或重新指定稿件aid,该操作不会重置成员变量。
```python
album.set_user('3947271')
```
#### 3.3、`set_by_url(url)`
你可以通过该函数以url解析的方式指定对象的稿件aid,该操作不会重置成员变量。
```python
album.set_by_url('https://www.bilibili.com/video/av3947271')
```
#### 3.4、`album_name(name_pattern=Util.Config.SET_AS_CODE)`
你可以通过该操作获取标准化的稿件名称,同时你可以通过参数的方式生成不同命名方式的名称
```python
album_name = album.album_name()
```
#### 3.5、`sync(cookie=None)`<span id="album-sync"/>
你可用通过该操作更新对象的成员变量,如果你感觉信息不够丰满,请与开发者联系。
```python
album_info = album.sync(cookie='SESSDATA=abcd68fd...')
```
#### 3.6、`get_video_list(cookie=None)`<span id="album-get_video_list"/>
你可以通过该操作获取每个稿件中的视频对象,返回值中将储存本工具包中视频类的对象。
```python
video_list = album.get_video_list(cookie='SESSDATA=abcd68fd...')
```
### 4、视频类(BiliUtil.Video)<span id="videoclass"/>
#### 4.1、`__init__(aid=None, cid=None)`
不建议使用者自行创建视频对象,请使用稿件类的[`get_video_list()`](#album-get_video_list)操作获取视频类对象实例列表。
每个实例中将包含以下成员变量,你可以在[`sync()`](#video-sync)操作后读取这些信息。
| 成员变量 | 变量含义 | 默认值 |
| -------- | ------------------- | ------ |
| album | 稿件对象 | None |
| cid | 视频cid | None |
| name | 视频名称(分P名称) | None |
| page | 视频编号(分P序号) | None |
| quality | 视频画质 | None |
| length | 视频长度 | None |
| format | 视频格式 | None |
| height | 视频高度 | None |
| width | 视频宽度 | None |
| level | 视频版本 | None |
| video | 视频链接 | list() |
| audio | 音频链接 | list() |
| | | |
#### 4.2、`video_name(name_pattern=Util.Config.SET_AS_CODE)`
你可以通过该操作获取标准化的视频名称,同时你可以通过参数的方式生成不同命名方式的名称
```
video_name = video.video_name(Util.Config.SET_AS_PAGE)
```
#### 4.3、`sync(cookie=None, quality=None)`<span id="video-sync"/>
你可用通过该操作更新对象的成员变量,如果你感觉信息不够丰满,请与开发者联系。
你可以在同步视频信息时选择需要获取的视频画质,如果不指定,将默认按照可获取到的最高画质获取信息。
最高画质的获取与传入的Cookie信息有密切联系,有关Cookie与画质的关系请查阅[Q&A](#qa)
```python
video_info = video.sync(
cookie='SESSDATA=abcd68fd...',
quailty=BiliUtil.Config.Quality.V1080P
)
```
## 5、抓取器(BiliUtil.Fetcher)<span id="fetcherclass"/>
该类的设计是针对有批量下载视频需求而设计,避免使用者自行完成所有下载流程的编写。请关注该类的使用流程与使用示例,奇怪的使用方式可能会触发不知道什么情况的BUG。
#### 5.1、`__init__(obj)`
你可以使用用户类或频道类来初始化一个抓取器对象实例,不建议开发者操作实例中的对象数据。
#### 5.2、`fetch_all(cookie=None, name_pattern=SET_AS_CODE, quality=None)`
请在初始化之后使用该函数获取对象名下所有的视频列表,并储存在对象内部变量中,为后续操作提供数据。
当初始化对象为用户类时,将自动获取用户名下的所有视频。当初始化对象为频道类时,将自动获取该频道中的所有视频。当文件命名[命名方式](#config)为以名字命名时(`SET_AS_NAME`),程序将自动调用视频对象的`sync()`函数获取该视频的名称。你还可以传入视频质量的枚举以调整视频的最高质量,若无该参数则按照最高视频质量下载。
#### 5.3、`load_exist(ouput)`
你可以使用该函数加载输出目录中已经存在的视频列表,返回值分为乐观策略和悲观策略。在乐观策略状态下稿件实例有存在视频即认为存在, 在悲观策略状态下稿件实例所有视频都存在才认为存在。
该函数的设计是为了避免在视频下载时程序重复下载视频浪费流量与时间,也避免过多请求被官方风控。
#### 5.4、`load_task(output, exclude=None, v_filter=None)`
该函数可以帮助你在抓取器对象中生成一个任务列表,在任务列表中主要包含了任务类对象实例。
该函数提供了两个可选参数
* exclude:排除列表,当视频av号命中该列表中av号时,将自动跳过不创建下载任务。
* v_filter:过滤器,当稿件中的视频命中了过滤器的过滤条件时,将不创建下载任务。
#### 5.5、`pull_all(show_process=True, no_repeat=True)`
在一切都准备好之后,你可以调用该函数完成视频的批量下载,程序将自动调用每一个任务实例中的[`start()`函数](#taskstart)开始,函数的两个参数也将透传给任务对象。
### 6、任务类(BiliUtil.Task)<span id="tasklass"/>
#### 6.1、`__init__(video, output, name, cover=None)`
在初始化任务类时,需要传入一个视频对象,输出文件夹路径,封面链接与视频命名。如果你觉得非常麻烦,请使用抓取器自动生成单个视频的下载任务。
#### 6.2、`start(show_process=True, no_repeat=True)`<span id="taskstart"/>
该函数将启动任务的下载流程,程序将按照实例化对象时的参数调用Aria2c完成视频与封面的下载。
关于在视频下载流程中会发生什么,请参考[Q&A](#qa)中关于视频下载的相关解释。
函数提供两个可选参数
* show_process:是否显示下载进度,通过该参数可以控制是否显示Aria2c和FFmpeg工作流程的信息。
* no_repeat:是否重复下载,通过该参数可以控制遇到已存在视频是否跳过下载流程。
### 7、过滤器(BiliUtil.Filter)
#### 7.1、`__init__(quality=None, length=None, height=None, width=None, page=None, ratio=None)`
你可以初始化一个过滤器对象用于过滤批量下载过程中不符合条件的视频(部分参数仅针对新类型的视频有效)
该函数提供了多个可选参数
* quality:视频画质,请传入一个包含预制[画质](#config)类型的数组。(例:[BiliUtil.Config.Quality.V1080P,
BiliUtil.Config.Quality.V1080Px])
* length:视频时长,请传入一个闭区间作为视频时长的判断标准(秒为单位)。(例:[40, 600])
* height:视频高度,请传入一个闭区间作为视频高度的判断标准(px为单位)。(例:[720, 1080])
* weight:视频宽度,请传入一个闭区间作为视频宽度的判断标准(px为单位)。(例:[720, 1080])
* page:视频分P,请传入一个由数组组成的数组作为分P的判读依据(下标1开始)。(例:[1, 2])
* ratio:视频比例,请传入一个闭区间作为视频比例的判断标准(比例->宽/高)。(例:[1, 2])
#### 7.2、set_xxx()
你也可以使用set加对应参数名修改对象实例中的参数信息。
## 四、Q&A<span id="qa"/>
### 开发进度与缺陷
目前已完成开发的模块
* 用户信息获取与视频列表拉取
* 频道信息获取与视频列表拉取
* 稿件信息获取与视频列表拉取
* 视频信息获取
* 任务列表生成器
* 视频列表过滤器
* 已存视频检查器
* 新版多P视频下载与合成
* 旧版单视频下载与转换
* 4K120FPS视频下载兼容
* BV与AV编码转换
目前尚存在缺陷的功能
* 旧版分段视频下载与合成
期望或将要开发的功能
* 多线程视频信息获取
* 视频弹幕获取与保存
* 视频评论获取与保存
* 远程视频缓存server
### 下载流程简单说明
使用者在下载的过程中一般遵循一下步骤:初始化对象-->获取视频对象-->创建任务-->开始下载
在创建任务的过程中,程序将处理视频的储存位置与下载后视频名称的问题,并将不同层级的对象统一转化为任务,将任务作为下载的最小单元,方便编程与管理。
在下载过程中,程序将主要处理文件夹的建立,封面的下载、音画下载与音画合并。其中在核心的下载过程中,程序将根据情况自动采用多线程多连接的下载方式,并且减少分片大小,相比于v1.x的速度有大幅提高,不再会产生挂机一晚也下载不到视频的情况。
### 暂不支持的功能整理
* [Issue#16](https://github.com/wolfbolin/BiliUtil/issues/16):列举分区下所有视频
* [Issue#17](https://github.com/wolfbolin/BiliUtil/issues/17):番剧的下载(版权与权限限制
### Cookie信息的获取与使用
- cookie信息不影响除视频画质外其他信息的获取。
- 通过cookie信息,你可以在下载视频的时候获取到更高清的视频数据,也可以手动指定视频清晰度。
- 设定cookie信息时,类似于`_uuid=B45CF1AB-xxx; LIVE_BUVID=AUTO76154xxx; SESSDATA=abcd68fd%2C1123761144%2C38d97xyz`的cookie信息也是可以被识别的。也可以传入字典类型的cookie信息,但是cookie信息中必须包含`SESSDATA`字段,该字段是提升视频质量的关键点。
- 不同的身份信息视频质量上限表:
- 未登录--->480P
- 已登录--->1080P
- 大会员--->1080P60FPS / 1080P+ / 4K120FPS
- 关键的cookie存在与发往`*.bilibili.com`域下,发往其他域的请求中不包含该信息。至于如何在浏览器中获取Cookie,请移步:[如何在浏览器中获取Cookie](http://baidux.tinoy.cn/?q=%E5%A6%82%E4%BD%95%E5%9C%A8%E6%B5%8F%E8%A7%88%E5%99%A8%E4%B8%AD%E8%8E%B7%E5%8F%96Cookie)
### 什么是稿件Album和视频Video有什么区别?
首先说明这个"Album"在此不翻译为“唱片;专辑”,这个"Album"是指包含了多个视频的一个集合,在B站页面中显示为“稿件”,代表了用户的一次投稿发布。
众所周知许多Up会上传多P,多P就对应了多个视频,因此一个av号可能会对应多个视频。所以在文档中我们不能再使用“视频”这个词汇来表达一个av号所对应的资源,因此便采用了“稿件”这个词汇来表达。
### 什么是uid、cid、aid?
我们需要为每一个资源做一个标记,官方也是这么做的。如果你真的经常使用B站,那么你一定知道UID为2的 **碧诗**和av号,本工具包沿用了B站的编号体系,不仅仅是用户与稿件,每一个频道与视频都是有他们自己的编号的。
### set_by_url有什么要求?
当我们打开了用户或视频时,URL中就已经包含了我们生成对象所需要的信息。在声明对象实例时,我们可以利用这些信息,由于不是使用正则进行匹配的,因此你可以随心所欲的拷贝URL,包括带有参数的URL都是可以接受的。但是,请确保传入的URL是与对象类型相匹配的,否则可能会导致程序运行异常。
### 为什么要用到FFmpeg?
在B站更新了数据下发形式后,你所观看的每一个视频都由纯视频和纯音频的形式下发,因此我们在下载之后需要使用工具将这些数据封装在一起。工具的使用方法我已经封装在代码中,默认会在视频下载结束后完成合并渲染。
### 新旧视频版本的区别
目前视频的版本主要分为两种,由程序内部自动判断。对于旧版视频,因为在下载前无法获取视频的具体参数,因此不可使用过滤器中的部分功能,而且旧版视频音画是在同一个视频容器中,因此无需合并数据,但同时旧版视频仅支持单链接下载,没有多服务器下发的能力。视频的下载速度可能会受到影响。
### 画质分级与最高下载画质
根据B站的限制,拥有不同身份的用户能够看到的视频数据有所不同,因此在下载视频时应尽量使用有大会员的用户身份进行下载。否则,即使你在程序中指定的是v1080Px也无法获取到该画质的视频。毕竟这个工具包不是搞大会员破解的。
### BV与AV号
在本工具包开发直出还没有BV号的概念,所有代码中对于一个稿件的唯一性判断都已AV号为准,因此在兼容BV号的过程中,工具包在所有可能输入BV号的入口都添加了BV=>AV转换的函数。该转换函数仅依赖本地计算即可完成,无需网络调用,不影响下载流程与性能。但是如果您需要对代码进行Debug,需要注意的是在工具包内部的函数调用与网络访问都是以AV号进行的。
**其他未尽适宜请提[Issue](https://github.com/wolfbolin/BiliUtil/issues)**
## 五、关于BiliUtil<span id="about"/>
这个包中还有很多未完成的细节,还有一些想实现的功能未完成。
欢迎有兴趣的小伙伴一次参与,反馈BUG,更新代码,提供方案,我们共同完善它。
你可以联系我:mailto@wolfbolin.com
**声明:该代码库内容仅供学习参考,请勿用于商业目的**
## 六、更新日志<span id="update"/>
### v0.2.3
修复
* [Issue #37](https://github.com/wolfbolin/BiliUtil/issues/37) 提出的音频下载问题
新增
* AV号、BV号转换函数与内嵌识别
* [Issue #37](https://github.com/wolfbolin/BiliUtil/issues/37) 最高4K120FPS视频下载支持
计划
* 多线程数据获取函数
* 多段视频获取方案实现
* (以上两个部分我在实现过程中不稳定概率高,难以达到发布的水平,请多包涵)
### v0.2.2
修复
* [Issue #24](https://github.com/wolfbolin/BiliUtil/issues/24) [PR #25](https://github.com/wolfbolin/BiliUtil/pull/25)缓存视频允许以分P名称命名视频
* [driverCzn](https://github.com/driverCzn) 提出的BUG,解决旧版视频下载一半后无法自动断点续传的检测策略问题
优化
* 代码结构与语法优化
缺陷
* [driverCzn](https://github.com/driverCzn) 提出但尚未解决的“旧版长视频分段下载”问题
### v0.2.1
修复
* [Issue #14](https://github.com/wolfbolin/BiliUtil/issues/14) API调整导致视频链接获取错误的BUG
* [Commit 351b07](https://github.com/wolfbolin/BiliUtil/commit/351b072100998e0b845da336a10b854710e10847) 修复视频画质设置中,先有鸡还是先有蛋的问题
* 删除部分开发无关的文件
### v0.2.0
新版发布
- 简化代码结构与文档长度,简化使用方式
- cookie直接透传,管理cookie更方便
- 多连接小分片并行下载,提高下载成功率与速度
- 支持根据视频属性、视频分P过滤无需下载的视频
- 支持设定下载代理地址,让流量走一些神奇的通道
### v0.1.10
修复
- 修复了上一个版本在Linux平台上还是不能下载的Bug
- 修正了实例中的一些BUG
- 修正了文档锚定的错误写法
### v0.1.9
修复
- 修复了在Linux平台上相对路径错误导致的无法下载问题
- 添加了批量下载Up主的代码实例,在examples文件夹中。
### v0.1.8
修复
- 修复了严重的翻页BUG(之前版本get_all_video_info获取用户视频最多30个,程序出现了一些偏差)
### v0.1.7
修复
- get_xxx_info函数中vars函数运行异常
- 修改数据拷贝方式,防止数据被篡改
- 修改对象初始化方式,防止二次创建对象时异常
- 修正若干数据获取逻辑BUG
### v0.1.6
修复
- 修复exclude_list列表使用BUG
新增
- 为频道与用户对象添加获取已下载视频的AV号列表。
- 为稿件对象添加判断视频是否已下载的访问接口
### v0.1.5
新增
- 为频道与用户对象添加获取AV号列表函数。
- 允许在批量下载视频时,通过添加排除列表,过滤部分视频的下载。
- 为多个函数添加合适的响应值。
### v0.1.4
解决ffmpeg合成阶段程序卡死。由于pipe size的大小限制,导致程序在收到超过64kb的输出时,程序会卡死不动。修改process.wait()为process.communicate()以解决该问题。
### v0.1.3
操作失误导致pip中v0.1.2版本被删除,将以v0.1.3版本发布。建议更新至最新版本后再使用。
### v0.1.2
修改:
- 修复了`ffmpeg`环境检测不通过的BUG,因为使用了错误的语法。
- 移除了对powershell的支持,未来将在linux环境中测试。
- 修复了使用`aria2c`时的错误语法,解决自定义输出路径报错。
- 修改路径获取方案,相对路径传入后,将以绝对路径进行计算。
### v0.1.1
新增:
- 音视频合并函数
- 音视频批量合并代码示例
修改:
- 删除部分无意义的`(=・ω・=)`输出
- 调整aria2与ffmpeg环境检测机制
### v0.0.1
BiliUtil已经过基础测试,正式发布第一个版本,若要直接使用,请使用pip进行安装。
| PypiClean |
/Amino_new.py-5.0.tar.gz/Amino_new.py-5.0/amino/lib/util/d.py | import marshal as m
data=m.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00@\x00\x00\x00s@\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x01l\x02Z\x02d\x00d\x02l\x03m\x04Z\x04\x01\x00d\x00d\x01l\x05Z\x05d\x03d\x04\x84\x00Z\x06d\x05d\x06\x84\x00Z\x07d\x01S\x00)\x07\xe9\x00\x00\x00\x00N)\x01\xda\x06Threadc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s,\x00\x00\x00d\x01\xa0\x00t\x01j\x02t\x03j\x04t\x03j\x05\x17\x00d\x02\x17\x00d\x03d\x04\x8d\x02\xa1\x01\xa0\x06d\x05d\x02\xa1\x02}\x00|\x00S\x00)\x06N\xda\x00\xfa\x01-i\xce\x01\x00\x00)\x01\xda\x01kz\x02--)\x07\xda\x04join\xda\x06random\xda\x07choices\xda\x06string\xda\x0fascii_uppercase\xda\x0fascii_lowercase\xda\x07replace)\x01\xda\x03val\xa9\x00r\x0e\x00\x00\x00r\x03\x00\x00\x00\xda\x0bgen_captcha\x06\x00\x00\x00s\x04\x00\x00\x00\x00\x01(\x01r\x0f\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00sd\x00\x00\x00d\x01d\x02d\x03d\x04d\x05d\x06\x9c\x05}\x00t\x00\x83\x00d\x07d\x08d\td\nd\x0b\x9c\x05}\x01|\x01}\x01t\x01j\x02d\x0c|\x00|\x01d\r\x8d\x03}\x02t\x03\xa0\x04|\x02j\x05\xa1\x01}\x02|\x02d\x0e\x19\x00d\x0f\x19\x00\xa0\x06d\x10\xa1\x01d\x11\x19\x00}\x03|\x03\xa0\x07\xa1\x00}\x04|\x03S\x00)\x12Nz\raminoapps.comzsMozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 
Safari/537.36z\x10application/json\xfa\x01/z\x16https://aminoapps.com/)\x05Z\x04Hostz\nuser-agentz\x0ccontent-type\xda\x06accept\xda\x06originZ\x02v3r\x01\x00\x00\x00Z\x08samar123z\x12p5y3xagz@wwjmp.com)\x05Z\x13recaptcha_challengeZ\x11recaptcha_versionZ\tauth_typeZ\x06secret\xda\x05emailz\x1ehttps://aminoapps.com/api/auth)\x02\xda\x07headers\xda\x04json\xda\x06result\xda\x03url\xfa\x01=\xe9\x04\x00\x00\x00)\x08r\x0f\x00\x00\x00\xda\x08requests\xda\x04postr\x15\x00\x00\x00\xda\x05loads\xda\x04text\xda\x05split\xda\x05upper)\x05r\x14\x00\x00\x00\xda\x04data\xda\x03reqZ\x05devid\xda\x03devr\x0e\x00\x00\x00r\x0e\x00\x00\x00r\x03\x00\x00\x00\xda\x07devicee\t\x00\x00\x00s$\x00\x00\x00\x00\x02\x02\x01\x02\x01\x02\x01\x02\x01\x02\xfb\x06\t\x04\x01\x02\x01\x02\x01\x02\x01\x02\xfb\x06\x07\x04\x01\x10\x01\x0c\x01\x16\x01\x08\x01r#\x00\x00\x00)\x08r\t\x00\x00\x00r\x15\x00\x00\x00r\x1a\x00\x00\x00\xda\tthreadingr\x02\x00\x00\x00r\x07\x00\x00\x00r\x0f\x00\x00\x00r#\x00\x00\x00r\x0e\x00\x00\x00r\x0e\x00\x00\x00r\x0e\x00\x00\x00r\x03\x00\x00\x00\xda\x08<module>\x01\x00\x00\x00s\x0c\x00\x00\x00\x08\x01\x08\x01\x08\x01\x0c\x01\x08\x01\x08\x03')
exec(data) | PypiClean |
/KratosSwimmingDEMApplication-9.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/KratosMultiphysics/SwimmingDEMApplication/apply_custom_body_force_process.py | import KratosMultiphysics
import KratosMultiphysics.SwimmingDEMApplication
from importlib import import_module
def Factory(settings, Model):
    """Entry point used by Kratos to build the process from json settings."""
    if isinstance(settings, KratosMultiphysics.Parameters):
        return ApplyCustomBodyForceProcess(Model, settings["Parameters"])
    raise Exception("expected input shall be a Parameters object, encapsulating a json string")
## All the processes python should be derived from "Process"
class ApplyCustomBodyForceProcess(KratosMultiphysics.Process):
    """Applies an analytic (manufactured-solution) body force to a model
    part each time step and, optionally, accumulates the nodal velocity
    error against the benchmark's exact velocity field.
    """

    def __init__(self, model, settings ):
        # model    -- Kratos Model containing the target model part
        # settings -- Parameters block, validated against the defaults below
        KratosMultiphysics.Process.__init__(self)

        default_settings = KratosMultiphysics.Parameters("""
            {
                "model_part_name" : "please_specify_model_part_name",
                "variable_name" : "BODY_FORCE",
                "benchmark_name" : "custom_body_force.vortex",
                "benchmark_parameters" : {},
                "compute_nodal_error" : true,
                "print_convergence_output" : false,
                "output_parameters" : {}
            }
            """
            )

        self.settings = settings
        self.settings.ValidateAndAssignDefaults(default_settings)

        self.model_part = model[self.settings["model_part_name"].GetString()]
        self.variable = KratosMultiphysics.KratosGlobals.GetVariable(self.settings["variable_name"].GetString())

        # The benchmark module must expose CreateManufacturedSolution(),
        # returning an object with BodyForce(...) and Velocity(...) methods.
        benchmark_module = import_module(self.settings["benchmark_name"].GetString())
        self.benchmark = benchmark_module.CreateManufacturedSolution(self.settings["benchmark_parameters"])

        self.compute_error = self.settings["compute_nodal_error"].GetBool()
        self.print_output = self.settings["print_convergence_output"].GetBool()
        if self.print_output:
            # Imported lazily so HDF5 support is only required when used.
            from custom_body_force.hdf5_output_tool import Hdf5OutputTool
            self.output_process = Hdf5OutputTool(model, self.settings["output_parameters"])

    def ExecuteBeforeSolutionLoop(self):
        """Initialise VELOCITY with the benchmark's exact field at t = 0."""
        current_time = 0.0
        for node in self.model_part.Nodes:
            value = self.benchmark.Velocity(node.X, node.Y, node.Z, current_time)
            node.SetSolutionStepValue(KratosMultiphysics.VELOCITY, value)
            # NOTE(review): exact_value repeats the identical call above and
            # is stored non-historically under the Y variable -- apparently a
            # slot reused to carry the exact field; confirm intent.
            exact_value = self.benchmark.Velocity(node.X, node.Y, node.Z, current_time)
            node.SetValue(KratosMultiphysics.Y, exact_value)

    def ExecuteInitializeSolutionStep(self):
        """Refresh the body-force variable for the new time step."""
        self._SetBodyForce()

    def ExecuteFinalizeSolutionStep(self):
        """Store the per-node velocity error, when error output is enabled."""
        if self.compute_error:
            self._ComputeVelocityError()

    def ExecuteBeforeOutputStep(self):
        """Store the exact benchmark velocity on the nodes before output."""
        self._ComputeVelocityBenchmark()

    def ExecuteFinalize(self):
        """Report the average relative error; also write it (and the
        benchmark parameters) to HDF5 when convergence output is on."""
        if self.compute_error:
            rel_err = self._SumNodalError()
            if self.print_output:
                self.output_process.WriteBodyForceAttributes(self.settings["benchmark_parameters"])
                self.output_process.WriteAverageRelativeError(rel_err)

    def _SetBodyForce(self):
        # Evaluate the manufactured body force at the current time on
        # every node of the model part.
        current_time = self.model_part.ProcessInfo[KratosMultiphysics.TIME]
        for node in self.model_part.Nodes:
            value = self.benchmark.BodyForce(node.X, node.Y, node.Z, current_time)
            node.SetSolutionStepValue(self.variable, value)

    def _ComputeVelocityError(self):
        # Relative error between FEM and exact velocity magnitudes per node;
        # epsilon avoids division by zero where the exact velocity vanishes.
        epsilon = 1e-16
        current_time = self.model_part.ProcessInfo[KratosMultiphysics.TIME]
        for node in self.model_part.Nodes:
            fem_vel = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY)
            exact_vel = self.benchmark.Velocity(node.X, node.Y, node.Z, current_time)
            fem_vel_modulus = (fem_vel[0]**2 + fem_vel[1]**2 + fem_vel[2]**2)**0.5
            exact_vel_modulus = (exact_vel[0]**2 + exact_vel[1]**2 + exact_vel[2]**2)**0.5
            error = abs(fem_vel_modulus - exact_vel_modulus)
            error = error / abs(exact_vel_modulus + epsilon)
            node.SetValue(KratosMultiphysics.NODAL_ERROR, error)

    def _CopyVelocityAsNonHistorical(self):
        # NOTE(review): not called from within this process; presumably kept
        # for external use or debugging -- confirm before removing.
        for node in self.model_part.Nodes:
            vel = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY)
            node.SetValue(KratosMultiphysics.VELOCITY, vel)

    def _ComputeVelocityBenchmark(self):
        # Store the exact velocity at the current time non-historically
        # (under the Y variable, as in ExecuteBeforeSolutionLoop).
        current_time = self.model_part.ProcessInfo[KratosMultiphysics.TIME]
        for node in self.model_part.Nodes:
            vel = self.benchmark.Velocity(node.X, node.Y, node.Z, current_time)
            node.SetValue(KratosMultiphysics.Y, vel)

    def _SumNodalError(self):
        # Average the accumulated NODAL_ERROR over all nodes, log it and
        # return the result.
        err_sum = 0.0
        for node in self.model_part.Nodes:
            err_sum = err_sum + node.GetValue(KratosMultiphysics.NODAL_ERROR)
        rel_err = err_sum / self.model_part.Nodes.__len__()
        KratosMultiphysics.Logger.PrintInfo("SwimmingDEM", "Benchmark", "The nodal error average is : ", rel_err)
        return rel_err
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/yaml/yaml/reader.py |
__all__ = ['Reader', 'ReaderError']
from .error import YAMLError, Mark
import codecs, re
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, bytes):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to a unicode string,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `bytes` object,
# - a `str` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
def __init__(self, stream):
self.name = None
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = ''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, str):
self.name = "<unicode string>"
self.check_printable(stream)
self.buffer = stream+'\0'
elif isinstance(stream, bytes):
self.name = "<byte string>"
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>")
self.eof = False
self.raw_buffer = None
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in '\n\x85\u2028\u2029' \
or (ch == '\r' and self.buffer[self.pointer] != '\n'):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
if isinstance(self.raw_buffer, bytes):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
    def update(self, length):
        """Decode raw bytes until at least `length` characters are buffered
        past the pointer; appends a NUL sentinel character at end of input.
        """
        if self.raw_buffer is None:
            # Input fully consumed.
            return
        # Drop already-consumed characters before growing the buffer.
        self.buffer = self.buffer[self.pointer:]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            if self.raw_decode is not None:
                try:
                    # Incremental decode; `final` only once the stream hit
                    # EOF so split multi-byte sequences are not errors.
                    data, converted = self.raw_decode(self.raw_buffer,
                            'strict', self.eof)
                except UnicodeDecodeError as exc:
                    character = self.raw_buffer[exc.start]
                    if self.stream is not None:
                        position = self.stream_pointer-len(self.raw_buffer)+exc.start
                    else:
                        position = exc.start
                    raise ReaderError(self.name, position, character,
                            exc.encoding, exc.reason)
            else:
                # Input was already a decoded string: take it verbatim.
                data = self.raw_buffer
                converted = len(data)
            self.check_printable(data)
            self.buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            if self.eof:
                self.buffer += '\0'
                self.raw_buffer = None
                break
    def update_raw(self, size=4096):
        """Read up to `size` units from the stream into raw_buffer; an
        empty read marks end of file."""
        data = self.stream.read(size)
        if self.raw_buffer is None:
            self.raw_buffer = data
        else:
            self.raw_buffer += data
        self.stream_pointer += len(data)
        if not data:
            self.eof = True
/AutoItLibrary-1.1.post1.zip/AutoItLibrary-1.1.post1/README.txt | AutoItLibrary
=============
Introduction
------------
AutoItLibrary is a Python keyword library that extends Robot Framework
(http://code.google.com/p/robotframework/) by providing keywords based on the
COM interface to AutoIt (http://www.autoitscript.com/autoit3/index.shtml).
AutoIt is a freeware tool for automating the Windows GUI.
In order to do screenshots, the AutoItLibrary uses the Open Source Python
Image Library tool PIL (http://www.pythonware.com/products/pil/).
Installation
------------
AutoItLibrary installs its own files and, if not already present, the 3rd party
AutoIt and PIL tools. To install, unzip the release file into a temporary
directory on your PC, open a command window in that directory and type:
python setup.py install
This installation creates the folder:
C:\RobotFramework\Extensions\AutoItLibrary
on your PC and puts various files into this folder.
Documentation
-------------
AutoItLibrary documentation is installed by the installation process into
C:\RobotFramework\Extensions\AutoItLibrary\AutoItLibrary.html
The AutoItX documentation is also installed into this folder as AutoItX.chm.
Tests
-----
The AutoItLibrary installer puts a suite of self-tests here:
C:\RobotFramework\Extensions\AutoItLibrary\tests
To run these tests, which exercise the Windows Calculator GUI, run the
RunTests.bat file in the above folder.
-------------------------------- End of file --------------------------------
| PypiClean |
/Electrum-VTC-2.9.3.3.tar.gz/Electrum-VTC-2.9.3.3/gui/kivy/uix/qrcodewidget.py | from threading import Thread
from functools import partial
import qrcode
from kivy.uix.floatlayout import FloatLayout
from kivy.graphics.texture import Texture
from kivy.properties import StringProperty
from kivy.properties import ObjectProperty, StringProperty, ListProperty,\
BooleanProperty
from kivy.lang import Builder
from kivy.clock import Clock
Builder.load_string('''
<QRCodeWidget>
canvas.before:
# Draw white Rectangle
Color:
rgba: root.background_color
Rectangle:
size: self.size
pos: self.pos
canvas.after:
Color:
rgba: root.foreground_color
Rectangle:
size: self.size
pos: self.pos
Image
id: qrimage
pos_hint: {'center_x': .5, 'center_y': .5}
allow_stretch: True
size_hint: None, None
size: root.width * .9, root.height * .9
''')
class QRCodeWidget(FloatLayout):
    """Kivy widget that renders its `data` string as a QR code texture."""

    # Payload to encode; None until set_data()/constructor provides it.
    data = StringProperty(None, allow_none=True)

    background_color = ListProperty((1, 1, 1, 1))
    # NOTE(review): alpha is 0, so the foreground rectangle drawn in
    # canvas.after is invisible by default — presumably an overlay hook;
    # confirm before changing.
    foreground_color = ListProperty((0, 0, 0, 0))

    def __init__(self, **kwargs):
        super(QRCodeWidget, self).__init__(**kwargs)
        self.data = None
        self.qr = None
        self._qrtexture = None

    def on_data(self, instance, value):
        # Property observer: rebuild the QR code whenever `data` changes,
        # but not before the widget has a canvas to draw on.
        if not (self.canvas or value):
            return
        self.update_qr()

    def set_data(self, data):
        """Set the payload, resizing the widget for long payloads."""
        if self.data == data:
            return
        MinSize = 210 if len(data) < 128 else 500
        self.setMinimumSize((MinSize, MinSize))
        self.data = data
        self.qr = None

    def update_qr(self):
        """(Re)build the qrcode object and regenerate the texture."""
        # NOTE(review): returns only when data is empty AND a QR object
        # already exists — possibly `or` was intended; confirm upstream.
        if not self.data and self.qr:
            return
        L = qrcode.constants.ERROR_CORRECT_L
        data = self.data
        self.qr = qr = qrcode.QRCode(
            version=None,
            error_correction=L,
            box_size=10,
            border=0,
        )
        qr.add_data(data)
        qr.make(fit=True)
        self.update_texture()

    def setMinimumSize(self, size):
        # currently unused, do we need this?
        self._texture_size = size

    def _create_texture(self, k):
        """Allocate a k x k RGB texture, one texel per QR module."""
        self._qrtexture = texture = Texture.create(size=(k, k), colorfmt='rgb')
        # don't interpolate texture
        texture.min_filter = 'nearest'
        texture.mag_filter = 'nearest'

    def update_texture(self):
        """Paint the QR matrix into the texture and hand it to the Image."""
        if not self.qr:
            return
        matrix = self.qr.get_matrix()
        k = len(matrix)
        # create the texture
        self._create_texture(k)
        buff = []
        bext = buff.extend
        cr, cg, cb, ca = self.background_color[:]
        cr, cg, cb = cr*255, cg*255, cb*255
        # Row order is flipped (k-1-r): texture origin is bottom-left.
        for r in range(k):
            for c in range(k):
                bext([0, 0, 0] if matrix[k-1-r][c] else [cr, cg, cb])
        # then blit the buffer
        # NOTE(review): ''.join(map(chr, buff)) yields a byte string only
        # on Python 2; blit_buffer needs bytes — confirm target runtime.
        buff = ''.join(map(chr, buff))
        # update texture
        self._upd_texture(buff)

    def _upd_texture(self, buff):
        texture = self._qrtexture
        texture.blit_buffer(buff, colorfmt='rgb', bufferfmt='ubyte')
        img = self.ids.qrimage
        # Disable GIF-style animation handling and trigger a redraw.
        img.anim_delay = -1
        img.texture = texture
        img.canvas.ask_update()
if __name__ == '__main__':
    # Manual test: display a QR code for the command-line arguments.
    from kivy.app import runTouchApp
    import sys
    data = str(sys.argv[1:])
    runTouchApp(QRCodeWidget(data=data))
/EpikCord.py-0.4.3.tar.gz/EpikCord.py-0.4.3/EpikCord/slash_command.py | from typing import (
Optional,
List,
Union
)
from .abc import BaseSlashCommandOption
from .application import ApplicationCommand
class Subcommand(BaseSlashCommandOption):
    """Slash-command option of Discord type 1 (SUB_COMMAND)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 1
class SubCommandGroup(BaseSlashCommandOption):
    """Slash-command option of Discord type 2 (SUB_COMMAND_GROUP)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 2
class StringOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 3 (STRING)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 3
class IntegerOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 4 (INTEGER)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 4
class BooleanOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 5 (BOOLEAN)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 5
class UserOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 6 (USER)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 6
class ChannelOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 7 (CHANNEL)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 7
class RoleOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 8 (ROLE)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 8
class MentionableOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 9 (MENTIONABLE)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 9
class NumberOption(BaseSlashCommandOption):
    """Slash-command option of Discord type 10 (NUMBER, i.e. float)."""

    def __init__(self, *, name: str, description: Optional[str] = None, required: bool = False):
        super().__init__(name=name, description=description, required=required)
        self.settings["type"] = 10
class SlashCommandOptionChoice:
    """A single predefined choice (name/value pair) for a slash-command
    option."""

    def __init__(self, *, name: str, value: Union[float, int, str]):
        # Both arguments are keyword-only.  The original signature read
        # `def __init__(self, * name, ...)`, which made `name` collect
        # var-positional arguments (a tuple) instead of being a
        # keyword-only string — calling with name=... raised TypeError.
        self.settings = {
            "name": name,
            "value": value
        }
class SlashCommand(ApplicationCommand):
    """Application command of the slash (chat-input) variety."""

    def __init__(self, data: dict):
        super().__init__(data)
        # .get() tolerates payloads that omit the "options" key entirely
        # (the original `data["options"]` raised KeyError); absent or
        # empty lists are normalised to None as before.
        self.options: Optional[List[Union[Subcommand, SubCommandGroup, StringOption, IntegerOption, BooleanOption, UserOption, ChannelOption, RoleOption, MentionableOption, NumberOption]]] = data.get("options") or None
/Furious-GUI-0.2.4.tar.gz/Furious-GUI-0.2.4/Furious/Utility/Proxy.py | from Furious.Utility.Constants import PLATFORM
import logging
import subprocess
logger = logging.getLogger(__name__)
def runCommand(*args, **kwargs):
    """Thin pass-through wrapper around subprocess.run()."""
    return subprocess.run(*args, **kwargs)
def linuxProxyConfig(proxy_args, arg0, arg1):
    """Set one GNOME proxy key: `gsettings set org.gnome.system.<proxy_args>
    <arg0> <arg1>`.

    Raises subprocess.CalledProcessError on failure (check=True); output
    is discarded.
    """
    runCommand(
        [
            'gsettings',
            'set',
            'org.gnome.system.' + proxy_args,
            arg0,
            arg1,
        ],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        check=True,
    )
def darwinProxyConfig(operation, *args):
    """Run `networksetup -<operation> <service> <args...>` against every
    network service on macOS."""

    def getNetworkServices():
        # List all services; the first output line is an explanatory
        # header, dropped by the [1:] slice below.
        command = runCommand(
            ['networksetup', '-listallnetworkservices'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            check=True,
        )
        service = list(filter(lambda x: x != '', command.stdout.decode().split('\n')))
        return service[1:]

    for serviceName in getNetworkServices():
        # Per-service failures are not checked here (no check=True).
        runCommand(
            [
                'networksetup',
                f'-{operation}',
                serviceName,
                *args,
            ]
        )
class _Proxy:
    """Cross-platform system proxy controller (Windows/Linux/macOS).

    Each public method dispatches on PLATFORM; the inner helper returns
    True/False and the outer method only logs the outcome.  On Windows the
    third-party `sysproxy` module does the work; Linux uses GNOME
    gsettings; macOS uses networksetup.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Windows-only background thread running sysproxy's daemon.
        self._daemonThread = None

    @staticmethod
    def pac(pac_url):
        """Configure automatic proxy configuration (PAC) from `pac_url`."""
        def _pac():
            if PLATFORM == 'Windows':
                try:
                    import sysproxy

                    return sysproxy.pac(pac_url)
                except Exception:
                    # Any non-exit exceptions
                    return False

            if PLATFORM == 'Linux':
                try:
                    linuxProxyConfig('proxy', 'autoconfig-url', pac_url)
                    linuxProxyConfig('proxy', 'mode', 'auto')
                except Exception:
                    # Any non-exit exceptions
                    return False
                else:
                    return True

            if PLATFORM == 'Darwin':
                try:
                    darwinProxyConfig('setautoproxyurl', pac_url)
                except Exception:
                    # Any non-exit exceptions
                    return False
                else:
                    return True

        if _pac():
            logger.info('set proxy PAC success')
        else:
            logger.error('set proxy PAC failed')

    @staticmethod
    def set(server, bypass):
        """Set a manual proxy.

        `server` is 'host:port'; `bypass` is a ';'-separated host list.
        """
        def _set():
            if PLATFORM == 'Windows':
                try:
                    import sysproxy

                    return sysproxy.set(server, bypass)
                except Exception:
                    # Any non-exit exceptions
                    return False

            if PLATFORM == 'Linux':
                try:
                    host, port = server.split(':')

                    linuxProxyConfig('proxy.http', 'host', host)
                    linuxProxyConfig('proxy.http', 'port', port)
                    linuxProxyConfig('proxy.https', 'host', host)
                    linuxProxyConfig('proxy.https', 'port', port)
                    # gsettings expects a GVariant-style list literal,
                    # hence str(list(...)).
                    linuxProxyConfig(
                        'proxy', 'ignore-hosts', str(list(bypass.split(';')))
                    )
                    linuxProxyConfig('proxy', 'mode', 'manual')
                except Exception:
                    # Any non-exit exceptions
                    return False
                else:
                    return True

            if PLATFORM == 'Darwin':
                try:
                    darwinProxyConfig('setwebproxy', *server.split(':'))
                    darwinProxyConfig('setsecurewebproxy', *server.split(':'))
                    darwinProxyConfig('setproxybypassdomains', *bypass.split(';'))
                except Exception:
                    # Any non-exit exceptions
                    return False
                else:
                    return True

        if _set():
            logger.info(f'set proxy server {server} success')
        else:
            logger.error(f'set proxy server {server} failed')

    @staticmethod
    def off():
        """Disable any manual or automatic proxy configuration."""
        def _off():
            if PLATFORM == 'Windows':
                try:
                    import sysproxy

                    return sysproxy.off()
                except Exception:
                    # Any non-exit exceptions
                    return False

            if PLATFORM == 'Linux':
                try:
                    linuxProxyConfig('proxy', 'mode', 'none')
                except Exception:
                    # Any non-exit exceptions
                    return False
                else:
                    return True

            if PLATFORM == 'Darwin':
                try:
                    darwinProxyConfig('setwebproxystate', 'off')
                    darwinProxyConfig('setsecurewebproxystate', 'off')
                    darwinProxyConfig('setautoproxystate', 'off')
                except Exception:
                    # Any non-exit exceptions
                    return False
                else:
                    return True

        if _off():
            logger.info('turn off proxy success')
        else:
            logger.error('turn off proxy failed')

    def daemonOn_(self):
        """Start the proxy watchdog daemon (Windows only; no-op elsewhere)."""
        def _daemonOn_():
            if PLATFORM == 'Windows':
                try:
                    import sysproxy
                    import threading

                    if self._daemonThread is not None:
                        # Already running.
                        return False

                    self._daemonThread = threading.Thread(
                        target=lambda: sysproxy.daemon_on_(), daemon=True
                    )
                    self._daemonThread.start()

                    return True
                except Exception:
                    # Any non-exit exceptions
                    return False

        if _daemonOn_():
            logger.info('turn on proxy daemon success')
        else:
            logger.error('turn on proxy daemon failed')

    def daemonOff(self):
        """Stop the proxy watchdog daemon (Windows only)."""
        def _daemonOff():
            if PLATFORM == 'Windows':
                try:
                    import sysproxy
                    import threading

                    if self._daemonThread is None:
                        # Already in off state
                        return True

                    assert isinstance(self._daemonThread, threading.Thread)

                    if sysproxy.daemon_off():
                        self._daemonThread.join()
                        # Reset it
                        self._daemonThread = None

                        return True
                    else:
                        return False
                except Exception:
                    # Any non-exit exceptions
                    return False

            # Note: try to resolve non-Windows proxy daemon issue by
            # turning on StartupOnBoot by default. This should be
            # friendly for most of the users

        if _daemonOff():
            logger.info('turn off proxy daemon success')
        else:
            logger.error('turn off proxy daemon failed')


# Module-level singleton used by the rest of the application.
Proxy = _Proxy()
/Flask-Fundatio-0.1.tar.gz/Flask-Fundatio-0.1/flask_fundatio/static/js/foundation/foundation.dropdown.js |
;(function ($, window, document, undefined) {
  'use strict';

  // Foundation 4 dropdown plugin: toggles [data-dropdown-content] panes
  // anchored to their [data-dropdown] trigger elements, on click or
  // (when is_hover is set) on hover.
  Foundation.libs.dropdown = {
    name : 'dropdown',

    version : '4.2.0',

    settings : {
      activeClass: 'open',    // class carried by an opened pane
      is_hover: false,        // open on hover instead of click
      opened: function(){},   // fired after a pane opens
      closed: function(){}    // fired after a pane closes
    },

    // Standard Foundation lib entry point: merge options, bind events
    // once, or dispatch to a named method when `method` is a string.
    init : function (scope, method, options) {
      this.scope = scope || this.scope;
      Foundation.inherit(this, 'throttle scrollLeft data_options');

      if (typeof method === 'object') {
        $.extend(true, this.settings, method);
      }

      if (typeof method !== 'string') {
        if (!this.settings.init) {
          this.events();
        }

        return this.settings.init;
      } else {
        return this[method].call(this, options);
      }
    },

    // Bind all delegated handlers (trigger clicks/hovers, outside-click
    // closing on body, and throttled window resize repositioning).
    events : function () {
      var self = this;

      $(this.scope)
        .on('click.fndtn.dropdown', '[data-dropdown]', function (e) {
          // Per-trigger settings may override the global ones.
          var settings = $.extend({}, self.settings, self.data_options($(this)));
          e.preventDefault();
          if (!settings.is_hover) self.toggle($(this));
        })
        .on('mouseenter', '[data-dropdown]', function (e) {
          var settings = $.extend({}, self.settings, self.data_options($(this)));
          if (settings.is_hover) self.toggle($(this));
        })
        .on('mouseleave', '[data-dropdown-content]', function (e) {
          // Look up the trigger owning this pane to read its settings.
          var target = $('[data-dropdown="' + $(this).attr('id') + '"]'),
              settings = $.extend({}, self.settings, self.data_options(target));
          if (settings.is_hover) self.close.call(self, $(this));
        })
        .on('opened.fndtn.dropdown', '[data-dropdown-content]', this.settings.opened)
        .on('closed.fndtn.dropdown', '[data-dropdown-content]', this.settings.closed);

      // Clicking anywhere outside an open pane closes all panes; clicks
      // inside a pane are swallowed so the pane stays open.
      $('body').on('click.fndtn.dropdown', function (e) {
        var parent = $(e.target).closest('[data-dropdown-content]');

        if ($(e.target).data('dropdown')) {
          return;
        }
        if (parent.length > 0 && ($(e.target).is('[data-dropdown-content]') || $.contains(parent.first()[0], e.target))) {
          e.stopPropagation();
          return;
        }

        self.close.call(self, $('[data-dropdown-content]'));
      });

      // Reposition open panes on resize (throttled to 50ms).
      $(window).on('resize.fndtn.dropdown', self.throttle(function () {
        self.resize.call(self);
      }, 50)).trigger('resize');

      this.settings.init = true;
    },

    // Close every pane in `dropdown` that is currently open: park it
    // off-screen, drop the active class and fire 'closed'.
    close: function (dropdown) {
      var self = this;
      dropdown.each(function () {
        if ($(this).hasClass(self.settings.activeClass)) {
          $(this)
            .css(Foundation.rtl ? 'right':'left', '-99999px')
            .removeClass(self.settings.activeClass);
          $(this).trigger('closed');
        }
      });
    },

    // Position `dropdown` next to `target`, mark it open, fire 'opened'.
    open: function (dropdown, target) {
      this
        .css(dropdown
        .addClass(this.settings.activeClass), target);
      dropdown.trigger('opened');
    },

    // Toggle the pane belonging to `target`, closing all other panes.
    toggle : function (target) {
      var dropdown = $('#' + target.data('dropdown'));

      this.close.call(this, $('[data-dropdown-content]').not(dropdown));

      if (dropdown.hasClass(this.settings.activeClass)) {
        this.close.call(this, dropdown);
      } else {
        this.close.call(this, $('[data-dropdown-content]'))
        this.open.call(this, dropdown, target);
      }
    },

    // Re-anchor the currently open pane to its trigger.
    resize : function () {
      var dropdown = $('[data-dropdown-content].open'),
          target = $("[data-dropdown='" + dropdown.attr('id') + "']");

      if (dropdown.length && target.length) {
        this.css(dropdown, target);
      }
    },

    // Compute and apply the pane's absolute position below its trigger,
    // flipping to right-alignment when it would overflow the viewport.
    css : function (dropdown, target) {
      var offset_parent = dropdown.offsetParent();

      // temporary workaround until 4.2
      if (offset_parent.length > 0 && /body/i.test(dropdown.offsetParent()[0].nodeName)) {
        var position = target.offset();
        position.top -= dropdown.offsetParent().offset().top;
        position.left -= dropdown.offsetParent().offset().left;
      } else {
        var position = target.position();
      }

      if (this.small()) {
        // Small screens: nearly full-width pane.
        dropdown.css({
          position : 'absolute',
          width: '95%',
          left: '2.5%',
          'max-width': 'none',
          top: position.top + this.outerHeight(target)
        });
      } else {
        if (!Foundation.rtl && $(window).width() > this.outerWidth(dropdown) + target.offset().left) {
          var left = position.left;
          if (dropdown.hasClass('right')) {
            dropdown.removeClass('right');
          }
        } else {
          if (!dropdown.hasClass('right')) {
            dropdown.addClass('right');
          }
          var left = position.left - (this.outerWidth(dropdown) - this.outerWidth(target));
        }

        dropdown.attr('style', '').css({
          position : 'absolute',
          top: position.top + this.outerHeight(target),
          left: left
        });
      }

      return dropdown;
    },

    // "Small screen" when the viewport is narrow or the browser is <IE9.
    small : function () {
      return $(window).width() < 768 || $('html').hasClass('lt-ie9');
    },

    // Unbind every handler this lib installed.
    off: function () {
      $(this.scope).off('.fndtn.dropdown');
      $('html, body').off('.fndtn.dropdown');
      $(window).off('.fndtn.dropdown');
      $('[data-dropdown-content]').off('.fndtn.dropdown');
      this.settings.init = false;
    },

    reflow : function () {}
  };
}(Foundation.zj, this, this.document));
/Flaskel-3.1.0rc2-py3-none-any.whl/flaskel/utils/schemas/default.py | from vbcore.datastruct import ObjectDict
from vbcore.jsonschema.support import Fields
from .openapi3 import SCHEMA as OPENAPI_SCHEMA
# Registry of all JSON schemas used by the application, addressable by
# attribute (ObjectDict).  Built declaratively with vbcore's Fields
# helpers.
SCHEMAS = ObjectDict(
    # Full OpenAPI 3 meta-schema, imported as-is.
    OPENAPI=OPENAPI_SCHEMA,
    # JSON-RPC 2.0 request/response envelopes (single or batched).
    JSONRPC=ObjectDict(
        REQUEST=Fields.oneof(
            Fields.ref("/definitions/request", description="An individual request"),
            Fields.array(
                items=Fields.ref("/definitions/request"),
                description="An array of requests",
            ),
            **Fields.schema,
            description="A JSON RPC 2.0 request",
            definitions={
                "request": Fields.object(
                    required=["jsonrpc", "method"],
                    properties={
                        "jsonrpc": Fields.enum("2.0"),
                        "method": Fields.string,
                        "params": Fields.type("array", "object"),
                        "id": Fields.type(
                            "string",
                            "number",
                            "null",
                            note=[
                                "While allowed, null should be avoided: "
                                "http://www.jsonrpc.org/specification#id1",
                                "While allowed, a number with a fractional part should be avoided: "
                                "http://www.jsonrpc.org/specification#id2",
                            ],
                        ),
                    },
                )
            },
        ),
        RESPONSE=Fields.oneof(
            Fields.ref("/definitions/response"),
            Fields.array(items=Fields.ref("/definitions/response")),
            **Fields.schema,
            definitions={
                "response": Fields.type(
                    "array",
                    "object",
                    required=["jsonrpc"],
                    properties={
                        "jsonrpc": Fields.enum("2.0"),
                        "id": Fields.type("string", "number", "null"),
                        "result": Fields.type("array", "object", "null"),
                        "error": Fields.type(
                            "array",
                            "object",
                            properties={
                                "code": Fields.number,
                                "message": Fields.string,
                            },
                        ),
                    },
                )
            },
        ),
    ),
    # Token revocation: all three token kinds are optional.
    POST_REVOKE_TOKEN=Fields.object(
        all_required=False,
        properties={
            "access_token": Fields.string,
            "refresh_token": Fields.string,
            "device_token": Fields.string,
        },
    ),
    # Credential login payload.
    POST_ACCESS_TOKEN=Fields.object(
        properties={"email": Fields.string, "password": Fields.string}
    ),
    # Token issuance responses.
    ACCESS_TOKEN=Fields.object(
        required=["access_token", "refresh_token", "expires_in", "issued_at"],
        properties={
            "access_token": Fields.string,
            "refresh_token": Fields.string,
            "expires_in": Fields.Opt.integer,
            "issued_at": Fields.integer,
            "token_type": Fields.string,
            "scope": Fields.Opt.string,
        },
    ),
    REFRESH_TOKEN=Fields.object(
        required=["access_token", "expires_in", "issued_at"],
        properties={
            "access_token": Fields.string,
            "expires_in": Fields.Opt.integer,
            "issued_at": Fields.integer,
            "token_type": Fields.string,
            "scope": Fields.Opt.string,
        },
    ),
    # Account-management payloads.
    REGISTER_CONFIRM=Fields.object(properties={"token": Fields.string}),
    PASSWORD_RESET=Fields.object(
        properties={
            "email": Fields.string,
            "new_password": Fields.string,
            "old_password": Fields.string,
        }
    ),
    PASSWORD_FORGOT=Fields.object(properties={"email": Fields.string}),
    PASSWORD_CONFIRM=Fields.object(
        properties={"token": Fields.string, "password": Fields.string}
    ),
    # RFC-7807-style API problem document.
    API_PROBLEM=Fields.object(
        properties={
            "type": Fields.string,
            "title": Fields.string,
            "detail": Fields.string,
            "instance": Fields.string,
            "status": Fields.integer,
            "response": Fields.any,
        }
    ),
    # Health-check response: overall status plus per-check results.
    HEALTH_CHECK=Fields.object(
        **Fields.schema,
        properties={
            "status": Fields.string,
            "checks": Fields.object(
                patternProperties={
                    ".": Fields.object(
                        properties={
                            "status": Fields.string,
                            "output": Fields.type("null", "string", "object"),
                        }
                    )
                }
            ),
            "links": Fields.object(properties={"about": Fields.Opt.string}),
        },
    ),
)
if __name__ == "__main__":  # pragma: no cover
    # Dump every schema as JSON for manual inspection.
    import json

    print(json.dumps(SCHEMAS))
/Adjax-1.0.1.tar.gz/Adjax-1.0.1/adjax/decorators.py |
# Public API of this module.
__all__ = ('adjax_response',)
from django.conf import settings
from django.core.serializers import json, serialize
from django.db.models.query import QuerySet
from django.http import HttpResponse
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.utils import simplejson
from django.utils.encoding import force_unicode
from django.utils.functional import Promise

from adjax.base import get_store
class LazyEncoder(json.DjangoJSONEncoder):
    """JSON encoder that also handles Django lazy translation strings
    (Promise objects) by forcing them to unicode first."""

    def default(self, obj):
        if isinstance(obj, Promise):
            return force_unicode(obj)
        return super(LazyEncoder, self).default(obj)
class JsonResponse(HttpResponse):
    """HttpResponse whose body is the JSON serialisation of `object`.

    QuerySets go through Django's model serialiser; everything else is
    dumped with simplejson using the lazy-string-aware LazyEncoder.

    Note: `QuerySet` and `simplejson` were referenced here without being
    imported, so any instantiation raised NameError — the module import
    block now provides both names.
    """

    def __init__(self, object):
        if isinstance(object, QuerySet):
            content = serialize('json', object)
        else:
            content = simplejson.dumps(
                object, indent=2, cls=LazyEncoder,
                ensure_ascii=False)
        super(JsonResponse, self).__init__(
            content, content_type='application/json')
# Where to redirect to when view is called without an ajax request.
DEFAULT_REDIRECT = getattr(settings, 'ADJAX_DEFAULT_REDIRECT', None)

# Template-context key under which the adjax store is exposed.
ADJAX_CONTEXT_KEY = 'adjax'
def adjax_response(func):
    """ Renders the response using JSON, if appropriate.

    View decorator: collects the changes registered on the adjax store
    and, for ajax requests, returns them as a JSON response.  Non-ajax
    requests fall back to a template render, the HTTP referer, or
    DEFAULT_REDIRECT.
    """
    # TODO allow a template to be given for non-ajax requests
    # NOTE(review): template_name is always None here, so the template
    # branch below is currently unreachable.
    template_name = None
    def wrapper(request, *args, **kw):
        output = func(request, *args, **kw)
        store = get_store(request)

        # If a dict is given, add that to the output
        if output is None:
            output = {}
        elif isinstance(output, dict):
            # Copy before mutating so the view's own dict is untouched.
            output = output.copy()
            output.pop('request', None)
            for key, val in output.items():
                store.extra(key, val)
        # Intercept redirects
        elif isinstance(output, HttpResponse) and output.status_code in (301, 302):
            store.redirect(output['Location'])

        if request.is_ajax():
            return store.json_response

        if isinstance(output, dict):
            # If we have a template, render that
            if template_name:
                output.setdefault(ADJAX_CONTEXT_KEY, store)
                return render_to_response(template_name, output, context_instance=RequestContext(request))
            # Try and redirect somewhere useful
            if 'HTTP_REFERER' in request.META:
                return redirect(request.META['HTTP_REFERER'])
            elif DEFAULT_REDIRECT:
                return redirect(DEFAULT_REDIRECT)
            else:
                return HttpResponse()
        return output
    return wrapper
/ELDAM_LCA-1.0-py3-none-any.whl/eldam/gui/widgets/update_elda_template.py | from eldam.gui.widgets import EldamWidget
from eldam.gui.dialogs import EldaOpenDialog, RetryCancelDialog
from eldam.gui.gui_parameters import *
from eldam.core.elda import Elda
from eldam.utils.misc import find_data_file
class UpdateEldaTemplateWidget(EldamWidget):
    """Qt widget that re-saves a selection of Elda files, thereby
    upgrading each to the current Elda template."""

    def __init__(self, *args, **kwargs):
        super().__init__(find_data_file("files/user_interfaces/update_elda_template.ui"), *args, **kwargs)

        self.elda_open_dialog = EldaOpenDialog(self)
        self.browse_button.clicked.connect(self.elda_open_dialog.exec)
        # Selected paths are stored ';'-joined in the single line edit.
        self.elda_open_dialog.filesSelected.connect(lambda files: self.open_edit.setText(";".join(files)))

        self.main_button.clicked.connect(self.update_elda_template)
        self.open_edit.textChanged.connect(self.update_main_button_state)

    def update_main_button_state(self):
        """Enable the main button only when at least one file is chosen."""
        # Check if the main button must be enabled or not
        if self.open_edit.text():
            self.main_button.setEnabled(True)
        else:
            self.main_button.setEnabled(False)

    def update_elda_template(self):
        """ Updates Elda template of every selected Eldas """
        filenames = self.open_edit.text().split(";")
        updated_files = 0

        # Reads processes from the files
        for i, input_filename in enumerate(filenames):
            self.show_message(FILES_UPDATING_MESSAGE.format(i + 1, len(filenames)))
            elda = Elda(filepath=input_filename)

            # Looping to catch PermissionError
            # (e.g. file open in Excel): the user may close it and retry.
            loop = True
            while loop:
                try:
                    elda.save(input_filename)
                    loop = False
                    updated_files += 1
                except PermissionError:
                    # Displays a retry/cancel choice in case of permission denied
                    retry_cancel = RetryCancelDialog(self, title=PERMISSION_DENIED_TITLE,
                                                     message=PERMISSION_DENIED_MESSAGE,
                                                     additional_info=PERMISSION_DENIED_ADDITIONAL_INFO.format(
                                                         input_filename))

                    if retry_cancel.exec() == RetryCancelDialog.Cancel:
                        loop = False

        if updated_files:
            self.show_message(FILES_UPDATING_SUCCESS_MESSAGE.format(updated_files),
                              message_type=EldamWidget.SuccessMessage)
/Allegra-0.63.zip/Allegra-0.63/lib/producer.py |
"http://laurentszyster.be/blog/producer/"
import types
class File(object):
    """Producer adapter for file[-like] objects: reads fixed-size chunks."""

    def __init__(self, file, chunk=1 << 14):  # 16KB per read
        self.file = file
        self.chunk = chunk

    def more(self):
        """Return the next chunk; an empty string/bytes marks EOF."""
        return self.file.read(self.chunk)

    def producer_stalled(self):
        """File reads never stall: data is synchronously available."""
        return False
class Simple(object):
    """Scanning producer for a large string: yields fixed-size chunks."""

    def __init__(self, data, chunk=1 << 14):  # 16KB chunks
        lb = len(data)
        self.content_length = lambda: lb
        # Drive the generator through the next() builtin (works on both
        # Python 2.6+ and 3) instead of the Python-2-only bound `.next`
        # method, which raised AttributeError under Python 3.
        produced = self.produce(data, chunk)
        self.more = lambda: next(produced)

    def produce(self, data, chunk):
        """Yield successive `chunk`-sized slices, then '' as terminator."""
        lb = len(data)
        start = 0
        while start < lb:
            end = start + chunk
            yield data[start:end]
            start = end
        # Exhausted: drop per-instance state so the instance no longer
        # keeps the (possibly huge) string or closures alive.
        del data, self.content_length, self.more
        yield ''

    def producer_stalled(self):
        return False
class Stalled_generator(object):
    """Simplest stallable producer: stalled until __call__ installs a
    generator (typically from an asynchronous callback)."""

    # No generator yet: the producer reports itself as stalled.
    generator = None

    def __call__(self, *args):
        self.generator = iter((
            'Stalled_generator.__call__ not implemented',
            ))

    def more(self):
        try:
            # next() builtin instead of the Python-2-only `.next()`
            # method, which raised AttributeError under Python 3.
            return next(self.generator)
        except StopIteration:
            return ''

    def producer_stalled(self):
        return self.generator is None
class Composite(object):
    """Globbing composite producer: a head string followed by a generator
    yielding strings and/or (possibly stalling) producers.

    Globs as much data as possible (roughly `glob` bytes per call) so the
    peer can fill its channel buffers efficiently.
    """

    def __init__(self, head, body, glob=1 << 14):  # 16KB globber
        # `str` instead of the Python-2-only types.StringType (the same
        # object on Python 2, so behaviour is unchanged there).
        assert (
            type(head) == str and
            type(body) == types.GeneratorType
            )
        self.current = head
        self.generator = body
        self.glob = glob

    def more(self):
        """Return the next glob of output; '' signals exhaustion."""
        if self.current == '':
            return ''
        buffer = ''
        limit = self.glob
        while True:
            if type(self.current) == str:
                buffer += self.current
                try:
                    # next() builtin instead of the Python-2-only
                    # generator `.next()` method.
                    self.current = next(self.generator)
                except StopIteration:
                    self.current = ''
                    break
                if len(buffer) > limit:
                    break
            elif self.current.producer_stalled():
                assert buffer != ''  # watch this!
                break
            else:
                data = self.current.more()
                if data:
                    buffer += data
                    if len(buffer) > limit:
                        break
                    else:
                        continue
                try:
                    self.current = next(self.generator)
                except StopIteration:
                    self.current = ''
                    break
        return buffer

    def producer_stalled(self):
        try:
            return self.current.producer_stalled()
        except:
            # Strings have no producer_stalled: a string never stalls.
            return False
# Note that this class also makes the original Medusa's lines, buffer
# and globbing producer redundant. What this class does it to glob
# as much strings as possible from a MIME like data structure:
#
# head = 'string'
# body = (generator of 'string' or producer ())
#
# It's a practical producer for asynchronous REST responses composed
# of simple strings and maybe-stalling producers. The overhead of
# another loop buys globbing and helps the peer fill its channel's
# buffers more efficiently for TCP/IP.
class Tee(object):
    """Replayable view over a shared producer.

    Every Tee wrapping the same producer shares one cache of the chunks
    produced so far (stored on the producer as `tee_buffers`), so each
    Tee can be consumed independently from the beginning.
    """

    def __init__(self, producer):
        self.index = -1
        self.producer = producer
        if hasattr(producer, 'tee_buffers'):
            self.buffers = producer.tee_buffers
        else:
            self.buffers = producer.tee_buffers = []

    def more(self):
        """Return the next chunk, replaying from the shared cache when
        another Tee has already pulled it from the producer."""
        self.index += 1
        if self.index < len(self.buffers):
            return self.buffers[self.index]
        chunk = self.producer.more()
        self.buffers.append(chunk)
        return chunk

    def producer_stalled(self):
        # Cached chunks can always be replayed without stalling.
        if self.index + 1 < len(self.buffers):
            return False
        return self.producer.producer_stalled()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.