text string | size int64 | token_count int64 |
|---|---|---|
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from porthole import models, brocade
class Command(BaseCommand):
    """Management command that operates on the configured Brocade switch stacks."""

    help = "Command the Brocade switch stacks"
    # NOTE: the obsolete ``args = ""`` attribute was dropped — BaseCommand.args
    # was removed in Django 1.10; argument parsing is done in add_arguments().
    # NOTE(review): boolean requires_system_checks is deprecated since Django
    # 4.1 (use ``[]``); kept as-is for compatibility with older Django — confirm
    # the project's Django version before changing.
    requires_system_checks = False

    def add_arguments(self, parser):
        """Register the command-line options for this command."""
        parser.add_argument(
            '--print_stacks',
            action='store_true',
            dest='print_stacks',
            help='Show the VLAN data from all switch stacks',
        )

    def handle(self, *args, **options):
        """Dispatch to the requested sub-action."""
        if options['print_stacks']:
            self.print_stacks()

    def print_stacks(self):
        """Print stack/VLAN information for every SwitchStack in the database."""
        for s in models.SwitchStack.objects.all():
            stack = brocade.SwitchStack(s.name, s.ip_address, s.raw_username, s.raw_password, port=s.port)
            stack.print_stack()
            print()  # blank line separates the stacks in the output
| 846 | 245 |
"""Tests joulia.unit_conversions.
"""
from django.test import TestCase
from joulia import unit_conversions
class GramsToPoundsTest(TestCase):
    """Tests for unit_conversions.grams_to_pounds."""

    def test_grams_to_pounds(self):
        # assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name.
        # NOTE(review): exact float equality assumes the converter multiplies
        # by exactly 0.00220462 — consider assertAlmostEqual if that changes.
        self.assertEqual(unit_conversions.grams_to_pounds(1000.0), 2.20462)
class GramsToOuncesTest(TestCase):
    """Tests for unit_conversions.grams_to_ounces."""

    def test_grams_to_ounces(self):
        # assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name.
        self.assertEqual(unit_conversions.grams_to_ounces(1000.0), 35.27392)
| 410 | 166 |
from django.contrib.auth.models import User
from django.test import TestCase
from blog.models import Category, Post
class Test_Create_Post(TestCase):
    """Integration tests for the blog Post model and its custom manager."""

    @classmethod
    def setUpTestData(cls):
        """Create one category, one user and one published post.

        The fixtures are kept on ``cls`` so individual tests can reference
        them instead of relying solely on hard-coded primary keys.
        """
        cls.test_category = Category.objects.create(name='django')
        cls.testuser1 = User.objects.create_user(
            username='test-123', password='testpass'
        )
        # category_id=1 / author_id=1 assume a fresh test database where the
        # objects above receive pk=1 — true under TestCase's per-class setup.
        cls.test_post = Post.objects.create(category_id=1, title='Post', excerpt='Excerpt',
                                            content='Content', slug='Slug', author_id=1, status='published')

    def test_blog_content(self):
        """Verify the stored field values and the __str__ representations."""
        post = Post.postobjects.get(id=1)
        cat = Category.objects.get(id=1)
        author = f'{post.author}'
        excerpt = f'{post.excerpt}'
        title = f'{post.title}'
        content = f'{post.content}'
        status = f'{post.status}'
        self.assertEqual(author, 'test-123')
        self.assertEqual(title, 'Post')
        # BUG FIX: excerpt was computed but never asserted.
        self.assertEqual(excerpt, 'Excerpt')
        self.assertEqual(content, 'Content')
        self.assertEqual(status, 'published')
        self.assertEqual(str(post), 'Post')
        self.assertEqual(str(cat), 'django')
from pathlib import Path
from hylfm.hylfm_types import (
CriterionChoice,
DatasetChoice,
LRSchedThresMode,
LRSchedulerChoice,
MetricChoice,
OptimizerChoice,
PeriodUnit,
)
from hylfm.model import HyLFM_Net
from hylfm.train import train
if __name__ == "__main__":
train(
dataset=DatasetChoice.beads_highc_b,
batch_multiplier=2,
batch_size=1,
crit_apply_weight_above_threshold=False,
crit_beta=1.0,
crit_decay_weight_by=0.8,
crit_decay_weight_every_unit=PeriodUnit.epoch,
crit_decay_weight_every_value=1,
crit_decay_weight_limit=1.0,
crit_ms_ssim_weight=0.01,
crit_threshold=0.5,
crit_weight=0.001,
criterion=CriterionChoice.WeightedSmoothL1,
data_range=1.0,
eval_batch_size=1,
interpolation_order=2,
lr_sched_factor=0.5,
lr_sched_patience=10,
lr_sched_thres=0.0001,
lr_sched_thres_mode=LRSchedThresMode.abs,
lr_scheduler=LRSchedulerChoice.ReduceLROnPlateau,
max_epochs=10,
model_weights=None, # Path()
opt_lr=3e-4,
opt_momentum=0.0,
opt_weight_decay=0.0,
optimizer=OptimizerChoice.Adam,
patience=5,
score_metric=MetricChoice.MS_SSIM,
seed=None,
validate_every_unit=PeriodUnit.epoch,
validate_every_value=1,
win_sigma=1.5,
win_size=11,
# model
nnum=19,
z_out=51,
kernel2d=3,
c00_2d=976,
c01_2d=976,
c02_2d=0,
c03_2d=0,
c04_2d=0,
up0_2d=488,
c10_2d=488,
c11_2d=0,
c12_2d=0,
c13_2d=0,
c14_2d=0,
up1_2d=244,
c20_2d=244,
c21_2d=0,
c22_2d=0,
c23_2d=0,
c24_2d=0,
up2_2d=0,
c30_2d=0,
c31_2d=0,
c32_2d=0,
c33_2d=0,
c34_2d=0,
last_kernel2d=1,
cin_3d=7,
kernel3d=3,
c00_3d=7,
c01_3d=0,
c02_3d=0,
c03_3d=0,
c04_3d=0,
up0_3d=7,
c10_3d=7,
c11_3d=7,
c12_3d=0,
c13_3d=0,
c14_3d=0,
up1_3d=0,
c20_3d=0,
c21_3d=0,
c22_3d=0,
c23_3d=0,
c24_3d=0,
up2_3d=0,
c30_3d=0,
c31_3d=0,
c32_3d=0,
c33_3d=0,
c34_3d=0,
init_fn=HyLFM_Net.InitName.xavier_uniform_,
final_activation=None,
)
| 2,504 | 1,153 |
import os
import pickle
from PIL import Image
class PatientToImageFolder:
    """Convert pickled patient records (``*.p`` files) into image-folder
    datasets (training/validation/test) for classification and, optionally,
    autoencoder training.

    The conversion is two-pass: ``collectInfo`` counts patients per pathology
    and per image type; ``createImageFolderDatasets`` then writes PNGs, using
    the counts to assign each patient to a split by fixed ratios.
    """

    def __init__(self, sourceFolder):
        # Folder containing the pickled patient files.
        self.sourceFolder = sourceFolder
        # How many patient with contrast SA for each pathology (used for classification)
        self.contrastSApathologyDict = {}
        # How many patient with contrast LA for each pathology (used for classification)
        self.contrastCH2pathologyDict = {}
        self.contrastCH3pathologyDict = {}
        self.contrastCH4pathologyDict = {}
        # How many patient with SA image (used for autoencoder training)
        self.totalSaImagePatientNum = 0
        self.curSaImagePatientNum = 0
        # How many patient with LA image (used for autoencoder training)
        self.totalCH2ImagePatientNum = 0
        self.curCH2ImagePatientNum = 0
        self.totalCH3ImagePatientNum = 0
        self.curCH3ImagePatientNum = 0
        self.totalCH4ImagePatientNum = 0
        self.curCH4ImagePatientNum = 0
        # Per-pathology running counters used to decide each patient's split.
        self.curContrastSaImagePatientNum = {}
        self.curContrastCH2ImagePatientNum = {}
        self.curContrastCH3ImagePatientNum = {}
        self.curContrastCH4ImagePatientNum = {}
        self.collectInfo()

    def collectInfo(self):
        """First pass: count patients per pathology / image type.

        Populates the ``*pathologyDict`` and ``total*`` counters and
        zero-initialises the per-pathology running counters.
        """
        for file in os.listdir(self.sourceFolder):
            if ".p" in file:
                tmpPat = pickle.load(open(os.path.join(self.sourceFolder, file), 'rb'))
                patho = tmpPat.pathology.strip()
                # Only abnormal adult hearts are kept; U18/sport/Normal skipped.
                if "U18" in patho or "sport" in patho or "Normal" in patho:
                    continue
                # elif "sport" in patho:
                #     patho = "Sport"
                # elif "Normal" not in patho and "HCM" not in patho:
                #     patho = "Other"
                if tmpPat.normalSaImages is not None:
                    self.totalSaImagePatientNum += 1
                # Classification requires contrast SA plus all three LA views.
                if (tmpPat.contrastSaImages is not None and tmpPat.contrastLaImages.ch2Images is not None and
                        tmpPat.contrastLaImages.ch3Images is not None and tmpPat.contrastLaImages.ch4Images is not None):
                    if patho in self.contrastSApathologyDict:
                        self.contrastSApathologyDict[patho] += 1
                    else:
                        self.contrastSApathologyDict[patho] = 1
                    if patho in self.contrastCH2pathologyDict:
                        self.contrastCH2pathologyDict[patho] += 1
                    else:
                        self.contrastCH2pathologyDict[patho] = 1
                    if patho in self.contrastCH3pathologyDict:
                        self.contrastCH3pathologyDict[patho] += 1
                    else:
                        self.contrastCH3pathologyDict[patho] = 1
                    if patho in self.contrastCH4pathologyDict:
                        self.contrastCH4pathologyDict[patho] += 1
                    else:
                        self.contrastCH4pathologyDict[patho] = 1
                if tmpPat.normalLaImages.ch2Images is not None:
                    self.totalCH2ImagePatientNum += 1
                if tmpPat.normalLaImages.ch3Images is not None:
                    self.totalCH3ImagePatientNum += 1
                if tmpPat.normalLaImages.ch4Images is not None:
                    self.totalCH4ImagePatientNum += 1
        # Start every per-pathology split counter at zero.
        for key in self.contrastSApathologyDict:
            self.curContrastSaImagePatientNum[key] = 0
        for key in self.contrastCH2pathologyDict:
            self.curContrastCH2ImagePatientNum[key] = 0
        for key in self.contrastCH3pathologyDict:
            self.curContrastCH3ImagePatientNum[key] = 0
        for key in self.contrastCH4pathologyDict:
            self.curContrastCH4ImagePatientNum[key] = 0

    def convertImage(self, image_2d):
        """Convert a 2-D numpy array to a PIL image.

        Returns None for fully black images (getbbox() is None) so callers
        can skip them.
        """
        # if image_2d.min() > 254:
        #     return None
        # Converting image from numpy array to PIL.
        pil_img = Image.fromarray(image_2d)
        if pil_img.getbbox() is None:
            return None
        return pil_img

    def createAutoEncoderImageFolderStructure(self, folderName):
        """Create <parent>/<folderName>/{training,test} and return the paths."""
        autoFolder = os.path.join(os.path.dirname(self.sourceFolder), folderName)
        autoTrainingFolder = os.path.join(autoFolder, "training")
        autoTestFolder = os.path.join(autoFolder, "test")
        # Intentionally fails if the folders already exist (no exist_ok):
        # prevents silently mixing with a previous export.
        os.makedirs(autoTrainingFolder)
        os.makedirs(autoTestFolder)
        return autoFolder, autoTrainingFolder, autoTestFolder

    def createClassificationImageFolderStructure(self, folderName):
        """Create <parent>/<folderName>/{training,validation,test,all} and
        return (root, training, validation, test, all) paths."""
        classFolder = os.path.join(os.path.dirname(self.sourceFolder), folderName)
        classTrainingFolder = os.path.join(classFolder, "training")
        classValidationFolder = os.path.join(classFolder, "validation")
        classTestFolder = os.path.join(classFolder, "test")
        classAllFolder = os.path.join(classFolder, 'all')
        os.makedirs(classTrainingFolder)
        os.makedirs(classValidationFolder)
        os.makedirs(classTestFolder)
        os.makedirs(classAllFolder)
        return classFolder, classTrainingFolder, classValidationFolder, classTestFolder, classAllFolder

    def saveImageForClassification(self, image, patientId, patho, testFolder, validationFolder, trainingFolder,
                                   axis, imPatho, curPatientNum, allFolder, pathologyDict):
        """Save one classification PNG into the split chosen for this patient.

        Split selection is by the patient's per-pathology ordinal: first 7.5%
        and the 85–92.5% band go to test, 7.5–15% and the last 7.5% go to
        validation, everything else to training. Also writes a per-patient
        folder (with a pathology.txt label) under the patient-separated tree.

        NOTE: relies on self.patientSeperatedTest/Validation/TrainingFolder,
        which are created in createImageFolderDatasets — call that first.
        """
        pil_img = self.convertImage(image[:, :])
        if pil_img is not None:
            if (curPatientNum[patho] <= pathologyDict[patho] * 0.075 or
                    (pathologyDict[patho] * 0.85 <= curPatientNum[patho] <= pathologyDict[patho] * 0.925)):
                imFolder = os.path.join(testFolder, imPatho)
                os.makedirs(imFolder, exist_ok=True)
                patientFolder = os.path.join(self.patientSeperatedTestFolder, imPatho + '_' + patientId)
                os.makedirs(patientFolder, exist_ok=True)
            elif ((pathologyDict[patho] * 0.075 <= curPatientNum[patho] <= pathologyDict[patho] * 0.15) or
                    curPatientNum[patho] >= int(pathologyDict[patho] * 0.925)):
                imFolder = os.path.join(validationFolder, imPatho)
                os.makedirs(imFolder, exist_ok=True)
                patientFolder = os.path.join(self.patientSeperatedValidationFolder, imPatho + '_' + patientId)
                os.makedirs(patientFolder, exist_ok=True)
            else:
                imFolder = os.path.join(trainingFolder, imPatho)
                os.makedirs(imFolder, exist_ok=True)
                patientFolder = os.path.join(self.patientSeperatedTrainingFolder, imPatho + '_' + patientId)
                os.makedirs(patientFolder, exist_ok=True)
            axisFolder = os.path.join(patientFolder, axis)
            os.makedirs(axisFolder, exist_ok=True)
            pil_img.save(os.path.join(imFolder, "{}.png".format(patientId)))
            # pil_img.save(os.path.join(allFolder, "{}.png".format(patientId)))
            pil_img.save(os.path.join(axisFolder, "{}.png".format(patientId)))
            file = open(os.path.join(patientFolder, "pathology.txt"), "w")
            file.write("{}\n".format(patho))
            file.close()

    def saveImageForAutoEncoder(self, images, patientId, testFolder, trainingFolder,
                                curPatientNum, totalPatientNum, sliceIdx, frameIdx):
        """Save one autoencoder PNG; first/last 10% of patients go to test.

        ``sliceIdx`` may be None for long-axis stacks indexed by frame only.
        """
        if sliceIdx is not None:
            pil_img = self.convertImage(images[sliceIdx, frameIdx, :, :])
        else:
            pil_img = self.convertImage(images[frameIdx, :, :])
        if pil_img is not None:
            if (curPatientNum <= totalPatientNum * 0.1
                    or curPatientNum >= int(totalPatientNum * 0.9)):
                if sliceIdx is not None:
                    pil_img.save(os.path.join(testFolder, "{}_{}_{}.png".format(patientId, sliceIdx, frameIdx)))
                else:
                    pil_img.save(os.path.join(testFolder, "{}_{}.png".format(patientId, frameIdx)))
            else:
                if sliceIdx is not None:
                    pil_img.save(os.path.join(trainingFolder, "{}_{}_{}.png".format(patientId, sliceIdx, frameIdx)))
                else:
                    pil_img.save(os.path.join(trainingFolder, "{}_{}.png".format(patientId, frameIdx)))

    def createImageFolderDatasets(self):
        """Second pass: create all folder structures and export the PNGs.

        Autoencoder export paths are currently commented out; only the
        classification datasets (SA, CH2, CH3, CH4) and the patient-separated
        tree are written.
        """
        subfol = "only_abnormal"
        # autoSaFolder, autoSaTrainingFolder, autoSaTestFolder = self.createAutoEncoderImageFolderStructure(
        #     "SaAutoEncoder")
        (contrastSaFolder, contrastSaTrainingFolder,
         contrastSaValidationFolder, contrastSaTestFolder,
         contrastSaAllFolder) = self.createClassificationImageFolderStructure(
            "{}/SaClassification".format(subfol))
        # autoCH2Folder, autoCH2TrainingFolder, autoCH2TestFolder = self.createAutoEncoderImageFolderStructure(
        #     "CH2AutoEncoder")
        (contrastCH2Folder, contrastCH2TrainingFolder,
         contrastCH2ValidationFolder, contrastCH2TestFolder,
         contrastCH2AllFolder) = self.createClassificationImageFolderStructure(
            "{}/CH2Classification".format(subfol))
        # autoCH3Folder, autoCH3TrainingFolder, autoCH3TestFolder = self.createAutoEncoderImageFolderStructure(
        #     "CH3AutoEncoder")
        (contrastCH3Folder, contrastCH3TrainingFolder,
         contrastCH3ValidationFolder, contrastCH3TestFolder,
         contrastCH3AllFolder) = self.createClassificationImageFolderStructure(
            "{}/CH3Classification".format(subfol))
        # autoCH4Folder, autoCH4TrainingFolder, autoCH4TestFolder = self.createAutoEncoderImageFolderStructure(
        #     "CH4AutoEncoder")
        (contrastCH4Folder, contrastCH4TrainingFolder,
         contrastCH4ValidationFolder, contrastCH4TestFolder,
         contrastCH4AllFolder) = self.createClassificationImageFolderStructure(
            "{}/CH4Classification".format(subfol))
        # Patient-separated tree used by saveImageForClassification.
        self.patientSeperatedFolder = os.path.join(os.path.dirname(self.sourceFolder), '{}/patients'.format(subfol))
        os.makedirs(self.patientSeperatedFolder)
        self.patientSeperatedTrainingFolder = os.path.join(self.patientSeperatedFolder, 'training')
        self.patientSeperatedValidationFolder = os.path.join(self.patientSeperatedFolder, 'validation')
        self.patientSeperatedTestFolder = os.path.join(self.patientSeperatedFolder, 'test')
        os.makedirs(self.patientSeperatedTrainingFolder)
        os.makedirs(self.patientSeperatedValidationFolder)
        os.makedirs(self.patientSeperatedTestFolder)
        for file in os.listdir(self.sourceFolder):
            if ".p" in file:
                tmpPat = pickle.load(open(os.path.join(self.sourceFolder, file), 'rb'))
                patho = tmpPat.pathology.strip()
                # Same filter as in collectInfo so the counters line up.
                if "U18" in patho or "sport" in patho or "Normal" in patho:
                    continue
                # elif "sport" in patho:
                #     patho = "Sport"
                # elif "Normal" not in patho and "HCM" not in patho:
                #     patho = "Other"
                imPatho = patho
                # if "sport" in patho:
                #     imPatho = "Sport"
                # if "Normal" not in patho:
                #     imPatho = "Hypertrophic"
                classificationReady = False
                if (tmpPat.contrastSaImages is not None and tmpPat.contrastLaImages.ch2Images is not None and
                        tmpPat.contrastLaImages.ch3Images is not None and tmpPat.contrastLaImages.ch4Images is not None):
                    classificationReady = True
                # if tmpPat.normalSaImages is not None:
                #     for i in range(tmpPat.normalSaImages.shape[0]):
                #         for j in range(tmpPat.normalSaImages.shape[1]):
                #             self.saveImageForAutoEncoder(tmpPat.normalSaImages, tmpPat.patientID, autoSaTestFolder,
                #                                          autoSaTrainingFolder, self.curSaImagePatientNum,
                #                                          self.totalSaImagePatientNum, i, j)
                #     self.curSaImagePatientNum += 1
                if classificationReady:
                    self.saveImageForClassification(tmpPat.contrastSaImages, tmpPat.patientID, patho,
                                                    contrastSaTestFolder, contrastSaValidationFolder,
                                                    contrastSaTrainingFolder, 'SA', imPatho,
                                                    self.curContrastSaImagePatientNum, contrastSaAllFolder,
                                                    self.contrastSApathologyDict)
                    self.curContrastSaImagePatientNum[patho] += 1
                # if tmpPat.normalLaImages.ch2Images is not None:
                #     for i in range(tmpPat.normalLaImages.ch2Images.shape[0]):
                #         self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch2Images, tmpPat.patientID,
                #                                      autoCH2TestFolder,
                #                                      autoCH2TrainingFolder, self.curCH2ImagePatientNum,
                #                                      self.totalCH2ImagePatientNum, None, i)
                #     self.curCH2ImagePatientNum += 1
                if classificationReady:
                    self.saveImageForClassification(tmpPat.contrastLaImages.ch2Images, tmpPat.patientID, patho,
                                                    contrastCH2TestFolder, contrastCH2ValidationFolder,
                                                    contrastCH2TrainingFolder, 'CH2', imPatho,
                                                    self.curContrastCH2ImagePatientNum, contrastCH2AllFolder,
                                                    self.contrastCH2pathologyDict)
                    self.curContrastCH2ImagePatientNum[patho] += 1
                # if tmpPat.normalLaImages.ch3Images is not None:
                #     for i in range(tmpPat.normalLaImages.ch3Images.shape[0]):
                #         self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch3Images, tmpPat.patientID,
                #                                      autoCH3TestFolder,
                #                                      autoCH3TrainingFolder, self.curCH3ImagePatientNum,
                #                                      self.totalCH3ImagePatientNum, None, i)
                #     self.curCH3ImagePatientNum += 1
                if classificationReady:
                    self.saveImageForClassification(tmpPat.contrastLaImages.ch3Images, tmpPat.patientID, patho,
                                                    contrastCH3TestFolder, contrastCH3ValidationFolder,
                                                    contrastCH3TrainingFolder, 'CH3', imPatho,
                                                    self.curContrastCH3ImagePatientNum, contrastCH3AllFolder,
                                                    self.contrastCH3pathologyDict)
                    self.curContrastCH3ImagePatientNum[patho] += 1
                # if tmpPat.normalLaImages.ch4Images is not None:
                #     for i in range(tmpPat.normalLaImages.ch4Images.shape[0]):
                #         self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch4Images, tmpPat.patientID,
                #                                      autoCH4TestFolder,
                #                                      autoCH4TrainingFolder, self.curCH4ImagePatientNum,
                #                                      self.totalCH4ImagePatientNum, None, i)
                #     self.curCH4ImagePatientNum += 1
                if classificationReady:
                    self.saveImageForClassification(tmpPat.contrastLaImages.ch4Images, tmpPat.patientID, patho,
                                                    contrastCH4TestFolder, contrastCH4ValidationFolder,
                                                    contrastCH4TrainingFolder, 'CH4', imPatho,
                                                    self.curContrastCH4ImagePatientNum, contrastCH4AllFolder,
                                                    self.contrastCH4pathologyDict)
                    self.curContrastCH4ImagePatientNum[patho] += 1
        self.createLabelFileFromPathoDict(contrastSaFolder, self.contrastSApathologyDict)
        self.createLabelFileFromPathoDict(contrastCH2Folder, self.contrastCH2pathologyDict)
        self.createLabelFileFromPathoDict(contrastCH3Folder, self.contrastCH3pathologyDict)
        self.createLabelFileFromPathoDict(contrastCH4Folder, self.contrastCH4pathologyDict)

    def createLabelFileFromPathoDict(self, destination, pathoDict):
        """Write pathologies.txt (one label per line) into *destination*."""
        file = open(os.path.join(destination, "pathologies.txt"), "w")
        for key in pathoDict:
            file.write("{}\n".format(key))
        file.close()
if __name__ == "__main__":
sourceFolder = 'D:/BME/7felev/Szakdolgozat/whole_dataset/filtered_data'
imageFolderArranger = PatientToImageFolder(sourceFolder)
imageFolderArranger.createImageFolderDatasets()
| 17,055 | 4,749 |
import pickle
from math import sqrt

import numpy as np
import pandas as pd
# BUG FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestRegressor
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVR, LinearSVR
def prune(x):
    """Clamp a predicted relevance score into the valid range [1, 3].

    Values below 1 become 1, values above 3 become 3; in-range values are
    returned unchanged (preserving their original numeric type).
    """
    return 1 if x < 1 else (3 if x > 3 else x)
def regression(reg_type, standardize_df, debug=False):
    """Predict test-set relevance with a pre-trained model and write submission.csv.

    Args:
        reg_type: model name suffix ('svr', 'linear', 'rfr') used to locate
            ../../dataset/model_<reg_type>.pickle.
        standardize_df: if True, apply the pickled StandardScaler to the features.
        debug: if True, print intermediate data for inspection.
    """
    # Load the previously trained model.
    filename = '../../dataset/model_' + reg_type + '.pickle'
    with open(filename, 'rb') as f:
        lin_model = pickle.load(f)

    score_df_tst = pd.read_pickle('../../dataset/score_df_final_tst.pickle')
    # Fill NaN value
    # score_df = score_df.fillna(0.0)
    X = np.array(score_df_tst)

    if standardize_df:
        print("Standardizing...")
        with open("../../dataset/scaler.pickle", 'rb') as handle:
            scaler = pickle.load(handle)
        X = scaler.transform(X)

    if debug:
        # BUG FIX: the old debug branches referenced undefined names
        # (score_df, Y, xtr, xts, ytr, yts) and raised NameError; print only
        # what actually exists in this function.
        print("Score DataFrame")
        print(score_df_tst)
        print("")
        print("Test feature matrix X")
        print(X)
        print("")
        print("Shape of X")
        print(X.shape)
        print("")

    yts_pred = lin_model.predict(X)
    # yts_error = sqrt(mean_squared_error(yts_pred, yts))
    print("Prediction by (" + reg_type + ") on Test data have finished")

    # Create the submission file, clamping predictions to the valid [1, 3] range.
    id_series = pd.read_csv('../../dataset/test.csv')['id']
    submission_df = pd.DataFrame(id_series, columns=['id'])
    submission_df['relevance'] = yts_pred
    submission_df['relevance'] = submission_df['relevance'].map(prune)
    submission_df.to_csv('../../dataset/submission.csv', columns=['id', 'relevance'], index=False)
if __name__ == "__main__":
# Change between:
# svr
# linear
# rfr
regression_type = 'svr'
standardize_df = True
regression(regression_type, standardize_df, debug=False) | 2,140 | 842 |
import numpy as np
import serial
import time
import matplotlib.pyplot as plt
def getData():
    """Read EEG samples from the serial port for ~10 seconds.

    Returns:
        list[float]: one reading per line received from the device.
    """
    ser = serial.Serial('/dev/ttyACM7', 9600)
    sensorReadings = []
    try:
        start = time.time()
        # Keep reading lines until 10 seconds have elapsed.
        while time.time() - start < 10:
            data = ser.readline()
            sensorReadings.append(float(data))
    finally:
        # BUG FIX: the serial port was never closed (resource leak).
        ser.close()
    return sensorReadings
def plotter(sensorReadings):
    """Plot the EEG readings as a line chart and block until the window closes."""
    plt.plot(sensorReadings)
    plt.ylabel('EEG Sensor sensorReadings')
    plt.show()
if __name__ == '__main__':
    # Capture ~10 seconds of EEG data, then display it.
    readings = getData()
    plotter(readings)
| 602 | 205 |
import os

from CMText.TextClient import TextClient
# Message to be send
message = 'Examples message to be send'
# Media to be send
media = {
"mediaName": "conversational-commerce",
"mediaUri": "https://www.cm.com/cdn/cm/cm.png",
"mimeType": "image/png"
}
# AllowedChannels in this case Whatsapp
allowedChannels = ['Whatsapp']
# Recipients
to = ['003156789000', '002134567890']
# Instantiate client with your own api-key
client = TextClient(apikey=UNIQUE_API_KEY)
# Add a Rich message to the queue
client.AddRichMessage(message=message, from_='pythonSDK', to=to, allowedChannels=allowedChannels, media=media)
# Send the messages
response = client.send()
# Print response
print(response.text) | 732 | 249 |
import pygame
from pygame.math import Vector2
class Sound:
    """A fire-and-forget sound effect that starts playing on construction.

    ``playing`` flips to False once the sound's duration (ttl) has elapsed,
    which lets AudioManager drop the instance.
    """

    def __init__(self, manager, snd, volume=1.0):
        self.manager = manager
        self.snd = pygame.mixer.Sound(snd)
        # BUG FIX: the volume argument was ignored (hard-coded 1.0).
        self.snd.set_volume(volume)
        # BUG FIX: query the constructed Sound object, not the raw argument —
        # `snd` may be a file path, which has no get_length().
        self.ttl = self.snd.get_length()
        self.playing = True
        self.snd.play()

    def update(self, dtime):
        """Count down the remaining play time by *dtime* seconds."""
        self.ttl -= dtime
        if self.ttl <= 0:
            self.playing = False
class AttachedSound(Sound):
    """A sound attached to a world position whose volume falls off with the
    distance to the manager's tracked object (typically the player)."""

    def __init__(self, manager, snd, position, volume=1.0, fade_dist=1, min_volume=0.1):
        # BUG FIX: forward the requested volume instead of silently dropping it.
        super().__init__(manager, snd, volume=volume)
        if not isinstance(position, Vector2):
            position = Vector2(position)
        self.position = position
        self.volume = volume
        self.fade_dist = fade_dist   # distance at which full volume is heard
        self.min_volume = min_volume # below this the sound is muted entirely

    def update(self, dtime):
        """Advance the ttl and re-attenuate based on listener distance."""
        super().update(dtime)
        if self.playing and self.manager.track_object is not None:
            dist = self.position.distance_to(self.manager.track_object.position)
            # BUG FIX: guard the zero-distance case (listener exactly on the
            # source) which previously raised ZeroDivisionError.
            if dist > 0:
                volume = self.volume * self.fade_dist / dist
            else:
                volume = self.volume
            if volume > self.min_volume:
                self.snd.set_volume(volume)
            else:
                self.snd.set_volume(0)
class AudioManager:
    """Owns all live Sound instances plus a cache of loaded pygame Sounds."""

    def __init__(self):
        self.loaded = {}          # file name -> cached pygame.mixer.Sound
        self.sounds = []          # currently playing Sound wrappers
        self.track_object = None  # listener; must expose .position when set

    def play_sound(self, d):
        """Start a sound described by the event dict *d*.

        ``d["name"]`` is the sound file; ``d["type"]`` is "normal" or
        "attached"; optional keys: volume, position, fade_dist, min_volume.
        """
        name = d["name"]
        if self.loaded.get(name) is None:
            self.loaded[name] = pygame.mixer.Sound(name)
        if d["type"] == "normal":
            self.sounds.append(Sound(self, self.loaded[name], volume=d.get("volume", 1.0)))
        # Actually sound can be "attached_to_position" and "attached_to_entity".
        # To avoid adding EntityManager reference into AudioManager, "position"
        # will be replaced by entity.position in Connection when sound event handled.
        # Anyway, d["type"] will be set to "attached"
        elif d["type"] == "attached":
            self.sounds.append(AttachedSound(self, self.loaded[name], d["position"],
                                             volume=d.get("volume", 1.0),
                                             fade_dist=d.get("fade_dist", 1),
                                             min_volume=d.get("min_volume", 0.1)))

    def update(self, dtime):
        """Advance every sound by *dtime* and drop the finished ones.

        BUG FIX: the previous implementation removed elements from
        ``self.sounds`` while iterating over it, which skips the element
        following each removal. Update all sounds first, then rebuild the
        list from the still-playing ones.
        """
        for sound in self.sounds:
            sound.update(dtime)
        self.sounds = [s for s in self.sounds if s.playing]
| 2,630 | 770 |
# Copyright 2021 Mohammad Kazemi <kazemi.me.222@gmail.com>.
# SPDX-License-Identifier: MIT
# Telegram API framework core imports
from collections import namedtuple
from functools import partial
from ganjoor.ganjoor import Ganjoor
from telegram.ext import Dispatcher, CallbackContext
from telegram import Update
# Helper methods import
from utils.logger import get_logger
from utils.telegram.keyboards import category_keyboard
# Telegram API framework handlers imports
from telegram.ext import CallbackQueryHandler
# Init logger
logger = get_logger(__name__)
CallbackData = namedtuple('CallbackData', "menu_name doto")
def init(dispatcher: Dispatcher, ganjoor: Ganjoor):
    """Register this module's callback-query handlers on the dispatcher."""
    handler = CallbackQueryHandler(
        partial(category_id, ganjoor=ganjoor), pattern=r'^category_*')
    dispatcher.add_handler(handler)
def category_id(update: Update, context: CallbackContext, ganjoor: Ganjoor) -> int:
    """Handle a ``category_*`` callback query.

    Callback data has the shape ``category_<cat_id>_<message_id>``; look up
    the category (with its poems) and swap in its keyboard on the original
    inline message.
    """
    query = update.callback_query
    parts = query.data.split('_')
    cat_id = parts[1]
    # message_id may itself contain underscores, so rejoin the remainder.
    message_id = '_'.join(parts[2:])
    cat = ganjoor.find_category_by_id(cat_id, with_poems=True)
    query.answer()
    context.bot.edit_message_reply_markup(
        inline_message_id=message_id, reply_markup=category_keyboard(cat, message_id))
# query.edit_reply_markup()
| 1,353 | 434 |
import os
import sys
import struct
import re
import logging
# Log everything (DEBUG and up) to stderr; this is a developer utility.
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
logger = logging.getLogger(__name__)
def list_to_md(str_list):
    """Render an iterable of strings as a Markdown bullet list.

    Each entry becomes a line of the form ``"* <entry> \n"`` (the trailing
    space before the newline is kept for output compatibility).
    """
    # Avoid shadowing the builtin `str` and quadratic string concatenation.
    return "".join("* %s \n" % item for item in str_list)
def str_to_md_list(the_str, sep):
    """Split *the_str* on *sep* and render the pieces as a Markdown bullet list.

    Each piece becomes a line of the form ``"* <piece> \n"``.
    """
    # Avoid shadowing the builtin `str` and quadratic string concatenation.
    return "".join("* %s \n" % item for item in the_str.split(sep))
| 457 | 156 |
import uos as os
import time
def countdown():
    """Give the user a five-second, one-message-per-second warning before
    stub generation begins."""
    remaining = 5
    while remaining > 0:
        print("start stubbing in {}...".format(remaining))
        time.sleep(1)
        remaining -= 1
# import stub_lvgl
try:
    # only run import if no stubs yet
    os.listdir("stubs")
    print("stub folder was found, stubbing is not automatically started")
except OSError:
    countdown()
    # BUG FIX: importing createstubs is what triggers stub generation, so the
    # import now happens only when no "stubs" folder exists — previously it
    # ran unconditionally at the top of the file, contradicting the comment
    # above. NOTE(review): presumes createstubs generates on import — confirm.
    import createstubs
| 374 | 125 |
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from blinkpy.common.host import Host
from blinkpy.common.host_mock import MockHost
from blinkpy.web_tests.breakpad.dump_reader_multipart import DumpReaderMultipart
class TestDumpReaderMultipart(unittest.TestCase):
    """Tests for DumpReaderMultipart's parsing of multipart crash dumps."""

    # A minimal multipart/form-data crash report: product, pid, and a
    # minidump payload, separated by "--boundary" markers.
    _MULTIPART_DUMP = [
        '--boundary',
        'Content-Disposition: form-data; name="prod"',
        '',
        'content_shell',
        '--boundary',
        'Content-Disposition: form-data; name="pid"',
        '',
        '4711',
        '--boundary',
        'Content-Disposition: form-data; name="upload_file_minidump"; filename="dump"',
        'Content-Type: application/octet-stream',
        '',
        'MDMP',
        '--boundary--',
    ]

    def test_check_generate_breakpad_symbols_actually_exists(self):
        """The helper script path must exist on a real (non-mock) host."""
        host = Host()
        dump_reader = DumpReaderMultipart(host, build_dir=None)
        self.assertTrue(host.filesystem.exists(dump_reader._path_to_generate_breakpad_symbols()))

    def test_check_is_functional_breakpad_tools_not_found(self):
        """Without the breakpad tools on disk, the reader reports non-functional."""
        host = MockHost()
        build_dir = "/mock-checkout/out/Debug"
        host.filesystem.maybe_make_directory(build_dir)
        dump_reader = DumpReaderMultipart(host, build_dir)
        dump_reader._file_extension = lambda: 'dmp'
        dump_reader._binaries_to_symbolize = lambda: ['content_shell']
        self.assertFalse(dump_reader.check_is_functional())

    def test_get_pid_from_dump(self):
        """The pid form field is extracted from a well-formed multipart dump."""
        host = MockHost()
        dump_file = '/crash-dumps/dump.dmp'
        expected_pid = '4711'
        host.filesystem.write_text_file(dump_file, "\r\n".join(TestDumpReaderMultipart._MULTIPART_DUMP))
        build_dir = "/mock-checkout/out/Debug"
        host.filesystem.maybe_make_directory(build_dir)
        # Pretend the breakpad tools exist so check_is_functional() passes.
        host.filesystem.exists = lambda x: True
        # The mock file object returned by open_binary_file_for_reading doesn't
        # have readline(), however, the real File object does.
        host.filesystem.open_binary_file_for_reading = host.filesystem.open_text_file_for_reading
        dump_reader = DumpReaderMultipart(host, build_dir)
        dump_reader._file_extension = lambda: 'dmp'
        dump_reader._binaries_to_symbolize = lambda: ['content_shell']
        self.assertTrue(dump_reader.check_is_functional())
        self.assertEqual(expected_pid, dump_reader._get_pid_from_dump(dump_file))

    def test_get_stack_from_dump(self):
        """Symbolization shells out to generate_breakpad_symbols.py, then
        minidump_stackwalk, and returns the child process output."""
        host = MockHost()
        dump_file = '/crash-dumps/dump.dmp'
        host.filesystem.write_text_file(dump_file, "\r\n".join(TestDumpReaderMultipart._MULTIPART_DUMP))
        build_dir = "/mock-checkout/out/Debug"
        host.filesystem.maybe_make_directory(build_dir)
        # Pretend the breakpad tools exist so check_is_functional() passes.
        host.filesystem.exists = lambda x: True
        # The mock file object returned by open_binary_file_for_reading doesn't
        # have readline(), however, the real File object does.
        host.filesystem.open_binary_file_for_reading = host.filesystem.open_text_file_for_reading
        dump_reader = DumpReaderMultipart(host, build_dir)
        dump_reader._file_extension = lambda: 'dmp'
        dump_reader._binaries_to_symbolize = lambda: ['content_shell']
        self.assertTrue(dump_reader.check_is_functional())
        self.assertEqual("MOCK output of child process", dump_reader._get_stack_from_dump(dump_file))
        self.assertEqual(2, len(host.executive.calls))
        cmd_line = " ".join(host.executive.calls[0])
        self.assertIn('generate_breakpad_symbols.py', cmd_line)
        cmd_line = " ".join(host.executive.calls[1])
        self.assertIn('minidump_stackwalk', cmd_line)
| 5,110 | 1,670 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from subprocess import Popen, PIPE
emojis="""โ๐ป Helmet With White Cross, Type-1-2
โ๐ผ Helmet With White Cross, Type-3
โ๐ฝ Helmet With White Cross, Type-4
โ๐พ Helmet With White Cross, Type-5
โ๐ฟ Helmet With White Cross, Type-6
๐๐ป Kiss, Type-1-2
๐๐ผ Kiss, Type-3
๐๐ฝ Kiss, Type-4
๐๐พ Kiss, Type-5
๐๐ฟ Kiss, Type-6
๐๐ป Couple With Heart, Type-1-2
๐๐ผ Couple With Heart, Type-3
๐๐ฝ Couple With Heart, Type-4
๐๐พ Couple With Heart, Type-5
๐๐ฟ Couple With Heart, Type-6
โท๐ป Skier, Type-1-2
โท๐ผ Skier, Type-3
โท๐ฝ Skier, Type-4
โท๐พ Skier, Type-5
โท๐ฟ Skier, Type-6
๐ Grinning Face
๐ Grinning Face With Smiling Eyes
๐ Face With Tears of Joy
๐คฃ Rolling on the Floor Laughing
๐ Smiling Face With Open Mouth
๐ Smiling Face With Open Mouth & Smiling Eyes
๐
Smiling Face With Open Mouth & Cold Sweat
๐ Smiling Face With Open Mouth & Closed Eyes
๐ Winking Face
๐ Smiling Face With Smiling Eyes
๐ Face Savouring Delicious Food
๐ Smiling Face With Sunglasses
๐ Smiling Face With Heart-Eyes
๐ Face Blowing a Kiss
๐ Kissing Face
๐ Kissing Face With Smiling Eyes
๐ Kissing Face With Closed Eyes
โบ Smiling Face
๐ Slightly Smiling Face
๐ค Hugging Face
๐คฉ Star-Struck
๐ค Thinking Face
๐คจ Face With Raised Eyebrow
๐ Neutral Face
๐ Expressionless Face
๐ถ Face Without Mouth
๐ Face With Rolling Eyes
๐ Smirking Face
๐ฃ Persevering Face
๐ฅ Disappointed but Relieved Face
๐ฎ Face With Open Mouth
๐ค Zipper-Mouth Face
๐ฏ Hushed Face
๐ช Sleepy Face
๐ซ Tired Face
๐ด Sleeping Face
๐ Relieved Face
๐ Face With Stuck-Out Tongue
๐ Face With Stuck-Out Tongue & Winking Eye
๐ Face With Stuck-Out Tongue & Closed Eyes
๐คค Drooling Face
๐ Unamused Face
๐ Face With Cold Sweat
๐ Pensive Face
๐ Confused Face
๐ Upside-Down Face
๐ค Money-Mouth Face
๐ฒ Astonished Face
โน Frowning Face
๐ Slightly Frowning Face
๐ Confounded Face
๐ Disappointed Face
๐ Worried Face
๐ค Face With Steam From Nose
๐ข Crying Face
๐ญ Loudly Crying Face
๐ฆ Frowning Face With Open Mouth
๐ง Anguished Face
๐จ Fearful Face
๐ฉ Weary Face
๐คฏ Exploding Head
๐ฌ Grimacing Face
๐ฐ Face With Open Mouth & Cold Sweat
๐ฑ Face Screaming in Fear
๐ณ Flushed Face
๐คช Crazy Face
๐ต Dizzy Face
๐ก Pouting Face
๐ Angry Face
๐คฌ Face With Symbols Over Mouth
๐ท Face With Medical Mask
๐ค Face With Thermometer
๐ค Face With Head-Bandage
๐คข Nauseated Face
๐คฎ Face Vomiting
๐คง Sneezing Face
๐ Smiling Face With Halo
๐ค Cowboy Hat Face
๐คก Clown Face
๐คฅ Lying Face
๐คซ Shushing Face
๐คญ Face With Hand Over Mouth
๐ง Face With Monocle
๐ค Nerd Face
๐ Smiling Face With Horns
๐ฟ Angry Face With Horns
๐น Ogre
๐บ Goblin
๐ Skull
โ Skull and Crossbones
๐ป Ghost
๐ฝ Alien
๐พ Alien Monster
๐ค Robot Face
๐ฉ Pile of Poo
๐บ Smiling Cat Face With Open Mouth
๐ธ Grinning Cat Face With Smiling Eyes
๐น Cat Face With Tears of Joy
๐ป Smiling Cat Face With Heart-Eyes
๐ผ Cat Face With Wry Smile
๐ฝ Kissing Cat Face With Closed Eyes
๐ Weary Cat Face
๐ฟ Crying Cat Face
๐พ Pouting Cat Face
๐ See-No-Evil Monkey
๐ Hear-No-Evil Monkey
๐ Speak-No-Evil Monkey
๐ถ Baby
๐ถ๐ป Baby: Light Skin Tone
๐ถ๐ผ Baby: Medium-Light Skin Tone
๐ถ๐ฝ Baby: Medium Skin Tone
๐ถ๐พ Baby: Medium-Dark Skin Tone
๐ถ๐ฟ Baby: Dark Skin Tone
๐ง Child
๐ง๐ป Child: Light Skin Tone
๐ง๐ผ Child: Medium-Light Skin Tone
๐ง๐ฝ Child: Medium Skin Tone
๐ง๐พ Child: Medium-Dark Skin Tone
๐ง๐ฟ Child: Dark Skin Tone
๐ฆ Boy
๐ฆ๐ป Boy: Light Skin Tone
๐ฆ๐ผ Boy: Medium-Light Skin Tone
๐ฆ๐ฝ Boy: Medium Skin Tone
๐ฆ๐พ Boy: Medium-Dark Skin Tone
๐ฆ๐ฟ Boy: Dark Skin Tone
๐ง Girl
๐ง๐ป Girl: Light Skin Tone
๐ง๐ผ Girl: Medium-Light Skin Tone
๐ง๐ฝ Girl: Medium Skin Tone
๐ง๐พ Girl: Medium-Dark Skin Tone
๐ง๐ฟ Girl: Dark Skin Tone
๐ง Adult
๐ง๐ป Adult: Light Skin Tone
๐ง๐ผ Adult: Medium-Light Skin Tone
๐ง๐ฝ Adult: Medium Skin Tone
๐ง๐พ Adult: Medium-Dark Skin Tone
๐ง๐ฟ Adult: Dark Skin Tone
๐จ Man
๐จ๐ป Man: Light Skin Tone
๐จ๐ผ Man: Medium-Light Skin Tone
๐จ๐ฝ Man: Medium Skin Tone
๐จ๐พ Man: Medium-Dark Skin Tone
๐จ๐ฟ Man: Dark Skin Tone
๐ฉ Woman
๐ฉ๐ป Woman: Light Skin Tone
๐ฉ๐ผ Woman: Medium-Light Skin Tone
๐ฉ๐ฝ Woman: Medium Skin Tone
๐ฉ๐พ Woman: Medium-Dark Skin Tone
๐ฉ๐ฟ Woman: Dark Skin Tone
๐ง Older Adult
๐ง๐ป Older Adult: Light Skin Tone
๐ง๐ผ Older Adult: Medium-Light Skin Tone
๐ง๐ฝ Older Adult: Medium Skin Tone
๐ง๐พ Older Adult: Medium-Dark Skin Tone
๐ง๐ฟ Older Adult: Dark Skin Tone
๐ด Old Man
๐ด๐ป Old Man: Light Skin Tone
๐ด๐ผ Old Man: Medium-Light Skin Tone
๐ด๐ฝ Old Man: Medium Skin Tone
๐ด๐พ Old Man: Medium-Dark Skin Tone
๐ด๐ฟ Old Man: Dark Skin Tone
๐ต Old Woman
๐ต๐ป Old Woman: Light Skin Tone
๐ต๐ผ Old Woman: Medium-Light Skin Tone
๐ต๐ฝ Old Woman: Medium Skin Tone
๐ต๐พ Old Woman: Medium-Dark Skin Tone
๐ต๐ฟ Old Woman: Dark Skin Tone
๐จโโ๏ธ Man Health Worker
๐จ๐ปโโ๏ธ Man Health Worker: Light Skin Tone
๐จ๐ผโโ๏ธ Man Health Worker: Medium-Light Skin Tone
๐จ๐ฝโโ๏ธ Man Health Worker: Medium Skin Tone
๐จ๐พโโ๏ธ Man Health Worker: Medium-Dark Skin Tone
๐จ๐ฟโโ๏ธ Man Health Worker: Dark Skin Tone
๐ฉโโ๏ธ Woman Health Worker
๐ฉ๐ปโโ๏ธ Woman Health Worker: Light Skin Tone
๐ฉ๐ผโโ๏ธ Woman Health Worker: Medium-Light Skin Tone
๐ฉ๐ฝโโ๏ธ Woman Health Worker: Medium Skin Tone
๐ฉ๐พโโ๏ธ Woman Health Worker: Medium-Dark Skin Tone
๐ฉ๐ฟโโ๏ธ Woman Health Worker: Dark Skin Tone
๐จโ๐ Man Student
๐จ๐ปโ๐ Man Student: Light Skin Tone
๐จ๐ผโ๐ Man Student: Medium-Light Skin Tone
๐จ๐ฝโ๐ Man Student: Medium Skin Tone
๐จ๐พโ๐ Man Student: Medium-Dark Skin Tone
๐จ๐ฟโ๐ Man Student: Dark Skin Tone
๐ฉโ๐ Woman Student
๐ฉ๐ปโ๐ Woman Student: Light Skin Tone
๐ฉ๐ผโ๐ Woman Student: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ Woman Student: Medium Skin Tone
๐ฉ๐พโ๐ Woman Student: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ Woman Student: Dark Skin Tone
๐จโ๐ซ Man Teacher
๐จ๐ปโ๐ซ Man Teacher: Light Skin Tone
๐จ๐ผโ๐ซ Man Teacher: Medium-Light Skin Tone
๐จ๐ฝโ๐ซ Man Teacher: Medium Skin Tone
๐จ๐พโ๐ซ Man Teacher: Medium-Dark Skin Tone
๐จ๐ฟโ๐ซ Man Teacher: Dark Skin Tone
๐ฉโ๐ซ Woman Teacher
๐ฉ๐ปโ๐ซ Woman Teacher: Light Skin Tone
๐ฉ๐ผโ๐ซ Woman Teacher: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ซ Woman Teacher: Medium Skin Tone
๐ฉ๐พโ๐ซ Woman Teacher: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ซ Woman Teacher: Dark Skin Tone
๐จโโ๏ธ Man Judge
๐จ๐ปโโ๏ธ Man Judge: Light Skin Tone
๐จ๐ผโโ๏ธ Man Judge: Medium-Light Skin Tone
๐จ๐ฝโโ๏ธ Man Judge: Medium Skin Tone
๐จ๐พโโ๏ธ Man Judge: Medium-Dark Skin Tone
๐จ๐ฟโโ๏ธ Man Judge: Dark Skin Tone
๐ฉโโ๏ธ Woman Judge
๐ฉ๐ปโโ๏ธ Woman Judge: Light Skin Tone
๐ฉ๐ผโโ๏ธ Woman Judge: Medium-Light Skin Tone
๐ฉ๐ฝโโ๏ธ Woman Judge: Medium Skin Tone
๐ฉ๐พโโ๏ธ Woman Judge: Medium-Dark Skin Tone
๐ฉ๐ฟโโ๏ธ Woman Judge: Dark Skin Tone
๐จโ๐พ Man Farmer
๐จ๐ปโ๐พ Man Farmer: Light Skin Tone
๐จ๐ผโ๐พ Man Farmer: Medium-Light Skin Tone
๐จ๐ฝโ๐พ Man Farmer: Medium Skin Tone
๐จ๐พโ๐พ Man Farmer: Medium-Dark Skin Tone
๐จ๐ฟโ๐พ Man Farmer: Dark Skin Tone
๐ฉโ๐พ Woman Farmer
๐ฉ๐ปโ๐พ Woman Farmer: Light Skin Tone
๐ฉ๐ผโ๐พ Woman Farmer: Medium-Light Skin Tone
๐ฉ๐ฝโ๐พ Woman Farmer: Medium Skin Tone
๐ฉ๐พโ๐พ Woman Farmer: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐พ Woman Farmer: Dark Skin Tone
๐จโ๐ณ Man Cook
๐จ๐ปโ๐ณ Man Cook: Light Skin Tone
๐จ๐ผโ๐ณ Man Cook: Medium-Light Skin Tone
๐จ๐ฝโ๐ณ Man Cook: Medium Skin Tone
๐จ๐พโ๐ณ Man Cook: Medium-Dark Skin Tone
๐จ๐ฟโ๐ณ Man Cook: Dark Skin Tone
๐ฉโ๐ณ Woman Cook
๐ฉ๐ปโ๐ณ Woman Cook: Light Skin Tone
๐ฉ๐ผโ๐ณ Woman Cook: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ณ Woman Cook: Medium Skin Tone
๐ฉ๐พโ๐ณ Woman Cook: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ณ Woman Cook: Dark Skin Tone
๐จโ๐ง Man Mechanic
๐จ๐ปโ๐ง Man Mechanic: Light Skin Tone
๐จ๐ผโ๐ง Man Mechanic: Medium-Light Skin Tone
๐จ๐ฝโ๐ง Man Mechanic: Medium Skin Tone
๐จ๐พโ๐ง Man Mechanic: Medium-Dark Skin Tone
๐จ๐ฟโ๐ง Man Mechanic: Dark Skin Tone
๐ฉโ๐ง Woman Mechanic
๐ฉ๐ปโ๐ง Woman Mechanic: Light Skin Tone
๐ฉ๐ผโ๐ง Woman Mechanic: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ง Woman Mechanic: Medium Skin Tone
๐ฉ๐พโ๐ง Woman Mechanic: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ง Woman Mechanic: Dark Skin Tone
๐จโ๐ญ Man Factory Worker
๐จ๐ปโ๐ญ Man Factory Worker: Light Skin Tone
๐จ๐ผโ๐ญ Man Factory Worker: Medium-Light Skin Tone
๐จ๐ฝโ๐ญ Man Factory Worker: Medium Skin Tone
๐จ๐พโ๐ญ Man Factory Worker: Medium-Dark Skin Tone
๐จ๐ฟโ๐ญ Man Factory Worker: Dark Skin Tone
๐ฉโ๐ญ Woman Factory Worker
๐ฉ๐ปโ๐ญ Woman Factory Worker: Light Skin Tone
๐ฉ๐ผโ๐ญ Woman Factory Worker: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ญ Woman Factory Worker: Medium Skin Tone
๐ฉ๐พโ๐ญ Woman Factory Worker: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ญ Woman Factory Worker: Dark Skin Tone
๐จโ๐ผ Man Office Worker
๐จ๐ปโ๐ผ Man Office Worker: Light Skin Tone
๐จ๐ผโ๐ผ Man Office Worker: Medium-Light Skin Tone
๐จ๐ฝโ๐ผ Man Office Worker: Medium Skin Tone
๐จ๐พโ๐ผ Man Office Worker: Medium-Dark Skin Tone
๐จ๐ฟโ๐ผ Man Office Worker: Dark Skin Tone
๐ฉโ๐ผ Woman Office Worker
๐ฉ๐ปโ๐ผ Woman Office Worker: Light Skin Tone
๐ฉ๐ผโ๐ผ Woman Office Worker: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ผ Woman Office Worker: Medium Skin Tone
๐ฉ๐พโ๐ผ Woman Office Worker: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ผ Woman Office Worker: Dark Skin Tone
๐จโ๐ฌ Man Scientist
๐จ๐ปโ๐ฌ Man Scientist: Light Skin Tone
๐จ๐ผโ๐ฌ Man Scientist: Medium-Light Skin Tone
๐จ๐ฝโ๐ฌ Man Scientist: Medium Skin Tone
๐จ๐พโ๐ฌ Man Scientist: Medium-Dark Skin Tone
๐จ๐ฟโ๐ฌ Man Scientist: Dark Skin Tone
๐ฉโ๐ฌ Woman Scientist
๐ฉ๐ปโ๐ฌ Woman Scientist: Light Skin Tone
๐ฉ๐ผโ๐ฌ Woman Scientist: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ฌ Woman Scientist: Medium Skin Tone
๐ฉ๐พโ๐ฌ Woman Scientist: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฌ Woman Scientist: Dark Skin Tone
๐จโ๐ป Man Technologist
๐จ๐ปโ๐ป Man Technologist: Light Skin Tone
๐จ๐ผโ๐ป Man Technologist: Medium-Light Skin Tone
๐จ๐ฝโ๐ป Man Technologist: Medium Skin Tone
๐จ๐พโ๐ป Man Technologist: Medium-Dark Skin Tone
๐จ๐ฟโ๐ป Man Technologist: Dark Skin Tone
๐ฉโ๐ป Woman Technologist
๐ฉ๐ปโ๐ป Woman Technologist: Light Skin Tone
๐ฉ๐ผโ๐ป Woman Technologist: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ป Woman Technologist: Medium Skin Tone
๐ฉ๐พโ๐ป Woman Technologist: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ป Woman Technologist: Dark Skin Tone
๐จโ๐ค Man Singer
๐จ๐ปโ๐ค Man Singer: Light Skin Tone
๐จ๐ผโ๐ค Man Singer: Medium-Light Skin Tone
๐จ๐ฝโ๐ค Man Singer: Medium Skin Tone
๐จ๐พโ๐ค Man Singer: Medium-Dark Skin Tone
๐จ๐ฟโ๐ค Man Singer: Dark Skin Tone
๐ฉโ๐ค Woman Singer
๐ฉ๐ปโ๐ค Woman Singer: Light Skin Tone
๐ฉ๐ผโ๐ค Woman Singer: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ค Woman Singer: Medium Skin Tone
๐ฉ๐พโ๐ค Woman Singer: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ค Woman Singer: Dark Skin Tone
๐จโ๐จ Man Artist
๐จ๐ปโ๐จ Man Artist: Light Skin Tone
๐จ๐ผโ๐จ Man Artist: Medium-Light Skin Tone
๐จ๐ฝโ๐จ Man Artist: Medium Skin Tone
๐จ๐พโ๐จ Man Artist: Medium-Dark Skin Tone
๐จ๐ฟโ๐จ Man Artist: Dark Skin Tone
๐ฉโ๐จ Woman Artist
๐ฉ๐ปโ๐จ Woman Artist: Light Skin Tone
๐ฉ๐ผโ๐จ Woman Artist: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ Woman Artist: Medium Skin Tone
๐ฉ๐พโ๐จ Woman Artist: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ Woman Artist: Dark Skin Tone
๐จโโ๏ธ Man Pilot
๐จ๐ปโโ๏ธ Man Pilot: Light Skin Tone
๐จ๐ผโโ๏ธ Man Pilot: Medium-Light Skin Tone
๐จ๐ฝโโ๏ธ Man Pilot: Medium Skin Tone
๐จ๐พโโ๏ธ Man Pilot: Medium-Dark Skin Tone
๐จ๐ฟโโ๏ธ Man Pilot: Dark Skin Tone
๐ฉโโ๏ธ Woman Pilot
๐ฉ๐ปโโ๏ธ Woman Pilot: Light Skin Tone
๐ฉ๐ผโโ๏ธ Woman Pilot: Medium-Light Skin Tone
๐ฉ๐ฝโโ๏ธ Woman Pilot: Medium Skin Tone
๐ฉ๐พโโ๏ธ Woman Pilot: Medium-Dark Skin Tone
๐ฉ๐ฟโโ๏ธ Woman Pilot: Dark Skin Tone
๐จโ๐ Man Astronaut
๐จ๐ปโ๐ Man Astronaut: Light Skin Tone
๐จ๐ผโ๐ Man Astronaut: Medium-Light Skin Tone
๐จ๐ฝโ๐ Man Astronaut: Medium Skin Tone
๐จ๐พโ๐ Man Astronaut: Medium-Dark Skin Tone
๐จ๐ฟโ๐ Man Astronaut: Dark Skin Tone
๐ฉโ๐ Woman Astronaut
๐ฉ๐ปโ๐ Woman Astronaut: Light Skin Tone
๐ฉ๐ผโ๐ Woman Astronaut: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ Woman Astronaut: Medium Skin Tone
๐ฉ๐พโ๐ Woman Astronaut: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ Woman Astronaut: Dark Skin Tone
๐จโ๐ Man Firefighter
๐จ๐ปโ๐ Man Firefighter: Light Skin Tone
๐จ๐ผโ๐ Man Firefighter: Medium-Light Skin Tone
๐จ๐ฝโ๐ Man Firefighter: Medium Skin Tone
๐จ๐พโ๐ Man Firefighter: Medium-Dark Skin Tone
๐จ๐ฟโ๐ Man Firefighter: Dark Skin Tone
๐ฉโ๐ Woman Firefighter
๐ฉ๐ปโ๐ Woman Firefighter: Light Skin Tone
๐ฉ๐ผโ๐ Woman Firefighter: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ Woman Firefighter: Medium Skin Tone
๐ฉ๐พโ๐ Woman Firefighter: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ Woman Firefighter: Dark Skin Tone
๐ฎ Police Officer
๐ฎ๐ป Police Officer: Light Skin Tone
๐ฎ๐ผ Police Officer: Medium-Light Skin Tone
๐ฎ๐ฝ Police Officer: Medium Skin Tone
๐ฎ๐พ Police Officer: Medium-Dark Skin Tone
๐ฎ๐ฟ Police Officer: Dark Skin Tone
๐ฎโโ๏ธ Man Police Officer
๐ฎ๐ปโโ๏ธ Man Police Officer: Light Skin Tone
๐ฎ๐ผโโ๏ธ Man Police Officer: Medium-Light Skin Tone
๐ฎ๐ฝโโ๏ธ Man Police Officer: Medium Skin Tone
๐ฎ๐พโโ๏ธ Man Police Officer: Medium-Dark Skin Tone
๐ฎ๐ฟโโ๏ธ Man Police Officer: Dark Skin Tone
๐ฎโโ๏ธ Woman Police Officer
๐ฎ๐ปโโ๏ธ Woman Police Officer: Light Skin Tone
๐ฎ๐ผโโ๏ธ Woman Police Officer: Medium-Light Skin Tone
๐ฎ๐ฝโโ๏ธ Woman Police Officer: Medium Skin Tone
๐ฎ๐พโโ๏ธ Woman Police Officer: Medium-Dark Skin Tone
๐ฎ๐ฟโโ๏ธ Woman Police Officer: Dark Skin Tone
๐ต Detective
๐ต๐ป Detective: Light Skin Tone
๐ต๐ผ Detective: Medium-Light Skin Tone
๐ต๐ฝ Detective: Medium Skin Tone
๐ต๐พ Detective: Medium-Dark Skin Tone
๐ต๐ฟ Detective: Dark Skin Tone
๐ต๏ธโโ๏ธ Man Detective
๐ต๐ปโโ๏ธ Man Detective: Light Skin Tone
๐ต๐ผโโ๏ธ Man Detective: Medium-Light Skin Tone
๐ต๐ฝโโ๏ธ Man Detective: Medium Skin Tone
๐ต๐พโโ๏ธ Man Detective: Medium-Dark Skin Tone
๐ต๐ฟโโ๏ธ Man Detective: Dark Skin Tone
๐ต๏ธโโ๏ธ Woman Detective
๐ต๐ปโโ๏ธ Woman Detective: Light Skin Tone
๐ต๐ผโโ๏ธ Woman Detective: Medium-Light Skin Tone
๐ต๐ฝโโ๏ธ Woman Detective: Medium Skin Tone
๐ต๐พโโ๏ธ Woman Detective: Medium-Dark Skin Tone
๐ต๐ฟโโ๏ธ Woman Detective: Dark Skin Tone
๐ Guard
๐๐ป Guard: Light Skin Tone
๐๐ผ Guard: Medium-Light Skin Tone
๐๐ฝ Guard: Medium Skin Tone
๐๐พ Guard: Medium-Dark Skin Tone
๐๐ฟ Guard: Dark Skin Tone
๐โโ๏ธ Man Guard
๐๐ปโโ๏ธ Man Guard: Light Skin Tone
๐๐ผโโ๏ธ Man Guard: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Guard: Medium Skin Tone
๐๐พโโ๏ธ Man Guard: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Guard: Dark Skin Tone
๐โโ๏ธ Woman Guard
๐๐ปโโ๏ธ Woman Guard: Light Skin Tone
๐๐ผโโ๏ธ Woman Guard: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Guard: Medium Skin Tone
๐๐พโโ๏ธ Woman Guard: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Guard: Dark Skin Tone
๐ท Construction Worker
๐ท๐ป Construction Worker: Light Skin Tone
๐ท๐ผ Construction Worker: Medium-Light Skin Tone
๐ท๐ฝ Construction Worker: Medium Skin Tone
๐ท๐พ Construction Worker: Medium-Dark Skin Tone
๐ท๐ฟ Construction Worker: Dark Skin Tone
๐ทโโ๏ธ Man Construction Worker
๐ท๐ปโโ๏ธ Man Construction Worker: Light Skin Tone
๐ท๐ผโโ๏ธ Man Construction Worker: Medium-Light Skin Tone
๐ท๐ฝโโ๏ธ Man Construction Worker: Medium Skin Tone
๐ท๐พโโ๏ธ Man Construction Worker: Medium-Dark Skin Tone
๐ท๐ฟโโ๏ธ Man Construction Worker: Dark Skin Tone
๐ทโโ๏ธ Woman Construction Worker
๐ท๐ปโโ๏ธ Woman Construction Worker: Light Skin Tone
๐ท๐ผโโ๏ธ Woman Construction Worker: Medium-Light Skin Tone
๐ท๐ฝโโ๏ธ Woman Construction Worker: Medium Skin Tone
๐ท๐พโโ๏ธ Woman Construction Worker: Medium-Dark Skin Tone
๐ท๐ฟโโ๏ธ Woman Construction Worker: Dark Skin Tone
๐คด Prince
๐คด๐ป Prince: Light Skin Tone
๐คด๐ผ Prince: Medium-Light Skin Tone
๐คด๐ฝ Prince: Medium Skin Tone
๐คด๐พ Prince: Medium-Dark Skin Tone
๐คด๐ฟ Prince: Dark Skin Tone
๐ธ Princess
๐ธ๐ป Princess: Light Skin Tone
๐ธ๐ผ Princess: Medium-Light Skin Tone
๐ธ๐ฝ Princess: Medium Skin Tone
๐ธ๐พ Princess: Medium-Dark Skin Tone
๐ธ๐ฟ Princess: Dark Skin Tone
๐ณ Person Wearing Turban
๐ณ๐ป Person Wearing Turban: Light Skin Tone
๐ณ๐ผ Person Wearing Turban: Medium-Light Skin Tone
๐ณ๐ฝ Person Wearing Turban: Medium Skin Tone
๐ณ๐พ Person Wearing Turban: Medium-Dark Skin Tone
๐ณ๐ฟ Person Wearing Turban: Dark Skin Tone
๐ณโโ๏ธ Man Wearing Turban
๐ณ๐ปโโ๏ธ Man Wearing Turban: Light Skin Tone
๐ณ๐ผโโ๏ธ Man Wearing Turban: Medium-Light Skin Tone
๐ณ๐ฝโโ๏ธ Man Wearing Turban: Medium Skin Tone
๐ณ๐พโโ๏ธ Man Wearing Turban: Medium-Dark Skin Tone
๐ณ๐ฟโโ๏ธ Man Wearing Turban: Dark Skin Tone
๐ณโโ๏ธ Woman Wearing Turban
๐ณ๐ปโโ๏ธ Woman Wearing Turban: Light Skin Tone
๐ณ๐ผโโ๏ธ Woman Wearing Turban: Medium-Light Skin Tone
๐ณ๐ฝโโ๏ธ Woman Wearing Turban: Medium Skin Tone
๐ณ๐พโโ๏ธ Woman Wearing Turban: Medium-Dark Skin Tone
๐ณ๐ฟโโ๏ธ Woman Wearing Turban: Dark Skin Tone
๐ฒ Man With Chinese Cap
๐ฒ๐ป Man With Chinese Cap: Light Skin Tone
๐ฒ๐ผ Man With Chinese Cap: Medium-Light Skin Tone
๐ฒ๐ฝ Man With Chinese Cap: Medium Skin Tone
๐ฒ๐พ Man With Chinese Cap: Medium-Dark Skin Tone
๐ฒ๐ฟ Man With Chinese Cap: Dark Skin Tone
๐ง Woman With Headscarf
๐ง๐ป Person With Headscarf: Light Skin Tone
๐ง๐ผ Person With Headscarf: Medium-Light Skin Tone
๐ง๐ฝ Person With Headscarf: Medium Skin Tone
๐ง๐พ Person With Headscarf: Medium-Dark Skin Tone
๐ง๐ฟ Person With Headscarf: Dark Skin Tone
๐ง Bearded Person
๐ง๐ป Bearded Person: Light Skin Tone
๐ง๐ผ Bearded Person: Medium-Light Skin Tone
๐ง๐ฝ Bearded Person: Medium Skin Tone
๐ง๐พ Bearded Person: Medium-Dark Skin Tone
๐ง๐ฟ Bearded Person: Dark Skin Tone
๐ฑ Blond-Haired Person
๐ฑ๐ป Blond-Haired Person: Light Skin Tone
๐ฑ๐ผ Blond-Haired Person: Medium-Light Skin Tone
๐ฑ๐ฝ Blond-Haired Person: Medium Skin Tone
๐ฑ๐พ Blond-Haired Person: Medium-Dark Skin Tone
๐ฑ๐ฟ Blond-Haired Person: Dark Skin Tone
๐ฑโโ๏ธ Blond-Haired Man
๐ฑ๐ปโโ๏ธ Blond-Haired Man: Light Skin Tone
๐ฑ๐ผโโ๏ธ Blond-Haired Man: Medium-Light Skin Tone
๐ฑ๐ฝโโ๏ธ Blond-Haired Man: Medium Skin Tone
๐ฑ๐พโโ๏ธ Blond-Haired Man: Medium-Dark Skin Tone
๐ฑ๐ฟโโ๏ธ Blond-Haired Man: Dark Skin Tone
๐ฑโโ๏ธ Blond-Haired Woman
๐ฑ๐ปโโ๏ธ Blond-Haired Woman: Light Skin Tone
๐ฑ๐ผโโ๏ธ Blond-Haired Woman: Medium-Light Skin Tone
๐ฑ๐ฝโโ๏ธ Blond-Haired Woman: Medium Skin Tone
๐ฑ๐พโโ๏ธ Blond-Haired Woman: Medium-Dark Skin Tone
๐ฑ๐ฟโโ๏ธ Blond-Haired Woman: Dark Skin Tone
๐คต Man in Tuxedo
๐คต๐ป Man in Tuxedo: Light Skin Tone
๐คต๐ผ Man in Tuxedo: Medium-Light Skin Tone
๐คต๐ฝ Man in Tuxedo: Medium Skin Tone
๐คต๐พ Man in Tuxedo: Medium-Dark Skin Tone
๐คต๐ฟ Man in Tuxedo: Dark Skin Tone
๐ฐ Bride With Veil
๐ฐ๐ป Bride With Veil: Light Skin Tone
๐ฐ๐ผ Bride With Veil: Medium-Light Skin Tone
๐ฐ๐ฝ Bride With Veil: Medium Skin Tone
๐ฐ๐พ Bride With Veil: Medium-Dark Skin Tone
๐ฐ๐ฟ Bride With Veil: Dark Skin Tone
๐คฐ Pregnant Woman
๐คฐ๐ป Pregnant Woman: Light Skin Tone
๐คฐ๐ผ Pregnant Woman: Medium-Light Skin Tone
๐คฐ๐ฝ Pregnant Woman: Medium Skin Tone
๐คฐ๐พ Pregnant Woman: Medium-Dark Skin Tone
๐คฐ๐ฟ Pregnant Woman: Dark Skin Tone
๐คฑ Breast-Feeding
๐คฑ๐ป Breast-Feeding: Light Skin Tone
๐คฑ๐ผ Breast-Feeding: Medium-Light Skin Tone
๐คฑ๐ฝ Breast-Feeding: Medium Skin Tone
๐คฑ๐พ Breast-Feeding: Medium-Dark Skin Tone
๐คฑ๐ฟ Breast-Feeding: Dark Skin Tone
๐ผ Baby Angel
๐ผ๐ป Baby Angel: Light Skin Tone
๐ผ๐ผ Baby Angel: Medium-Light Skin Tone
๐ผ๐ฝ Baby Angel: Medium Skin Tone
๐ผ๐พ Baby Angel: Medium-Dark Skin Tone
๐ผ๐ฟ Baby Angel: Dark Skin Tone
๐
Santa Claus
๐
๐ป Santa Claus: Light Skin Tone
๐
๐ผ Santa Claus: Medium-Light Skin Tone
๐
๐ฝ Santa Claus: Medium Skin Tone
๐
๐พ Santa Claus: Medium-Dark Skin Tone
๐
๐ฟ Santa Claus: Dark Skin Tone
๐คถ Mrs. Claus
๐คถ๐ป Mrs. Claus: Light Skin Tone
๐คถ๐ผ Mrs. Claus: Medium-Light Skin Tone
๐คถ๐ฝ Mrs. Claus: Medium Skin Tone
๐คถ๐พ Mrs. Claus: Medium-Dark Skin Tone
๐คถ๐ฟ Mrs. Claus: Dark Skin Tone
๐ง Mage
๐ง๐ป Mage: Light Skin Tone
๐ง๐ผ Mage: Medium-Light Skin Tone
๐ง๐ฝ Mage: Medium Skin Tone
๐ง๐พ Mage: Medium-Dark Skin Tone
๐ง๐ฟ Mage: Dark Skin Tone
๐งโโ๏ธ Woman Mage
๐ง๐ปโโ๏ธ Woman Mage: Light Skin Tone
๐ง๐ผโโ๏ธ Woman Mage: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Woman Mage: Medium Skin Tone
๐ง๐พโโ๏ธ Woman Mage: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Woman Mage: Dark Skin Tone
๐งโโ๏ธ Man Mage
๐ง๐ปโโ๏ธ Man Mage: Light Skin Tone
๐ง๐ผโโ๏ธ Man Mage: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Man Mage: Medium Skin Tone
๐ง๐พโโ๏ธ Man Mage: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Man Mage: Dark Skin Tone
๐ง Fairy
๐ง๐ป Fairy: Light Skin Tone
๐ง๐ผ Fairy: Medium-Light Skin Tone
๐ง๐ฝ Fairy: Medium Skin Tone
๐ง๐พ Fairy: Medium-Dark Skin Tone
๐ง๐ฟ Fairy: Dark Skin Tone
๐งโโ๏ธ Woman Fairy
๐ง๐ปโโ๏ธ Woman Fairy: Light Skin Tone
๐ง๐ผโโ๏ธ Woman Fairy: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Woman Fairy: Medium Skin Tone
๐ง๐พโโ๏ธ Woman Fairy: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Woman Fairy: Dark Skin Tone
๐งโโ๏ธ Man Fairy
๐ง๐ปโโ๏ธ Man Fairy: Light Skin Tone
๐ง๐ผโโ๏ธ Man Fairy: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Man Fairy: Medium Skin Tone
๐ง๐พโโ๏ธ Man Fairy: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Man Fairy: Dark Skin Tone
๐ง Vampire
๐ง๐ป Vampire: Light Skin Tone
๐ง๐ผ Vampire: Medium-Light Skin Tone
๐ง๐ฝ Vampire: Medium Skin Tone
๐ง๐พ Vampire: Medium-Dark Skin Tone
๐ง๐ฟ Vampire: Dark Skin Tone
๐งโโ๏ธ Woman Vampire
๐ง๐ปโโ๏ธ Woman Vampire: Light Skin Tone
๐ง๐ผโโ๏ธ Woman Vampire: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Woman Vampire: Medium Skin Tone
๐ง๐พโโ๏ธ Woman Vampire: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Woman Vampire: Dark Skin Tone
๐งโโ๏ธ Man Vampire
๐ง๐ปโโ๏ธ Man Vampire: Light Skin Tone
๐ง๐ผโโ๏ธ Man Vampire: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Man Vampire: Medium Skin Tone
๐ง๐พโโ๏ธ Man Vampire: Medium-Dark Skin Tone
๐ฏ๐ป Woman With Bunny Ears, Type-1-2
๐ฏ๐ผ Woman With Bunny Ears, Type-3
๐ง๐ฟโโ๏ธ Man Vampire: Dark Skin Tone
๐ฏ๐ฝ Woman With Bunny Ears, Type-4
๐ฏ๐พ Woman With Bunny Ears, Type-5
๐ง Merperson
๐ฏ๐ฟ Woman With Bunny Ears, Type-6
๐ง๐ป Merperson: Light Skin Tone
๐ฏ๐ปโโ๏ธ Men With Bunny Ears Partying, Type-1-2
๐ง๐ผ Merperson: Medium-Light Skin Tone
๐ฏ๐ผโโ๏ธ Men With Bunny Ears Partying, Type-3
๐ง๐ฝ Merperson: Medium Skin Tone
๐ฏ๐ฝโโ๏ธ Men With Bunny Ears Partying, Type-4
๐ง๐พ Merperson: Medium-Dark Skin Tone
๐ฏ๐พโโ๏ธ Men With Bunny Ears Partying, Type-5
๐ง๐ฟ Merperson: Dark Skin Tone
๐ฏ๐ฟโโ๏ธ Men With Bunny Ears Partying, Type-6
๐งโโ๏ธ Mermaid
๐ฏ๐ปโโ๏ธ Women With Bunny Ears Partying, Type-1-2
๐ง๐ปโโ๏ธ Mermaid: Light Skin Tone
๐ฏ๐ผโโ๏ธ Women With Bunny Ears Partying, Type-3
๐ง๐ผโโ๏ธ Mermaid: Medium-Light Skin Tone
๐ฏ๐ฝโโ๏ธ Women With Bunny Ears Partying, Type-4
๐ฏ๐พโโ๏ธ Women With Bunny Ears Partying, Type-5
๐ง๐ฝโโ๏ธ Mermaid: Medium Skin Tone
๐ฏ๐ฟโโ๏ธ Women With Bunny Ears Partying, Type-6
๐ง๐พโโ๏ธ Mermaid: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Mermaid: Dark Skin Tone
๐งโโ๏ธ Merman
๐ง๐ปโโ๏ธ Merman: Light Skin Tone
๐ง๐ผโโ๏ธ Merman: Medium-Light Skin Tone
๐ซ๐ป Man and Woman Holding Hands, Type-1-2
๐ง๐ฝโโ๏ธ Merman: Medium Skin Tone
๐ซ๐ผ Man and Woman Holding Hands, Type-3
๐ซ๐ฝ Man and Woman Holding Hands, Type-4
๐ง๐พโโ๏ธ Merman: Medium-Dark Skin Tone
๐ซ๐พ Man and Woman Holding Hands, Type-5
๐ซ๐ฟ Man and Woman Holding Hands, Type-6
๐ง๐ฟโโ๏ธ Merman: Dark Skin Tone
๐ฌ๐ป Two Men Holding Hands, Type-1-2
๐ง Elf
๐ฌ๐ผ Two Men Holding Hands, Type-3
๐ฌ๐ฝ Two Men Holding Hands, Type-4
๐ง๐ป Elf: Light Skin Tone
๐ฌ๐พ Two Men Holding Hands, Type-5
๐ง๐ผ Elf: Medium-Light Skin Tone
๐ฌ๐ฟ Two Men Holding Hands, Type-6
๐ง๐ฝ Elf: Medium Skin Tone
๐ง๐พ Elf: Medium-Dark Skin Tone
๐ญ๐ป Two Women Holding Hands, Type-1-2
๐ง๐ฟ Elf: Dark Skin Tone
๐งโโ๏ธ Woman Elf
๐ญ๐ผ Two Women Holding Hands, Type-3
๐ญ๐ฝ Two Women Holding Hands, Type-4
๐ง๐ปโโ๏ธ Woman Elf: Light Skin Tone
๐ญ๐พ Two Women Holding Hands, Type-5
๐ญ๐ฟ Two Women Holding Hands, Type-6
๐ง๐ผโโ๏ธ Woman Elf: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Woman Elf: Medium Skin Tone
๐ง๐พโโ๏ธ Woman Elf: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Woman Elf: Dark Skin Tone
๐งโโ๏ธ Man Elf
๐ช๐ป Family, Type-1-2
๐ง๐ปโโ๏ธ Man Elf: Light Skin Tone
๐ช๐ผ Family, Type-3
๐ช๐ฝ Family, Type-4
๐ง๐ผโโ๏ธ Man Elf: Medium-Light Skin Tone
๐ช๐พ Family, Type-5
๐ช๐ฟ Family, Type-6
๐ง๐ฝโโ๏ธ Man Elf: Medium Skin Tone
๐ง๐พโโ๏ธ Man Elf: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Man Elf: Dark Skin Tone
๐ง Genie
๐งโโ๏ธ Woman Genie
๐งโโ๏ธ Man Genie
๐ง Zombie
๐งโโ๏ธ Woman Zombie
๐งโโ๏ธ Man Zombie
๐ Person Frowning
๐๐ป Person Frowning: Light Skin Tone
๐๐ผ Person Frowning: Medium-Light Skin Tone
๐๐ฝ Person Frowning: Medium Skin Tone
๐๐พ Person Frowning: Medium-Dark Skin Tone
๐๐ฟ Person Frowning: Dark Skin Tone
๐โโ๏ธ Man Frowning
๐๐ปโโ๏ธ Man Frowning: Light Skin Tone
๐ป Light Skin Tone
๐ผ Medium-Light Skin Tone
๐๐ผโโ๏ธ Man Frowning: Medium-Light Skin Tone
๐ฝ Medium Skin Tone
๐๐ฝโโ๏ธ Man Frowning: Medium Skin Tone
๐พ Medium-Dark Skin Tone
๐ฟ Dark Skin Tone
๐๐พโโ๏ธ Man Frowning: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Frowning: Dark Skin Tone
๐โโ๏ธ Woman Frowning
๐๐ปโโ๏ธ Woman Frowning: Light Skin Tone
๐๐ผโโ๏ธ Woman Frowning: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Frowning: Medium Skin Tone
๐๐พโโ๏ธ Woman Frowning: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Frowning: Dark Skin Tone
๐ Person Pouting
๐๐ป Person Pouting: Light Skin Tone
๐๐ผ Person Pouting: Medium-Light Skin Tone
๐๐ฝ Person Pouting: Medium Skin Tone
๐๐พ Person Pouting: Medium-Dark Skin Tone
๐๐ฟ Person Pouting: Dark Skin Tone
๐โโ๏ธ Man Pouting
๐๐ปโโ๏ธ Man Pouting: Light Skin Tone
๐๐ผโโ๏ธ Man Pouting: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Pouting: Medium Skin Tone
๐๐พโโ๏ธ Man Pouting: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Pouting: Dark Skin Tone
๐โโ๏ธ Woman Pouting
๐๐ปโโ๏ธ Woman Pouting: Light Skin Tone
๐๐ผโโ๏ธ Woman Pouting: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Pouting: Medium Skin Tone
๐๐พโโ๏ธ Woman Pouting: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Pouting: Dark Skin Tone
๐
Person Gesturing No
๐
๐ป Person Gesturing No: Light Skin Tone
๐
๐ผ Person Gesturing No: Medium-Light Skin Tone
๐
๐ฝ Person Gesturing No: Medium Skin Tone
๐
๐พ Person Gesturing No: Medium-Dark Skin Tone
๐
๐ฟ Person Gesturing No: Dark Skin Tone
๐
โโ๏ธ Man Gesturing No
๐
๐ปโโ๏ธ Man Gesturing No: Light Skin Tone
๐
๐ผโโ๏ธ Man Gesturing No: Medium-Light Skin Tone
๐
๐ฝโโ๏ธ Man Gesturing No: Medium Skin Tone
๐
๐พโโ๏ธ Man Gesturing No: Medium-Dark Skin Tone
๐
๐ฟโโ๏ธ Man Gesturing No: Dark Skin Tone
๐
โโ๏ธ Woman Gesturing No
๐
๐ปโโ๏ธ Woman Gesturing No: Light Skin Tone
๐
๐ผโโ๏ธ Woman Gesturing No: Medium-Light Skin Tone
๐
๐ฝโโ๏ธ Woman Gesturing No: Medium Skin Tone
๐
๐พโโ๏ธ Woman Gesturing No: Medium-Dark Skin Tone
๐
๐ฟโโ๏ธ Woman Gesturing No: Dark Skin Tone
๐ Person Gesturing OK
๐๐ป Person Gesturing OK: Light Skin Tone
๐๐ผ Person Gesturing OK: Medium-Light Skin Tone
๐๐ฝ Person Gesturing OK: Medium Skin Tone
๐๐พ Person Gesturing OK: Medium-Dark Skin Tone
๐๐ฟ Person Gesturing OK: Dark Skin Tone
๐โโ๏ธ Man Gesturing OK
๐๐ปโโ๏ธ Man Gesturing OK: Light Skin Tone
๐๐ผโโ๏ธ Man Gesturing OK: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Gesturing OK: Medium Skin Tone
๐๐พโโ๏ธ Man Gesturing OK: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Gesturing OK: Dark Skin Tone
๐โโ๏ธ Woman Gesturing OK
๐๐ปโโ๏ธ Woman Gesturing OK: Light Skin Tone
๐๐ผโโ๏ธ Woman Gesturing OK: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Gesturing OK: Medium Skin Tone
๐๐พโโ๏ธ Woman Gesturing OK: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Gesturing OK: Dark Skin Tone
๐ Person Tipping Hand
๐๐ป Person Tipping Hand: Light Skin Tone
๐๐ผ Person Tipping Hand: Medium-Light Skin Tone
๐๐ฝ Person Tipping Hand: Medium Skin Tone
๐๐พ Person Tipping Hand: Medium-Dark Skin Tone
๐๐ฟ Person Tipping Hand: Dark Skin Tone
๐โโ๏ธ Man Tipping Hand
๐๐ปโโ๏ธ Man Tipping Hand: Light Skin Tone
๐๐ผโโ๏ธ Man Tipping Hand: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Tipping Hand: Medium Skin Tone
๐๐พโโ๏ธ Man Tipping Hand: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Tipping Hand: Dark Skin Tone
๐โโ๏ธ Woman Tipping Hand
๐๐ปโโ๏ธ Woman Tipping Hand: Light Skin Tone
๐๐ผโโ๏ธ Woman Tipping Hand: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Tipping Hand: Medium Skin Tone
๐๐พโโ๏ธ Woman Tipping Hand: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Tipping Hand: Dark Skin Tone
๐ Person Raising Hand
๐๐ป Person Raising Hand: Light Skin Tone
๐๐ผ Person Raising Hand: Medium-Light Skin Tone
๐๐ฝ Person Raising Hand: Medium Skin Tone
๐๐พ Person Raising Hand: Medium-Dark Skin Tone
๐๐ฟ Person Raising Hand: Dark Skin Tone
๐โโ๏ธ Man Raising Hand
๐๐ปโโ๏ธ Man Raising Hand: Light Skin Tone
๐๐ผโโ๏ธ Man Raising Hand: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Raising Hand: Medium Skin Tone
๐๐พโโ๏ธ Man Raising Hand: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Raising Hand: Dark Skin Tone
๐โโ๏ธ Woman Raising Hand
๐๐ปโโ๏ธ Woman Raising Hand: Light Skin Tone
๐๐ผโโ๏ธ Woman Raising Hand: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Raising Hand: Medium Skin Tone
๐๐พโโ๏ธ Woman Raising Hand: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Raising Hand: Dark Skin Tone
๐ Person Bowing
๐๐ป Person Bowing: Light Skin Tone
๐๐ผ Person Bowing: Medium-Light Skin Tone
๐๐ฝ Person Bowing: Medium Skin Tone
๐๐พ Person Bowing: Medium-Dark Skin Tone
๐๐ฟ Person Bowing: Dark Skin Tone
๐โโ๏ธ Man Bowing
๐๐ปโโ๏ธ Man Bowing: Light Skin Tone
๐ค๐ป Handshake, Type-1-2
๐๐ผโโ๏ธ Man Bowing: Medium-Light Skin Tone
๐ค๐ผ Handshake, Type-3
๐ค๐ฝ Handshake, Type-4
๐๐ฝโโ๏ธ Man Bowing: Medium Skin Tone
๐ค๐พ Handshake, Type-5
๐ค๐ฟ Handshake, Type-6
๐๐พโโ๏ธ Man Bowing: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Bowing: Dark Skin Tone
๐โโ๏ธ Woman Bowing
๐๐ปโโ๏ธ Woman Bowing: Light Skin Tone
๐๐ผโโ๏ธ Woman Bowing: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Bowing: Medium Skin Tone
๐๐พโโ๏ธ Woman Bowing: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Bowing: Dark Skin Tone
๐คฆ Person Facepalming
๐คฆ๐ป Person Facepalming: Light Skin Tone
๐คฆ๐ผ Person Facepalming: Medium-Light Skin Tone
๐คฆ๐ฝ Person Facepalming: Medium Skin Tone
๐คฆ๐พ Person Facepalming: Medium-Dark Skin Tone
๐คฆ๐ฟ Person Facepalming: Dark Skin Tone
๐คฆโโ๏ธ Man Facepalming
๐คฆ๐ปโโ๏ธ Man Facepalming: Light Skin Tone
๐คฆ๐ผโโ๏ธ Man Facepalming: Medium-Light Skin Tone
๐คฆ๐ฝโโ๏ธ Man Facepalming: Medium Skin Tone
๐คฆ๐พโโ๏ธ Man Facepalming: Medium-Dark Skin Tone
๐คฆ๐ฟโโ๏ธ Man Facepalming: Dark Skin Tone
๐คฆโโ๏ธ Woman Facepalming
๐คฆ๐ปโโ๏ธ Woman Facepalming: Light Skin Tone
๐คฆ๐ผโโ๏ธ Woman Facepalming: Medium-Light Skin Tone
๐คฆ๐ฝโโ๏ธ Woman Facepalming: Medium Skin Tone
๐คฆ๐พโโ๏ธ Woman Facepalming: Medium-Dark Skin Tone
๐คฆ๐ฟโโ๏ธ Woman Facepalming: Dark Skin Tone
๐คท Person Shrugging
๐คท๐ป Person Shrugging: Light Skin Tone
๐คท๐ผ Person Shrugging: Medium-Light Skin Tone
๐คท๐ฝ Person Shrugging: Medium Skin Tone
๐คท๐พ Person Shrugging: Medium-Dark Skin Tone
๐คท๐ฟ Person Shrugging: Dark Skin Tone
๐คทโโ๏ธ Man Shrugging
๐คท๐ปโโ๏ธ Man Shrugging: Light Skin Tone
๐คท๐ผโโ๏ธ Man Shrugging: Medium-Light Skin Tone
๐คท๐ฝโโ๏ธ Man Shrugging: Medium Skin Tone
๐คท๐พโโ๏ธ Man Shrugging: Medium-Dark Skin Tone
๐คท๐ฟโโ๏ธ Man Shrugging: Dark Skin Tone
๐คทโโ๏ธ Woman Shrugging
๐คท๐ปโโ๏ธ Woman Shrugging: Light Skin Tone
๐คท๐ผโโ๏ธ Woman Shrugging: Medium-Light Skin Tone
๐คท๐ฝโโ๏ธ Woman Shrugging: Medium Skin Tone
๐คท๐พโโ๏ธ Woman Shrugging: Medium-Dark Skin Tone
๐คท๐ฟโโ๏ธ Woman Shrugging: Dark Skin Tone
๐ Person Getting Massage
๐๐ป Person Getting Massage: Light Skin Tone
๐๐ผ Person Getting Massage: Medium-Light Skin Tone
๐๐ฝ Person Getting Massage: Medium Skin Tone
๐๐พ Person Getting Massage: Medium-Dark Skin Tone
๐๐ฟ Person Getting Massage: Dark Skin Tone
๐โโ๏ธ Man Getting Massage
๐๐ปโโ๏ธ Man Getting Massage: Light Skin Tone
๐๐ผโโ๏ธ Man Getting Massage: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Getting Massage: Medium Skin Tone
๐๐พโโ๏ธ Man Getting Massage: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Getting Massage: Dark Skin Tone
๐โโ๏ธ Woman Getting Massage
๐๐ปโโ๏ธ Woman Getting Massage: Light Skin Tone
๐๐ผโโ๏ธ Woman Getting Massage: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Getting Massage: Medium Skin Tone
๐๐พโโ๏ธ Woman Getting Massage: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Getting Massage: Dark Skin Tone
๐ Person Getting Haircut
๐๐ป Person Getting Haircut: Light Skin Tone
๐๐ผ Person Getting Haircut: Medium-Light Skin Tone
๐๐ฝ Person Getting Haircut: Medium Skin Tone
๐๐พ Person Getting Haircut: Medium-Dark Skin Tone
๐๐ฟ Person Getting Haircut: Dark Skin Tone
๐โโ๏ธ Man Getting Haircut
๐๐ปโโ๏ธ Man Getting Haircut: Light Skin Tone
๐๐ผโโ๏ธ Man Getting Haircut: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Getting Haircut: Medium Skin Tone
๐๐พโโ๏ธ Man Getting Haircut: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Getting Haircut: Dark Skin Tone
๐โโ๏ธ Woman Getting Haircut
๐๐ปโโ๏ธ Woman Getting Haircut: Light Skin Tone
๐๐ผโโ๏ธ Woman Getting Haircut: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Getting Haircut: Medium Skin Tone
๐๐พโโ๏ธ Woman Getting Haircut: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Getting Haircut: Dark Skin Tone
๐ถ Person Walking
๐ถ๐ป Person Walking: Light Skin Tone
๐ถ๐ผ Person Walking: Medium-Light Skin Tone
๐ถ๐ฝ Person Walking: Medium Skin Tone
๐ถ๐พ Person Walking: Medium-Dark Skin Tone
๐ถ๐ฟ Person Walking: Dark Skin Tone
๐ถโโ๏ธ Man Walking
๐ถ๐ปโโ๏ธ Man Walking: Light Skin Tone
๐ถ๐ผโโ๏ธ Man Walking: Medium-Light Skin Tone
๐ถ๐ฝโโ๏ธ Man Walking: Medium Skin Tone
๐ถ๐พโโ๏ธ Man Walking: Medium-Dark Skin Tone
๐ถ๐ฟโโ๏ธ Man Walking: Dark Skin Tone
๐ถโโ๏ธ Woman Walking
๐ถ๐ปโโ๏ธ Woman Walking: Light Skin Tone
๐ถ๐ผโโ๏ธ Woman Walking: Medium-Light Skin Tone
๐ถ๐ฝโโ๏ธ Woman Walking: Medium Skin Tone
๐ถ๐พโโ๏ธ Woman Walking: Medium-Dark Skin Tone
๐ถ๐ฟโโ๏ธ Woman Walking: Dark Skin Tone
๐ Person Running
๐๐ป Person Running: Light Skin Tone
๐๐ผ Person Running: Medium-Light Skin Tone
๐๐ฝ Person Running: Medium Skin Tone
๐๐พ Person Running: Medium-Dark Skin Tone
๐๐ฟ Person Running: Dark Skin Tone
๐โโ๏ธ Man Running
๐๐ปโโ๏ธ Man Running: Light Skin Tone
๐๐ผโโ๏ธ Man Running: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Running: Medium Skin Tone
๐๐พโโ๏ธ Man Running: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Running: Dark Skin Tone
๐โโ๏ธ Woman Running
๐๐ปโโ๏ธ Woman Running: Light Skin Tone
๐๐ผโโ๏ธ Woman Running: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Running: Medium Skin Tone
๐๐พโโ๏ธ Woman Running: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Running: Dark Skin Tone
๐ Woman Dancing
๐๐ป Woman Dancing: Light Skin Tone
๐๐ผ Woman Dancing: Medium-Light Skin Tone
๐๐ฝ Woman Dancing: Medium Skin Tone
๐๐พ Woman Dancing: Medium-Dark Skin Tone
๐๐ฟ Woman Dancing: Dark Skin Tone
๐บ Man Dancing
๐บ๐ป Man Dancing: Light Skin Tone
๐บ๐ผ Man Dancing: Medium-Light Skin Tone
๐บ๐ฝ Man Dancing: Medium Skin Tone
๐บ๐พ Man Dancing: Medium-Dark Skin Tone
๐บ๐ฟ Man Dancing: Dark Skin Tone
๐ฏ People With Bunny Ears Partying
๐ฏโโ๏ธ Men With Bunny Ears Partying
๐ฏโโ๏ธ Women With Bunny Ears Partying
๐ง Person in Steamy Room
๐ง๐ป Person in Steamy Room: Light Skin Tone
๐ง๐ผ Person in Steamy Room: Medium-Light Skin Tone
๐ง๐ฝ Person in Steamy Room: Medium Skin Tone
๐ง๐พ Person in Steamy Room: Medium-Dark Skin Tone
๐ง๐ฟ Person in Steamy Room: Dark Skin Tone
๐งโโ๏ธ Woman in Steamy Room
๐ง๐ปโโ๏ธ Woman in Steamy Room: Light Skin Tone
๐ง๐ผโโ๏ธ Woman in Steamy Room: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Woman in Steamy Room: Medium Skin Tone
๐ง๐พโโ๏ธ Woman in Steamy Room: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Woman in Steamy Room: Dark Skin Tone
๐งโโ๏ธ Man in Steamy Room
๐ง๐ปโโ๏ธ Man in Steamy Room: Light Skin Tone
๐ง๐ผโโ๏ธ Man in Steamy Room: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Man in Steamy Room: Medium Skin Tone
๐ง๐พโโ๏ธ Man in Steamy Room: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Man in Steamy Room: Dark Skin Tone
๐ง Person Climbing
๐ง๐ป Person Climbing: Light Skin Tone
๐ง๐ผ Person Climbing: Medium-Light Skin Tone
๐ง๐ฝ Person Climbing: Medium Skin Tone
๐ง๐พ Person Climbing: Medium-Dark Skin Tone
๐ง๐ฟ Person Climbing: Dark Skin Tone
๐งโโ๏ธ Woman Climbing
๐ง๐ปโโ๏ธ Woman Climbing: Light Skin Tone
๐ง๐ผโโ๏ธ Woman Climbing: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Woman Climbing: Medium Skin Tone
๐ง๐พโโ๏ธ Woman Climbing: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Woman Climbing: Dark Skin Tone
๐งโโ๏ธ Man Climbing
๐ง๐ปโโ๏ธ Man Climbing: Light Skin Tone
๐ง๐ผโโ๏ธ Man Climbing: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Man Climbing: Medium Skin Tone
๐ง๐พโโ๏ธ Man Climbing: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Man Climbing: Dark Skin Tone
๐ง Person in Lotus Position
๐ง๐ป Person in Lotus Position: Light Skin Tone
๐ง๐ผ Person in Lotus Position: Medium-Light Skin Tone
๐ง๐ฝ Person in Lotus Position: Medium Skin Tone
๐ง๐พ Person in Lotus Position: Medium-Dark Skin Tone
๐ง๐ฟ Person in Lotus Position: Dark Skin Tone
๐งโโ๏ธ Woman in Lotus Position
๐ง๐ปโโ๏ธ Woman in Lotus Position: Light Skin Tone
๐ง๐ผโโ๏ธ Woman in Lotus Position: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Woman in Lotus Position: Medium Skin Tone
๐ง๐พโโ๏ธ Woman in Lotus Position: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Woman in Lotus Position: Dark Skin Tone
๐งโโ๏ธ Man in Lotus Position
๐ง๐ปโโ๏ธ Man in Lotus Position: Light Skin Tone
๐ง๐ผโโ๏ธ Man in Lotus Position: Medium-Light Skin Tone
๐ง๐ฝโโ๏ธ Man in Lotus Position: Medium Skin Tone
๐ง๐พโโ๏ธ Man in Lotus Position: Medium-Dark Skin Tone
๐ง๐ฟโโ๏ธ Man in Lotus Position: Dark Skin Tone
๐ Person Taking Bath
๐๐ป Person Taking Bath: Light Skin Tone
๐๐ผ Person Taking Bath: Medium-Light Skin Tone
๐๐ฝ Person Taking Bath: Medium Skin Tone
๐๐พ Person Taking Bath: Medium-Dark Skin Tone
๐๐ฟ Person Taking Bath: Dark Skin Tone
๐ Person in Bed
๐๐ป Person in Bed: Light Skin Tone
๐๐ผ Person in Bed: Medium-Light Skin Tone
๐๐ฝ Person in Bed: Medium Skin Tone
๐๐พ Person in Bed: Medium-Dark Skin Tone
๐๐ฟ Person in Bed: Dark Skin Tone
๐ด Man in Business Suit Levitating
๐ด๐ป Man in Business Suit Levitating: Light Skin Tone
๐ด๐ผ Man in Business Suit Levitating: Medium-Light Skin Tone
๐ด๐ฝ Man in Business Suit Levitating: Medium Skin Tone
๐ด๐พ Man in Business Suit Levitating: Medium-Dark Skin Tone
๐ด๐ฟ Man in Business Suit Levitating: Dark Skin Tone
๐ฃ Speaking Head
๐ค Bust in Silhouette
๐ฅ Busts in Silhouette
๐คบ Person Fencing
๐ Horse Racing
๐๐ป Horse Racing: Light Skin Tone
๐๐ผ Horse Racing: Medium-Light Skin Tone
๐๐ฝ Horse Racing: Medium Skin Tone
๐๐พ Horse Racing: Medium-Dark Skin Tone
๐๐ฟ Horse Racing: Dark Skin Tone
โท Skier
๐ Snowboarder
๐๐ป Snowboarder: Light Skin Tone
๐๐ผ Snowboarder: Medium-Light Skin Tone
๐๐ฝ Snowboarder: Medium Skin Tone
๐๐พ Snowboarder: Medium-Dark Skin Tone
๐๐ฟ Snowboarder: Dark Skin Tone
๐ Person Golfing
๐๐ป Person Golfing: Light Skin Tone
๐๐ผ Person Golfing: Medium-Light Skin Tone
๐๐ฝ Person Golfing: Medium Skin Tone
๐๐พ Person Golfing: Medium-Dark Skin Tone
๐๐ฟ Person Golfing: Dark Skin Tone
๐๏ธโโ๏ธ Man Golfing
๐๐ปโโ๏ธ Man Golfing: Light Skin Tone
๐๐ผโโ๏ธ Man Golfing: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Golfing: Medium Skin Tone
๐๐พโโ๏ธ Man Golfing: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Golfing: Dark Skin Tone
๐๏ธโโ๏ธ Woman Golfing
๐๐ปโโ๏ธ Woman Golfing: Light Skin Tone
๐๐ผโโ๏ธ Woman Golfing: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Golfing: Medium Skin Tone
๐๐พโโ๏ธ Woman Golfing: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Golfing: Dark Skin Tone
๐ Person Surfing
๐๐ป Person Surfing: Light Skin Tone
๐๐ผ Person Surfing: Medium-Light Skin Tone
๐๐ฝ Person Surfing: Medium Skin Tone
๐๐พ Person Surfing: Medium-Dark Skin Tone
๐๐ฟ Person Surfing: Dark Skin Tone
๐โโ๏ธ Man Surfing
๐๐ปโโ๏ธ Man Surfing: Light Skin Tone
๐๐ผโโ๏ธ Man Surfing: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Surfing: Medium Skin Tone
๐๐พโโ๏ธ Man Surfing: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Surfing: Dark Skin Tone
๐โโ๏ธ Woman Surfing
๐๐ปโโ๏ธ Woman Surfing: Light Skin Tone
๐๐ผโโ๏ธ Woman Surfing: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Surfing: Medium Skin Tone
๐๐พโโ๏ธ Woman Surfing: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Surfing: Dark Skin Tone
๐ฃ Person Rowing Boat
๐ฃ๐ป Person Rowing Boat: Light Skin Tone
๐ฃ๐ผ Person Rowing Boat: Medium-Light Skin Tone
๐ฃ๐ฝ Person Rowing Boat: Medium Skin Tone
๐ฃ๐พ Person Rowing Boat: Medium-Dark Skin Tone
๐ฃ๐ฟ Person Rowing Boat: Dark Skin Tone
๐ฃโโ๏ธ Man Rowing Boat
๐ฃ๐ปโโ๏ธ Man Rowing Boat: Light Skin Tone
๐ฃ๐ผโโ๏ธ Man Rowing Boat: Medium-Light Skin Tone
๐ฃ๐ฝโโ๏ธ Man Rowing Boat: Medium Skin Tone
๐ฃ๐พโโ๏ธ Man Rowing Boat: Medium-Dark Skin Tone
๐ฃ๐ฟโโ๏ธ Man Rowing Boat: Dark Skin Tone
๐ฃโโ๏ธ Woman Rowing Boat
๐ฃ๐ปโโ๏ธ Woman Rowing Boat: Light Skin Tone
๐ฃ๐ผโโ๏ธ Woman Rowing Boat: Medium-Light Skin Tone
๐ฃ๐ฝโโ๏ธ Woman Rowing Boat: Medium Skin Tone
๐ฃ๐พโโ๏ธ Woman Rowing Boat: Medium-Dark Skin Tone
๐ฃ๐ฟโโ๏ธ Woman Rowing Boat: Dark Skin Tone
๐ Person Swimming
๐๐ป Person Swimming: Light Skin Tone
๐๐ผ Person Swimming: Medium-Light Skin Tone
๐๐ฝ Person Swimming: Medium Skin Tone
๐๐พ Person Swimming: Medium-Dark Skin Tone
๐๐ฟ Person Swimming: Dark Skin Tone
๐โโ๏ธ Man Swimming
๐๐ปโโ๏ธ Man Swimming: Light Skin Tone
๐๐ผโโ๏ธ Man Swimming: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Swimming: Medium Skin Tone
๐๐พโโ๏ธ Man Swimming: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Swimming: Dark Skin Tone
๐โโ๏ธ Woman Swimming
๐๐ปโโ๏ธ Woman Swimming: Light Skin Tone
๐๐ผโโ๏ธ Woman Swimming: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Swimming: Medium Skin Tone
๐๐พโโ๏ธ Woman Swimming: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Swimming: Dark Skin Tone
โน Person Bouncing Ball
โน๐ป Person Bouncing Ball: Light Skin Tone
โน๐ผ Person Bouncing Ball: Medium-Light Skin Tone
โน๐ฝ Person Bouncing Ball: Medium Skin Tone
โน๐พ Person Bouncing Ball: Medium-Dark Skin Tone
โน๐ฟ Person Bouncing Ball: Dark Skin Tone
โน๏ธโโ๏ธ Man Bouncing Ball
โน๐ปโโ๏ธ Man Bouncing Ball: Light Skin Tone
โน๐ผโโ๏ธ Man Bouncing Ball: Medium-Light Skin Tone
โน๐ฝโโ๏ธ Man Bouncing Ball: Medium Skin Tone
โน๐พโโ๏ธ Man Bouncing Ball: Medium-Dark Skin Tone
โน๐ฟโโ๏ธ Man Bouncing Ball: Dark Skin Tone
โน๏ธโโ๏ธ Woman Bouncing Ball
โน๐ปโโ๏ธ Woman Bouncing Ball: Light Skin Tone
โน๐ผโโ๏ธ Woman Bouncing Ball: Medium-Light Skin Tone
โน๐ฝโโ๏ธ Woman Bouncing Ball: Medium Skin Tone
โน๐พโโ๏ธ Woman Bouncing Ball: Medium-Dark Skin Tone
โน๐ฟโโ๏ธ Woman Bouncing Ball: Dark Skin Tone
๐ Person Lifting Weights
๐๐ป Person Lifting Weights: Light Skin Tone
๐๐ผ Person Lifting Weights: Medium-Light Skin Tone
๐๐ฝ Person Lifting Weights: Medium Skin Tone
๐๐พ Person Lifting Weights: Medium-Dark Skin Tone
๐๐ฟ Person Lifting Weights: Dark Skin Tone
๐๏ธโโ๏ธ Man Lifting Weights
๐๐ปโโ๏ธ Man Lifting Weights: Light Skin Tone
๐๐ผโโ๏ธ Man Lifting Weights: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Man Lifting Weights: Medium Skin Tone
๐๐พโโ๏ธ Man Lifting Weights: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Man Lifting Weights: Dark Skin Tone
๐๏ธโโ๏ธ Woman Lifting Weights
๐๐ปโโ๏ธ Woman Lifting Weights: Light Skin Tone
๐๐ผโโ๏ธ Woman Lifting Weights: Medium-Light Skin Tone
๐๐ฝโโ๏ธ Woman Lifting Weights: Medium Skin Tone
๐๐พโโ๏ธ Woman Lifting Weights: Medium-Dark Skin Tone
๐๐ฟโโ๏ธ Woman Lifting Weights: Dark Skin Tone
๐ด Person Biking
๐ด๐ป Person Biking: Light Skin Tone
๐ด๐ผ Person Biking: Medium-Light Skin Tone
๐ด๐ฝ Person Biking: Medium Skin Tone
๐ด๐พ Person Biking: Medium-Dark Skin Tone
๐ด๐ฟ Person Biking: Dark Skin Tone
๐ดโโ๏ธ Man Biking
๐ด๐ปโโ๏ธ Man Biking: Light Skin Tone
๐ด๐ผโโ๏ธ Man Biking: Medium-Light Skin Tone
๐ด๐ฝโโ๏ธ Man Biking: Medium Skin Tone
๐ด๐พโโ๏ธ Man Biking: Medium-Dark Skin Tone
๐ด๐ฟโโ๏ธ Man Biking: Dark Skin Tone
๐ดโโ๏ธ Woman Biking
๐ด๐ปโโ๏ธ Woman Biking: Light Skin Tone
๐ด๐ผโโ๏ธ Woman Biking: Medium-Light Skin Tone
๐ด๐ฝโโ๏ธ Woman Biking: Medium Skin Tone
๐ด๐พโโ๏ธ Woman Biking: Medium-Dark Skin Tone
๐ด๐ฟโโ๏ธ Woman Biking: Dark Skin Tone
๐ต Person Mountain Biking
๐ต๐ป Person Mountain Biking: Light Skin Tone
๐ต๐ผ Person Mountain Biking: Medium-Light Skin Tone
๐ต๐ฝ Person Mountain Biking: Medium Skin Tone
๐ต๐พ Person Mountain Biking: Medium-Dark Skin Tone
๐ต๐ฟ Person Mountain Biking: Dark Skin Tone
๐ตโโ๏ธ Man Mountain Biking
๐ต๐ปโโ๏ธ Man Mountain Biking: Light Skin Tone
๐ต๐ผโโ๏ธ Man Mountain Biking: Medium-Light Skin Tone
๐ต๐ฝโโ๏ธ Man Mountain Biking: Medium Skin Tone
๐ต๐พโโ๏ธ Man Mountain Biking: Medium-Dark Skin Tone
๐ต๐ฟโโ๏ธ Man Mountain Biking: Dark Skin Tone
๐ตโโ๏ธ Woman Mountain Biking
๐ต๐ปโโ๏ธ Woman Mountain Biking: Light Skin Tone
๐ต๐ผโโ๏ธ Woman Mountain Biking: Medium-Light Skin Tone
๐ต๐ฝโโ๏ธ Woman Mountain Biking: Medium Skin Tone
๐ต๐พโโ๏ธ Woman Mountain Biking: Medium-Dark Skin Tone
๐ต๐ฟโโ๏ธ Woman Mountain Biking: Dark Skin Tone
๐ Racing Car
๐ Motorcycle
๐คธ Person Cartwheeling
๐คธ๐ป Person Cartwheeling: Light Skin Tone
๐คธ๐ผ Person Cartwheeling: Medium-Light Skin Tone
๐คธ๐ฝ Person Cartwheeling: Medium Skin Tone
๐คธ๐พ Person Cartwheeling: Medium-Dark Skin Tone
๐คธ๐ฟ Person Cartwheeling: Dark Skin Tone
๐คธโโ๏ธ Man Cartwheeling
๐คธ๐ปโโ๏ธ Man Cartwheeling: Light Skin Tone
๐คธ๐ผโโ๏ธ Man Cartwheeling: Medium-Light Skin Tone
๐คธ๐ฝโโ๏ธ Man Cartwheeling: Medium Skin Tone
๐คธ๐พโโ๏ธ Man Cartwheeling: Medium-Dark Skin Tone
๐คธ๐ฟโโ๏ธ Man Cartwheeling: Dark Skin Tone
๐คธโโ๏ธ Woman Cartwheeling
๐คธ๐ปโโ๏ธ Woman Cartwheeling: Light Skin Tone
๐คธ๐ผโโ๏ธ Woman Cartwheeling: Medium-Light Skin Tone
๐คธ๐ฝโโ๏ธ Woman Cartwheeling: Medium Skin Tone
๐คธ๐พโโ๏ธ Woman Cartwheeling: Medium-Dark Skin Tone
๐คธ๐ฟโโ๏ธ Woman Cartwheeling: Dark Skin Tone
๐คผ People Wrestling
๐คผโโ๏ธ Men Wrestling
๐คผโโ๏ธ Women Wrestling
๐คฝ Person Playing Water Polo
๐คฝ๐ป Person Playing Water Polo: Light Skin Tone
๐คฝ๐ผ Person Playing Water Polo: Medium-Light Skin Tone
๐คฝ๐ฝ Person Playing Water Polo: Medium Skin Tone
๐คฝ๐พ Person Playing Water Polo: Medium-Dark Skin Tone
๐คฝ๐ฟ Person Playing Water Polo: Dark Skin Tone
๐คฝโโ๏ธ Man Playing Water Polo
๐คฝ๐ปโโ๏ธ Man Playing Water Polo: Light Skin Tone
๐คฝ๐ผโโ๏ธ Man Playing Water Polo: Medium-Light Skin Tone
๐คฝ๐ฝโโ๏ธ Man Playing Water Polo: Medium Skin Tone
๐คฝ๐พโโ๏ธ Man Playing Water Polo: Medium-Dark Skin Tone
๐คฝ๐ฟโโ๏ธ Man Playing Water Polo: Dark Skin Tone
๐คฝโโ๏ธ Woman Playing Water Polo
๐คฝ๐ปโโ๏ธ Woman Playing Water Polo: Light Skin Tone
๐คฝ๐ผโโ๏ธ Woman Playing Water Polo: Medium-Light Skin Tone
๐คฝ๐ฝโโ๏ธ Woman Playing Water Polo: Medium Skin Tone
๐คฝ๐พโโ๏ธ Woman Playing Water Polo: Medium-Dark Skin Tone
๐คฝ๐ฟโโ๏ธ Woman Playing Water Polo: Dark Skin Tone
๐คพ Person Playing Handball
๐คพ๐ป Person Playing Handball: Light Skin Tone
๐คพ๐ผ Person Playing Handball: Medium-Light Skin Tone
๐คพ๐ฝ Person Playing Handball: Medium Skin Tone
๐คพ๐พ Person Playing Handball: Medium-Dark Skin Tone
๐คพ๐ฟ Person Playing Handball: Dark Skin Tone
๐คพโโ๏ธ Man Playing Handball
๐คพ๐ปโโ๏ธ Man Playing Handball: Light Skin Tone
๐คพ๐ผโโ๏ธ Man Playing Handball: Medium-Light Skin Tone
๐คพ๐ฝโโ๏ธ Man Playing Handball: Medium Skin Tone
๐คพ๐พโโ๏ธ Man Playing Handball: Medium-Dark Skin Tone
๐คพ๐ฟโโ๏ธ Man Playing Handball: Dark Skin Tone
๐คพโโ๏ธ Woman Playing Handball
๐คพ๐ปโโ๏ธ Woman Playing Handball: Light Skin Tone
๐คพ๐ผโโ๏ธ Woman Playing Handball: Medium-Light Skin Tone
๐คพ๐ฝโโ๏ธ Woman Playing Handball: Medium Skin Tone
๐คพ๐พโโ๏ธ Woman Playing Handball: Medium-Dark Skin Tone
๐คพ๐ฟโโ๏ธ Woman Playing Handball: Dark Skin Tone
๐คน Person Juggling
๐คน๐ป Person Juggling: Light Skin Tone
๐คน๐ผ Person Juggling: Medium-Light Skin Tone
๐คน๐ฝ Person Juggling: Medium Skin Tone
๐คน๐พ Person Juggling: Medium-Dark Skin Tone
๐คน๐ฟ Person Juggling: Dark Skin Tone
๐คนโโ๏ธ Man Juggling
๐คน๐ปโโ๏ธ Man Juggling: Light Skin Tone
๐คน๐ผโโ๏ธ Man Juggling: Medium-Light Skin Tone
๐คน๐ฝโโ๏ธ Man Juggling: Medium Skin Tone
๐คน๐พโโ๏ธ Man Juggling: Medium-Dark Skin Tone
๐คน๐ฟโโ๏ธ Man Juggling: Dark Skin Tone
๐คนโโ๏ธ Woman Juggling
๐คน๐ปโโ๏ธ Woman Juggling: Light Skin Tone
๐คน๐ผโโ๏ธ Woman Juggling: Medium-Light Skin Tone
๐คน๐ฝโโ๏ธ Woman Juggling: Medium Skin Tone
๐คน๐พโโ๏ธ Woman Juggling: Medium-Dark Skin Tone
๐คน๐ฟโโ๏ธ Woman Juggling: Dark Skin Tone
๐คผ๐ป Wrestlers, Type-1-2
๐คผ๐ผ Wrestlers, Type-3
๐ซ Man and Woman Holding Hands
๐คผ๐ฝ Wrestlers, Type-4
๐ฌ Two Men Holding Hands
๐คผ๐พ Wrestlers, Type-5
๐ญ Two Women Holding Hands
๐คผ๐ฟ Wrestlers, Type-6
๐ Kiss
๐ฉโโค๏ธโ๐โ๐จ Kiss: Woman, Man
๐คผ๐ปโโ๏ธ Men Wrestling, Type-1-2
๐คผ๐ผโโ๏ธ Men Wrestling, Type-3
๐คผ๐ฝโโ๏ธ Men Wrestling, Type-4
๐จโโค๏ธโ๐โ๐จ Kiss: Man, Man
๐คผ๐พโโ๏ธ Men Wrestling, Type-5
๐คผ๐ฟโโ๏ธ Men Wrestling, Type-6
๐ฉโโค๏ธโ๐โ๐ฉ Kiss: Woman, Woman
๐คผ๐ปโโ๏ธ Women Wrestling, Type-1-2
๐ Couple With Heart
๐คผ๐ผโโ๏ธ Women Wrestling, Type-3
๐ฉโโค๏ธโ๐จ Couple With Heart: Woman, Man
๐คผ๐ฝโโ๏ธ Women Wrestling, Type-4
๐คผ๐พโโ๏ธ Women Wrestling, Type-5
๐จโโค๏ธโ๐จ Couple With Heart: Man, Man
๐คผ๐ฟโโ๏ธ Women Wrestling, Type-6
๐ฉโโค๏ธโ๐ฉ Couple With Heart: Woman, Woman
๐ช Family
๐จโ๐ฉโ๐ฆ Family: Man, Woman, Boy
๐จโ๐ฉโ๐ง Family: Man, Woman, Girl
๐จโ๐ฉโ๐งโ๐ฆ Family: Man, Woman, Girl, Boy
๐จโ๐ฉโ๐ฆโ๐ฆ Family: Man, Woman, Boy, Boy
๐จโ๐ฉโ๐งโ๐ง Family: Man, Woman, Girl, Girl
๐จโ๐จโ๐ฆ Family: Man, Man, Boy
๐จโ๐จโ๐ง Family: Man, Man, Girl
๐จโ๐จโ๐งโ๐ฆ Family: Man, Man, Girl, Boy
๐จโ๐จโ๐ฆโ๐ฆ Family: Man, Man, Boy, Boy
๐จโ๐จโ๐งโ๐ง Family: Man, Man, Girl, Girl
๐ฉโ๐ฉโ๐ฆ Family: Woman, Woman, Boy
๐ฉโ๐ฉโ๐ง Family: Woman, Woman, Girl
๐ฉโ๐ฉโ๐งโ๐ฆ Family: Woman, Woman, Girl, Boy
๐ฉโ๐ฉโ๐ฆโ๐ฆ Family: Woman, Woman, Boy, Boy
๐ฉโ๐ฉโ๐งโ๐ง Family: Woman, Woman, Girl, Girl
๐จโ๐ฆ Family: Man, Boy
๐จโ๐ฆโ๐ฆ Family: Man, Boy, Boy
๐จโ๐ง Family: Man, Girl
๐จโ๐งโ๐ฆ Family: Man, Girl, Boy
๐จโ๐งโ๐ง Family: Man, Girl, Girl
๐ฉโ๐ฆ Family: Woman, Boy
๐ฉโ๐ฆโ๐ฆ Family: Woman, Boy, Boy
๐ฉโ๐ง Family: Woman, Girl
๐ฉโ๐งโ๐ฆ Family: Woman, Girl, Boy
๐ฉโ๐งโ๐ง Family: Woman, Girl, Girl
๐คณ Selfie
๐คณ๐ป Selfie: Light Skin Tone
๐คณ๐ผ Selfie: Medium-Light Skin Tone
๐คณ๐ฝ Selfie: Medium Skin Tone
๐คณ๐พ Selfie: Medium-Dark Skin Tone
๐คณ๐ฟ Selfie: Dark Skin Tone
๐ช Flexed Biceps
๐ช๐ป Flexed Biceps: Light Skin Tone
๐ช๐ผ Flexed Biceps: Medium-Light Skin Tone
๐ช๐ฝ Flexed Biceps: Medium Skin Tone
๐ช๐พ Flexed Biceps: Medium-Dark Skin Tone
๐ช๐ฟ Flexed Biceps: Dark Skin Tone
๐ Backhand Index Pointing Left
๐๐ป Backhand Index Pointing Left: Light Skin Tone
๐๐ผ Backhand Index Pointing Left: Medium-Light Skin Tone
๐๐ฝ Backhand Index Pointing Left: Medium Skin Tone
๐๐พ Backhand Index Pointing Left: Medium-Dark Skin Tone
๐๐ฟ Backhand Index Pointing Left: Dark Skin Tone
๐ Backhand Index Pointing Right
๐๐ป Backhand Index Pointing Right: Light Skin Tone
๐๐ผ Backhand Index Pointing Right: Medium-Light Skin Tone
๐๐ฝ Backhand Index Pointing Right: Medium Skin Tone
๐๐พ Backhand Index Pointing Right: Medium-Dark Skin Tone
๐๐ฟ Backhand Index Pointing Right: Dark Skin Tone
โ Index Pointing Up
โ๐ป Index Pointing Up: Light Skin Tone
โ๐ผ Index Pointing Up: Medium-Light Skin Tone
โ๐ฝ Index Pointing Up: Medium Skin Tone
โ๐พ Index Pointing Up: Medium-Dark Skin Tone
โ๐ฟ Index Pointing Up: Dark Skin Tone
๐ Backhand Index Pointing Up
๐๐ป Backhand Index Pointing Up: Light Skin Tone
๐๐ผ Backhand Index Pointing Up: Medium-Light Skin Tone
๐๐ฝ Backhand Index Pointing Up: Medium Skin Tone
๐๐พ Backhand Index Pointing Up: Medium-Dark Skin Tone
๐๐ฟ Backhand Index Pointing Up: Dark Skin Tone
๐ Middle Finger
๐๐ป Middle Finger: Light Skin Tone
๐๐ผ Middle Finger: Medium-Light Skin Tone
๐๐ฝ Middle Finger: Medium Skin Tone
๐๐พ Middle Finger: Medium-Dark Skin Tone
๐๐ฟ Middle Finger: Dark Skin Tone
๐ Backhand Index Pointing Down
๐๐ป Backhand Index Pointing Down: Light Skin Tone
๐๐ผ Backhand Index Pointing Down: Medium-Light Skin Tone
๐๐ฝ Backhand Index Pointing Down: Medium Skin Tone
๐๐พ Backhand Index Pointing Down: Medium-Dark Skin Tone
๐๐ฟ Backhand Index Pointing Down: Dark Skin Tone
โ Victory Hand
โ๐ป Victory Hand: Light Skin Tone
โ๐ผ Victory Hand: Medium-Light Skin Tone
โ๐ฝ Victory Hand: Medium Skin Tone
โ๐พ Victory Hand: Medium-Dark Skin Tone
โ๐ฟ Victory Hand: Dark Skin Tone
๐ค Crossed Fingers
๐ค๐ป Crossed Fingers: Light Skin Tone
๐ค๐ผ Crossed Fingers: Medium-Light Skin Tone
๐ค๐ฝ Crossed Fingers: Medium Skin Tone
๐ค๐พ Crossed Fingers: Medium-Dark Skin Tone
๐ค๐ฟ Crossed Fingers: Dark Skin Tone
๐ Vulcan Salute
๐๐ป Vulcan Salute: Light Skin Tone
๐๐ผ Vulcan Salute: Medium-Light Skin Tone
๐๐ฝ Vulcan Salute: Medium Skin Tone
๐๐พ Vulcan Salute: Medium-Dark Skin Tone
๐๐ฟ Vulcan Salute: Dark Skin Tone
๐ค Sign of the Horns
๐ค๐ป Sign of the Horns: Light Skin Tone
๐ค๐ผ Sign of the Horns: Medium-Light Skin Tone
๐ค๐ฝ Sign of the Horns: Medium Skin Tone
๐ค๐พ Sign of the Horns: Medium-Dark Skin Tone
๐ค๐ฟ Sign of the Horns: Dark Skin Tone
๐ค Call Me Hand
๐ค๐ป Call Me Hand: Light Skin Tone
๐ค๐ผ Call Me Hand: Medium-Light Skin Tone
๐ค๐ฝ Call Me Hand: Medium Skin Tone
๐ค๐พ Call Me Hand: Medium-Dark Skin Tone
๐ค๐ฟ Call Me Hand: Dark Skin Tone
๐ Raised Hand With Fingers Splayed
๐๐ป Raised Hand With Fingers Splayed: Light Skin Tone
๐๐ผ Raised Hand With Fingers Splayed: Medium-Light Skin Tone
๐๐ฝ Raised Hand With Fingers Splayed: Medium Skin Tone
๐๐พ Raised Hand With Fingers Splayed: Medium-Dark Skin Tone
๐๐ฟ Raised Hand With Fingers Splayed: Dark Skin Tone
โ Raised Hand
โ๐ป Raised Hand: Light Skin Tone
โ๐ผ Raised Hand: Medium-Light Skin Tone
โ๐ฝ Raised Hand: Medium Skin Tone
โ๐พ Raised Hand: Medium-Dark Skin Tone
โ๐ฟ Raised Hand: Dark Skin Tone
๐ OK Hand
๐๐ป OK Hand: Light Skin Tone
๐๐ผ OK Hand: Medium-Light Skin Tone
๐๐ฝ OK Hand: Medium Skin Tone
๐๐พ OK Hand: Medium-Dark Skin Tone
๐๐ฟ OK Hand: Dark Skin Tone
๐ Thumbs Up
๐๐ป Thumbs Up: Light Skin Tone
๐๐ผ Thumbs Up: Medium-Light Skin Tone
๐๐ฝ Thumbs Up: Medium Skin Tone
๐๐พ Thumbs Up: Medium-Dark Skin Tone
๐๐ฟ Thumbs Up: Dark Skin Tone
๐ Thumbs Down
๐๐ป Thumbs Down: Light Skin Tone
๐๐ผ Thumbs Down: Medium-Light Skin Tone
๐๐ฝ Thumbs Down: Medium Skin Tone
๐๐พ Thumbs Down: Medium-Dark Skin Tone
๐๐ฟ Thumbs Down: Dark Skin Tone
โ Raised Fist
โ๐ป Raised Fist: Light Skin Tone
โ๐ผ Raised Fist: Medium-Light Skin Tone
โ๐ฝ Raised Fist: Medium Skin Tone
โ๐พ Raised Fist: Medium-Dark Skin Tone
โ๐ฟ Raised Fist: Dark Skin Tone
๐ Oncoming Fist
๐๐ป Oncoming Fist: Light Skin Tone
๐๐ผ Oncoming Fist: Medium-Light Skin Tone
๐๐ฝ Oncoming Fist: Medium Skin Tone
๐๐พ Oncoming Fist: Medium-Dark Skin Tone
๐๐ฟ Oncoming Fist: Dark Skin Tone
๐ค Left-Facing Fist
๐ค๐ป Left-Facing Fist: Light Skin Tone
๐ค๐ผ Left-Facing Fist: Medium-Light Skin Tone
๐ค๐ฝ Left-Facing Fist: Medium Skin Tone
๐ค๐พ Left-Facing Fist: Medium-Dark Skin Tone
๐ค๐ฟ Left-Facing Fist: Dark Skin Tone
๐ค Right-Facing Fist
๐ค๐ป Right-Facing Fist: Light Skin Tone
๐ค๐ผ Right-Facing Fist: Medium-Light Skin Tone
๐ค๐ฝ Right-Facing Fist: Medium Skin Tone
๐ค๐พ Right-Facing Fist: Medium-Dark Skin Tone
๐ค๐ฟ Right-Facing Fist: Dark Skin Tone
๐ค Raised Back of Hand
๐ค๐ป Raised Back of Hand: Light Skin Tone
๐ค๐ผ Raised Back of Hand: Medium-Light Skin Tone
๐ค๐ฝ Raised Back of Hand: Medium Skin Tone
๐ค๐พ Raised Back of Hand: Medium-Dark Skin Tone
๐ค๐ฟ Raised Back of Hand: Dark Skin Tone
๐ Waving Hand
๐๐ป Waving Hand: Light Skin Tone
๐๐ผ Waving Hand: Medium-Light Skin Tone
๐๐ฝ Waving Hand: Medium Skin Tone
๐๐พ Waving Hand: Medium-Dark Skin Tone
๐๐ฟ Waving Hand: Dark Skin Tone
๐ค Love-You Gesture
๐ค๐ป Love-You Gesture: Light Skin Tone
๐ค๐ผ Love-You Gesture: Medium-Light Skin Tone
๐ค๐ฝ Love-You Gesture: Medium Skin Tone
๐ค๐พ Love-You Gesture: Medium-Dark Skin Tone
๐ค๐ฟ Love-You Gesture: Dark Skin Tone
โ Writing Hand
โ๐ป Writing Hand: Light Skin Tone
โ๐ผ Writing Hand: Medium-Light Skin Tone
โ๐ฝ Writing Hand: Medium Skin Tone
โ๐พ Writing Hand: Medium-Dark Skin Tone
โ๐ฟ Writing Hand: Dark Skin Tone
๐ Clapping Hands
๐๐ป Clapping Hands: Light Skin Tone
๐๐ผ Clapping Hands: Medium-Light Skin Tone
๐๐ฝ Clapping Hands: Medium Skin Tone
๐๐พ Clapping Hands: Medium-Dark Skin Tone
๐๐ฟ Clapping Hands: Dark Skin Tone
๐ Open Hands
๐๐ป Open Hands: Light Skin Tone
๐๐ผ Open Hands: Medium-Light Skin Tone
๐๐ฝ Open Hands: Medium Skin Tone
๐๐พ Open Hands: Medium-Dark Skin Tone
๐๐ฟ Open Hands: Dark Skin Tone
๐ Raising Hands
๐๐ป Raising Hands: Light Skin Tone
๐๐ผ Raising Hands: Medium-Light Skin Tone
๐๐ฝ Raising Hands: Medium Skin Tone
๐๐พ Raising Hands: Medium-Dark Skin Tone
๐๐ฟ Raising Hands: Dark Skin Tone
๐คฒ Palms Up Together
๐คฒ๐ป Palms Up Together: Light Skin Tone
๐คฒ๐ผ Palms Up Together: Medium-Light Skin Tone
๐คฒ๐ฝ Palms Up Together: Medium Skin Tone
๐คฒ๐พ Palms Up Together: Medium-Dark Skin Tone
๐คฒ๐ฟ Palms Up Together: Dark Skin Tone
๐ Folded Hands
๐๐ป Folded Hands: Light Skin Tone
๐๐ผ Folded Hands: Medium-Light Skin Tone
๐๐ฝ Folded Hands: Medium Skin Tone
๐๐พ Folded Hands: Medium-Dark Skin Tone
๐๐ฟ Folded Hands: Dark Skin Tone
๐ค Handshake
💅 Nail Polish
💅🏻 Nail Polish: Light Skin Tone
💅🏼 Nail Polish: Medium-Light Skin Tone
💅🏽 Nail Polish: Medium Skin Tone
💅🏾 Nail Polish: Medium-Dark Skin Tone
💅🏿 Nail Polish: Dark Skin Tone
๐ Ear
๐๐ป Ear: Light Skin Tone
๐๐ผ Ear: Medium-Light Skin Tone
๐๐ฝ Ear: Medium Skin Tone
๐๐พ Ear: Medium-Dark Skin Tone
๐๐ฟ Ear: Dark Skin Tone
๐ Nose
๐๐ป Nose: Light Skin Tone
๐๐ผ Nose: Medium-Light Skin Tone
๐๐ฝ Nose: Medium Skin Tone
๐๐พ Nose: Medium-Dark Skin Tone
๐๐ฟ Nose: Dark Skin Tone
๐ฃ Footprints
๐ Eyes
๐ Eye
๐๏ธโ๐จ๏ธ Eye in Speech Bubble
๐ง Brain
👅 Tongue
๐ Mouth
๐ Kiss Mark
๐ Heart With Arrow
โค Red Heart
๐ Beating Heart
๐ Broken Heart
๐ Two Hearts
๐ Sparkling Heart
๐ Growing Heart
๐ Blue Heart
๐ Green Heart
๐ Yellow Heart
๐งก Orange Heart
๐ Purple Heart
๐ค Black Heart
๐ Heart With Ribbon
๐ Revolving Hearts
๐ Heart Decoration
โฃ Heavy Heart Exclamation
๐ Love Letter
๐ค Zzz
๐ข Anger Symbol
๐ฃ Bomb
๐ฅ Collision
๐ฆ Sweat Droplets
๐จ Dashing Away
๐ซ Dizzy
๐ฌ Speech Balloon
๐จ Left Speech Bubble
๐ฏ Right Anger Bubble
๐ญ Thought Balloon
๐ณ Hole
๐ Glasses
๐ถ Sunglasses
๐ Necktie
๐ T-Shirt
๐ Jeans
๐งฃ Scarf
๐งค Gloves
๐งฅ Coat
๐งฆ Socks
๐ Dress
๐ Kimono
๐ Bikini
๐ Womanโs Clothes
๐ Purse
๐ Handbag
๐ Clutch Bag
๐ Shopping Bags
๐ School Backpack
๐ Manโs Shoe
๐ Running Shoe
๐ High-Heeled Shoe
๐ก Womanโs Sandal
๐ข Womanโs Boot
๐ Crown
๐ Womanโs Hat
๐ฉ Top Hat
๐ Graduation Cap
๐งข Billed Cap
โ Rescue Workerโs Helmet
๐ฟ Prayer Beads
๐ Lipstick
๐ Ring
๐ Gem Stone
๐ต Monkey Face
๐ Monkey
๐ฆ Gorilla
๐ถ Dog Face
๐ Dog
๐ฉ Poodle
๐บ Wolf Face
๐ฆ Fox Face
๐ฑ Cat Face
๐ Cat
๐ฆ Lion Face
๐ฏ Tiger Face
🐅 Tiger
๐ Leopard
๐ด Horse Face
๐ Horse
๐ฆ Unicorn Face
๐ฆ Zebra
๐ฆ Deer
๐ฎ Cow Face
๐ Ox
๐ Water Buffalo
๐ Cow
๐ท Pig Face
๐ Pig
๐ Boar
๐ฝ Pig Nose
๐ Ram
๐ Ewe
๐ Goat
๐ช Camel
๐ซ Two-Hump Camel
๐ฆ Giraffe
๐ Elephant
๐ฆ Rhinoceros
๐ญ Mouse Face
๐ Mouse
๐ Rat
๐น Hamster Face
๐ฐ Rabbit Face
๐ Rabbit
๐ฟ Chipmunk
๐ฆ Hedgehog
๐ฆ Bat
๐ป Bear Face
๐จ Koala
๐ผ Panda Face
๐พ Paw Prints
๐ฆ Turkey
๐ Chicken
๐ Rooster
๐ฃ Hatching Chick
๐ค Baby Chick
๐ฅ Front-Facing Baby Chick
๐ฆ Bird
๐ง Penguin
๐ Dove
🦅 Eagle
๐ฆ Duck
๐ฆ Owl
๐ธ Frog Face
๐ Crocodile
๐ข Turtle
๐ฆ Lizard
๐ Snake
๐ฒ Dragon Face
๐ Dragon
๐ฆ Sauropod
๐ฆ T-Rex
๐ณ Spouting Whale
๐ Whale
๐ฌ Dolphin
๐ Fish
๐ Tropical Fish
๐ก Blowfish
๐ฆ Shark
๐ Octopus
๐ Spiral Shell
๐ฆ Crab
๐ฆ Shrimp
๐ฆ Squid
๐ Snail
๐ฆ Butterfly
๐ Bug
๐ Ant
๐ Honeybee
๐ Lady Beetle
๐ฆ Cricket
๐ท Spider
๐ธ Spider Web
๐ฆ Scorpion
๐ Bouquet
๐ธ Cherry Blossom
๐ฎ White Flower
๐ต Rosette
๐น Rose
๐ฅ Wilted Flower
๐บ Hibiscus
๐ป Sunflower
๐ผ Blossom
๐ท Tulip
๐ฑ Seedling
๐ฒ Evergreen Tree
๐ณ Deciduous Tree
๐ด Palm Tree
๐ต Cactus
๐พ Sheaf of Rice
๐ฟ Herb
โ Shamrock
๐ Four Leaf Clover
๐ Maple Leaf
๐ Fallen Leaf
๐ Leaf Fluttering in Wind
๐ Grapes
๐ Melon
๐ Watermelon
๐ Tangerine
๐ Lemon
๐ Banana
๐ Pineapple
๐ Red Apple
๐ Green Apple
๐ Pear
๐ Peach
๐ Cherries
๐ Strawberry
๐ฅ Kiwi Fruit
🍅 Tomato
๐ฅฅ Coconut
๐ฅ Avocado
๐ Eggplant
๐ฅ Potato
๐ฅ Carrot
๐ฝ Ear of Corn
๐ถ Hot Pepper
๐ฅ Cucumber
๐ฅฆ Broccoli
๐ Mushroom
๐ฅ Peanuts
๐ฐ Chestnut
๐ Bread
๐ฅ Croissant
๐ฅ Baguette Bread
๐ฅจ Pretzel
๐ฅ Pancakes
๐ง Cheese Wedge
๐ Meat on Bone
๐ Poultry Leg
๐ฅฉ Cut of Meat
๐ฅ Bacon
๐ Hamburger
๐ French Fries
๐ Pizza
๐ญ Hot Dog
๐ฅช Sandwich
๐ฎ Taco
๐ฏ Burrito
๐ฅ Stuffed Flatbread
๐ฅ Egg
๐ณ Cooking
๐ฅ Shallow Pan of Food
๐ฒ Pot of Food
๐ฅฃ Bowl With Spoon
๐ฅ Green Salad
๐ฟ Popcorn
๐ฅซ Canned Food
๐ฑ Bento Box
๐ Rice Cracker
๐ Rice Ball
๐ Cooked Rice
๐ Curry Rice
๐ Steaming Bowl
๐ Spaghetti
๐ Roasted Sweet Potato
๐ข Oden
๐ฃ Sushi
๐ค Fried Shrimp
๐ฅ Fish Cake With Swirl
๐ก Dango
๐ฅ Dumpling
๐ฅ Fortune Cookie
๐ฅก Takeout Box
๐ฆ Soft Ice Cream
๐ง Shaved Ice
๐จ Ice Cream
๐ฉ Doughnut
๐ช Cookie
๐ Birthday Cake
๐ฐ Shortcake
๐ฅง Pie
๐ซ Chocolate Bar
๐ฌ Candy
๐ญ Lollipop
๐ฎ Custard
๐ฏ Honey Pot
๐ผ Baby Bottle
๐ฅ Glass of Milk
โ Hot Beverage
๐ต Teacup Without Handle
๐ถ Sake
๐พ Bottle With Popping Cork
๐ท Wine Glass
๐ธ Cocktail Glass
๐น Tropical Drink
๐บ Beer Mug
๐ป Clinking Beer Mugs
๐ฅ Clinking Glasses
๐ฅ Tumbler Glass
๐ฅค Cup With Straw
๐ฅข Chopsticks
๐ฝ Fork and Knife With Plate
๐ด Fork and Knife
๐ฅ Spoon
๐ช Kitchen Knife
๐บ Amphora
๐ Globe Showing Europe-Africa
๐ Globe Showing Americas
๐ Globe Showing Asia-Australia
๐ Globe With Meridians
๐บ World Map
๐พ Map of Japan
๐ Snow-Capped Mountain
โฐ Mountain
๐ Volcano
๐ป Mount Fuji
๐ Camping
๐ Beach With Umbrella
๐ Desert
๐ Desert Island
๐ National Park
๐ Stadium
๐ Classical Building
๐ Building Construction
๐ House
๐ Cityscape
๐ Derelict House
๐ House
๐ก House With Garden
๐ข Office Building
๐ฃ Japanese Post Office
๐ค Post Office
๐ฅ Hospital
๐ฆ Bank
๐จ Hotel
๐ฉ Love Hotel
๐ช Convenience Store
๐ซ School
๐ฌ Department Store
๐ญ Factory
๐ฏ Japanese Castle
๐ฐ Castle
๐ Wedding
๐ผ Tokyo Tower
๐ฝ Statue of Liberty
โช Church
๐ Mosque
๐ Synagogue
โฉ Shinto Shrine
๐ Kaaba
โฒ Fountain
โบ Tent
๐ Foggy
๐ Night With Stars
๐ Sunrise Over Mountains
🌅 Sunrise
๐ Cityscape at Dusk
๐ Sunset
๐ Bridge at Night
โจ Hot Springs
๐ Milky Way
๐ Carousel Horse
๐ก Ferris Wheel
๐ข Roller Coaster
๐ Barber Pole
๐ช Circus Tent
๐ญ Performing Arts
๐ผ Framed Picture
๐จ Artist Palette
๐ฐ Slot Machine
๐ Locomotive
๐ Railway Car
๐ High-Speed Train
🚅 High-Speed Train With Bullet Nose
๐ Train
๐ Metro
๐ Light Rail
๐ Station
๐ Tram
๐ Monorail
๐ Mountain Railway
๐ Tram Car
๐ Bus
๐ Oncoming Bus
๐ Trolleybus
๐ Minibus
๐ Ambulance
๐ Fire Engine
๐ Police Car
๐ Oncoming Police Car
๐ Taxi
๐ Oncoming Taxi
๐ Automobile
๐ Oncoming Automobile
๐ Sport Utility Vehicle
๐ Delivery Truck
๐ Articulated Lorry
๐ Tractor
๐ฒ Bicycle
๐ด Kick Scooter
๐ต Motor Scooter
๐ Bus Stop
๐ฃ Motorway
๐ค Railway Track
โฝ Fuel Pump
๐จ Police Car Light
๐ฅ Horizontal Traffic Light
๐ฆ Vertical Traffic Light
๐ง Construction
๐ Stop Sign
โ Anchor
โต Sailboat
๐ถ Canoe
๐ค Speedboat
๐ณ Passenger Ship
โด Ferry
๐ฅ Motor Boat
๐ข Ship
โ Airplane
๐ฉ Small Airplane
๐ซ Airplane Departure
๐ฌ Airplane Arrival
๐บ Seat
๐ Helicopter
๐ Suspension Railway
๐ Mountain Cableway
๐ก Aerial Tramway
๐ฐ Satellite
๐ Rocket
๐ธ Flying Saucer
๐ Bellhop Bell
๐ช Door
๐ Bed
๐ Couch and Lamp
๐ฝ Toilet
๐ฟ Shower
๐ Bathtub
โ Hourglass
โณ Hourglass With Flowing Sand
โ Watch
โฐ Alarm Clock
โฑ Stopwatch
โฒ Timer Clock
๐ฐ Mantelpiece Clock
๐ Twelve Oโclock
๐ง Twelve-Thirty
๐ One Oโclock
๐ One-Thirty
๐ Two Oโclock
๐ Two-Thirty
๐ Three Oโclock
๐ Three-Thirty
๐ Four Oโclock
๐ Four-Thirty
๐ Five Oโclock
๐ Five-Thirty
๐ Six Oโclock
๐ก Six-Thirty
๐ Seven Oโclock
๐ข Seven-Thirty
๐ Eight Oโclock
๐ฃ Eight-Thirty
๐ Nine Oโclock
๐ค Nine-Thirty
๐ Ten Oโclock
๐ฅ Ten-Thirty
๐ Eleven Oโclock
๐ฆ Eleven-Thirty
๐ New Moon
๐ Waxing Crescent Moon
๐ First Quarter Moon
๐ Waxing Gibbous Moon
๐ Full Moon
๐ Waning Gibbous Moon
๐ Last Quarter Moon
๐ Waning Crescent Moon
๐ Crescent Moon
๐ New Moon Face
๐ First Quarter Moon With Face
๐ Last Quarter Moon With Face
๐ก Thermometer
โ Sun
๐ Full Moon With Face
๐ Sun With Face
โญ White Medium Star
๐ Glowing Star
๐ Shooting Star
โ Cloud
⛅ Sun Behind Cloud
โ Cloud With Lightning and Rain
๐ค Sun Behind Small Cloud
๐ฅ Sun Behind Large Cloud
๐ฆ Sun Behind Rain Cloud
๐ง Cloud With Rain
๐จ Cloud With Snow
๐ฉ Cloud With Lightning
๐ช Tornado
๐ซ Fog
๐ฌ Wind Face
๐ Cyclone
๐ Rainbow
๐ Closed Umbrella
โ Umbrella
โ Umbrella With Rain Drops
โฑ Umbrella on Ground
โก High Voltage
โ Snowflake
โ Snowman
โ Snowman Without Snow
โ Comet
๐ฅ Fire
๐ง Droplet
๐ Water Wave
๐ Jack-O-Lantern
๐ Christmas Tree
๐ Fireworks
๐ Sparkler
โจ Sparkles
๐ Balloon
๐ Party Popper
๐ Confetti Ball
๐ Tanabata Tree
๐ Pine Decoration
๐ Japanese Dolls
๐ Carp Streamer
๐ Wind Chime
๐ Moon Viewing Ceremony
๐ Ribbon
๐ Wrapped Gift
๐ Reminder Ribbon
๐ Admission Tickets
๐ซ Ticket
๐ Military Medal
๐ Trophy
🏅 Sports Medal
๐ฅ 1st Place Medal
๐ฅ 2nd Place Medal
๐ฅ 3rd Place Medal
โฝ Soccer Ball
โพ Baseball
๐ Basketball
๐ Volleyball
๐ American Football
๐ Rugby Football
๐พ Tennis
๐ฑ Pool 8 Ball
๐ณ Bowling
๐ Cricket
๐ Field Hockey
๐ Ice Hockey
๐ Ping Pong
๐ธ Badminton
๐ฅ Boxing Glove
๐ฅ Martial Arts Uniform
🥅 Goal Net
๐ฏ Direct Hit
โณ Flag in Hole
โธ Ice Skate
๐ฃ Fishing Pole
๐ฝ Running Shirt
๐ฟ Skis
๐ท Sled
๐ฅ Curling Stone
๐ฎ Video Game
๐น Joystick
๐ฒ Game Die
โ Spade Suit
โฅ Heart Suit
โฆ Diamond Suit
โฃ Club Suit
๐ Joker
๐ Mahjong Red Dragon
๐ด Flower Playing Cards
๐ Muted Speaker
๐ Speaker Low Volume
๐ Speaker Medium Volume
๐ Speaker High Volume
๐ข Loudspeaker
๐ฃ Megaphone
๐ฏ Postal Horn
๐ Bell
๐ Bell With Slash
๐ผ Musical Score
๐ต Musical Note
๐ถ Musical Notes
๐ Studio Microphone
๐ Level Slider
๐ Control Knobs
๐ค Microphone
๐ง Headphone
๐ป Radio
๐ท Saxophone
๐ธ Guitar
๐น Musical Keyboard
๐บ Trumpet
๐ป Violin
๐ฅ Drum
๐ฑ Mobile Phone
๐ฒ Mobile Phone With Arrow
โ Telephone
๐ Telephone Receiver
๐ Pager
๐ Fax Machine
๐ Battery
๐ Electric Plug
๐ป Laptop Computer
๐ฅ Desktop Computer
๐จ Printer
โจ Keyboard
๐ฑ Computer Mouse
๐ฒ Trackball
๐ฝ Computer Disk
๐พ Floppy Disk
๐ฟ Optical Disk
๐ DVD
๐ฅ Movie Camera
๐ Film Frames
๐ฝ Film Projector
๐ฌ Clapper Board
๐บ Television
๐ท Camera
๐ธ Camera With Flash
๐น Video Camera
๐ผ Videocassette
๐ Left-Pointing Magnifying Glass
๐ Right-Pointing Magnifying Glass
๐ฌ Microscope
๐ญ Telescope
๐ก Satellite Antenna
๐ฏ Candle
๐ก Light Bulb
๐ฆ Flashlight
๐ฎ Red Paper Lantern
๐ Notebook With Decorative Cover
๐ Closed Book
๐ Open Book
๐ Green Book
๐ Blue Book
๐ Orange Book
๐ Books
๐ Notebook
๐ Ledger
๐ Page With Curl
๐ Scroll
๐ Page Facing Up
๐ฐ Newspaper
๐ Rolled-Up Newspaper
๐ Bookmark Tabs
๐ Bookmark
๐ท Label
๐ฐ Money Bag
๐ด Yen Banknote
๐ต Dollar Banknote
๐ถ Euro Banknote
๐ท Pound Banknote
๐ธ Money With Wings
๐ณ Credit Card
๐น Chart Increasing With Yen
๐ฑ Currency Exchange
๐ฒ Heavy Dollar Sign
โ Envelope
๐ง E-Mail
๐จ Incoming Envelope
๐ฉ Envelope With Arrow
๐ค Outbox Tray
๐ฅ Inbox Tray
๐ฆ Package
๐ซ Closed Mailbox With Raised Flag
๐ช Closed Mailbox With Lowered Flag
๐ฌ Open Mailbox With Raised Flag
๐ญ Open Mailbox With Lowered Flag
๐ฎ Postbox
๐ณ Ballot Box With Ballot
โ Pencil
โ Black Nib
๐ Fountain Pen
๐ Pen
๐ Paintbrush
๐ Crayon
๐ Memo
๐ผ Briefcase
๐ File Folder
๐ Open File Folder
๐ Card Index Dividers
📅 Calendar
๐ Tear-Off Calendar
๐ Spiral Notepad
๐ Spiral Calendar
๐ Card Index
๐ Chart Increasing
๐ Chart Decreasing
๐ Bar Chart
๐ Clipboard
๐ Pushpin
๐ Round Pushpin
๐ Paperclip
๐ Linked Paperclips
๐ Straight Ruler
๐ Triangular Ruler
โ Scissors
๐ Card File Box
๐ File Cabinet
๐ Wastebasket
๐ Locked
๐ Unlocked
๐ Locked With Pen
๐ Locked With Key
๐ Key
๐ Old Key
๐จ Hammer
โ Pick
โ Hammer and Pick
๐ Hammer and Wrench
๐ก Dagger
โ Crossed Swords
๐ซ Pistol
๐น Bow and Arrow
๐ก Shield
๐ง Wrench
๐ฉ Nut and Bolt
โ Gear
๐ Clamp
โ Alembic
โ Balance Scale
๐ Link
โ Chains
๐ Syringe
๐ Pill
๐ฌ Cigarette
โฐ Coffin
โฑ Funeral Urn
๐ฟ Moai
๐ข Oil Drum
๐ฎ Crystal Ball
๐ Shopping Cart
๐ง Atm Sign
๐ฎ Litter in Bin Sign
๐ฐ Potable Water
โฟ Wheelchair Symbol
๐น Menโs Room
๐บ Womenโs Room
๐ป Restroom
๐ผ Baby Symbol
๐พ Water Closet
๐ Passport Control
๐ Customs
๐ Baggage Claim
🛅 Left Luggage
โ Warning
๐ธ Children Crossing
โ No Entry
๐ซ Prohibited
๐ณ No Bicycles
๐ญ No Smoking
๐ฏ No Littering
๐ฑ Non-Potable Water
๐ท No Pedestrians
๐ต No Mobile Phones
๐ No One Under Eighteen
โข Radioactive
โฃ Biohazard
โฌ Up Arrow
โ Up-Right Arrow
โก Right Arrow
โ Down-Right Arrow
โฌ Down Arrow
โ Down-Left Arrow
⬅ Left Arrow
โ Up-Left Arrow
โ Up-Down Arrow
โ Left-Right Arrow
โฉ Right Arrow Curving Left
โช Left Arrow Curving Right
โคด Right Arrow Curving Up
โคต Right Arrow Curving Down
๐ Clockwise Vertical Arrows
๐ Anticlockwise Arrows Button
๐ Back Arrow
๐ End Arrow
๐ On! Arrow
๐ Soon Arrow
๐ Top Arrow
๐ Place of Worship
โ Atom Symbol
๐ Om
โก Star of David
โธ Wheel of Dharma
โฏ Yin Yang
โ Latin Cross
โฆ Orthodox Cross
โช Star and Crescent
โฎ Peace Symbol
๐ Menorah
๐ฏ Dotted Six-Pointed Star
โ Aries
โ Taurus
โ Gemini
โ Cancer
โ Leo
โ Virgo
โ Libra
โ Scorpius
โ Sagittarius
โ Capricorn
โ Aquarius
โ Pisces
โ Ophiuchus
๐ Shuffle Tracks Button
๐ Repeat Button
๐ Repeat Single Button
โถ Play Button
โฉ Fast-Forward Button
โญ Next Track Button
โฏ Play or Pause Button
โ Reverse Button
โช Fast Reverse Button
โฎ Last Track Button
๐ผ Up Button
โซ Fast Up Button
๐ฝ Down Button
โฌ Fast Down Button
โธ Pause Button
โน Stop Button
โบ Record Button
โ Eject Button
๐ฆ Cinema
🔅 Dim Button
๐ Bright Button
๐ถ Antenna Bars
๐ณ Vibration Mode
๐ด Mobile Phone Off
โ Female Sign
โ Male Sign
โ Medical Symbol
โป Recycling Symbol
โ Fleur-De-Lis
๐ฑ Trident Emblem
๐ Name Badge
๐ฐ Japanese Symbol for Beginner
โญ Heavy Large Circle
✅ White Heavy Check Mark
โ Ballot Box With Check
โ Heavy Check Mark
โ Heavy Multiplication X
โ Cross Mark
โ Cross Mark Button
โ Heavy Plus Sign
โ Heavy Minus Sign
โ Heavy Division Sign
โฐ Curly Loop
โฟ Double Curly Loop
ใฝ Part Alternation Mark
โณ Eight-Spoked Asterisk
โด Eight-Pointed Star
โ Sparkle
โผ Double Exclamation Mark
โ Exclamation Question Mark
โ Question Mark
โ White Question Mark
โ White Exclamation Mark
โ Exclamation Mark
ใฐ Wavy Dash
ยฉ Copyright
ยฎ Registered
โข Trade Mark
#๏ธโฃ Keycap Number Sign
*๏ธโฃ Keycap Asterisk
0๏ธโฃ Keycap Digit Zero
1๏ธโฃ Keycap Digit One
2๏ธโฃ Keycap Digit Two
3๏ธโฃ Keycap Digit Three
4๏ธโฃ Keycap Digit Four
5๏ธโฃ Keycap Digit Five
6๏ธโฃ Keycap Digit Six
7๏ธโฃ Keycap Digit Seven
8๏ธโฃ Keycap Digit Eight
9๏ธโฃ Keycap Digit Nine
๐ Keycap 10
๐ฏ Hundred Points
๐ Input Latin Uppercase
๐ก Input Latin Lowercase
๐ข Input Numbers
๐ฃ Input Symbols
๐ค Input Latin Letters
🅰 A Button (blood Type)
๐ Ab Button (blood Type)
🅱 B Button (blood Type)
๐ CL Button
๐ Cool Button
๐ Free Button
โน Information
๐ ID Button
โ Circled M
๐ New Button
๐ NG Button
🅾 O Button (blood Type)
๐ OK Button
🅿 P Button
๐ SOS Button
๐ Up! Button
๐ Vs Button
๐ Japanese โhereโ Button
๐ Japanese โservice Chargeโ Button
๐ท Japanese โmonthly Amountโ Button
๐ถ Japanese โnot Free of Chargeโ Button
๐ฏ Japanese โreservedโ Button
๐ Japanese โbargainโ Button
๐น Japanese โdiscountโ Button
๐ Japanese โfree of Chargeโ Button
๐ฒ Japanese โprohibitedโ Button
๐ Japanese โacceptableโ Button
๐ธ Japanese โapplicationโ Button
๐ด Japanese โpassing Gradeโ Button
๐ณ Japanese โvacancyโ Button
ใ Japanese โcongratulationsโ Button
ใ Japanese โsecretโ Button
๐บ Japanese โopen for Businessโ Button
๐ต Japanese โno Vacancyโ Button
โช Black Small Square
โซ White Small Square
โป White Medium Square
โผ Black Medium Square
โฝ White Medium-Small Square
โพ Black Medium-Small Square
โฌ Black Large Square
โฌ White Large Square
๐ถ Large Orange Diamond
๐ท Large Blue Diamond
๐ธ Small Orange Diamond
๐น Small Blue Diamond
๐บ Red Triangle Pointed Up
๐ป Red Triangle Pointed Down
๐ Diamond With a Dot
๐ Radio Button
๐ฒ Black Square Button
๐ณ White Square Button
โช White Circle
โซ Black Circle
๐ด Red Circle
๐ต Blue Circle
๐ Chequered Flag
๐ฉ Triangular Flag
๐ Crossed Flags
๐ด Black Flag
๐ณ White Flag
๐ณ๏ธโ๐ Rainbow Flag
๐ฆ๐จ Ascension Island
๐ฆ๐ฉ Andorra
๐ฆ๐ช United Arab Emirates
๐ฆ๐ซ Afghanistan
๐ฆ๐ฌ Antigua & Barbuda
๐ฆ๐ฎ Anguilla
๐ฆ๐ฑ Albania
๐ฆ๐ฒ Armenia
๐ฆ๐ด Angola
๐ฆ๐ถ Antarctica
๐ฆ๐ท Argentina
๐ฆ๐ธ American Samoa
๐ฆ๐น Austria
๐ฆ๐บ Australia
๐ฆ๐ผ Aruba
🇦🇽 Åland Islands
๐ฆ๐ฟ Azerbaijan
๐ง๐ฆ Bosnia & Herzegovina
๐ง๐ง Barbados
๐ง๐ฉ Bangladesh
๐ง๐ช Belgium
๐ง๐ซ Burkina Faso
๐ง๐ฌ Bulgaria
๐ง๐ญ Bahrain
๐ง๐ฎ Burundi
๐ง๐ฏ Benin
๐ง๐ฑ St. Barthรฉlemy
๐ง๐ฒ Bermuda
๐ง๐ณ Brunei
๐ง๐ด Bolivia
๐ง๐ถ Caribbean Netherlands
๐ง๐ท Brazil
๐ง๐ธ Bahamas
๐ง๐น Bhutan
๐ง๐ป Bouvet Island
๐ง๐ผ Botswana
๐ง๐พ Belarus
๐ง๐ฟ Belize
๐จ๐ฆ Canada
๐จ๐จ Cocos (Keeling) Islands
๐จ๐ฉ Congo - Kinshasa
๐จ๐ซ Central African Republic
๐จ๐ฌ Congo - Brazzaville
๐จ๐ญ Switzerland
๐จ๐ฎ Cรดte DโIvoire
๐จ๐ฐ Cook Islands
๐จ๐ฑ Chile
๐จ๐ฒ Cameroon
๐จ๐ณ China
๐จ๐ด Colombia
๐จ๐ต Clipperton Island
๐จ๐ท Costa Rica
๐จ๐บ Cuba
๐จ๐ป Cape Verde
๐จ๐ผ Curaรงao
๐จ๐ฝ Christmas Island
๐จ๐พ Cyprus
๐จ๐ฟ Czechia
๐ฉ๐ช Germany
๐ฉ๐ฌ Diego Garcia
๐ฉ๐ฏ Djibouti
๐ฉ๐ฐ Denmark
๐ฉ๐ฒ Dominica
๐ฉ๐ด Dominican Republic
๐ฉ๐ฟ Algeria
๐ช๐ฆ Ceuta & Melilla
๐ช๐จ Ecuador
๐ช๐ช Estonia
๐ช๐ฌ Egypt
๐ช๐ญ Western Sahara
๐ช๐ท Eritrea
๐ช๐ธ Spain
๐ช๐น Ethiopia
๐ช๐บ European Union
๐ซ๐ฎ Finland
๐ซ๐ฏ Fiji
๐ซ๐ฐ Falkland Islands
๐ซ๐ฒ Micronesia
๐ซ๐ด Faroe Islands
๐ซ๐ท France
๐ฌ๐ฆ Gabon
๐ฌ๐ง United Kingdom
๐ฌ๐ฉ Grenada
๐ฌ๐ช Georgia
๐ฌ๐ซ French Guiana
๐ฌ๐ฌ Guernsey
๐ฌ๐ญ Ghana
๐ฌ๐ฎ Gibraltar
๐ฌ๐ฑ Greenland
๐ฌ๐ฒ Gambia
๐ฌ๐ณ Guinea
๐ฌ๐ต Guadeloupe
๐ฌ๐ถ Equatorial Guinea
๐ฌ๐ท Greece
๐ฌ๐ธ South Georgia & South Sandwich Islands
๐ฌ๐น Guatemala
๐ฌ๐บ Guam
๐ฌ๐ผ Guinea-Bissau
๐ฌ๐พ Guyana
๐ญ๐ฐ Hong Kong Sar China
๐ญ๐ฒ Heard & Mcdonald Islands
๐ญ๐ณ Honduras
๐ญ๐ท Croatia
๐ญ๐น Haiti
๐ญ๐บ Hungary
๐ฎ๐จ Canary Islands
๐ฎ๐ฉ Indonesia
๐ฎ๐ช Ireland
๐ฎ๐ฑ Israel
๐ฎ๐ฒ Isle of Man
๐ฎ๐ณ India
๐ฎ๐ด British Indian Ocean Territory
๐ฎ๐ถ Iraq
๐ฎ๐ท Iran
๐ฎ๐ธ Iceland
๐ฎ๐น Italy
๐ฏ๐ช Jersey
๐ฏ๐ฒ Jamaica
๐ฏ๐ด Jordan
๐ฏ๐ต Japan
๐ฐ๐ช Kenya
๐ฐ๐ฌ Kyrgyzstan
๐ฐ๐ญ Cambodia
๐ฐ๐ฎ Kiribati
๐ฐ๐ฒ Comoros
๐ฐ๐ณ St. Kitts & Nevis
๐ฐ๐ต North Korea
๐ฐ๐ท South Korea
๐ฐ๐ผ Kuwait
๐ฐ๐พ Cayman Islands
๐ฐ๐ฟ Kazakhstan
๐ฑ๐ฆ Laos
๐ฑ๐ง Lebanon
๐ฑ๐จ St. Lucia
๐ฑ๐ฎ Liechtenstein
๐ฑ๐ฐ Sri Lanka
๐ฑ๐ท Liberia
๐ฑ๐ธ Lesotho
๐ฑ๐น Lithuania
๐ฑ๐บ Luxembourg
๐ฑ๐ป Latvia
๐ฑ๐พ Libya
๐ฒ๐ฆ Morocco
๐ฒ๐จ Monaco
๐ฒ๐ฉ Moldova
๐ฒ๐ช Montenegro
๐ฒ๐ซ St. Martin
๐ฒ๐ฌ Madagascar
๐ฒ๐ญ Marshall Islands
๐ฒ๐ฐ Macedonia
๐ฒ๐ฑ Mali
๐ฒ๐ฒ Myanmar (Burma)
๐ฒ๐ณ Mongolia
๐ฒ๐ด Macau Sar China
๐ฒ๐ต Northern Mariana Islands
๐ฒ๐ถ Martinique
๐ฒ๐ท Mauritania
๐ฒ๐ธ Montserrat
๐ฒ๐น Malta
๐ฒ๐บ Mauritius
๐ฒ๐ป Maldives
๐ฒ๐ผ Malawi
๐ฒ๐ฝ Mexico
๐ฒ๐พ Malaysia
๐ฒ๐ฟ Mozambique
๐ณ๐ฆ Namibia
๐ณ๐จ New Caledonia
๐ณ๐ช Niger
๐ณ๐ซ Norfolk Island
๐ณ๐ฌ Nigeria
๐ณ๐ฎ Nicaragua
๐ณ๐ฑ Netherlands
๐ณ๐ด Norway
๐ณ๐ต Nepal
๐ณ๐ท Nauru
๐ณ๐บ Niue
๐ณ๐ฟ New Zealand
๐ด๐ฒ Oman
๐ต๐ฆ Panama
๐ต๐ช Peru
๐ต๐ซ French Polynesia
๐ต๐ฌ Papua New Guinea
๐ต๐ญ Philippines
๐ต๐ฐ Pakistan
๐ต๐ฑ Poland
๐ต๐ฒ St. Pierre & Miquelon
๐ต๐ณ Pitcairn Islands
๐ต๐ท Puerto Rico
๐ต๐ธ Palestinian Territories
๐ต๐น Portugal
๐ต๐ผ Palau
๐ต๐พ Paraguay
๐ถ๐ฆ Qatar
๐ท๐ช Rรฉunion
๐ท๐ด Romania
๐ท๐ธ Serbia
๐ท๐บ Russia
๐ท๐ผ Rwanda
๐ธ๐ฆ Saudi Arabia
๐ธ๐ง Solomon Islands
๐ธ๐จ Seychelles
๐ธ๐ฉ Sudan
๐ธ๐ช Sweden
๐ธ๐ฌ Singapore
๐ธ๐ญ St. Helena
๐ธ๐ฎ Slovenia
๐ธ๐ฏ Svalbard & Jan Mayen
๐ธ๐ฐ Slovakia
๐ธ๐ฑ Sierra Leone
๐ธ๐ฒ San Marino
๐ธ๐ณ Senegal
๐ธ๐ด Somalia
๐ธ๐ท Suriname
๐ธ๐ธ South Sudan
๐ธ๐น Sรฃo Tomรฉ & Prรญncipe
๐ธ๐ป El Salvador
๐ธ๐ฝ Sint Maarten
๐ธ๐พ Syria
๐ธ๐ฟ Swaziland
๐น๐ฆ Tristan Da Cunha
๐น๐จ Turks & Caicos Islands
๐น๐ฉ Chad
๐น๐ซ French Southern Territories
๐น๐ฌ Togo
๐น๐ญ Thailand
๐น๐ฏ Tajikistan
๐น๐ฐ Tokelau
๐น๐ฑ Timor-Leste
๐น๐ฒ Turkmenistan
๐น๐ณ Tunisia
๐น๐ด Tonga
๐น๐ท Turkey
๐น๐น Trinidad & Tobago
๐น๐ป Tuvalu
๐น๐ผ Taiwan
๐น๐ฟ Tanzania
๐บ๐ฆ Ukraine
๐บ๐ฌ Uganda
๐บ๐ฒ U.S. Outlying Islands
๐บ๐ณ United Nations
๐บ๐ธ United States
๐บ๐พ Uruguay
๐บ๐ฟ Uzbekistan
๐ป๐ฆ Vatican City
๐ป๐จ St. Vincent & Grenadines
๐ป๐ช Venezuela
๐ป๐ฌ British Virgin Islands
๐ป๐ฎ U.S. Virgin Islands
๐ป๐ณ Vietnam
๐ป๐บ Vanuatu
๐ผ๐ซ Wallis & Futuna
๐ผ๐ธ Samoa
๐ฝ๐ฐ Kosovo
๐พ๐ช Yemen
๐พ๐น Mayotte
๐ฟ๐ฆ South Africa
๐ฟ๐ฒ Zambia
๐ฟ๐ผ Zimbabwe
๐ด๓ ง๓ ข๓ ฅ๓ ฎ๓ ง๓ ฟ Flag for England (GB-ENG)
๐ด๓ ง๓ ข๓ ณ๓ ฃ๓ ด๓ ฟ Flag for Scotland (GB-SCT)
๐ด๓ ง๓ ข๓ ท๓ ฌ๓ ณ๓ ฟ Flag for Wales (GB-WLS)
๐ฅ Rifle
๐คป Modern Pentathlon
๐ดโโ ๏ธ Pirate Flag
๐ฆ Regional Indicator Symbol Letter A
๐ง Regional Indicator Symbol Letter B
๐จ Regional Indicator Symbol Letter C
๐ฉ Regional Indicator Symbol Letter D
๐ช Regional Indicator Symbol Letter E
๐ซ Regional Indicator Symbol Letter F
๐ฌ Regional Indicator Symbol Letter G
๐ญ Regional Indicator Symbol Letter H
๐ฎ Regional Indicator Symbol Letter I
๐ฏ Regional Indicator Symbol Letter J
๐ฐ Regional Indicator Symbol Letter K
๐ฑ Regional Indicator Symbol Letter L
๐ฒ Regional Indicator Symbol Letter M
๐ณ Regional Indicator Symbol Letter N
๐ด Regional Indicator Symbol Letter O
๐ต Regional Indicator Symbol Letter P
๐ถ Regional Indicator Symbol Letter Q
๐ท Regional Indicator Symbol Letter R
๐ธ Regional Indicator Symbol Letter S
๐น Regional Indicator Symbol Letter T
๐บ Regional Indicator Symbol Letter U
๐ป Regional Indicator Symbol Letter V
๐ผ Regional Indicator Symbol Letter W
๐ฝ Regional Indicator Symbol Letter X
๐พ Regional Indicator Symbol Letter Y
๐ฟ Regional Indicator Symbol Letter Z
๐ฑโ๐ Dino Cat
๐ฑโ๐ Astro Cat
๐ฑโ๐ค Ninja Cat
๐ฑโ๐ป Hacker Cat
๐ฑโ๐ Stunt Cat
๐ฑโ๐ Hipster Cat
โฏโโฏโโฏโโฏโโฏ Olympic Rings
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ต๓ ฟ Flag for Baiti (NR-05)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ท๓ ฟ Flag for Nord-Trรธndelag (NO-17)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ฒ๓ ฟ Flag for Hordaland (NO-12)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ฒ๓ ฟ Flag for Akershus (NO-02)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ถ๓ ฟ Flag for Sรธr-Trรธndelag (NO-16)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ธ๓ ฟ Flag for Telemark (NO-08)
๐ด๓ ฎ๓ ฌ๓ ต๓ ด๓ ฟ Flag for Utrecht (NL-UT)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ต๓ ฟ Flag for Mรธre og Romsdal (NO-15)
๐ด๓ ฎ๓ ฏ๓ ฒ๓ ฑ๓ ฟ Flag for Svalbard (NO-21)
๐ด๓ ฎ๓ ฐ๓ ด๓ ฟ Flag for Purwanchal (NP-4)
๐ด๓ ฎ๓ ฐ๓ ฑ๓ ฟ Flag for Central (NP-1)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ณ๓ ฟ Flag for Oslo (NO-03)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ถ๓ ฟ Flag for Boe (NR-06)
๐จ๐พโ๐จ๐พโ๐ฆ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ฎ๓ ฌ๓ ฎ๓ ข๓ ฟ Flag for North Brabant (NL-NB)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ น๓ ฟ Flag for Aust-Agder (NO-09)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for Anabar (NR-02)
๐ด๓ ฎ๓ ฌ๓ ฌ๓ ฉ๓ ฟ Flag for Limburg (NL-LI)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ถ๓ ฟ Flag for Buskerud (NO-06)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ด๓ ฟ Flag for Hedmark (NO-04)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ท๓ ฟ Flag for Vestfold (NO-07)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ด๓ ฟ Flag for Anibare (NR-04)
๐ด๓ ฎ๓ ฏ๓ ฒ๓ ฐ๓ ฟ Flag for Finnmark (NO-20)
๐ด๓ ฎ๓ ฌ๓ ฏ๓ ถ๓ ฟ Flag for Overijssel (NL-OV)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ฑ๓ ฟ Flag for Rogaland (NO-11)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ฑ๓ ฟ Flag for รstfold (NO-01)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ฑ๓ ฟ Flag for Aiwo (NR-01)
๐ด๓ ฎ๓ ฌ๓ บ๓ ฅ๓ ฟ Flag for Zeeland (NL-ZE)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ท๓ ฟ Flag for Buada (NR-07)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ น๓ ฟ Flag for Troms (NO-19)
๐ด๓ ฎ๓ ฏ๓ ฐ๓ ต๓ ฟ Flag for Oppland (NO-05)
๐ด๓ ฎ๓ ฐ๓ ฒ๓ ฟ Flag for Madhya Pashchimanchal (NP-2)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ณ๓ ฟ Flag for Anetan (NR-03)
๐ด๓ ฎ๓ ฐ๓ ณ๓ ฟ Flag for Western (NP-3)
๐ด๓ ฎ๓ ฏ๓ ฒ๓ ฒ๓ ฟ Flag for Jan Mayen (NO-22)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ธ๓ ฟ Flag for Nordland (NO-18)
๐ด๓ ฐ๓ ก๓ ฑ๓ ฟ Flag for Bocas del Toro (PA-1)
๐ด๓ ฐ๓ ก๓ ณ๓ ฟ Flag for Colรณn (PA-3)
๐ด๓ ฏ๓ ญ๓ ค๓ ก๓ ฟ Flag for Ad Dakhiliyah (OM-DA)
๐ด๓ ฏ๓ ญ๓ ญ๓ ก๓ ฟ Flag for Muscat (OM-MA)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ น๓ ฟ Flag for Ewa (NR-09)
๐ด๓ ฎ๓ บ๓ ด๓ ซ๓ ฉ๓ ฟ Flag for Taranaki (NZ-TKI)
๐ด๓ ฎ๓ ฒ๓ ฑ๓ ฐ๓ ฟ Flag for Ijuw (NR-10)
๐ด๓ ฎ๓ บ๓ ท๓ ด๓ ฃ๓ ฟ Flag for West Coast (NZ-WTC)
๐ด๓ ฎ๓ บ๓ ณ๓ ด๓ ฌ๓ ฟ Flag for Southland (NZ-STL)
๐ด๓ ฎ๓ บ๓ ด๓ ก๓ ณ๓ ฟ Flag for Tasman (NZ-TAS)
๐ด๓ ฎ๓ บ๓ ญ๓ ท๓ ด๓ ฟ Flag for Manawatu-Wanganui (NZ-MWT)
๐ด๓ ฎ๓ บ๓ ท๓ ซ๓ ฏ๓ ฟ Flag for Waikato (NZ-WKO)
๐ด๓ ฎ๓ บ๓ ญ๓ ข๓ จ๓ ฟ Flag for Marl (NZ-MBH)
๐ด๓ ฎ๓ บ๓ ข๓ ฏ๓ ฐ๓ ฟ Flag for Bay of Plenty (NZ-BOP)
๐ด๓ ฎ๓ ฒ๓ ฑ๓ ฒ๓ ฟ Flag for Nibok (NR-12)
๐ด๓ ฏ๓ ญ๓ ข๓ ต๓ ฟ Flag for Al Buraimi (OM-BU)
๐ด๓ ฎ๓ บ๓ ก๓ ต๓ ซ๓ ฟ Flag for Auckland (NZ-AUK)
๐ด๓ ฏ๓ ญ๓ ณ๓ ช๓ ฟ Flag for Janub ash Sharqiyah (OM-SJ)
๐ด๓ ฏ๓ ญ๓ ณ๓ ณ๓ ฟ Flag for Shamal ash Sharqiyah (OM-SS)
๐ด๓ ฐ๓ ก๓ ฒ๓ ฟ Flag for Coclรฉ (PA-2)
๐ด๓ ฎ๓ ฒ๓ ฑ๓ ฑ๓ ฟ Flag for Meneng (NR-11)
๐ด๓ ฐ๓ ก๓ ฑ๓ ฐ๓ ฟ Flag for West Panamรก (PA-10)
๐ด๓ ฏ๓ ญ๓ บ๓ ก๓ ฟ Flag for Ad Dhahirah (OM-ZA)
๐ด๓ ฎ๓ บ๓ ฎ๓ ด๓ ฌ๓ ฟ Flag for Northland (NZ-NTL)
๐ด๓ ฎ๓ บ๓ ฃ๓ ก๓ ฎ๓ ฟ Flag for Canterbury (NZ-CAN)
๐ด๓ ฎ๓ บ๓ ง๓ ฉ๓ ณ๓ ฟ Flag for Gisborne (NZ-GIS)
๐ด๓ ฎ๓ บ๓ ฃ๓ ฉ๓ ด๓ ฟ Flag for Chatham Islands (NZ-CIT)
๐ด๓ ฎ๓ ฒ๓ ฑ๓ ณ๓ ฟ Flag for Uaboe (NR-13)
๐ด๓ ฎ๓ ฒ๓ ฐ๓ ธ๓ ฟ Flag for Denigomodu (NR-08)
๐ด๓ ฏ๓ ญ๓ ญ๓ ต๓ ฟ Flag for Musandam (OM-MU)
๐ด๓ ฏ๓ ญ๓ ข๓ ณ๓ ฟ Flag for Shamal al Batinah (OM-BS)
๐ด๓ ฎ๓ บ๓ จ๓ ซ๓ ข๓ ฟ Flag for Hawkeโs Bay (NZ-HKB)
๐ด๓ ฎ๓ บ๓ ฏ๓ ด๓ ก๓ ฟ Flag for Otago (NZ-OTA)
๐ด๓ ฏ๓ ญ๓ ข๓ ช๓ ฟ Flag for Janub al Batinah (OM-BJ)
๐ด๓ ฏ๓ ญ๓ บ๓ ต๓ ฟ Flag for Dhofar (OM-ZU)
๐ด๓ ฐ๓ ก๓ ต๓ ฟ Flag for Dariรฉn (PA-5)
๐ด๓ ฐ๓ ฅ๓ ฃ๓ ก๓ ฌ๓ ฟ Flag for El Callao (PE-CAL)
๐ด๓ ฐ๓ ก๓ ถ๓ ฟ Flag for Herrera (PA-6)
๐ด๓ ฐ๓ ก๓ ซ๓ น๓ ฟ Flag for Guna Yala (PA-KY)
๐ด๓ ฐ๓ ก๓ ฅ๓ ญ๓ ฟ Flag for Emberรก (PA-EM)
๐ด๓ ฐ๓ ฅ๓ ฌ๓ ก๓ ฌ๓ ฟ Flag for La Libertad (PE-LAL)
๐ด๓ ฐ๓ ก๓ น๓ ฟ Flag for Veraguas (PA-9)
๐ด๓ ฐ๓ ฅ๓ ฌ๓ ฏ๓ ฒ๓ ฟ Flag for Loreto (PE-LOR)
๐ด๓ ฐ๓ ฅ๓ ก๓ ญ๓ ก๓ ฟ Flag for Amazonas (PE-AMA)
๐ด๓ ฐ๓ ก๓ ด๓ ฟ Flag for Chiriquรญ (PA-4)
๐ด๓ ฐ๓ ง๓ ฃ๓ ฐ๓ ซ๓ ฟ Flag for Chimbu (PG-CPK)
๐ด๓ ฐ๓ ง๓ ฅ๓ จ๓ ง๓ ฟ Flag for Eastern Highlands (PG-EHG)
๐ด๓ ฐ๓ ฅ๓ ณ๓ ก๓ ญ๓ ฟ Flag for San Martรญn (PE-SAM)
๐ด๓ ฐ๓ ฅ๓ ช๓ ต๓ ฎ๓ ฟ Flag for Junรญn (PE-JUN)
๐ด๓ ฐ๓ ฅ๓ จ๓ ต๓ ฃ๓ ฟ Flag for Huรกnuco (PE-HUC)
๐ด๓ ฐ๓ ฅ๓ ฐ๓ ก๓ ณ๓ ฟ Flag for Pasco (PE-PAS)
๐ด๓ ฐ๓ ก๓ ฎ๓ ข๓ ฟ Flag for Ngรถbe-Buglรฉ (PA-NB)
๐ด๓ ฐ๓ ฅ๓ ฃ๓ ก๓ ช๓ ฟ Flag for Cajamarca (PE-CAJ)
๐ด๓ ฐ๓ ฅ๓ ฉ๓ ฃ๓ ก๓ ฟ Flag for Ica (PE-ICA)
๐ด๓ ฐ๓ ฅ๓ ฌ๓ ฉ๓ ญ๓ ฟ Flag for Lima Region (PE-LIM)
๐ด๓ ฐ๓ ฅ๓ ญ๓ ฏ๓ ฑ๓ ฟ Flag for Moquegua (PE-MOQ)
๐ด๓ ฐ๓ ฅ๓ ฐ๓ ต๓ ฎ๓ ฟ Flag for Puno (PE-PUN)
๐ด๓ ฐ๓ ฅ๓ ต๓ ฃ๓ ก๓ ฟ Flag for Ucayali (PE-UCA)
๐ด๓ ฐ๓ ฅ๓ ฌ๓ ญ๓ ก๓ ฟ Flag for Lima (PE-LMA)
๐ด๓ ฐ๓ ฅ๓ ฐ๓ ฉ๓ ต๓ ฟ Flag for Piura (PE-PIU)
๐ด๓ ฐ๓ ฅ๓ ด๓ ต๓ ญ๓ ฟ Flag for Tumbes (PE-TUM)
๐ด๓ ฐ๓ ฅ๓ ฃ๓ ต๓ ณ๓ ฟ Flag for Cusco (PE-CUS)
๐ด๓ ฐ๓ ก๓ ธ๓ ฟ Flag for Panamรก (PA-8)
๐ด๓ ฐ๓ ฅ๓ ด๓ ก๓ ฃ๓ ฟ Flag for Tacna (PE-TAC)
๐ด๓ ฐ๓ ง๓ ฃ๓ ฐ๓ ญ๓ ฟ Flag for Central (PG-CPM)
๐ด๓ ฐ๓ ก๓ ท๓ ฟ Flag for Los Santos (PA-7)
๐ด๓ ฐ๓ ฅ๓ ฌ๓ ก๓ ญ๓ ฟ Flag for Lambayeque (PE-LAM)
๐ด๓ ฐ๓ ฅ๓ จ๓ ต๓ ถ๓ ฟ Flag for Huancavelica (PE-HUV)
๐ด๓ ฐ๓ ฅ๓ ก๓ ฎ๓ ฃ๓ ฟ Flag for Ancash (PE-ANC)
๐ด๓ ฐ๓ ง๓ จ๓ ฌ๓ ก๓ ฟ Flag for Hela (PG-HLA)
๐ด๓ ฐ๓ ง๓ ฎ๓ ฃ๓ ค๓ ฟ Flag for Port Moresby (PG-NCD)
๐ด๓ ฐ๓ ซ๓ ฉ๓ ณ๓ ฟ Flag for Islamabad (PK-IS)
๐ด๓ ฐ๓ จ๓ ฐ๓ ฐ๓ ฟ Flag for Metro Manila (PH-00)
๐ด๓ ฐ๓ จ๓ ฐ๓ ต๓ ฟ Flag for Bicol (PH-05)
๐ด๓ ฐ๓ ง๓ ง๓ ฐ๓ ซ๓ ฟ Flag for Gulf (PG-GPK)
๐ด๓ ฐ๓ จ๓ ฐ๓ น๓ ฟ Flag for Zamboanga Peninsula (PH-09)
๐ด๓ ฐ๓ ง๓ ฎ๓ ณ๓ ข๓ ฟ Flag for Bougainville (PG-NSB)
๐ด๓ ฐ๓ ซ๓ ง๓ ข๓ ฟ Flag for Gilgit-Baltistan (PK-GB)
๐ด๓ ฐ๓ ง๓ ญ๓ ฐ๓ ญ๓ ฟ Flag for Madang (PG-MPM)
๐ด๓ ฆ๓ ช๓ ท๓ ฟ Flag for Western (FJ-W)
๐ด๓ ฐ๓ จ๓ ฑ๓ ฒ๓ ฟ Flag for Soccsksargen (PH-12)
๐ด๓ ฐ๓ จ๓ ฐ๓ ธ๓ ฟ Flag for Eastern Visayas (PH-08)
๐ด๓ ฐ๓ ง๓ ฅ๓ ฐ๓ ท๓ ฟ Flag for Enga (PG-EPW)
๐ด๓ ฐ๓ ง๓ ญ๓ ข๓ ก๓ ฟ Flag for Milne Bay (PG-MBA)
๐ด๓ ฐ๓ จ๓ ด๓ ฐ๓ ฟ Flag for Calabarzon (PH-40)
๐ด๓ ฐ๓ ง๓ ช๓ ท๓ ซ๓ ฟ Flag for Jiwaka (PG-JWK)
๐ด๓ ฐ๓ จ๓ ฐ๓ ฒ๓ ฟ Flag for Cagayan Valley (PH-02)
๐จ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ฐ๓ ง๓ ญ๓ ฐ๓ ฌ๓ ฟ Flag for Morobe (PG-MPL)
๐ด๓ ฐ๓ จ๓ ฑ๓ ฐ๓ ฟ Flag for Northern Mindanao (PH-10)
๐ด๓ ฐ๓ จ๓ ด๓ ฑ๓ ฟ Flag for Mimaropa (PH-41)
๐ด๓ ฐ๓ ซ๓ ข๓ ก๓ ฟ Flag for Balochistan (PK-BA)
๐ด๓ ฐ๓ จ๓ ฑ๓ ณ๓ ฟ Flag for Caraga (PH-13)
๐ด๓ ฐ๓ ง๓ ฅ๓ ณ๓ ท๓ ฟ Flag for East Sepik (PG-ESW)
๐ด๓ ฐ๓ จ๓ ฐ๓ ถ๓ ฟ Flag for Western Visayas (PH-06)
๐ด๓ ฐ๓ จ๓ ฐ๓ ณ๓ ฟ Flag for Central Luzon (PH-03)
๐ด๓ ฐ๓ จ๓ ฑ๓ ด๓ ฟ Flag for Muslim Mindanao (PH-14)
๐ด๓ ฐ๓ ง๓ ณ๓ จ๓ ญ๓ ฟ Flag for Southern Highlands (PG-SHM)
๐ด๓ ฐ๓ ง๓ ท๓ ฐ๓ ค๓ ฟ Flag for Western (PG-WPD)
๐ด๓ ฐ๓ ง๓ ณ๓ ก๓ ฎ๓ ฟ Flag for Sandaun (PG-SAN)
๐ด๓ ฐ๓ ง๓ ฎ๓ ฉ๓ ซ๓ ฟ Flag for New Ireland (PG-NIK)
๐ด๓ ฐ๓ ง๓ ฎ๓ ฐ๓ ฐ๓ ฟ Flag for Oro (PG-NPP)
๐ด๓ ฐ๓ ง๓ ญ๓ ฒ๓ ฌ๓ ฟ Flag for Manus (PG-MRL)
๐ด๓ ฐ๓ ง๓ ท๓ จ๓ ญ๓ ฟ Flag for Western Highlands (PG-WHM)
๐ด๓ ฐ๓ จ๓ ฑ๓ ฑ๓ ฟ Flag for Davao (PH-11)
๐ด๓ ฐ๓ ซ๓ ฐ๓ ข๓ ฟ Flag for Punjab (PK-PB)
๐ด๓ ฐ๓ ฌ๓ ฐ๓ ญ๓ ฟ Flag for Federal Capital Territory (PL-PM)
๐ด๓ ฐ๓ ฌ๓ ณ๓ ฌ๓ ฟ Flag for Silesia (PL-SL)
๐ด๓ ฐ๓ ฌ๓ ซ๓ ฐ๓ ฟ Flag for Kuyavian-Pomerania (PL-KP)
๐ด๓ ฐ๓ ณ๓ ด๓ ข๓ ณ๓ ฟ Flag for Tubas (PS-TBS)
๐ด๓ ฐ๓ ณ๓ ฒ๓ ข๓ จ๓ ฟ Flag for Ramallah and al-Bireh (PS-RBH)
๐ด๓ ฐ๓ ณ๓ ง๓ บ๓ ก๓ ฟ Flag for Gaza (PS-GZA)
๐ด๓ ฐ๓ ณ๓ ฒ๓ ฆ๓ จ๓ ฟ Flag for Rafah (PS-RFH)
๐ด๓ ฐ๓ ณ๓ จ๓ ข๓ ฎ๓ ฟ Flag for Hebron (PS-HBN)
๐ด๓ ฐ๓ ฌ๓ ฐ๓ ค๓ ฟ Flag for Podlaskie (PL-PD)
๐ด๓ ฐ๓ ฌ๓ ฐ๓ ซ๓ ฟ Flag for Subcarpathia (PL-PK)
๐ด๓ ฐ๓ ณ๓ ช๓ ฅ๓ ฎ๓ ฟ Flag for Jenin (PS-JEN)
๐ด๓ ฐ๓ ฌ๓ ค๓ ณ๓ ฟ Flag for Lower Silesian (PL-DS)
๐ด๓ ฐ๓ ณ๓ ซ๓ น๓ ณ๓ ฟ Flag for Khan Yunis (PS-KYS)
๐ด๓ ฐ๓ ฌ๓ ฌ๓ ค๓ ฟ Flag for ลรณdลบ (PL-LD)
๐ด๓ ฐ๓ ณ๓ ฎ๓ ง๓ บ๓ ฟ Flag for North Gaza (PS-NGZ)
๐ด๓ ฐ๓ ฌ๓ บ๓ ฐ๓ ฟ Flag for West Pomerania (PL-ZP)
๐ด๓ ฐ๓ ซ๓ ช๓ ซ๓ ฟ Flag for Azad Kashmir (PK-JK)
๐ด๓ ฐ๓ ณ๓ ณ๓ ฌ๓ ด๓ ฟ Flag for Salfit (PS-SLT)
๐ด๓ ฐ๓ ฌ๓ ญ๓ บ๓ ฟ Flag for Mazovia (PL-MZ)
๐ด๓ ฐ๓ ฌ๓ ญ๓ ก๓ ฟ Flag for Lesser Poland (PL-MA)
๐ด๓ ฐ๓ ณ๓ ฑ๓ ฑ๓ ก๓ ฟ Flag for Qalqilya (PS-QQA)
๐ด๓ ฐ๓ ด๓ ฐ๓ ฑ๓ ฟ Flag for Aveiro (PT-01)
๐ด๓ ฐ๓ ฌ๓ ท๓ ฐ๓ ฟ Flag for Greater Poland (PL-WP)
๐ด๓ ฐ๓ ฌ๓ ฏ๓ ฐ๓ ฟ Flag for Opole (PL-OP)
๐ด๓ ฐ๓ ณ๓ ข๓ ด๓ จ๓ ฟ Flag for Bethlehem (PS-BTH)
๐ด๓ ฐ๓ ซ๓ ซ๓ ฐ๓ ฟ Flag for Khyber Pakhtunkhwa (PK-KP)
๐ด๓ ฐ๓ ณ๓ ด๓ ซ๓ ญ๓ ฟ Flag for Tulkarm (PS-TKM)
๐ด๓ ฐ๓ ณ๓ ฎ๓ ข๓ ณ๓ ฟ Flag for Nablus (PS-NBS)
๐ด๓ ฐ๓ ฌ๓ ท๓ ฎ๓ ฟ Flag for Warmian-Masuria (PL-WN)
๐ด๓ ฐ๓ ณ๓ ช๓ ฒ๓ จ๓ ฟ Flag for Jericho (PS-JRH)
๐ด๓ ฐ๓ ซ๓ ณ๓ ค๓ ฟ Flag for Sindh (PK-SD)
๐ด๓ ฐ๓ ฌ๓ ฌ๓ ต๓ ฟ Flag for Lublin (PL-LU)
๐ด๓ ฐ๓ ณ๓ ช๓ ฅ๓ ญ๓ ฟ Flag for Jerusalem (PS-JEM)
๐ด๓ ฐ๓ ฌ๓ ฌ๓ ข๓ ฟ Flag for Lubusz (PL-LB)
๐ด๓ ฐ๓ ฌ๓ ณ๓ ซ๓ ฟ Flag for ลwiฤtokrzyskie (PL-SK)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฑ๓ ฒ๓ ฟ Flag for Melekeok (PW-212)
๐ด๓ ฐ๓ ด๓ ฐ๓ ธ๓ ฟ Flag for Faro (PT-08)
๐ด๓ ฐ๓ น๓ ฑ๓ ฑ๓ ฟ Flag for Central (PY-11)
๐ด๓ ฐ๓ ด๓ ฐ๓ ท๓ ฟ Flag for รvora (PT-07)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฒ๓ ธ๓ ฟ Flag for Ngiwal (PW-228)
๐ด๓ ฐ๓ น๓ ฑ๓ ฒ๓ ฟ Flag for รeembucรบ (PY-12)
๐ด๓ ฐ๓ ด๓ ฑ๓ ถ๓ ฟ Flag for Viana do Castelo (PT-16)
๐ด๓ ฐ๓ ด๓ ฑ๓ ฑ๓ ฟ Flag for Lisbon (PT-11)
๐ด๓ ฐ๓ น๓ ฑ๓ ต๓ ฟ Flag for Presidente Hayes (PY-15)
๐ด๓ ฐ๓ ด๓ ฑ๓ ท๓ ฟ Flag for Vila Real (PT-17)
๐ด๓ ฐ๓ ด๓ ฑ๓ ธ๓ ฟ Flag for Viseu (PT-18)
๐ด๓ ฐ๓ ท๓ ฐ๓ ฐ๓ ด๓ ฟ Flag for Airai (PW-004)
๐ด๓ ฐ๓ น๓ ฑ๓ ณ๓ ฟ Flag for Amambay (PY-13)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฒ๓ ด๓ ฟ Flag for Ngatpang (PW-224)
๐ด๓ ฐ๓ ด๓ ฐ๓ ถ๓ ฟ Flag for Coimbra (PT-06)
๐ด๓ ฐ๓ ด๓ ฑ๓ ฒ๓ ฟ Flag for Portalegre (PT-12)
๐ด๓ ฐ๓ ท๓ ณ๓ ต๓ ฐ๓ ฟ Flag for Peleliu (PW-350)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฒ๓ ฒ๓ ฟ Flag for Ngardmau (PW-222)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฑ๓ ด๓ ฟ Flag for Ngaraard (PW-214)
๐ด๓ ฐ๓ น๓ ฑ๓ ด๓ ฟ Flag for Canindeyรบ (PY-14)
๐ด๓ ฐ๓ ท๓ ฐ๓ ฑ๓ ฐ๓ ฟ Flag for Angaur (PW-010)
๐ด๓ ฐ๓ ท๓ ณ๓ ท๓ ฐ๓ ฟ Flag for Sonsorol (PW-370)
๐ด๓ ฐ๓ ด๓ ฐ๓ ด๓ ฟ Flag for Braganรงa (PT-04)
๐ด๓ ฐ๓ ด๓ ฐ๓ ต๓ ฟ Flag for Castelo Branco (PT-05)
๐ด๓ ฐ๓ ด๓ ฑ๓ ด๓ ฟ Flag for Santarรฉm (PT-14)
๐ด๓ ฐ๓ ด๓ ฐ๓ ณ๓ ฟ Flag for Braga (PT-03)
๐ด๓ ฐ๓ ท๓ ฐ๓ ต๓ ฐ๓ ฟ Flag for Hatohobei (PW-050)
๐ด๓ ฐ๓ ท๓ ฑ๓ ต๓ ฐ๓ ฟ Flag for Koror (PW-150)
๐ด๓ ฐ๓ น๓ ฑ๓ ฐ๓ ฟ Flag for Alto Paranรก (PY-10)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฒ๓ ท๓ ฟ Flag for Ngeremlengui (PW-227)
๐ด๓ ฐ๓ ด๓ ฑ๓ ฐ๓ ฟ Flag for Leiria (PT-10)
๐ด๓ ฐ๓ ด๓ ฑ๓ ณ๓ ฟ Flag for Porto (PT-13)
๐ด๓ ฐ๓ ด๓ ฑ๓ ต๓ ฟ Flag for Setรบbal (PT-15)
๐ด๓ ฐ๓ ท๓ ฐ๓ ฐ๓ ฒ๓ ฟ Flag for Aimeliik (PW-002)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฒ๓ ถ๓ ฟ Flag for Ngchesar (PW-226)
๐ด๓ ฐ๓ ด๓ ฐ๓ น๓ ฟ Flag for Guarda (PT-09)
๐ด๓ ฐ๓ น๓ ฒ๓ ฟ Flag for San Pedro (PY-2)
๐ด๓ ฐ๓ น๓ ต๓ ฟ Flag for Caaguazรบ (PY-5)
๐ด๓ ฐ๓ น๓ ด๓ ฟ Flag for Guairรก (PY-4)
๐ด๓ ฒ๓ ฏ๓ ข๓ ฃ๓ ฟ Flag for Bacฤu (RO-BC)
๐ด๓ ฐ๓ น๓ ท๓ ฟ Flag for Itapรบa (PY-7)
๐ด๓ ฒ๓ ฏ๓ ฃ๓ ณ๓ ฟ Flag for Caraศ-Severin (RO-CS)
๐ด๓ ฐ๓ น๓ ถ๓ ฟ Flag for Caazapรก (PY-6)
๐ด๓ ฑ๓ ก๓ ซ๓ จ๓ ฟ Flag for Al Khor (QA-KH)
๐ด๓ ฒ๓ ฏ๓ ฃ๓ ถ๓ ฟ Flag for Covasna (RO-CV)
๐ด๓ ฒ๓ ฏ๓ ก๓ ข๓ ฟ Flag for Alba (RO-AB)
๐ด๓ ฑ๓ ก๓ ค๓ ก๓ ฟ Flag for Doha (QA-DA)
๐ด๓ ฒ๓ ฏ๓ ค๓ ช๓ ฟ Flag for Dolj (RO-DJ)
๐ด๓ ฐ๓ น๓ ณ๓ ฟ Flag for Cordillera (PY-3)
๐ด๓ ฑ๓ ก๓ ญ๓ ณ๓ ฟ Flag for Madinat ash Shamal (QA-MS)
๐ด๓ ฒ๓ ฏ๓ ข๓ จ๓ ฟ Flag for Bihor (RO-BH)
๐ด๓ ฒ๓ ฏ๓ จ๓ ฒ๓ ฟ Flag for Harghita (RO-HR)
๐ด๓ ฒ๓ ฏ๓ ข๓ ฒ๓ ฟ Flag for Brฤila (RO-BR)
๐ด๓ ฒ๓ ฏ๓ ก๓ ง๓ ฟ Flag for Argeศ (RO-AG)
๐ด๓ ฑ๓ ก๓ บ๓ ก๓ ฟ Flag for Al Daayen (QA-ZA)
๐ด๓ ฒ๓ ฏ๓ ข๓ ฎ๓ ฟ Flag for Bistriลฃa-Nฤsฤud (RO-BN)
๐ด๓ ฒ๓ ฏ๓ ฃ๓ ฌ๓ ฟ Flag for Cฤlฤraศi (RO-CL)
๐ด๓ ฐ๓ น๓ ก๓ ณ๓ ต๓ ฟ Flag for Asunciรณn (PY-ASU)
๐ด๓ ฐ๓ น๓ ฑ๓ ฟ Flag for Concepciรณn (PY-1)
๐ด๓ ฒ๓ ฏ๓ ข๓ ด๓ ฟ Flag for Botoลani (RO-BT)
๐ด๓ ฒ๓ ฏ๓ ง๓ ฌ๓ ฟ Flag for Galaศi (RO-GL)
๐ด๓ ฒ๓ ฏ๓ ง๓ ฒ๓ ฟ Flag for Giurgiu (RO-GR)
๐ด๓ ฐ๓ น๓ ฑ๓ น๓ ฟ Flag for Boquerรณn (PY-19)
๐ด๓ ฐ๓ น๓ ธ๓ ฟ Flag for Misiones (PY-8)
๐ด๓ ฒ๓ ฏ๓ ข๓ ฟ Flag for Bucharest (RO-B)
๐ด๓ ฐ๓ น๓ น๓ ฟ Flag for Paraguarรญ (PY-9)
๐ด๓ ฑ๓ ก๓ ฒ๓ ก๓ ฟ Flag for Al Rayyan (QA-RA)
๐ด๓ ฒ๓ ฏ๓ ฃ๓ ด๓ ฟ Flag for Constanศa (RO-CT)
๐ด๓ ฒ๓ ฏ๓ จ๓ ค๓ ฟ Flag for Hunedoara (RO-HD)
๐ด๓ ฒ๓ ฏ๓ ค๓ ข๓ ฟ Flag for Dรขmboviศa (RO-DB)
๐ด๓ ฒ๓ ฏ๓ ก๓ ฒ๓ ฟ Flag for Arad (RO-AR)
๐ด๓ ฒ๓ ฏ๓ ฃ๓ ช๓ ฟ Flag for Cluj (RO-CJ)
๐ด๓ ฒ๓ ฏ๓ ข๓ บ๓ ฟ Flag for Buzฤu (RO-BZ)
๐ด๓ ฑ๓ ก๓ ท๓ ก๓ ฟ Flag for Al Wakrah (QA-WA)
๐ด๓ ฒ๓ ฏ๓ ถ๓ ฌ๓ ฟ Flag for Vรขlcea (RO-VL)
๐ด๓ ฒ๓ ฏ๓ ฉ๓ ณ๓ ฟ Flag for Iaศi (RO-IS)
๐ด๓ ฒ๓ ฏ๓ ญ๓ จ๓ ฟ Flag for Mehedinศi (RO-MH)
๐ด๓ ฒ๓ ณ๓ ซ๓ ญ๓ ฟ Flag for Kosovo-Metohija (RS-KM)
๐ด๓ ฒ๓ ฏ๓ ฉ๓ ฌ๓ ฟ Flag for Ialomiศa (RO-IL)
๐ด๓ ฒ๓ ฏ๓ ด๓ ฒ๓ ฟ Flag for Teleorman (RO-TR)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ฒ๓ ฟ Flag for ล umadija (RS-12)
๐ด๓ ฒ๓ ณ๓ ฒ๓ ฐ๓ ฟ Flag for Niลกava (RS-20)
๐ด๓ ฒ๓ ต๓ ก๓ ฌ๓ ฟ Flag for Altai (RU-AL)
๐ด๓ ฒ๓ ฏ๓ ถ๓ ฎ๓ ฟ Flag for Vrancea (RO-VN)
๐ด๓ ฒ๓ ฏ๓ ถ๓ ณ๓ ฟ Flag for Vaslui (RO-VS)
๐ด๓ ฒ๓ ฏ๓ ฉ๓ ฆ๓ ฟ Flag for Ilfov (RO-IF)
๐ด๓ ฒ๓ ณ๓ ฐ๓ ธ๓ ฟ Flag for Maฤva (RS-08)
๐ด๓ ฒ๓ ณ๓ ฐ๓ น๓ ฟ Flag for Kolubara (RS-09)
๐ด๓ ฒ๓ ฏ๓ ฐ๓ จ๓ ฟ Flag for Prahova (RO-PH)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ฑ๓ ฟ Flag for Braniฤevo (RS-11)
๐ด๓ ฒ๓ ณ๓ ฐ๓ ฐ๓ ฟ Flag for Beograd (RS-00)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ต๓ ฟ Flag for Zajeฤar (RS-15)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ท๓ ฟ Flag for Moravica (RS-17)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ณ๓ ฟ Flag for Pomoravlje (RS-13)
๐ด๓ ฒ๓ ฏ๓ ฏ๓ ด๓ ฟ Flag for Olt (RO-OT)
๐ด๓ ฒ๓ ฏ๓ ณ๓ ญ๓ ฟ Flag for Satu Mare (RO-SM)
๐ด๓ ฒ๓ ณ๓ ฒ๓ ฑ๓ ฟ Flag for Toplica (RS-21)
๐ด๓ ฒ๓ ฏ๓ ณ๓ ช๓ ฟ Flag for Sฤlaj (RO-SJ)
๐ด๓ ฒ๓ ฏ๓ ญ๓ ณ๓ ฟ Flag for Mureล (RO-MS)
๐ด๓ ฒ๓ ณ๓ ฒ๓ ฒ๓ ฟ Flag for Pirot (RS-22)
๐ด๓ ฒ๓ ณ๓ ฑ๓ น๓ ฟ Flag for Rasina (RS-19)
๐ด๓ ฒ๓ ณ๓ ฒ๓ ด๓ ฟ Flag for Pฤinja (RS-24)
๐ด๓ ฒ๓ ฏ๓ ญ๓ ญ๓ ฟ Flag for Maramureล (RO-MM)
๐ด๓ ฒ๓ ฏ๓ ณ๓ ถ๓ ฟ Flag for Suceava (RO-SV)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ธ๓ ฟ Flag for Raลกka (RS-18)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ด๓ ฟ Flag for Bor (RS-14)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ฐ๓ ฟ Flag for Podunavlje (RS-10)
๐ด๓ ฒ๓ ฏ๓ ฎ๓ ด๓ ฟ Flag for Neamลฃ (RO-NT)
๐ด๓ ฒ๓ ณ๓ ฑ๓ ถ๓ ฟ Flag for Zlatibor (RS-16)
๐ด๓ ฒ๓ ณ๓ ถ๓ ฏ๓ ฟ Flag for Vojvodina (RS-VO)
๐ด๓ ฒ๓ ณ๓ ฒ๓ ณ๓ ฟ Flag for Jablanica (RS-23)
๐ด๓ ฒ๓ ฏ๓ ด๓ ฌ๓ ฟ Flag for Tulcea (RO-TL)
๐ด๓ ฒ๓ ต๓ ก๓ ค๓ ฟ Flag for Adygea (RU-AD)
๐ด๓ ฒ๓ ฏ๓ ด๓ ญ๓ ฟ Flag for Timiศ (RO-TM)
๐ฉ๐ผโ๐ฆ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ฒ๓ ต๓ ซ๓ ฃ๓ ฟ Flag for Karachay-Cherkess (RU-KC)
๐ด๓ ฒ๓ ต๓ ซ๓ ซ๓ ฟ Flag for Khakassia (RU-KK)
๐ด๓ ฒ๓ ต๓ ข๓ ต๓ ฟ Flag for Buryat (RU-BU)
๐ด๓ ฒ๓ ต๓ ซ๓ ฌ๓ ฟ Flag for Kalmykia (RU-KL)
๐ด๓ ฒ๓ ต๓ ข๓ ฅ๓ ฌ๓ ฟ Flag for Belgorod (RU-BEL)
๐ด๓ ฒ๓ ต๓ ซ๓ จ๓ ญ๓ ฟ Flag for Khanty-Mansi (RU-KHM)
๐ด๓ ฒ๓ ต๓ ฌ๓ ฅ๓ ฎ๓ ฟ Flag for Leningrad (RU-LEN)
๐ด๓ ฒ๓ ต๓ ซ๓ ง๓ ฎ๓ ฟ Flag for Kurgan (RU-KGN)
๐ด๓ ฒ๓ ต๓ ฉ๓ ถ๓ ก๓ ฟ Flag for Ivanovo (RU-IVA)
๐ด๓ ฒ๓ ต๓ ฉ๓ ฎ๓ ฟ Flag for Ingushetia (RU-IN)
๐ด๓ ฒ๓ ต๓ ซ๓ ฉ๓ ฒ๓ ฟ Flag for Kirov (RU-KIR)
๐ด๓ ฒ๓ ต๓ ซ๓ ค๓ ก๓ ฟ Flag for Krasnodar Krai (RU-KDA)
๐ด๓ ฒ๓ ต๓ ซ๓ ฒ๓ ฟ Flag for Karelia (RU-KR)
๐ด๓ ฒ๓ ต๓ ญ๓ ก๓ ง๓ ฟ Flag for Magadan (RU-MAG)
๐ด๓ ฒ๓ ต๓ ซ๓ น๓ ก๓ ฟ Flag for Krasnoyarsk Krai (RU-KYA)
๐ด๓ ฒ๓ ต๓ ซ๓ ฅ๓ ญ๓ ฟ Flag for Kemerovo (RU-KEM)
๐ด๓ ฒ๓ ต๓ ก๓ ณ๓ ด๓ ฟ Flag for Astrakhan (RU-AST)
๐ด๓ ฒ๓ ต๓ ก๓ ญ๓ ต๓ ฟ Flag for Amur (RU-AMU)
๐ด๓ ฒ๓ ต๓ ญ๓ ฏ๓ ฟ Flag for Mordovia (RU-MO)
๐ด๓ ฒ๓ ต๓ ซ๓ ฏ๓ ฟ Flag for Komi (RU-KO)
๐ด๓ ฒ๓ ต๓ ฃ๓ จ๓ ฅ๓ ฟ Flag for Chelyabinsk (RU-CHE)
๐ด๓ ฒ๓ ต๓ ซ๓ จ๓ ก๓ ฟ Flag for Khabarovsk Krai (RU-KHA)
๐ด๓ ฒ๓ ต๓ ซ๓ ฒ๓ ณ๓ ฟ Flag for Kursk (RU-KRS)
๐ด๓ ฒ๓ ต๓ ญ๓ ฅ๓ ฟ Flag for Mari El (RU-ME)
๐ด๓ ฒ๓ ต๓ ฃ๓ จ๓ ต๓ ฟ Flag for Chukotka Okrug (RU-CHU)
๐ด๓ ฒ๓ ต๓ ซ๓ ง๓ ค๓ ฟ Flag for Kaliningrad (RU-KGD)
๐ด๓ ฒ๓ ต๓ ฉ๓ ฒ๓ ซ๓ ฟ Flag for Irkutsk (RU-IRK)
๐ด๓ ฒ๓ ต๓ ซ๓ ฌ๓ ต๓ ฟ Flag for Kaluga (RU-KLU)
๐ด๓ ฒ๓ ต๓ ซ๓ ข๓ ฟ Flag for Kabardino-Balkar (RU-KB)
๐ด๓ ฒ๓ ต๓ ฌ๓ ฉ๓ ฐ๓ ฟ Flag for Lipetsk (RU-LIP)
๐ด๓ ฒ๓ ต๓ ข๓ ก๓ ฟ Flag for Bashkortostan (RU-BA)
๐ด๓ ฒ๓ ต๓ ฃ๓ ต๓ ฟ Flag for Chuvash (RU-CU)
๐ด๓ ฒ๓ ต๓ ซ๓ ก๓ ญ๓ ฟ Flag for Kamchatka Krai (RU-KAM)
๐ด๓ ฒ๓ ต๓ ซ๓ ฏ๓ ณ๓ ฟ Flag for Kostroma (RU-KOS)
๐ด๓ ฒ๓ ต๓ ณ๓ ก๓ ซ๓ ฟ Flag for Sakhalin (RU-SAK)
๐ด๓ ฒ๓ ต๓ ด๓ ถ๓ ฅ๓ ฟ Flag for Tver (RU-TVE)
๐ด๓ ฒ๓ ต๓ ฎ๓ ถ๓ ณ๓ ฟ Flag for Novosibirsk (RU-NVS)
๐ด๓ ฒ๓ ต๓ ถ๓ ฌ๓ ก๓ ฟ Flag for Vladimir (RU-VLA)
๐ด๓ ฒ๓ ต๓ ฏ๓ ฒ๓ ฌ๓ ฟ Flag for Oryol (RU-ORL)
๐ด๓ ฒ๓ ต๓ ณ๓ ด๓ ก๓ ฟ Flag for Stavropol Krai (RU-STA)
๐ด๓ ฒ๓ ต๓ ฎ๓ ฉ๓ บ๓ ฟ Flag for Nizhny Novgorod (RU-NIZ)
๐ด๓ ฒ๓ ต๓ ณ๓ ก๓ ฒ๓ ฟ Flag for Saratov (RU-SAR)
๐ด๓ ฒ๓ ต๓ ฏ๓ ฒ๓ ฅ๓ ฟ Flag for Orenburg (RU-ORE)
๐ด๓ ฒ๓ ต๓ ฎ๓ ฅ๓ ฎ๓ ฟ Flag for Nenets (RU-NEN)
๐ด๓ ฒ๓ ต๓ ถ๓ ง๓ ง๓ ฟ Flag for Volgograd (RU-VGG)
๐ด๓ ฒ๓ ต๓ ด๓ ฏ๓ ญ๓ ฟ Flag for Tomsk (RU-TOM)
๐ด๓ ฒ๓ ต๓ ณ๓ ถ๓ ฅ๓ ฟ Flag for Sverdlovsk (RU-SVE)
๐ด๓ ฒ๓ ต๓ ณ๓ ฐ๓ ฅ๓ ฟ Flag for Saint Petersburg (RU-SPE)
๐ด๓ ฒ๓ ต๓ น๓ ก๓ ฎ๓ ฟ Flag for Yamalo-Nenets Okrug (RU-YAN)
๐ด๓ ฒ๓ ต๓ ณ๓ ก๓ ฟ Flag for Sakha (RU-SA)
๐ด๓ ฒ๓ ต๓ ญ๓ ฏ๓ ท๓ ฟ Flag for Moscow (RU-MOW)
๐ด๓ ฒ๓ ต๓ ฐ๓ ฎ๓ บ๓ ฟ Flag for Penza (RU-PNZ)
๐ด๓ ฒ๓ ต๓ ณ๓ ญ๓ ฏ๓ ฟ Flag for Smolensk (RU-SMO)
๐ด๓ ฒ๓ ต๓ ด๓ ก๓ ฟ Flag for Tatarstan (RU-TA)
๐ด๓ ฒ๓ ต๓ ถ๓ ฌ๓ ง๓ ฟ Flag for Vologda (RU-VLG)
๐ด๓ ฒ๓ ต๓ ด๓ ต๓ ฌ๓ ฟ Flag for Tula (RU-TUL)
๐ด๓ ฒ๓ ต๓ น๓ ก๓ ฒ๓ ฟ Flag for Yaroslavl (RU-YAR)
๐ด๓ ฒ๓ ต๓ ด๓ น๓ ต๓ ฟ Flag for Tyumen (RU-TYU)
๐ด๓ ฒ๓ ต๓ ฐ๓ ณ๓ ซ๓ ฟ Flag for Pskov (RU-PSK)
๐ด๓ ฒ๓ ต๓ ต๓ ค๓ ฟ Flag for Udmurt (RU-UD)
๐ด๓ ฒ๓ ต๓ ณ๓ ก๓ ญ๓ ฟ Flag for Samara (RU-SAM)
๐ด๓ ฒ๓ ต๓ ต๓ ฌ๓ น๓ ฟ Flag for Ulyanovsk (RU-ULY)
๐ด๓ ฒ๓ ต๓ ฒ๓ น๓ ก๓ ฟ Flag for Ryazan (RU-RYA)
๐ด๓ ฒ๓ ต๓ ฏ๓ ญ๓ ณ๓ ฟ Flag for Omsk (RU-OMS)
๐ด๓ ฒ๓ ต๓ ฐ๓ ฅ๓ ฒ๓ ฟ Flag for Perm Krai (RU-PER)
๐ด๓ ฒ๓ ต๓ ถ๓ ฏ๓ ฒ๓ ฟ Flag for Voronezh (RU-VOR)
๐ด๓ ฒ๓ ต๓ ฎ๓ ง๓ ฒ๓ ฟ Flag for Novgorod (RU-NGR)
๐ด๓ ฒ๓ ต๓ ด๓ ก๓ ญ๓ ฟ Flag for Tambov (RU-TAM)
๐ด๓ ฒ๓ ต๓ ด๓ น๓ ฟ Flag for Tuva (RU-TY)
๐ด๓ ฒ๓ ต๓ ฒ๓ ฏ๓ ณ๓ ฟ Flag for Rostov (RU-ROS)
๐ด๓ ฒ๓ ต๓ ญ๓ ต๓ ฒ๓ ฟ Flag for Murmansk (RU-MUR)
๐ด๓ ฒ๓ ท๓ ฐ๓ ฑ๓ ฟ Flag for Kigali (RW-01)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ณ๓ ฟ Flag for Anse Etoile (SC-03)
๐ด๓ ณ๓ ข๓ ฉ๓ ณ๓ ฟ Flag for Isabel (SB-IS)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ฒ๓ ฟ Flag for Anse Boileau (SC-02)
๐ด๓ ณ๓ ก๓ ฐ๓ ท๓ ฟ Flag for Tabuk (SA-07)
๐ด๓ ณ๓ ข๓ ง๓ ต๓ ฟ Flag for Guadalcanal (SB-GU)
๐ด๓ ฒ๓ ท๓ ฐ๓ ณ๓ ฟ Flag for Northern (RW-03)
๐ด๓ ฒ๓ ท๓ ฐ๓ ต๓ ฟ Flag for Southern (RW-05)
๐ด๓ ณ๓ ข๓ ฃ๓ ฅ๓ ฟ Flag for Central (SB-CE)
๐ด๓ ณ๓ ก๓ ฐ๓ ถ๓ ฟ Flag for Haโil (SA-06)
๐ด๓ ณ๓ ฃ๓ ฐ๓ น๓ ฟ Flag for Bel Air (SC-09)
๐ด๓ ณ๓ ข๓ ญ๓ ฌ๓ ฟ Flag for Malaita (SB-ML)
๐ด๓ ณ๓ ก๓ ฑ๓ ฐ๓ ฟ Flag for Najran (SA-10)
๐ด๓ ณ๓ ก๓ ฑ๓ ฒ๓ ฟ Flag for Al Jawf (SA-12)
๐ด๓ ณ๓ ข๓ ฃ๓ ด๓ ฟ Flag for Honiara (SB-CT)
๐ด๓ ณ๓ ข๓ ท๓ ฅ๓ ฟ Flag for Western (SB-WE)
๐ด๓ ณ๓ ก๓ ฐ๓ ธ๓ ฟ Flag for Northern Borders (SA-08)
๐ด๓ ณ๓ ก๓ ฐ๓ ฑ๓ ฟ Flag for Riyadh (SA-01)
๐ด๓ ณ๓ ข๓ ฒ๓ ข๓ ฟ Flag for Rennell and Bellona (SB-RB)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ด๓ ฟ Flag for Au Cap (SC-04)
๐ด๓ ฒ๓ ท๓ ฐ๓ ฒ๓ ฟ Flag for Eastern (RW-02)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ต๓ ฟ Flag for Anse Royale (SC-05)
๐ด๓ ฒ๓ ต๓ น๓ ฅ๓ ถ๓ ฟ Flag for Jewish (RU-YEV)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ฐ๓ ฟ Flag for Bel Ombre (SC-10)
๐ด๓ ณ๓ ก๓ ฐ๓ ต๓ ฟ Flag for Al-Qassim (SA-05)
๐ด๓ ณ๓ ข๓ ด๓ ฅ๓ ฟ Flag for Temotu (SB-TE)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ท๓ ฟ Flag for Baie Sainte Anne (SC-07)
๐ด๓ ณ๓ ข๓ ฃ๓ จ๓ ฟ Flag for Choiseul (SB-CH)
๐ด๓ ฒ๓ ท๓ ฐ๓ ด๓ ฟ Flag for Western (RW-04)
๐ด๓ ณ๓ ข๓ ญ๓ ซ๓ ฟ Flag for Makira-Ulawa (SB-MK)
๐ด๓ ณ๓ ก๓ ฐ๓ ฒ๓ ฟ Flag for Makkah (SA-02)
๐ด๓ ณ๓ ก๓ ฐ๓ น๓ ฟ Flag for Jizan (SA-09)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ฑ๓ ฟ Flag for Anse aux Pins (SC-01)
๐ด๓ ณ๓ ก๓ ฐ๓ ด๓ ฟ Flag for Eastern (SA-04)
๐ด๓ ณ๓ ก๓ ฑ๓ ด๓ ฟ Flag for Asir (SA-14)
๐ด๓ ฒ๓ ต๓ บ๓ ก๓ ข๓ ฟ Flag for Zabaykalsky Krai (RU-ZAB)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ธ๓ ฟ Flag for Beau Vallon (SC-08)
๐ด๓ ณ๓ ก๓ ฐ๓ ณ๓ ฟ Flag for Al Madinah (SA-03)
๐ด๓ ณ๓ ฃ๓ ฐ๓ ถ๓ ฟ Flag for Baie Lazare (SC-06)
๐ด๓ ณ๓ ฃ๓ ฑ๓ น๓ ฟ Flag for Plaisance (SC-19)
๐ด๓ ณ๓ ฅ๓ ค๓ ฟ Flag for Sรถdermanland (SE-D)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ถ๓ ฟ Flag for La Riviรจre Anglaise (SC-16)
๐ด๓ ณ๓ ฃ๓ ฒ๓ ฒ๓ ฟ Flag for Saint Louis (SC-22)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ธ๓ ฟ Flag for Mont Fleuri (SC-18)
๐ด๓ ณ๓ ค๓ ฎ๓ ฏ๓ ฟ Flag for Northern (SD-NO)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ณ๓ ฟ Flag for GrandโAnse Mahรฉ (SC-13)
๐ด๓ ณ๓ ฃ๓ ฒ๓ ณ๓ ฟ Flag for Takamaka (SC-23)
๐ด๓ ณ๓ ค๓ ค๓ ท๓ ฟ Flag for West Darfur (SD-DW)
๐ด๓ ณ๓ ค๓ ง๓ ค๓ ฟ Flag for Al Qadarif (SD-GD)
๐ด๓ ณ๓ ค๓ ค๓ ณ๓ ฟ Flag for South Darfur (SD-DS)
๐ด๓ ณ๓ ค๓ ฎ๓ ฒ๓ ฟ Flag for River Nile (SD-NR)
๐ด๓ ณ๓ ค๓ ง๓ ซ๓ ฟ Flag for West Kurdufan (SD-GK)
๐ด๓ ณ๓ ค๓ ซ๓ ก๓ ฟ Flag for Kassala (SD-KA)
๐ด๓ ณ๓ ค๓ ซ๓ จ๓ ฟ Flag for Khartoum (SD-KH)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ต๓ ฟ Flag for La Digue (SC-15)
๐ด๓ ณ๓ ฃ๓ ฒ๓ ด๓ ฟ Flag for Les Mamelles (SC-24)
๐ด๓ ณ๓ ฃ๓ ฒ๓ ฑ๓ ฟ Flag for Port Glaud (SC-21)
๐ด๓ ณ๓ ฅ๓ ก๓ ฃ๓ ฟ Flag for Vรคsterbotten (SE-AC)
๐ด๓ ณ๓ ฅ๓ ฆ๓ ฟ Flag for Jรถnkรถping (SE-F)
๐ด๓ ณ๓ ฅ๓ ก๓ ข๓ ฟ Flag for Stockholm (SE-AB)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ฒ๓ ฟ Flag for Glacis (SC-12)
๐ด๓ ณ๓ ฃ๓ ฒ๓ ฐ๓ ฟ Flag for Pointe La Rue (SC-20)
๐ด๓ ณ๓ ค๓ ฎ๓ ท๓ ฟ Flag for White Nile (SD-NW)
๐ด๓ ณ๓ ค๓ ง๓ บ๓ ฟ Flag for Al Jazirah (SD-GZ)
๐ด๓ ณ๓ ฅ๓ ฅ๓ ฟ Flag for รstergรถtland (SE-E)
๐ด๓ ณ๓ ฅ๓ ข๓ ค๓ ฟ Flag for Norrbotten (SE-BD)
๐ด๓ ณ๓ ฅ๓ ฃ๓ ฟ Flag for Uppsala (SE-C)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ท๓ ฟ Flag for Mont Buxton (SC-17)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ด๓ ฟ Flag for GrandโAnse Praslin (SC-14)
๐ด๓ ณ๓ ค๓ ซ๓ ณ๓ ฟ Flag for South Kurdufan (SD-KS)
๐ด๓ ณ๓ ฃ๓ ฑ๓ ฑ๓ ฟ Flag for Cascade (SC-11)
๐ด๓ ณ๓ ค๓ ซ๓ ฎ๓ ฟ Flag for North Kurdufan (SD-KN)
๐ด๓ ณ๓ ค๓ ณ๓ ฉ๓ ฟ Flag for Sennar (SD-SI)
๐ด๓ ณ๓ ค๓ ค๓ ฅ๓ ฟ Flag for East Darfur (SD-DE)
๐ด๓ ณ๓ ค๓ ฎ๓ ข๓ ฟ Flag for Blue Nile (SD-NB)
๐ด๓ ณ๓ ค๓ ค๓ ฎ๓ ฟ Flag for North Darfur (SD-DN)
๐ด๓ ณ๓ ค๓ ค๓ ฃ๓ ฟ Flag for Central Darfur (SD-DC)
๐ด๓ ณ๓ ฅ๓ ต๓ ฟ Flag for Vรคstmanland (SE-U)
๐ด๓ ณ๓ ฅ๓ ณ๓ ฟ Flag for Vรคrmland (SE-S)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ท๓ ฟ Flag for ฤrnomelj (SI-017)
๐ด๓ ณ๓ ฅ๓ น๓ ฟ Flag for Vรคsternorrland (SE-Y)
๐ด๓ ณ๓ ง๓ ฐ๓ ต๓ ฟ Flag for South West (SG-05)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ถ๓ ฟ Flag for ฤrna na Koroลกkem (SI-016)
๐ด๓ ณ๓ ฅ๓ ฏ๓ ฟ Flag for Vรคstra Gรถtaland (SE-O)
๐ด๓ ณ๓ ฅ๓ ธ๓ ฟ Flag for Gรคvleborg (SE-X)
๐ด๓ ณ๓ ง๓ ฐ๓ ฒ๓ ฟ Flag for North East (SG-02)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ท๓ ฟ Flag for Brda (SI-007)
๐ด๓ ณ๓ ฅ๓ จ๓ ฟ Flag for Kalmar (SE-H)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ธ๓ ฟ Flag for Destrnik (SI-018)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ฒ๓ ฟ Flag for Beltinci (SI-002)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ด๓ ฟ Flag for Bohinj (SI-004)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ น๓ ฟ Flag for Breลพice (SI-009)
๐ด๓ ณ๓ ง๓ ฐ๓ ณ๓ ฟ Flag for North West (SG-03)
๐ด๓ ณ๓ จ๓ ก๓ ฃ๓ ฟ Flag for Ascension Island (SH-AC)
๐ฉ๐ฝโ๐ฆ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ฒ๓ ฟ Flag for Cerklje na Gorenjskem (SI-012)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ณ๓ ฟ Flag for Cerknica (SI-013)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ถ๓ ฟ Flag for Bovec (SI-006)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ต๓ ฟ Flag for ฤrenลกovci (SI-015)
๐ด๓ ณ๓ ฅ๓ ง๓ ฟ Flag for Kronoberg (SE-G)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ฑ๓ ฟ Flag for Ajdovลกฤina (SI-001)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ฐ๓ ฟ Flag for Tiลกina (SI-010)
๐ด๓ ณ๓ ง๓ ฐ๓ ด๓ ฟ Flag for South East (SG-04)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ธ๓ ฟ Flag for Brezovica (SI-008)
๐ด๓ ณ๓ จ๓ จ๓ ฌ๓ ฟ Flag for Saint Helena (SH-HL)
๐ด๓ ณ๓ ฅ๓ บ๓ ฟ Flag for Jรคmtland (SE-Z)
๐ด๓ ณ๓ ฅ๓ ฉ๓ ฟ Flag for Gotland (SE-I)
๐ด๓ ณ๓ ฅ๓ ท๓ ฟ Flag for Dalarna (SE-W)
๐ด๓ ณ๓ ฅ๓ ซ๓ ฟ Flag for Blekinge (SE-K)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ต๓ ฟ Flag for Borovnica (SI-005)
๐ด๓ ณ๓ จ๓ ด๓ ก๓ ฟ Flag for Tristan da Cunha (SH-TA)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฐ๓ ณ๓ ฟ Flag for Bled (SI-003)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ด๓ ฟ Flag for Cerkno (SI-014)
๐ด๓ ณ๓ ฅ๓ ด๓ ฟ Flag for รrebro (SE-T)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ณ๓ ฟ Flag for Domลพale (SI-023)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ฐ๓ ฟ Flag for Izola (SI-040)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ถ๓ ฟ Flag for Kuzma (SI-056)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ต๓ ฟ Flag for Dravograd (SI-025)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ถ๓ ฟ Flag for Duplek (SI-026)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ฑ๓ ฟ Flag for Jesenice (SI-041)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ธ๓ ฟ Flag for Goriลกnica (SI-028)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ น๓ ฟ Flag for Gornja Radgona (SI-029)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ฐ๓ ฟ Flag for Dobrepolje (SI-020)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ฑ๓ ฟ Flag for Gornji Petrovci (SI-031)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ด๓ ฟ Flag for Dornava (SI-024)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ด๓ ฟ Flag for Hrastnik (SI-034)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ น๓ ฟ Flag for Ivanฤna Gorica (SI-039)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ น๓ ฟ Flag for Komen (SI-049)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ฑ๓ ฟ Flag for Kozje (SI-051)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ น๓ ฟ Flag for Divaฤa (SI-019)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ถ๓ ฟ Flag for Idrija (SI-036)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ต๓ ฟ Flag for Kidriฤevo (SI-045)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ถ๓ ฟ Flag for Kobarid (SI-046)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ท๓ ฟ Flag for Kobilje (SI-047)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ฐ๓ ฟ Flag for Koper (SI-050)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ท๓ ฟ Flag for Ig (SI-037)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ต๓ ฟ Flag for Kungota (SI-055)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ฒ๓ ฟ Flag for Grosuplje (SI-032)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ฑ๓ ฟ Flag for DobrovaโPolhov Gradec (SI-021)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ฒ๓ ฟ Flag for Jurลกinci (SI-042)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ด๓ ฟ Flag for Krลกko (SI-054)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ณ๓ ฟ Flag for ล alovci (SI-033)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ณ๓ ฟ Flag for Kranjska Gora (SI-053)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ธ๓ ฟ Flag for Koฤevje (SI-048)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ธ๓ ฟ Flag for Ilirska Bistrica (SI-038)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ณ๓ ฟ Flag for Kamnik (SI-043)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ต๓ ฟ Flag for HrpeljeโKozina (SI-035)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ณ๓ ฐ๓ ฟ Flag for Gornji Grad (SI-030)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ด๓ ด๓ ฟ Flag for Kanal (SI-044)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ฒ๓ ฟ Flag for Dol pri Ljubljani (SI-022)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ น๓ ฟ Flag for Pesnica (SI-089)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ฐ๓ ฟ Flag for Piran (SI-090)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ด๓ ฟ Flag for Meลพica (SI-074)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ฑ๓ ฟ Flag for Muta (SI-081)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ฒ๓ ฟ Flag for Ljubno (SI-062)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ท๓ ฟ Flag for Ormoลพ (SI-087)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ด๓ ฟ Flag for Postojna (SI-094)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ถ๓ ฟ Flag for Mislinja (SI-076)
๐ฉ๐พโ๐ฆ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ น๓ ฟ Flag for Majลกperk (SI-069)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ฒ๓ ฟ Flag for Mengeลก (SI-072)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ณ๓ ฟ Flag for Metlika (SI-073)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ท๓ ฟ Flag for Moravฤe (SI-077)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ธ๓ ฟ Flag for Moravske Toplice (SI-078)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ฑ๓ ฟ Flag for Ljubljana (SI-061)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ฐ๓ ฟ Flag for Murska Sobota (SI-080)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ฒ๓ ฟ Flag for Naklo (SI-082)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ด๓ ฟ Flag for Nova Gorica (SI-084)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ธ๓ ฟ Flag for Osilnica (SI-088)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ฑ๓ ฟ Flag for Pivka (SI-091)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ณ๓ ฟ Flag for Nazarje (SI-083)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ต๓ ฟ Flag for MirenโKostanjevica (SI-075)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ด๓ ฟ Flag for Logatec (SI-064)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ฐ๓ ฟ Flag for Litija (SI-060)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ฐ๓ ฟ Flag for Maribor (SI-070)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ณ๓ ฟ Flag for Ljutomer (SI-063)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ถ๓ ฟ Flag for Loลกki Potok (SI-066)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ท๓ ฟ Flag for Luฤe (SI-067)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ฒ๓ ฟ Flag for Podฤetrtek (SI-092)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ณ๓ ฟ Flag for Podvelka (SI-093)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ ฑ๓ ฟ Flag for Medvode (SI-071)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ต๓ ฟ Flag for Loลกka Dolina (SI-065)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ท๓ ฟ Flag for Laลกko (SI-057)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ น๓ ฟ Flag for Lendava (SI-059)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ท๓ น๓ ฟ Flag for Mozirje (SI-079)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ถ๓ ธ๓ ฟ Flag for Lukovica (SI-068)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ฑ๓ ฟ Flag for Trลพiฤ (SI-131)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ธ๓ ฟ Flag for ล entilj (SI-118)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ธ๓ ฟ Flag for RaฤeโFram (SI-098)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ท๓ ฟ Flag for Puconci (SI-097)
๐ฉ๐ฟโ๐ฆ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ต๓ ฟ Flag for Rogaลกovci (SI-105)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ณ๓ ฟ Flag for Slovenska Bistrica (SI-113)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ท๓ ฟ Flag for Rogatec (SI-107)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ถ๓ ฟ Flag for Ptuj (SI-096)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ น๓ ฟ Flag for ล entjernej (SI-119)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ฑ๓ ฟ Flag for Seลพana (SI-111)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ณ๓ ฟ Flag for ล kofljica (SI-123)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ฒ๓ ฟ Flag for Slovenj Gradec (SI-112)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ต๓ ฟ Flag for Starลกe (SI-115)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ถ๓ ฟ Flag for Sveti Jurij (SI-116)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ฐ๓ ฟ Flag for Trebnje (SI-130)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ฐ๓ ฟ Flag for Sevnica (SI-110)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ น๓ ฟ Flag for Radeฤe (SI-099)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ฑ๓ ฟ Flag for ล kocjan (SI-121)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ด๓ ฟ Flag for ล marje pri Jelลกah (SI-124)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ถ๓ ฟ Flag for ล oลกtanj (SI-126)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ท๓ ฟ Flag for ล tore (SI-127)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ถ๓ ฟ Flag for Rogaลกka Slatina (SI-106)
๐ด๓ ณ๓ ฉ๓ ฐ๓ น๓ ต๓ ฟ Flag for Preddvor (SI-095)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ฒ๓ ฟ Flag for Turniลกฤe (SI-132)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ฒ๓ ฟ Flag for Radovljica (SI-102)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ธ๓ ฟ Flag for Ruลกe (SI-108)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ด๓ ฟ Flag for Slovenske Konjice (SI-114)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ฐ๓ ฟ Flag for ล entjur (SI-120)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ธ๓ ฟ Flag for Tolmin (SI-128)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ด๓ ฟ Flag for Ribnica (SI-104)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ฑ๓ ฟ Flag for Radlje ob Dravi (SI-101)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ น๓ ฟ Flag for Trbovlje (SI-129)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ น๓ ฟ Flag for Semiฤ (SI-109)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฑ๓ ท๓ ฟ Flag for ล enฤur (SI-117)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ณ๓ ฟ Flag for Ravne na Koroลกkem (SI-103)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ น๓ ฟ Flag for Miklavลพ na Dravskem Polju (SI-169)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ธ๓ ฟ Flag for Vodice (SI-138)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ณ๓ ฟ Flag for Velenje (SI-133)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ฒ๓ ฟ Flag for Zagorje ob Savi (SI-142)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ฑ๓ ฟ Flag for Vuzenica (SI-141)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ฐ๓ ฟ Flag for Vrhnika (SI-140)
๐ฉ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ถ๓ ฟ Flag for ลฝelezniki (SI-146)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ท๓ ฟ Flag for ลฝiri (SI-147)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ธ๓ ฟ Flag for Benedikt (SI-148)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ด๓ ฟ Flag for Velike Laลกฤe (SI-134)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ท๓ ฟ Flag for Vitanje (SI-137)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ด๓ ฟ Flag for Komenda (SI-164)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ต๓ ฟ Flag for Dobrna (SI-155)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ถ๓ ฟ Flag for Dobrovnik (SI-156)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ท๓ ฟ Flag for Dolenjske Toplice (SI-157)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ น๓ ฟ Flag for Hajdina (SI-159)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ฑ๓ ฟ Flag for Oplotnica (SI-171)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ต๓ ฟ Flag for Videm (SI-135)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ณ๓ ฟ Flag for Jezersko (SI-163)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ฒ๓ ฟ Flag for Cankova (SI-152)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ต๓ ฟ Flag for Kostel (SI-165)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ถ๓ ฟ Flag for Kriลพevci (SI-166)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ น๓ ฟ Flag for Vojnik (SI-139)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ธ๓ ฟ Flag for Markovci (SI-168)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ฐ๓ ฟ Flag for Mirna Peฤ (SI-170)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ณ๓ ถ๓ ฟ Flag for Vipava (SI-136)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ฒ๓ ฟ Flag for Horjul (SI-162)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ณ๓ ฟ Flag for Cerkvenjak (SI-153)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ฐ๓ ฟ Flag for Bloke (SI-150)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ณ๓ ฟ Flag for Zavrฤ (SI-143)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ น๓ ฟ Flag for Bistrica ob Sotli (SI-149)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ด๓ ด๓ ฟ Flag for Zreฤe (SI-144)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ฑ๓ ฟ Flag for Hodoลก (SI-161)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ฐ๓ ฟ Flag for HoฤeโSlivnica (SI-160)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ธ๓ ฟ Flag for Grad (SI-158)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ฒ๓ ฟ Flag for Podlehnik (SI-172)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ถ๓ ฟ Flag for Cirkulane (SI-196)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ด๓ ฟ Flag for Prebold (SI-174)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ถ๓ ฟ Flag for Razkriลพje (SI-176)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ธ๓ ฟ Flag for Verลพej (SI-188)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ฐ๓ ฟ Flag for ลฝalec (SI-190)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ฐ๓ ฟ Flag for Solฤava (SI-180)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ฑ๓ ฟ Flag for Sveta Ana (SI-181)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ณ๓ ฟ Flag for ล empeterโVrtojba (SI-183)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ต๓ ฟ Flag for Trnovska Vas (SI-185)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ น๓ ฟ Flag for Sodraลพica (SI-179)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ธ๓ ฟ Flag for Makole (SI-198)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ณ๓ ฟ Flag for Straลพa (SI-203)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ธ๓ ฟ Flag for Selnica ob Dravi (SI-178)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ณ๓ ฟ Flag for ลฝuลพemberk (SI-193)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ท๓ ฟ Flag for Kostanjevica na Krki (SI-197)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ต๓ ฟ Flag for Prevalje (SI-175)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ด๓ ฟ Flag for ล martno pri Litiji (SI-194)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ฑ๓ ฟ Flag for ลฝetale (SI-191)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ น๓ ฟ Flag for Vransko (SI-189)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ฑ๓ ฟ Flag for RenฤeโVogrsko (SI-201)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for Srediลกฤe ob Dravi (SI-202)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ถ๓ ฟ Flag for Trzin (SI-186)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ด๓ ฟ Flag for Sveta Trojica v Slovenskih Goricah (SI-204)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ต๓ ฟ Flag for Sveti Tomaลพ (SI-205)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ท๓ ฟ Flag for Ribnica na Pohorju (SI-177)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ท๓ ฟ Flag for Gorje (SI-207)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ด๓ ฟ Flag for Tabor (SI-184)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ น๓ ฟ Flag for MokronogโTrebelno (SI-199)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ท๓ ณ๓ ฟ Flag for Polzela (SI-173)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ฐ๓ ฟ Flag for Poljฤane (SI-200)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ต๓ ฟ Flag for Apaฤe (SI-195)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ท๓ ฟ Flag for Velika Polana (SI-187)
๐ด๓ ณ๓ ซ๓ ด๓ ก๓ ฟ Flag for Trnava (SK-TA)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ น๓ ฟ Flag for Reฤica ob Savinji (SI-209)
๐ด๓ ณ๓ ญ๓ ฐ๓ น๓ ฟ Flag for Serravalle (SM-09)
๐ด๓ ณ๓ ญ๓ ฐ๓ ฒ๓ ฟ Flag for Chiesanuova (SM-02)
๐ด๓ ณ๓ ฎ๓ ซ๓ ก๓ ฟ Flag for Kaffrine (SN-KA)
๐ด๓ ณ๓ ซ๓ ฎ๓ ฉ๓ ฟ Flag for Nitra (SK-NI)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฑ๓ ฑ๓ ฟ Flag for ล entrupert (SI-211)
๐ด๓ ณ๓ ญ๓ ฐ๓ ถ๓ ฟ Flag for Borgo Maggiore (SM-06)
๐ด๓ ณ๓ ซ๓ ซ๓ ฉ๓ ฟ Flag for Koลกice (SK-KI)
๐ด๓ ณ๓ ซ๓ ข๓ ฃ๓ ฟ Flag for Banskรก Bystrica (SK-BC)
๐ด๓ ณ๓ ญ๓ ฐ๓ ธ๓ ฟ Flag for Montegiardino (SM-08)
๐ด๓ ณ๓ ฎ๓ ค๓ ซ๓ ฟ Flag for Dakar (SN-DK)
๐ด๓ ณ๓ ซ๓ ฐ๓ ถ๓ ฟ Flag for Preลกov (SK-PV)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฑ๓ ฒ๓ ฟ Flag for Mirna (SI-212)
๐ด๓ ณ๓ ญ๓ ฐ๓ ต๓ ฟ Flag for Fiorentino (SM-05)
๐ด๓ ณ๓ ฎ๓ ด๓ จ๓ ฟ Flag for Thiรจs (SN-TH)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฑ๓ ณ๓ ฟ Flag for Ankaran (SI-213)
๐ด๓ ณ๓ ฎ๓ ด๓ ฃ๓ ฟ Flag for Tambacounda (SN-TC)
๐ด๓ ณ๓ ฎ๓ ฆ๓ ซ๓ ฟ Flag for Fatick (SN-FK)
๐ด๓ ณ๓ ซ๓ ด๓ ฃ๓ ฟ Flag for Trenฤรญn (SK-TC)
๐ด๓ ณ๓ ฎ๓ ซ๓ ฌ๓ ฟ Flag for Kaolack (SN-KL)
๐ด๓ ณ๓ ญ๓ ฐ๓ ด๓ ฟ Flag for Faetano (SM-04)
๐ด๓ ณ๓ ซ๓ บ๓ ฉ๓ ฟ Flag for ลฝilina (SK-ZI)
๐ด๓ ณ๓ ฌ๓ ณ๓ ฟ Flag for Southern (SL-S)
๐ด๓ ณ๓ ฎ๓ ณ๓ ฅ๓ ฟ Flag for Sรฉdhiou (SN-SE)
๐ด๓ ณ๓ ซ๓ ข๓ ฌ๓ ฟ Flag for Bratislava (SK-BL)
๐ด๓ ณ๓ ฎ๓ ค๓ ข๓ ฟ Flag for Diourbel (SN-DB)
๐ด๓ ณ๓ ฎ๓ ซ๓ ฅ๓ ฟ Flag for Kรฉdougou (SN-KE)
๐ด๓ ณ๓ ฌ๓ ฎ๓ ฟ Flag for Northern (SL-N)
๐ด๓ ณ๓ ฌ๓ ท๓ ฟ Flag for Western Area (SL-W)
๐ด๓ ณ๓ ฎ๓ ญ๓ ด๓ ฟ Flag for Matam (SN-MT)
๐ด๓ ณ๓ ฌ๓ ฅ๓ ฟ Flag for Eastern (SL-E)
๐ด๓ ณ๓ ญ๓ ฐ๓ ฑ๓ ฟ Flag for Acquaviva (SM-01)
๐ด๓ ณ๓ ฎ๓ ซ๓ ค๓ ฟ Flag for Kolda (SN-KD)
๐ด๓ ณ๓ ฎ๓ ณ๓ ฌ๓ ฟ Flag for Saint-Louis (SN-SL)
๐ด๓ ณ๓ ญ๓ ฐ๓ ท๓ ฟ Flag for San Marino (SM-07)
๐ด๓ ณ๓ ฎ๓ ฌ๓ ง๓ ฟ Flag for Louga (SN-LG)
๐ด๓ ณ๓ ญ๓ ฐ๓ ณ๓ ฟ Flag for Domagnano (SM-03)
๐ด๓ ณ๓ ณ๓ ฅ๓ ฅ๓ ฟ Flag for Eastern Equatoria (SS-EE)
๐ด๓ ณ๓ ฒ๓ ณ๓ ก๓ ฟ Flag for Saramacca (SR-SA)
๐ฉ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ณ๓ ฒ๓ ญ๓ ก๓ ฟ Flag for Marowijne (SR-MA)
๐ด๓ ณ๓ ฏ๓ ช๓ ค๓ ฟ Flag for Middle Juba (SO-JD)
๐ด๓ ณ๓ ฏ๓ ญ๓ ต๓ ฟ Flag for Mudug (SO-MU)
๐ด๓ ณ๓ ฏ๓ ณ๓ จ๓ ฟ Flag for Lower Shebelle (SO-SH)
๐ด๓ ณ๓ ฏ๓ จ๓ ฉ๓ ฟ Flag for Hiran (SO-HI)
๐ด๓ ณ๓ ณ๓ ฅ๓ ฃ๓ ฟ Flag for Central Equatoria (SS-EC)
๐ด๓ ณ๓ ฎ๓ บ๓ ง๓ ฟ Flag for Ziguinchor (SN-ZG)
๐ด๓ ณ๓ ฒ๓ ฃ๓ ฒ๓ ฟ Flag for Coronie (SR-CR)
๐ด๓ ณ๓ ฏ๓ ณ๓ ค๓ ฟ Flag for Middle Shebelle (SO-SD)
๐ด๓ ณ๓ ณ๓ ฎ๓ ต๓ ฟ Flag for Upper Nile (SS-NU)
๐ด๓ ณ๓ ฒ๓ ท๓ ก๓ ฟ Flag for Wanica (SR-WA)
๐ด๓ ณ๓ ฏ๓ ก๓ ท๓ ฟ Flag for Awdal (SO-AW)
๐ด๓ ณ๓ ฏ๓ ณ๓ ก๓ ฟ Flag for Sanaag (SO-SA)
๐ด๓ ณ๓ ฏ๓ ช๓ จ๓ ฟ Flag for Lower Juba (SO-JH)
๐ด๓ ณ๓ ณ๓ ฌ๓ ซ๓ ฟ Flag for Lakes (SS-LK)
๐ด๓ ณ๓ ณ๓ ท๓ ฒ๓ ฟ Flag for Warrap (SS-WR)
๐ด๓ ณ๓ ด๓ ฐ๓ ฟ Flag for Prรญncipe (ST-P)
๐ด๓ ณ๓ ฒ๓ ณ๓ ฉ๓ ฟ Flag for Sipaliwini (SR-SI)
๐ด๓ ณ๓ ณ๓ ข๓ ท๓ ฟ Flag for Western Bahr el Ghazal (SS-BW)
๐ด๓ ณ๓ ณ๓ ฅ๓ ท๓ ฟ Flag for Western Equatoria (SS-EW)
๐ด๓ ณ๓ ฏ๓ ข๓ ฒ๓ ฟ Flag for Bari (SO-BR)
๐ด๓ ณ๓ ณ๓ ช๓ ง๓ ฟ Flag for Jonglei (SS-JG)
๐ด๓ ณ๓ ฒ๓ ฐ๓ ญ๓ ฟ Flag for Paramaribo (SR-PM)
๐ด๓ ณ๓ ฒ๓ ฃ๓ ญ๓ ฟ Flag for Commewijne (SR-CM)
๐ด๓ ณ๓ ฏ๓ ง๓ ก๓ ฟ Flag for Galguduud (SO-GA)
๐ด๓ ณ๓ ฒ๓ ฎ๓ ฉ๓ ฟ Flag for Nickerie (SR-NI)
๐ด๓ ณ๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for Para (SR-PR)
๐ด๓ ณ๓ ฏ๓ ท๓ ฏ๓ ฟ Flag for Woqooyi Galbeed (SO-WO)
๐ด๓ ณ๓ ฏ๓ ง๓ ฅ๓ ฟ Flag for Gedo (SO-GE)
๐ด๓ ณ๓ ฏ๓ ข๓ น๓ ฟ Flag for Bay, Somalia (SO-BY)
๐ด๓ ณ๓ ฒ๓ ข๓ ฒ๓ ฟ Flag for Brokopondo (SR-BR)
๐ด๓ ณ๓ ฏ๓ ฎ๓ ต๓ ฟ Flag for Nugal (SO-NU)
๐ด๓ ณ๓ ฏ๓ ด๓ ฏ๓ ฟ Flag for Togdheer (SO-TO)
๐ด๓ ณ๓ ฏ๓ ข๓ ซ๓ ฟ Flag for Bakool (SO-BK)
๐ด๓ ณ๓ ฏ๓ ณ๓ ฏ๓ ฟ Flag for Sool (SO-SO)
๐ด๓ ณ๓ บ๓ จ๓ จ๓ ฟ Flag for Hhohho (SZ-HH)
๐ด๓ ด๓ ค๓ ฅ๓ ฏ๓ ฟ Flag for Ennedi-Ouest (TD-EO)
๐ด๓ ด๓ ค๓ ง๓ ฒ๓ ฟ Flag for Guรฉra (TD-GR)
๐ด๓ ณ๓ บ๓ ณ๓ จ๓ ฟ Flag for Shiselweni (SZ-SH)
๐ด๓ ณ๓ น๓ ค๓ ฒ๓ ฟ Flag for Daraa (SY-DR)
๐ด๓ ณ๓ น๓ ฒ๓ ก๓ ฟ Flag for Ar-Raqqah (SY-RA)
๐ด๓ ณ๓ ถ๓ ณ๓ ฏ๓ ฟ Flag for Sonsonate (SV-SO)
๐ด๓ ณ๓ ถ๓ ต๓ ฎ๓ ฟ Flag for La Uniรณn (SV-UN)
๐ด๓ ณ๓ ถ๓ ณ๓ ญ๓ ฟ Flag for San Miguel (SV-SM)
๐ด๓ ณ๓ ถ๓ ญ๓ ฏ๓ ฟ Flag for Morazรกn (SV-MO)
๐ด๓ ณ๓ ถ๓ ณ๓ ณ๓ ฟ Flag for San Salvador (SV-SS)
๐ด๓ ณ๓ น๓ ค๓ น๓ ฟ Flag for Deir ez-Zor (SY-DY)
๐ด๓ ณ๓ ถ๓ ฃ๓ ก๓ ฟ Flag for Cabaรฑas (SV-CA)
๐ด๓ ณ๓ บ๓ ฌ๓ ต๓ ฟ Flag for Lubombo (SZ-LU)
๐ด๓ ณ๓ ถ๓ ฃ๓ จ๓ ฟ Flag for Chalatenango (SV-CH)
๐ด๓ ณ๓ น๓ ฒ๓ ค๓ ฟ Flag for Rif Dimashq (SY-RD)
๐ด๓ ณ๓ น๓ ด๓ ก๓ ฟ Flag for Tartus (SY-TA)
๐ด๓ ด๓ ค๓ ข๓ ฏ๓ ฟ Flag for Borkou (TD-BO)
๐ด๓ ณ๓ บ๓ ญ๓ ก๓ ฟ Flag for Manzini (SZ-MA)
๐ด๓ ด๓ ค๓ ข๓ ก๓ ฟ Flag for Batha (TD-BA)
๐ด๓ ณ๓ น๓ จ๓ ฉ๓ ฟ Flag for Homs (SY-HI)
๐ด๓ ด๓ ค๓ ฅ๓ ฅ๓ ฟ Flag for Ennedi-Est (TD-EE)
๐ด๓ ด๓ ค๓ ข๓ ง๓ ฟ Flag for Bahr el Gazel (TD-BG)
๐ด๓ ด๓ ค๓ ซ๓ ก๓ ฟ Flag for Kanem (TD-KA)
๐ด๓ ณ๓ น๓ จ๓ ญ๓ ฟ Flag for Hama (SY-HM)
๐ด๓ ณ๓ น๓ ฌ๓ ก๓ ฟ Flag for Latakia (SY-LA)
๐ด๓ ณ๓ น๓ ฉ๓ ค๓ ฟ Flag for Idlib (SY-ID)
๐ด๓ ณ๓ ถ๓ ฌ๓ ฉ๓ ฟ Flag for La Libertad (SV-LI)
๐ด๓ ณ๓ น๓ จ๓ ฌ๓ ฟ Flag for Aleppo (SY-HL)
๐ด๓ ณ๓ ถ๓ ก๓ จ๓ ฟ Flag for Ahuachapรกn (SV-AH)
๐ด๓ ด๓ ค๓ ฃ๓ ข๓ ฟ Flag for Chari-Baguirmi (TD-CB)
๐ด๓ ณ๓ ถ๓ ฐ๓ ก๓ ฟ Flag for La Paz (SV-PA)
๐ด๓ ณ๓ น๓ ณ๓ ต๓ ฟ Flag for As-Suwayda (SY-SU)
๐ด๓ ณ๓ น๓ ค๓ ฉ๓ ฟ Flag for Damascus (SY-DI)
๐ด๓ ณ๓ น๓ ฑ๓ ต๓ ฟ Flag for Quneitra (SY-QU)
๐ด๓ ณ๓ น๓ จ๓ ก๓ ฟ Flag for Al-Hasakah (SY-HA)
๐ด๓ ณ๓ ถ๓ ณ๓ ก๓ ฟ Flag for Santa Ana (SV-SA)
๐ด๓ ณ๓ ถ๓ ฃ๓ ต๓ ฟ Flag for Cuscatlรกn (SV-CU)
๐ด๓ ด๓ ค๓ ฌ๓ ฏ๓ ฟ Flag for Logone Occidental (TD-LO)
๐ด๓ ด๓ จ๓ ฒ๓ ฒ๓ ฟ Flag for Chanthaburi (TH-22)
๐ด๓ ด๓ ค๓ ญ๓ ฅ๓ ฟ Flag for Mayo-Kebbi Est (TD-ME)
๐ด๓ ด๓ ค๓ ญ๓ ฃ๓ ฟ Flag for Moyen-Chari (TD-MC)
๐ด๓ ด๓ ค๓ ฌ๓ ฒ๓ ฟ Flag for Logone Oriental (TD-LR)
๐ด๓ ด๓ ง๓ ณ๓ ฟ Flag for Savanes (TG-S)
๐ด๓ ด๓ จ๓ ฑ๓ ด๓ ฟ Flag for Phra Nakhon Si Ayutthaya (TH-14)
๐ด๓ ด๓ ง๓ ฃ๓ ฟ Flag for Centrale (TG-C)
๐ด๓ ด๓ จ๓ ฒ๓ ท๓ ฟ Flag for Sa Kaeo (TH-27)
๐ด๓ ด๓ จ๓ ฑ๓ ฒ๓ ฟ Flag for Nonthaburi (TH-12)
๐ด๓ ด๓ จ๓ ณ๓ ฑ๓ ฟ Flag for Buri Ram (TH-31)
๐ด๓ ด๓ จ๓ ฒ๓ ฐ๓ ฟ Flag for Chon Buri (TH-20)
๐ด๓ ด๓ ค๓ ณ๓ ฉ๓ ฟ Flag for Sila (TD-SI)
๐ด๓ ด๓ ค๓ ฌ๓ ฃ๓ ฟ Flag for Lac (TD-LC)
๐ด๓ ด๓ จ๓ ฒ๓ ฑ๓ ฟ Flag for Rayong (TH-21)
๐ด๓ ด๓ จ๓ ฒ๓ ต๓ ฟ Flag for Prachin Buri (TH-25)
๐ด๓ ด๓ จ๓ ณ๓ ฐ๓ ฟ Flag for Nakhon Ratchasima (TH-30)
๐ด๓ ด๓ ง๓ ซ๓ ฟ Flag for Kara (TG-K)
๐ด๓ ด๓ จ๓ ฑ๓ ต๓ ฟ Flag for Ang Thong (TH-15)
๐ด๓ ด๓ จ๓ ฑ๓ ฐ๓ ฟ Flag for Bangkok (TH-10)
๐ด๓ ด๓ ค๓ ญ๓ ก๓ ฟ Flag for Mandoul (TD-MA)
๐ด๓ ด๓ จ๓ ฑ๓ ณ๓ ฟ Flag for Pathum Thani (TH-13)
๐ด๓ ด๓ จ๓ ฒ๓ ด๓ ฟ Flag for Chachoengsao (TH-24)
๐ด๓ ด๓ จ๓ ฑ๓ ท๓ ฟ Flag for Sing Buri (TH-17)
๐ด๓ ด๓ ค๓ ญ๓ ฏ๓ ฟ Flag for Mayo-Kebbi Ouest (TD-MO)
๐ด๓ ด๓ ค๓ ฏ๓ ค๓ ฟ Flag for Ouaddaรฏ (TD-OD)
๐ด๓ ด๓ จ๓ ณ๓ ฒ๓ ฟ Flag for Surin (TH-32)
๐ด๓ ด๓ จ๓ ฒ๓ ถ๓ ฟ Flag for Nakhon Nayok (TH-26)
๐ด๓ ด๓ ค๓ ณ๓ ก๓ ฟ Flag for Salamat (TD-SA)
๐ด๓ ด๓ ค๓ ด๓ ก๓ ฟ Flag for Tandjilรฉ (TD-TA)
๐ด๓ ด๓ ค๓ ท๓ ฆ๓ ฟ Flag for Wadi Fira (TD-WF)
๐ด๓ ด๓ จ๓ ฑ๓ น๓ ฟ Flag for Saraburi (TH-19)
๐ด๓ ด๓ จ๓ ฑ๓ ฑ๓ ฟ Flag for Samut Prakan (TH-11)
๐ด๓ ด๓ ค๓ ด๓ ฉ๓ ฟ Flag for Tibesti (TD-TI)
๐ด๓ ด๓ ง๓ ฐ๓ ฟ Flag for Plateaux (TG-P)
๐ด๓ ด๓ ค๓ ฎ๓ ค๓ ฟ Flag for NโDjamena (TD-ND)
๐ด๓ ด๓ จ๓ ฑ๓ ธ๓ ฟ Flag for Chai Nat (TH-18)
๐ด๓ ด๓ จ๓ ถ๓ ฒ๓ ฟ Flag for Kamphaeng Phet (TH-62)
๐ด๓ ด๓ จ๓ ท๓ ฒ๓ ฟ Flag for Suphanburi (TH-72)
๐ด๓ ด๓ จ๓ ท๓ ด๓ ฟ Flag for Samut Sakhon (TH-74)
๐ด๓ ด๓ จ๓ ถ๓ ท๓ ฟ Flag for Phetchabun (TH-67)
๐ด๓ ด๓ จ๓ ท๓ ฑ๓ ฟ Flag for Kanchanaburi (TH-71)
๐ด๓ ด๓ จ๓ ต๓ ด๓ ฟ Flag for Phrae (TH-54)
๐ด๓ ด๓ จ๓ ถ๓ ณ๓ ฟ Flag for Tak (TH-63)
๐ด๓ ด๓ จ๓ ด๓ ธ๓ ฟ Flag for Nakhon Phanom (TH-48)
๐ด๓ ด๓ จ๓ ต๓ ฒ๓ ฟ Flag for Lampang (TH-52)
๐ด๓ ด๓ จ๓ ต๓ ธ๓ ฟ Flag for Mae Hong Son (TH-58)
๐ด๓ ด๓ จ๓ ด๓ ท๓ ฟ Flag for Sakon Nakhon (TH-47)
๐ด๓ ด๓ จ๓ ต๓ ถ๓ ฟ Flag for Phayao (TH-56)
๐ด๓ ด๓ จ๓ ด๓ ฑ๓ ฟ Flag for Udon Thani (TH-41)
๐ด๓ ด๓ จ๓ ด๓ น๓ ฟ Flag for Mukdahan (TH-49)
๐ด๓ ด๓ จ๓ ท๓ ณ๓ ฟ Flag for Nakhon Pathom (TH-73)
๐ด๓ ด๓ จ๓ ต๓ ฐ๓ ฟ Flag for Chiang Mai (TH-50)
๐ด๓ ด๓ จ๓ ด๓ ฐ๓ ฟ Flag for Khon Kaen (TH-40)
๐ด๓ ด๓ จ๓ ณ๓ ท๓ ฟ Flag for Amnat Charoen (TH-37)
๐ด๓ ด๓ จ๓ ท๓ ฐ๓ ฟ Flag for Ratchaburi (TH-70)
๐ด๓ ด๓ จ๓ ณ๓ ต๓ ฟ Flag for Yasothon (TH-35)
๐ด๓ ด๓ จ๓ ต๓ ฑ๓ ฟ Flag for Lamphun (TH-51)
๐ด๓ ด๓ จ๓ ด๓ ฒ๓ ฟ Flag for Loei (TH-42)
๐ด๓ ด๓ จ๓ ถ๓ ฐ๓ ฟ Flag for Nakhon Sawan (TH-60)
๐ด๓ ด๓ จ๓ ณ๓ ด๓ ฟ Flag for Ubon Ratchathani (TH-34)
๐ด๓ ด๓ จ๓ ด๓ ด๓ ฟ Flag for Maha Sarakham (TH-44)
๐ด๓ ด๓ จ๓ ด๓ ต๓ ฟ Flag for Roi Et (TH-45)
๐ด๓ ด๓ จ๓ ด๓ ถ๓ ฟ Flag for Kalasin (TH-46)
๐ด๓ ด๓ จ๓ ถ๓ ถ๓ ฟ Flag for Phichit (TH-66)
๐ด๓ ด๓ จ๓ ต๓ ต๓ ฟ Flag for Nan (TH-55)
๐ด๓ ด๓ จ๓ ถ๓ ฑ๓ ฟ Flag for Uthai Thani (TH-61)
๐ด๓ ด๓ จ๓ ณ๓ ธ๓ ฟ Flag for Bueng Kan (TH-38)
๐ด๓ ด๓ จ๓ ณ๓ ณ๓ ฟ Flag for Si Sa Ket (TH-33)
๐ด๓ ด๓ จ๓ ณ๓ น๓ ฟ Flag for Nong Bua Lam Phu (TH-39)
๐ด๓ ด๓ จ๓ ต๓ ณ๓ ฟ Flag for Uttaradit (TH-53)
๐ด๓ ด๓ จ๓ ต๓ ท๓ ฟ Flag for Chiang Rai (TH-57)
๐ด๓ ด๓ จ๓ ถ๓ ด๓ ฟ Flag for Sukhothai (TH-64)
๐ด๓ ด๓ จ๓ ด๓ ณ๓ ฟ Flag for Nong Khai (TH-43)
๐ด๓ ด๓ จ๓ ถ๓ ต๓ ฟ Flag for Phitsanulok (TH-65)
๐ด๓ ด๓ ฌ๓ ฅ๓ ฒ๓ ฟ Flag for Ermera (TL-ER)
๐ด๓ ด๓ ฌ๓ ฏ๓ ฅ๓ ฟ Flag for Oecusse (TL-OE)
๐ด๓ ด๓ ฌ๓ ฌ๓ ฉ๓ ฟ Flag for Liquiรงรก (TL-LI)
๐ด๓ ด๓ ฌ๓ ก๓ ฌ๓ ฟ Flag for Aileu (TL-AL)
๐ด๓ ด๓ ญ๓ ก๓ ฟ Flag for Ahal (TM-A)
๐ด๓ ด๓ จ๓ ธ๓ ด๓ ฟ Flag for Surat Thani (TH-84)
๐ด๓ ด๓ จ๓ ท๓ ถ๓ ฟ Flag for Phetchaburi (TH-76)
๐ด๓ ด๓ ฌ๓ ข๓ ฏ๓ ฟ Flag for Bobonaro (TL-BO)
๐ด๓ ด๓ ฌ๓ ญ๓ ด๓ ฟ Flag for Manatuto (TL-MT)
๐ด๓ ด๓ ช๓ ซ๓ ด๓ ฟ Flag for Khatlon (TJ-KT)
๐ด๓ ด๓ ฌ๓ ก๓ ฎ๓ ฟ Flag for Ainaro (TL-AN)
๐ด๓ ด๓ จ๓ ธ๓ ฒ๓ ฟ Flag for Phang Nga (TH-82)
๐ด๓ ด๓ ฌ๓ ฃ๓ ฏ๓ ฟ Flag for Cova Lima (TL-CO)
๐ด๓ ด๓ ฎ๓ ฑ๓ ฑ๓ ฟ Flag for Tunis (TN-11)
๐ด๓ ด๓ จ๓ ธ๓ ต๓ ฟ Flag for Ranong (TH-85)
๐ด๓ ด๓ จ๓ ธ๓ ฐ๓ ฟ Flag for Nakhon Si Thammarat (TH-80)
๐ด๓ ด๓ จ๓ ท๓ ท๓ ฟ Flag for Prachuap Khiri Khan (TH-77)
๐ด๓ ด๓ ช๓ ค๓ ต๓ ฟ Flag for Dushanbe (TJ-DU)
๐ด๓ ด๓ จ๓ น๓ ต๓ ฟ Flag for Yala (TH-95)
๐ด๓ ด๓ จ๓ น๓ ฐ๓ ฟ Flag for Songkhla (TH-90)
๐ด๓ ด๓ ญ๓ ฌ๓ ฟ Flag for Lebap (TM-L)
๐ด๓ ด๓ จ๓ น๓ ถ๓ ฟ Flag for Narathiwat (TH-96)
๐ด๓ ด๓ ญ๓ ญ๓ ฟ Flag for Mary (TM-M)
๐ด๓ ด๓ ฌ๓ ญ๓ ฆ๓ ฟ Flag for Manufahi (TL-MF)
๐จ๐ผโ๐จ๐ผโ๐ฆ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ด๓ ญ๓ ข๓ ฟ Flag for Balkan (TM-B)
๐ด๓ ด๓ ฌ๓ ข๓ ก๓ ฟ Flag for Baucau (TL-BA)
๐ด๓ ด๓ ช๓ ฒ๓ ก๓ ฟ Flag for Nohiyahoi Tobei Jumhurรญ (TJ-RA)
๐ด๓ ด๓ จ๓ น๓ ฒ๓ ฟ Flag for Trang (TH-92)
๐ด๓ ด๓ ช๓ ณ๓ ต๓ ฟ Flag for Sughd (TJ-SU)
๐ด๓ ด๓ ฌ๓ ถ๓ ฉ๓ ฟ Flag for Viqueque (TL-VI)
๐ด๓ ด๓ จ๓ น๓ ด๓ ฟ Flag for Pattani (TH-94)
๐ด๓ ด๓ จ๓ ธ๓ ฑ๓ ฟ Flag for Krabi (TH-81)
๐ด๓ ด๓ ฌ๓ ค๓ ฉ๓ ฟ Flag for Dili (TL-DI)
๐ด๓ ด๓ จ๓ ธ๓ ณ๓ ฟ Flag for Phuket (TH-83)
๐ด๓ ด๓ จ๓ น๓ ฑ๓ ฟ Flag for Satun (TH-91)
๐ด๓ ด๓ จ๓ ณ๓ ฟ Flag for Pattaya (TH-S)
๐ด๓ ด๓ ญ๓ ค๓ ฟ Flag for Daลoguz (TM-D)
๐ด๓ ด๓ ฎ๓ ด๓ ฑ๓ ฟ Flag for Kairouan (TN-41)
๐ด๓ ด๓ ฎ๓ ต๓ ฒ๓ ฟ Flag for Monastir (TN-52)
๐ด๓ ด๓ ฒ๓ ฐ๓ น๓ ฟ Flag for Aydฤฑn (TR-09)
๐ด๓ ด๓ ฎ๓ ณ๓ ฑ๓ ฟ Flag for Bรฉja (TN-31)
๐ด๓ ด๓ ฒ๓ ฐ๓ ท๓ ฟ Flag for Antalya (TR-07)
๐ด๓ ด๓ ฎ๓ ฒ๓ ฑ๓ ฟ Flag for Nabeul (TN-21)
๐ด๓ ด๓ ฎ๓ ต๓ ณ๓ ฟ Flag for Mahdia (TN-53)
๐ด๓ ด๓ ฏ๓ ฐ๓ ฒ๓ ฟ Flag for Haสปapai (TO-02)
๐ด๓ ด๓ ฒ๓ ฐ๓ ต๓ ฟ Flag for Amasya (TR-05)
๐ด๓ ด๓ ฒ๓ ฑ๓ ณ๓ ฟ Flag for Bitlis (TR-13)
๐ด๓ ด๓ ฎ๓ ฑ๓ ฒ๓ ฟ Flag for Ariana (TN-12)
๐ด๓ ด๓ ฎ๓ ท๓ ณ๓ ฟ Flag for Kebili (TN-73)
๐ด๓ ด๓ ฒ๓ ฐ๓ ฑ๓ ฟ Flag for Adana (TR-01)
๐ด๓ ด๓ ฏ๓ ฐ๓ ฑ๓ ฟ Flag for สปEua (TO-01)
๐ด๓ ด๓ ฒ๓ ฑ๓ ฒ๓ ฟ Flag for Bingรถl (TR-12)
๐ด๓ ด๓ ฎ๓ ธ๓ ณ๓ ฟ Flag for Tataouine (TN-83)
๐ด๓ ด๓ ฒ๓ ฐ๓ ธ๓ ฟ Flag for Artvin (TR-08)
๐ด๓ ด๓ ฎ๓ ต๓ ฑ๓ ฟ Flag for Sousse (TN-51)
๐ด๓ ด๓ ฎ๓ ธ๓ ฑ๓ ฟ Flag for Gabรจs (TN-81)
๐ด๓ ด๓ ฒ๓ ฐ๓ ด๓ ฟ Flag for Aฤrฤฑ (TR-04)
๐ด๓ ด๓ ฒ๓ ฑ๓ ฑ๓ ฟ Flag for Bilecik (TR-11)
๐ด๓ ด๓ ฎ๓ ณ๓ ฒ๓ ฟ Flag for Jendouba (TN-32)
๐ด๓ ด๓ ฏ๓ ฐ๓ ด๓ ฟ Flag for Tongatapu (TO-04)
๐ด๓ ด๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for Adฤฑyaman (TR-02)
๐ด๓ ด๓ ฎ๓ ณ๓ ณ๓ ฟ Flag for Kef (TN-33)
๐ด๓ ด๓ ฎ๓ ฒ๓ ฒ๓ ฟ Flag for Zaghouan (TN-22)
๐ด๓ ด๓ ฒ๓ ฑ๓ ฐ๓ ฟ Flag for Balฤฑkesir (TR-10)
๐ด๓ ด๓ ฎ๓ ฑ๓ ณ๓ ฟ Flag for Ben Arous (TN-13)
๐ด๓ ด๓ ฏ๓ ฐ๓ ณ๓ ฟ Flag for Niuas (TO-03)
๐ด๓ ด๓ ฎ๓ ท๓ ฒ๓ ฟ Flag for Tozeur (TN-72)
๐ด๓ ด๓ ฎ๓ ฑ๓ ด๓ ฟ Flag for Manouba (TN-14)
๐ด๓ ด๓ ฎ๓ ด๓ ฒ๓ ฟ Flag for Kasserine (TN-42)
๐ด๓ ด๓ ฒ๓ ฑ๓ ด๓ ฟ Flag for Bolu (TR-14)
๐ด๓ ด๓ ฎ๓ ณ๓ ด๓ ฟ Flag for Siliana (TN-34)
๐ด๓ ด๓ ฏ๓ ฐ๓ ต๓ ฟ Flag for Vavaสปu (TO-05)
๐ด๓ ด๓ ฒ๓ ฐ๓ ถ๓ ฟ Flag for Ankara (TR-06)
๐ด๓ ด๓ ฎ๓ ถ๓ ฑ๓ ฟ Flag for Sfax (TN-61)
๐ด๓ ด๓ ฎ๓ ด๓ ณ๓ ฟ Flag for Sidi Bouzid (TN-43)
๐ด๓ ด๓ ฎ๓ ธ๓ ฒ๓ ฟ Flag for Medenine (TN-82)
๐ด๓ ด๓ ฎ๓ ฒ๓ ณ๓ ฟ Flag for Bizerte (TN-23)
๐ด๓ ด๓ ฒ๓ ฒ๓ ด๓ ฟ Flag for Erzincan (TR-24)
๐ด๓ ด๓ ฒ๓ ด๓ ถ๓ ฟ Flag for Kahramanmaraล (TR-46)
๐ด๓ ด๓ ฒ๓ ณ๓ ถ๓ ฟ Flag for Kars (TR-36)
๐ด๓ ด๓ ฒ๓ ต๓ ฑ๓ ฟ Flag for Niฤde (TR-51)
๐ด๓ ด๓ ฒ๓ ณ๓ ธ๓ ฟ Flag for Kayseri (TR-38)
๐ด๓ ด๓ ฒ๓ ด๓ ฑ๓ ฟ Flag for Kocaeli (TR-41)
๐ด๓ ด๓ ฒ๓ ฑ๓ ธ๓ ฟ Flag for รankฤฑrฤฑ (TR-18)
๐ด๓ ด๓ ฒ๓ ด๓ ธ๓ ฟ Flag for Muฤla (TR-48)
๐ด๓ ด๓ ฒ๓ ด๓ ฒ๓ ฟ Flag for Konya (TR-42)
๐ด๓ ด๓ ฒ๓ ด๓ ด๓ ฟ Flag for Malatya (TR-44)
๐ด๓ ด๓ ฒ๓ ฒ๓ น๓ ฟ Flag for Gรผmรผลhane (TR-29)
๐ด๓ ด๓ ฒ๓ ฒ๓ ฒ๓ ฟ Flag for Edirne (TR-22)
๐ด๓ ด๓ ฒ๓ ณ๓ น๓ ฟ Flag for Kฤฑrklareli (TR-39)
๐ด๓ ด๓ ฒ๓ ฒ๓ ท๓ ฟ Flag for Gaziantep (TR-27)
๐ด๓ ด๓ ฒ๓ ต๓ ต๓ ฟ Flag for Samsun (TR-55)
๐ด๓ ด๓ ฒ๓ ฒ๓ ฑ๓ ฟ Flag for Diyarbakฤฑr (TR-21)
๐ด๓ ด๓ ฒ๓ ฑ๓ ถ๓ ฟ Flag for Bursa (TR-16)
๐ด๓ ด๓ ฒ๓ ฑ๓ น๓ ฟ Flag for รorum (TR-19)
๐ด๓ ด๓ ฒ๓ ต๓ ฒ๓ ฟ Flag for Ordu (TR-52)
๐ด๓ ด๓ ฒ๓ ด๓ ต๓ ฟ Flag for Manisa (TR-45)
๐ด๓ ด๓ ฒ๓ ฒ๓ ต๓ ฟ Flag for Erzurum (TR-25)
๐ด๓ ด๓ ฒ๓ ฑ๓ ต๓ ฟ Flag for Burdur (TR-15)
๐ด๓ ด๓ ฒ๓ ณ๓ ฒ๓ ฟ Flag for Isparta (TR-32)
๐ด๓ ด๓ ฒ๓ ณ๓ ด๓ ฟ Flag for Istanbul (TR-34)
๐ด๓ ด๓ ฒ๓ ณ๓ ฐ๓ ฟ Flag for Hakkรขri (TR-30)
๐ด๓ ด๓ ฒ๓ ณ๓ ฑ๓ ฟ Flag for Hatay (TR-31)
๐ด๓ ด๓ ฒ๓ ด๓ น๓ ฟ Flag for Muล (TR-49)
๐ด๓ ด๓ ฒ๓ ณ๓ ณ๓ ฟ Flag for Mersin (TR-33)
๐ด๓ ด๓ ฒ๓ ต๓ ถ๓ ฟ Flag for Siirt (TR-56)
๐ด๓ ด๓ ฒ๓ ต๓ ฐ๓ ฟ Flag for Nevลehir (TR-50)
๐ด๓ ด๓ ฒ๓ ฒ๓ ณ๓ ฟ Flag for Elazฤฑฤ (TR-23)
๐ด๓ ด๓ ฒ๓ ฒ๓ ธ๓ ฟ Flag for Giresun (TR-28)
๐ด๓ ด๓ ฒ๓ ฒ๓ ฐ๓ ฟ Flag for Denizli (TR-20)
๐ด๓ ด๓ ฒ๓ ด๓ ท๓ ฟ Flag for Mardin (TR-47)
๐ด๓ ด๓ ฒ๓ ณ๓ ท๓ ฟ Flag for Kastamonu (TR-37)
๐ด๓ ด๓ ฒ๓ ต๓ ด๓ ฟ Flag for Sakarya (TR-54)
๐ด๓ ด๓ ฒ๓ ด๓ ฐ๓ ฟ Flag for Kฤฑrลehir (TR-40)
๐ด๓ ด๓ ฒ๓ ฑ๓ ท๓ ฟ Flag for รanakkale (TR-17)
๐ด๓ ด๓ ฒ๓ ต๓ ณ๓ ฟ Flag for Rize (TR-53)
๐ด๓ ด๓ ฒ๓ ฒ๓ ถ๓ ฟ Flag for Eskiลehir (TR-26)
๐ด๓ ด๓ ฒ๓ ถ๓ ต๓ ฟ Flag for Van (TR-65)
๐ด๓ ด๓ ด๓ ฐ๓ ฒ๓ ด๓ ฟ Flag for Princes Town (TT-PRT)
๐ด๓ ด๓ ด๓ ฃ๓ ด๓ ด๓ ฟ Flag for Couva-Tabaquite-Talparo (TT-CTT)
๐ด๓ ด๓ ด๓ ด๓ ฏ๓ ข๓ ฟ Flag for Tobago (TT-TOB)
๐ด๓ ด๓ ฒ๓ ถ๓ ณ๓ ฟ Flag for ลanlฤฑurfa (TR-63)
๐ด๓ ด๓ ด๓ ก๓ ฒ๓ ฉ๓ ฟ Flag for Arima (TT-ARI)
๐ด๓ ด๓ ฒ๓ ถ๓ ท๓ ฟ Flag for Zonguldak (TR-67)
๐ด๓ ด๓ ด๓ ณ๓ ฉ๓ ฐ๓ ฟ Flag for Siparia (TT-SIP)
๐ด๓ ด๓ ฒ๓ ท๓ ต๓ ฟ Flag for Ardahan (TR-75)
๐ด๓ ด๓ ฒ๓ ท๓ น๓ ฟ Flag for Kilis (TR-79)
๐ด๓ ด๓ ด๓ ฐ๓ ฏ๓ ณ๓ ฟ Flag for Port of Spain (TT-POS)
๐ด๓ ด๓ ฒ๓ ถ๓ ธ๓ ฟ Flag for Aksaray (TR-68)
๐ด๓ ด๓ ด๓ ค๓ ญ๓ ฎ๓ ฟ Flag for Diego Martin (TT-DMN)
๐ด๓ ด๓ ฒ๓ ถ๓ น๓ ฟ Flag for Bayburt (TR-69)
๐ด๓ ด๓ ฒ๓ ต๓ น๓ ฟ Flag for Tekirdaฤ (TR-59)
๐ด๓ ด๓ ฒ๓ ท๓ ฒ๓ ฟ Flag for Batman (TR-72)
๐ด๓ ด๓ ด๓ ฃ๓ จ๓ ก๓ ฟ Flag for Chaguanas (TT-CHA)
๐ด๓ ด๓ ฒ๓ ธ๓ ฐ๓ ฟ Flag for Osmaniye (TR-80)
๐ด๓ ด๓ ฒ๓ ท๓ ท๓ ฟ Flag for Yalova (TR-77)
๐ด๓ ด๓ ด๓ ณ๓ ช๓ ฌ๓ ฟ Flag for San Juan-Laventille (TT-SJL)
๐ด๓ ด๓ ฒ๓ ท๓ ธ๓ ฟ Flag for Karabรผk (TR-78)
๐ด๓ ด๓ ฒ๓ ถ๓ ถ๓ ฟ Flag for Yozgat (TR-66)
๐ด๓ ด๓ ด๓ ญ๓ ฒ๓ ฃ๓ ฟ Flag for Mayaro-Rio Claro (TT-MRC)
๐ด๓ ด๓ ฒ๓ ถ๓ ด๓ ฟ Flag for Uลak (TR-64)
๐ด๓ ด๓ ฒ๓ ต๓ ท๓ ฟ Flag for Sinop (TR-57)
๐ด๓ ด๓ ด๓ ด๓ ต๓ ฐ๓ ฟ Flag for Tunapuna-Piarco (TT-TUP)
๐ด๓ ด๓ ฒ๓ ท๓ ด๓ ฟ Flag for Bartฤฑn (TR-74)
๐ด๓ ด๓ ฒ๓ ท๓ ฑ๓ ฟ Flag for Kฤฑrฤฑkkale (TR-71)
๐ด๓ ด๓ ด๓ ฐ๓ ฅ๓ ค๓ ฟ Flag for Penal-Debe (TT-PED)
๐ด๓ ด๓ ฒ๓ ท๓ ถ๓ ฟ Flag for Iฤdฤฑr (TR-76)
๐ด๓ ด๓ ฒ๓ ท๓ ณ๓ ฟ Flag for ลฤฑrnak (TR-73)
๐ด๓ ด๓ ฒ๓ ถ๓ ฑ๓ ฟ Flag for Trabzon (TR-61)
๐ด๓ ด๓ ด๓ ฐ๓ ด๓ ฆ๓ ฟ Flag for Point Fortin (TT-PTF)
๐ด๓ ด๓ ฒ๓ ถ๓ ฒ๓ ฟ Flag for Tunceli (TR-62)
๐ด๓ ด๓ ฒ๓ ถ๓ ฐ๓ ฟ Flag for Tokat (TR-60)
๐ด๓ ด๓ ฒ๓ ท๓ ฐ๓ ฟ Flag for Karaman (TR-70)
๐ด๓ ด๓ ด๓ ณ๓ ฆ๓ ฏ๓ ฟ Flag for San Fernando (TT-SFO)
๐ด๓ ด๓ ฒ๓ ต๓ ธ๓ ฟ Flag for Sivas (TR-58)
๐ด๓ ด๓ บ๓ ฐ๓ ท๓ ฟ Flag for Zanzibar North (TZ-07)
๐ด๓ ด๓ ท๓ ฃ๓ จ๓ ก๓ ฟ Flag for Changhua (TW-CHA)
๐ด๓ ด๓ ถ๓ ถ๓ ก๓ ฉ๓ ฟ Flag for Vaitupu (TV-VAI)
๐ด๓ ด๓ ท๓ ซ๓ จ๓ จ๓ ฟ Flag for Kaohsiung (TW-KHH)
๐ด๓ ด๓ บ๓ ฐ๓ น๓ ฟ Flag for Kilimanjaro (TZ-09)
๐ด๓ ด๓ ท๓ ซ๓ ฉ๓ ฎ๓ ฟ Flag for Kinmen (TW-KIN)
๐ด๓ ด๓ ท๓ ฐ๓ ฅ๓ ฎ๓ ฟ Flag for Penghu (TW-PEN)
๐ด๓ ด๓ ท๓ ด๓ ฎ๓ ฎ๓ ฟ Flag for Tainan (TW-TNN)
๐ด๓ ด๓ ถ๓ ฎ๓ ซ๓ ฆ๓ ฟ Flag for Nukufetau (TV-NKF)
๐ด๓ ด๓ บ๓ ฐ๓ ธ๓ ฟ Flag for Kigoma (TZ-08)
๐ด๓ ด๓ ท๓ ด๓ ฐ๓ ฅ๓ ฟ Flag for Taipei (TW-TPE)
๐ด๓ ด๓ ท๓ ฐ๓ ฉ๓ ฆ๓ ฟ Flag for Pingtung (TW-PIF)
๐ด๓ ด๓ ท๓ ฉ๓ ฌ๓ ก๓ ฟ Flag for Yilan (TW-ILA)
๐ด๓ ด๓ ท๓ ด๓ ก๓ ฏ๓ ฟ Flag for Taoyuan (TW-TAO)
๐ด๓ ด๓ บ๓ ฐ๓ ณ๓ ฟ Flag for Dodoma (TZ-03)
๐ด๓ ด๓ ถ๓ ฎ๓ ต๓ ฉ๓ ฟ Flag for Nui (TV-NUI)
๐ด๓ ด๓ ถ๓ ฎ๓ ฉ๓ ด๓ ฟ Flag for Niutao (TV-NIT)
๐ด๓ ด๓ บ๓ ฐ๓ ถ๓ ฟ Flag for North Pemba (TZ-06)
๐ด๓ ด๓ ท๓ ฎ๓ ท๓ ด๓ ฟ Flag for New Taipei (TW-NWT)
๐ด๓ ด๓ บ๓ ฐ๓ ด๓ ฟ Flag for Iringa (TZ-04)
๐ด๓ ด๓ บ๓ ฐ๓ ต๓ ฟ Flag for Kagera (TZ-05)
๐ด๓ ด๓ ท๓ น๓ ต๓ ฎ๓ ฟ Flag for Yunlin (TW-YUN)
๐ด๓ ด๓ ท๓ ฌ๓ ฉ๓ ฅ๓ ฟ Flag for Lienchiang (TW-LIE)
๐ด๓ ด๓ ถ๓ ฎ๓ ญ๓ ง๓ ฟ Flag for Nanumanga (TV-NMG)
๐ด๓ ด๓ บ๓ ฐ๓ ฒ๓ ฟ Flag for Dar es Salaam (TZ-02)
๐ด๓ ด๓ ถ๓ ฎ๓ ญ๓ ก๓ ฟ Flag for Nanumea (TV-NMA)
๐ด๓ ด๓ ท๓ ด๓ ด๓ ด๓ ฟ Flag for Taitung (TW-TTT)
๐ด๓ ด๓ ท๓ ฎ๓ ก๓ ฎ๓ ฟ Flag for Nantou (TW-NAN)
๐ด๓ ด๓ ท๓ ฃ๓ น๓ ฑ๓ ฟ Flag for Chiayi (TW-CYQ)
๐ด๓ ด๓ บ๓ ฐ๓ ฑ๓ ฟ Flag for Arusha (TZ-01)
๐ด๓ ด๓ ท๓ จ๓ ต๓ ก๓ ฟ Flag for Hualien (TW-HUA)
๐ด๓ ด๓ ท๓ ฃ๓ น๓ ฉ๓ ฟ Flag for Chiayi County (TW-CYI)
๐ด๓ ด๓ ท๓ ด๓ ธ๓ ง๓ ฟ Flag for Taichung (TW-TXG)
๐ด๓ ด๓ ท๓ ซ๓ ฅ๓ ฅ๓ ฟ Flag for Keelung (TW-KEE)
๐ด๓ ด๓ ท๓ ญ๓ ฉ๓ ก๓ ฟ Flag for Miaoli (TW-MIA)
๐ด๓ ต๓ ก๓ ด๓ ณ๓ ฟ Flag for Crimea (UA-43)
๐ด๓ ด๓ บ๓ ฑ๓ ฒ๓ ฟ Flag for Lindi (TZ-12)
๐ด๓ ด๓ บ๓ ฒ๓ ถ๓ ฟ Flag for Manyara (TZ-26)
๐ด๓ ต๓ ก๓ ฐ๓ น๓ ฟ Flag for Luhanshchyna (UA-09)
๐ด๓ ด๓ บ๓ ฒ๓ ฐ๓ ฟ Flag for Rukwa (TZ-20)
๐ด๓ ต๓ ก๓ ฑ๓ ฒ๓ ฟ Flag for Dnipropetrovshchyna (UA-12)
๐ด๓ ต๓ ก๓ ฐ๓ ท๓ ฟ Flag for Volyn (UA-07)
๐ด๓ ด๓ บ๓ ฒ๓ ฒ๓ ฟ Flag for Shinyanga (TZ-22)
๐ด๓ ต๓ ก๓ ฐ๓ ต๓ ฟ Flag for Vinnychchyna (UA-05)
๐ด๓ ด๓ บ๓ ฒ๓ ฑ๓ ฟ Flag for Ruvuma (TZ-21)
๐ด๓ ด๓ บ๓ ฒ๓ ธ๓ ฟ Flag for Katavi (TZ-28)
๐ด๓ ต๓ ก๓ ฒ๓ ณ๓ ฟ Flag for Zaporizhzhya (UA-23)
๐ด๓ ต๓ ก๓ ณ๓ ฒ๓ ฟ Flag for Kyivshchyna (UA-32)
๐ด๓ ด๓ บ๓ ฒ๓ ณ๓ ฟ Flag for Singida (TZ-23)
๐ด๓ ด๓ บ๓ ฒ๓ ด๓ ฟ Flag for Tabora (TZ-24)
๐ด๓ ด๓ บ๓ ฑ๓ ณ๓ ฟ Flag for Mara (TZ-13)
๐ด๓ ด๓ บ๓ ฒ๓ ท๓ ฟ Flag for Geita (TZ-27)
๐ด๓ ด๓ บ๓ ณ๓ ฐ๓ ฟ Flag for Simiyu (TZ-30)
๐ด๓ ต๓ ก๓ ด๓ ธ๓ ฟ Flag for Mykolayivschyna (UA-48)
๐ด๓ ต๓ ก๓ ณ๓ ต๓ ฟ Flag for Kirovohradschyna (UA-35)
๐ด๓ ต๓ ก๓ ต๓ ถ๓ ฟ Flag for Rivnenshchyna (UA-56)
๐ด๓ ต๓ ก๓ ต๓ ณ๓ ฟ Flag for Poltavshchyna (UA-53)
๐ด๓ ด๓ บ๓ ฑ๓ ด๓ ฟ Flag for Mbeya (TZ-14)
๐ด๓ ด๓ บ๓ ฑ๓ ธ๓ ฟ Flag for Mwanza (TZ-18)
๐ด๓ ต๓ ก๓ ฒ๓ ฑ๓ ฟ Flag for Zakarpattia (UA-21)
๐ด๓ ด๓ บ๓ ฑ๓ ฐ๓ ฟ Flag for South Pemba (TZ-10)
๐ด๓ ด๓ บ๓ ฑ๓ น๓ ฟ Flag for Pwani (TZ-19)
๐ด๓ ด๓ บ๓ ฑ๓ ท๓ ฟ Flag for Mtwara (TZ-17)
๐ด๓ ต๓ ก๓ ด๓ ฐ๓ ฟ Flag for Sevastopol (UA-40)
๐ด๓ ต๓ ก๓ ต๓ ฑ๓ ฟ Flag for Odeshchyna (UA-51)
๐ด๓ ต๓ ก๓ ด๓ ถ๓ ฟ Flag for Lvivshchyna (UA-46)
๐ด๓ ต๓ ก๓ ฑ๓ ด๓ ฟ Flag for Donechchyna (UA-14)
๐ด๓ ต๓ ก๓ ฒ๓ ถ๓ ฟ Flag for Prykarpattia (UA-26)
๐ด๓ ด๓ บ๓ ฑ๓ ต๓ ฟ Flag for Zanzibar Urban/West (TZ-15)
๐ด๓ ด๓ บ๓ ฑ๓ ถ๓ ฟ Flag for Morogoro (TZ-16)
๐ด๓ ด๓ บ๓ ฒ๓ น๓ ฟ Flag for Njombe (TZ-29)
๐ด๓ ต๓ ก๓ ท๓ ท๓ ฟ Flag for Chernivtsi Oblast (UA-77)
๐ด๓ ต๓ ญ๓ น๓ ต๓ ฟ Flag for Palmyra Atoll (UM-95)
๐ด๓ ต๓ ณ๓ ซ๓ ณ๓ ฟ Flag for Kansas (US-KS)
๐จ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ต๓ ณ๓ ก๓ บ๓ ฟ Flag for Arizona (US-AZ)
๐ด๓ ต๓ ญ๓ ถ๓ ท๓ ฟ Flag for Johnston Atoll (UM-67)
๐ด๓ ต๓ ก๓ ท๓ ด๓ ฟ Flag for Chernihivshchyna (UA-74)
๐ด๓ ต๓ ญ๓ ธ๓ ด๓ ฟ Flag for Howland Island (UM-84)
๐ด๓ ต๓ ณ๓ ง๓ ก๓ ฟ Flag for Georgia (US-GA)
๐ด๓ ต๓ ณ๓ จ๓ ฉ๓ ฟ Flag for Hawaii (US-HI)
๐ด๓ ต๓ ญ๓ ท๓ ฑ๓ ฟ Flag for Midway Atoll (UM-71)
๐ด๓ ต๓ ณ๓ ก๓ ณ๓ ฟ Flag for American Samoa (US-AS)
๐ด๓ ต๓ ณ๓ ฃ๓ ด๓ ฟ Flag for Connecticut (US-CT)
๐ด๓ ต๓ ณ๓ ฉ๓ ก๓ ฟ Flag for Iowa (US-IA)
๐ด๓ ต๓ ก๓ ถ๓ ฑ๓ ฟ Flag for Ternopilshchyna (UA-61)
๐ด๓ ต๓ ง๓ ฎ๓ ฟ Flag for Northern (UG-N)
๐ด๓ ต๓ ณ๓ ง๓ ต๓ ฟ Flag for Guam (US-GU)
๐ด๓ ต๓ ญ๓ ธ๓ ฑ๓ ฟ Flag for Baker Island (UM-81)
๐ด๓ ต๓ ง๓ ฅ๓ ฟ Flag for Eastern (UG-E)
๐ด๓ ต๓ ก๓ ถ๓ ต๓ ฟ Flag for Khersonshchyna (UA-65)
๐ด๓ ต๓ ก๓ ต๓ น๓ ฟ Flag for Sumshchyna (UA-59)
๐ด๓ ต๓ ณ๓ ฉ๓ ฎ๓ ฟ Flag for Indiana (US-IN)
๐ด๓ ต๓ ณ๓ ก๓ ฒ๓ ฟ Flag for Arkansas (US-AR)
๐ด๓ ต๓ ณ๓ ค๓ ฅ๓ ฟ Flag for Delaware (US-DE)
๐ด๓ ต๓ ก๓ ถ๓ ณ๓ ฟ Flag for Kharkivshchyna (UA-63)
๐ด๓ ต๓ ณ๓ ก๓ ฌ๓ ฟ Flag for Alabama (US-AL)
๐ด๓ ต๓ ง๓ ท๓ ฟ Flag for Western (UG-W)
๐ด๓ ต๓ ก๓ ถ๓ ธ๓ ฟ Flag for Khmelnychchyna (UA-68)
๐ด๓ ต๓ ญ๓ ท๓ ถ๓ ฟ Flag for Navassa Island (UM-76)
๐ด๓ ต๓ ญ๓ ธ๓ ถ๓ ฟ Flag for Jarvis Island (UM-86)
๐ด๓ ต๓ ณ๓ ฉ๓ ค๓ ฟ Flag for Idaho (US-ID)
๐ด๓ ต๓ ญ๓ ธ๓ น๓ ฟ Flag for Kingman Reef (UM-89)
๐ด๓ ต๓ ณ๓ ฆ๓ ฌ๓ ฟ Flag for Florida (US-FL)
๐ด๓ ต๓ ญ๓ ท๓ น๓ ฟ Flag for Wake Island (UM-79)
๐ด๓ ต๓ ณ๓ ฉ๓ ฌ๓ ฟ Flag for Illinois (US-IL)
๐ด๓ ต๓ ณ๓ ค๓ ฃ๓ ฟ Flag for Washington DC (US-DC)
๐ด๓ ต๓ ก๓ ท๓ ฑ๓ ฟ Flag for Cherkashchyna (UA-71)
๐ด๓ ต๓ ณ๓ ฎ๓ น๓ ฟ Flag for New York (US-NY)
๐จ๐พโ๐จ๐พโ๐ฆ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ต๓ ณ๓ ฎ๓ ฃ๓ ฟ Flag for North Carolina (US-NC)
๐ด๓ ต๓ ณ๓ ญ๓ ณ๓ ฟ Flag for Mississippi (US-MS)
๐ด๓ ต๓ ณ๓ ญ๓ ก๓ ฟ Flag for Massachusetts (US-MA)
๐ด๓ ต๓ ณ๓ ฎ๓ ถ๓ ฟ Flag for Nevada (US-NV)
๐ด๓ ต๓ ณ๓ ท๓ ฉ๓ ฟ Flag for Wisconsin (US-WI)
๐ด๓ ต๓ ณ๓ ญ๓ ค๓ ฟ Flag for Maryland (US-MD)
๐ด๓ ต๓ ณ๓ ฎ๓ ญ๓ ฟ Flag for New Mexico (US-NM)
๐ด๓ ต๓ ณ๓ ฐ๓ ฒ๓ ฟ Flag for Puerto Rico (US-PR)
๐ด๓ ต๓ ณ๓ ต๓ ญ๓ ฟ Flag for U.S. Outlying Islands (US-UM)
๐ด๓ ต๓ ณ๓ ท๓ น๓ ฟ Flag for Wyoming (US-WY)
๐ด๓ ต๓ ณ๓ ฏ๓ จ๓ ฟ Flag for Ohio (US-OH)
๐ด๓ ต๓ ณ๓ ซ๓ น๓ ฟ Flag for Kentucky (US-KY)
๐ด๓ ต๓ ณ๓ ฎ๓ ช๓ ฟ Flag for New Jersey (US-NJ)
๐ด๓ ต๓ ณ๓ ฏ๓ ฒ๓ ฟ Flag for Oregon (US-OR)
๐ด๓ ต๓ ณ๓ ญ๓ ฉ๓ ฟ Flag for Michigan (US-MI)
๐ด๓ ต๓ ณ๓ ถ๓ ฉ๓ ฟ Flag for U.S. Virgin Islands (US-VI)
๐ด๓ ต๓ ณ๓ ญ๓ ฏ๓ ฟ Flag for Missouri (US-MO)
๐ด๓ ต๓ ณ๓ ฐ๓ ก๓ ฟ Flag for Pennsylvania (US-PA)
๐ด๓ ต๓ ณ๓ ถ๓ ก๓ ฟ Flag for Virginia (US-VA)
๐ด๓ ต๓ น๓ ก๓ ฒ๓ ฟ Flag for Artigas (UY-AR)
๐ด๓ ต๓ น๓ ฃ๓ ก๓ ฟ Flag for Canelones (UY-CA)
๐ด๓ ต๓ ณ๓ ท๓ ก๓ ฟ Flag for Washington (US-WA)
๐ด๓ ต๓ ณ๓ ณ๓ ฃ๓ ฟ Flag for South Carolina (US-SC)
๐ด๓ ต๓ ณ๓ ญ๓ ฅ๓ ฟ Flag for Maine (US-ME)
๐ด๓ ต๓ ณ๓ ฌ๓ ก๓ ฟ Flag for Louisiana (US-LA)
๐ด๓ ต๓ ณ๓ ญ๓ ฎ๓ ฟ Flag for Minnesota (US-MN)
๐ด๓ ต๓ ณ๓ ฒ๓ ฉ๓ ฟ Flag for Rhode Island (US-RI)
๐ด๓ ต๓ ณ๓ ท๓ ถ๓ ฟ Flag for West Virginia (US-WV)
๐ด๓ ต๓ ณ๓ ด๓ ธ๓ ฟ Flag for Texas (US-TX)
๐ด๓ ต๓ ณ๓ ต๓ ด๓ ฟ Flag for Utah (US-UT)
๐ด๓ ต๓ ณ๓ ฏ๓ ซ๓ ฟ Flag for Oklahoma (US-OK)
๐ด๓ ต๓ ณ๓ ฎ๓ จ๓ ฟ Flag for New Hampshire (US-NH)
๐ด๓ ต๓ บ๓ ณ๓ ก๓ ฟ Flag for Samarqand (UZ-SA)
๐ด๓ ต๓ น๓ ญ๓ ก๓ ฟ Flag for Maldonado (UY-MA)
๐ด๓ ต๓ บ๓ ฎ๓ ง๓ ฟ Flag for Namangan (UZ-NG)
๐ด๓ ถ๓ ฃ๓ ฐ๓ ฑ๓ ฟ Flag for Charlotte (VC-01)
๐ด๓ ต๓ น๓ ณ๓ ก๓ ฟ Flag for Salto (UY-SA)
๐ด๓ ต๓ น๓ ฃ๓ ฌ๓ ฟ Flag for Cerro Largo (UY-CL)
๐ด๓ ต๓ น๓ ด๓ ก๓ ฟ Flag for Tacuarembรณ (UY-TA)
๐ด๓ ถ๓ ฅ๓ ก๓ ฟ Flag for Capital (VE-A)
๐ด๓ ถ๓ ฅ๓ ข๓ ฟ Flag for Anzoรกtegui (VE-B)
๐ด๓ ถ๓ ฃ๓ ฐ๓ ฒ๓ ฟ Flag for Saint Andrew (VC-02)
๐ด๓ ต๓ น๓ ณ๓ ฏ๓ ฟ Flag for Soriano (UY-SO)
๐ด๓ ต๓ น๓ ฒ๓ ฏ๓ ฟ Flag for Rocha (UY-RO)
๐ด๓ ถ๓ ฃ๓ ฐ๓ ณ๓ ฟ Flag for Saint David (VC-03)
๐ด๓ ต๓ น๓ ณ๓ ช๓ ฟ Flag for San Josรฉ (UY-SJ)
๐ด๓ ต๓ น๓ ฆ๓ ค๓ ฟ Flag for Florida (UY-FD)
๐ด๓ ต๓ น๓ ฃ๓ ฏ๓ ฟ Flag for Colonia (UY-CO)
๐ด๓ ต๓ น๓ ฆ๓ ณ๓ ฟ Flag for Flores (UY-FS)
๐ด๓ ต๓ บ๓ ธ๓ ฏ๓ ฟ Flag for Xorazm (UZ-XO)
๐ด๓ ต๓ น๓ ค๓ ต๓ ฟ Flag for Durazno (UY-DU)
๐ด๓ ต๓ บ๓ ก๓ ฎ๓ ฟ Flag for Andijan (UZ-AN)
๐ด๓ ถ๓ ฅ๓ ค๓ ฟ Flag for Aragua (VE-D)
๐ด๓ ต๓ บ๓ ณ๓ ฉ๓ ฟ Flag for Sirdaryo (UZ-SI)
๐ด๓ ต๓ น๓ ฐ๓ ก๓ ฟ Flag for Paysandรบ (UY-PA)
๐ด๓ ถ๓ ฃ๓ ฐ๓ ถ๓ ฟ Flag for Grenadines (VC-06)
๐ด๓ ต๓ น๓ ฒ๓ ถ๓ ฟ Flag for Rivera (UY-RV)
๐ด๓ ต๓ น๓ ฌ๓ ก๓ ฟ Flag for Lavalleja (UY-LA)
๐ด๓ ต๓ บ๓ ณ๓ ต๓ ฟ Flag for Surxondaryo (UZ-SU)
๐ด๓ ต๓ บ๓ ด๓ ฏ๓ ฟ Flag for Tashkent Province (UZ-TO)
๐ด๓ ต๓ บ๓ ฑ๓ ก๓ ฟ Flag for Qashqadaryo (UZ-QA)
๐ด๓ ต๓ น๓ ด๓ ด๓ ฟ Flag for Treinta y Tres (UY-TT)
๐ด๓ ต๓ น๓ ญ๓ ฏ๓ ฟ Flag for Montevideo (UY-MO)
๐ด๓ ต๓ บ๓ ข๓ ต๓ ฟ Flag for Bukhara (UZ-BU)
๐ด๓ ต๓ บ๓ ฆ๓ ก๓ ฟ Flag for Fergana (UZ-FA)
๐ด๓ ต๓ บ๓ ฑ๓ ฒ๓ ฟ Flag for Karakalpakstan (UZ-QR)
๐ด๓ ต๓ บ๓ ช๓ ฉ๓ ฟ Flag for Jizzakh (UZ-JI)
๐ด๓ ต๓ น๓ ฒ๓ ฎ๓ ฟ Flag for Rรญo Negro (UY-RN)
๐ด๓ ต๓ บ๓ ด๓ ซ๓ ฟ Flag for Tashkent (UZ-TK)
๐ด๓ ถ๓ ฃ๓ ฐ๓ ต๓ ฟ Flag for Saint Patrick (VC-05)
๐ด๓ ต๓ บ๓ ฎ๓ ท๓ ฟ Flag for Navoiy (UZ-NW)
๐ด๓ ถ๓ ฅ๓ ซ๓ ฟ Flag for Lara (VE-K)
๐ด๓ ถ๓ ฅ๓ ฏ๓ ฟ Flag for Nueva Esparta (VE-O)
๐ด๓ ถ๓ ฅ๓ ณ๓ ฟ Flag for Tรกchira (VE-S)
๐ด๓ ถ๓ ฅ๓ ฆ๓ ฟ Flag for Bolรญvar (VE-F)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ฑ๓ ฟ Flag for Thanh Hรณa (VN-21)
๐ด๓ ถ๓ ฎ๓ ฑ๓ ด๓ ฟ Flag for Hรฒa Bรฌnh (VN-14)
๐ด๓ ถ๓ ฅ๓ ช๓ ฟ Flag for Guรกrico (VE-J)
๐ด๓ ถ๓ ฅ๓ จ๓ ฟ Flag for Cojedes (VE-H)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ถ๓ ฟ Flag for Thแปซa ThiรชnโHuแบฟ (VN-26)
๐ด๓ ถ๓ ฅ๓ ฐ๓ ฟ Flag for Portuguesa (VE-P)
๐ด๓ ถ๓ ฎ๓ ฑ๓ ธ๓ ฟ Flag for Ninh Bรฌnh (VN-18)
๐ด๓ ถ๓ ฅ๓ ฒ๓ ฟ Flag for Sucre (VE-R)
๐ด๓ ถ๓ ฎ๓ ฐ๓ ฑ๓ ฟ Flag for Lai Chรขu (VN-01)
๐ด๓ ถ๓ ฎ๓ ฐ๓ น๓ ฟ Flag for Lแบกng Sฦกn (VN-09)
๐ด๓ ถ๓ ฅ๓ ญ๓ ฟ Flag for Miranda (VE-M)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ด๓ ฟ Flag for Quแบฃng Bรฌnh (VN-24)
๐ด๓ ถ๓ ฅ๓ ฅ๓ ฟ Flag for Barinas (VE-E)
๐ด๓ ถ๓ ฅ๓ ฎ๓ ฟ Flag for Monagas (VE-N)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ฒ๓ ฟ Flag for Nghแป An (VN-22)
๐ด๓ ถ๓ ฎ๓ ฐ๓ ฒ๓ ฟ Flag for Lร o Cai (VN-02)
๐ด๓ ถ๓ ฎ๓ ฐ๓ ท๓ ฟ Flag for Tuyรชn Quang (VN-07)
๐ด๓ ถ๓ ฎ๓ ฐ๓ ต๓ ฟ Flag for Sฦกn La (VN-05)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ฐ๓ ฟ Flag for Thรกi Bรฌnh (VN-20)
๐ด๓ ถ๓ ฅ๓ ท๓ ฟ Flag for Federal Dependencies (VE-W)
๐ด๓ ถ๓ ฎ๓ ฒ๓ น๓ ฟ Flag for Quแบฃng Ngรฃi (VN-29)
๐ด๓ ถ๓ ฅ๓ ฌ๓ ฟ Flag for Mรฉrida (VE-L)
๐ด๓ ถ๓ ฅ๓ ฉ๓ ฟ Flag for Falcรณn (VE-I)
๐ด๓ ถ๓ ฎ๓ ฐ๓ ด๓ ฟ Flag for Cao Bแบฑng (VN-04)
๐ด๓ ถ๓ ฅ๓ บ๓ ฟ Flag for Amazonas (VE-Z)
๐ด๓ ถ๓ ฎ๓ ฐ๓ ถ๓ ฟ Flag for Yรชn Bรกi (VN-06)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ณ๓ ฟ Flag for Hร Tฤฉnh (VN-23)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ธ๓ ฟ Flag for Kon Tum (VN-28)
๐ด๓ ถ๓ ฅ๓ ธ๓ ฟ Flag for Vargas (VE-X)
๐ด๓ ถ๓ ฅ๓ ต๓ ฟ Flag for Yaracuy (VE-U)
๐ด๓ ถ๓ ฅ๓ ด๓ ฟ Flag for Trujillo (VE-T)
๐ด๓ ถ๓ ฎ๓ ฑ๓ ณ๓ ฟ Flag for Quแบฃng Ninh (VN-13)
๐ด๓ ถ๓ ฎ๓ ฐ๓ ณ๓ ฟ Flag for Hร Giang (VN-03)
๐ด๓ ถ๓ ฎ๓ ฒ๓ ท๓ ฟ Flag for Quแบฃng Nam (VN-27)
๐ด๓ ถ๓ ฎ๓ ต๓ ถ๓ ฟ Flag for Bแบฏc Ninh (VN-56)
๐ด๓ ถ๓ ฎ๓ ณ๓ ถ๓ ฟ Flag for Ninh Thuแบญn (VN-36)
๐ด๓ ถ๓ ฎ๓ ถ๓ น๓ ฟ Flag for Thรกi Nguyรชn (VN-69)
๐ด๓ ถ๓ ฎ๓ ถ๓ ท๓ ฟ Flag for Nam ฤแปnh (VN-67)
๐ด๓ ถ๓ ฎ๓ ณ๓ ต๓ ฟ Flag for Lรขm ฤแปng (VN-35)
๐ด๓ ถ๓ ฎ๓ ถ๓ ฑ๓ ฟ Flag for Hแบฃi Dฦฐฦกng (VN-61)
๐ด๓ ถ๓ ฎ๓ ต๓ ฒ๓ ฟ Flag for Sรณc Trฤng (VN-52)
๐ด๓ ถ๓ ฎ๓ ท๓ ณ๓ ฟ Flag for Hแบญu Giang (VN-73)
๐ด๓ ถ๓ ฎ๓ ท๓ ฐ๓ ฟ Flag for Vฤฉnh Phรบc (VN-70)
๐ด๓ ถ๓ ฎ๓ ต๓ ฐ๓ ฟ Flag for Bแบฟn Tre (VN-50)
๐ด๓ ถ๓ ฎ๓ ต๓ ณ๓ ฟ Flag for Bแบฏc Kแบกn (VN-53)
๐ด๓ ถ๓ ฎ๓ ต๓ ด๓ ฟ Flag for Bแบฏc Giang (VN-54)
๐ด๓ ถ๓ ฎ๓ ณ๓ ณ๓ ฟ Flag for ฤแบฏk Lแบฏk (VN-33)
๐ด๓ ถ๓ ฎ๓ ต๓ ท๓ ฟ Flag for Bรฌnh Dฦฐฦกng (VN-57)
๐ด๓ ถ๓ ฎ๓ ค๓ ฎ๓ ฟ Flag for Da Nang (VN-DN)
๐ด๓ ถ๓ ฎ๓ ด๓ ถ๓ ฟ Flag for Tiแปn Giang (VN-46)
๐ด๓ ถ๓ ฎ๓ ด๓ ณ๓ ฟ Flag for Bร RแปaโVลฉng Tร u (VN-43)
๐ด๓ ถ๓ ฎ๓ ท๓ ฑ๓ ฟ Flag for ฤiแปn Biรชn (VN-71)
๐ด๓ ถ๓ ฎ๓ ต๓ ธ๓ ฟ Flag for Bรฌnh Phฦฐแปc (VN-58)
๐ด๓ ถ๓ ฎ๓ ฃ๓ ด๓ ฟ Flag for Can Tho (VN-CT)
๐ด๓ ถ๓ ฎ๓ ต๓ ต๓ ฟ Flag for Bแบกc Liรชu (VN-55)
๐ด๓ ถ๓ ฎ๓ ณ๓ ฒ๓ ฟ Flag for Phรบ Yรชn (VN-32)
๐ด๓ ถ๓ ฎ๓ ด๓ ด๓ ฟ Flag for An Giang (VN-44)
๐ด๓ ถ๓ ฎ๓ ถ๓ ณ๓ ฟ Flag for Hร Nam (VN-63)
๐ด๓ ถ๓ ฎ๓ ต๓ น๓ ฟ Flag for Cร Mau (VN-59)
๐ด๓ ถ๓ ฎ๓ ด๓ ท๓ ฟ Flag for Kiรชn Giang (VN-47)
๐ด๓ ถ๓ ฎ๓ ณ๓ ด๓ ฟ Flag for Khรกnh Hรฒa (VN-34)
๐ด๓ ถ๓ ฎ๓ ด๓ ต๓ ฟ Flag for ฤแปng Thรกp (VN-45)
๐ด๓ ถ๓ ฎ๓ ณ๓ น๓ ฟ Flag for ฤแปng Nai (VN-39)
๐ด๓ ถ๓ ฎ๓ จ๓ ฎ๓ ฟ Flag for Hanoi (VN-HN)
๐ด๓ ถ๓ ฎ๓ ด๓ น๓ ฟ Flag for Vฤฉnh Long (VN-49)
๐ด๓ ถ๓ ฎ๓ ถ๓ ธ๓ ฟ Flag for Phรบ Thแป (VN-68)
๐ด๓ ถ๓ ฎ๓ ณ๓ ท๓ ฟ Flag for Tรขy Ninh (VN-37)
๐ด๓ ถ๓ ฎ๓ ณ๓ ฐ๓ ฟ Flag for Gia Lai (VN-30)
๐ด๓ ถ๓ ฎ๓ ท๓ ฒ๓ ฟ Flag for ฤแบฏk Nรดng (VN-72)
๐ด๓ ถ๓ ฎ๓ ด๓ ฐ๓ ฟ Flag for Bรฌnh Thuแบญn (VN-40)
๐ด๓ ถ๓ ฎ๓ ด๓ ฑ๓ ฟ Flag for Long An (VN-41)
๐ด๓ ถ๓ ฎ๓ ณ๓ ฑ๓ ฟ Flag for Bรฌnh ฤแปnh (VN-31)
๐ด๓ ท๓ ฆ๓ ต๓ ถ๓ ฟ Flag for Uvea (WF-UV)
๐ด๓ น๓ ฅ๓ ณ๓ ค๓ ฟ Flag for Saโdah (YE-SD)
๐ด๓ น๓ ฅ๓ ก๓ ข๓ ฟ Flag for Abyan (YE-AB)
๐ด๓ น๓ ฅ๓ จ๓ ช๓ ฟ Flag for Hajjah (YE-HJ)
๐ด๓ ถ๓ ต๓ ญ๓ ก๓ ฐ๓ ฟ Flag for Malampa (VU-MAP)
๐ด๓ ท๓ ณ๓ ก๓ ด๓ ฟ Flag for Atua (WS-AT)
๐ด๓ ท๓ ณ๓ ถ๓ ฆ๓ ฟ Flag for Vaโa-o-Fonoti (WS-VF)
๐ด๓ น๓ ฅ๓ จ๓ ต๓ ฟ Flag for Al Hudaydah (YE-HU)
๐ด๓ ท๓ ณ๓ ฐ๓ ก๓ ฟ Flag for Palauli (WS-PA)
๐ด๓ ท๓ ณ๓ ณ๓ ก๓ ฟ Flag for Satupaโitea (WS-SA)
๐ด๓ น๓ ฅ๓ ค๓ ก๓ ฟ Flag for Dhale (YE-DA)
๐ด๓ ญ๓ ฌ๓ ถ๓ ฟ Flag for Tombouctou (ML-6)
๐ด๓ น๓ ฅ๓ ฒ๓ ก๓ ฟ Flag for Raymah (YE-RA)
๐ด๓ ถ๓ ต๓ ณ๓ ก๓ ญ๓ ฟ Flag for Sanma (VU-SAM)
๐ด๓ ท๓ ฆ๓ ก๓ ฌ๓ ฟ Flag for Alo (WF-AL)
๐ด๓ น๓ ฅ๓ ญ๓ ฒ๓ ฟ Flag for Al Mahrah (YE-MR)
๐จ๐ปโ๐จ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ น๓ ฅ๓ ก๓ ค๓ ฟ Flag for โAdan (YE-AD)
๐ด๓ น๓ ฅ๓ ณ๓ จ๓ ฟ Flag for Shabwah (YE-SH)
๐ด๓ ถ๓ ต๓ ด๓ ก๓ ฅ๓ ฟ Flag for Tafea (VU-TAE)
๐ด๓ น๓ ฅ๓ ก๓ ญ๓ ฟ Flag for Amran (YE-AM)
๐ด๓ ถ๓ ต๓ ฐ๓ ก๓ ญ๓ ฟ Flag for Penama (VU-PAM)
๐ด๓ น๓ ฅ๓ ญ๓ ท๓ ฟ Flag for Al Mahwit (YE-MW)
๐ด๓ ท๓ ณ๓ ง๓ ฅ๓ ฟ Flag for Gagaโemauga (WS-GE)
๐ด๓ น๓ ฅ๓ จ๓ ค๓ ฟ Flag for Hadramaut (YE-HD)
๐ด๓ ท๓ ณ๓ ก๓ ฌ๓ ฟ Flag for Aiga-i-le-Tai (WS-AL)
๐ด๓ น๓ ฅ๓ ญ๓ ก๓ ฟ Flag for Maโrib (YE-MA)
๐ด๓ น๓ ฅ๓ ข๓ ก๓ ฟ Flag for Al Bayda (YE-BA)
๐ด๓ ถ๓ ฎ๓ จ๓ ฐ๓ ฟ Flag for Haiphong (VN-HP)
๐ด๓ ท๓ ณ๓ ก๓ ก๓ ฟ Flag for Aโana (WS-AA)
๐ด๓ ท๓ ฆ๓ ณ๓ ง๓ ฟ Flag for Sigave (WF-SG)
๐ด๓ น๓ ฅ๓ ฌ๓ ก๓ ฟ Flag for Lahij (YE-LA)
๐ด๓ ถ๓ ต๓ ณ๓ ฅ๓ ฅ๓ ฟ Flag for Shefa (VU-SEE)
๐ด๓ น๓ ฅ๓ ฉ๓ ข๓ ฟ Flag for Ibb (YE-IB)
๐ด๓ ถ๓ ต๓ ด๓ ฏ๓ ข๓ ฟ Flag for Torba (VU-TOB)
๐ด๓ น๓ ฅ๓ ช๓ ก๓ ฟ Flag for Al Jawf (YE-JA)
๐ด๓ ท๓ ณ๓ ด๓ ต๓ ฟ Flag for Tuamasaga (WS-TU)
๐ด๓ น๓ ฅ๓ ค๓ จ๓ ฟ Flag for Dhamar (YE-DH)
๐ด๓ บ๓ ก๓ ท๓ ฃ๓ ฟ Flag for Western Cape (ZA-WC)
๐ด๓ น๓ ฅ๓ ณ๓ ต๓ ฟ Flag for Arkhabil Suqutra (YE-SU)
๐ด๓ บ๓ ท๓ ญ๓ ฎ๓ ฟ Flag for Matabeleland North (ZW-MN)
๐ด๓ บ๓ ท๓ ญ๓ ฅ๓ ฟ Flag for Mashonaland East (ZW-ME)
๐ด๓ บ๓ ญ๓ ฐ๓ ถ๓ ฟ Flag for North-Western (ZM-06)
๐ด๓ น๓ ฅ๓ ณ๓ ฎ๓ ฟ Flag for Sanaโa (YE-SN)
๐ด๓ บ๓ ก๓ ฌ๓ ฐ๓ ฟ Flag for Limpopo (ZA-LP)
๐ด๓ บ๓ ญ๓ ฐ๓ ณ๓ ฟ Flag for Eastern (ZM-03)
๐ด๓ บ๓ ท๓ ญ๓ ฉ๓ ฟ Flag for Midlands (ZW-MI)
๐ด๓ บ๓ ท๓ ข๓ ต๓ ฟ Flag for Bulawayo (ZW-BU)
๐ด๓ บ๓ ญ๓ ฐ๓ ต๓ ฟ Flag for Northern (ZM-05)
๐ด๓ บ๓ ญ๓ ฐ๓ ท๓ ฟ Flag for Southern (ZM-07)
๐ด๓ บ๓ ก๓ ฆ๓ ณ๓ ฟ Flag for Free State (ZA-FS)
๐ด๓ บ๓ ท๓ ญ๓ ณ๓ ฟ Flag for Matabeleland South (ZW-MS)
๐ด๓ บ๓ ก๓ ฅ๓ ฃ๓ ฟ Flag for Eastern Cape (ZA-EC)
๐ด๓ บ๓ ญ๓ ฐ๓ ฑ๓ ฟ Flag for Western (ZM-01)
๐จ๐ผโ๐จ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ บ๓ ญ๓ ฐ๓ ธ๓ ฟ Flag for Copperbelt (ZM-08)
๐ด๓ บ๓ ก๓ ฎ๓ ท๓ ฟ Flag for North West (ZA-NW)
๐ด๓ บ๓ ญ๓ ฑ๓ ฐ๓ ฟ Flag for Muchinga (ZM-10)
๐ด๓ บ๓ ก๓ ง๓ ด๓ ฟ Flag for Gauteng (ZA-GT)
๐ด๓ บ๓ ญ๓ ฐ๓ น๓ ฟ Flag for Lusaka (ZM-09)
๐ด๓ บ๓ ญ๓ ฐ๓ ฒ๓ ฟ Flag for Central (ZM-02)
๐ด๓ บ๓ ก๓ ฎ๓ ฃ๓ ฟ Flag for Northern Cape (ZA-NC)
๐ด๓ บ๓ ก๓ ญ๓ ฐ๓ ฟ Flag for Mpumalanga (ZA-MP)
๐ด๓ น๓ ฅ๓ ด๓ ก๓ ฟ Flag for Taiz (YE-TA)
๐ด๓ บ๓ ก๓ ฎ๓ ฌ๓ ฟ Flag for KwaZulu-Natal (ZA-NL)
๐ด๓ บ๓ ท๓ ญ๓ ก๓ ฟ Flag for Manicaland (ZW-MA)
๐ด๓ บ๓ ท๓ ญ๓ ถ๓ ฟ Flag for Masvingo (ZW-MV)
๐ด๓ บ๓ ญ๓ ฐ๓ ด๓ ฟ Flag for Luapula (ZM-04)
๐ด๓ บ๓ ท๓ ญ๓ ท๓ ฟ Flag for Mashonaland West (ZW-MW)
๐ด๓ บ๓ ท๓ จ๓ ก๓ ฟ Flag for Harare (ZW-HA)
๐จ๐ฝโ๐จ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone
๐จ๐พโ๐จ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ฆ๓ ฒ๓ ฐ๓ ค๓ ฌ๓ ฟ Flag for Pays-de-la-Loire (FR-PDL)
๐ด๓ ฌ๓ ด๓ ฒ๓ ฐ๓ ฟ Flag for Klaipฤdos Municipality (LT-20)
๐ด๓ ง๓ ฒ๓ ญ๓ ฟ Flag for Crete (GR-M)
๓ ธ Tag Latin Small Letter X
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ฑ๓ ฟ Flag for Mazandaran (IR-21)
๐ด๓ ฒ๓ ต๓ ฐ๓ ฒ๓ ฉ๓ ฟ Flag for Primorsky Krai (RU-PRI)
๐ด๓ ช๓ ฐ๓ ฐ๓ ท๓ ฟ Flag for Fukushima (JP-07)
๐ด๓ ฃ๓ ก๓ ญ๓ ข๓ ฟ Flag for Manitoba (CA-MB)
๐จ๐ปโ๐จ๐ปโ๐ฆ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ปโโค๏ธโ๐ฉ๐ป Couple With Heart - Woman: Light Skin Tone, Woman: Light Skin Tone
๐ด๓ ฃ๓ ก๓ ฑ๓ ฃ๓ ฟ Flag for Quebec (CA-QC)
๐จโ๐ฉโ๐ถ Family: Man, Woman, Baby
๐ด๓ ฎ๓ ก๓ ซ๓ ฅ๓ ฟ Flag for Kavango East (NA-KE)
๐ด๓ ญ๓ ธ๓ ณ๓ ฌ๓ ฐ๓ ฟ Flag for San Luis Potosรญ (MX-SLP)
๐ด๓ ฅ๓ ฅ๓ ต๓ น๓ ฟ Flag for Lรครคne-Viru (EE-59)
๐ด๓ ฌ๓ ฒ๓ ข๓ ง๓ ฟ Flag for Bong (LR-BG)
๐ด๓ ฐ๓ ณ๓ ค๓ ฅ๓ ข๓ ฟ Flag for Deir al-Balah (PS-DEB)
๐จ๐ฟโ๐จ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ช๓ ญ๓ ฐ๓ ณ๓ ฟ Flag for Saint Thomas (JM-03)
๐ด๓ ฐ๓ ท๓ ฑ๓ ฐ๓ ฐ๓ ฟ Flag for Kayangel (PW-100)
๐ด๓ ฃ๓ ง๓ ฑ๓ ฒ๓ ฟ Flag for Pool (CG-12)
๐จโโค๏ธโ๐จ๐พ Couple With Heart - Man, Man: Medium-Dark Skin Tone
๐ด๓ ฅ๓ ณ๓ ฉ๓ ข๓ ฟ Flag for Balearic Islands (ES-IB)
๐ฉโ๐จโ๐ฆ Family: Woman, Man, Boy
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ธ๓ ฟ Flag for Uusimaa (FI-18)
๐จ๐ปโ๐ฉ๐ปโ๐ฆ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ข๓ ฒ๓ ฃ๓ ฅ๓ ฟ Flag for Cearรก (BR-CE)
๐จโ๐ฉโ๐ฆโ๐ถ Family: Man, Woman, Boy, Baby
๐จ๐ปโ๐จ๐ปโ๐ง๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ญ๓ ซ๓ ฒ๓ ต๓ ฟ Flag for Demir Hisar (MK-25)
๐ด๓ ฃ๓ ฌ๓ ก๓ ฎ๓ ฟ Flag for Antofagasta (CL-AN)
๐ด๓ ข๓ ข๓ ฐ๓ ฑ๓ ฟ Flag for Christ Church (BB-01)
๐ด๓ ฅ๓ ฅ๓ ณ๓ ท๓ ฟ Flag for Harju (EE-37)
๐จ๐ฟโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Man: Dark Skin Tone, Woman: Medium Skin Tone
๐ด๓ ฎ๓ ฒ๓ ฑ๓ ด๓ ฟ Flag for Yaren (NR-14)
๐ฉโโค๏ธโ๐ฉ๐ป Couple With Heart - Woman, Woman: Light Skin Tone
๐ด๓ ญ๓ น๓ ฑ๓ ฐ๓ ฟ Flag for Selangor (MY-10)
๐จ๐ผโ๐จ๐ผโ๐ง๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ฐ๓ ฅ๓ ก๓ ฐ๓ ต๓ ฟ Flag for Apurรญmac (PE-APU)
๐ฉโ๐จโ๐ฆโ๐ง Family: Woman, Man, Boy, Girl
๐จ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ง๓ ฅ๓ ก๓ ข๓ ฟ Flag for Abkhazia (GE-AB)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ธ๓ ฟ Flag for Schellenberg (LI-08)
๐ด๓ ด๓ ฒ๓ ธ๓ ฑ๓ ฟ Flag for Dรผzce (TR-81)
๐ฉ๐พโ๐ง๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ฉโ๐จโ๐ถโ๐ฆ Family: Woman, Man, Baby, Boy
๐ด๓ ญ๓ ธ๓ ณ๓ ฏ๓ ฎ๓ ฟ Flag for Sonora (MX-SON)
๐ด๓ ฃ๓ ฉ๓ ณ๓ ญ๓ ฟ Flag for Sassandra-Marahouรฉ (CI-SM)
๐ด๓ ฐ๓ ฅ๓ ก๓ ฒ๓ ฅ๓ ฟ Flag for Arequipa (PE-ARE)
๐ฉ๐ฝโโค๏ธโ๐ฉ๐ผ Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ฃ๓ ง๓ ฑ๓ ฑ๓ ฟ Flag for Bouenza (CG-11)
๐ด๓ ช๓ ญ๓ ฑ๓ ด๓ ฟ Flag for Saint Catherine (JM-14)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ฒ๓ ฟ Flag for ล kofja Loka (SI-122)
๐ฉ๐ปโโค๏ธโ๐โ๐จ๐ผ Kiss - Woman: Light Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ด๓ ท๓ จ๓ ณ๓ บ๓ ฟ Flag for Hsinchu (TW-HSZ)
๐ฉ๐ผโ๐ง๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ฌ๓ ซ๓ ณ๓ ฟ Flag for Southern (LK-3)
๐จโโค๏ธโ๐โ๐จ๐ผ Kiss - Man, Man: Medium-Light Skin Tone
๐จ๐ฝโ๐จ๐ฝโ๐ง๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ฎ๓ ฉ๓ ฌ๓ ฅ๓ ฟ Flag for Leรณn (NI-LE)
๐ด๓ จ๓ ฒ๓ ฐ๓ ต๓ ฟ Flag for Varaลพdin (HR-05)
๐ด๓ ฃ๓ ฏ๓ ก๓ ฎ๓ ด๓ ฟ Flag for Antioquia (CO-ANT)
๐ด๓ ญ๓ ฃ๓ ณ๓ ค๓ ฟ Flag for Sainte-Dรฉvote Chapel (MC-SD)
๐ด๓ ญ๓ ซ๓ ถ๓ ฑ๓ ฟ Flag for Plasnica (MK-61)
๐จ๐พโโค๏ธโ๐จ๐ป Couple With Heart - Man: Medium-Dark Skin Tone, Man: Light Skin Tone
๐ด๓ ง๓ ฒ๓ ง๓ ฟ Flag for West Greece (GR-G)
๐ด๓ ญ๓ ถ๓ ฎ๓ ฏ๓ ฟ Flag for North Province (MV-NO)
๐จโโค๏ธโ๐ฉ๐ป Couple With Heart - Man, Woman: Light Skin Tone
๐ด๓ ถ๓ ฅ๓ ฃ๓ ฟ Flag for Apure (VE-C)
โฟ๏ธ Mercury
๐ด๓ ต๓ ณ๓ ญ๓ ด๓ ฟ Flag for Montana (US-MT)
๐ฉ๐ผโโค๏ธโ๐จ๐พ Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
๐จ๐พโ๐จ๐พโ๐ง๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ฅ๓ ฃ๓ ฅ๓ ฟ Flag for Esmeraldas (EC-E)
๐ด๓ ค๓ บ๓ ฐ๓ ธ๓ ฟ Flag for Bรฉchar (DZ-08)
๐ด๓ ฎ๓ ฌ๓ ฎ๓ จ๓ ฟ Flag for North Holland (NL-NH)
๐ด๓ ฆ๓ ฒ๓ ข๓ ฌ๓ ฟ Flag for St. Barthรฉlemy (FR-BL)
๐ด๓ ฃ๓ ฆ๓ ต๓ ซ๓ ฟ Flag for Ouaka (CF-UK)
๐ด๓ ณ๓ ค๓ ฒ๓ ณ๓ ฟ Flag for Red Sea (SD-RS)
๐ด๓ ญ๓ ธ๓ ด๓ ก๓ ข๓ ฟ Flag for Tabasco (MX-TAB)
๐ด๓ ฃ๓ ฎ๓ น๓ ฒ๓ ฟ Flag for Macau SAR China (CN-92)
๐ด๓ จ๓ ต๓ ฅ๓ ง๓ ฟ Flag for Eger (HU-EG)
๐ด๓ ฒ๓ ต๓ ณ๓ ฅ๓ ฟ Flag for North Ossetia-Alania (RU-SE)
๐ด๓ ฃ๓ ค๓ ฅ๓ ฑ๓ ฟ Flag for รquateur (CD-EQ)
๐จ๐ฟโ๐จ๐ฟโ๐ง๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ฅ๓ ณ๓ ฐ๓ ถ๓ ฟ Flag for Basque Country (ES-PV)
๐จ๐ฝโโค๏ธโ๐โ๐จ๐ป Kiss - Man: Medium Skin Tone, Man: Light Skin Tone
๐ด๓ ด๓ ฎ๓ ท๓ ฑ๓ ฟ Flag for Gafsa (TN-71)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ถ๓ ฟ Flag for Tavastia Proper (FI-06)
๐ด๓ ฉ๓ ฒ๓ ณ๓ ฐ๓ ฟ Flag for Razavi Khorasan (IR-30)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ด๓ ฟ Flag for Dobje (SI-154)
๐จ๐ผโโค๏ธโ๐โ๐จ๐ป Kiss - Man: Medium-Light Skin Tone, Man: Light Skin Tone
๐ด๓ ง๓ ด๓ ฒ๓ ฅ๓ ฟ Flag for Retalhuleu (GT-RE)
๐ด๓ ซ๓ ฉ๓ ฌ๓ ฟ Flag for Line Islands (KI-L)
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for West Azarbaijan (IR-02)
๐ด๓ ฃ๓ ฏ๓ ฎ๓ ก๓ ฒ๓ ฟ Flag for Nariรฑo (CO-NAR)
๐ด๓ บ๓ ท๓ ญ๓ ฃ๓ ฟ Flag for Mashonaland Central (ZW-MC)
๐จ๐ปโโค๏ธโ๐จ๐ป Couple With Heart - Man: Light Skin Tone, Man: Light Skin Tone
๐ด๓ ฉ๓ ด๓ ด๓ ต๓ ฟ Flag for Emilia-Romagna (IT-45)
๐ด๓ ฅ๓ ณ๓ ถ๓ ฃ๓ ฟ Flag for Valencian Community (ES-VC)
๐ด๓ ด๓ จ๓ ท๓ ต๓ ฟ Flag for Samut Songkhram (TH-75)
๐ด๓ ฆ๓ ฒ๓ ฉ๓ ค๓ ฆ๓ ฟ Flag for รle-de-France (FR-IDF)
๐ด๓ ฌ๓ ณ๓ ก๓ ฟ Flag for Maseru (LS-A)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ต๓ ฟ Flag for Marsabit (KE-25)
๐ด๓ ค๓ บ๓ ฐ๓ ฑ๓ ฟ Flag for Adrar (DZ-01)
๐ด๓ ณ๓ ถ๓ ต๓ ณ๓ ฟ Flag for Usulutรกn (SV-US)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ฐ๓ ฟ Flag for Mazsalaca (LV-060)
๐ฉ๐ปโโค๏ธโ๐โ๐ฉ๐พ Kiss - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone
๐จ๐พโ๐ฆ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ด๓ จ๓ ณ๓ ถ๓ ฟ Flag for Chaiyaphum (TH-36)
๐ด๓ ฐ๓ จ๓ ฐ๓ ท๓ ฟ Flag for Central Visayas (PH-07)
๐ด๓ ด๓ จ๓ ธ๓ ถ๓ ฟ Flag for Chumphon (TH-86)
๐ด๓ ฃ๓ ฉ๓ บ๓ บ๓ ฟ Flag for Zanzan (CI-ZZ)
๐ด๓ ฅ๓ ณ๓ ฃ๓ ฌ๓ ฟ Flag for Castile and Leรณn (ES-CL)
๐จ๐ปโ๐จ๐ปโ๐ง๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ณ๓ ก๓ ฑ๓ ฑ๓ ฟ Flag for Al Bahah (SA-11)
๐ด๓ ข๓ ฑ๓ ณ๓ ฅ๓ ฟ Flag for Sint Eustatius (BQ-SE)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ฑ๓ ฟ Flag for ร…land Islands (FI-01)
๐ด๓ ฃ๓ ฒ๓ จ๓ ฟ Flag for Heredia (CR-H)
๐ด๓ ด๓ ฒ๓ ด๓ ณ๓ ฟ Flag for Kรผtahya (TR-43)
๐ด๓ ท๓ ณ๓ ถ๓ ณ๓ ฟ Flag for Vaisigano (WS-VS)
๐จ๐ฟโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ฒ๓ ฟ Flag for Kranj (SI-052)
๐ด๓ ถ๓ ฅ๓ ถ๓ ฟ Flag for Zulia (VE-V)
๐ฉ๐ฝโโค๏ธโ๐โ๐จ๐ผ Kiss - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ฌ๓ ต๓ ฃ๓ ก๓ ฟ Flag for Capellen (LU-CA)
๐ฉ๐ฝโโค๏ธโ๐ฉ๐พ Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone
๐จ๐ผโ๐จ๐ผโ๐ง๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ง๓ น๓ ฅ๓ ข๓ ฟ Flag for East Berbice-Corentyne (GY-EB)
๐ด๓ ด๓ จ๓ ฑ๓ ถ๓ ฟ Flag for Lopburi (TH-16)
๐ด๓ ญ๓ ด๓ ฒ๓ ต๓ ฟ Flag for Luqa (MT-25)
๐จ๐ปโโค๏ธโ๐จ๐ผ Couple With Heart - Man: Light Skin Tone, Man: Medium-Light Skin Tone
๐จ๐ฝโ๐จ๐ฝโ๐ง๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐ปโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Woman: Light Skin Tone, Woman: Medium Skin Tone
๐ด๓ ญ๓ ธ๓ ข๓ ฃ๓ ณ๓ ฟ Flag for Baja California Sur (MX-BCS)
๐ด๓ ฅ๓ ง๓ ข๓ ฎ๓ ณ๓ ฟ Flag for Beni Suef (EG-BNS)
๐ด๓ ด๓ จ๓ น๓ ณ๓ ฟ Flag for Phatthalung (TH-93)
๐ด๓ ด๓ บ๓ ฒ๓ ต๓ ฟ Flag for Tanga (TZ-25)
๐ด๓ ญ๓ ก๓ ฐ๓ ด๓ ฟ Flag for Oriental (MA-04)
๐จ๐พโ๐จ๐พโ๐ง๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐จ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฒ๓ ท๓ ฟ Flag for Gorenja VasโPoljane (SI-027)
๐ด๓ ด๓ ด๓ ณ๓ ง๓ ฅ๓ ฟ Flag for Sangre Grande (TT-SGE)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ถ๓ ฟ Flag for Koknese (LV-046)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ถ๓ ฟ Flag for Odranci (SI-086)
๐ด๓ ฎ๓ บ๓ ฎ๓ ณ๓ ฎ๓ ฟ Flag for Nelson (NZ-NSN)
๐ด๓ จ๓ ต๓ ณ๓ บ๓ ฟ Flag for Szabolcs-Szatmรกr-Bereg (HU-SZ)
๐ฉ๐พโโค๏ธโ๐โ๐จ๐ฝ Kiss - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฑ๓ ฐ๓ ฟ Flag for Sveti Jurij v Slovenskih Goricah (SI-210)
฿ท NKo Symbol Gbakurunen
๐ด๓ ฎ๓ ง๓ ค๓ ฅ๓ ฟ Flag for Delta (NG-DE)
๐ด๓ ญ๓ ค๓ ฃ๓ ณ๓ ฟ Flag for Cฤuศeni (MD-CS)
๐ฉ๐ฝโ๐ง๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ฃ๓ ต๓ น๓ น๓ ฟ Flag for Isla de la Juventud (CU-99)
๐ด๓ ซ๓ จ๓ ฒ๓ ฐ๓ ฟ Flag for Svay Rieng (KH-20)
๐ด๓ ด๓ ค๓ จ๓ ฌ๓ ฟ Flag for Hadjer-Lamis (TD-HL)
๐ด๓ ช๓ ฐ๓ ฒ๓ ฑ๓ ฟ Flag for Gifu (JP-21)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ฑ๓ ฟ Flag for Jelgava Municipality (LV-041)
๐ด๓ ฐ๓ ซ๓ ด๓ ก๓ ฟ Flag for Federally Administered Tribal Areas (PK-TA)
๐ด๓ ญ๓ ด๓ ถ๓ ฒ๓ ฟ Flag for Xewkija (MT-62)
๐ด๓ ญ๓ ฒ๓ ฑ๓ ฐ๓ ฟ Flag for Guidimaka (MR-10)
๐ด๓ ญ๓ ซ๓ ฐ๓ ฒ๓ ฟ Flag for Araฤinovo (MK-02)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ธ๓ ฟ Flag for LogโDragomer (SI-208)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฒ๓ ต๓ ฟ Flag for ล martno ob Paki (SI-125)
๐ด๓ ฃ๓ ฏ๓ ค๓ ฃ๓ ฟ Flag for Capital District (CO-DC)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ถ๓ ฟ Flag for Ventspils Municipality (LV-106)
๐ด๓ ญ๓ ถ๓ ณ๓ ฃ๓ ฟ Flag for South Central Province (MV-SC)
๐ด๓ ฉ๓ ฎ๓ ก๓ ณ๓ ฟ Flag for Assam (IN-AS)
๐ด๓ ฌ๓ ด๓ ฐ๓ ฒ๓ ฟ Flag for Alytus Municipality (LT-02)
๐ด๓ ถ๓ ฎ๓ ถ๓ ถ๓ ฟ Flag for Hฦฐng Yรชn (VN-66)
๐จ๐ปโ๐จ๐ปโ๐ง๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
๐จ๐ผโ๐จ๐ผโ๐ง๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ง๓ ด๓ ณ๓ ญ๓ ฟ Flag for San Marcos (GT-SM)
๐จ๐ผโ๐จ๐ผโ๐ฆ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ค๓ ฅ๓ ณ๓ จ๓ ฟ Flag for Schleswig-Holstein (DE-SH)
๐จโ๐จโ๐ถโ๐ง Family: Man, Man, Baby, Girl
๏ธ Variation Selector-16
๐จ๐ฝโ๐จ๐ฝโ๐ง๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
๐จ๐พโ๐จ๐พโ๐ง๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐จ๐ฟโ๐จ๐ฟโ๐ง๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
๐จ๐ปโ๐จ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone
๐จ๐ผโ๐จ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐จ๐ฝโ๐จ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone
๐จ๐พโ๐จ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐จ๐ฟโ๐จ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉโโค๏ธโ๐จ๐ฟ Couple With Heart - Woman, Man: Dark Skin Tone
๐ด๓ ฅ๓ ณ๓ ฃ๓ ข๓ ฟ Flag for Cantabria (ES-CB)
๐ด๓ ณ๓ ณ๓ ต๓ น๓ ฟ Flag for Unity (SS-UY)
๐ฉ๐ผโ๐ถ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ถ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐พโ๐ถ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ถ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ปโ๐ถ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ผโ๐ถ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ถ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐พโ๐ถ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ฉ๐ฝโ๐ถ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐ถ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ถ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ฆ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ผโ๐ถ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ฆ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ฆ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ฆ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ฆ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ฆ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ฆ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ฆ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ง๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ง๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ง๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ง๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ง๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ง๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ง๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ง๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ง๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ง๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ง๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ง๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ง๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ถ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ถ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ถ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ถ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ถ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ถ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ถ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ถ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉ๐ปโ๐จ๐ปโ๐ถ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ผโ๐จ๐ผโ๐ถ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ฝโ๐จ๐ฝโ๐ถ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ถ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐จ๐ฟโ๐ถ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ฆ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ฆ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ฆ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ฆ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ฆ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ฆ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ฆ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ง๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ง๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ง๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ง๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ง๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone
๐จ๐พโ๐ฉ๐พโ๐ง๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ถ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ถ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ถ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ถ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ถ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ถ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ผโ๐ฉ๐ผโ๐ถ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ถ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ฉ๓ ค๓ ญ๓ ฌ๓ ฟ Flag for Maluku Islands (ID-ML)
๐ฉ๐ฟโ๐ถ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ค๓ ซ๓ ธ๓ ณ๓ ฟ Flag for Southern Denmark (DK-83)
๐ด๓ ญ๓ ซ๓ ธ๓ ต๓ ฟ Flag for Skopje (MK-85)
๐จ๐ผโโค๏ธโ๐โ๐ฉ Kiss - Man: Medium-Light Skin Tone, Woman
๐ด๓ ฐ๓ ด๓ ฐ๓ ฒ๓ ฟ Flag for Beja (PT-02)
๐ด๓ ฉ๓ ด๓ ธ๓ ธ๓ ฟ Flag for Sardinia (IT-88)
๐ด๓ ค๓ ฅ๓ ข๓ น๓ ฟ Flag for Bavaria (DE-BY)
๐ด๓ ฐ๓ ง๓ ฅ๓ ข๓ ฒ๓ ฟ Flag for East New Britain (PG-EBR)
๐ด๓ ฉ๓ ด๓ ณ๓ ฒ๓ ฟ Flag for Trentino-South Tyrol (IT-32)
๐ด๓ ต๓ ณ๓ ด๓ ฎ๓ ฟ Flag for Tennessee (US-TN)
๐ด๓ ฃ๓ ก๓ ณ๓ ซ๓ ฟ Flag for Saskatchewan (CA-SK)
๐ด๓ ด๓ ถ๓ ฆ๓ ต๓ ฎ๓ ฟ Flag for Funafuti (TV-FUN)
๐ด๓ ด๓ ช๓ ง๓ ข๓ ฟ Flag for Gorno-Badakhshan (TJ-GB)
๐ด๓ ณ๓ ฏ๓ ข๓ ฎ๓ ฟ Flag for Banaadir (SO-BN)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ฐ๓ ฐ๓ ฟ Flag for Radenci (SI-100)
๐ด๓ ค๓ ฅ๓ ข๓ ท๓ ฟ Flag for Baden-Wรผrttemberg (DE-BW)
๐ฉ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ถ๓ ฅ๓ ง๓ ฟ Flag for Carabobo (VE-G)
โ Zero Width Joiner
๐ด๓ ซ๓ ฅ๓ ณ๓ ฑ๓ ฟ Flag for Nakuru (KE-31)
๐ด๓ ด๓ ง๓ ญ๓ ฟ Flag for Maritime (TG-M)
๐ด๓ ฎ๓ ง๓ ข๓ ฏ๓ ฟ Flag for Borno (NG-BO)
๐ด๓ ญ๓ ค๓ ณ๓ ฎ๓ ฟ Flag for Transnistria (MD-SN)
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ท๓ ฟ Flag for Tehran (IR-07)
๐ด๓ ฒ๓ ต๓ ค๓ ก๓ ฟ Flag for Dagestan (RU-DA)
๐ด๓ ฏ๓ ญ๓ ท๓ ต๓ ฟ Flag for Al Wusta (OM-WU)
๐ด๓ ฃ๓ บ๓ ด๓ ฒ๓ ฟ Flag for รsteckรฝ kraj (CZ-42)
๐ด๓ ญ๓ น๓ ฑ๓ ด๓ ฟ Flag for Kuala Lumpur (MY-14)
๐ด๓ ฐ๓ ฅ๓ ก๓ น๓ ก๓ ฟ Flag for Ayacucho (PE-AYA)
๐ด๓ ต๓ ก๓ ณ๓ ฐ๓ ฟ Flag for Kiev (UA-30)
๐ด๓ ก๓ ง๓ ฐ๓ ธ๓ ฟ Flag for Saint Philip (AG-08)
๐ด๓ ญ๓ ด๓ ฒ๓ น๓ ฟ Flag for Mdina (MT-29)
๐ด๓ ง๓ ข๓ ฎ๓ ฉ๓ ฒ๓ ฟ Flag for Northern Ireland (GB-NIR)
๐ด๓ ฆ๓ ฒ๓ ก๓ ฒ๓ ก๓ ฟ Flag for Auvergne-Rhรดne-Alpes (FR-ARA)
๐ด๓ ญ๓ ธ๓ ค๓ ต๓ ฒ๓ ฟ Flag for Durango (MX-DUR)
๐จ๐ผโ๐ฉ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ฌ๓ ซ๓ ต๓ ฟ Flag for Eastern (LK-5)
๐ด๓ ฎ๓ ง๓ ฏ๓ ง๓ ฟ Flag for Ogun (NG-OG)
๐ด๓ ฌ๓ น๓ ช๓ ฉ๓ ฟ Flag for Jafara (LY-JI)
๐ด๓ ณ๓ ฅ๓ ญ๓ ฟ Flag for Skรฅne (SE-M)
๐จ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐พโ๐ฉ๐พโ๐ง๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ข๓ ฒ๓ ญ๓ ณ๓ ฟ Flag for Mato Grosso do Sul (BR-MS)
๐ด๓ ง๓ ด๓ ณ๓ ฒ๓ ฟ Flag for Santa Rosa (GT-SR)
๐จ๐ผโ๐ฉ๐ผโ๐ง๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ณ๓ ฉ๓ ฑ๓ ต๓ ฑ๓ ฟ Flag for Braslovฤe (SI-151)
๐ด๓ ฐ๓ ด๓ ณ๓ ฐ๓ ฟ Flag for Madeira (PT-30)
๐ด๓ ณ๓ ถ๓ ณ๓ ถ๓ ฟ Flag for San Vicente (SV-SV)
๐ด๓ ฉ๓ ฒ๓ ณ๓ ฒ๓ ฟ Flag for Alborz (IR-32)
๐ด๓ ท๓ ณ๓ ฆ๓ ก๓ ฟ Flag for Faโasaleleaga (WS-FA)
๐จ๐ผโ๐จ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ฃ๓ ก๓ ฎ๓ ฌ๓ ฟ Flag for Newfoundland and Labrador (CA-NL)
๐ด๓ ง๓ ฒ๓ ช๓ ฟ Flag for Peloponnese (GR-J)
๐ด๓ ฎ๓ ฌ๓ ณ๓ ธ๓ ฟ Flag for Sint Maarten (NL-SX)
๐ด๓ ญ๓ ด๓ ด๓ ธ๓ ฟ Flag for St. Julianโs (MT-48)
๐ด๓ ฎ๓ ง๓ ก๓ ค๓ ฟ Flag for Adamawa (NG-AD)
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ณ๓ ด๓ ณ๓ ฟ Flag for Sรฃo Tomรฉ (ST-S)
๐ฉ๐ปโ๐ฉ๐ปโ๐ง๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ฐ๓ ฟ Flag for Auce (LV-010)
๐ด๓ ฐ๓ จ๓ ฑ๓ ต๓ ฟ Flag for Cordillera Administrative (PH-15)
๐ด๓ ช๓ ฐ๓ ฑ๓ ธ๓ ฟ Flag for Fukui (JP-18)
๐จ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ง๓ ฅ๓ ซ๓ ก๓ ฟ Flag for Kakheti (GE-KA)
๐ด๓ ซ๓ ฒ๓ ด๓ น๓ ฟ Flag for Jeju (KR-49)
๐ด๓ ญ๓ ก๓ ฑ๓ ณ๓ ฟ Flag for Souss-Massa-Drรขa (MA-13)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ท๓ ฟ Flag for Inฤukalns (LV-037)
๐ด๓ ฆ๓ ฒ๓ ด๓ ฆ๓ ฟ Flag for French Southern Territories (FR-TF)
๐ด๓ ญ๓ ธ๓ ฒ๓ ฏ๓ ฏ๓ ฟ Flag for Quintana Roo (MX-ROO)
๐ฉ๐ปโ๐ถ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
๐จ๐พโ๐จ๐พโ๐ฆ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ จ๓ ต๓ ง๓ ณ๓ ฟ Flag for Gyลr-Moson-Sopron (HU-GS)
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉ๐ปโ๐ฉ๐ปโ๐ฆ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
๎ Shibuya
๐ฉโโค๏ธโ๐จ๐ฝ Couple With Heart - Woman, Man: Medium Skin Tone
๐ด๓ ท๓ ณ๓ ง๓ ฉ๓ ฟ Flag for Gagaโifomauga (WS-GI)
๐ด๓ จ๓ ด๓ ฎ๓ ฅ๓ ฟ Flag for Nord-Est (HT-NE)
๐ด๓ ณ๓ ง๓ ฐ๓ ฑ๓ ฟ Flag for Central Singapore (SG-01)
๐ด๓ ฅ๓ ฃ๓ ด๓ ฟ Flag for Tungurahua (EC-T)
# Number Sign
๐จ๐ปโ๐จ๐ปโ๐ถ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
1 Digit One
๐ด๓ ข๓ ฏ๓ ด๓ ฟ Flag for Tarija (BO-T)
๐จ๐พโ๐ฉ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ข๓ ฉ๓ ฃ๓ ฉ๓ ฟ Flag for Cibitoke (BI-CI)
๐ด๓ ญ๓ ถ๓ ต๓ ณ๓ ฟ Flag for Upper South Province (MV-US)
๐ด๓ ก๓ ค๓ ฐ๓ ฒ๓ ฟ Flag for Canillo (AD-02)
๐ด๓ ก๓ ฆ๓ ข๓ ก๓ ญ๓ ฟ Flag for Bamyan (AF-BAM)
๐ด๓ ก๓ ค๓ ฐ๓ ณ๓ ฟ Flag for Encamp (AD-03)
๐ด๓ ต๓ ณ๓ ญ๓ ฐ๓ ฟ Flag for Northern Mariana Islands (US-MP)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ฒ๓ ฟ Flag for Babฤซte (LV-012)
๐ด๓ ฅ๓ ฃ๓ ธ๓ ฟ Flag for Cotopaxi (EC-X)
๐ด๓ ง๓ ก๓ ด๓ ฟ Flag for Ngouniรฉ (GA-4)
* Asterisk
๓ บ Tag Latin Small Letter Z
๐ด๓ ก๓ ค๓ ฐ๓ ด๓ ฟ Flag for La Massana (AD-04)
๓ ณ Tag Digit Three
๐ฉ๐ผโโค๏ธโ๐โ๐ฉ๐ป Kiss - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone
๐ด๓ ญ๓ ฅ๓ ฐ๓ ณ๓ ฟ Flag for Berane (ME-03)
๐จ๐ฟโโค๏ธโ๐โ๐จ๐ฝ Kiss - Man: Dark Skin Tone, Man: Medium Skin Tone
๐ด๓ ค๓ ฏ๓ ณ๓ ท๓ ฟ Flag for El Valle (DO-37)
๐ฉ๐พโโค๏ธโ๐ฉ๐ป Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone
๐ด๓ ซ๓ ฅ๓ ฐ๓ ฑ๓ ฟ Flag for Baringo (KE-01)
๐ด๓ น๓ ฅ๓ ณ๓ ก๓ ฟ Flag for Amanat Al Asimah (YE-SA)
๐จ๐ผโ๐จ๐ผโ๐ถ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๓ ฒ Tag Digit Two
๐ด๓ ญ๓ ด๓ ฒ๓ ฐ๓ ฟ Flag for Senglea (MT-20)
๐ด๏ธโโ๏ธ Woman in Business Suit Levitating
๐ด๓ ฃ๓ ฆ๓ จ๓ ญ๓ ฟ Flag for Haut-Mbomou (CF-HM)
๓ ฑ Tag Digit One
๓ ด Tag Digit Four
๐ด๓ ก๓ บ๓ ก๓ ข๓ ณ๓ ฟ Flag for Absheron (AZ-ABS)
6 Digit Six
๐ด๓ ฌ๓ ก๓ ณ๓ ถ๓ ฟ Flag for Savannakhet (LA-SV)
๐ด๓ ญ๓ ฌ๓ ฑ๓ ฟ Flag for Kayes (ML-1)
๐ด๓ ก๓ ฅ๓ ก๓ บ๓ ฟ Flag for Abu Dhabi (AE-AZ)
๐ด๓ ฅ๓ ณ๓ ก๓ ณ๓ ฟ Flag for Asturias (ES-AS)
๐ด๓ ฉ๓ ฑ๓ ซ๓ ฉ๓ ฟ Flag for Kirkuk (IQ-KI)
๐ฉโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Woman, Woman: Medium Skin Tone
๐ด๓ ค๓ ฅ๓ ข๓ ฅ๓ ฟ Flag for Berlin (DE-BE)
8 Digit Eight
๐ด๓ ก๓ ค๓ ฐ๓ ธ๓ ฟ Flag for Escaldes-Engordany (AD-08)
๐ด๓ ฃ๓ ฎ๓ ถ๓ ด๓ ฟ Flag for Ningxia (CN-64)
๐ด๓ ฅ๓ ฃ๓ ฆ๓ ฟ Flag for Caรฑar (EC-F)
๐ด๓ ก๓ ฅ๓ ก๓ ช๓ ฟ Flag for Ajman (AE-AJ)
๐ด๐ปโโ๏ธ Woman in Business Suit Levitating: Light Skin Tone
๐จ๐ปโโค๏ธโ๐โ๐ฉ Kiss - Man: Light Skin Tone, Woman
๓ ธ Tag Digit Eight
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ด๓ ฟ Flag for Fars (IR-14)
๐ด๓ ก๓ ฅ๓ ฆ๓ ต๓ ฟ Flag for Fujairah (AE-FU)
๐จ๐ผโ๐ฆ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ จ๓ ฒ๓ ฑ๓ ฐ๓ ฟ Flag for Virovitica-Podravina (HR-10)
๓ ฉ Tag Latin Small Letter I
7 Digit Seven
๓ ท Tag Digit Seven
๓ ฅ Tag Latin Small Letter E
๐ฉ๐ผโ๐ฉ๐ผโ๐ง๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ญ๓ จ๓ ด๓ ฟ Flag for Ratak Chain (MH-T)
๐ด๓ ก๓ ฅ๓ ณ๓ จ๓ ฟ Flag for Sharjah (AE-SH)
๓ ฆ Tag Latin Small Letter F
๐ด๓ ฌ๓ ด๓ ต๓ ท๓ ฟ Flag for Vilniaus Municipality (LT-57)
๐ด๓ ฉ๓ ณ๓ ด๓ ฟ Flag for Westfjords (IS-4)
๐ด๓ ฃ๓ ก๓ ข๓ ฃ๓ ฟ Flag for British Columbia (CA-BC)
4 Digit Four
๐ด๓ ก๓ ฆ๓ ข๓ ก๓ ฌ๓ ฟ Flag for Balkh (AF-BAL)
๐จโ๐ถโ๐ฆ Family: Man, Baby, Boy
๐ด๓ ด๓ ท๓ จ๓ ณ๓ ฑ๓ ฟ Flag for Hsinchu County (TW-HSQ)
๐ฉโ๐ถโ๐ง Family: Woman, Baby, Girl
๐ด๓ ญ๓ ธ๓ ช๓ ก๓ ฌ๓ ฟ Flag for Jalisco (MX-JAL)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ธ๓ ฟ Flag for Kitui (KE-18)
๐ด๓ ฐ๓ ด๓ ฒ๓ ฐ๓ ฟ Flag for Azores (PT-20)
๐ด๓ ฉ๓ ฎ๓ ญ๓ ฎ๓ ฟ Flag for Manipur (IN-MN)
๐ด๓ ก๓ ฆ๓ ข๓ ค๓ ณ๓ ฟ Flag for Badakhshan (AF-BDS)
๐ฉ๐ปโโค๏ธโ๐ฉ๐ผ Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ก๓ ค๓ ฐ๓ ต๓ ฟ Flag for Ordino (AD-05)
๐ฉ๐ฝโโค๏ธโ๐โ๐ฉ Kiss - Woman: Medium Skin Tone, Woman
๐ด๓ ก๓ ฆ๓ ข๓ ง๓ ฌ๓ ฟ Flag for Baghlan (AF-BGL)
๐ด๓ ฎ๓ ง๓ ฃ๓ ฒ๓ ฟ Flag for Cross River (NG-CR)
๐ด๓ ต๓ ณ๓ ฃ๓ ฏ๓ ฟ Flag for Colorado (US-CO)
๓ ด Tag Latin Small Letter T
๐ด๓ ญ๓ ซ๓ ถ๓ ด๓ ฟ Flag for Radoviลก (MK-64)
๐ด๓ ฎ๓ บ๓ ท๓ ง๓ ฎ๓ ฟ Flag for Wellington (NZ-WGN)
๐จ๐ฝโ๐จ๐ฝโ๐ถ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ถ๓ ฟ Flag for Kurdistan (IR-16)
๐จ๐ฝโโค๏ธโ๐โ๐จ๐ฟ Kiss - Man: Medium Skin Tone, Man: Dark Skin Tone
๓ ณ Tag Latin Small Letter S
๐ฉโ๐ถโ๐ถ Family: Woman, Baby, Baby
๐ด๓ ก๓ ฆ๓ ค๓ ก๓ น๓ ฟ Flag for Daykundi (AF-DAY)
๐จ๐ปโโค๏ธโ๐โ๐จ๐พ Kiss - Man: Light Skin Tone, Man: Medium-Dark Skin Tone
๐ด๓ ก๓ ฆ๓ ฆ๓ ฒ๓ ก๓ ฟ Flag for Farah (AF-FRA)
๓ ฑ Tag Latin Small Letter Q
๐ด๓ ง๓ ด๓ ง๓ ต๓ ฟ Flag for Guatemala (GT-GU)
๐ด๓ ฃ๓ จ๓ ด๓ ง๓ ฟ Flag for Thurgau (CH-TG)
๐ด๓ ฒ๓ ต๓ ฃ๓ ฅ๓ ฟ Flag for Chechen (RU-CE)
๓ ต Tag Digit Five
๐ด๓ ก๓ ฆ๓ ง๓ จ๓ ฏ๓ ฟ Flag for Ghลr (AF-GHO)
๐ด๓ ก๓ ด๓ น๓ ฟ Flag for Vienna (AT-9)
๐ด๓ ก๓ ฆ๓ ง๓ จ๓ ก๓ ฟ Flag for Ghazni (AF-GHA)
๓ ต Tag Latin Small Letter U
๐ด๓ ข๓ ท๓ ง๓ ก๓ ฟ Flag for Gaborone (BW-GA)
๓ น Tag Latin Small Letter Y
๓ ฟ Cancel Tag
๓ ท Tag Latin Small Letter W
๐ฉ๐ฝโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Woman: Medium Skin Tone, Woman: Dark Skin Tone
๐ด๓ ฃ๓ ฏ๓ ก๓ ญ๓ ก๓ ฟ Flag for Amazonas (CO-AMA)
๓ ฎ Tag Latin Small Letter N
๐ฉโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Woman, Woman: Medium Skin Tone
๐จโ๐ถ Family: Man, Baby
๐ด๓ ก๓ ด๓ ฑ๓ ฟ Flag for Burgenland (AT-1)
๐ด๓ ก๓ ฆ๓ จ๓ ฅ๓ ฌ๓ ฟ Flag for Helmand (AF-HEL)
๓ ถ Tag Digit Six
๐ด๓ ก๓ ฆ๓ ช๓ ฏ๓ ท๓ ฟ Flag for Jowzjan (AF-JOW)
๐งโโ๏ธ Woman With Headscarf
๓ ข Tag Latin Small Letter B
๓ ฐ Tag Digit Zero
๐ด๓ ก๓ ฆ๓ จ๓ ฅ๓ ฒ๓ ฟ Flag for Herat (AF-HER)
๐ด๓ ง๓ ค๓ ฐ๓ ต๓ ฟ Flag for Saint Mark (GD-05)
3 Digit Three
๓ ง Tag Latin Small Letter G
๐ด๐พโโ๏ธ Woman in Business Suit Levitating: Medium-Dark Skin Tone
๐ฉ๐ฝโโค๏ธโ๐โ๐จ๐ฝ Kiss - Woman: Medium Skin Tone, Man: Medium Skin Tone
๐ด๓ ต๓ ณ๓ ก๓ ซ๓ ฟ Flag for Alaska (US-AK)
๓ ฒ Tag Latin Small Letter R
๐ด๓ ด๓ ฌ๓ ฌ๓ ก๓ ฟ Flag for Lautรฉm (TL-LA)
๐ด๓ ก๓ ฆ๓ ซ๓ ก๓ ข๓ ฟ Flag for Kabul (AF-KAB)
๐จโโค๏ธโ๐โ๐จ๐ฟ Kiss - Man, Man: Dark Skin Tone
๐งโโ๏ธ Man With Headscarf
๓ ถ Tag Latin Small Letter V
๓ ค Tag Latin Small Letter D
๐ด๓ ก๓ ฆ๓ ซ๓ ก๓ ฎ๓ ฟ Flag for Kandahar (AF-KAN)
๐ด๓ ก๓ ฆ๓ ซ๓ ก๓ ฐ๓ ฟ Flag for Kapisa (AF-KAP)
๐ด๓ ญ๓ ฃ๓ ณ๓ ฒ๓ ฟ Flag for Saint Roman (MC-SR)
๐ด๓ ฅ๓ ฅ๓ ณ๓ น๓ ฟ Flag for Hiiu (EE-39)
๓ ญ Tag Latin Small Letter M
๐ด๓ ก๓ ฆ๓ ซ๓ จ๓ ฏ๓ ฟ Flag for Khost (AF-KHO)
๐ง๐ปโโ๏ธ Man With Headscarf: Light Skin Tone
๐ด๓ ก๓ ฆ๓ ซ๓ ค๓ บ๓ ฟ Flag for Kunduz (AF-KDZ)
๐ฉ๐ฟโโค๏ธโ๐จ Couple With Heart - Woman: Dark Skin Tone, Man
๐ด๓ ต๓ ณ๓ ณ๓ ค๓ ฟ Flag for South Dakota (US-SD)
๐ด๓ ก๓ ฆ๓ ข๓ ค๓ ง๓ ฟ Flag for Badghis (AF-BDG)
๐ด๓ ฉ๓ ณ๓ ธ๓ ฟ Flag for Southern (IS-8)
๐ด๓ ก๓ ฆ๓ ซ๓ ฎ๓ ฒ๓ ฟ Flag for Kunar (AF-KNR)
๐จโ๐จโ๐ถโ๐ถ Family: Man, Man, Baby, Baby
๐ด๓ ช๓ ฐ๓ ฑ๓ ณ๓ ฟ Flag for Tokyo (JP-13)
๐ด๓ ก๓ ฆ๓ ฌ๓ ก๓ ง๓ ฟ Flag for Laghman (AF-LAG)
๐ง๐ฝโโ๏ธ Man With Headscarf: Medium Skin Tone
๐ด๓ ก๓ ฆ๓ ฌ๓ ฏ๓ ง๓ ฟ Flag for Logar (AF-LOG)
5 Digit Five
๓ ฃ Tag Latin Small Letter C
๐ด๓ ก๓ ฆ๓ ฆ๓ น๓ ข๓ ฟ Flag for Faryab (AF-FYB)
๓ ฐ Tag Latin Small Letter P
๐ด๓ ก๓ ฆ๓ ฎ๓ ก๓ ฎ๓ ฟ Flag for Nangarhar (AF-NAN)
๓ น Tag Digit Nine
๐ด๓ ฅ๓ ณ๓ ฎ๓ ฃ๓ ฟ Flag for Navarra Chartered Community (ES-NC)
๐ฉ๐ผโ๐ฆ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ญ๓ ธ๓ ฎ๓ ก๓ น๓ ฟ Flag for Nayarit (MX-NAY)
๐ด๓ ข๓ ฒ๓ ฐ๓ ฅ๓ ฟ Flag for Pernambuco (BR-PE)
๐ด๓ ฉ๓ ด๓ ท๓ ฒ๓ ฟ Flag for Campania (IT-72)
๐ง๐พโโ๏ธ Man With Headscarf: Medium-Dark Skin Tone
๐ฉ๐ฝโโค๏ธโ๐โ๐ฉ๐พ Kiss - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone
๐ด๓ ก๓ ฆ๓ ฎ๓ ต๓ ฒ๓ ฟ Flag for Nuristan (AF-NUR)
๐จโ๐จโ๐งโ๐ถ Family: Man, Man, Girl, Baby
๐ด๓ ฐ๓ ง๓ ท๓ ข๓ ซ๓ ฟ Flag for West New Britain (PG-WBK)
๐จ๐ผโ๐ฉ๐ผโ๐ง๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ง๓ น๓ ต๓ ค๓ ฟ Flag for Upper Demerara-Berbice (GY-UD)
๐จโโค๏ธโ๐โ๐ฉ Kiss - Man, Woman
๐ด๓ ฅ๓ ด๓ ก๓ ฆ๓ ฟ Flag for Afar (ET-AF)
๐ด๓ ก๓ ฆ๓ ฐ๓ ก๓ ฒ๓ ฟ Flag for Parwan (AF-PAR)
๐ด๓ ก๓ ฆ๓ ฎ๓ ฉ๓ ญ๓ ฟ Flag for Nimruz (AF-NIM)
๐ด๓ จ๓ ฒ๓ ฐ๓ ด๓ ฟ Flag for Karlovac (HR-04)
๐ด๓ ก๓ ฆ๓ ฐ๓ ฉ๓ ก๓ ฟ Flag for Paktia (AF-PIA)
๐ง๐ฟโโ๏ธ Man With Headscarf: Dark Skin Tone
๐ง๐ผโโ๏ธ Man With Headscarf: Medium-Light Skin Tone
๐ด๓ ญ๓ ธ๓ ข๓ ฃ๓ ฎ๓ ฟ Flag for Baja California (MX-BCN)
๐ด๓ ก๓ ฆ๓ ฐ๓ ซ๓ ก๓ ฟ Flag for Paktika (AF-PKA)
๐ด๓ ซ๓ ฉ๓ ฐ๓ ฟ Flag for Phoenix Islands (KI-P)
๓ ฏ Tag Latin Small Letter O
๐ด๓ ก๓ ฆ๓ ฐ๓ ก๓ ฎ๓ ฟ Flag for Panjshir (AF-PAN)
๐ด๓ ฃ๓ จ๓ ด๓ ฉ๓ ฟ Flag for Ticino (CH-TI)
๐ด๓ ณ๓ ฉ๓ ฑ๓ น๓ ฒ๓ ฟ Flag for ลฝirovnica (SI-192)
๐ด๓ ณ๓ ฅ๓ ฎ๓ ฟ Flag for Halland (SE-N)
๓ ช Tag Latin Small Letter J
๐ฉ๐ฝโโค๏ธโ๐โ๐ฉ๐ป Kiss - Woman: Medium Skin Tone, Woman: Light Skin Tone
๐จ๐พโ๐จ๐พโ๐ถ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐จ๐ฟโ๐จ๐ฟโ๐ถ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ณ๓ ณ๓ ข๓ ฎ๓ ฟ Flag for Northern Bahr el Ghazal (SS-BN)
๐จ๐ฝโโค๏ธโ๐โ๐ฉ Kiss - Man: Medium Skin Tone, Woman
๐ด๓ ฃ๓ ฆ๓ ข๓ ซ๓ ฟ Flag for Basse-Kotto (CF-BK)
๐จโโค๏ธโ๐จ๐ป Couple With Heart - Man, Man: Light Skin Tone
๐จ๐ฝโโค๏ธโ๐จ Couple With Heart - Man: Medium Skin Tone, Man
๐ด๓ ฌ๓ น๓ ข๓ ต๓ ฟ Flag for Butnan (LY-BU)
๐ฉโ๐ถ Family: Woman, Baby
๐ด๓ ฌ๓ ซ๓ น๓ ฟ Flag for Sabaragamuwa (LK-9)
๐ด๓ ก๓ ฆ๓ ณ๓ ก๓ ญ๓ ฟ Flag for Samangan (AF-SAM)
๐ด๓ ด๓ ถ๓ ฎ๓ ซ๓ ฌ๓ ฟ Flag for Nukulaelae (TV-NKL)
๐ด๓ ก๓ ฅ๓ ฒ๓ ซ๓ ฟ Flag for Ras al-Khaimah (AE-RK)
๐ด๓ ฅ๓ ณ๓ ฃ๓ ฅ๓ ฟ Flag for Ceuta (ES-CE)
๐ด๓ ก๓ ฅ๓ ค๓ ต๓ ฟ Flag for Dubai (AE-DU)
๐จ๐ปโ๐จ๐ปโ๐ถ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ช๓ ฐ๓ ด๓ ท๓ ฟ Flag for Okinawa (JP-47)
๐ด๓ ก๓ ฆ๓ ณ๓ ก๓ ฒ๓ ฟ Flag for Sar-e Pol (AF-SAR)
๐ฉ๐ผโ๐ฉ๐ผโ๐ฆ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๓ ฌ Tag Latin Small Letter L
๐ด๓ ก๓ ฆ๓ ต๓ ฒ๓ ต๓ ฟ Flag for Urozgan (AF-URU)
9 Digit Nine
๐ฉ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐จโโค๏ธโ๐โ๐จ๐ฝ Kiss - Man, Man: Medium Skin Tone
๐ด๓ ค๓ ญ๓ ฐ๓ ถ๓ ฟ Flag for Saint Joseph (DM-06)
๐ด๓ ก๓ ง๓ ฐ๓ ด๓ ฟ Flag for Saint John (AG-04)
๐ด๓ ฃ๓ ฏ๓ ถ๓ ฉ๓ ค๓ ฟ Flag for Vichada (CO-VID)
๐ด๓ ฐ๓ ท๓ ฒ๓ ฑ๓ ธ๓ ฟ Flag for Ngarchelong (PW-218)
๐ด๓ ฒ๓ ต๓ ก๓ ฒ๓ ซ๓ ฟ Flag for Arkhangelsk (RU-ARK)
๐ด๓ ก๓ ฆ๓ บ๓ ก๓ ข๓ ฟ Flag for Zabul (AF-ZAB)
๐ด๓ ก๓ ง๓ ฐ๓ ณ๓ ฟ Flag for Saint George (AG-03)
๐ด๓ ฉ๓ ด๓ ฒ๓ ต๓ ฟ Flag for Lombardy (IT-25)
๐จ๐ปโโค๏ธโ๐โ๐จ๐ป Kiss - Man: Light Skin Tone, Man: Light Skin Tone
๐ด๓ ฃ๓ บ๓ ต๓ ณ๓ ฟ Flag for Pardubickรฝ kraj (CZ-53)
๐ด๓ ก๓ ง๓ ฐ๓ ถ๓ ฟ Flag for Saint Paul (AG-06)
๐ด๓ ถ๓ ฎ๓ ต๓ ฑ๓ ฟ Flag for Trร Vinh (VN-51)
๐ฉโ๐จโ๐ถโ๐ง Family: Woman, Man, Baby, Girl
๐ด๓ ซ๓ ฒ๓ ด๓ ธ๓ ฟ Flag for South Gyeongsang (KR-48)
๐ด๓ ก๓ ง๓ ฐ๓ ต๓ ฟ Flag for Saint Mary (AG-05)
๐ด๓ ง๓ ฒ๓ ซ๓ ฟ Flag for North Aegean (GR-K)
๐ฉโ๐ฉโ๐ถโ๐ง Family: Woman, Woman, Baby, Girl
๐ด๓ ฅ๓ ฃ๓ บ๓ ฟ Flag for Zamora-Chinchipe (EC-Z)
๐ด๓ ฎ๓ ฉ๓ ญ๓ ณ๓ ฟ Flag for Masaya (NI-MS)
๐ด๓ ซ๓ ฉ๓ ง๓ ฟ Flag for Gilbert Islands (KI-G)
๐ด๓ ญ๓ ธ๓ ฃ๓ จ๓ จ๓ ฟ Flag for Chihuahua (MX-CHH)
๐จ๐ผโ๐จ๐ผโ๐ถ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐จ๐ฝโ๐จ๐ฝโ๐ถ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐ฝโ๐ง๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉโ๐จโ๐ถโ๐ถ Family: Woman, Man, Baby, Baby
๐ด๓ ก๓ ง๓ ฑ๓ ฑ๓ ฟ Flag for Redonda (AG-11)
๐ฉโ๐ฉโ๐ถ Family: Woman, Woman, Baby
๐จโโค๏ธโ๐โ๐ฉ๐ป Kiss - Man, Woman: Light Skin Tone
๐จโโค๏ธโ๐โ๐จ๐พ Kiss - Man, Man: Medium-Dark Skin Tone
๐ด๓ ก๓ ฌ๓ ฐ๓ ฑ๓ ฟ Flag for Berat County (AL-01)
๓ ก Tag Latin Small Letter A
๐ด๓ ก๓ ง๓ ฑ๓ ฐ๓ ฟ Flag for Barbuda (AG-10)
๐ด๓ ฃ๓ ฏ๓ ณ๓ ก๓ ฐ๓ ฟ Flag for San Andrรฉs & Providencia (CO-SAP)
๐ด๓ ก๓ ฌ๓ ฐ๓ ณ๓ ฟ Flag for Elbasan County (AL-03)
๐จ๐พโ๐จ๐พโ๐ถ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐จ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ฉ๓ ฎ๓ ซ๓ ก๓ ฟ Flag for Karnataka (IN-KA)
๐ด๓ ก๓ ฌ๓ ฐ๓ ต๓ ฟ Flag for Gjirokastรซr County (AL-05)
๐ด๓ ช๓ ฐ๓ ฐ๓ ฑ๓ ฟ Flag for Hokkaidล (JP-01)
๐ฉ๐พโ๐จ๐พโ๐ถ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ต๓ ง๓ ฃ๓ ฟ Flag for Central (UG-C)
๐จ๐ผโโค๏ธโ๐โ๐จ Kiss - Man: Medium-Light Skin Tone, Man
๐ด๓ ก๓ ฌ๓ ฐ๓ ฒ๓ ฟ Flag for Durrรซs County (AL-02)
๐ด๓ ก๓ ฌ๓ ฐ๓ ด๓ ฟ Flag for Fier County (AL-04)
๐ด๓ ก๓ ฌ๓ ฐ๓ ถ๓ ฟ Flag for Korรงรซ County (AL-06)
๐ด๓ ฐ๓ น๓ ฑ๓ ถ๓ ฟ Flag for Alto Paraguay (PY-16)
๐ด๓ ก๓ ฌ๓ ฐ๓ ท๓ ฟ Flag for Kukรซs County (AL-07)
๐จ๐ฟโโค๏ธโ๐โ๐จ Kiss - Man: Dark Skin Tone, Man
๐ด๓ ง๓ น๓ ต๓ ด๓ ฟ Flag for Upper Takutu-Upper Essequibo (GY-UT)
๐จ๐พโ๐ถ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐จ๐ฟโ๐จ๐ฟโ๐ถ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
๐จ๐ปโ๐จ๐ปโ๐ถ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ก๓ ฌ๓ ฐ๓ น๓ ฟ Flag for Dibรซr County (AL-09)
๐ด๓ ก๓ ฌ๓ ฐ๓ ธ๓ ฟ Flag for Lezhรซ County (AL-08)
๐จ๐ผโ๐จ๐ผโ๐ถ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ก๓ ฌ๓ ฑ๓ ฑ๓ ฟ Flag for Tirana County (AL-11)
๐ด๓ ก๓ ค๓ ฐ๓ ถ๓ ฟ Flag for Sant Juliร de Lรฒria (AD-06)
๐ด๓ ข๓ ฒ๓ ข๓ ก๓ ฟ Flag for Bahia (BR-BA)
๐ด๓ ก๓ ฌ๓ ฑ๓ ฐ๓ ฟ Flag for Shkodรซr County (AL-10)
๐ฉโโค๏ธโ๐โ๐จ๐ฟ Kiss - Woman, Man: Dark Skin Tone
๐จ๐ฝโ๐จ๐ฝโ๐ถ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
๐จ๐พโ๐จ๐พโ๐ถ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ฉโโค๏ธโ๐โ๐จ๐ฝ Kiss - Woman, Man: Medium Skin Tone
๐ด๓ ก๓ ฌ๓ ฑ๓ ฒ๓ ฟ Flag for Vlorรซ County (AL-12)
๐ด๓ ด๓ จ๓ ฒ๓ ณ๓ ฟ Flag for Trat (TH-23)
๐ด๓ ก๓ ญ๓ ง๓ ฒ๓ ฟ Flag for Gegharkunik (AM-GR)
๐จ๐ฟโ๐จ๐ฟโ๐ถ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ก๓ ญ๓ ก๓ ง๓ ฟ Flag for Aragatsotn (AM-AG)
๐ด๓ ก๓ ญ๓ ก๓ ฒ๓ ฟ Flag for Ararat (AM-AR)
๐ด๓ ก๓ ญ๓ ฅ๓ ฒ๓ ฟ Flag for Yerevan (AM-ER)
๐ด๓ ก๓ ญ๓ ซ๓ ด๓ ฟ Flag for Kotayk (AM-KT)
๐ด๓ ฆ๓ ฒ๓ ฃ๓ ฏ๓ ฒ๓ ฟ Flag for Corse (FR-COR)
๐ด๓ ก๓ ญ๓ ก๓ ถ๓ ฟ Flag for Armavir (AM-AV)
๐ฉโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Woman, Woman: Dark Skin Tone
๐ด๓ ข๓ ฒ๓ ญ๓ ง๓ ฟ Flag for Minas Gerais (BR-MG)
๐ด๓ ฃ๓ ง๓ ฑ๓ ถ๓ ฟ Flag for Pointe-Noire (CG-16)
๐ด๓ ก๓ ญ๓ ฌ๓ ฏ๓ ฟ Flag for Lori (AM-LO)
๐ด๓ ค๓ บ๓ ฒ๓ ฑ๓ ฟ Flag for Skikda (DZ-21)
๐ด๓ ก๓ ญ๓ ณ๓ จ๓ ฟ Flag for Shirak (AM-SH)
๐ฉโโค๏ธโ๐โ๐ฉ๐พ Kiss - Woman, Woman: Medium-Dark Skin Tone
๐ด๓ ก๓ ค๓ ฐ๓ ท๓ ฟ Flag for Andorra la Vella (AD-07)
๐ด๓ ฒ๓ ต๓ ก๓ ฌ๓ ด๓ ฟ Flag for Altai Krai (RU-ALT)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ถ๓ ท๓ ฟ Flag for Lovrenc na Pohorju (SI-167)
๐ฉโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Woman, Woman: Medium-Light Skin Tone
๐จ๐ฟโโค๏ธโ๐โ๐ฉ๐ป Kiss - Man: Dark Skin Tone, Woman: Light Skin Tone
๐ด๓ ฌ๓ ด๓ ฐ๓ ฎ๓ ฟ Flag for Panevฤลพys County (LT-PN)
๐ด๓ ค๓ ฏ๓ ณ๓ ต๓ ฟ Flag for Cibao Norte (DO-35)
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ฐ๓ ฟ Flag for Vest-Agder (NO-10)
๐จโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Man, Woman: Dark Skin Tone
๐ด๓ ก๓ ญ๓ ถ๓ ค๓ ฟ Flag for Vayots Dzor (AM-VD)
๐ฉ๐ปโโค๏ธโ๐โ๐ฉ๐ป Kiss - Woman: Light Skin Tone, Woman: Light Skin Tone
๐ด๓ ต๓ ณ๓ ถ๓ ด๓ ฟ Flag for Vermont (US-VT)
๐จ๐ฝโโค๏ธโ๐โ๐จ Kiss - Man: Medium Skin Tone, Man
๐ด๓ ก๓ ฏ๓ ข๓ ง๓ ฏ๓ ฟ Flag for Bengo (AO-BGO)
๐ฉ๐ปโโค๏ธโ๐โ๐ฉ Kiss - Woman: Light Skin Tone, Woman
๐ด๓ ฃ๓ ฏ๓ ญ๓ ฅ๓ ด๓ ฟ Flag for Meta (CO-MET)
๐ด๓ ฎ๓ ฌ๓ ข๓ ฑ๓ ฒ๓ ฟ Flag for Saba (NL-BQ2)
๐ฉ๐ฝโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone
๐จ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ก๓ ฏ๓ ข๓ ง๓ ต๓ ฟ Flag for Benguela (AO-BGU)
๐ด๓ ฃ๓ ฏ๓ ณ๓ ต๓ ฃ๓ ฟ Flag for Sucre (CO-SUC)
๐ด๓ ก๓ ฏ๓ ฃ๓ ฃ๓ ต๓ ฟ Flag for Cuando Cubango (AO-CCU)
๐ด๓ ฐ๓ ฅ๓ ญ๓ ค๓ ค๓ ฟ Flag for Madre de Dios (PE-MDD)
๐ด๓ ฃ๓ จ๓ ถ๓ ค๓ ฟ Flag for Vaud (CH-VD)
๐ด๓ ก๓ ฏ๓ ข๓ ฉ๓ ฅ๓ ฟ Flag for Biรฉ (AO-BIE)
๐ด๓ ก๓ ฏ๓ ฃ๓ ก๓ ข๓ ฟ Flag for Cabinda (AO-CAB)
๐ด๓ ก๓ ฏ๓ จ๓ ต๓ ฉ๓ ฟ Flag for Huรญla (AO-HUI)
๐ด๓ ก๓ ฏ๓ ฃ๓ ต๓ ณ๓ ฟ Flag for Cuanza Sul (AO-CUS)
๐จโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Man, Woman: Medium Skin Tone
๐ฉโ๐ฉโ๐ฆโ๐ถ Family: Woman, Woman, Boy, Baby
๐ด๓ ก๓ ฏ๓ จ๓ ต๓ ก๓ ฟ Flag for Huambo (AO-HUA)
๐จ๐ผโโค๏ธโ๐ฉ๐พ Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
๐ด๓ ฃ๓ น๓ ฐ๓ ถ๓ ฟ Flag for Kyrenia (CY-06)
๐ฉ๐ผโโค๏ธโ๐โ๐จ๐ป Kiss - Woman: Medium-Light Skin Tone, Man: Light Skin Tone
๐ด๓ ก๓ ฅ๓ ต๓ ฑ๓ ฟ Flag for Umm al-Quwain (AE-UQ)
๐ด๓ ก๓ ฏ๓ ฌ๓ ณ๓ ต๓ ฟ Flag for Lunda Sul (AO-LSU)
๐ด๓ ฌ๓ ฒ๓ ฃ๓ ญ๓ ฟ Flag for Grand Cape Mount (LR-CM)
๐ด๓ ก๓ ฏ๓ ฌ๓ ฎ๓ ฏ๓ ฟ Flag for Lunda Norte (AO-LNO)
๐ฉ๐ฝโโค๏ธโ๐จ๐ฟ Couple With Heart - Woman: Medium Skin Tone, Man: Dark Skin Tone
๐จ๐พโโค๏ธโ๐ฉ๐พ Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ด๓ ก๓ ฏ๓ ฃ๓ ฎ๓ ฏ๓ ฟ Flag for Cuanza Norte (AO-CNO)
๐ด๓ ก๓ ฏ๓ ญ๓ ก๓ ฌ๓ ฟ Flag for Malanje (AO-MAL)
๐ฉ๐ผโโค๏ธโ๐โ๐ฉ Kiss - Woman: Medium-Light Skin Tone, Woman
๐จ๐ผโ๐ฉ๐ผโ๐ฆ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ก๓ ฏ๓ ญ๓ ฏ๓ ธ๓ ฟ Flag for Moxico (AO-MOX)
๐ด๓ ก๓ ฏ๓ ฎ๓ ก๓ ญ๓ ฟ Flag for Namibe (AO-NAM)
๐จ๐พโ๐ฉ๐พโ๐ฆ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๓ ซ Tag Latin Small Letter K
๐ด๐ผโโ๏ธ Woman in Business Suit Levitating: Medium-Light Skin Tone
๐ด๓ ก๓ ฒ๓ ก๓ ฟ Flag for Salta (AR-A)
๐จ๐พโ๐ฉ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ฃ๓ ค๓ ฌ๓ ต๓ ฟ Flag for Lualaba (CD-LU)
๐ด๓ ก๓ ฒ๓ ข๓ ฟ Flag for Buenos Aires Province (AR-B)
๐จ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ก๓ ฒ๓ ค๓ ฟ Flag for San Luis (AR-D)
๐ด๓ ก๓ ฏ๓ บ๓ ก๓ ฉ๓ ฟ Flag for Zaire (AO-ZAI)
๐ด๓ ด๓ ฒ๓ ฐ๓ ณ๓ ฟ Flag for Afyonkarahisar (TR-03)
0 Digit Zero
๐ด๓ ถ๓ ฎ๓ ฒ๓ ต๓ ฟ Flag for Quแบฃng Trแป (VN-25)
๐ด๐ฟโโ๏ธ Woman in Business Suit Levitating: Dark Skin Tone
๐ด๓ ก๓ ฏ๓ ต๓ ฉ๓ ง๓ ฟ Flag for Uรญge (AO-UIG)
๐ฉ๐พโ๐ง๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ต๓ ก๓ ฑ๓ ธ๓ ฟ Flag for Zhytomyrshchyna (UA-18)
๐จ๐พโโค๏ธโ๐โ๐จ๐ฝ Kiss - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone
๐ด๓ ฃ๓ ฏ๓ ฃ๓ ฅ๓ ณ๓ ฟ Flag for Cesar (CO-CES)
๐ด๓ ก๓ ญ๓ ณ๓ ต๓ ฟ Flag for Syunik (AM-SU)
๐ด๓ ก๓ ฒ๓ ฅ๓ ฟ Flag for Entre Rรญos (AR-E)
๐จ๐ฟโโค๏ธโ๐โ๐ฉ Kiss - Man: Dark Skin Tone, Woman
๐ด๓ ก๓ ฒ๓ ฆ๓ ฟ Flag for La Rioja (AR-F)
๐ด๓ ซ๓ บ๓ ถ๓ ฏ๓ ณ๓ ฟ Flag for East Kazakhstan (KZ-VOS)
๐ด๓ ก๓ ฆ๓ ท๓ ก๓ ฒ๓ ฟ Flag for Maidan Wardak (AF-WAR)
๐ด๓ ก๓ ฒ๓ ช๓ ฟ Flag for San Juan (AR-J)
๐ฉ๐พโ๐ฉ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ก๓ ฏ๓ ฌ๓ ต๓ ก๓ ฟ Flag for Luanda (AO-LUA)
๐ด๓ ก๓ ฒ๓ ฌ๓ ฟ Flag for La Pampa (AR-L)
๐ฉ๐ผโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone
๐จ๐ผโ๐ฉ๐ผโ๐ฆ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐จ๐ผโ๐ฉ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ก๓ ฒ๓ ซ๓ ฟ Flag for Catamarca (AR-K)
๐ด๓ ก๓ ฒ๓ ฒ๓ ฟ Flag for Rรญo Negro (AR-R)
๐ด๓ ก๓ ฒ๓ จ๓ ฟ Flag for Chaco (AR-H)
๐ด๓ ก๓ ฒ๓ ฐ๓ ฟ Flag for Formosa (AR-P)
๐ด๓ ก๓ ฒ๓ ญ๓ ฟ Flag for Mendoza (AR-M)
๐ด๓ ก๓ ฒ๓ ฎ๓ ฟ Flag for Misiones (AR-N)
๐ด๓ ก๓ ฒ๓ ฑ๓ ฟ Flag for Neuquรฉn (AR-Q)
๐จ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ก๓ ฒ๓ ด๓ ฟ Flag for Tucumรกn (AR-T)
๐ด๓ ก๓ ฒ๓ ณ๓ ฟ Flag for Santa Fe (AR-S)
๐ด๓ ก๓ ฒ๓ ท๓ ฟ Flag for Corrientes (AR-W)
๐ด๓ ก๓ ฒ๓ น๓ ฟ Flag for Jujuy (AR-Y)
๐ด๓ ก๓ ฒ๓ ถ๓ ฟ Flag for Tierra del Fuego (AR-V)
๐ด๓ ก๓ ฒ๓ ต๓ ฟ Flag for Chubut (AR-U)
๐ด๓ ก๓ ฒ๓ ธ๓ ฟ Flag for Cรณrdoba (AR-X)
๐ด๓ ก๓ ฒ๓ บ๓ ฟ Flag for Santa Cruz (AR-Z)
๐ด๓ ก๓ ฒ๓ ง๓ ฟ Flag for Santiago del Estero (AR-G)
๐ด๓ ก๓ ด๓ ฒ๓ ฟ Flag for Carinthia (AT-2)
๐ด๓ ฃ๓ จ๓ ข๓ ฌ๓ ฟ Flag for Basel-Landschaft (CH-BL)
๐ฉ๐ฟโ๐ง๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
๐จ๐ปโ๐ฉ๐ปโ๐ฆ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
๐ฉ๐ปโ๐ง๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
๐จโ๐จโ๐ฆโ๐ง Family: Man, Man, Boy, Girl
๐ด๓ ก๓ ด๓ ณ๓ ฟ Flag for Lower Austria (AT-3)
๐ฉโ๐ถโ๐ฆ Family: Woman, Baby, Boy
๐ด๓ ญ๓ ฒ๓ ฑ๓ ณ๓ ฟ Flag for Nouakchott Ouest (MR-13)
๐จ๐ผโ๐ฉ๐ผโ๐ฆ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ฃ๓ ฆ๓ ญ๓ ข๓ ฟ Flag for Mbomou (CF-MB)
๐ด๓ ก๓ ด๓ ถ๓ ฟ Flag for Styria (AT-6)
๐ด๓ ฐ๓ จ๓ ฐ๓ ฑ๓ ฟ Flag for Ilocos (PH-01)
๐ด๓ ก๓ ด๓ ท๓ ฟ Flag for Tyrol (AT-7)
๐ด๓ ฃ๓ ฎ๓ ต๓ ฒ๓ ฟ Flag for Guizhou (CN-52)
๐ด๓ ฌ๓ ก๓ ธ๓ ณ๓ ฟ Flag for Xaisomboun (LA-XS)
๐ด๓ ก๓ ด๓ ธ๓ ฟ Flag for Vorarlberg (AT-8)
๐จ๐ผโ๐จ๐ผโ๐ฆ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ก๓ ด๓ ต๓ ฟ Flag for Salzburg (AT-5)
๐จ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
๐ฉโ๐ฉโ๐ถโ๐ถ Family: Woman, Woman, Baby, Baby
๐ฉโ๐จโ๐งโ๐ฆ Family: Woman, Man, Girl, Boy
๐ฉโ๐จโ๐ง Family: Woman, Man, Girl
๐ฉโ๐ฆโ๐ถ Family: Woman, Boy, Baby
๐ด๓ ก๓ ต๓ ฎ๓ ณ๓ ท๓ ฟ Flag for New South Wales (AU-NSW)
๐ฉโ๐จโ๐งโ๐ถ Family: Woman, Man, Girl, Baby
๐ฉ๐ฝโ๐ง๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ก๓ ต๓ ฎ๓ ด๓ ฟ Flag for Northern Territory (AU-NT)
๐ฉ๐ฟโ๐ง๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ก๓ ต๓ ฑ๓ ฌ๓ ค๓ ฟ Flag for Queensland (AU-QLD)
2 Digit Two
๐ฉโ๐จโ๐งโ๐ง Family: Woman, Man, Girl, Girl
๐ฉ๐ผโ๐ง๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ก๓ ด๓ ด๓ ฟ Flag for Upper Austria (AT-4)
๐ด๓ ง๓ ฒ๓ ก๓ ฟ Flag for East Macedonia and Thrace (GR-A)
๐จ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
๐จ๐พโ๐ฉ๐พโ๐ฆ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐จโ๐ถโ๐ง Family: Man, Baby, Girl
๐จ๐ปโ๐ฉ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone
๐จ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉโ๐จโ๐ถ Family: Woman, Man, Baby
๐ด๓ ต๓ ณ๓ ฎ๓ ฅ๓ ฟ Flag for Nebraska (US-NE)
๐ด๓ ก๓ บ๓ ก๓ ง๓ ก๓ ฟ Flag for Agstafa (AZ-AGA)
๐ด๓ ก๓ ฆ๓ ด๓ ก๓ ซ๓ ฟ Flag for Takhar (AF-TAK)
๐ด๓ ก๓ ต๓ ท๓ ก๓ ฟ Flag for Western Australia (AU-WA)
๐ด๓ ก๓ บ๓ ก๓ ง๓ ฃ๓ ฟ Flag for Aghjabadi (AZ-AGC)
๐ด๓ ก๓ บ๓ ก๓ ณ๓ ด๓ ฟ Flag for Astara (AZ-AST)
๐ด๓ ก๓ บ๓ ข๓ ก๓ ฌ๓ ฟ Flag for Balakan (AZ-BAL)
๐ฉโโค๏ธโ๐โ๐จ๐ผ Kiss - Woman, Man: Medium-Light Skin Tone
๐ด๓ ต๓ ณ๓ ฃ๓ ก๓ ฟ Flag for California (US-CA)
๐ด๓ ก๓ บ๓ ก๓ ง๓ ณ๓ ฟ Flag for Agdash (AZ-AGS)
๐ด๓ ก๓ บ๓ ข๓ ก๓ ฟ Flag for Baku (AZ-BA)
๐จ๐ปโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Man: Light Skin Tone, Woman: Dark Skin Tone
๐ด๓ ก๓ ต๓ ถ๓ ฉ๓ ฃ๓ ฟ Flag for Victoria (AU-VIC)
๐ด๓ ก๓ บ๓ ก๓ ง๓ ญ๓ ฟ Flag for Agdam (AZ-AGM)
๐จ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ก๓ บ๓ ข๓ ก๓ ฒ๓ ฟ Flag for Barda (AZ-BAR)
๐จ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐พโ๐ง๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ก๓ บ๓ ก๓ ง๓ ต๓ ฟ Flag for Agsu (AZ-AGU)
๐ด๓ ฃ๓ ค๓ ด๓ ก๓ ฟ Flag for Tanganyika (CD-TA)
๐ฉ๐ปโโค๏ธโ๐จ๐ผ Couple With Heart - Woman: Light Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ก๓ บ๓ ข๓ ฉ๓ ฌ๓ ฟ Flag for Bilasuvar (AZ-BIL)
๐ด๓ ก๓ บ๓ ฃ๓ ก๓ ฌ๓ ฟ Flag for Jalilabad (AZ-CAL)
๐ด๓ ก๓ บ๓ ฃ๓ ก๓ ข๓ ฟ Flag for Jabrayil (AZ-CAB)
๐ด๓ ก๓ บ๓ ข๓ ฅ๓ น๓ ฟ Flag for Beylagan (AZ-BEY)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ธ๓ ต๓ ฟ Flag for Novo Mesto (SI-085)
๐ด๓ ฃ๓ ง๓ น๓ ฟ Flag for Niari (CG-9)
๐ด๓ ก๓ บ๓ ค๓ ก๓ ณ๓ ฟ Flag for Dashkasan (AZ-DAS)
๐ด๓ ก๓ บ๓ ฆ๓ ต๓ บ๓ ฟ Flag for Fizuli (AZ-FUZ)
๐ฉ๐ฟโโค๏ธโ๐โ๐จ๐ฝ Kiss - Woman: Dark Skin Tone, Man: Medium Skin Tone
๐จ๐ฟโโค๏ธโ๐จ๐พ Couple With Heart - Man: Dark Skin Tone, Man: Medium-Dark Skin Tone
๐ด๓ ก๓ บ๓ ง๓ ฏ๓ น๓ ฟ Flag for Goychay (AZ-GOY)
๐ด๓ ก๓ บ๓ ง๓ ฏ๓ ฒ๓ ฟ Flag for Goranboy (AZ-GOR)
๐ด๓ ก๓ บ๓ ง๓ ก๓ ฟ Flag for Ganja (AZ-GA)
๐ด๓ ฑ๓ ก๓ ต๓ ณ๓ ฟ Flag for Umm Salal (QA-US)
๐ด๓ ฆ๓ ช๓ ฅ๓ ฟ Flag for Eastern (FJ-E)
๐ด๓ ก๓ บ๓ ง๓ น๓ ง๓ ฟ Flag for Goygol (AZ-GYG)
๐ด๓ ก๓ บ๓ จ๓ ก๓ ฃ๓ ฟ Flag for Hajigabul (AZ-HAC)
๐ฉ๐ฟโโค๏ธโ๐โ๐ฉ Kiss - Woman: Dark Skin Tone, Woman
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ท๓ ฟ Flag for Rฤzekne Municipality (LV-077)
๐ด๓ ก๓ ต๓ ก๓ ฃ๓ ด๓ ฟ Flag for Australian Capital Territory (AU-ACT)
๐จ๐ฝโโค๏ธโ๐โ๐ฉ๐พ Kiss - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone
๐ด๓ ฎ๓ ง๓ ฆ๓ ฃ๓ ฟ Flag for Federal Capital Territory (NG-FC)
๐ด๓ ฒ๓ ต๓ ข๓ ฒ๓ น๓ ฟ Flag for Bryansk (RU-BRY)
๐ด๓ ก๓ ญ๓ ด๓ ถ๓ ฟ Flag for Tavush (AM-TV)
๐ด๓ ฅ๓ ฃ๓ ณ๓ ค๓ ฟ Flag for Santo Domingo de los Tsรกchilas (EC-SD)
๐ฉ๐ผโโค๏ธโ๐ฉ Couple With Heart - Woman: Medium-Light Skin Tone, Woman
๐ด๓ ก๓ บ๓ ฉ๓ ญ๓ ฉ๓ ฟ Flag for Imishli (AZ-IMI)
๐ด๓ ด๓ ญ๓ ณ๓ ฟ Flag for Aลgabat (TM-S)
๐จโโค๏ธโ๐ฉ๐พ Couple With Heart - Man, Woman: Medium-Dark Skin Tone
๐ด๓ ฌ๓ ก๓ ธ๓ ฅ๓ ฟ Flag for Sekong (LA-XE)
๐ด๓ ฒ๓ ฏ๓ ง๓ ช๓ ฟ Flag for Gorj (RO-GJ)
๐จ๐ปโโค๏ธโ๐จ Couple With Heart - Man: Light Skin Tone, Man
๐ด๓ ก๓ บ๓ ซ๓ ต๓ ฒ๓ ฟ Flag for Kurdamir (AZ-KUR)
๐ฉ๐ปโ๐จ๐ปโ๐ฆ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ก๓ บ๓ ซ๓ ก๓ ฌ๓ ฟ Flag for Kalbajar (AZ-KAL)
๐ด๓ ก๓ บ๓ ง๓ ก๓ ค๓ ฟ Flag for Gadabay (AZ-GAD)
๐ด๓ ก๓ บ๓ ฌ๓ ก๓ ฃ๓ ฟ Flag for Lachin (AZ-LAC)
๐ด๓ ก๓ บ๓ ฌ๓ ก๓ ฟ Flag for Lankaran (AZ-LA)
๐ด๓ ถ๓ ฎ๓ ณ๓ ง๓ ฟ Flag for Ho Chi Minh City (VN-SG)
๐ด๓ ก๓ บ๓ ฌ๓ ฅ๓ ฒ๓ ฟ Flag for Lerik (AZ-LER)
๐ด๓ ก๓ บ๓ ญ๓ ฉ๓ ฟ Flag for Mingachevir (AZ-MI)
๐ฉ๐พโ๐จ๐พโ๐ง๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ก๓ บ๓ ฎ๓ ก๓ ฟ Flag for Naftalan (AZ-NA)
๐ด๓ ก๓ บ๓ ญ๓ ก๓ ณ๓ ฟ Flag for Masally (AZ-MAS)
๐จโโค๏ธโ๐ฉ Couple With Heart - Man, Woman
๐ด๓ ก๓ บ๓ ฌ๓ ก๓ ฎ๓ ฟ Flag for Lankaran District (AZ-LAN)
๐ฉ๐ผโ๐จ๐ผโ๐ง๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ฉ๐ฝโโค๏ธโ๐โ๐จ๐พ Kiss - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone
๐ฉ๐ฟโ๐ง๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ก๓ บ๓ ฎ๓ ฅ๓ ฆ๓ ฟ Flag for Neftchala (AZ-NEF)
๐ด๓ ก๓ บ๓ ฎ๓ ธ๓ ฟ Flag for Nakhchivan AR (AZ-NX)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ฑ๓ ฑ๓ ฟ Flag for Celje (SI-011)
๐ด๓ ฌ๓ ด๓ ณ๓ ฒ๓ ฟ Flag for Panevฤลพio Municipality (LT-32)
๐ฉ๐ฟโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Woman: Dark Skin Tone, Woman: Medium Skin Tone
๐จ๐ปโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Man: Light Skin Tone, Woman: Dark Skin Tone
๐ด๓ ก๓ บ๓ ฉ๓ ณ๓ ญ๓ ฟ Flag for Ismailli (AZ-ISM)
๓ จ Tag Latin Small Letter H
๐ฉ๐พโโค๏ธโ๐จ๐ป Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone
๐ฉ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ฃ๓ ฆ๓ ฎ๓ ญ๓ ฟ Flag for Nana-Mambรฉrรฉ (CF-NM)
๐ด๓ ก๓ บ๓ ฑ๓ ฏ๓ ข๓ ฟ Flag for Gobustan (AZ-QOB)
๐ฉ๐ฟโโค๏ธโ๐โ๐จ๐ป Kiss - Woman: Dark Skin Tone, Man: Light Skin Tone
๐ฉ๐ฟโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Woman: Dark Skin Tone, Woman: Dark Skin Tone
๐ด๓ ก๓ บ๓ ฑ๓ ข๓ ฉ๓ ฟ Flag for Qubadli (AZ-QBI)
๐ด๓ ก๓ บ๓ ฑ๓ ก๓ บ๓ ฟ Flag for Qazakh (AZ-QAZ)
๐ด๓ ฒ๓ ฏ๓ ข๓ ถ๓ ฟ Flag for Braลov (RO-BV)
๐จโ๐ฉโ๐งโ๐ถ Family: Man, Woman, Girl, Baby
๐ด๓ ก๓ บ๓ ฑ๓ ข๓ ก๓ ฟ Flag for Quba (AZ-QBA)
๐ด๓ ก๓ บ๓ ฑ๓ ก๓ ข๓ ฟ Flag for Qabala (AZ-QAB)
๐ด๓ ฃ๓ จ๓ ต๓ ฒ๓ ฟ Flag for Uri (CH-UR)
๐ด๓ ก๓ บ๓ ฏ๓ ง๓ ต๓ ฟ Flag for Oghuz (AZ-OGU)
๐ด๓ ก๓ บ๓ ฑ๓ ก๓ ธ๓ ฟ Flag for Qakh (AZ-QAX)
๐ด๓ ณ๓ ฉ๓ ฒ๓ ฐ๓ ถ๓ ฟ Flag for ล marjeลกke Toplice (SI-206)
๐จ๐พโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone
๐ด๓ ก๓ ง๓ ฐ๓ ท๓ ฟ Flag for Saint Peter (AG-07)
๐จ๐ปโ๐ฉ๐ปโ๐ง๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ฌ๓ ฒ๓ ญ๓ น๓ ฟ Flag for Maryland (LR-MY)
๐ด๓ ก๓ ต๓ ณ๓ ก๓ ฟ Flag for South Australia (AU-SA)
๐ด๓ ก๓ บ๓ ฑ๓ ต๓ ณ๓ ฟ Flag for Qusar (AZ-QUS)
๐ด๓ ก๓ บ๓ ณ๓ ก๓ ข๓ ฟ Flag for Sabirabad (AZ-SAB)
๐จโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Man, Woman: Medium Skin Tone
๐จโโค๏ธโ๐ฉ๐ผ Couple With Heart - Man, Woman: Medium-Light Skin Tone
๐ด๓ ก๓ บ๓ ณ๓ ก๓ ด๓ ฟ Flag for Saatly (AZ-SAT)
๐ด๓ ก๓ บ๓ ณ๓ ข๓ ฎ๓ ฟ Flag for Shabran (AZ-SBN)
๐จ๐ผโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone
๐ด๓ ก๓ บ๓ ณ๓ ก๓ ซ๓ ฟ Flag for Shaki District (AZ-SAK)
๐ด๓ ฃ๓ ฏ๓ ฃ๓ ก๓ ณ๓ ฟ Flag for Casanare (CO-CAS)
๐จโ๐ฉโ๐ถโ๐ถ Family: Man, Woman, Baby, Baby
๐ด๓ ก๓ บ๓ ณ๓ ฒ๓ ฟ Flag for Shirvan (AZ-SR)
๐ด๓ ก๓ บ๓ ณ๓ ต๓ ณ๓ ฟ Flag for Shusha (AZ-SUS)
๐ด๓ ฃ๓ จ๓ ถ๓ ณ๓ ฟ Flag for Valais (CH-VS)
๐ฉ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐ปโโค๏ธโ๐โ๐จ๐ฟ Kiss - Woman: Light Skin Tone, Man: Dark Skin Tone
๐ด๓ ก๓ บ๓ ณ๓ ก๓ ฟ Flag for Shaki (AZ-SA)
๐ด๓ ฆ๓ ฒ๓ ญ๓ ฑ๓ ฟ Flag for Martinique (FR-MQ)
๐ด๓ ก๓ บ๓ ณ๓ ญ๓ ฟ Flag for Sumqayit (AZ-SM)
๐ด๓ ก๓ บ๓ ณ๓ ฉ๓ น๓ ฟ Flag for Siazan (AZ-SIY)
๐ด๓ ก๓ บ๓ ณ๓ ญ๓ ฉ๓ ฟ Flag for Shamakhi (AZ-SMI)
๐ฉ๐ฟโโค๏ธโ๐โ๐จ Kiss - Woman: Dark Skin Tone, Man
๐ด๓ ก๓ บ๓ ณ๓ ญ๓ ธ๓ ฟ Flag for Samukh (AZ-SMX)
๐จ๐ปโ๐ฉ๐ปโ๐ง๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ก๓ บ๓ ด๓ ฏ๓ ถ๓ ฟ Flag for Tovuz (AZ-TOV)
๐ด๓ ก๓ บ๓ ธ๓ ก๓ ฃ๓ ฟ Flag for Khachmaz (AZ-XAC)
๐ด๓ ก๓ บ๓ ต๓ ฃ๓ ก๓ ฟ Flag for Ujar (AZ-UCA)
๐ด๓ ก๓ บ๓ ด๓ ก๓ ฒ๓ ฟ Flag for Tartar (AZ-TAR)
๐จ๐ฟโโค๏ธโ๐โ๐จ๐ป Kiss - Man: Dark Skin Tone, Man: Light Skin Tone
๐ฉ๐ผโ๐ง๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐จ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ก๓ บ๓ ธ๓ ฉ๓ บ๓ ฟ Flag for Khizi (AZ-XIZ)
๐จ๐ฝโโค๏ธโ๐จ๐ผ Couple With Heart - Man: Medium Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ก๓ บ๓ ธ๓ ฃ๓ ฉ๓ ฟ Flag for Khojali (AZ-XCI)
๐ด๓ ถ๓ ฅ๓ น๓ ฟ Flag for Delta Amacuro (VE-Y)
๐ด๓ ก๓ บ๓ ธ๓ ก๓ ฟ Flag for Stepanakert (AZ-XA)
๐ด๓ ก๓ บ๓ น๓ ก๓ ฒ๓ ฟ Flag for Yardymli (AZ-YAR)
๐ด๓ ก๓ บ๓ น๓ ฅ๓ ถ๓ ฟ Flag for Yevlakh District (AZ-YEV)
๐ด๓ ก๓ บ๓ บ๓ ก๓ ฑ๓ ฟ Flag for Zaqatala (AZ-ZAQ)
๐ฉ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ก๓ บ๓ น๓ ฅ๓ ฟ Flag for Yevlakh (AZ-YE)
๐ด๓ ข๓ ก๓ ข๓ ฉ๓ จ๓ ฟ Flag for Federation of Bosnia and Herzegovina (BA-BIH)
๐ด๓ ก๓ บ๓ บ๓ ก๓ ฒ๓ ฟ Flag for Zardab (AZ-ZAR)
๐ด๓ ก๓ บ๓ ณ๓ ก๓ ฌ๓ ฟ Flag for Salyan (AZ-SAL)
๐ด๓ ฃ๓ จ๓ บ๓ ง๓ ฟ Flag for Zug (CH-ZG)
๐จ๐พโ๐ฉ๐พโ๐ง๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐จ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
๐ฉ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ข๓ ก๓ ณ๓ ฒ๓ ฐ๓ ฟ Flag for Republika Srpska (BA-SRP)
๐จ๐ฝโโค๏ธโ๐ฉ Couple With Heart - Man: Medium Skin Tone, Woman
๐จ๐ปโ๐ฉ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ฅ๓ ณ๓ ก๓ ฎ๓ ฟ Flag for Andalusia (ES-AN)
๐จ๐ผโ๐ฉ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ข๓ ข๓ ฐ๓ ด๓ ฟ Flag for Saint James (BB-04)
๐จ๐พโโค๏ธโ๐ฉ๐ผ Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ข๓ ข๓ ฐ๓ ณ๓ ฟ Flag for Saint George (BB-03)
๐ด๓ ข๓ ข๓ ฐ๓ ฒ๓ ฟ Flag for Saint Andrew (BB-02)
๐จโ๐ฉโ๐ถโ๐ฆ Family: Man, Woman, Baby, Boy
๐จ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ข๓ ข๓ ฐ๓ ต๓ ฟ Flag for Saint John (BB-05)
๐จ๐พโ๐ฉ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ข๓ ข๓ ฐ๓ ถ๓ ฟ Flag for Saint Joseph (BB-06)
๐ด๓ ฌ๓ ซ๓ ฑ๓ ฟ Flag for Western (LK-1)
๐ด๓ ข๓ น๓ ข๓ ฒ๓ ฟ Flag for Brest (BY-BR)
๐ด๓ ก๓ บ๓ ณ๓ ซ๓ ฒ๓ ฟ Flag for Shamkir (AZ-SKR)
๐ด๓ ข๓ ข๓ ฐ๓ ท๓ ฟ Flag for Saint Lucy (BB-07)
๐ฉ๐ปโ๐ถ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ฅ๓ ณ๓ ฃ๓ ญ๓ ฟ Flag for Castile-La Mancha (ES-CM)
๐ด๓ ข๓ ข๓ ฑ๓ ฐ๓ ฟ Flag for Saint Philip (BB-10)
๐ด๓ ถ๓ ฃ๓ ฐ๓ ด๓ ฟ Flag for Saint George (VC-04)
๐จ๐ปโ๐ฉ๐ปโ๐ถ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ปโ๐ง๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ข๓ ค๓ ก๓ ฟ Flag for Barisal (BD-A)
๐ด๓ ก๓ บ๓ บ๓ ก๓ ฎ๓ ฟ Flag for Zangilan (AZ-ZAN)
๐ด๓ ช๓ ญ๓ ฐ๓ ฑ๓ ฟ Flag for Kingston (JM-01)
๐จ๐ผโ๐ฉ๐ผโ๐ถ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ข๓ ค๓ ฅ๓ ฟ Flag for Rajshahi Division (BD-E)
๐ด๓ ข๓ ค๓ ฆ๓ ฟ Flag for Rangpur Division (BD-F)
๐ด๓ ข๓ ค๓ ฃ๓ ฟ Flag for Dhaka Division (BD-C)
๐ด๓ ข๓ ค๓ ค๓ ฟ Flag for Khulna Division (BD-D)
๐ด๓ ข๓ ข๓ ฐ๓ น๓ ฟ Flag for Saint Peter (BB-09)
๐ด๓ ณ๓ ฉ๓ ฐ๓ ต๓ ธ๓ ฟ Flag for Lenart (SI-058)
๐ฉ๐ผโ๐ถ๐ผ Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ข๓ ฆ๓ ฐ๓ ฒ๓ ฟ Flag for Cascades (BF-02)
๐ด๓ ข๓ ค๓ จ๓ ฟ Flag for Mymensingh Division (BD-H)
๐ด๓ ข๓ ฅ๓ ท๓ ก๓ ฌ๓ ฟ Flag for Wallonia (BE-WAL)
๐ด๓ ญ๓ ต๓ ข๓ ฒ๓ ฟ Flag for Beau-Bassin Rose-Hill (MU-BR)
๐ด๓ ข๓ ฆ๓ ฐ๓ ด๓ ฟ Flag for Centre-Est (BF-04)
๐ด๓ ฃ๓ ฎ๓ น๓ ฑ๓ ฟ Flag for Hong Kong SAR China (CN-91)
๐ด๓ ข๓ ฆ๓ ฐ๓ ฑ๓ ฟ Flag for Boucle du Mouhoun (BF-01)
๐ด๓ ข๓ ฆ๓ ฐ๓ ณ๓ ฟ Flag for Centre (BF-03)
๐ด๓ ค๓ ซ๓ ธ๓ ฒ๓ ฟ Flag for Central Denmark (DK-82)
๐ด๓ ข๓ ฆ๓ ฐ๓ ท๓ ฟ Flag for Centre-Sud (BF-07)
๐จ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ข๓ ฆ๓ ฐ๓ ถ๓ ฟ Flag for Centre-Ouest (BF-06)
๐ด๓ ข๓ ฆ๓ ฐ๓ ต๓ ฟ Flag for Centre-Nord (BF-05)
๐ด๓ ข๓ ข๓ ฐ๓ ธ๓ ฟ Flag for Saint Michael (BB-08)
๐ด๓ ข๓ ข๓ ฑ๓ ฑ๓ ฟ Flag for Saint Thomas (BB-11)
๐จ๐ฝโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Man: Medium Skin Tone, Woman: Dark Skin Tone
๐ด๓ ข๓ ฆ๓ ฐ๓ ธ๓ ฟ Flag for Est (BF-08)
๐ด๓ ข๓ ฅ๓ ข๓ ฒ๓ ต๓ ฟ Flag for Brussels (BE-BRU)
๐ด๓ ข๓ ค๓ ง๓ ฟ Flag for Sylhet Division (BD-G)
๐ด๓ ข๓ ฆ๓ ฑ๓ ฑ๓ ฟ Flag for Plateau-Central (BF-11)
๐ด๓ ข๓ ค๓ ข๓ ฟ Flag for Chittagong Division (BD-B)
๐ด๓ ข๓ ฆ๓ ฑ๓ ณ๓ ฟ Flag for Sud-Ouest (BF-13)
๐จ๐พโ๐ฉ๐พโ๐ถ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ข๓ ง๓ ฐ๓ ต๓ ฟ Flag for Vidin (BG-05)
๐ด๓ ข๓ ง๓ ฐ๓ ณ๓ ฟ Flag for Varna (BG-03)
๐จ๐ฟโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Man: Dark Skin Tone, Woman: Medium Skin Tone
๐ด๓ ข๓ ง๓ ฐ๓ ฒ๓ ฟ Flag for Burgas (BG-02)
๐ด๓ ข๓ ฆ๓ ฑ๓ ฐ๓ ฟ Flag for Nord (BF-10)
๐ด๓ ข๓ ง๓ ฐ๓ ด๓ ฟ Flag for Veliko Tarnovo (BG-04)
๐จ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ข๓ ง๓ ฐ๓ ท๓ ฟ Flag for Gabrovo (BG-07)
๐จ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ข๓ ง๓ ฐ๓ ธ๓ ฟ Flag for Dobrich (BG-08)
๐ด๓ ข๓ ฆ๓ ฑ๓ ฒ๓ ฟ Flag for Sahel (BF-12)
๐ด๓ ก๓ ต๓ ด๓ ก๓ ณ๓ ฟ Flag for Tasmania (AU-TAS)
๐จ๐ฟโโค๏ธโ๐ฉ๐ป Couple With Heart - Man: Dark Skin Tone, Woman: Light Skin Tone
๐ฉ๐ปโ๐ง๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
๐จ๐ปโ๐ฉ๐ปโ๐ถ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
๐จ๐ผโ๐ฉ๐ผโ๐ถ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐จ๐พโโค๏ธโ๐โ๐ฉ๐พ Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ด๓ ก๓ บ๓ ธ๓ ถ๓ ค๓ ฟ Flag for Khojavend (AZ-XVD)
๐ด๓ ข๓ ง๓ ฑ๓ ฑ๓ ฟ Flag for Lovech (BG-11)
๐ด๓ ฃ๓ ฌ๓ ฌ๓ ฉ๓ ฟ Flag for Libertador General Bernardo OโHiggins (CL-LI)
๐ด๓ ข๓ ง๓ ฑ๓ ณ๓ ฟ Flag for Pazardzhik (BG-13)
๐จ๐ฟโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Man: Dark Skin Tone, Woman: Dark Skin Tone
๐ด๓ ข๓ ง๓ ฑ๓ ด๓ ฟ Flag for Pernik (BG-14)
๐ด๓ ข๓ ง๓ ฑ๓ ฐ๓ ฟ Flag for Kyustendil (BG-10)
๐ด๓ ฅ๓ ง๓ ข๓ ก๓ ฟ Flag for Red Sea (EG-BA)
๐ด๓ ด๓ บ๓ ฑ๓ ฑ๓ ฟ Flag for Zanzibar Central/South (TZ-11)
๐จ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ข๓ ง๓ ฑ๓ ต๓ ฟ Flag for Pleven (BG-15)
๐จ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
๐จ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ข๓ ง๓ ฒ๓ ฑ๓ ฟ Flag for Smolyan (BG-21)
๐ด๓ ข๓ ง๓ ฐ๓ ฑ๓ ฟ Flag for Blagoevgrad (BG-01)
๐ด๓ ค๓ บ๓ ณ๓ ด๓ ฟ Flag for Bordj Bou Arrรฉridj (DZ-34)
๐ด๓ ข๓ ง๓ ฑ๓ ถ๓ ฟ Flag for Plovdiv (BG-16)
๐ด๓ ฃ๓ ฉ๓ ถ๓ ข๓ ฟ Flag for Vallรฉe du Bandama (CI-VB)
๐ด๓ ข๓ ง๓ ฑ๓ น๓ ฟ Flag for Silistra (BG-19)
๐ฉโโค๏ธโ๐จ๐ผ Couple With Heart - Woman, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ง๓ ฑ๓ ท๓ ฟ Flag for Razgrad (BG-17)
๐จ๐พโโค๏ธโ๐จ Couple With Heart - Man: Medium-Dark Skin Tone, Man
๐ด๓ ก๓ ฏ๓ ฃ๓ ฎ๓ ฎ๓ ฟ Flag for Cunene (AO-CNN)
๐ด๓ ข๓ ง๓ ฒ๓ ฐ๓ ฟ Flag for Sliven (BG-20)
๐ง๐ปโโ๏ธ Woman With Headscarf: Light Skin Tone
๐ด๓ ข๓ ง๓ ฒ๓ ต๓ ฟ Flag for Targovishte (BG-25)
๐ฉ๐ผโ๐ฉ๐ผโ๐ถ๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐จ๐พโ๐ฉ๐พโ๐ถ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ข๓ ง๓ ฒ๓ ณ๓ ฟ Flag for Sofia District (BG-23)
๐ด๓ ข๓ ง๓ ฒ๓ ฒ๓ ฟ Flag for Sofia (BG-22)
๐จ๐ฟโ๐ฉ๐ฟโ๐ง๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
๐จ๐ปโโค๏ธโ๐โ๐ฉ๐พ Kiss - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone
๐ง๐ฝโโ๏ธ Woman With Headscarf: Medium Skin Tone
๐ด๓ ข๓ ง๓ ฒ๓ ธ๓ ฟ Flag for Yambol (BG-28)
๐ด๓ ข๓ จ๓ ฑ๓ ณ๓ ฟ Flag for Capital (BH-13)
๐ด๓ ข๓ ง๓ ฒ๓ ถ๓ ฟ Flag for Haskovo (BG-26)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ท๓ ฟ Flag for Schaan (LI-07)
๐จ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ข๓ จ๓ ฑ๓ ต๓ ฟ Flag for Muharraq (BH-15)
๐ด๓ ข๓ จ๓ ฑ๓ ด๓ ฟ Flag for Southern (BH-14)
๐ง๐พโโ๏ธ Woman With Headscarf: Medium-Dark Skin Tone
๐ด๓ ฒ๓ ฏ๓ ณ๓ ข๓ ฟ Flag for Sibiu (RO-SB)
๐ง๐ผโโ๏ธ Woman With Headscarf: Medium-Light Skin Tone
๐ฉ๐ปโโค๏ธโ๐จ๐ฟ Couple With Heart - Woman: Light Skin Tone, Man: Dark Skin Tone
๐ด๓ ข๓ จ๓ ฑ๓ ท๓ ฟ Flag for Northern (BH-17)
๐ด๓ ข๓ ฉ๓ ข๓ ข๓ ฟ Flag for Bubanza (BI-BB)
๐ฉ๐ปโโค๏ธโ๐ฉ Couple With Heart - Woman: Light Skin Tone, Woman
๐ด๓ ข๓ ฅ๓ ถ๓ ฌ๓ ง๓ ฟ Flag for Flanders (BE-VLG)
๐ฉ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone
๐จ๐ปโ๐ฉ๐ปโ๐ถ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ข๓ ฉ๓ ข๓ ญ๓ ฟ Flag for Bujumbura (BI-BM)
๐ง๐ฟโโ๏ธ Woman With Headscarf: Dark Skin Tone
๐ด๓ ข๓ ฉ๓ ข๓ ฌ๓ ฟ Flag for Bujumbura Rural (BI-BL)
๐จ๐พโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone
๐จ๐ผโ๐ฉ๐ผโ๐ถ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐จ๐ปโ๐จ๐ปโ๐ฆ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ข๓ ฉ๓ ฃ๓ ก๓ ฟ Flag for Cankuzo (BI-CA)
๐ด๓ ข๓ ง๓ ฑ๓ ฒ๓ ฟ Flag for Montana (BG-12)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ต๓ ฟ Flag for Sala (LV-085)
โฃ Combining Enclosing Keycap
๐ด๓ ข๓ ฉ๓ ข๓ ฒ๓ ฟ Flag for Bururi (BI-BR)
๐ด๓ ข๓ ง๓ ฐ๓ น๓ ฟ Flag for Kardzhali (BG-09)
๐ด๓ ข๓ ฉ๓ ฒ๓ ญ๓ ฟ Flag for Rumonge (BI-RM)
๐ด๓ ฎ๓ ฌ๓ ก๓ ท๓ ฟ Flag for Aruba (NL-AW)
๐ด๓ ข๓ ฉ๓ ญ๓ น๓ ฟ Flag for Muyinga (BI-MY)
๐ด๓ ข๓ ฉ๓ ฒ๓ ด๓ ฟ Flag for Rutana (BI-RT)
๐ด๓ ข๓ ฉ๓ ฒ๓ น๓ ฟ Flag for Ruyigi (BI-RY)
๐ด๓ ข๓ ฉ๓ ซ๓ ฉ๓ ฟ Flag for Kirundo (BI-KI)
๐ด๓ ข๓ ฉ๓ ซ๓ น๓ ฟ Flag for Kayanza (BI-KY)
๐ด๓ ข๓ ฉ๓ ญ๓ ท๓ ฟ Flag for Mwaro (BI-MW)
๐ด๓ ข๓ ง๓ ฒ๓ ท๓ ฟ Flag for Shumen (BG-27)
๐ด๓ ข๓ ฉ๓ ฎ๓ ง๓ ฟ Flag for Ngozi (BI-NG)
๐ด๓ ข๓ ฉ๓ ซ๓ ฒ๓ ฟ Flag for Karuzi (BI-KR)
๐ด๓ ข๓ ฉ๓ ญ๓ ต๓ ฟ Flag for Muramvya (BI-MU)
๐ด๓ ญ๓ ก๓ ฑ๓ ต๓ ฟ Flag for Laรขyoune-Boujdour-Sakia El Hamra (MA-15)
๐จ๐ฝโ๐ฉ๐ฝโ๐ถ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉ๐พโ๐จ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐จ๐พโ๐ฉ๐พโ๐ถ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ข๓ ช๓ ค๓ ฏ๓ ฟ Flag for Donga (BJ-DO)
๐ฉ๐ฝโ๐จ๐ฝโ๐ถ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
๐จ๐ฝโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ฆ๓ ฒ๓ จ๓ ค๓ ฆ๓ ฟ Flag for Hauts-de-France (FR-HDF)
๐ด๓ ข๓ ช๓ ก๓ ฌ๓ ฟ Flag for Alibori (BJ-AL)
๐ด๓ ข๓ ช๓ ก๓ ซ๓ ฟ Flag for Atakora (BJ-AK)
๐จ๐ฟโ๐ฉ๐ฟโ๐ถ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ข๓ ช๓ ฌ๓ ฉ๓ ฟ Flag for Littoral (BJ-LI)
๐ด๓ ข๓ ช๓ ข๓ ฏ๓ ฟ Flag for Borgou (BJ-BO)
๐ฉโ๐ฉโ๐งโ๐ถ Family: Woman, Woman, Girl, Baby
๐ด๓ ต๓ ณ๓ ฎ๓ ค๓ ฟ Flag for North Dakota (US-ND)
๐จ๐ผโโค๏ธโ๐โ๐จ๐พ Kiss - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
๐ด๓ ข๓ ช๓ ซ๓ ฏ๓ ฟ Flag for Kouffo (BJ-KO)
๐ด๓ ข๓ ช๓ ฐ๓ ฌ๓ ฟ Flag for Plateau (BJ-PL)
๐ด๓ ง๓ ค๓ ฑ๓ ฐ๓ ฟ Flag for Carriacou and Petite Martinique (GD-10)
๐ด๓ ข๓ ช๓ บ๓ ฏ๓ ฟ Flag for Zou (BJ-ZO)
๐ฉ๐ผโโค๏ธโ๐จ๐ป Couple With Heart - Woman: Medium-Light Skin Tone, Man: Light Skin Tone
๐ฉ๐ฝโโค๏ธโ๐จ๐ฝ Couple With Heart - Woman: Medium Skin Tone, Man: Medium Skin Tone
๐จ๐ฝโโค๏ธโ๐ฉ๐ผ Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone
๐ฉ๐ฝโโค๏ธโ๐จ๐ป Couple With Heart - Woman: Medium Skin Tone, Man: Light Skin Tone
๐ด๓ ฌ๓ ข๓ ข๓ ฉ๓ ฟ Flag for Beqaa (LB-BI)
๐ด๓ ข๓ ฎ๓ ด๓ ฅ๓ ฟ Flag for Temburong (BN-TE)
๐ฉ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ข๓ ฎ๓ ด๓ ต๓ ฟ Flag for Tutong (BN-TU)
๐ด๓ ข๓ ฎ๓ ข๓ ญ๓ ฟ Flag for Brunei-Muara (BN-BM)
๐จ๐ปโ๐ฉ๐ปโ๐ฆ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ข๓ ง๓ ฐ๓ ถ๓ ฟ Flag for Vratsa (BG-06)
๐ฉ๐ฝโโค๏ธโ๐จ๐ผ Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ฏ๓ ข๓ ฟ Flag for Beni (BO-B)
๐ด๓ ข๓ ฎ๓ ข๓ ฅ๓ ฟ Flag for Belait (BN-BE)
๐ฉ๐ผโโค๏ธโ๐จ Couple With Heart - Woman: Medium-Light Skin Tone, Man
๐ด๓ ข๓ ช๓ ฏ๓ ต๓ ฟ Flag for Ouรฉmรฉ (BJ-OU)
๐ฉ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ณ๓ ฃ๓ ฒ๓ ต๓ ฟ Flag for Roche Caiman (SC-25)
๐ฉ๐ปโโค๏ธโ๐จ๐พ Couple With Heart - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone
๐ด๓ ข๓ ฏ๓ ฃ๓ ฟ Flag for Cochabamba (BO-C)
๐จ๐พโ๐ฉ๐พโ๐ง๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ข๓ ฏ๓ ฎ๓ ฟ Flag for Pando (BO-N)
๐ฉ๐ฝโโค๏ธโ๐ฉ๐ป Couple With Heart - Woman: Medium Skin Tone, Woman: Light Skin Tone
๐ฉ๐พโโค๏ธโ๐จ๐ฝ Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone
๐ด๓ ข๓ ฏ๓ จ๓ ฟ Flag for Chuquisaca (BO-H)
๐ด๓ ข๓ ฏ๓ ฌ๓ ฟ Flag for La Paz (BO-L)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ณ๓ น๓ ฟ Flag for Khentii (MN-039)
๐ด๐ฝโโ๏ธ Woman in Business Suit Levitating: Medium Skin Tone
๐ด๓ ญ๓ ซ๓ ฒ๓ ท๓ ฟ Flag for Dolneni (MK-27)
๐ด๓ ข๓ ง๓ ฒ๓ ด๓ ฟ Flag for Stara Zagora (BG-24)
๐ฉ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ณ๓ ฟ Flag for Sistan and Baluchestan (IR-13)
๐ฉ๐พโโค๏ธโ๐จ๐ผ Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ฏ๓ ฐ๓ ฟ Flag for Potosรญ (BO-P)
๐ด๓ ข๓ ฑ๓ ข๓ ฏ๓ ฟ Flag for Bonaire (BQ-BO)
๐ฉโโค๏ธโ๐โ๐จ๐ป Kiss - Woman, Man: Light Skin Tone
๐ฉ๐พโโค๏ธโ๐จ Couple With Heart - Woman: Medium-Dark Skin Tone, Man
๐ฉ๐ผโ๐ฆ๐ผโ๐ฆ๐ผ Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ข๓ ก๓ ข๓ ฒ๓ ฃ๓ ฟ Flag for Brฤko District (BA-BRC)
๐ด๓ ข๓ ฑ๓ ณ๓ ก๓ ฟ Flag for Saba (BQ-SA)
๐ฉ๐ฝโโค๏ธโ๐จ๐พ Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone
๐ฉ๐พโโค๏ธโ๐จ๐ฟ Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone
๐ด๓ ข๓ ฒ๓ ก๓ ฃ๓ ฟ Flag for Acre (BR-AC)
๐ด๓ ข๓ ฉ๓ ง๓ ฉ๓ ฟ Flag for Gitega (BI-GI)
๐ฉ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ฟโโค๏ธโ๐จ๐ป Couple With Heart - Woman: Dark Skin Tone, Man: Light Skin Tone
๐ด๓ ข๓ ฒ๓ ก๓ ญ๓ ฟ Flag for Amazonas (BR-AM)
๐ด๓ ก๓ ฒ๓ ฃ๓ ฟ Flag for Buenos Aires (AR-C)
๐จ๐ผโ๐ฉ๐ผโ๐ง๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐จ๐ผโโค๏ธโ๐โ๐จ๐ผ Kiss - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ฒ๓ ฅ๓ ณ๓ ฟ Flag for Espรญrito Santo (BR-ES)
๐จ๐ฟโโค๏ธโ๐โ๐จ๐พ Kiss - Man: Dark Skin Tone, Man: Medium-Dark Skin Tone
๐จ๐ผโโค๏ธโ๐โ๐จ๐ฝ Kiss - Man: Medium-Light Skin Tone, Man: Medium Skin Tone
๐ฉ๐พโ๐ฆ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐จ๐ปโโค๏ธโ๐ฉ Couple With Heart - Man: Light Skin Tone, Woman
๐จ๐ฟโโค๏ธโ๐โ๐ฉ๐พ Kiss - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ฉ๐ปโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Woman: Light Skin Tone, Woman: Medium Skin Tone
๐จ๐ผโโค๏ธโ๐โ๐จ๐ฟ Kiss - Man: Medium-Light Skin Tone, Man: Dark Skin Tone
๐ฉ๐ฝโ๐ฆ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐ฟโโค๏ธโ๐ฉ๐ผ Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ข๓ ฒ๓ ญ๓ ก๓ ฟ Flag for Maranhรฃo (BR-MA)
๐ฉ๐ฟโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Woman: Dark Skin Tone, Woman: Medium Skin Tone
๐ฉ๐ฟโโค๏ธโ๐ฉ Couple With Heart - Woman: Dark Skin Tone, Woman
๐ด๓ ข๓ ฒ๓ ก๓ ฐ๓ ฟ Flag for Amapรก (BR-AP)
๐จ๐ฝโโค๏ธโ๐จ๐ป Couple With Heart - Man: Medium Skin Tone, Man: Light Skin Tone
๐ฉ๐ปโโค๏ธโ๐โ๐จ๐ป Kiss - Woman: Light Skin Tone, Man: Light Skin Tone
๐จ๐ฝโโค๏ธโ๐โ๐จ๐ฝ Kiss - Man: Medium Skin Tone, Man: Medium Skin Tone
๐ฉ๐ฟโโค๏ธโ๐โ๐ฉ๐ป Kiss - Woman: Dark Skin Tone, Woman: Light Skin Tone
๐จ๐ฝโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Man: Medium Skin Tone, Woman: Dark Skin Tone
๐ฉ๐ผโโค๏ธโ๐โ๐จ๐พ Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
๐จ๐ฟโโค๏ธโ๐โ๐จ๐ผ Kiss - Man: Dark Skin Tone, Man: Medium-Light Skin Tone
๐จ๐พโโค๏ธโ๐โ๐จ๐ฟ Kiss - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone
๐ฉ๐ฝโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Woman: Medium Skin Tone, Woman: Dark Skin Tone
๐ฉ๐ผโโค๏ธโ๐โ๐จ๐ฟ Kiss - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone
๐จ๐ฝโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Man: Medium Skin Tone, Woman: Medium Skin Tone
๐จ๐พโโค๏ธโ๐โ๐จ๐ผ Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
๐จ๐ฝโโค๏ธโ๐โ๐ฉ๐ป Kiss - Man: Medium Skin Tone, Woman: Light Skin Tone
๐จ๐พโโค๏ธโ๐โ๐จ Kiss - Man: Medium-Dark Skin Tone, Man
๐จ๐พโโค๏ธโ๐โ๐จ๐พ Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
๐ฉโโค๏ธโ๐โ๐จ๐พ Kiss - Woman, Man: Medium-Dark Skin Tone
๐ฉโโค๏ธโ๐โ๐ฉ๐ป Kiss - Woman, Woman: Light Skin Tone
๐ฉ๐ฝโโค๏ธโ๐โ๐จ๐ป Kiss - Woman: Medium Skin Tone, Man: Light Skin Tone
๐ฉ๐ฟโโค๏ธโ๐โ๐จ๐ฟ Kiss - Woman: Dark Skin Tone, Man: Dark Skin Tone
๐ฉ๐ปโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Woman: Light Skin Tone, Woman: Dark Skin Tone
๐ฉ๐ปโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐ฉ Kiss - Woman: Medium-Dark Skin Tone, Woman
๐ฉ๐พโโค๏ธโ๐โ๐ฉ๐ป Kiss - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone
๐ฉ๐ปโโค๏ธโ๐จ Couple With Heart - Woman: Light Skin Tone, Man
๐ฉ๐ปโ๐ฉ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐จ๐พ Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
๐จ๐ปโโค๏ธโ๐จ๐ฝ Couple With Heart - Man: Light Skin Tone, Man: Medium Skin Tone
๐ด๓ ข๓ ฒ๓ ญ๓ ด๓ ฟ Flag for Mato Grosso (BR-MT)
๐จ๐ฝโโค๏ธโ๐ฉ๐ป Couple With Heart - Man: Medium Skin Tone, Woman: Light Skin Tone
๐จโโค๏ธโ๐จ๐ฟ Couple With Heart - Man, Man: Dark Skin Tone
๐ฉ๐ฟโโค๏ธโ๐โ๐จ๐ผ Kiss - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone
๐ฉ๐ฟโโค๏ธโ๐โ๐ฉ๐พ Kiss - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ฉ๐ปโ๐ฆ๐ปโ๐ง๐ป Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ข๓ ฏ๓ ณ๓ ฟ Flag for Santa Cruz (BO-S)
๐จ๐ปโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Man: Light Skin Tone, Woman: Medium Skin Tone
๐จ๐ฝโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Man: Medium Skin Tone, Woman: Medium Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone
๐ด๓ ข๓ ช๓ ฃ๓ ฏ๓ ฟ Flag for Collines (BJ-CO)
๐จ๐ปโ๐ฉ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone
๐จโโค๏ธโ๐จ๐ฝ Couple With Heart - Man, Man: Medium Skin Tone
๐จ๐พโ๐ฉ๐พโ๐ฆ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐จ๐ผโโค๏ธโ๐จ Couple With Heart - Man: Medium-Light Skin Tone, Man
๐จ๐พโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone
๐ด๓ ข๓ ฒ๓ ฐ๓ ก๓ ฟ Flag for Parรก (BR-PA)
๐ฉ๐ฝโ๐ฆ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
๐จ๐ผโโค๏ธโ๐จ๐ผ Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
๐จ๐ฟโโค๏ธโ๐จ๐ป Couple With Heart - Man: Dark Skin Tone, Man: Light Skin Tone
๐ฉ๐ฝโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Woman: Medium Skin Tone, Woman: Medium Skin Tone
๐จ๐พโโค๏ธโ๐จ๐ฝ Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone
๐จ๐ฝโโค๏ธโ๐จ๐ฝ Couple With Heart - Man: Medium Skin Tone, Man: Medium Skin Tone
๐จ๐ปโโค๏ธโ๐ฉ๐ผ Couple With Heart - Man: Light Skin Tone, Woman: Medium-Light Skin Tone
๐จ๐พโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone
๐จ๐พโโค๏ธโ๐จ๐ผ Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐ฉ๐พ Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ฉ๐ผโโค๏ธโ๐ฉ๐ป Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone
๐จ๐ฟโโค๏ธโ๐ฉ๐ผ Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone
๐จ๐ผโโค๏ธโ๐จ๐พ Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
๐จ๐ฝโโค๏ธโ๐จ๐พ Couple With Heart - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone
๐ฉโโค๏ธโ๐จ๐พ Couple With Heart - Woman, Man: Medium-Dark Skin Tone
๐ด๓ ข๓ ฒ๓ ก๓ ฌ๓ ฟ Flag for Alagoas (BR-AL)
๐ฉโโค๏ธโ๐จ๐ป Couple With Heart - Woman, Man: Light Skin Tone
๐ด๓ ข๓ ฆ๓ ฐ๓ น๓ ฟ Flag for Hauts-Bassins (BF-09)
๐จ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ฉ๐พโโค๏ธโ๐ฉ๐พ Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ด๓ ข๓ ฒ๓ ฒ๓ ช๓ ฟ Flag for Rio de Janeiro (BR-RJ)
๐จ๐พโโค๏ธโ๐โ๐ฉ๐ป Kiss - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone
๐ด๓ ข๓ ฒ๓ ฒ๓ ฏ๓ ฟ Flag for Rondรดnia (BR-RO)
๐จ๐พโโค๏ธโ๐จ๐ฟ Couple With Heart - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone
๐จ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Boy: Medium Skin Tone
๐จ๐ผโโค๏ธโ๐จ๐ฝ Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium Skin Tone
๐ด๓ ข๓ ฒ๓ ฐ๓ ฉ๓ ฟ Flag for Piauรญ (BR-PI)
๐จ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ข๓ ฒ๓ ฒ๓ ฎ๓ ฟ Flag for Rio Grande do Norte (BR-RN)
๐ฉ๐ปโโค๏ธโ๐จ๐ป Couple With Heart - Woman: Light Skin Tone, Man: Light Skin Tone
๐จ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Boy: Light Skin Tone
๐ฉ๐ผโโค๏ธโ๐ฉ๐พ Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
๐จ๐ฟโโค๏ธโ๐ฉ๐พ Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ด๓ ข๓ ฒ๓ ณ๓ ฅ๓ ฟ Flag for Sergipe (BR-SE)
๐ด๓ ข๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for Paranรก (BR-PR)
๐จ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Boy: Dark Skin Tone
๐ฉ๐ผโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone
๐ฉ๐พโโค๏ธโ๐ฉ๐ผ Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ฒ๓ ต๓ ญ๓ ฏ๓ ณ๓ ฟ Flag for Moscow Province (RU-MOS)
๐ฉ๐ฝโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Woman: Medium Skin Tone, Woman: Medium Skin Tone
๐ฉ๐ฟโ๐ฆ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ข๓ ฒ๓ ณ๓ ฐ๓ ฟ Flag for Sรฃo Paulo (BR-SP)
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ฑ๓ ฟ Flag for East Azerbaijan (IR-01)
๐ด๓ ข๓ ฒ๓ ฒ๓ ณ๓ ฟ Flag for Rio Grande do Sul (BR-RS)
๐ฉ๐ผโโค๏ธโ๐จ๐ฟ Couple With Heart - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone
๐ด๓ ฎ๓ ฏ๓ ฑ๓ ด๓ ฟ Flag for Sogn og Fjordane (NO-14)
๐ด๓ ข๓ ฒ๓ ด๓ ฏ๓ ฟ Flag for Tocantins (BR-TO)
๐ด๓ ณ๓ ฉ๓ ฑ๓ ธ๓ ฒ๓ ฟ Flag for Sveti Andraลพ v Slovenskih Goricah (SI-182)
๐จ๐ผโโค๏ธโ๐ฉ๐ป Couple With Heart - Man: Medium-Light Skin Tone, Woman: Light Skin Tone
๐จ๐ฟโโค๏ธโ๐จ๐ฝ Couple With Heart - Man: Dark Skin Tone, Man: Medium Skin Tone
๐จ๐ฝโ๐ฆ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
๐จ๐ฟโ๐ฆ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ข๓ ณ๓ ข๓ ฉ๓ ฟ Flag for Bimini (BS-BI)
๐จ๐ฟโโค๏ธโ๐ฉ Couple With Heart - Man: Dark Skin Tone, Woman
๐ฉ๐ปโ๐ฆ๐ปโ๐ฆ๐ป Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ข๓ ฒ๓ ฒ๓ ฒ๓ ฟ Flag for Roraima (BR-RR)
๐ด๓ ข๓ ฏ๓ ฏ๓ ฟ Flag for Oruro (BO-O)
๐ด๓ ข๓ ณ๓ ฅ๓ ธ๓ ฟ Flag for Exuma (BS-EX)
๐จ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ฉ๐ฝโโค๏ธโ๐จ Couple With Heart - Woman: Medium Skin Tone, Man
๐ด๓ ข๓ ณ๓ ฃ๓ ฅ๓ ฟ Flag for Central Eleuthera (BS-CE)
๐ด๓ ข๓ ณ๓ ข๓ น๓ ฟ Flag for Berry Islands (BS-BY)
๐ด๓ ข๓ ฉ๓ ญ๓ ก๓ ฟ Flag for Makamba (BI-MA)
๐ด๓ ข๓ ฒ๓ ค๓ ฆ๓ ฟ Flag for Federal District (BR-DF)
๐ฉ๐ปโโค๏ธโ๐ฉ๐พ Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone
๐จ๐ผโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
๐ด๓ ข๓ ณ๓ ฃ๓ ฏ๓ ฟ Flag for Central Abaco (BS-CO)
๐ด๓ ข๓ ณ๓ ฅ๓ ง๓ ฟ Flag for East Grand Bahama (BS-EG)
๐ด๓ ข๓ ณ๓ ฃ๓ ณ๓ ฟ Flag for Central Andros (BS-CS)
๐จ๐ปโ๐ฆ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ข๓ ณ๓ ฃ๓ ซ๓ ฟ Flag for Crooked Island (BS-CK)
๐ด๓ ข๓ ณ๓ ข๓ ฐ๓ ฟ Flag for Black Point (BS-BP)
๐จ๐ผโ๐ฆ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐จ๐ฝโ๐ฆ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
๐ฉ๐ฟโโค๏ธโ๐จ๐พ Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐จ๐ผ Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ณ๓ ฎ๓ ฅ๓ ฟ Flag for North Eleuthera (BS-NE)
๐ด๓ ข๓ ณ๓ ฎ๓ ฏ๓ ฟ Flag for North Abaco (BS-NO)
๐ด๓ ข๓ ณ๓ ญ๓ ง๓ ฟ Flag for Mayaguana (BS-MG)
๐จ๐พโ๐ฆ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐จ๐ผโโค๏ธโ๐โ๐ฉ๐ป Kiss - Man: Medium-Light Skin Tone, Woman: Light Skin Tone
๐ด๓ ข๓ ณ๓ ง๓ ฃ๓ ฟ Flag for Grand Cay (BS-GC)
๐ด๓ ข๓ ณ๓ ฆ๓ ฐ๓ ฟ Flag for Freeport (BS-FP)
๐ด๓ ข๓ ณ๓ ฉ๓ ฎ๓ ฟ Flag for Inagua (BS-IN)
๐ด๓ ข๓ ณ๓ จ๓ ด๓ ฟ Flag for Hope Town (BS-HT)
๐ฉ๐พโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone
๐ด๓ ข๓ ณ๓ ฌ๓ ฉ๓ ฟ Flag for Long Island (BS-LI)
๐จ๐ฟโ๐ฆ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
๐จ๐พโโค๏ธโ๐ฉ Couple With Heart - Man: Medium-Dark Skin Tone, Woman
๐ฉ๐ฟโโค๏ธโ๐จ๐ฟ Couple With Heart - Woman: Dark Skin Tone, Man: Dark Skin Tone
๐จ๐ปโ๐ฆ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
๐จโ๐จโ๐ถ Family: Man, Man, Baby
๐ฉโ๐งโ๐ถ Family: Woman, Girl, Baby
๐จโ๐ฆโ๐ถ Family: Man, Boy, Baby
๐จโ๐จโ๐ถโ๐ฆ Family: Man, Man, Baby, Boy
๐จโ๐ฆโ๐ง Family: Man, Boy, Girl
๐จโ๐ถโ๐ถ Family: Man, Baby, Baby
๐ด๓ ข๓ ณ๓ ฒ๓ ฉ๓ ฟ Flag for Ragged Island (BS-RI)
๐ฉ๐ฟโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Woman: Dark Skin Tone, Woman: Dark Skin Tone
๐ฉ๐ฟโโค๏ธโ๐จ๐ฝ Couple With Heart - Woman: Dark Skin Tone, Man: Medium Skin Tone
๐ฉ๐ผโโค๏ธโ๐จ๐ผ Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ณ๓ ฎ๓ ณ๓ ฟ Flag for North Andros (BS-NS)
๐ฉ๐ฟโโค๏ธโ๐ฉ๐ป Couple With Heart - Woman: Dark Skin Tone, Woman: Light Skin Tone
๐จ๐ปโโค๏ธโ๐โ๐จ Kiss - Man: Light Skin Tone, Man
๐ด๓ ข๓ ณ๓ ณ๓ ก๓ ฟ Flag for South Andros (BS-SA)
๐จ๐ปโโค๏ธโ๐โ๐จ๐ผ Kiss - Man: Light Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ณ๓ ณ๓ ฅ๓ ฟ Flag for South Eleuthera (BS-SE)
๐จ๐ผโ๐ฆ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐จ๐ปโโค๏ธโ๐โ๐ฉ๐ป Kiss - Man: Light Skin Tone, Woman: Light Skin Tone
๐จ๐ผโโค๏ธโ๐โ๐ฉ๐พ Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
๐จ๐พโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
๐จ๐พโโค๏ธโ๐โ๐จ๐ป Kiss - Man: Medium-Dark Skin Tone, Man: Light Skin Tone
๐ด๓ ข๓ ฒ๓ ณ๓ ฃ๓ ฟ Flag for Santa Catarina (BR-SC)
๐ฉโ๐ฉโ๐ฆโ๐ง Family: Woman, Woman, Boy, Girl
๐จโโค๏ธโ๐โ๐ฉ๐พ Kiss - Man, Woman: Medium-Dark Skin Tone
๐ด๓ ข๓ ณ๓ ฒ๓ ฃ๓ ฟ Flag for Rum Cay (BS-RC)
๐ฉโ๐ฉโ๐ถโ๐ฆ Family: Woman, Woman, Baby, Boy
๐จ๐ปโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Man: Light Skin Tone, Woman: Medium Skin Tone
๐ด๓ ข๓ ณ๓ ฃ๓ ฉ๓ ฟ Flag for Cat Island (BS-CI)
๐ฉ๐ฝโโค๏ธโ๐ฉ Couple With Heart - Woman: Medium Skin Tone, Woman
๐จ๐ฝโ๐ฆ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
๐ฉโ๐จโ๐ฆโ๐ถ Family: Woman, Man, Boy, Baby
๐จ๐พโโค๏ธโ๐โ๐ฉ Kiss - Man: Medium-Dark Skin Tone, Woman
๐จโโค๏ธโ๐โ๐จ๐ป Kiss - Man, Man: Light Skin Tone
๐จ๐ปโโค๏ธโ๐โ๐จ๐ฟ Kiss - Man: Light Skin Tone, Man: Dark Skin Tone
๐จ๐ผโโค๏ธโ๐โ๐ฉ๐ฝ Kiss - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone
๐จ๐พโ๐ฆ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ข๓ ณ๓ ณ๓ ฏ๓ ฟ Flag for South Abaco (BS-SO)
๐ฉ๐พโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
๐จ๐ปโโค๏ธโ๐จ๐ฟ Couple With Heart - Man: Light Skin Tone, Man: Dark Skin Tone
๐จ๐ฟโโค๏ธโ๐โ๐จ๐ฟ Kiss - Man: Dark Skin Tone, Man: Dark Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐จ๐ฟ Kiss - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone
๐ฉ๐ผโโค๏ธโ๐โ๐จ๐ฝ Kiss - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone
๐ฉ๐พโโค๏ธโ๐โ๐จ๐ป Kiss - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone
๐ฉ๐ฝโโค๏ธโ๐โ๐จ Kiss - Woman: Medium Skin Tone, Man
๐จโ๐งโ๐ถ Family: Man, Girl, Baby
๐ฉ๐ปโโค๏ธโ๐โ๐จ๐พ Kiss - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone
๐จโโค๏ธโ๐จ๐ผ Couple With Heart - Man, Man: Medium-Light Skin Tone
๐ฉ๐ผโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
๐จ๐ฟโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Man: Dark Skin Tone, Woman: Dark Skin Tone
๐จโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Man, Woman: Medium-Light Skin Tone
๐ด๓ ฃ๓ ฉ๓ ก๓ ข๓ ฟ Flag for Abidjan (CI-AB)
๐ฉ๐ปโโค๏ธโ๐โ๐จ Kiss - Woman: Light Skin Tone, Man
๐ฉ๐ผโโค๏ธโ๐โ๐ฉ๐พ Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
๐จ๐ปโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Man: Light Skin Tone, Woman: Medium-Light Skin Tone
๐ฉ๐ฝโโค๏ธโ๐โ๐จ๐ฟ Kiss - Woman: Medium Skin Tone, Man: Dark Skin Tone
๐ฉ๐ฟโโค๏ธโ๐โ๐ฉ๐ผ Kiss - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone
๐ฉ๐ฟโโค๏ธโ๐โ๐จ๐พ Kiss - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone
๐ฉ๐ผโโค๏ธโ๐โ๐จ Kiss - Woman: Medium-Light Skin Tone, Man
๐ฉโโค๏ธโ๐ฉ๐พ Couple With Heart - Woman, Woman: Medium-Dark Skin Tone
๐จ๐ฟโโค๏ธโ๐จ๐ผ Couple With Heart - Man: Dark Skin Tone, Man: Medium-Light Skin Tone
๐จ๐ฟโ๐ฆ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
๐จ๐ผโโค๏ธโ๐ฉ๐ผ Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
๐ฉ๐ผโโค๏ธโ๐จ๐ฝ Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone
๐ด๓ ข๓ ณ๓ ณ๓ ท๓ ฟ Flag for Spanish Wells (BS-SW)
๐จ๐ฟโโค๏ธโ๐จ๐ฟ Couple With Heart - Man: Dark Skin Tone, Man: Dark Skin Tone
๐จ๐ผโโค๏ธโ๐จ๐ฟ Couple With Heart - Man: Medium-Light Skin Tone, Man: Dark Skin Tone
๐จ๐ผโโค๏ธโ๐ฉ Couple With Heart - Man: Medium-Light Skin Tone, Woman
๐ฉ๐ผโโค๏ธโ๐ฉ๐ผ Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
๐จ๐ผโโค๏ธโ๐จ๐ป Couple With Heart - Man: Medium-Light Skin Tone, Man: Light Skin Tone
๐จ๐พโโค๏ธโ๐จ๐พ Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
๐ฉโโค๏ธโ๐ฉ๐ผ Couple With Heart - Woman, Woman: Medium-Light Skin Tone
๐จ๐ผโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Man: Medium-Light Skin Tone, Woman: Dark Skin Tone
๐จ๐ปโโค๏ธโ๐จ๐พ Couple With Heart - Man: Light Skin Tone, Man: Medium-Dark Skin Tone
๐จ๐ฝโโค๏ธโ๐ฉ๐พ Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone
๐ฉโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Woman, Woman: Dark Skin Tone
๐จ๐ฝโโค๏ธโ๐จ๐ฟ Couple With Heart - Man: Medium Skin Tone, Man: Dark Skin Tone
๐จโ๐จโ๐ฆโ๐ถ Family: Man, Man, Boy, Baby
๐จ๐ฟโโค๏ธโ๐จ Couple With Heart - Man: Dark Skin Tone, Man
๐ฉ๐ปโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Woman: Light Skin Tone, Woman: Dark Skin Tone
๐ด๓ ข๓ ณ๓ ณ๓ ณ๓ ฟ Flag for San Salvador (BS-SS)
๐ด๓ ข๓ ด๓ ฑ๓ ด๓ ฟ Flag for Samtse (BT-14)
๐ฉ๐ปโโค๏ธโ๐จ๐ฝ Couple With Heart - Woman: Light Skin Tone, Man: Medium Skin Tone
๐ฉ๐ผโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone
๐จโโค๏ธโ๐ฉ๐ฟ Couple With Heart - Man, Woman: Dark Skin Tone
๐ด๓ ข๓ ด๓ ฑ๓ ฑ๓ ฟ Flag for Paro (BT-11)
๐จ๐ปโโค๏ธโ๐ฉ๐พ Couple With Heart - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone
๐จ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ข๓ ด๓ ฑ๓ ต๓ ฟ Flag for Thimphu (BT-15)
๐ฉ๐พโโค๏ธโ๐ฉ๐ฝ Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone
๐ด๓ ข๓ ณ๓ ท๓ ง๓ ฟ Flag for West Grand Bahama (BS-WG)
๐ด๓ ข๓ ด๓ ฑ๓ ณ๓ ฟ Flag for Haa (BT-13)
๐ด๓ ข๓ ด๓ ฑ๓ ฒ๓ ฟ Flag for Chukha (BT-12)
๐จ๐ปโโค๏ธโ๐โ๐จ๐ฝ Kiss - Man: Light Skin Tone, Man: Medium Skin Tone
๐จ๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐จ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ข๓ ณ๓ ก๓ ซ๓ ฟ Flag for Acklins (BS-AK)
๐ด๓ ข๓ ด๓ ณ๓ ฒ๓ ฟ Flag for Trongsa (BT-32)
๐ด๓ ข๓ ด๓ ด๓ ฑ๓ ฟ Flag for Trashigang (BT-41)
๐ด๓ ข๓ ด๓ ฒ๓ ณ๓ ฟ Flag for Punakha (BT-23)
๐ด๓ ข๓ ด๓ ฒ๓ ด๓ ฟ Flag for Wangdue Phodrang (BT-24)
๐ด๓ ข๓ ด๓ ณ๓ ณ๓ ฟ Flag for Bumthang (BT-33)
๐ด๓ ข๓ ด๓ ณ๓ ด๓ ฟ Flag for Zhemgang (BT-34)
๐ฉ๐ผโโค๏ธโ๐โ๐จ๐ผ Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ข๓ ด๓ ด๓ ฒ๓ ฟ Flag for Mongar (BT-42)
๐ด๓ ข๓ ฒ๓ ฐ๓ ข๓ ฟ Flag for Paraรญba (BR-PB)
๐ฉ๐ฟโโค๏ธโ๐จ๐ผ Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ฃ๓ จ๓ บ๓ จ๓ ฟ Flag for Zรผrich (CH-ZH)
๐ด๓ ข๓ ด๓ ณ๓ ฑ๓ ฟ Flag for Sarpang (BT-31)
๐ด๓ ข๓ ด๓ ฒ๓ ฒ๓ ฟ Flag for Dagana (BT-22)
๐ฉ๐ปโโค๏ธโ๐โ๐จ๐ฝ Kiss - Woman: Light Skin Tone, Man: Medium Skin Tone
๐จ๐ฟโ๐จ๐ฟโ๐ง๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ข๓ ท๓ ฃ๓ ฅ๓ ฟ Flag for Central (BW-CE)
๐ด๓ ข๓ ด๓ ง๓ ก๓ ฟ Flag for Gasa (BT-GA)
๐ด๓ ข๓ ท๓ ฃ๓ จ๓ ฟ Flag for Chobe (BW-CH)
๐ด๓ ข๓ ด๓ ด๓ ต๓ ฟ Flag for Samdrup Jongkhar (BT-45)
๐ด๓ ข๓ ท๓ ฆ๓ ฒ๓ ฟ Flag for Francistown (BW-FR)
๐ด๓ ข๓ ด๓ ด๓ ด๓ ฟ Flag for Lhuntse (BT-44)
๐ด๓ ข๓ ด๓ ด๓ น๓ ฟ Flag for Trashiyangtse (BT-TY)
๐ด๓ ข๓ ด๓ ฒ๓ ฑ๓ ฟ Flag for Tsirang (BT-21)
๐ด๓ ข๓ ด๓ ด๓ ณ๓ ฟ Flag for Pemagatshel (BT-43)
๐จ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ข๓ ท๓ ฎ๓ ฅ๓ ฟ Flag for North East (BW-NE)
๐ด๓ ข๓ ท๓ ซ๓ ฌ๓ ฟ Flag for Kgatleng (BW-KL)
๐ด๓ ข๓ ท๓ ซ๓ ง๓ ฟ Flag for Kgalagadi (BW-KG)
๐ด๓ ข๓ ท๓ ณ๓ ฅ๓ ฟ Flag for South East (BW-SE)
๐ด๓ ข๓ ท๓ ซ๓ ท๓ ฟ Flag for Kweneng (BW-KW)
๐จ๐ปโ๐ง๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ข๓ ท๓ ฎ๓ ท๓ ฟ Flag for North West (BW-NW)
๐ด๓ ข๓ ท๓ ช๓ ท๓ ฟ Flag for Jwaneng (BW-JW)
๐ด๓ ข๓ ณ๓ ญ๓ ฃ๓ ฟ Flag for Mangrove Cay (BS-MC)
๐ฉ๐ผโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone
๐ด๓ ข๓ ท๓ ง๓ จ๓ ฟ Flag for Ghanzi (BW-GH)
๐จ๐ปโโค๏ธโ๐ฉ๐ป Couple With Heart - Man: Light Skin Tone, Woman: Light Skin Tone
๐ด๓ ข๓ ช๓ ก๓ ฑ๓ ฟ Flag for Atlantique (BJ-AQ)
๐จ๐ผโ๐ง๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐จ๐พโ๐ง๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐จ๐ฟโ๐ง๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ข๓ ท๓ ณ๓ ฏ๓ ฟ Flag for Southern (BW-SO)
๐จ๐ฝโ๐ง๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐พโโค๏ธโ๐ฉ Couple With Heart - Woman: Medium-Dark Skin Tone, Woman
๐จโ๐ฉโ๐ถโ๐ง Family: Man, Woman, Baby, Girl
๐จ๐ฝโโค๏ธโ๐โ๐จ๐พ Kiss - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone
๐ด๓ ข๓ ท๓ ณ๓ ด๓ ฟ Flag for Sowa Town (BW-ST)
๐ด๓ ข๓ ท๓ ณ๓ ฐ๓ ฟ Flag for Selibe Phikwe (BW-SP)
๐ฉ๐ฟโโค๏ธโ๐ฉ๐พ Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone
๐ฉโ๐จโ๐ฆโ๐ฆ Family: Woman, Man, Boy, Boy
๐ฉ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ข๓ น๓ จ๓ ญ๓ ฟ Flag for Minsk (BY-HM)
๐ด๓ ข๓ น๓ จ๓ ฏ๓ ฟ Flag for Homel (BY-HO)
๐จ๐ปโ๐ฆ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
๐จ๐ปโ๐ฉ๐ปโ๐ง๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ด๓ ฒ๓ ณ๓ ต๓ ฟ Flag for Izmir (TR-35)
๐ด๓ ข๓ น๓ จ๓ ฒ๓ ฟ Flag for Hrodna (BY-HR)
๐ด๓ ข๓ น๓ ญ๓ ก๓ ฟ Flag for Magileu (BY-MA)
๐ด๓ ข๓ น๓ ญ๓ ฉ๓ ฟ Flag for Minsk Region (BY-MI)
๐จ๐ผโโค๏ธโ๐โ๐ฉ๐ฟ Kiss - Man: Medium-Light Skin Tone, Woman: Dark Skin Tone
๐จ๐พโโค๏ธโ๐ฉ๐ป Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone
๐ด๓ ข๓ บ๓ ข๓ บ๓ ฟ Flag for Belize (BZ-BZ)
๐ด๓ ข๓ ท๓ ฌ๓ ฏ๓ ฟ Flag for Lobatse (BW-LO)
๐ฉโ๐ฆโ๐ง Family: Woman, Boy, Girl
๐จ๐ผโ๐ง๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ข๓ ณ๓ ญ๓ ฉ๓ ฟ Flag for Mooreโs Island (BS-MI)
๐ด๓ ข๓ ช๓ ญ๓ ฏ๓ ฟ Flag for Mono (BJ-MO)
๐จ๐ฝโ๐ง๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ข๓ น๓ ถ๓ ฉ๓ ฟ Flag for Vitebsk (BY-VI)
๐ด๓ ข๓ บ๓ ณ๓ ฃ๓ ฟ Flag for Stann Creek (BZ-SC)
๐จ๐พโ๐ง๐พโ๐ง๐พ Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ข๓ บ๓ ฃ๓ บ๓ ฌ๓ ฟ Flag for Corozal (BZ-CZL)
๐จ๐ปโ๐ง๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
๐จ๐ฟโ๐ง๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ข๓ บ๓ ด๓ ฏ๓ ฌ๓ ฟ Flag for Toledo (BZ-TOL)
๐ด๓ ฎ๓ ฐ๓ ต๓ ฟ Flag for Sudur Pashchimanchal (NP-5)
๐ด๓ ข๓ ณ๓ จ๓ ฉ๓ ฟ Flag for Harbour Island (BS-HI)
๐ด๓ ฃ๓ ก๓ ก๓ ข๓ ฟ Flag for Alberta (CA-AB)
๐ฉ๐พโโค๏ธโ๐จ๐พ Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
๐จ๐ฝโโค๏ธโ๐โ๐จ๐ผ Kiss - Man: Medium Skin Tone, Man: Medium-Light Skin Tone
๐ด๓ ฌ๓ ก๓ ถ๓ ฉ๓ ฟ Flag for Vientiane Province (LA-VI)
๐จโ๐ฉโ๐ฆโ๐ง Family: Man, Woman, Boy, Girl
๐จ๐ปโ๐ง๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
๐จ๐ผโ๐ง๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐จ๐ฝโ๐ง๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ฃ๓ ก๓ ฐ๓ ฅ๓ ฟ Flag for Prince Edward Island (CA-PE)
๐ด๓ ฃ๓ ค๓ ซ๓ ง๓ ฟ Flag for Kwango (CD-KG)
๐ด๓ ฃ๓ ก๓ ฎ๓ ณ๓ ฟ Flag for Nova Scotia (CA-NS)
๐จ๐พโ๐ง๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ฃ๓ ค๓ จ๓ ต๓ ฟ Flag for Haut-Uรฉlรฉ (CD-HU)
๐ด๓ ฃ๓ ค๓ ข๓ ฃ๓ ฟ Flag for Bas-Congo (CD-BC)
๐ด๓ ฃ๓ ค๓ ณ๓ ต๓ ฟ Flag for Sud-Ubangi (CD-SU)
๐ด๓ ฃ๓ ค๓ ญ๓ ก๓ ฟ Flag for Maniema (CD-MA)
๐ด๓ ฃ๓ ค๓ ณ๓ ก๓ ฟ Flag for Sankuru (CD-SA)
๐ด๓ ฃ๓ ค๓ ด๓ ต๓ ฟ Flag for Tshuapa (CD-TU)
๐ด๓ ฃ๓ ก๓ น๓ ด๓ ฟ Flag for Yukon (CA-YT)
๐ด๓ ฃ๓ ค๓ ญ๓ ฏ๓ ฟ Flag for Mongala (CD-MO)
๐ด๓ ฃ๓ ฆ๓ ข๓ ข๓ ฟ Flag for Bamingui-Bangoran (CF-BB)
๐ด๓ ฃ๓ ค๓ ญ๓ ฎ๓ ฟ Flag for Mai-Ndombe (CD-MN)
๐ด๓ ฃ๓ ก๓ ฎ๓ ต๓ ฟ Flag for Nunavut (CA-NU)
๐ด๓ ฃ๓ ค๓ ซ๓ ฌ๓ ฟ Flag for Kwilu (CD-KL)
๐ด๓ ฃ๓ ก๓ ฎ๓ ข๓ ฟ Flag for New Brunswick (CA-NB)
๐ด๓ ฃ๓ ฆ๓ ข๓ ง๓ ฆ๓ ฟ Flag for Bangui (CF-BGF)
๐ด๓ ฃ๓ ค๓ ซ๓ ฎ๓ ฟ Flag for Kinshasa (CD-KN)
๐ด๓ ฃ๓ ค๓ ฎ๓ ซ๓ ฟ Flag for North Kivu (CD-NK)
๐ด๓ ฃ๓ ก๓ ฎ๓ ด๓ ฟ Flag for Northwest Territories (CA-NT)
๐ด๓ ฃ๓ ค๓ ด๓ ฏ๓ ฟ Flag for Tshopo (CD-TO)
๐ด๓ ฃ๓ ค๓ ข๓ ต๓ ฟ Flag for Bas-Uรฉlรฉ (CD-BU)
๐ด๓ ฃ๓ ค๓ จ๓ ฌ๓ ฟ Flag for Haut-Lomami (CD-HL)
๐ด๓ ฃ๓ ค๓ จ๓ ซ๓ ฟ Flag for Haut-Katanga (CD-HK)
๐ด๓ ฃ๓ ค๓ ซ๓ ฅ๓ ฟ Flag for Kasaรฏ-Oriental (CD-KE)
๐ด๓ ฃ๓ ค๓ ณ๓ ซ๓ ฟ Flag for South Kivu (CD-SK)
๐ด๓ ฃ๓ ก๓ ฏ๓ ฎ๓ ฟ Flag for Ontario (CA-ON)
๐ด๓ ฃ๓ ฆ๓ ก๓ ฃ๓ ฟ Flag for Ouham (CF-AC)
๐ด๓ ฃ๓ ฆ๓ จ๓ ณ๓ ฟ Flag for Mambรฉrรฉ-Kadรฉรฏ (CF-HS)
๐ด๓ ฃ๓ ค๓ ซ๓ ฃ๓ ฟ Flag for Kasaรฏ Central (CD-KC)
๐ด๓ ฃ๓ ค๓ ฎ๓ ต๓ ฟ Flag for Nord-Ubangi (CD-NU)
๐ด๓ ฃ๓ ค๓ ซ๓ ณ๓ ฟ Flag for Kasaรฏ (CD-KS)
๐ด๓ ฃ๓ ค๓ ฉ๓ ด๓ ฟ Flag for Ituri (CD-IT)
๐ด๓ ฃ๓ จ๓ ข๓ ฅ๓ ฟ Flag for Bern (CH-BE)
๐ด๓ ฃ๓ ง๓ ฒ๓ ฟ Flag for Lรฉkoumou (CG-2)
๐ด๓ ฃ๓ จ๓ ก๓ ฉ๓ ฟ Flag for Appenzell Innerrhoden (CH-AI)
๐ด๓ ฃ๓ ฆ๓ ญ๓ ฐ๓ ฟ Flag for Ombella-MโPoko (CF-MP)
๐จ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ฃ๓ ฆ๓ ซ๓ ง๓ ฟ Flag for Kรฉmo (CF-KG)
๐ด๓ ฃ๓ ง๓ ฑ๓ ณ๓ ฟ Flag for Sangha (CG-13)
๐ด๓ ฃ๓ จ๓ ฌ๓ ต๓ ฟ Flag for Lucerne (CH-LU)
๐ด๓ ฃ๓ จ๓ ง๓ ฅ๓ ฟ Flag for Geneva (CH-GE)
๐ด๓ ฃ๓ จ๓ ฎ๓ ท๓ ฟ Flag for Nidwalden (CH-NW)
๐ด๓ ฃ๓ ง๓ ต๓ ฟ Flag for Kouilou (CG-5)
๐ด๓ ฃ๓ ง๓ ท๓ ฟ Flag for Likouala (CG-7)
๐ด๓ ฃ๓ ง๓ ข๓ บ๓ ถ๓ ฟ Flag for Brazzaville (CG-BZV)
๐ด๓ ฃ๓ จ๓ ณ๓ จ๓ ฟ Flag for Schaffhausen (CH-SH)
๐ด๓ ฃ๓ ค๓ ฌ๓ ฏ๓ ฟ Flag for Lomami (CD-LO)
๐ด๓ ฃ๓ จ๓ ก๓ ฒ๓ ฟ Flag for Appenzell Ausserrhoden (CH-AR)
๐ด๓ ฃ๓ จ๓ ณ๓ บ๓ ฟ Flag for Schwyz (CH-SZ)
๐ด๓ ฃ๓ จ๓ ฎ๓ ฅ๓ ฟ Flag for Neuchรขtel (CH-NE)
๐ด๓ ฃ๓ ฆ๓ ฏ๓ ฐ๓ ฟ Flag for Ouham-Pendรฉ (CF-OP)
๐ด๓ ฃ๓ จ๓ ง๓ ฒ๓ ฟ Flag for Graubรผnden (CH-GR)
๐ด๓ ฃ๓ จ๓ ณ๓ ฏ๓ ฟ Flag for Solothurn (CH-SO)
๐ด๓ ฃ๓ จ๓ ฆ๓ ฒ๓ ฟ Flag for Fribourg (CH-FR)
๐ด๓ ฃ๓ ง๓ ฑ๓ ด๓ ฟ Flag for Plateaux (CG-14)
๐ด๓ ฃ๓ ฆ๓ ณ๓ ฅ๓ ฟ Flag for Sangha-Mbaรฉrรฉ (CF-SE)
๐จ๐ฟโ๐ง๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ฃ๓ จ๓ ก๓ ง๓ ฟ Flag for Aargau (CH-AG)
๐ด๓ ฃ๓ ง๓ ฑ๓ ต๓ ฟ Flag for Cuvette-Ouest (CG-15)
๐ด๓ ฃ๓ จ๓ ณ๓ ง๓ ฟ Flag for St. Gallen (CH-SG)
๐ด๓ ฃ๓ ง๓ ธ๓ ฟ Flag for Cuvette (CG-8)
๐ด๓ ฃ๓ จ๓ ฏ๓ ท๓ ฟ Flag for Obwalden (CH-OW)
๐ด๓ ฃ๓ จ๓ ข๓ ณ๓ ฟ Flag for Basel-Stadt (CH-BS)
๐ด๓ ฃ๓ ฆ๓ ฌ๓ ข๓ ฟ Flag for Lobaye (CF-LB)
๐ด๓ ฃ๓ ฌ๓ ถ๓ ณ๓ ฟ Flag for Valparaรญso (CL-VS)
๐ด๓ ฃ๓ ญ๓ ฎ๓ ท๓ ฟ Flag for Northwest (CM-NW)
๐ด๓ ฃ๓ ฉ๓ ค๓ ฎ๓ ฟ Flag for Denguรฉlรฉ (CI-DN)
๐ด๓ ฃ๓ ญ๓ ฎ๓ ฏ๓ ฟ Flag for North (CM-NO)
๐ด๓ ฃ๓ ฉ๓ น๓ ญ๓ ฟ Flag for Yamoussoukro (CI-YM)
๐ด๓ ฃ๓ ญ๓ ฅ๓ ณ๓ ฟ Flag for East (CM-ES)
๐จ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ฃ๓ ฉ๓ ท๓ ฒ๓ ฟ Flag for Woroba (CI-WR)
๐ด๓ ฃ๓ ฉ๓ ฌ๓ ง๓ ฟ Flag for Lagunes (CI-LG)
๐ด๓ ฃ๓ ฉ๓ ง๓ ค๓ ฟ Flag for Gรดh-Djiboua (CI-GD)
๐ด๓ ฃ๓ ฉ๓ ฃ๓ ญ๓ ฟ Flag for Comoรฉ (CI-CM)
๐ด๓ ฃ๓ ญ๓ ณ๓ ท๓ ฟ Flag for Southwest (CM-SW)
๐ด๓ ฃ๓ ฌ๓ ข๓ ฉ๓ ฟ Flag for Bรญo Bรญo (CL-BI)
๐ด๓ ฃ๓ ฌ๓ ก๓ ฉ๓ ฟ Flag for Aysรฉn (CL-AI)
๐ด๓ ฃ๓ ฌ๓ ฒ๓ ญ๓ ฟ Flag for Santiago Metropolitan (CL-RM)
๐ด๓ ฃ๓ ฌ๓ ด๓ ก๓ ฟ Flag for Tarapacรก (CL-TA)
๐ด๓ ฃ๓ ญ๓ ณ๓ ต๓ ฟ Flag for South (CM-SU)
๐ด๓ ฃ๓ ฌ๓ ก๓ ด๓ ฟ Flag for Atacama (CL-AT)
๐ด๓ ฃ๓ ฎ๓ ฑ๓ ฒ๓ ฟ Flag for Tianjin (CN-12)
๐ด๓ ฃ๓ ฉ๓ ฌ๓ ฃ๓ ฟ Flag for Lacs (CI-LC)
๐ด๓ ฃ๓ ฌ๓ ฃ๓ ฏ๓ ฟ Flag for Coquimbo (CL-CO)
๐ด๓ ฃ๓ ฌ๓ ก๓ ฐ๓ ฟ Flag for Arica y Parinacota (CL-AP)
๐ด๓ ฃ๓ ญ๓ ฌ๓ ด๓ ฟ Flag for Littoral (CM-LT)
๐ด๓ ฃ๓ ญ๓ ฃ๓ ฅ๓ ฟ Flag for Centre (CM-CE)
๐ด๓ ฃ๓ ญ๓ ฅ๓ ฎ๓ ฟ Flag for Far North (CM-EN)
๐ด๓ ฃ๓ ฌ๓ ญ๓ ก๓ ฟ Flag for Magallanes Region (CL-MA)
๐ด๓ ฃ๓ ฌ๓ ญ๓ ฌ๓ ฟ Flag for Maule (CL-ML)
๐ด๓ ฃ๓ ฉ๓ ญ๓ ง๓ ฟ Flag for Montagnes (CI-MG)
๐ด๓ ฃ๓ ฉ๓ ข๓ ณ๓ ฟ Flag for Bas-Sassandra (CI-BS)
๐ด๓ ฃ๓ ญ๓ ก๓ ค๓ ฟ Flag for Adamawa (CM-AD)
๐ด๓ ฃ๓ ฌ๓ ฌ๓ ฒ๓ ฟ Flag for Los Rรญos (CL-LR)
๐ด๓ ฃ๓ ญ๓ ฏ๓ ต๓ ฟ Flag for West (CM-OU)
๐ด๓ ฃ๓ ฉ๓ ณ๓ ถ๓ ฟ Flag for Savanes (CI-SV)
๐ด๓ ฃ๓ ฌ๓ ฌ๓ ฌ๓ ฟ Flag for Los Lagos (CL-LL)
๐ด๓ ฃ๓ ฎ๓ ณ๓ ท๓ ฟ Flag for Shandong (CN-37)
๐ด๓ ฃ๓ ฎ๓ ถ๓ ฒ๓ ฟ Flag for Gansu (CN-62)
๐ด๓ ฃ๓ ฎ๓ ณ๓ ฑ๓ ฟ Flag for Shanghai (CN-31)
๐ด๓ ฃ๓ ฎ๓ ณ๓ ถ๓ ฟ Flag for Jiangxi (CN-36)
๐ด๓ ฃ๓ ฎ๓ ท๓ ฑ๓ ฟ Flag for Taiwan (CN-71)
๐ด๓ ฃ๓ ฏ๓ ข๓ ฏ๓ น๓ ฟ Flag for Boyacรก (CO-BOY)
๐ด๓ ฃ๓ ฎ๓ ฑ๓ ฑ๓ ฟ Flag for Beijing (CN-11)
๐ด๓ ข๓ ง๓ ฑ๓ ธ๓ ฟ Flag for Ruse (BG-18)
๐ด๓ ฃ๓ ฎ๓ ด๓ ด๓ ฟ Flag for Guangdong (CN-44)
๐ด๓ ฃ๓ ฎ๓ ถ๓ ณ๓ ฟ Flag for Qinghai (CN-63)
๐ด๓ ฃ๓ ฎ๓ ฒ๓ ณ๓ ฟ Flag for Heilongjiang (CN-23)
๐ด๓ ฃ๓ ฎ๓ ต๓ ฑ๓ ฟ Flag for Sichuan (CN-51)
๐ด๓ ฃ๓ ฏ๓ ฃ๓ ก๓ ฌ๓ ฟ Flag for Caldas (CO-CAL)
๐ด๓ ฃ๓ ฏ๓ ข๓ ฏ๓ ฌ๓ ฟ Flag for Bolรญvar (CO-BOL)
๐ด๓ ฃ๓ ฎ๓ ต๓ ณ๓ ฟ Flag for Yunnan (CN-53)
๐ด๓ ฃ๓ ฏ๓ ก๓ ด๓ ฌ๓ ฟ Flag for Atlรกntico (CO-ATL)
๐ด๓ ฃ๓ ฎ๓ ด๓ ฒ๓ ฟ Flag for Hubei (CN-42)
๐ด๓ ฃ๓ ฎ๓ ฒ๓ ฒ๓ ฟ Flag for Jilin (CN-22)
๐ด๓ ฃ๓ ฏ๓ ฃ๓ ก๓ ฑ๓ ฟ Flag for Caquetรก (CO-CAQ)
๐ด๓ ฃ๓ ฎ๓ ณ๓ ณ๓ ฟ Flag for Zhejiang (CN-33)
๐ด๓ ฃ๓ ฎ๓ ฑ๓ ณ๓ ฟ Flag for Hebei (CN-13)
๐ด๓ ฃ๓ ฎ๓ ฑ๓ ต๓ ฟ Flag for Inner Mongolia (CN-15)
๐ด๓ ฃ๓ ฎ๓ ด๓ ณ๓ ฟ Flag for Hunan (CN-43)
๐ด๓ ฃ๓ ฆ๓ จ๓ ซ๓ ฟ Flag for Haute-Kotto (CF-HK)
๐ด๓ ฃ๓ ฎ๓ ถ๓ ต๓ ฟ Flag for Xinjiang (CN-65)
๐ด๓ ฃ๓ ฎ๓ ต๓ ฐ๓ ฟ Flag for Chongqing (CN-50)
๐ด๓ ฃ๓ ฎ๓ ด๓ ต๓ ฟ Flag for Guangxi (CN-45)
๐ด๓ ฃ๓ ฎ๓ ต๓ ด๓ ฟ Flag for Tibet (CN-54)
๐ด๓ ฃ๓ ฎ๓ ณ๓ ฒ๓ ฟ Flag for Jiangsu (CN-32)
๐ด๓ ฃ๓ ฏ๓ ก๓ ฒ๓ ก๓ ฟ Flag for Arauca (CO-ARA)
๐ด๓ ฃ๓ ฎ๓ ณ๓ ต๓ ฟ Flag for Fujian (CN-35)
๐ด๓ ฃ๓ ฎ๓ ด๓ ฑ๓ ฟ Flag for Henan (CN-41)
๐ด๓ ฃ๓ ฎ๓ ด๓ ถ๓ ฟ Flag for Hainan (CN-46)
๐ด๓ ฃ๓ ฎ๓ ฑ๓ ด๓ ฟ Flag for Shanxi (CN-14)
๐ด๓ ฃ๓ ฏ๓ ญ๓ ก๓ ง๓ ฟ Flag for Magdalena (CO-MAG)
๐ด๓ ฃ๓ ฏ๓ ฃ๓ จ๓ ฏ๓ ฟ Flag for Chocรณ (CO-CHO)
๐ด๓ ฃ๓ ฏ๓ ง๓ ต๓ ก๓ ฟ Flag for Guainรญa (CO-GUA)
๐ด๓ ฃ๓ ฏ๓ ฃ๓ ฏ๓ ฒ๓ ฟ Flag for Cรณrdoba (CO-COR)
๐ด๓ ฃ๓ ฏ๓ ฐ๓ ต๓ ด๓ ฟ Flag for Putumayo (CO-PUT)
๐ด๓ ฃ๓ ฏ๓ ณ๓ ก๓ ฎ๓ ฟ Flag for Santander (CO-SAN)
๐ด๓ ฃ๓ ต๓ ฐ๓ ต๓ ฟ Flag for Villa Clara (CU-05)
๐ด๓ ฃ๓ ฏ๓ ถ๓ ก๓ ฃ๓ ฟ Flag for Valle del Cauca (CO-VAC)
๐ด๓ ฃ๓ ฏ๓ ฑ๓ ต๓ ฉ๓ ฟ Flag for Quindรญo (CO-QUI)
๐ด๓ ฃ๓ ฏ๓ ฒ๓ ฉ๓ ณ๓ ฟ Flag for Risaralda (CO-RIS)
๐ด๓ ฃ๓ ฏ๓ ฃ๓ ต๓ ฎ๓ ฟ Flag for Cundinamarca (CO-CUN)
๐จ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ฃ๓ ฒ๓ ก๓ ฟ Flag for Alajuela (CR-A)
๐ด๓ ฃ๓ ฒ๓ ฐ๓ ฟ Flag for Puntarenas (CR-P)
๐ด๓ ฃ๓ ฏ๓ จ๓ ต๓ ฉ๓ ฟ Flag for Huila (CO-HUI)
๐ด๓ ฃ๓ ฏ๓ ถ๓ ก๓ ต๓ ฟ Flag for Vaupรฉs (CO-VAU)
๐ด๓ ฃ๓ ฏ๓ ฃ๓ ก๓ ต๓ ฟ Flag for Cauca (CO-CAU)
๐ด๓ ฃ๓ ต๓ ฐ๓ ท๓ ฟ Flag for Sancti Spรญritus (CU-07)
๐ด๓ ฃ๓ ฒ๓ ฌ๓ ฟ Flag for Limรณn (CR-L)
๐ด๓ ฃ๓ ฏ๓ ฎ๓ ณ๓ ก๓ ฟ Flag for Norte de Santander (CO-NSA)
๐ด๓ ฃ๓ ต๓ ฐ๓ ด๓ ฟ Flag for Matanzas (CU-04)
๐ด๓ ฃ๓ ฒ๓ ง๓ ฟ Flag for Guanacaste (CR-G)
๐ด๓ ฃ๓ ต๓ ฐ๓ ณ๓ ฟ Flag for Havana (CU-03)
๐ฉ๐พโโค๏ธโ๐โ๐จ Kiss - Woman: Medium-Dark Skin Tone, Man
๐ด๓ ฃ๓ ต๓ ฐ๓ ธ๓ ฟ Flag for Ciego de รvila (CU-08)
๐ด๓ ฃ๓ ฏ๓ ด๓ ฏ๓ ฌ๓ ฟ Flag for Tolima (CO-TOL)
๐ด๓ ฃ๓ ต๓ ฐ๓ น๓ ฟ Flag for Camagรผey (CU-09)
๐ด๓ ฃ๓ ต๓ ฐ๓ ถ๓ ฟ Flag for Cienfuegos (CU-06)
๐ด๓ ฃ๓ ฏ๓ ง๓ ต๓ ถ๓ ฟ Flag for Guaviare (CO-GUV)
๐ด๓ ข๓ บ๓ ฃ๓ น๓ ฟ Flag for Cayo (BZ-CY)
๐ด๓ ฅ๓ ด๓ ณ๓ ฎ๓ ฟ Flag for Southern Nations, Nationalities, and Peoples (ET-SN)
๐ด๓ ฃ๓ ต๓ ฐ๓ ฑ๓ ฟ Flag for Pinar del Rรญo (CU-01)
๐ด๓ ฃ๓ ฒ๓ ณ๓ ช๓ ฟ Flag for San Josรฉ (CR-SJ)
๐ด๓ ฃ๓ ฒ๓ ฃ๓ ฟ Flag for Cartago (CR-C)
๐ด๓ ฃ๓ ฏ๓ ฌ๓ ก๓ ง๓ ฟ Flag for La Guajira (CO-LAG)
๐ด๓ ฃ๓ น๓ ฐ๓ ฒ๓ ฟ Flag for Limassol (CY-02)
๐ด๓ ค๓ ฅ๓ ฎ๓ ฉ๓ ฟ Flag for Lower Saxony (DE-NI)
๐ด๓ ข๓ บ๓ ฏ๓ ท๓ ฟ Flag for Orange Walk (BZ-OW)
๐ด๓ ฃ๓ บ๓ ถ๓ ณ๓ ฟ Flag for Kraj Vysoฤina (CZ-63)
๐ด๓ ฃ๓ บ๓ ต๓ ฑ๓ ฟ Flag for Libereckรฝ kraj (CZ-51)
๐ด๓ ฃ๓ ต๓ ฑ๓ ฐ๓ ฟ Flag for Las Tunas (CU-10)
๐ด๓ ฃ๓ ต๓ ฑ๓ ณ๓ ฟ Flag for Santiago de Cuba (CU-13)
๐จ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ฃ๓ น๓ ฐ๓ ฑ๓ ฟ Flag for Nicosia (CY-01)
๐ด๓ ฃ๓ บ๓ ฒ๓ ฐ๓ ฟ Flag for Stลedoฤeskรฝ kraj (CZ-20)
๐ด๓ ฃ๓ ฆ๓ ถ๓ ซ๓ ฟ Flag for Vakaga (CF-VK)
๐ด๓ ฃ๓ บ๓ ต๓ ฒ๓ ฟ Flag for Krรกlovรฉhradeckรฝ kraj (CZ-52)
๐ด๓ ฃ๓ บ๓ ด๓ ฑ๓ ฟ Flag for Karlovarskรฝ kraj (CZ-41)
๐ด๓ ฃ๓ ต๓ ฑ๓ ต๓ ฟ Flag for Artemisa (CU-15)
๐ด๓ ฃ๓ น๓ ฐ๓ ด๓ ฟ Flag for Famagusta (CY-04)
๐ด๓ ค๓ ฅ๓ จ๓ ข๓ ฟ Flag for Bremen (DE-HB)
๐ด๓ ค๓ ฅ๓ จ๓ ฅ๓ ฟ Flag for Hesse (DE-HE)
๐ด๓ ฃ๓ ต๓ ฑ๓ ฑ๓ ฟ Flag for Holguรญn (CU-11)
๐จ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ฃ๓ บ๓ ธ๓ ฐ๓ ฟ Flag for Moravskoslezskรฝ kraj (CZ-80)
๐ด๓ ฃ๓ บ๓ ณ๓ ฑ๓ ฟ Flag for Jihoฤeskรฝ kraj (CZ-31)
๐ด๓ ฃ๓ จ๓ ง๓ ฌ๓ ฟ Flag for Glarus (CH-GL)
๐ด๓ ฃ๓ บ๓ ฑ๓ ฐ๓ ฟ Flag for Praha, Hlavnรญ meลกto (CZ-10)
๐ด๓ ฃ๓ น๓ ฐ๓ ณ๓ ฟ Flag for Larnaca (CY-03)
๐ด๓ ค๓ ฅ๓ จ๓ จ๓ ฟ Flag for Hamburg (DE-HH)
๐ด๓ ค๓ ฅ๓ ญ๓ ถ๓ ฟ Flag for Mecklenburg-Vorpommern (DE-MV)
๐ด๓ ฃ๓ ถ๓ ข๓ ฟ Flag for Barlavento Islands (CV-B)
๐ด๓ ฃ๓ ถ๓ ณ๓ ฟ Flag for Sotavento Islands (CV-S)
๐ด๓ ฃ๓ ต๓ ฑ๓ ถ๓ ฟ Flag for Mayabeque (CU-16)
๐ด๓ ฃ๓ บ๓ ท๓ ฑ๓ ฟ Flag for Olomouckรฝ kraj (CZ-71)
๐ด๓ ฃ๓ ต๓ ฑ๓ ด๓ ฟ Flag for Guantรกnamo (CU-14)
๐ด๓ ค๓ ฅ๓ ข๓ ข๓ ฟ Flag for Brandenburg (DE-BB)
๐ด๓ ฃ๓ บ๓ ณ๓ ฒ๓ ฟ Flag for Plzeลskรฝ kraj (CZ-32)
๐ด๓ ค๓ ช๓ ก๓ ณ๓ ฟ Flag for Ali Sabieh (DJ-AS)
๐ด๓ ค๓ ฅ๓ ฒ๓ ฐ๓ ฟ Flag for Rhineland-Palatinate (DE-RP)
๐ด๓ ค๓ ฅ๓ ณ๓ ฎ๓ ฟ Flag for Saxony (DE-SN)
๐ด๓ ค๓ ซ๓ ธ๓ ต๓ ฟ Flag for Zealand (DK-85)
๐ด๓ ค๓ ฅ๓ ณ๓ ด๓ ฟ Flag for Saxony-Anhalt (DE-ST)
๐ด๓ ค๓ บ๓ ฐ๓ ฒ๓ ฟ Flag for Chlef (DZ-02)
๐ด๓ ค๓ ญ๓ ฐ๓ ท๓ ฟ Flag for Saint Luke (DM-07)
๐ด๓ ค๓ ช๓ ก๓ ฒ๓ ฟ Flag for Arta (DJ-AR)
๐ด๓ ค๓ ซ๓ ธ๓ ด๓ ฟ Flag for Capital Region (DK-84)
๐ด๓ ค๓ ญ๓ ฑ๓ ฐ๓ ฟ Flag for Saint Paul (DM-10)
๐ด๓ ค๓ ฏ๓ ณ๓ ถ๓ ฟ Flag for Cibao Sur (DO-36)
๐ด๓ ค๓ ฏ๓ ณ๓ ธ๓ ฟ Flag for Enriquillo (DO-38)
๐ด๓ ค๓ ญ๓ ฐ๓ น๓ ฟ Flag for Saint Patrick (DM-09)
๐ด๓ ค๓ ฏ๓ ณ๓ ด๓ ฟ Flag for Cibao Noroeste (DO-34)
๐ด๓ ค๓ ฏ๓ ณ๓ ณ๓ ฟ Flag for Cibao Nordeste (DO-33)
๐ด๓ ค๓ ญ๓ ฐ๓ ต๓ ฟ Flag for Saint John (DM-05)
๐ด๓ ค๓ ฏ๓ ด๓ ฒ๓ ฟ Flag for Yuma (DO-42)
๐ด๓ ค๓ ช๓ ฏ๓ ข๓ ฟ Flag for Obock (DJ-OB)
๐ด๓ ค๓ ฅ๓ ด๓ จ๓ ฟ Flag for Thuringia (DE-TH)
๐ด๓ ค๓ ฏ๓ ด๓ ฐ๓ ฟ Flag for Ozama (DO-40)
๐ด๓ ค๓ ฅ๓ ณ๓ ฌ๓ ฟ Flag for Saarland (DE-SL)
๐ด๓ ค๓ ญ๓ ฐ๓ ด๓ ฟ Flag for Saint George (DM-04)
๐ด๓ ค๓ ญ๓ ฐ๓ ณ๓ ฟ Flag for Saint David (DM-03)
๐ด๓ ค๓ ญ๓ ฐ๓ ฒ๓ ฟ Flag for Saint Andrew (DM-02)
๐ด๓ ค๓ ช๓ ค๓ ฉ๓ ฟ Flag for Dikhil (DJ-DI)
๐ด๓ ค๓ ญ๓ ฐ๓ ธ๓ ฟ Flag for Saint Mark (DM-08)
๐ด๓ ค๓ ช๓ ด๓ ก๓ ฟ Flag for Tadjourah (DJ-TA)
๐ด๓ ค๓ ญ๓ ฑ๓ ฑ๓ ฟ Flag for Saint Peter (DM-11)
๐ด๓ ค๓ ฏ๓ ด๓ ฑ๓ ฟ Flag for Valdesia (DO-41)
๐ด๓ ค๓ ฏ๓ ณ๓ น๓ ฟ Flag for Higรผamo (DO-39)
๐ด๓ ค๓ บ๓ ฐ๓ ณ๓ ฟ Flag for Laghouat (DZ-03)
๐ด๓ ค๓ บ๓ ฒ๓ ธ๓ ฟ Flag for MโSila (DZ-28)
๐ด๓ ค๓ บ๓ ณ๓ ณ๓ ฟ Flag for Illizi (DZ-33)
๐ฉ๐ฟโ๐จ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ค๓ บ๓ ฑ๓ ต๓ ฟ Flag for Tizi Ouzou (DZ-15)
๐ด๓ ค๓ บ๓ ฑ๓ ด๓ ฟ Flag for Tiaret (DZ-14)
๐ด๓ ค๓ บ๓ ฑ๓ น๓ ฟ Flag for Sรฉtif (DZ-19)
๐ด๓ ค๓ บ๓ ฑ๓ ท๓ ฟ Flag for Djelfa (DZ-17)
๐ด๓ ค๓ บ๓ ฒ๓ ต๓ ฟ Flag for Constantine (DZ-25)
๐ด๓ ค๓ บ๓ ฒ๓ ด๓ ฟ Flag for Guelma (DZ-24)
๐ด๓ ค๓ บ๓ ด๓ ฒ๓ ฟ Flag for Tipasa (DZ-42)
๐ด๓ ค๓ บ๓ ฐ๓ ต๓ ฟ Flag for Batna (DZ-05)
๐ด๓ ค๓ บ๓ ฑ๓ ฒ๓ ฟ Flag for Tรฉbessa (DZ-12)
๐ด๓ ค๓ บ๓ ฐ๓ ท๓ ฟ Flag for Biskra (DZ-07)
๐ด๓ ค๓ บ๓ ณ๓ ฐ๓ ฟ Flag for Ouargla (DZ-30)
๐ด๓ ค๓ บ๓ ฒ๓ ฒ๓ ฟ Flag for Sidi Bel Abbรจs (DZ-22)
๐ด๓ ค๓ บ๓ ฑ๓ ฑ๓ ฟ Flag for Tamanghasset (DZ-11)
๐ด๓ ค๓ บ๓ ฒ๓ ถ๓ ฟ Flag for Mรฉdรฉa (DZ-26)
๐ด๓ ค๓ บ๓ ณ๓ ฒ๓ ฟ Flag for El Bayadh (DZ-32)
๐ด๓ ค๓ บ๓ ด๓ ฐ๓ ฟ Flag for Khenchela (DZ-40)
๐ด๓ ค๓ บ๓ ณ๓ ธ๓ ฟ Flag for Tissemsilt (DZ-38)
๐ด๓ ค๓ บ๓ ณ๓ น๓ ฟ Flag for El Oued (DZ-39)
๐ด๓ ค๓ บ๓ ด๓ ฑ๓ ฟ Flag for Souk Ahras (DZ-41)
๐ด๓ ค๓ บ๓ ฑ๓ ณ๓ ฟ Flag for Tlemcen (DZ-13)
๐ด๓ ค๓ บ๓ ฐ๓ ถ๓ ฟ Flag for Bรฉjaรฏa (DZ-06)
๐ด๓ ค๓ บ๓ ด๓ ณ๓ ฟ Flag for Mila (DZ-43)
๐ด๓ ค๓ บ๓ ฒ๓ ฐ๓ ฟ Flag for Saรฏda (DZ-20)
๐ด๓ ค๓ บ๓ ณ๓ ฑ๓ ฟ Flag for Oran (DZ-31)
๐ด๓ ค๓ บ๓ ฑ๓ ฐ๓ ฟ Flag for Bouira (DZ-10)
๐ด๓ ค๓ บ๓ ณ๓ ต๓ ฟ Flag for Boumerdรจs (DZ-35)
๐ด๓ ค๓ บ๓ ณ๓ ถ๓ ฟ Flag for El Tarf (DZ-36)
๐ด๓ ค๓ บ๓ ฑ๓ ถ๓ ฟ Flag for Algiers (DZ-16)
๐ด๓ ค๓ บ๓ ณ๓ ท๓ ฟ Flag for Tindouf (DZ-37)
๐ด๓ ค๓ บ๓ ฒ๓ ณ๓ ฟ Flag for Annaba (DZ-23)
๐ด๓ ค๓ บ๓ ฐ๓ น๓ ฟ Flag for Blida (DZ-09)
๐ด๓ ค๓ บ๓ ฐ๓ ด๓ ฟ Flag for Oum El Bouaghi (DZ-04)
๐ด๓ ค๓ บ๓ ฒ๓ ท๓ ฟ Flag for Mostaganem (DZ-27)
๐ด๓ ฅ๓ ฃ๓ จ๓ ฟ Flag for Chimborazo (EC-H)
๐ด๓ ค๓ บ๓ ด๓ ท๓ ฟ Flag for Ghardaรฏa (DZ-47)
๐ด๓ ฅ๓ ฃ๓ ข๓ ฟ Flag for Bolรญvar (EC-B)
๐ด๓ ฅ๓ ฃ๓ ฃ๓ ฟ Flag for Carchi (EC-C)
๐ด๓ ค๓ บ๓ ด๓ ด๓ ฟ Flag for Aรฏn Defla (DZ-44)
๐ด๓ ฃ๓ น๓ ฐ๓ ต๓ ฟ Flag for Paphos (CY-05)
๐ด๓ ค๓ บ๓ ด๓ ธ๓ ฟ Flag for Relizane (DZ-48)
๐ด๓ ฅ๓ ฃ๓ ณ๓ ฟ Flag for Morona-Santiago (EC-S)
๐ด๓ ฃ๓ จ๓ ช๓ ต๓ ฟ Flag for Jura (CH-JU)
๐ด๓ ฅ๓ ฃ๓ ณ๓ ฅ๓ ฟ Flag for Santa Elena (EC-SE)
๐ด๓ ฅ๓ ฅ๓ ต๓ ท๓ ฟ Flag for Lรครคne (EE-57)
๐ด๓ ฅ๓ ฃ๓ ฉ๓ ฟ Flag for Imbabura (EC-I)
๐ด๓ ค๓ บ๓ ด๓ ถ๓ ฟ Flag for Aรฏn Tรฉmouchent (DZ-46)
๐ด๓ ฅ๓ ฃ๓ ท๓ ฟ Flag for Galรกpagos (EC-W)
๐ด๓ ฅ๓ ฃ๓ ฎ๓ ฟ Flag for Napo (EC-N)
๐จ๐ฝโ๐ถ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ฅ๓ ฅ๓ ถ๓ ท๓ ฟ Flag for Pรคrnu (EE-67)
๐ด๓ ฅ๓ ฅ๓ ท๓ ธ๓ ฟ Flag for Tartu (EE-78)
๐ด๓ ฅ๓ ฃ๓ ก๓ ฟ Flag for Azuay (EC-A)
๐ด๓ ฅ๓ ฃ๓ ญ๓ ฟ Flag for Manabรญ (EC-M)
๐ด๓ ฅ๓ ฃ๓ ฏ๓ ฟ Flag for El Oro (EC-O)
๐ด๓ ฅ๓ ฃ๓ ฐ๓ ฟ Flag for Pichincha (EC-P)
๐ด๓ ฅ๓ ฅ๓ ท๓ ฐ๓ ฟ Flag for Rapla (EE-70)
๐ด๓ ฅ๓ ฅ๓ ท๓ ด๓ ฟ Flag for Saare (EE-74)
๐จ๐พโ๐ถ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ฅ๓ ฅ๓ ถ๓ ต๓ ฟ Flag for Pรตlva (EE-65)
๐ด๓ ฅ๓ ฃ๓ น๓ ฟ Flag for Pastaza (EC-Y)
๐ด๓ ฅ๓ ฃ๓ ง๓ ฟ Flag for Guayas (EC-G)
๐ด๓ ฅ๓ ฃ๓ ฒ๓ ฟ Flag for Los Rรญos (EC-R)
๐ด๓ ฅ๓ ฃ๓ ต๓ ฟ Flag for Sucumbรญos (EC-U)
๐ด๓ ฅ๓ ฅ๓ ด๓ น๓ ฟ Flag for Jรตgeva (EE-49)
๐ด๓ ฅ๓ ฅ๓ ธ๓ ฒ๓ ฟ Flag for Valga (EE-82)
๐ด๓ ฅ๓ ฃ๓ ฌ๓ ฟ Flag for Loja (EC-L)
๐ด๓ ฅ๓ ฃ๓ ค๓ ฟ Flag for Orellana (EC-D)
๐จ๐ผโ๐ถ๐ผโ๐ฆ๐ผ Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
๐ด๓ ค๓ บ๓ ด๓ ต๓ ฟ Flag for Naama (DZ-45)
๐ด๓ ฅ๓ ฅ๓ ต๓ ฑ๓ ฟ Flag for Jรคrva (EE-51)
๐ด๓ ฅ๓ ง๓ ณ๓ ฉ๓ ฎ๓ ฟ Flag for North Sinai (EG-SIN)
๐ด๓ ฅ๓ ง๓ ช๓ ณ๓ ฟ Flag for South Sinai (EG-JS)
๐ด๓ ฅ๓ ง๓ ซ๓ ฎ๓ ฟ Flag for Qena (EG-KN)
๐ด๓ ฅ๓ ฅ๓ ธ๓ ด๓ ฟ Flag for Viljandi (EE-84)
๐ด๓ ฅ๓ ง๓ ฉ๓ ณ๓ ฟ Flag for Ismailia (EG-IS)
๐ด๓ ฅ๓ ง๓ ก๓ ณ๓ ฎ๓ ฟ Flag for Aswan (EG-ASN)
๐ด๓ ฅ๓ ง๓ ค๓ ซ๓ ฟ Flag for Dakahlia (EG-DK)
๐ด๓ ฅ๓ ง๓ ง๓ จ๓ ฟ Flag for Gharbia (EG-GH)
๐ด๓ ฅ๓ ง๓ ข๓ จ๓ ฟ Flag for Beheira (EG-BH)
๐ด๓ ฅ๓ ฅ๓ ธ๓ ถ๓ ฟ Flag for Vรตru (EE-86)
๐ด๓ ฅ๓ ง๓ ก๓ ณ๓ ด๓ ฟ Flag for Asyut (EG-AST)
๐ด๓ ฅ๓ ง๓ ซ๓ ข๓ ฟ Flag for Qalyubia (EG-KB)
๐ด๓ ฅ๓ ง๓ ง๓ บ๓ ฟ Flag for Giza (EG-GZ)
๐จ๐ฟโ๐ถ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ฅ๓ ฒ๓ ก๓ ฎ๓ ฟ Flag for Anseba (ER-AN)
๐ด๓ ฅ๓ ง๓ ซ๓ ฆ๓ ณ๓ ฟ Flag for Kafr el-Sheikh (EG-KFS)
๐ด๓ ฅ๓ ง๓ ญ๓ ด๓ ฟ Flag for Matrouh (EG-MT)
๐ด๓ ฅ๓ ฒ๓ ง๓ ข๓ ฟ Flag for Gash-Barka (ER-GB)
๐ด๓ ฅ๓ ง๓ ญ๓ ฎ๓ ฟ Flag for Minya (EG-MN)
๐ด๓ ฅ๓ ง๓ ก๓ ฌ๓ ธ๓ ฟ Flag for Alexandria (EG-ALX)
๐ด๓ ฅ๓ ฒ๓ ค๓ ซ๓ ฟ Flag for Southern Red Sea (ER-DK)
๐ด๓ ฅ๓ ง๓ ฐ๓ ด๓ ณ๓ ฟ Flag for Port Said (EG-PTS)
๐ด๓ ฅ๓ ง๓ ณ๓ จ๓ ง๓ ฟ Flag for Sohag (EG-SHG)
๐ด๓ ฅ๓ ง๓ ท๓ ก๓ ค๓ ฟ Flag for New Valley (EG-WAD)
๐ด๓ ฅ๓ ฒ๓ ณ๓ ซ๓ ฟ Flag for Northern Red Sea (ER-SK)
๐ด๓ ฅ๓ ง๓ ณ๓ ต๓ บ๓ ฟ Flag for Suez (EG-SUZ)
๐ด๓ ฅ๓ ง๓ ญ๓ ฎ๓ ฆ๓ ฟ Flag for Monufia (EG-MNF)
๐ด๓ ฅ๓ ง๓ ฌ๓ ธ๓ ฟ Flag for Luxor (EG-LX)
๐ด๓ ฅ๓ ฒ๓ ญ๓ ก๓ ฟ Flag for Maekel (ER-MA)
๐ด๓ ฅ๓ ง๓ ค๓ ด๓ ฟ Flag for Damietta (EG-DT)
๐ด๓ ฅ๓ ง๓ ณ๓ จ๓ ฒ๓ ฟ Flag for Al Sharqia (EG-SHR)
๐ด๓ ฅ๓ ง๓ ฆ๓ น๓ ญ๓ ฟ Flag for Faiyum (EG-FYM)
๐ด๓ ฅ๓ ฒ๓ ค๓ ต๓ ฟ Flag for Debub (ER-DU)
๐ด๓ ฅ๓ ณ๓ ก๓ ฒ๓ ฟ Flag for Aragon (ES-AR)
๐ด๓ ฃ๓ ฎ๓ ณ๓ ด๓ ฟ Flag for Anhui (CN-34)
๐ด๓ ค๓ ซ๓ ธ๓ ฑ๓ ฟ Flag for Northern Denmark (DK-81)
๐จ๐ปโ๐ถ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
๐จ๐ผโ๐ถ๐ผโ๐ง๐ผ Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐จ๐ฝโ๐ถ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ฅ๓ ด๓ ด๓ ฉ๓ ฟ Flag for Tigray (ET-TI)
๐ด๓ ฃ๓ ฎ๓ ฒ๓ ฑ๓ ฟ Flag for Liaoning (CN-21)
๐ด๓ ฅ๓ ด๓ ง๓ ก๓ ฟ Flag for Gambela (ET-GA)
๐ด๓ ฅ๓ ณ๓ ญ๓ ฌ๓ ฟ Flag for Melilla (ES-ML)
๐ด๓ ฅ๓ ณ๓ ญ๓ ฃ๓ ฟ Flag for Murcia Region (ES-MC)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ฐ๓ ฟ Flag for Lapland (FI-10)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ท๓ ฟ Flag for Central Ostrobothnia (FI-07)
๐ด๓ ฅ๓ ด๓ ก๓ ญ๓ ฟ Flag for Amhara (ET-AM)
๐ด๓ ฅ๓ ด๓ ข๓ ฅ๓ ฟ Flag for Benishangul-Gumuz (ET-BE)
๐ด๓ ฅ๓ ด๓ ฏ๓ ฒ๓ ฟ Flag for Oromia (ET-OR)
๐ด๓ ฅ๓ ณ๓ ฒ๓ ฉ๓ ฟ Flag for La Rioja (ES-RI)
๐ด๓ ค๓ ช๓ ค๓ ช๓ ฟ Flag for Djibouti (DJ-DJ)
๐ด๓ ฅ๓ ณ๓ ญ๓ ค๓ ฟ Flag for Madrid Autonomous Community (ES-MD)
๐ด๓ ฅ๓ ด๓ ค๓ ค๓ ฟ Flag for Dire Dawa (ET-DD)
๐ด๓ ค๓ บ๓ ฒ๓ น๓ ฟ Flag for Mascara (DZ-29)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ต๓ ฟ Flag for Kainuu (FI-05)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ น๓ ฟ Flag for Kymenlaakso (FI-09)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ณ๓ ฟ Flag for Southern Ostrobothnia (FI-03)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ฑ๓ ฟ Flag for Pirkanmaa (FI-11)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ด๓ ฟ Flag for Southern Savonia (FI-04)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ณ๓ ฟ Flag for North Karelia (FI-13)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ฒ๓ ฟ Flag for South Karelia (FI-02)
๐ด๓ ฅ๓ ด๓ จ๓ ก๓ ฟ Flag for Harari (ET-HA)
๐ด๓ ฃ๓ บ๓ ท๓ ฒ๓ ฟ Flag for Zlรญnskรฝ kraj (CZ-72)
๐ด๓ ฅ๓ ด๓ ณ๓ ฏ๓ ฟ Flag for Somali (ET-SO)
๐ด๓ ฅ๓ ณ๓ ฃ๓ ด๓ ฟ Flag for Catalonia (ES-CT)
๐ด๓ ฆ๓ ญ๓ ซ๓ ณ๓ ก๓ ฟ Flag for Kosrae (FM-KSA)
๐ด๓ ฆ๓ ฒ๓ ฎ๓ ฃ๓ ฟ Flag for New Caledonia (FR-NC)
๐ด๓ ฆ๓ ฒ๓ ฏ๓ ฃ๓ ฃ๓ ฟ Flag for Occitanie (FR-OCC)
๐ด๓ ฆ๓ ฒ๓ ฐ๓ ก๓ ฃ๓ ฟ Flag for Provence-Alpes-Cรดte-dโAzur (FR-PAC)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ต๓ ฟ Flag for Northern Savonia (FI-15)
๐ด๓ ฆ๓ ญ๓ ด๓ ฒ๓ ซ๓ ฟ Flag for Chuuk (FM-TRK)
๐ด๓ ฆ๓ ฒ๓ ข๓ ฆ๓ ฃ๓ ฟ Flag for Bourgogne-Franche-Comtรฉ (FR-BFC)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ด๓ ฟ Flag for Northern Ostrobothnia (FI-14)
๐ด๓ ฆ๓ ช๓ ฒ๓ ฟ Flag for Rotuma (FJ-R)
๐ด๓ ฆ๓ ฒ๓ ญ๓ ก๓ น๓ ฟ Flag for Mayotte (FR-MAY)
๐ด๓ ฆ๓ ฒ๓ ฎ๓ ก๓ ฑ๓ ฟ Flag for Nouvelle-Aquitaine (FR-NAQ)
๐ด๓ ฆ๓ ช๓ ฃ๓ ฟ Flag for Central (FJ-C)
๐ด๓ ฆ๓ ฒ๓ ง๓ ฅ๓ ณ๓ ฟ Flag for Grand-Est (FR-GES)
๐ด๓ ฆ๓ ช๓ ฎ๓ ฟ Flag for Northern (FJ-N)
๐ด๓ ฆ๓ ฒ๓ ง๓ ต๓ ก๓ ฟ Flag for Guadeloupe (FR-GUA)
๐ด๓ ฆ๓ ญ๓ น๓ ก๓ ฐ๓ ฟ Flag for Yap (FM-YAP)
๐ด๓ ฆ๓ ฒ๓ ข๓ ฒ๓ ฅ๓ ฟ Flag for Bretagne (FR-BRE)
๐ด๓ ฆ๓ ฒ๓ ฐ๓ ฆ๓ ฟ Flag for French Polynesia (FR-PF)
๐ด๓ ฆ๓ ฒ๓ ฎ๓ ฏ๓ ฒ๓ ฟ Flag for Normandie (FR-NOR)
๐ด๓ ฆ๓ ฒ๓ ง๓ ฆ๓ ฟ Flag for French Guiana (FR-GF)
๐ด๓ ฆ๓ ฒ๓ ฃ๓ ถ๓ ฌ๓ ฟ Flag for Centre-Val de Loire (FR-CVL)
๐ด๓ ฆ๓ ฒ๓ ฃ๓ ฐ๓ ฟ Flag for Clipperton Island (FR-CP)
๐ด๓ ฆ๓ ฒ๓ ญ๓ ฆ๓ ฟ Flag for St. Martin (FR-MF)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ถ๓ ฟ Flag for Pรคijรคnne Tavastia (FI-16)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ น๓ ฟ Flag for Southwest Finland (FI-19)
๐ด๓ ฆ๓ ฒ๓ ฌ๓ ฒ๓ ฅ๓ ฟ Flag for La Rรฉunion (FR-LRE)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ท๓ ฟ Flag for Satakunta (FI-17)
๐ด๓ ง๓ ฅ๓ ณ๓ ซ๓ ฟ Flag for Shida Kartli (GE-SK)
๐ด๓ ง๓ ก๓ ณ๓ ฟ Flag for Moyen-Ogoouรฉ (GA-3)
๐จ๐ฟโ๐ถ๐ฟโ๐ง๐ฟ Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ง๓ ค๓ ฐ๓ ณ๓ ฟ Flag for Saint George (GD-03)
๐ด๓ ง๓ ก๓ ต๓ ฟ Flag for Nyanga (GA-5)
๐ด๓ ง๓ ก๓ ถ๓ ฟ Flag for Ogoouรฉ-Ivindo (GA-6)
๐ด๓ ง๓ จ๓ ข๓ ก๓ ฟ Flag for Brong-Ahafo (GH-BA)
๐ด๓ ง๓ ก๓ ฒ๓ ฟ Flag for Haut-Ogoouรฉ (GA-2)
๐ด๓ ง๓ ค๓ ฐ๓ ฑ๓ ฟ Flag for Saint Andrew (GD-01)
๐ด๓ ง๓ ค๓ ฐ๓ ถ๓ ฟ Flag for Saint Patrick (GD-06)
๐ด๓ ฅ๓ ณ๓ ง๓ ก๓ ฟ Flag for Galicia (ES-GA)
๐ด๓ ฆ๓ ฒ๓ ท๓ ฆ๓ ฟ Flag for Wallis & Futuna (FR-WF)
๐จ๐ปโ๐ถ๐ปโ๐ถ๐ป Family - Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ฆ๓ ฒ๓ ฐ๓ ญ๓ ฟ Flag for St. Pierre & Miquelon (FR-PM)
๐ด๓ ง๓ ค๓ ฐ๓ ด๓ ฟ Flag for Saint John (GD-04)
๐ด๓ ง๓ ฅ๓ ด๓ ข๓ ฟ Flag for Tbilisi (GE-TB)
๐จ๐ผโ๐ถ๐ผโ๐ถ๐ผ Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
๐ด๓ ง๓ ค๓ ฐ๓ ฒ๓ ฟ Flag for Saint David (GD-02)
๐ด๓ ง๓ ฅ๓ ง๓ ต๓ ฟ Flag for Guria (GE-GU)
๐ด๓ ง๓ ก๓ น๓ ฟ Flag for Woleu-Ntem (GA-9)
๐ด๓ ง๓ ฅ๓ ฒ๓ ฌ๓ ฟ Flag for Racha-Lechkhumi and Kvemo Svaneti (GE-RL)
๐ด๓ ง๓ ฅ๓ ณ๓ ช๓ ฟ Flag for Samtskhe-Javakheti (GE-SJ)
๐ด๓ ง๓ ฅ๓ ญ๓ ญ๓ ฟ Flag for Mtskheta-Mtianeti (GE-MM)
๐ด๓ ง๓ ฅ๓ ฉ๓ ญ๓ ฟ Flag for Imereti (GE-IM)
๐ด๓ ง๓ ก๓ ธ๓ ฟ Flag for Ogoouรฉ-Maritime (GA-8)
๐ด๓ ฃ๓ ฎ๓ ถ๓ ฑ๓ ฟ Flag for Shaanxi (CN-61)
๐ด๓ ง๓ จ๓ ก๓ ก๓ ฟ Flag for Greater Accra (GH-AA)
๐ด๓ ฃ๓ บ๓ ถ๓ ด๓ ฟ Flag for Jihomoravskรฝ kraj (CZ-64)
๐ด๓ ง๓ ฅ๓ ก๓ ช๓ ฟ Flag for Adjara (GE-AJ)
๐ด๓ ง๓ ฅ๓ ณ๓ บ๓ ฟ Flag for Samegrelo-Zemo Svaneti (GE-SZ)
๐ด๓ ง๓ ก๓ ฑ๓ ฟ Flag for Estuaire (GA-1)
๐ด๓ ง๓ ก๓ ท๓ ฟ Flag for Ogoouรฉ-Lolo (GA-7)
๐ด๓ ง๓ ฎ๓ ค๓ ฟ Flag for Kindia Region (GN-D)
๐ด๓ ง๓ ฎ๓ ญ๓ ฟ Flag for Mamou Region (GN-M)
๐จ๐ฝโ๐ถ๐ฝโ๐ถ๐ฝ Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ง๓ ฌ๓ ฑ๓ ก๓ ฟ Flag for Qaasuitsup (GL-QA)
๐ด๓ ง๓ ญ๓ ฎ๓ ฟ Flag for North Bank Division (GM-N)
๐ด๓ ง๓ ฌ๓ ณ๓ ญ๓ ฟ Flag for Sermersooq (GL-SM)
๐ด๓ ง๓ จ๓ ฎ๓ ฐ๓ ฟ Flag for Northern (GH-NP)
๐ด๓ ง๓ ฒ๓ ฆ๓ ฟ Flag for Ionian Islands (GR-F)
๐ด๓ ง๓ ฒ๓ จ๓ ฟ Flag for Central Greece (GR-H)
๐ด๓ ง๓ จ๓ ฃ๓ ฐ๓ ฟ Flag for Central (GH-CP)
๐ด๓ ง๓ ฎ๓ ซ๓ ฟ Flag for Kankan Region (GN-K)
๐ด๓ ง๓ ฒ๓ ฌ๓ ฟ Flag for South Aegean (GR-L)
๐ด๓ ง๓ ฒ๓ ฉ๓ ฟ Flag for Attica (GR-I)
๐ด๓ ง๓ ญ๓ ต๓ ฟ Flag for Upper River Division (GM-U)
๐ด๓ ง๓ จ๓ ฅ๓ ฐ๓ ฟ Flag for Eastern (GH-EP)
๐ด๓ ง๓ ฎ๓ ฎ๓ ฟ Flag for Nzรฉrรฉkorรฉ Region (GN-N)
๐ด๓ ง๓ จ๓ ท๓ ฐ๓ ฟ Flag for Western (GH-WP)
๐ด๓ ง๓ ฒ๓ ฃ๓ ฟ Flag for West Macedonia (GR-C)
๐ด๓ ง๓ ฑ๓ ฃ๓ ฟ Flag for Rรญo Muni (GQ-C)
๐ด๓ ง๓ ญ๓ ฌ๓ ฟ Flag for Lower River Division (GM-L)
๐ด๓ ง๓ จ๓ ต๓ ฅ๓ ฟ Flag for Upper East (GH-UE)
๐ด๓ ง๓ ฎ๓ ฃ๓ ฟ Flag for Conakry (GN-C)
๐ด๓ ง๓ ฒ๓ ข๓ ฟ Flag for Central Macedonia (GR-B)
๐ด๓ ง๓ ญ๓ ญ๓ ฟ Flag for Central River Division (GM-M)
๐ด๓ ง๓ จ๓ ต๓ ท๓ ฟ Flag for Upper West (GH-UW)
๐ด๓ ง๓ ฌ๓ ซ๓ ต๓ ฟ Flag for Kujalleq (GL-KU)
๐ด๓ ง๓ ฎ๓ ข๓ ฟ Flag for Bokรฉ Region (GN-B)
๐ด๓ ง๓ ฌ๓ ฑ๓ ฅ๓ ฟ Flag for Qeqqata (GL-QE)
๐ด๓ ง๓ ฒ๓ ค๓ ฟ Flag for Epirus (GR-D)
๐ด๓ ง๓ จ๓ ก๓ จ๓ ฟ Flag for Ashanti (GH-AH)
๐ด๓ ง๓ จ๓ ด๓ ถ๓ ฟ Flag for Volta (GH-TV)
๐ด๓ ง๓ ฒ๓ ถ๓ น๓ ฟ Flag for Mount Athos (GR-69)
๐ด๓ ง๓ ฑ๓ ฉ๓ ฟ Flag for Insular (GQ-I)
๐ด๓ ง๓ ญ๓ ท๓ ฟ Flag for West Coast Division (GM-W)
๐ด๓ ง๓ ญ๓ ข๓ ฟ Flag for Banjul (GM-B)
๐ด๓ ง๓ ฎ๓ ฌ๓ ฟ Flag for Labรฉ Region (GN-L)
๐ด๓ ง๓ ฒ๓ ฅ๓ ฟ Flag for Thessaly (GR-E)
๐ด๓ ง๓ ฎ๓ ฆ๓ ฟ Flag for Faranah Region (GN-F)
๐ด๓ ง๓ น๓ ฃ๓ ต๓ ฟ Flag for Cuyuni-Mazaruni (GY-CU)
๐ด๓ จ๓ ฎ๓ ก๓ ด๓ ฟ Flag for Atlรกntida (HN-AT)
๐จ๐พโ๐ถ๐พโ๐ถ๐พ Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ง๓ ด๓ จ๓ ต๓ ฟ Flag for Huehuetenango (GT-HU)
๐ด๓ ง๓ ด๓ ก๓ ถ๓ ฟ Flag for Alta Verapaz (GT-AV)
๐ด๓ ง๓ ด๓ ฐ๓ ฒ๓ ฟ Flag for El Progreso (GT-PR)
๐ด๓ ง๓ ท๓ ฎ๓ ฟ Flag for Norte (GW-N)
๐ด๓ ง๓ ด๓ ณ๓ ต๓ ฟ Flag for Suchitepรฉquez (GT-SU)
๐ด๓ ง๓ น๓ ฐ๓ ญ๓ ฟ Flag for Pomeroon-Supenaam (GY-PM)
๐ด๓ ง๓ ด๓ ฉ๓ บ๓ ฟ Flag for Izabal (GT-IZ)
๐ด๓ ง๓ น๓ ฐ๓ ด๓ ฟ Flag for Potaro-Siparuni (GY-PT)
๐ด๓ ง๓ ด๓ ฑ๓ บ๓ ฟ Flag for Quetzaltenango (GT-QZ)
๐ด๓ ง๓ ด๓ ฃ๓ ญ๓ ฟ Flag for Chimaltenango (GT-CM)
๐ด๓ ฅ๓ ด๓ ก๓ ก๓ ฟ Flag for Addis Ababa (ET-AA)
๐ด๓ ง๓ ท๓ ข๓ ณ๓ ฟ Flag for Bissau (GW-BS)
๐ด๓ ง๓ ด๓ ฑ๓ ฃ๓ ฟ Flag for Quichรฉ (GT-QC)
๐ด๓ ง๓ ด๓ ด๓ ฏ๓ ฟ Flag for Totonicapรกn (GT-TO)
๐ด๓ ง๓ น๓ ข๓ ก๓ ฟ Flag for Barima-Waini (GY-BA)
๐ด๓ ง๓ น๓ ฅ๓ ณ๓ ฟ Flag for Essequibo Islands-West Demerara (GY-ES)
๐จ๐ฟโ๐ถ๐ฟโ๐ถ๐ฟ Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ จ๓ ฎ๓ ฃ๓ จ๓ ฟ Flag for Choluteca (HN-CH)
๐ด๓ ง๓ น๓ ค๓ ฅ๓ ฟ Flag for Demerara-Mahaica (GY-DE)
๐จ๐ปโ๐จ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ง๓ ด๓ ณ๓ ก๓ ฟ Flag for Sacatepรฉquez (GT-SA)
๐ด๓ ง๓ ด๓ ช๓ ต๓ ฟ Flag for Jutiapa (GT-JU)
๐ด๓ ง๓ ด๓ ฃ๓ ฑ๓ ฟ Flag for Chiquimula (GT-CQ)
๐ด๓ ง๓ ด๓ ข๓ ถ๓ ฟ Flag for Baja Verapaz (GT-BV)
๐ด๓ ง๓ ด๓ ฅ๓ ณ๓ ฟ Flag for Escuintla (GT-ES)
๐ด๓ ง๓ ด๓ บ๓ ก๓ ฟ Flag for Zacapa (GT-ZA)
๐ด๓ ง๓ ท๓ ณ๓ ฟ Flag for Sul (GW-S)
๐ด๓ ง๓ ท๓ ฌ๓ ฟ Flag for Leste (GW-L)
๐ด๓ ง๓ ด๓ ช๓ ก๓ ฟ Flag for Jalapa (GT-JA)
๐ด๓ ง๓ ด๓ ฐ๓ ฅ๓ ฟ Flag for Petรฉn (GT-PE)
๐ด๓ ง๓ ด๓ ณ๓ ฏ๓ ฟ Flag for Sololรก (GT-SO)
๐ด๓ จ๓ ฎ๓ ฃ๓ ญ๓ ฟ Flag for Comayagua (HN-CM)
๐ด๓ จ๓ ฒ๓ ฐ๓ ถ๓ ฟ Flag for Koprivnica-Kriลพevci (HR-06)
๐ด๓ จ๓ ฎ๓ ฃ๓ ฐ๓ ฟ Flag for Copรกn (HN-CP)
๐ด๓ จ๓ ฎ๓ ฉ๓ ข๓ ฟ Flag for Bay Islands (HN-IB)
๐ด๓ จ๓ ฒ๓ ฐ๓ น๓ ฟ Flag for Lika-Senj (HR-09)
๐ด๓ จ๓ ฎ๓ ณ๓ ข๓ ฟ Flag for Santa Bรกrbara (HN-SB)
๐ด๓ จ๓ ฎ๓ ฉ๓ ฎ๓ ฟ Flag for Intibucรก (HN-IN)
๐ด๓ จ๓ ฎ๓ ฆ๓ ญ๓ ฟ Flag for Francisco Morazรกn (HN-FM)
๐ด๓ จ๓ ฒ๓ ฐ๓ ฑ๓ ฟ Flag for Zagreb County (HR-01)
๐ด๓ จ๓ ฎ๓ ฃ๓ ฌ๓ ฟ Flag for Colรณn (HN-CL)
๐ด๓ จ๓ ด๓ ฃ๓ ฅ๓ ฟ Flag for Centre (HT-CE)
๐ด๓ จ๓ ฒ๓ ฐ๓ ธ๓ ฟ Flag for Primorje-Gorski Kotar (HR-08)
๐ด๓ จ๓ ฎ๓ ฌ๓ ฅ๓ ฟ Flag for Lempira (HN-LE)
๐ด๓ จ๓ ฒ๓ ฑ๓ ด๓ ฟ Flag for Osijek-Baranja (HR-14)
๐ด๓ จ๓ ฒ๓ ฑ๓ ฒ๓ ฟ Flag for Brod-Posavina (HR-12)
๐ด๓ จ๓ ฒ๓ ฑ๓ ท๓ ฟ Flag for Split-Dalmatia (HR-17)
๐ด๓ จ๓ ฎ๓ ฏ๓ ฌ๓ ฟ Flag for Olancho (HN-OL)
๐ด๓ จ๓ ฎ๓ ฌ๓ ฐ๓ ฟ Flag for La Paz (HN-LP)
๐ด๓ จ๓ ฒ๓ ฒ๓ ฐ๓ ฟ Flag for Meฤimurje (HR-20)
๐ด๓ จ๓ ฎ๓ ฅ๓ ฐ๓ ฟ Flag for El Paraรญso (HN-EP)
๐ด๓ จ๓ ฒ๓ ฒ๓ ฑ๓ ฟ Flag for Zagreb (HR-21)
๐ด๓ จ๓ ฒ๓ ฑ๓ ต๓ ฟ Flag for ล ibenik-Knin (HR-15)
๐ด๓ ฅ๓ ฅ๓ ด๓ ด๓ ฟ Flag for Ida-Viru (EE-44)
๐ด๓ จ๓ ฎ๓ ฃ๓ ฒ๓ ฟ Flag for Cortรฉs (HN-CR)
๐ด๓ จ๓ ฒ๓ ฐ๓ ณ๓ ฟ Flag for Sisak-Moslavina (HR-03)
๐ด๓ จ๓ ฒ๓ ฑ๓ ณ๓ ฟ Flag for Zadar (HR-13)
๐ด๓ จ๓ ฒ๓ ฑ๓ ธ๓ ฟ Flag for Istria (HR-18)
๐ด๓ จ๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for Krapina-Zagorje (HR-02)
๐ด๓ จ๓ ฒ๓ ฑ๓ ถ๓ ฟ Flag for Vukovar-Syrmia (HR-16)
๐ด๓ จ๓ ฎ๓ น๓ ฏ๓ ฟ Flag for Yoro (HN-YO)
๐ด๓ จ๓ ด๓ ก๓ ฒ๓ ฟ Flag for Artibonite (HT-AR)
๐ด๓ จ๓ ฎ๓ ง๓ ค๓ ฟ Flag for Gracias a Dios (HN-GD)
๐ด๓ จ๓ ฎ๓ ถ๓ ก๓ ฟ Flag for Valle (HN-VA)
๐ด๓ ค๓ บ๓ ฑ๓ ธ๓ ฟ Flag for Jijel (DZ-18)
๐ด๓ จ๓ ฒ๓ ฑ๓ น๓ ฟ Flag for Dubrovnik-Neretva (HR-19)
๐ด๓ จ๓ ฒ๓ ฑ๓ ฑ๓ ฟ Flag for Poลพega-Slavonia (HR-11)
๐ด๓ จ๓ ฒ๓ ฐ๓ ท๓ ฟ Flag for Bjelovar-Bilogora (HR-07)
๐ด๓ จ๓ ฎ๓ ฏ๓ ฃ๓ ฟ Flag for Ocotepeque (HN-OC)
๐ด๓ จ๓ ต๓ ข๓ ต๓ ฟ Flag for Budapest (HU-BU)
๐ด๓ จ๓ ต๓ จ๓ ถ๓ ฟ Flag for Hรณdmezลvรกsรกrhely (HU-HV)
๐ด๓ จ๓ ต๓ ฆ๓ ฅ๓ ฟ Flag for Fejรฉr (HU-FE)
๐ด๓ จ๓ ต๓ ข๓ ก๓ ฟ Flag for Baranya (HU-BA)
๐ด๓ จ๓ ต๓ ณ๓ ฆ๓ ฟ Flag for Szรฉkesfehรฉrvรกr (HU-SF)
๐ด๓ จ๓ ต๓ ข๓ บ๓ ฟ Flag for Borsod-Abaรบj-Zemplรฉn (HU-BZ)
๐ด๓ จ๓ ต๓ ฃ๓ ณ๓ ฟ Flag for Csongrรกd (HU-CS)
๐ด๓ จ๓ ต๓ ณ๓ ฎ๓ ฟ Flag for Sopron (HU-SN)
๐ด๓ จ๓ ต๓ ค๓ ต๓ ฟ Flag for Dunaรบjvรกros (HU-DU)
๐ด๓ จ๓ ต๓ ซ๓ ถ๓ ฟ Flag for Kaposvรกr (HU-KV)
๐ด๓ จ๓ ต๓ ฎ๓ น๓ ฟ Flag for Nyรญregyhรกza (HU-NY)
๐ด๓ จ๓ ต๓ จ๓ ข๓ ฟ Flag for Hajdรบ-Bihar (HU-HB)
๐ด๓ จ๓ ด๓ ฏ๓ ต๓ ฟ Flag for Ouest (HT-OU)
๐ด๓ จ๓ ต๓ ณ๓ ค๓ ฟ Flag for Szeged (HU-SD)
๐ด๓ จ๓ ต๓ ฐ๓ ฅ๓ ฟ Flag for Pest (HU-PE)
๐ด๓ จ๓ ต๓ ซ๓ ฅ๓ ฟ Flag for Komรกrom-Esztergom (HU-KE)
๐ด๓ จ๓ ต๓ ฎ๓ ซ๓ ฟ Flag for Nagykanizsa (HU-NK)
๐ด๓ จ๓ ด๓ ง๓ ก๓ ฟ Flag for GrandโAnse (HT-GA)
๐ด๓ จ๓ ต๓ ข๓ ฃ๓ ฟ Flag for Bรฉkรฉscsaba (HU-BC)
๐ด๓ จ๓ ด๓ ณ๓ ค๓ ฟ Flag for Sud (HT-SD)
๐ด๓ จ๓ ด๓ ฎ๓ ฏ๓ ฟ Flag for Nord-Ouest (HT-NO)
๐ด๓ จ๓ ต๓ จ๓ ฅ๓ ฟ Flag for Heves (HU-HE)
๐ด๓ จ๓ ต๓ ข๓ ซ๓ ฟ Flag for Bรกcs-Kiskun (HU-BK)
๐ด๓ จ๓ ต๓ ญ๓ ฉ๓ ฟ Flag for Miskolc (HU-MI)
๐ด๓ จ๓ ต๓ ฅ๓ ฒ๓ ฟ Flag for รrd (HU-ER)
๐จ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ จ๓ ด๓ ฎ๓ ฉ๓ ฟ Flag for Nippes (HT-NI)
๐ด๓ จ๓ ต๓ ณ๓ ซ๓ ฟ Flag for Szolnok (HU-SK)
๐ด๓ จ๓ ด๓ ฎ๓ ค๓ ฟ Flag for Nord (HT-ND)
๐ด๓ จ๓ ด๓ ณ๓ ฅ๓ ฟ Flag for Sud-Est (HT-SE)
๐ด๓ จ๓ ต๓ ช๓ ฎ๓ ฟ Flag for Jรกsz-Nagykun-Szolnok (HU-JN)
๐ด๓ จ๓ ต๓ ฐ๓ ณ๓ ฟ Flag for Pรฉcs (HU-PS)
๐ด๓ จ๓ ต๓ ซ๓ ญ๓ ฟ Flag for Kecskemรฉt (HU-KM)
๐ด๓ จ๓ ต๓ ค๓ ฅ๓ ฟ Flag for Debrecen (HU-DE)
๐ด๓ จ๓ ต๓ ข๓ ฅ๓ ฟ Flag for Bรฉkรฉs (HU-BE)
๐ด๓ จ๓ ต๓ ฎ๓ ฏ๓ ฟ Flag for Nรณgrรกd (HU-NO)
๐ด๓ จ๓ ต๓ ณ๓ จ๓ ฟ Flag for Szombathely (HU-SH)
๐ด๓ จ๓ ต๓ ง๓ น๓ ฟ Flag for Gyลr (HU-GY)
๐ด๓ ฉ๓ ค๓ ฎ๓ ต๓ ฟ Flag for Lesser Sunda Islands (ID-NU)
๐ด๓ จ๓ ต๓ ด๓ ข๓ ฟ Flag for Tatabรกnya (HU-TB)
๐ด๓ ฉ๓ ค๓ ช๓ ท๓ ฟ Flag for Java (ID-JW)
๐ด๓ ฉ๓ ฎ๓ ฃ๓ จ๓ ฟ Flag for Chandigarh (IN-CH)
๐ด๓ ฉ๓ ฎ๓ ง๓ ช๓ ฟ Flag for Gujarat (IN-GJ)
๐ด๓ ฉ๓ ฅ๓ ฌ๓ ฟ Flag for Leinster (IE-L)
๐ด๓ จ๓ ต๓ บ๓ ก๓ ฟ Flag for Zala (HU-ZA)
๐ด๓ ฉ๓ ฎ๓ ค๓ ค๓ ฟ Flag for Daman and Diu (IN-DD)
๐ด๓ ฉ๓ ฌ๓ ด๓ ก๓ ฟ Flag for Tel Aviv District (IL-TA)
๐ด๓ ฉ๓ ค๓ ณ๓ ฌ๓ ฟ Flag for Sulawesi (ID-SL)
๐ด๓ ฉ๓ ฎ๓ ก๓ ฒ๓ ฟ Flag for Arunachal Pradesh (IN-AR)
๐ด๓ จ๓ ต๓ ถ๓ ฅ๓ ฟ Flag for Veszprรฉm County (HU-VE)
๐ด๓ ฉ๓ ฎ๓ ก๓ ฎ๓ ฟ Flag for Andaman and Nicobar Islands (IN-AN)
๐ด๓ จ๓ ต๓ ณ๓ ฏ๓ ฟ Flag for Somogy (HU-SO)
๐ด๓ จ๓ ต๓ ถ๓ ก๓ ฟ Flag for Vas (HU-VA)
๐ด๓ ฉ๓ ฌ๓ ช๓ ญ๓ ฟ Flag for Jerusalem (IL-JM)
๐ด๓ ฉ๓ ฎ๓ ค๓ ฎ๓ ฟ Flag for Dadra and Nagar Haveli (IN-DN)
๐ด๓ จ๓ ต๓ ถ๓ ญ๓ ฟ Flag for Veszprรฉm (HU-VM)
๐ด๓ จ๓ ต๓ ณ๓ ด๓ ฟ Flag for Salgรณtarjรกn (HU-ST)
๐ด๓ ฉ๓ ฎ๓ ฃ๓ ด๓ ฟ Flag for Chhattisgarh (IN-CT)
๐ด๓ ฉ๓ ฅ๓ ต๓ ฟ Flag for Ulster (IE-U)
๐ด๓ ฉ๓ ฎ๓ ค๓ ฌ๓ ฟ Flag for Delhi (IN-DL)
๐ด๓ ฉ๓ ฅ๓ ญ๓ ฟ Flag for Munster (IE-M)
๐ด๓ ฉ๓ ฅ๓ ฃ๓ ฟ Flag for Connacht (IE-C)
๐ด๓ ฉ๓ ฌ๓ จ๓ ก๓ ฟ Flag for Haifa District (IL-HA)
๐ด๓ ฉ๓ ค๓ ซ๓ ก๓ ฟ Flag for Kalimantan (ID-KA)
๐ด๓ ฉ๓ ฎ๓ ง๓ ก๓ ฟ Flag for Goa (IN-GA)
๐ด๓ ฉ๓ ค๓ ณ๓ ญ๓ ฟ Flag for Sumatra (ID-SM)
๐ด๓ ฉ๓ ค๓ ฐ๓ ฐ๓ ฟ Flag for Papua Islands (ID-PP)
๐ด๓ จ๓ ต๓ ณ๓ ณ๓ ฟ Flag for Szekszรกrd (HU-SS)
๐ด๓ ฉ๓ ฌ๓ บ๓ ฟ Flag for Northern District (IL-Z)
๐ด๓ จ๓ ต๓ ด๓ ฏ๓ ฟ Flag for Tolna (HU-TO)
๐ด๓ ฉ๓ ฌ๓ ญ๓ ฟ Flag for Central District (IL-M)
๐ด๓ ฉ๓ ฌ๓ ค๓ ฟ Flag for Southern District (IL-D)
๐ด๓ ฉ๓ ฎ๓ ข๓ ฒ๓ ฟ Flag for Bihar (IN-BR)
๐ด๓ จ๓ ต๓ บ๓ ฅ๓ ฟ Flag for Zalaegerszeg (HU-ZE)
๐ด๓ ฉ๓ ฎ๓ ก๓ ฐ๓ ฟ Flag for Andhra Pradesh (IN-AP)
๐ด๓ ฉ๓ ฑ๓ ค๓ ก๓ ฟ Flag for Dohuk (IQ-DA)
๐ด๓ ฉ๓ ฎ๓ ช๓ จ๓ ฟ Flag for Jharkhand (IN-JH)
๐ด๓ ฉ๓ ฎ๓ ซ๓ ฌ๓ ฟ Flag for Kerala (IN-KL)
๐ด๓ ฉ๓ ฎ๓ ท๓ ข๓ ฟ Flag for West Bengal (IN-WB)
๐ด๓ ฉ๓ ฎ๓ ฏ๓ ฒ๓ ฟ Flag for Odisha (IN-OR)
๐ด๓ ฉ๓ ฎ๓ ฐ๓ น๓ ฟ Flag for Puducherry (IN-PY)
๐ด๓ ฉ๓ ฑ๓ ซ๓ ก๓ ฟ Flag for Karbala (IQ-KA)
๐ด๓ ฉ๓ ฑ๓ ณ๓ ค๓ ฟ Flag for Saladin (IQ-SD)
๐ด๓ ฉ๓ ฎ๓ ญ๓ บ๓ ฟ Flag for Mizoram (IN-MZ)
๐ด๓ ฉ๓ ฎ๓ จ๓ ฐ๓ ฟ Flag for Himachal Pradesh (IN-HP)
๐ด๓ ฉ๓ ฎ๓ ญ๓ ฐ๓ ฟ Flag for Madhya Pradesh (IN-MP)
๐ด๓ ฉ๓ ฎ๓ ฐ๓ ข๓ ฟ Flag for Punjab (IN-PB)
๐ด๓ ฉ๓ ฎ๓ ฎ๓ ฌ๓ ฟ Flag for Nagaland (IN-NL)
๐ด๓ ฉ๓ ฑ๓ ฑ๓ ก๓ ฟ Flag for Al-Qฤdisiyyah (IQ-QA)
๐ด๓ ฉ๓ ฑ๓ ค๓ ฉ๓ ฟ Flag for Diyala (IQ-DI)
๐ด๓ ฉ๓ ฑ๓ ฎ๓ ฉ๓ ฟ Flag for Nineveh (IQ-NI)
๐ด๓ ฉ๓ ฑ๓ ค๓ ฑ๓ ฟ Flag for Dhi Qar (IQ-DQ)
๐ด๓ ฉ๓ ฎ๓ ญ๓ ฌ๓ ฟ Flag for Meghalaya (IN-ML)
๐ด๓ ฉ๓ ฎ๓ ด๓ ฎ๓ ฟ Flag for Tamil Nadu (IN-TN)
๐ด๓ ฉ๓ ฑ๓ ฎ๓ ก๓ ฟ Flag for Najaf (IQ-NA)
๐ด๓ ฉ๓ ฑ๓ ญ๓ ต๓ ฟ Flag for Al Muthanna (IQ-MU)
๐ด๓ ฉ๓ ฎ๓ ด๓ ง๓ ฟ Flag for Telangana (IN-TG)
๐ด๓ ฉ๓ ฎ๓ จ๓ ฒ๓ ฟ Flag for Haryana (IN-HR)
๐ด๓ ฉ๓ ฎ๓ ต๓ ด๓ ฟ Flag for Uttarakhand (IN-UT)
๐ด๓ ฉ๓ ฎ๓ ด๓ ฒ๓ ฟ Flag for Tripura (IN-TR)
๐ด๓ ฉ๓ ฑ๓ ข๓ ง๓ ฟ Flag for Baghdad (IQ-BG)
๐ด๓ ฉ๓ ฎ๓ ฌ๓ ค๓ ฟ Flag for Lakshadweep (IN-LD)
๐ด๓ ฉ๓ ฑ๓ ญ๓ ก๓ ฟ Flag for Maysan (IQ-MA)
๐ด๓ ฉ๓ ฑ๓ ข๓ ก๓ ฟ Flag for Basra (IQ-BA)
๐ด๓ ฉ๓ ฑ๓ ก๓ ฒ๓ ฟ Flag for Erbil (IQ-AR)
๐ด๓ ฉ๓ ฎ๓ ญ๓ จ๓ ฟ Flag for Maharashtra (IN-MH)
๐ด๓ ฉ๓ ฑ๓ ก๓ ฎ๓ ฟ Flag for Al Anbar (IQ-AN)
๐ด๓ ฉ๓ ฎ๓ ณ๓ ซ๓ ฟ Flag for Sikkim (IN-SK)
๐ด๓ ฉ๓ ฑ๓ ข๓ ข๓ ฟ Flag for Babylon (IQ-BB)
๐ด๓ ฉ๓ ฎ๓ ต๓ ฐ๓ ฟ Flag for Uttar Pradesh (IN-UP)
๐ด๓ ฉ๓ ฑ๓ ณ๓ ต๓ ฟ Flag for Sulaymaniyah (IQ-SU)
๐ด๓ ฉ๓ ฎ๓ ฒ๓ ช๓ ฟ Flag for Rajasthan (IN-RJ)
๐ด๓ ฉ๓ ฎ๓ ช๓ ซ๓ ฟ Flag for Jammu and Kashmir (IN-JK)
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ธ๓ ฟ Flag for Chaharmahal and Bakhtiari (IR-08)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ถ๓ ฟ Flag for Qom (IR-26)
๐ด๓ ฉ๓ ณ๓ ฑ๓ ฟ Flag for Capital (IS-1)
๐จ๐พโ๐จ๐พโ๐ฆ๐พ Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ณ๓ ฟ Flag for Ardabil (IR-03)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ต๓ ฟ Flag for Yazd (IR-25)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ น๓ ฟ Flag for South Khorasan (IR-29)
๐จ๐ฟโ๐จ๐ฟโ๐ฆ๐ฟ Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ด๓ ฟ Flag for Hamadan (IR-24)
๐ด๓ ง๓ น๓ ญ๓ ก๓ ฟ Flag for Mahaica-Berbice (GY-MA)
๐ด๓ ฉ๓ ณ๓ ณ๓ ฟ Flag for Western (IS-3)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ท๓ ฟ Flag for Golestan (IR-27)
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ฑ๓ ฟ Flag for Zanjan (IR-11)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ฐ๓ ฟ Flag for Lorestan (IR-20)
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ท๓ ฟ Flag for Kermanshah (IR-17)
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ธ๓ ฟ Flag for Kohgiluyeh and Boyer-Ahmad (IR-18)
๐ด๓ ฅ๓ ง๓ ฃ๓ ฟ Flag for Cairo (EG-C)
๐ด๓ ฉ๓ ฒ๓ ณ๓ ฑ๓ ฟ Flag for North Khorasan (IR-31)
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ถ๓ ฟ Flag for Bushehr (IR-06)
๐ด๓ ฅ๓ ณ๓ ฅ๓ ธ๓ ฟ Flag for Extremadura (ES-EX)
๐ด๓ ฅ๓ ณ๓ ฃ๓ ฎ๓ ฟ Flag for Canary Islands (ES-CN)
๐ด๓ ฉ๓ ณ๓ ท๓ ฟ Flag for Eastern (IS-7)
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ต๓ ฟ Flag for Ilam (IR-05)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ธ๓ ฟ Flag for Qazvin (IR-28)
๐ด๓ ฉ๓ ฒ๓ ฐ๓ ด๓ ฟ Flag for Isfahan (IR-04)
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ต๓ ฟ Flag for Kerman (IR-15)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ณ๓ ฟ Flag for Hormozgan (IR-23)
๐ด๓ ฉ๓ ฑ๓ ท๓ ก๓ ฟ Flag for Wasit (IQ-WA)
๐ด๓ ฉ๓ ด๓ ฒ๓ ฑ๓ ฟ Flag for Piedmont (IT-21)
๐ด๓ ฉ๓ ณ๓ ถ๓ ฟ Flag for Northeastern (IS-6)
๐ด๓ ฉ๓ ณ๓ ต๓ ฟ Flag for Northwestern (IS-5)
๐ด๓ ฉ๓ ฒ๓ ฒ๓ ฒ๓ ฟ Flag for Markazi (IR-22)
๐ด๓ ฉ๓ ฒ๓ ฑ๓ น๓ ฟ Flag for Gilan (IR-19)
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ฐ๓ ฟ Flag for Khuzestan (IR-10)
๐ด๓ ฉ๓ ฒ๓ ฑ๓ ฒ๓ ฟ Flag for Semnan (IR-12)
๐ด๓ ฉ๓ ณ๓ ฒ๓ ฟ Flag for Southern Peninsula (IS-2)
๐ด๓ ช๓ ญ๓ ฑ๓ ฒ๓ ฟ Flag for Manchester (JM-12)
๐ด๓ ช๓ ฏ๓ ฉ๓ ฒ๓ ฟ Flag for Irbid (JO-IR)
๐ด๓ ช๓ ญ๓ ฐ๓ ต๓ ฟ Flag for Saint Mary (JM-05)
๐ด๓ ฉ๓ ด๓ ท๓ ท๓ ฟ Flag for Basilicata (IT-77)
๐ด๓ ฉ๓ ด๓ ณ๓ ถ๓ ฟ Flag for FriuliโVenezia Giulia (IT-36)
๐ด๓ ช๓ ญ๓ ฑ๓ ณ๓ ฟ Flag for Clarendon (JM-13)
๐ด๓ ฉ๓ ด๓ ต๓ ท๓ ฟ Flag for Marche (IT-57)
๐ด๓ ช๓ ญ๓ ฐ๓ ด๓ ฟ Flag for Portland (JM-04)
๐ด๓ ฉ๓ ด๓ ธ๓ ฒ๓ ฟ Flag for Sicily (IT-82)
๐ด๓ ฉ๓ ด๓ ณ๓ ด๓ ฟ Flag for Veneto (IT-34)
๐ด๓ ฉ๓ ด๓ ถ๓ ต๓ ฟ Flag for Abruzzo (IT-65)
๐ด๓ ฉ๓ ด๓ ถ๓ ท๓ ฟ Flag for Molise (IT-67)
๐ด๓ ช๓ ฏ๓ ข๓ ก๓ ฟ Flag for Balqa (JO-BA)
๐ด๓ ฉ๓ ด๓ ท๓ ต๓ ฟ Flag for Apulia (IT-75)
๐ด๓ ฉ๓ ด๓ ท๓ ธ๓ ฟ Flag for Calabria (IT-78)
๐ด๓ ฉ๓ ด๓ ต๓ ฒ๓ ฟ Flag for Tuscany (IT-52)
๐ด๓ ช๓ ญ๓ ฐ๓ น๓ ฟ Flag for Hanover (JM-09)
๐ด๓ ช๓ ญ๓ ฐ๓ ฒ๓ ฟ Flag for Saint Andrew (JM-02)
๐ด๓ ช๓ ฏ๓ ก๓ ด๓ ฟ Flag for Tafilah (JO-AT)
๐ด๓ ฉ๓ ด๓ ต๓ ต๓ ฟ Flag for Umbria (IT-55)
๐ด๓ ช๓ ญ๓ ฐ๓ ธ๓ ฟ Flag for Saint James (JM-08)
๐ด๓ ช๓ ญ๓ ฐ๓ ถ๓ ฟ Flag for Saint Ann (JM-06)
๐ด๓ ช๓ ญ๓ ฑ๓ ฑ๓ ฟ Flag for Saint Elizabeth (JM-11)
๐ด๓ ช๓ ฏ๓ ก๓ บ๓ ฟ Flag for Zarqa (JO-AZ)
๐ด๓ ฆ๓ ฉ๓ ฑ๓ ฒ๓ ฟ Flag for Ostrobothnia (FI-12)
๐ด๓ ฉ๓ ด๓ ถ๓ ฒ๓ ฟ Flag for Lazio (IT-62)
๐ด๓ ช๓ ฏ๓ ก๓ ช๓ ฟ Flag for Ajloun (JO-AJ)
๐ด๓ ฉ๓ ด๓ ด๓ ฒ๓ ฟ Flag for Liguria (IT-42)
๐ด๓ ช๓ ญ๓ ฐ๓ ท๓ ฟ Flag for Trelawny (JM-07)
๐ด๓ ช๓ ฏ๓ ก๓ ฑ๓ ฟ Flag for Aqaba (JO-AQ)
๐ด๓ ช๓ ฏ๓ ช๓ ก๓ ฟ Flag for Jerash (JO-JA)
๐ด๓ ช๓ ฏ๓ ก๓ ญ๓ ฟ Flag for Amman (JO-AM)
๐ด๓ ฉ๓ ด๓ ฒ๓ ณ๓ ฟ Flag for Aosta Valley (IT-23)
๐ด๓ ช๓ ญ๓ ฑ๓ ฐ๓ ฟ Flag for Westmoreland (JM-10)
๐ด๓ ช๓ ฐ๓ ฐ๓ ธ๓ ฟ Flag for Ibaraki (JP-08)
๐ด๓ ช๓ ฏ๓ ญ๓ ค๓ ฟ Flag for Madaba (JO-MD)
๐ด๓ ช๓ ฐ๓ ณ๓ ฒ๓ ฟ Flag for Shimane (JP-32)
๐ด๓ ช๓ ฐ๓ ฒ๓ ถ๓ ฟ Flag for Kyลto (JP-26)
๐ด๓ ฃ๓ ฌ๓ ก๓ ฒ๓ ฟ Flag for Araucanรญa (CL-AR)
๐ด๓ ช๓ ฐ๓ ฐ๓ น๓ ฟ Flag for Tochigi (JP-09)
๐ด๓ ช๓ ฐ๓ ฐ๓ ต๓ ฟ Flag for Akita (JP-05)
๐ด๓ ช๓ ฐ๓ ฑ๓ ฒ๓ ฟ Flag for Chiba (JP-12)
๐ด๓ ช๓ ฐ๓ ฐ๓ ด๓ ฟ Flag for Miyagi (JP-04)
๐ด๓ ช๓ ฐ๓ ฑ๓ ต๓ ฟ Flag for Niigata (JP-15)
๐ด๓ ช๓ ฐ๓ ฑ๓ ถ๓ ฟ Flag for Toyama (JP-16)
๐ด๓ ช๓ ฐ๓ ฒ๓ ณ๓ ฟ Flag for Aichi (JP-23)
๐ด๓ ช๓ ฐ๓ ณ๓ ถ๓ ฟ Flag for Tokushima (JP-36)
๐ด๓ ช๓ ฐ๓ ฒ๓ ฐ๓ ฟ Flag for Nagano (JP-20)
๐ด๓ ช๓ ฐ๓ ณ๓ ฑ๓ ฟ Flag for Tottori (JP-31)
๐ด๓ ช๓ ฐ๓ ฐ๓ ณ๓ ฟ Flag for Iwate (JP-03)
๐ด๓ ช๓ ฐ๓ ณ๓ ณ๓ ฟ Flag for Okayama (JP-33)
๐ด๓ ช๓ ฐ๓ ฑ๓ ท๓ ฟ Flag for Ishikawa (JP-17)
๐ด๓ ช๓ ฐ๓ ณ๓ ฐ๓ ฟ Flag for Wakayama (JP-30)
๐ด๓ ช๓ ฐ๓ ฑ๓ ฐ๓ ฟ Flag for Gunma (JP-10)
๐ด๓ ช๓ ฏ๓ ญ๓ ก๓ ฟ Flag for Mafraq (JO-MA)
๐ด๓ ช๓ ฐ๓ ณ๓ ต๓ ฟ Flag for Yamaguchi (JP-35)
๐ด๓ ฃ๓ ต๓ ฑ๓ ฒ๓ ฟ Flag for Granma (CU-12)
๐ด๓ ช๓ ฐ๓ ฒ๓ ต๓ ฟ Flag for Shiga (JP-25)
๐ด๓ ช๓ ฐ๓ ฐ๓ ฒ๓ ฟ Flag for Aomori (JP-02)
๐ด๓ ช๓ ฐ๓ ฑ๓ ฑ๓ ฟ Flag for Saitama (JP-11)
๐ด๓ ช๓ ฐ๓ ฒ๓ น๓ ฟ Flag for Nara (JP-29)
๐ด๓ ช๓ ฐ๓ ฑ๓ น๓ ฟ Flag for Yamanashi (JP-19)
๐ด๓ ช๓ ฐ๓ ณ๓ ด๓ ฟ Flag for Hiroshima (JP-34)
๐ด๓ ช๓ ฏ๓ ญ๓ ฎ๓ ฟ Flag for Maโan (JO-MN)
๐ด๓ ช๓ ฐ๓ ฒ๓ ฒ๓ ฟ Flag for Shizuoka (JP-22)
๐ด๓ ช๓ ฐ๓ ฒ๓ ท๓ ฟ Flag for ลsaka (JP-27)
๐ด๓ ช๓ ฐ๓ ฒ๓ ด๓ ฟ Flag for Mie (JP-24)
๐ด๓ ช๓ ฐ๓ ฐ๓ ถ๓ ฟ Flag for Yamagata (JP-06)
๐ด๓ ช๓ ฐ๓ ฒ๓ ธ๓ ฟ Flag for Hyลgo (JP-28)
๐ด๓ ช๓ ฏ๓ ซ๓ ก๓ ฟ Flag for Karak (JO-KA)
๐ด๓ ช๓ ฐ๓ ณ๓ ธ๓ ฟ Flag for Ehime (JP-38)
๐ด๓ ช๓ ฐ๓ ฑ๓ ด๓ ฟ Flag for Kanagawa (JP-14)
๐ด๓ ช๓ ฐ๓ ณ๓ ท๓ ฟ Flag for Kagawa (JP-37)
๐ด๓ ซ๓ ฅ๓ ฐ๓ ท๓ ฟ Flag for Garissa (KE-07)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ด๓ ฟ Flag for Mandera (KE-24)
๐ด๓ ช๓ ฐ๓ ด๓ ถ๓ ฟ Flag for Kagoshima (JP-46)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ท๓ ฟ Flag for Kisumu (KE-17)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ด๓ ฟ Flag for Kilifi (KE-14)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ต๓ ฟ Flag for Kirinyaga (KE-15)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ฐ๓ ฟ Flag for Kajiado (KE-10)
๐ด๓ ซ๓ ฅ๓ ฐ๓ ณ๓ ฟ Flag for Bungoma (KE-03)
๐ด๓ ซ๓ ฅ๓ ณ๓ ฒ๓ ฟ Flag for Nandi (KE-32)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ณ๓ ฟ Flag for Kiambu (KE-13)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ฐ๓ ฟ Flag for Laikipia (KE-20)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ฑ๓ ฟ Flag for Lamu (KE-21)
๐ด๓ ช๓ ฐ๓ ด๓ ฐ๓ ฟ Flag for Fukuoka (JP-40)
๐ด๓ ซ๓ ฅ๓ ฐ๓ ด๓ ฟ Flag for Busia (KE-04)
๐ด๓ ช๓ ฐ๓ ด๓ ฑ๓ ฟ Flag for Saga (JP-41)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ท๓ ฟ Flag for Migori (KE-27)
๐ด๓ ซ๓ ฅ๓ ฐ๓ ถ๓ ฟ Flag for Embu (KE-06)
๐ฉ๐พโ๐ฆ๐พโ๐ง๐พ Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
๐ด๓ ซ๓ ฅ๓ ฑ๓ ฒ๓ ฟ Flag for Kericho (KE-12)
๐ด๓ ซ๓ ฅ๓ ฐ๓ น๓ ฟ Flag for Isiolo (KE-09)
๐ด๓ ซ๓ ฅ๓ ฑ๓ น๓ ฟ Flag for Kwale (KE-19)
๐ด๓ ช๓ ฐ๓ ด๓ ฒ๓ ฟ Flag for Nagasaki (JP-42)
๐ด๓ ซ๓ ฅ๓ ณ๓ ฐ๓ ฟ Flag for Nairobi County (KE-30)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ณ๓ ฟ Flag for Makueni (KE-23)
๐ด๓ ซ๓ ฅ๓ ฒ๓ น๓ ฟ Flag for Murangโa (KE-29)
๐ด๓ ช๓ ฐ๓ ณ๓ น๓ ฟ Flag for Kลchi (JP-39)
๐ด๓ ซ๓ ฅ๓ ฐ๓ ฒ๓ ฟ Flag for Bomet (KE-02)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ธ๓ ฟ Flag for Mombasa (KE-28)
๐ด๓ ซ๓ ฅ๓ ฐ๓ ธ๓ ฟ Flag for Homa Bay (KE-08)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ฑ๓ ฟ Flag for Kakamega (KE-11)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ฒ๓ ฟ Flag for Machakos (KE-22)
๐ด๓ ซ๓ ฅ๓ ฑ๓ ถ๓ ฟ Flag for Kisii (KE-16)
๐ด๓ ซ๓ ฅ๓ ฐ๓ ต๓ ฟ Flag for Elgeyo-Marakwet (KE-05)
๐ด๓ ช๓ ฐ๓ ด๓ ด๓ ฟ Flag for ลita (JP-44)
๐ด๓ ซ๓ ฅ๓ ณ๓ ณ๓ ฟ Flag for Narok (KE-33)
๐ด๓ ซ๓ ฅ๓ ฒ๓ ถ๓ ฟ Flag for Meru (KE-26)
๐ด๓ ช๓ ฐ๓ ด๓ ณ๓ ฟ Flag for Kumamoto (JP-43)
๐ด๓ ช๓ ฐ๓ ด๓ ต๓ ฟ Flag for Miyazaki (JP-45)
๐ด๓ ซ๓ จ๓ ฑ๓ น๓ ฟ Flag for Stung Treng (KH-19)
๐ด๓ ซ๓ ฅ๓ ณ๓ ท๓ ฟ Flag for Samburu (KE-37)
๐ด๓ ซ๓ ฅ๓ ด๓ ท๓ ฟ Flag for West Pokot (KE-47)
๐ด๓ ซ๓ ฅ๓ ณ๓ น๓ ฟ Flag for Taita-Taveta (KE-39)
๐ด๓ ซ๓ จ๓ ฑ๓ ด๓ ฟ Flag for Prey Veng (KH-14)
๐ด๓ ซ๓ ฅ๓ ด๓ ฑ๓ ฟ Flag for Tharaka-Nithi (KE-41)
๐ด๓ ซ๓ ง๓ ฏ๓ ฟ Flag for Osh Region (KG-O)
๐ด๓ ซ๓ จ๓ ฒ๓ ต๓ ฟ Flag for Tbong Khmum (KH-25)
๐ด๓ ซ๓ ง๓ ด๓ ฟ Flag for Talas (KG-T)
๐ด๓ ซ๓ จ๓ ฑ๓ ฒ๓ ฟ Flag for Phnom Penh (KH-12)
๐ด๓ ซ๓ ง๓ ง๓ ข๓ ฟ Flag for Bishkek (KG-GB)
๐ด๓ ซ๓ ฅ๓ ด๓ ด๓ ฟ Flag for Uasin Gishu (KE-44)
๐ด๓ ซ๓ จ๓ ฒ๓ ณ๓ ฟ Flag for Kep (KH-23)
๐ด๓ ซ๓ จ๓ ฑ๓ ฐ๓ ฟ Flag for Kratiรฉ (KH-10)
๐ด๓ ซ๓ จ๓ ฒ๓ ฑ๓ ฟ Flag for Takรฉo (KH-21)
๐ด๓ ซ๓ จ๓ ฒ๓ ฟ Flag for Battambang (KH-2)
๐ด๓ ซ๓ ฅ๓ ณ๓ ถ๓ ฟ Flag for Nyeri (KE-36)
๐ด๓ ซ๓ จ๓ ฑ๓ ณ๓ ฟ Flag for Preah Vihear (KH-13)
๐ด๓ ซ๓ ฅ๓ ด๓ ฐ๓ ฟ Flag for Tana River (KE-40)
๐ด๓ ซ๓ จ๓ ฒ๓ ด๓ ฟ Flag for Pailin (KH-24)
๐ด๓ ซ๓ จ๓ ฑ๓ ถ๓ ฟ Flag for Ratanakiri (KH-16)
๐ด๓ ซ๓ จ๓ ฒ๓ ฒ๓ ฟ Flag for Oddar Meanchey (KH-22)
๐ด๓ ซ๓ ฅ๓ ด๓ ฒ๓ ฟ Flag for Trans Nzoia (KE-42)
๐ด๓ ซ๓ จ๓ ฑ๓ ธ๓ ฟ Flag for Sihanoukville (KH-18)
๐ด๓ ซ๓ ฅ๓ ด๓ ต๓ ฟ Flag for Vihiga (KE-45)
๐ด๓ ซ๓ ง๓ ง๓ ฏ๓ ฟ Flag for Osh (KG-GO)
๐ด๓ ซ๓ ง๓ ข๓ ฟ Flag for Batken (KG-B)
๐ด๓ ซ๓ ง๓ ช๓ ฟ Flag for Jalal-Abad (KG-J)
๐ด๓ ซ๓ จ๓ ฑ๓ ฑ๓ ฟ Flag for Mondulkiri (KH-11)
๐ด๓ ซ๓ จ๓ ฑ๓ ท๓ ฟ Flag for Siem Reap (KH-17)
๐ด๓ ซ๓ ฅ๓ ด๓ ณ๓ ฟ Flag for Turkana (KE-43)
๐ด๓ ซ๓ จ๓ ฑ๓ ฟ Flag for Banteay Meanchey (KH-1)
๐ด๓ ซ๓ ง๓ ฎ๓ ฟ Flag for Naryn (KG-N)
๐ด๓ ซ๓ ฅ๓ ณ๓ ต๓ ฟ Flag for Nyandarua (KE-35)
๐ด๓ ซ๓ ฅ๓ ณ๓ ธ๓ ฟ Flag for Siaya (KE-38)
๐ด๓ ซ๓ ฅ๓ ณ๓ ด๓ ฟ Flag for Nyamira (KE-34)
๐ด๓ ซ๓ จ๓ ฑ๓ ต๓ ฟ Flag for Pursat (KH-15)
๐ด๓ ซ๓ ฅ๓ ด๓ ถ๓ ฟ Flag for Wajir (KE-46)
๐ด๓ ซ๓ ง๓ น๓ ฟ Flag for Issyk-Kul (KG-Y)
๐ด๓ ซ๓ ง๓ ฃ๓ ฟ Flag for Chuy (KG-C)
๐ด๓ ซ๓ ญ๓ ญ๓ ฟ Flag for Mohรฉli (KM-M)
๐ด๓ ซ๓ ฒ๓ ฑ๓ ฑ๓ ฟ Flag for Seoul (KR-11)
๐ด๓ ซ๓ จ๓ ด๓ ฟ Flag for Kampong Chhnang (KH-4)
๐ด๓ ซ๓ ฒ๓ ณ๓ ฐ๓ ฟ Flag for Daejeon (KR-30)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ต๓ ฟ Flag for South Hwanghae (KP-05)
๐ด๓ ซ๓ จ๓ ท๓ ฟ Flag for Kampot (KH-7)
๐ด๓ ซ๓ ฎ๓ ฎ๓ ฟ Flag for Nevis (KN-N)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ด๓ ฟ Flag for Chagang (KP-04)
๐ด๓ ซ๓ ฒ๓ ด๓ ถ๓ ฟ Flag for South Jeolla (KR-46)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ถ๓ ฟ Flag for North Hwanghae (KP-06)
๐ด๓ ซ๓ ฎ๓ ซ๓ ฟ Flag for Saint Kitts (KN-K)
๐ด๓ ซ๓ จ๓ ต๓ ฟ Flag for Kampong Speu (KH-5)
๐ด๓ ซ๓ ฒ๓ ด๓ ต๓ ฟ Flag for North Jeolla (KR-45)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ณ๓ ฟ Flag for North Pyongan (KP-03)
๐ด๓ ซ๓ จ๓ น๓ ฟ Flag for Koh Kong (KH-9)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ท๓ ฟ Flag for Kangwon (KP-07)
๐ด๓ ซ๓ ฒ๓ ฒ๓ ถ๓ ฟ Flag for Busan (KR-26)
๐ด๓ ซ๓ ฒ๓ ฒ๓ น๓ ฟ Flag for Gwangju City (KR-29)
๐ด๓ ซ๓ จ๓ ณ๓ ฟ Flag for Kampong Cham (KH-3)
๐ด๓ ซ๓ ฒ๓ ด๓ ณ๓ ฟ Flag for North Chungcheong (KR-43)
๐ด๓ ซ๓ จ๓ ธ๓ ฟ Flag for Kandal (KH-8)
๐ด๓ ซ๓ จ๓ ถ๓ ฟ Flag for Kampong Thom (KH-6)
๐ด๓ ซ๓ ฐ๓ ฑ๓ ฐ๓ ฟ Flag for Ryanggang (KP-10)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ฒ๓ ฟ Flag for South Pyongan (KP-02)
๐ด๓ ซ๓ ญ๓ ง๓ ฟ Flag for Grande Comore (KM-G)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ธ๓ ฟ Flag for South Hamgyong (KP-08)
๐ด๓ ซ๓ ฐ๓ ฑ๓ ณ๓ ฟ Flag for Rason (KP-13)
๐ด๓ ซ๓ ฒ๓ ฒ๓ ท๓ ฟ Flag for Daegu (KR-27)
๐ด๓ ซ๓ ฒ๓ ฒ๓ ธ๓ ฟ Flag for Incheon (KR-28)
๐ด๓ ซ๓ ฒ๓ ด๓ ฒ๓ ฟ Flag for Gangwon (KR-42)
๐ด๓ ซ๓ ฐ๓ ฐ๓ ฑ๓ ฟ Flag for Pyongyang (KP-01)
๐ด๓ ซ๓ ฒ๓ ณ๓ ฑ๓ ฟ Flag for Ulsan (KR-31)
๐ด๓ ซ๓ ฒ๓ ด๓ ด๓ ฟ Flag for South Chungcheong (KR-44)
๐ด๓ ซ๓ ญ๓ ก๓ ฟ Flag for Anjouan (KM-A)
๐ด๓ ซ๓ ฒ๓ ด๓ ฑ๓ ฟ Flag for Gyeonggi (KR-41)
๐ด๓ ซ๓ ฒ๓ ด๓ ท๓ ฟ Flag for North Gyeongsang (KR-47)
๐ด๓ ซ๓ ฐ๓ ฐ๓ น๓ ฟ Flag for North Hamgyong (KP-09)
๐ด๓ ฌ๓ ก๓ จ๓ ฏ๓ ฟ Flag for Houaphanh (LA-HO)
๐ด๓ ซ๓ บ๓ ข๓ ก๓ น๓ ฟ Flag for Bayqongyr (KZ-BAY)
๐ด๓ ฌ๓ ก๓ ฃ๓ จ๓ ฟ Flag for Champasak (LA-CH)
๐ด๓ ฌ๓ ก๓ ถ๓ ด๓ ฟ Flag for Vientiane (LA-VT)
๐ด๓ ซ๓ ท๓ จ๓ ก๓ ฟ Flag for Hawalli (KW-HA)
๐ด๓ ฌ๓ ก๓ ฐ๓ จ๓ ฟ Flag for Phongsaly (LA-PH)
๐ด๓ ซ๓ บ๓ ฐ๓ ก๓ ถ๓ ฟ Flag for Pavlodar (KZ-PAV)
๐ด๓ ซ๓ บ๓ ก๓ ฌ๓ ญ๓ ฟ Flag for Almaty Region (KZ-ALM)
๐ด๓ ซ๓ ท๓ ซ๓ ต๓ ฟ Flag for Al Asimah (KW-KU)
๐ด๓ ฌ๓ ก๓ ข๓ ซ๓ ฟ Flag for Bokeo (LA-BK)
๐ด๓ ฌ๓ ก๓ ก๓ ด๓ ฟ Flag for Attapeu (LA-AT)
๐ด๓ ซ๓ บ๓ ก๓ ซ๓ ด๓ ฟ Flag for Aktobe (KZ-AKT)
๐ด๓ ซ๓ บ๓ ก๓ ด๓ น๓ ฟ Flag for Atyrau (KZ-ATY)
๐ด๓ ซ๓ ท๓ ช๓ ก๓ ฟ Flag for Al Jahra (KW-JA)
๐ด๓ ฌ๓ ก๓ ข๓ ฌ๓ ฟ Flag for Bolikhamsai (LA-BL)
๐ด๓ ฌ๓ ก๓ ฏ๓ ต๓ ฟ Flag for Oudomxay (LA-OU)
๐ด๓ ซ๓ บ๓ ญ๓ ก๓ ฎ๓ ฟ Flag for Mangystau (KZ-MAN)
๐ด๓ ซ๓ บ๓ บ๓ ก๓ ฐ๓ ฟ Flag for West Kazakhstan (KZ-ZAP)
๐ด๓ ซ๓ บ๓ บ๓ จ๓ ก๓ ฟ Flag for Jambyl (KZ-ZHA)
๐ด๓ ซ๓ บ๓ ก๓ ณ๓ ด๓ ฟ Flag for Astana (KZ-AST)
๐ด๓ ฌ๓ ก๓ ฌ๓ ฐ๓ ฟ Flag for Luang Prabang (LA-LP)
๐ด๓ ซ๓ ท๓ ฆ๓ ก๓ ฟ Flag for Al Farwaniyah (KW-FA)
๐ด๓ ซ๓ บ๓ ซ๓ ต๓ ณ๓ ฟ Flag for Kostanay (KZ-KUS)
๐ด๓ ซ๓ บ๓ ก๓ ฌ๓ ก๓ ฟ Flag for Almaty (KZ-ALA)
๐ด๓ ซ๓ บ๓ ซ๓ ก๓ ฒ๓ ฟ Flag for Karagandy (KZ-KAR)
๐ด๓ ซ๓ บ๓ ซ๓ บ๓ น๓ ฟ Flag for Kyzylorda (KZ-KZY)
๐ด๓ ฌ๓ ก๓ ณ๓ ฌ๓ ฟ Flag for Salavan (LA-SL)
๐ด๓ ฌ๓ ก๓ ฌ๓ ญ๓ ฟ Flag for Luang Namtha (LA-LM)
๐ด๓ ซ๓ ฒ๓ ต๓ ฐ๓ ฟ Flag for Sejong (KR-50)
๐ด๓ ซ๓ ท๓ ญ๓ ต๓ ฟ Flag for Mubarak Al-Kabeer (KW-MU)
๐ด๓ ซ๓ บ๓ ณ๓ ฅ๓ ถ๓ ฟ Flag for North Kazakhstan (KZ-SEV)
๐ฉ๐ฟโ๐ฆ๐ฟโ๐ง๐ฟ Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
๐ด๓ ซ๓ ท๓ ก๓ จ๓ ฟ Flag for Al Ahmadi (KW-AH)
๐ด๓ ฌ๓ ก๓ ซ๓ จ๓ ฟ Flag for Khammouane (LA-KH)
๐ด๓ ซ๓ บ๓ ก๓ ซ๓ ญ๓ ฟ Flag for Akmola (KZ-AKM)
๐ด๓ ซ๓ บ๓ น๓ ต๓ บ๓ ฟ Flag for South Kazakhstan (KZ-YUZ)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ น๓ ฟ Flag for Triesen (LI-09)
๐จ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝโ๐ฆ๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
๐ฉ๐ปโ๐ฆ๐ปโ๐ถ๐ป Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
๐ด๓ ฌ๓ ซ๓ ท๓ ฟ Flag for North Central (LK-7)
๐ด๓ ฌ๓ ก๓ ธ๓ ก๓ ฟ Flag for Sainyabuli (LA-XA)
๐ด๓ ฌ๓ ข๓ ก๓ ซ๓ ฟ Flag for Akkar (LB-AK)
๐ด๓ ฌ๓ ฃ๓ ฐ๓ ท๓ ฟ Flag for Laborie (LC-07)
๐ด๓ ฌ๓ ฃ๓ ฐ๓ ถ๓ ฟ Flag for Gros Islet (LC-06)
๐ด๓ ฌ๓ ข๓ ก๓ ณ๓ ฟ Flag for North (LB-AS)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ฑ๓ ฟ Flag for Balzers (LI-01)
๐ด๓ ฌ๓ ซ๓ ฒ๓ ฟ Flag for Central (LK-2)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ด๓ ฟ Flag for Mauren (LI-04)
๐ด๓ ฌ๓ ข๓ ฎ๓ ก๓ ฟ Flag for Nabatieh (LB-NA)
๐ด๓ ฌ๓ ฃ๓ ฐ๓ ต๓ ฟ Flag for Dennery (LC-05)
๐ด๓ ฌ๓ ข๓ ช๓ ก๓ ฟ Flag for South (LB-JA)
๐ด๓ ฌ๓ ฉ๓ ฑ๓ ฑ๓ ฟ Flag for Vaduz (LI-11)
๐ด๓ ฌ๓ ฃ๓ ฐ๓ ฒ๓ ฟ Flag for Castries (LC-02)
๐ด๓ ฌ๓ ซ๓ ธ๓ ฟ Flag for Uva (LK-8)
๐ด๓ ฌ๓ ฉ๓ ฑ๓ ฐ๓ ฟ Flag for Triesenberg (LI-10)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ต๓ ฟ Flag for Planken (LI-05)
๐ด๓ ฌ๓ ฃ๓ ฑ๓ ฑ๓ ฟ Flag for Vieux Fort (LC-11)
๐ด๓ ฌ๓ ข๓ ข๓ จ๓ ฟ Flag for Baalbek-Hermel (LB-BH)
๐ด๓ ฌ๓ ซ๓ ถ๓ ฟ Flag for North Western (LK-6)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ถ๓ ฟ Flag for Ruggell (LI-06)
๐ด๓ ฌ๓ ฃ๓ ฐ๓ ธ๓ ฟ Flag for Micoud (LC-08)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ฒ๓ ฟ Flag for Eschen (LI-02)
๐ด๓ ฌ๓ ฃ๓ ฑ๓ ฒ๓ ฟ Flag for Canaries (LC-12)
๐ด๓ ฌ๓ ข๓ ข๓ ก๓ ฟ Flag for Beirut (LB-BA)
๐ด๓ ฌ๓ ก๓ ธ๓ ฉ๓ ฟ Flag for Xiangkhouang (LA-XI)
๐ด๓ ฌ๓ ฃ๓ ฑ๓ ฐ๓ ฟ Flag for Soufriรจre (LC-10)
๐ด๓ ฌ๓ ฃ๓ ฐ๓ ฑ๓ ฟ Flag for Anse la Raye (LC-01)
๐ด๓ ฌ๓ ฃ๓ ฐ๓ ณ๓ ฟ Flag for Choiseul (LC-03)
๐ด๓ ฌ๓ ฉ๓ ฐ๓ ณ๓ ฟ Flag for Gamprin (LI-03)
๐ด๓ ฌ๓ ซ๓ ด๓ ฟ Flag for Northern (LK-4)
๐ด๓ ฌ๓ ฒ๓ ง๓ ข๓ ฟ Flag for Grand Bassa (LR-GB)
๐ด๓ ฌ๓ ฒ๓ ง๓ ฐ๓ ฟ Flag for Gbarpolu (LR-GP)
๐ด๓ ฌ๓ ฒ๓ ง๓ ง๓ ฟ Flag for Grand Gedeh (LR-GG)
๐ด๓ ฌ๓ ด๓ ฑ๓ ฒ๓ ฟ Flag for Jurbarkas (LT-12)
๐ด๓ ฌ๓ ฒ๓ ฎ๓ ฉ๓ ฟ Flag for Nimba (LR-NI)
๐ด๓ ฆ๓ ฉ๓ ฐ๓ ธ๓ ฟ Flag for Central Finland (FI-08)
๐ด๓ ฌ๓ ด๓ ฑ๓ ฐ๓ ฟ Flag for Jonava (LT-10)
๐ด๓ ฌ๓ ฒ๓ ญ๓ ง๓ ฟ Flag for Margibi (LR-MG)
๐ด๓ ฌ๓ ฒ๓ ณ๓ ฉ๓ ฟ Flag for Sinoe (LR-SI)
๐ด๓ ฌ๓ ฒ๓ ญ๓ ฏ๓ ฟ Flag for Montserrado (LR-MO)
๐ด๓ ฌ๓ ด๓ ฑ๓ ถ๓ ฟ Flag for Kaunas (LT-16)
๐ด๓ ฌ๓ ณ๓ ซ๓ ฟ Flag for Thaba-Tseka (LS-K)
๐ด๓ ฌ๓ ด๓ ฐ๓ ต๓ ฟ Flag for Birลกtonas (LT-05)
๐ด๓ ฌ๓ ณ๓ ฆ๓ ฟ Flag for Mohaleโs Hoek (LS-F)
๐ด๓ ฌ๓ ฒ๓ ข๓ ญ๓ ฟ Flag for Bomi (LR-BM)
๐ด๓ ฌ๓ ด๓ ฐ๓ ท๓ ฟ Flag for Druskininkai (LT-07)
๐ด๓ ฌ๓ ด๓ ฑ๓ ด๓ ฟ Flag for Kalvarija (LT-14)
๐ด๓ ฌ๓ ด๓ ฑ๓ ต๓ ฟ Flag for Kauno Municipality (LT-15)
๐ด๓ ฌ๓ ณ๓ จ๓ ฟ Flag for Qachaโs Nek (LS-H)
๐ด๓ ฌ๓ ด๓ ฐ๓ ด๓ ฟ Flag for Anykลกฤiai (LT-04)
๐ด๓ ฌ๓ ณ๓ ฃ๓ ฟ Flag for Leribe (LS-C)
๐ด๓ ฌ๓ ด๓ ฑ๓ ฑ๓ ฟ Flag for Joniลกkis (LT-11)
๐ด๓ ฌ๓ ฒ๓ ฌ๓ ฏ๓ ฟ Flag for Lofa (LR-LO)
๐ด๓ ฌ๓ ฒ๓ ฒ๓ ฉ๓ ฟ Flag for Rivercess (LR-RI)
๐ด๓ ฌ๓ ด๓ ฑ๓ ณ๓ ฟ Flag for Kaiลกiadorys (LT-13)
๐ด๓ ฌ๓ ด๓ ฐ๓ ธ๓ ฟ Flag for Elektrฤnai (LT-08)
๐ด๓ ฌ๓ ฒ๓ ง๓ ซ๓ ฟ Flag for Grand Kru (LR-GK)
๐ด๓ ฌ๓ ณ๓ ค๓ ฟ Flag for Berea (LS-D)
๐ด๓ ฌ๓ ณ๓ ง๓ ฟ Flag for Quthing (LS-G)
๐ด๓ ฌ๓ ณ๓ ข๓ ฟ Flag for Butha-Buthe (LS-B)
๐ด๓ ฌ๓ ด๓ ฐ๓ ฑ๓ ฟ Flag for Akmenฤ (LT-01)
๐ด๓ ฌ๓ ด๓ ฐ๓ น๓ ฟ Flag for Ignalina (LT-09)
๐ด๓ ฌ๓ ณ๓ ฅ๓ ฟ Flag for Mafeteng (LS-E)
๐ด๓ ฌ๓ ณ๓ ช๓ ฟ Flag for Mokhotlong (LS-J)
๐ด๓ ฌ๓ ด๓ ฐ๓ ณ๓ ฟ Flag for Alytus (LT-03)
๐ด๓ ฌ๓ ด๓ ฐ๓ ถ๓ ฟ Flag for Birลพai (LT-06)
๐ด๓ ฃ๓ ฆ๓ ซ๓ ข๓ ฟ Flag for Nana-Grรฉbizi (CF-KB)
๐ด๓ ฌ๓ ฒ๓ ฒ๓ ง๓ ฟ Flag for River Gee (LR-RG)
๐ด๓ ฌ๓ ด๓ ต๓ ด๓ ฟ Flag for Utena (LT-54)
๐ด๓ ฌ๓ ด๓ ฒ๓ ท๓ ฟ Flag for Molฤtai (LT-27)
๐ด๓ ฌ๓ ด๓ ด๓ ฑ๓ ฟ Flag for ล akiai (LT-41)
๐ด๓ ฌ๓ ด๓ ฑ๓ น๓ ฟ Flag for Kelmฤ (LT-19)
๐ด๓ ฌ๓ ด๓ ฒ๓ ณ๓ ฟ Flag for Kupiลกkis (LT-23)
๐ด๓ ฌ๓ ด๓ ต๓ ถ๓ ฟ Flag for Vilkaviลกkis (LT-56)
๐ด๓ ฌ๓ ด๓ ฒ๓ ธ๓ ฟ Flag for Neringa (LT-28)
๐ด๓ ฌ๓ ด๓ ณ๓ ณ๓ ฟ Flag for Panevฤลพys (LT-33)
๐ด๓ ฌ๓ ด๓ ฒ๓ น๓ ฟ Flag for Pagฤgiai (LT-29)
๐ด๓ ฌ๓ ด๓ ด๓ ณ๓ ฟ Flag for ล iauliลณ Municipality (LT-43)
๐ด๓ ฌ๓ ด๓ ณ๓ ฑ๓ ฟ Flag for Palanga (LT-31)
๐ด๓ ฌ๓ ด๓ ฑ๓ ธ๓ ฟ Flag for Kฤdainiai (LT-18)
๐ด๓ ฌ๓ ด๓ ด๓ ฐ๓ ฟ Flag for Rokiลกkis (LT-40)
๐ด๓ ฌ๓ ด๓ ด๓ ต๓ ฟ Flag for ล ilalฤ (LT-45)
๐ด๓ ฌ๓ ด๓ ต๓ ฒ๓ ฟ Flag for Trakai (LT-52)
๐ด๓ ฆ๓ ญ๓ ฐ๓ ฎ๓ ฉ๓ ฟ Flag for Pohnpei (FM-PNI)
๐ด๓ ฌ๓ ด๓ ณ๓ ถ๓ ฟ Flag for Prienai (LT-36)
๐ด๓ ฌ๓ ด๓ ต๓ ฑ๓ ฟ Flag for Telลกiai (LT-51)
๐ด๓ ฌ๓ ด๓ ฒ๓ ฑ๓ ฟ Flag for Klaipฤda (LT-21)
๐ด๓ ฌ๓ ด๓ ฑ๓ ท๓ ฟ Flag for Kazlลณ Rลซda (LT-17)
๐ด๓ ฌ๓ ด๓ ด๓ ท๓ ฟ Flag for ล irvintos (LT-47)
๐ด๓ ฌ๓ ด๓ ณ๓ ฐ๓ ฟ Flag for Pakruojis (LT-30)
๐ด๓ ฌ๓ ด๓ ด๓ ด๓ ฟ Flag for ล iauliai (LT-44)
๐ด๓ ฌ๓ ด๓ ฒ๓ ฒ๓ ฟ Flag for Kretinga (LT-22)
๐ด๓ ฌ๓ ด๓ ด๓ ถ๓ ฟ Flag for ล ilutฤ (LT-46)
๐ด๓ ฌ๓ ด๓ ด๓ ฒ๓ ฟ Flag for ล alฤininkai (LT-42)
๐ด๓ ฌ๓ ด๓ ณ๓ ธ๓ ฟ Flag for Raseiniai (LT-38)
๐ด๓ ฌ๓ ด๓ ต๓ ต๓ ฟ Flag for Varฤna (LT-55)
๐ด๓ ฌ๓ ด๓ ณ๓ ด๓ ฟ Flag for Pasvalys (LT-34)
๐ด๓ ฌ๓ ด๓ ณ๓ ต๓ ฟ Flag for Plungฤ (LT-35)
๐ด๓ ฌ๓ ด๓ ด๓ น๓ ฟ Flag for ล venฤionys (LT-49)
๐ด๓ ฌ๓ ด๓ ณ๓ ท๓ ฟ Flag for Radviliลกkis (LT-37)
๐ด๓ ฌ๓ ด๓ ฒ๓ ด๓ ฟ Flag for Lazdijai (LT-24)
๐ด๓ ฌ๓ ด๓ ต๓ ฐ๓ ฟ Flag for Tauragฤ (LT-50)
๐ด๓ ฌ๓ ด๓ ด๓ ธ๓ ฟ Flag for Skuodas (LT-48)
๐ด๓ ฌ๓ ด๓ ต๓ ณ๓ ฟ Flag for Ukmergฤ (LT-53)
๐ด๓ ฌ๓ ด๓ ณ๓ น๓ ฟ Flag for Rietavas (LT-39)
๐ด๓ ฌ๓ ด๓ ฒ๓ ต๓ ฟ Flag for Marijampolฤ (LT-25)
๐ด๓ ฌ๓ ด๓ ฒ๓ ถ๓ ฟ Flag for Maลพeikiai (LT-26)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ณ๓ ฟ Flag for Baldone (LV-013)
๐ด๓ ฌ๓ ด๓ ถ๓ ฌ๓ ฟ Flag for Vilnius County (LT-VL)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ถ๓ ฟ Flag for Alsunga (LV-006)
๐ด๓ ฌ๓ ด๓ ต๓ ธ๓ ฟ Flag for Vilnius (LT-58)
๐ด๓ ฌ๓ ด๓ ด๓ ก๓ ฟ Flag for Tauragฤ County (LT-TA)
๐ด๓ ฌ๓ ด๓ ต๓ ด๓ ฟ Flag for Utena County (LT-UT)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ฒ๓ ฟ Flag for Aizkraukle (LV-002)
๐ด๓ ฌ๓ ต๓ ค๓ ฉ๓ ฟ Flag for Diekirch (LU-DI)
๐ด๓ ฌ๓ ด๓ ญ๓ ฒ๓ ฟ Flag for Marijampolฤ County (LT-MR)
๐ฉ๐ฝโ๐จ๐ฝโ๐ถ๐ฝ Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone
๐ด๓ ฌ๓ ด๓ ณ๓ ก๓ ฟ Flag for ล iauliai County (LT-SA)
๐ด๓ ฌ๓ ต๓ ฅ๓ ฃ๓ ฟ Flag for Echternach (LU-EC)
๐ด๓ ฌ๓ ต๓ ฒ๓ ค๓ ฟ Flag for Redange (LU-RD)
๐ด๓ ฌ๓ ต๓ ฃ๓ ฌ๓ ฟ Flag for Clervaux (LU-CL)
๐ด๓ ฌ๓ ด๓ ต๓ น๓ ฟ Flag for Visaginas (LT-59)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ น๓ ฟ Flag for Ape (LV-009)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ธ๓ ฟ Flag for Amata (LV-008)
๐ด๓ ฌ๓ ด๓ ก๓ ฌ๓ ฟ Flag for Alytus County (LT-AL)
๐ด๓ ฌ๓ ต๓ ง๓ ฒ๓ ฟ Flag for Grevenmacher (LU-GR)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ฑ๓ ฟ Flag for Aglona (LV-001)
๐ด๓ ฌ๓ ต๓ ญ๓ ฅ๓ ฟ Flag for Mersch (LU-ME)
๐ด๓ ฌ๓ ต๓ ถ๓ ค๓ ฟ Flag for Vianden (LU-VD)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ต๓ ฟ Flag for Aloja (LV-005)
๐ด๓ ฌ๓ ข๓ ช๓ ฌ๓ ฟ Flag for Mount Lebanon (LB-JL)
๐ด๓ ฌ๓ ด๓ ซ๓ ต๓ ฟ Flag for Kaunas County (LT-KU)
๐ด๓ ฌ๓ ด๓ ถ๓ ฐ๓ ฟ Flag for Zarasai (LT-60)
๐ด๓ ฌ๓ ต๓ ท๓ ฉ๓ ฟ Flag for Wiltz (LU-WI)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ฑ๓ ฟ Flag for ฤdaลพi (LV-011)
๐ด๓ ฌ๓ ต๓ ฌ๓ ต๓ ฟ Flag for Luxembourg (LU-LU)
๐ด๓ ฌ๓ ด๓ ด๓ ฅ๓ ฟ Flag for Telลกiai County (LT-TE)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ท๓ ฟ Flag for Alลซksne (LV-007)
๐ด๓ ฌ๓ ต๓ ฒ๓ ญ๓ ฟ Flag for Remich (LU-RM)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ด๓ ฟ Flag for Aknฤซste (LV-004)
๐ด๓ ฌ๓ ต๓ ฅ๓ ณ๓ ฟ Flag for Esch-sur-Alzette (LU-ES)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฐ๓ ณ๓ ฟ Flag for Aizpute (LV-003)
๐ด๓ ฌ๓ ด๓ ซ๓ ฌ๓ ฟ Flag for Klaipฤda County (LT-KL)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ท๓ ฟ Flag for Dundaga (LV-027)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ฐ๓ ฟ Flag for Jaunpils (LV-040)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ น๓ ฟ Flag for Burtnieki (LV-019)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ต๓ ฟ Flag for Balvi (LV-015)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ท๓ ฟ Flag for Beverฤซna (LV-017)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ต๓ ฟ Flag for Daugavpils Municipality (LV-025)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ฑ๓ ฟ Flag for Cesvaine (LV-021)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ถ๓ ฟ Flag for Ilลซkste (LV-036)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ฐ๓ ฟ Flag for Kuldฤซga (LV-050)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ฒ๓ ฟ Flag for Grobiลa (LV-032)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ณ๓ ฟ Flag for Gulbene (LV-033)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ณ๓ ฟ Flag for Kandava (LV-043)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ธ๓ ฟ Flag for Brocฤni (LV-018)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ธ๓ ฟ Flag for Krimulda (LV-048)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ฐ๓ ฟ Flag for Carnikava (LV-020)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ น๓ ฟ Flag for Krustpils (LV-049)
๐ฉ๐พโ๐จ๐พโ๐ถ๐พ Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ถ๓ ฟ Flag for Dobele (LV-026)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ต๓ ฟ Flag for Kocฤni (LV-045)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ฑ๓ ฟ Flag for Garkalne (LV-031)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ฐ๓ ฟ Flag for ฤrgฤผi (LV-030)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ธ๓ ฟ Flag for Durbe (LV-028)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ท๓ ฟ Flag for Krฤslava (LV-047)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ด๓ ฟ Flag for Dagda (LV-024)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ธ๓ ฟ Flag for Jaunjelgava (LV-038)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ถ๓ ฟ Flag for Bauska (LV-016)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฑ๓ ด๓ ฟ Flag for Baltinava (LV-014)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ฒ๓ ฟ Flag for Jฤkabpils Municipality (LV-042)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ น๓ ฟ Flag for Jaunpiebalga (LV-039)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ฒ๓ ฟ Flag for Cฤsis (LV-022)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ด๓ ฟ Flag for Iecava (LV-034)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ฑ๓ ฟ Flag for ฤถegums (LV-051)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ณ๓ ต๓ ฟ Flag for Ikลกฤทile (LV-035)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ ณ๓ ฟ Flag for Cibla (LV-023)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ด๓ ด๓ ฟ Flag for Kฤrsava (LV-044)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ฒ๓ น๓ ฟ Flag for Engure (LV-029)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ต๓ ฟ Flag for Lฤซgatne (LV-055)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ถ๓ ฟ Flag for Nฤซca (LV-066)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ฑ๓ ฟ Flag for Mฤlpils (LV-061)
๐ด๓ ง๓ ฅ๓ ซ๓ ซ๓ ฟ Flag for Kvemo Kartli (GE-KK)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ฐ๓ ฟ Flag for Pฤrgauja (LV-070)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ณ๓ ฟ Flag for Lielvฤrde (LV-053)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ฒ๓ ฟ Flag for Pฤผaviลas (LV-072)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ฑ๓ ฟ Flag for Pฤvilosta (LV-071)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ น๓ ฟ Flag for Madona (LV-059)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ถ๓ ฟ Flag for Rauna (LV-076)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ด๓ ฟ Flag for Limbaลพi (LV-054)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ด๓ ฟ Flag for Naukลกฤni (LV-064)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ฒ๓ ฟ Flag for ฤถekava (LV-052)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ท๓ ฟ Flag for Salaspils (LV-087)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ณ๓ ฟ Flag for Mฤrsrags (LV-063)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ธ๓ ฟ Flag for Olaine (LV-068)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ น๓ ฟ Flag for Roja (LV-079)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ฑ๓ ฟ Flag for Rucava (LV-081)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ฒ๓ ฟ Flag for Rugฤji (LV-082)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ท๓ ฟ Flag for Ogre (LV-067)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ด๓ ฟ Flag for Rลซjiena (LV-084)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ น๓ ฟ Flag for Saulkrasti (LV-089)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ธ๓ ฟ Flag for Saldus (LV-088)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ณ๓ ฟ Flag for Rundฤle (LV-083)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ต๓ ฟ Flag for Nereta (LV-065)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ น๓ ฟ Flag for Ozolnieki (LV-069)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ฐ๓ ฟ Flag for Ropaลพi (LV-080)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ธ๓ ฟ Flag for Riebiลi (LV-078)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ถ๓ ฟ Flag for Lฤซvฤni (LV-056)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ต๓ ฟ Flag for Priekuฤผi (LV-075)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ธ๓ ฟ Flag for Ludza (LV-058)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ฐ๓ ฟ Flag for Sฤja (LV-090)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ด๓ ฟ Flag for Priekule (LV-074)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ต๓ ท๓ ฟ Flag for Lubฤna (LV-057)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ธ๓ ถ๓ ฟ Flag for Salacgrฤซva (LV-086)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ถ๓ ฒ๓ ฟ Flag for Mฤrupe (LV-062)
๐ด๓ ฌ๓ ถ๓ ฐ๓ ท๓ ณ๓ ฟ Flag for Preiฤผi (LV-073)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ท๓ ฟ Flag for Viesฤซte (LV-107)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ด๓ ฟ Flag for Smiltene (LV-094)
๐ด๓ ฌ๓ น๓ ซ๓ ฆ๓ ฟ Flag for Kufra (LY-KF)
๐ด๓ ฌ๓ ถ๓ ค๓ ง๓ ถ๓ ฟ Flag for Daugavpils (LV-DGV)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ น๓ ฟ Flag for Tukums (LV-099)
๐ฉ๐ฟโ๐จ๐ฟโ๐ถ๐ฟ Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone
๐ด๓ ฌ๓ ถ๓ ฌ๓ ฐ๓ ธ๓ ฟ Flag for Liepฤja (LV-LPX)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ฑ๓ ฟ Flag for Valka (LV-101)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ณ๓ ฟ Flag for Vฤrkava (LV-103)
๐ด๓ ฌ๓ น๓ ญ๓ ข๓ ฟ Flag for Murqub (LY-MB)
๐ด๓ ฌ๓ ถ๓ ถ๓ ฅ๓ ฎ๓ ฟ Flag for Ventspils (LV-VEN)
๐ด๓ ฌ๓ น๓ ช๓ ก๓ ฟ Flag for Jabal al Akhdar (LY-JA)
๐ด๓ ฌ๓ ถ๓ ช๓ ซ๓ ข๓ ฟ Flag for Jฤkabpils (LV-JKB)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ฑ๓ ฟ Flag for Sigulda (LV-091)
๐ด๓ ฌ๓ น๓ ช๓ ง๓ ฟ Flag for Jabal al Gharbi (LY-JG)
๐ด๓ ฌ๓ น๓ ง๓ ด๓ ฟ Flag for Ghat (LY-GT)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ต๓ ฟ Flag for Stopiลi (LV-095)
๐ด๓ ฌ๓ ถ๓ ฒ๓ ฉ๓ ธ๓ ฟ Flag for Riga (LV-RIX)
๐ด๓ ฌ๓ น๓ ค๓ ฒ๓ ฟ Flag for Derna (LY-DR)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ฐ๓ ฟ Flag for Vaiลode (LV-100)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ฒ๓ ฟ Flag for Varakฤผฤni (LV-102)
๐ด๓ ฌ๓ ถ๓ ช๓ ฅ๓ ฌ๓ ฟ Flag for Jelgava (LV-JEL)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ฒ๓ ฟ Flag for Skrฤซveri (LV-092)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ท๓ ฟ Flag for Talsi (LV-097)
๐ด๓ ฌ๓ ถ๓ ถ๓ ญ๓ ฒ๓ ฟ Flag for Valmiera (LV-VMR)
๐ด๓ ฌ๓ น๓ ข๓ ก๓ ฟ Flag for Benghazi (LY-BA)
๐ด๓ ฌ๓ ถ๓ ฒ๓ ฅ๓ บ๓ ฟ Flag for Rฤzekne (LV-REZ)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ณ๓ ฟ Flag for Skrunda (LV-093)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฑ๓ ฐ๓ ฟ Flag for Zilupe (LV-110)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ถ๓ ฟ Flag for Strenฤi (LV-096)
๐ด๓ ฌ๓ น๓ ช๓ ต๓ ฟ Flag for Jufra (LY-JU)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ด๓ ฟ Flag for Vecpiebalga (LV-104)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ต๓ ฟ Flag for Vecumnieki (LV-105)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ ธ๓ ฟ Flag for Viฤผaka (LV-108)
๐ด๓ ฌ๓ ถ๓ ช๓ ต๓ ฒ๓ ฟ Flag for Jลซrmala (LV-JUR)
๐ด๓ ฌ๓ ถ๓ ฑ๓ ฐ๓ น๓ ฟ Flag for Viฤผฤni (LV-109)
๐ด๓ ฌ๓ ถ๓ ฐ๓ น๓ ธ๓ ฟ Flag for Tฤrvete (LV-098)
๐ด๓ ญ๓ ก๓ ฐ๓ ธ๓ ฟ Flag for Grand Casablanca (MA-08)
๐ด๓ ฌ๓ น๓ ญ๓ ช๓ ฟ Flag for Marj (LY-MJ)
๐ด๓ ฌ๓ น๓ ท๓ ก๓ ฟ Flag for Al Wahat (LY-WA)
๐ด๓ ญ๓ ฃ๓ ญ๓ ฃ๓ ฟ Flag for Monte Carlo (MC-MC)
๐ด๓ ญ๓ ก๓ ฑ๓ ด๓ ฟ Flag for Guelmim-Es Semara (MA-14)
๐ด๓ ฌ๓ น๓ บ๓ ก๓ ฟ Flag for Zawiya (LY-ZA)
๐ด๓ ญ๓ ก๓ ฐ๓ ฒ๓ ฟ Flag for Gharb-Chrarda-Bรฉni Hssen (MA-02)
๐ด๓ ญ๓ ก๓ ฑ๓ ฑ๓ ฟ Flag for Marrakesh-Tensift-El Haouz (MA-11)
๐ด๓ ญ๓ ก๓ ฑ๓ ฐ๓ ฟ Flag for Doukkala-Abda (MA-10)
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ฆ๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone
๐ด๓ ญ๓ ก๓ ฐ๓ ท๓ ฟ Flag for Rabat-Salรฉ-Zemmour-Zaer (MA-07)
๐ด๓ ญ๓ ก๓ ฑ๓ ถ๓ ฟ Flag for Oued Ed-Dahab-Lagouira (MA-16)
๐ด๓ ฌ๓ น๓ ฎ๓ ฌ๓ ฟ Flag for Nalut (LY-NL)
๐ด๓ ฌ๓ น๓ ณ๓ ข๓ ฟ Flag for Sabha (LY-SB)
๐ด๓ ญ๓ ก๓ ฐ๓ ณ๓ ฟ Flag for Taza-Al Hoceima-Taounate (MA-03)
๐ด๓ ญ๓ ฃ๓ ช๓ ฅ๓ ฟ Flag for Jardin Exotique de Monaco (MC-JE)
๐ด๓ ฌ๓ น๓ ท๓ ณ๓ ฟ Flag for Wadi al Shatii (LY-WS)
๐ด๓ ญ๓ ฃ๓ ฌ๓ ก๓ ฟ Flag for Larvotto (MC-LA)
๐ด๓ ฌ๓ น๓ ฎ๓ ฑ๓ ฟ Flag for Nuqat al Khams (LY-NQ)
๐ด๓ ญ๓ ฃ๓ ญ๓ ก๓ ฟ Flag for Malbousquet (MC-MA)
๐ด๓ ญ๓ ก๓ ฑ๓ ฒ๓ ฟ Flag for Tadla-Azilal (MA-12)
๐ด๓ ญ๓ ฃ๓ ฃ๓ ฏ๓ ฟ Flag for La Condamine (MC-CO)
๐ด๓ ญ๓ ฃ๓ ญ๓ ฏ๓ ฟ Flag for Monaco-Ville (MC-MO)
๐ด๓ ญ๓ ก๓ ฐ๓ น๓ ฟ Flag for Chaouia-Ouardigha (MA-09)
๐ด๓ ญ๓ ก๓ ฐ๓ ฑ๓ ฟ Flag for Tangier-Tรฉtouan (MA-01)
๐ด๓ ญ๓ ฃ๓ ญ๓ ง๓ ฟ Flag for Moneghetti (MC-MG)
๐ด๓ ฌ๓ น๓ ญ๓ ฑ๓ ฟ Flag for Murzuq (LY-MQ)
๐ด๓ ญ๓ ก๓ ฐ๓ ถ๓ ฟ Flag for Meknรจs-Tafilalet (MA-06)
๐ด๓ ญ๓ ฃ๓ ฆ๓ ฏ๓ ฟ Flag for Fontvieille (MC-FO)
๐ด๓ ฌ๓ น๓ ท๓ ค๓ ฟ Flag for Wadi al Hayaa (LY-WD)
๐ด๓ ญ๓ ฃ๓ ฃ๓ ฌ๓ ฟ Flag for La Colle (MC-CL)
๐ด๓ ฌ๓ น๓ ณ๓ ฒ๓ ฟ Flag for Sirte (LY-SR)
๐ด๓ ฌ๓ น๓ ญ๓ ฉ๓ ฟ Flag for Misrata (LY-MI)
๐ด๓ ญ๓ ก๓ ฐ๓ ต๓ ฟ Flag for Fรจs-Boulemane (MA-05)
๐ด๓ ฌ๓ น๓ ด๓ ข๓ ฟ Flag for Tripoli (LY-TB)
๐ด๓ ญ๓ ฃ๓ ง๓ ก๓ ฟ Flag for La Gare (MC-GA)
๐ฉ๐พโ๐ฉ๐พโ๐ฆ๐พ Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
๐ด๓ ญ๓ ค๓ ฅ๓ ค๓ ฟ Flag for Edineศ (MD-ED)
๐ด๓ ญ๓ ค๓ จ๓ ฉ๓ ฟ Flag for Hรฎnceศti (MD-HI)
๐ด๓ ญ๓ ค๓ ฆ๓ ก๓ ฟ Flag for Fฤleศti (MD-FA)
๐ด๓ ญ๓ ค๓ ฃ๓ ฒ๓ ฟ Flag for Criuleni (MD-CR)
๐ด๓ ญ๓ ค๓ ณ๓ ฉ๓ ฟ Flag for Sรฎngerei (MD-SI)
๐ด๓ ญ๓ ค๓ ณ๓ ฏ๓ ฟ Flag for Soroca (MD-SO)
๐ด๓ ญ๓ ค๓ ฃ๓ ด๓ ฟ Flag for Cantemir (MD-CT)
๐ด๓ ญ๓ ค๓ ฒ๓ ฅ๓ ฟ Flag for Rezina (MD-RE)
๐ด๓ ญ๓ ค๓ ณ๓ ค๓ ฟ Flag for ศoldฤneศti (MD-SD)
๐ด๓ ญ๓ ค๓ ข๓ ฒ๓ ฟ Flag for Briceni (MD-BR)
๐ด๓ ญ๓ ฃ๓ ถ๓ ฒ๓ ฟ Flag for Vallon de la Rousse (MC-VR)
๐ด๓ ญ๓ ค๓ ข๓ ก๓ ฟ Flag for Bฤlลฃi (MD-BA)
๐ด๓ ญ๓ ค๓ ค๓ ต๓ ฟ Flag for Dubฤsari (MD-DU)
๐ด๓ ญ๓ ค๓ ฃ๓ ฌ๓ ฟ Flag for Cฤlฤraศi (MD-CL)
๐ด๓ ญ๓ ฃ๓ ณ๓ ฐ๓ ฟ Flag for Spรฉlugues (MC-SP)
๐ด๓ ญ๓ ค๓ ฃ๓ ก๓ ฟ Flag for Cahul (MD-CA)
๐ด๓ ญ๓ ค๓ ฉ๓ ก๓ ฟ Flag for Ialoveni (MD-IA)
๐ด๓ ญ๓ ค๓ ฏ๓ ฒ๓ ฟ Flag for Orhei (MD-OR)
๐ด๓ ญ๓ ค๓ ค๓ ฒ๓ ฟ Flag for Drochia (MD-DR)
๐ด๓ ญ๓ ค๓ ง๓ ก๓ ฟ Flag for Gagauzia (MD-GA)
๐ด๓ ญ๓ ค๓ ฃ๓ ญ๓ ฟ Flag for Cimiศlia (MD-CM)
๐ด๓ ญ๓ ค๓ ฏ๓ ฃ๓ ฟ Flag for Ocniลฃa (MD-OC)
๐ด๓ ญ๓ ค๓ ข๓ ณ๓ ฟ Flag for Basarabeasca (MD-BS)
๐ด๓ ญ๓ ค๓ ณ๓ ด๓ ฟ Flag for Strฤศeni (MD-ST)
๐ด๓ ญ๓ ค๓ ก๓ ฎ๓ ฟ Flag for Anenii Noi (MD-AN)
๐ด๓ ญ๓ ฃ๓ ญ๓ ต๓ ฟ Flag for Moulins (MC-MU)
๐ด๓ ญ๓ ค๓ ข๓ ค๓ ฟ Flag for Bender (MD-BD)
๐ด๓ ญ๓ ค๓ ง๓ ฌ๓ ฟ Flag for Glodeni (MD-GL)
๐ด๓ ญ๓ ฃ๓ ณ๓ ฏ๓ ฟ Flag for La Source (MC-SO)
๐ด๓ ญ๓ ค๓ ฃ๓ ต๓ ฟ Flag for Chiศinฤu (MD-CU)
๐ด๓ ญ๓ ค๓ ค๓ ฏ๓ ฟ Flag for Donduศeni (MD-DO)
๐ด๓ ญ๓ ค๓ ฆ๓ ฌ๓ ฟ Flag for Floreศti (MD-FL)
๐ด๓ ญ๓ ฃ๓ ฐ๓ จ๓ ฟ Flag for Port Hercules (MC-PH)
๐ด๓ ญ๓ ค๓ ฎ๓ ฉ๓ ฟ Flag for Nisporeni (MD-NI)
๐ด๓ ญ๓ ค๓ ฒ๓ ฉ๓ ฟ Flag for Rรฎศcani (MD-RI)
๐ด๓ ญ๓ ค๓ ฌ๓ ฅ๓ ฟ Flag for Leova (MD-LE)
๐ด๓ ญ๓ ค๓ ณ๓ ถ๓ ฟ Flag for ลtefan Vodฤ (MD-SV)
๐ด๓ ญ๓ ค๓ ต๓ ฎ๓ ฟ Flag for Ungheni (MD-UN)
๐ด๓ ญ๓ ง๓ ก๓ ฟ Flag for Toamasina (MG-A)
๐ด๓ ญ๓ ง๓ ด๓ ฟ Flag for Antananarivo (MG-T)
๐ด๓ ญ๓ ฅ๓ ฐ๓ ถ๓ ฟ Flag for Cetinje (ME-06)
๐ด๓ ญ๓ ซ๓ ฐ๓ ต๓ ฟ Flag for Bogdanci (MK-05)
๐ด๓ ญ๓ ฅ๓ ฒ๓ ฐ๓ ฟ Flag for Ulcinj (ME-20)
๐ด๓ ญ๓ ฅ๓ ฐ๓ น๓ ฟ Flag for Kolaลกin (ME-09)
๐ด๓ ญ๓ ซ๓ ฐ๓ ท๓ ฟ Flag for Bosilovo (MK-07)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ด๓ ฟ Flag for Pljevlja (ME-14)
๐ด๓ ญ๓ ค๓ ด๓ ฅ๓ ฟ Flag for Teleneศti (MD-TE)
๐ด๓ ญ๓ ซ๓ ฐ๓ ถ๓ ฟ Flag for Bogovinje (MK-06)
๐ด๓ ญ๓ ฅ๓ ฒ๓ ฑ๓ ฟ Flag for ลฝabljak (ME-21)
๐ด๓ ญ๓ ฅ๓ ฐ๓ ธ๓ ฟ Flag for Herceg Novi (ME-08)
๐ด๓ ญ๓ ฅ๓ ฒ๓ ณ๓ ฟ Flag for Petnjica (ME-23)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ท๓ ฟ Flag for Roลพaje (ME-17)
๐ด๓ ญ๓ ฅ๓ ฐ๓ ต๓ ฟ Flag for Budva (ME-05)
๐ด๓ ญ๓ ฅ๓ ฐ๓ ฒ๓ ฟ Flag for Bar (ME-02)
๐ด๓ ญ๓ ซ๓ ฐ๓ ณ๓ ฟ Flag for Berovo (MK-03)
๐ด๓ ญ๓ ฅ๓ ฑ๓ น๓ ฟ Flag for Tivat (ME-19)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ต๓ ฟ Flag for Pluลพine (ME-15)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ฐ๓ ฟ Flag for Kotor (ME-10)
๐ด๓ ญ๓ จ๓ ฌ๓ ฟ Flag for Ralik Chain (MH-L)
๐ด๓ ญ๓ ฅ๓ ฐ๓ ท๓ ฟ Flag for Danilovgrad (ME-07)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ณ๓ ฟ Flag for Plav (ME-13)
๐ด๓ ญ๓ ซ๓ ฐ๓ ด๓ ฟ Flag for Bitola (MK-04)
๐ด๓ ญ๓ ฅ๓ ฐ๓ ด๓ ฟ Flag for Bijelo Polje (ME-04)
๐ด๓ ญ๓ ฅ๓ ฐ๓ ฑ๓ ฟ Flag for Andrijevica (ME-01)
๐ฉ๐ฟโ๐ฉ๐ฟโ๐ฆ๐ฟ Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone
๐ด๓ ญ๓ ฅ๓ ฑ๓ ฒ๓ ฟ Flag for Nikลกiฤ (ME-12)
๐ด๓ ญ๓ ค๓ ด๓ ก๓ ฟ Flag for Taraclia (MD-TA)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ฑ๓ ฟ Flag for Mojkovac (ME-11)
๐ด๓ ญ๓ ง๓ ญ๓ ฟ Flag for Mahajanga (MG-M)
๐ด๓ ญ๓ ฅ๓ ฒ๓ ฒ๓ ฟ Flag for Gusinje (ME-22)
๐ด๓ ญ๓ ง๓ ฆ๓ ฟ Flag for Fianarantsoa (MG-F)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ธ๓ ฟ Flag for ล avnik (ME-18)
๐ด๓ ญ๓ ฅ๓ ฑ๓ ถ๓ ฟ Flag for Podgorica (ME-16)
๐ด๓ ญ๓ ง๓ ต๓ ฟ Flag for Toliara (MG-U)
๐ด๓ ญ๓ ง๓ ค๓ ฟ Flag for Antsiranana (MG-D)
๐ด๓ ญ๓ ซ๓ ด๓ ณ๓ ฟ Flag for Kratovo (MK-43)
๐ด๓ ญ๓ ซ๓ ด๓ ด๓ ฟ Flag for Kriva Palanka (MK-44)
๐ด๓ ญ๓ ซ๓ ต๓ ฒ๓ ฟ Flag for Makedonski Brod (MK-52)
๐ด๓ ญ๓ ซ๓ ณ๓ ต๓ ฟ Flag for Jegunovce (MK-35)
๐ด๓ ญ๓ ซ๓ ด๓ น๓ ฟ Flag for Lozovo (MK-49)
๐ด๓ ญ๓ ซ๓ ด๓ ท๓ ฟ Flag for Kumanovo (MK-47)
๐ด๓ ญ๓ ซ๓ ฑ๓ ฒ๓ ฟ Flag for Vevฤani (MK-12)
๐ด๓ ญ๓ ซ๓ ฒ๓ ด๓ ฟ Flag for Demir Kapija (MK-24)
๐ด๓ ญ๓ ซ๓ ฑ๓ ฑ๓ ฟ Flag for Vasilevo (MK-11)
๐ด๓ ญ๓ ซ๓ ณ๓ ฐ๓ ฟ Flag for ลฝelino (MK-30)
๐ด๓ ญ๓ ซ๓ ณ๓ ถ๓ ฟ Flag for Kavadarci (MK-36)
๐ด๓ ญ๓ ซ๓ ณ๓ ฒ๓ ฟ Flag for Zelenikovo (MK-32)
๐ด๓ ญ๓ ซ๓ ด๓ ฑ๓ ฟ Flag for Konฤe (MK-41)
๐ด๓ ญ๓ ซ๓ ฑ๓ ด๓ ฟ Flag for Vinica (MK-14)
๐ด๓ ญ๓ ซ๓ ฑ๓ ฐ๓ ฟ Flag for Valandovo (MK-10)
๐ด๓ ญ๓ ซ๓ ต๓ ต๓ ฟ Flag for Novaci (MK-55)
๐ด๓ ญ๓ ซ๓ ต๓ ถ๓ ฟ Flag for Novo Selo (MK-56)
๐ด๓ ญ๓ ซ๓ ณ๓ ด๓ ฟ Flag for Ilinden (MK-34)
๐ด๓ ญ๓ ซ๓ ต๓ ฑ๓ ฟ Flag for Makedonska Kamenica (MK-51)
๐ด๓ ญ๓ ซ๓ ฑ๓ ถ๓ ฟ Flag for Vrapฤiลกte (MK-16)
๐ด๓ ญ๓ ซ๓ ฐ๓ ธ๓ ฟ Flag for Brvenica (MK-08)
๐ด๓ ญ๓ ซ๓ ฒ๓ ฐ๓ ฟ Flag for Gradsko (MK-20)
๐ด๓ ญ๓ ซ๓ ต๓ ฐ๓ ฟ Flag for Mavrovo and Rostuลกa (MK-50)
๐ด๓ ญ๓ ซ๓ ฒ๓ ฒ๓ ฟ Flag for Debarca (MK-22)
๐ด๓ ญ๓ ซ๓ ฑ๓ น๓ ฟ Flag for Gostivar (MK-19)
๐ด๓ ญ๓ ซ๓ ต๓ ณ๓ ฟ Flag for Mogila (MK-53)
๐ด๓ ญ๓ ซ๓ ด๓ ธ๓ ฟ Flag for Lipkovo (MK-48)
๐ด๓ ญ๓ ซ๓ ณ๓ ท๓ ฟ Flag for Karbinci (MK-37)
๐ด๓ ญ๓ ซ๓ ณ๓ ณ๓ ฟ Flag for Zrnovci (MK-33)
๐ด๓ ญ๓ ซ๓ ต๓ ด๓ ฟ Flag for Negotino (MK-54)
๐ด๓ ญ๓ ซ๓ ด๓ ฐ๓ ฟ Flag for Kiฤevo (MK-40)
๐ด๓ ญ๓ ซ๓ ฒ๓ ฑ๓ ฟ Flag for Debar (MK-21)
๐ด๓ ญ๓ ซ๓ ฑ๓ ณ๓ ฟ Flag for Veles (MK-13)
๐ด๓ ญ๓ ซ๓ ฒ๓ ถ๓ ฟ Flag for Dojran (MK-26)
๐ด๓ ญ๓ ซ๓ ฑ๓ ธ๓ ฟ Flag for Gevgelija (MK-18)
๐ด๓ ญ๓ ซ๓ ด๓ ฒ๓ ฟ Flag for Koฤani (MK-42)
๐ด๓ ญ๓ ซ๓ ด๓ ต๓ ฟ Flag for Krivogaลกtani (MK-45)
๐ด๓ ญ๓ ซ๓ ฒ๓ ณ๓ ฟ Flag for Delฤevo (MK-23)
๐ด๓ ญ๓ ซ๓ ด๓ ถ๓ ฟ Flag for Kruลกevo (MK-46)
๐ด๓ ญ๓ ซ๓ ธ๓ ฒ๓ ฟ Flag for ฤuฤer-Sandevo (MK-82)
๐ด๓ ญ๓ ซ๓ ถ๓ ฒ๓ ฟ Flag for Prilep (MK-62)
๐ด๓ ญ๓ ซ๓ ท๓ ธ๓ ฟ Flag for Centar ลฝupa (MK-78)
๐ด๓ ญ๓ ญ๓ ฐ๓ ด๓ ฟ Flag for Mandalay (MM-04)
๐ด๓ ญ๓ ฌ๓ ด๓ ฟ Flag for Sรฉgou (ML-4)
๐ด๓ ญ๓ ซ๓ ต๓ น๓ ฟ Flag for Petrovec (MK-59)
๐ด๓ ญ๓ ซ๓ ธ๓ ฑ๓ ฟ Flag for ฤeลกinovo-Obleลกevo (MK-81)
๐ด๓ ญ๓ ฌ๓ ธ๓ ฟ Flag for Kidal (ML-8)
๐ด๓ ญ๓ ญ๓ ฐ๓ ฒ๓ ฟ Flag for Bago (MM-02)
๐ด๓ ญ๓ ซ๓ ท๓ ฒ๓ ฟ Flag for Struga (MK-72)
๐ด๓ ญ๓ ซ๓ ท๓ ต๓ ฟ Flag for Tearce (MK-75)
๐ด๓ ญ๓ ซ๓ ท๓ ด๓ ฟ Flag for Studeniฤani (MK-74)
๐ด๓ ญ๓ ซ๓ ต๓ ธ๓ ฟ Flag for Ohrid (MK-58)
๐ด๓ ญ๓ ซ๓ ถ๓ น๓ ฟ Flag for Sveti Nikole (MK-69)
๐ด๓ ญ๓ ซ๓ ท๓ ณ๓ ฟ Flag for Strumica (MK-73)
๐ด๓ ญ๓ ฌ๓ ณ๓ ฟ Flag for Sikasso (ML-3)
๐ด๓ ญ๓ ญ๓ ฑ๓ ฑ๓ ฟ Flag for Kachin (MM-11)
๐ด๓ ญ๓ ซ๓ ถ๓ ถ๓ ฟ Flag for Resen (MK-66)
๐ด๓ ญ๓ ฌ๓ ข๓ ซ๓ ฏ๓ ฟ Flag for Bamako (ML-BKO)
๐ด๓ ญ๓ ญ๓ ฐ๓ ณ๓ ฟ Flag for Magway (MM-03)
๐ด๓ ญ๓ ซ๓ ท๓ ฐ๓ ฟ Flag for Sopiลกte (MK-70)
๐ด๓ ญ๓ ซ๓ ท๓ ฑ๓ ฟ Flag for Staro Nagoriฤane (MK-71)
๐ด๓ ญ๓ ญ๓ ฐ๓ ท๓ ฟ Flag for Ayeyarwady (MM-07)
๐ด๓ ญ๓ ฌ๓ ท๓ ฟ Flag for Gao (ML-7)
๐ด๓ ญ๓ ฌ๓ ต๓ ฟ Flag for Mopti (ML-5)
๐ด๓ ญ๓ ซ๓ ธ๓ ณ๓ ฟ Flag for ล tip (MK-83)
๐ด๓ ญ๓ ญ๓ ฑ๓ ฒ๓ ฟ Flag for Kayah (MM-12)
๐ด๓ ญ๓ ญ๓ ฐ๓ ต๓ ฟ Flag for Tanintharyi (MM-05)
๐ด๓ ญ๓ ฌ๓ ฒ๓ ฟ Flag for Koulikoro (ML-2)
๐ด๓ ญ๓ ซ๓ ถ๓ ณ๓ ฟ Flag for Probiลกtip (MK-63)
๐ด๓ ญ๓ ซ๓ ถ๓ ฐ๓ ฟ Flag for Pehฤevo (MK-60)
๐ด๓ ญ๓ ญ๓ ฐ๓ ฑ๓ ฟ Flag for Sagaing (MM-01)
๐ด๓ ญ๓ ซ๓ ธ๓ ฐ๓ ฟ Flag for ฤaลกka (MK-80)
๐ด๓ ญ๓ ซ๓ ถ๓ ต๓ ฟ Flag for Rankovce (MK-65)
๐ด๓ ญ๓ ญ๓ ฐ๓ ถ๓ ฟ Flag for Yangon (MM-06)
๐ด๓ ญ๓ ซ๓ ท๓ ถ๓ ฟ Flag for Tetovo (MK-76)
๐ด๓ ญ๓ ซ๓ ถ๓ ท๓ ฟ Flag for Rosoman (MK-67)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ณ๓ ฟ Flag for Assaba (MR-03)
๐ด๓ ญ๓ ญ๓ ฑ๓ ท๓ ฟ Flag for Shan (MM-17)
๐ด๓ ญ๓ ญ๓ ฑ๓ ถ๓ ฟ Flag for Rakhine (MM-16)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ด๓ ฑ๓ ฟ Flag for Khรถvsgรถl (MN-041)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ท๓ ฑ๓ ฟ Flag for Bayan-รlgii (MN-071)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ถ๓ น๓ ฟ Flag for Bayankhongor (MN-069)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ถ๓ ฑ๓ ฟ Flag for Dornod (MN-061)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ด๓ น๓ ฟ Flag for Selenge (MN-049)
๐ด๓ ญ๓ ฎ๓ ฑ๓ ฟ Flag for Ulaanbaatar (MN-1)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ณ๓ ท๓ ฟ Flag for Darkhan-Uul (MN-037)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ด๓ ท๓ ฟ Flag for Tรถv (MN-047)
๐ด๓ ญ๓ ญ๓ ฑ๓ ต๓ ฟ Flag for Mon (MM-15)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ถ๓ ฟ Flag for Trarza (MR-06)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ต๓ ฑ๓ ฟ Flag for Sรผkhbaatar (MN-051)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ด๓ ฟ Flag for Gorgol (MR-04)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ต๓ ต๓ ฟ Flag for รvรถrkhangai (MN-055)
๐ด๓ ญ๓ ญ๓ ฑ๓ ด๓ ฟ Flag for Chin (MM-14)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ถ๓ ท๓ ฟ Flag for Bulgan (MN-067)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ต๓ ท๓ ฟ Flag for Zavkhan (MN-057)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ถ๓ ณ๓ ฟ Flag for Dornogovi (MN-063)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ต๓ ณ๓ ฟ Flag for รmnรถgovi (MN-053)
๐ด๓ ญ๓ ญ๓ ฑ๓ ณ๓ ฟ Flag for Kayin (MM-13)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ถ๓ ต๓ ฟ Flag for Govi-Altai (MN-065)
๐ด๓ ญ๓ ฒ๓ ฑ๓ ฑ๓ ฟ Flag for Tiris Zemmour (MR-11)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ต๓ น๓ ฟ Flag for Dundgovi (MN-059)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ท๓ ณ๓ ฟ Flag for Arkhangai (MN-073)
๐ด๓ ญ๓ ฒ๓ ฐ๓ น๓ ฟ Flag for Tagant (MR-09)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ด๓ ณ๓ ฟ Flag for Khovd (MN-043)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ด๓ ถ๓ ฟ Flag for Uvs (MN-046)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ถ๓ ด๓ ฟ Flag for Govisรผmber (MN-064)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ต๓ ฟ Flag for Brakna (MR-05)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ธ๓ ฟ Flag for Dakhlet Nouadhibou (MR-08)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ฑ๓ ฟ Flag for Hodh Ech Chargui (MR-01)
๐ด๓ ญ๓ ฎ๓ ฐ๓ ณ๓ ต๓ ฟ Flag for Orkhon (MN-035)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ฒ๓ ฟ Flag for Hodh El Gharbi (MR-02)
๐ด๓ ญ๓ ญ๓ ฑ๓ ธ๓ ฟ Flag for Naypyidaw (MM-18)
๐ด๓ ญ๓ ฒ๓ ฐ๓ ท๓ ฟ Flag for Adrar (MR-07)
๐ด๓ ญ๓ ฒ๓ ฑ๓ ฒ๓ ฟ Flag for Inchiri (MR-12)
๐ด๓ ญ๓ ด๓ ฑ๓ น๓ ฟ Flag for Iklin (MT-19)
๐ด๓ ญ๓ ด๓ ฑ๓ ด๓ ฟ Flag for Gฤงarb (MT-14)
๐ด๓ ญ๓ ด๓ ณ๓ ณ๓ ฟ Flag for Mqabba (MT-33)
๐ด๓ ญ๓ ด๓ ฒ๓ ฒ๓ ฟ Flag for Kerฤem (MT-22)
๐ด๓ ญ๓ ด๓ ฑ๓ ถ๓ ฟ Flag for Gฤงasri (MT-16)
๐ด๓ ญ๓ ด๓ ฒ๓ ด๓ ฟ Flag for Lija (MT-24)
๐ด๓ ญ๓ ด๓ ฐ๓ ต๓ ฟ Flag for Birลผebbuฤกa (MT-05)
๐ด๓ ญ๓ ด๓ ฐ๓ ด๓ ฟ Flag for Birkirkara (MT-04)
๐ด๓ ญ๓ ด๓ ณ๓ ฑ๓ ฟ Flag for Mฤกarr (MT-31)
๐ด๓ ญ๓ ด๓ ฐ๓ ฒ๓ ฟ Flag for Balzan (MT-02)
๐ด๓ ญ๓ ด๓ ณ๓ ถ๓ ฟ Flag for Munxar (MT-36)
๐ด๓ ญ๓ ด๓ ฑ๓ ณ๓ ฟ Flag for Gฤงajnsielem (MT-13)
๐ด๓ ญ๓ ด๓ ณ๓ ธ๓ ฟ Flag for Naxxar (MT-38)
๐ด๓ ญ๓ ด๓ ฐ๓ น๓ ฟ Flag for Floriana (MT-09)
๐ด๓ ญ๓ ด๓ ฒ๓ ถ๓ ฟ Flag for Marsa (MT-26)
๐ด๓ ญ๓ ด๓ ฐ๓ ท๓ ฟ Flag for Dingli (MT-07)
๐ด๓ ญ๓ ด๓ ฑ๓ ฑ๓ ฟ Flag for Gudja (MT-11)
๐ด๓ ญ๓ ด๓ ฒ๓ ณ๓ ฟ Flag for Kirkop (MT-23)
๐ด๓ ญ๓ ด๓ ฒ๓ ท๓ ฟ Flag for Marsaskala (MT-27)
๐ด๓ ญ๓ ด๓ ณ๓ น๓ ฟ Flag for Paola (MT-39)
๐ด๓ ญ๓ ด๓ ฑ๓ ฐ๓ ฟ Flag for Fontana (MT-10)
๐ด๓ ญ๓ ด๓ ณ๓ ด๓ ฟ Flag for Msida (MT-34)
๐ด๓ ญ๓ ด๓ ณ๓ ท๓ ฟ Flag for Nadur (MT-37)
๐ด๓ ญ๓ ด๓ ณ๓ ฒ๓ ฟ Flag for Mosta (MT-32)
๐ด๓ ญ๓ ด๓ ณ๓ ต๓ ฟ Flag for Imtarfa (MT-35)
๐ด๓ ญ๓ ด๓ ฐ๓ ถ๓ ฟ Flag for Cospicua (MT-06)
๐ด๓ ญ๓ ด๓ ฐ๓ ณ๓ ฟ Flag for Birgu (MT-03)
๐ด๓ ญ๓ ฒ๓ ฑ๓ ด๓ ฟ Flag for Nouakchott Nord (MR-14)
๐ด๓ ญ๓ ด๓ ฑ๓ ฒ๓ ฟ Flag for Gลผira (MT-12)
๐ด๓ ญ๓ ด๓ ณ๓ ฐ๓ ฟ Flag for Mellieฤงa (MT-30)
๐ด๓ ญ๓ ด๓ ฑ๓ ท๓ ฟ Flag for Gฤงaxaq (MT-17)
๐ด๓ ญ๓ ด๓ ฑ๓ ธ๓ ฟ Flag for ฤฆamrun (MT-18)
๐ด๓ ญ๓ ด๓ ฐ๓ ธ๓ ฟ Flag for Fgura (MT-08)
๐ด๓ ญ๓ ด๓ ฐ๓ ฑ๓ ฟ Flag for Attard (MT-01)
๐ด๓ ญ๓ ด๓ ฑ๓ ต๓ ฟ Flag for Gฤงargฤงur (MT-15)
๐ด๓ ญ๓ ด๓ ฒ๓ ฑ๓ ฟ Flag for Kalkara (MT-21)
๐ด๓ ญ๓ ฒ๓ ฑ๓ ต๓ ฟ Flag for Nouakchott Sud (MR-15)
๐ด๓ ญ๓ ด๓ ฒ๓ ธ๓ ฟ Flag for Marsaxlokk (MT-28)
๐ด๓ ญ๓ ด๓ ด๓ ต๓ ฟ Flag for Victoria (MT-45)
๐ด๓ ญ๓ ด๓ ด๓ ฒ๓ ฟ Flag for Qala (MT-42)
๐ด๓ ญ๓ ด๓ ถ๓ ด๓ ฟ Flag for ลปabbar (MT-64)
๐ด๓ ญ๓ ต๓ ก๓ ง๓ ฟ Flag for Agalรฉga (MU-AG)
๐ด๓ ญ๓ ด๓ ต๓ ธ๓ ฟ Flag for Taโ Xbiex (MT-58)
๐ด๓ ญ๓ ด๓ ด๓ ฑ๓ ฟ Flag for Pietร (MT-41)
๐ด๓ ญ๓ ด๓ ต๓ ฒ๓ ฟ Flag for Sannat (MT-52)
๐ด๓ ญ๓ ต๓ ฐ๓ ฌ๓ ฟ Flag for Port Louis District (MU-PL)
๐ด๓ ญ๓ ด๓ ถ๓ ฑ๓ ฟ Flag for Xagฤงra (MT-61)
๐ด๓ ญ๓ ต๓ ข๓ ฌ๓ ฟ Flag for Riviรจre Noire (MU-BL)
๐ด๓ ญ๓ ด๓ ต๓ ถ๓ ฟ Flag for Sliema (MT-56)
๐ด๓ ญ๓ ด๓ ด๓ ท๓ ฟ Flag for Safi (MT-47)
๐ด๓ ญ๓ ต๓ ฆ๓ ฌ๓ ฟ Flag for Flacq (MU-FL)
๐ด๓ ญ๓ ด๓ ด๓ ฐ๓ ฟ Flag for Pembroke (MT-40)
๐ด๓ ญ๓ ด๓ ต๓ ท๓ ฟ Flag for Swieqi (MT-57)
๐ด๓ ญ๓ ต๓ ฃ๓ ต๓ ฟ Flag for Curepipe (MU-CU)
๐ด๓ ญ๓ ด๓ ถ๓ ธ๓ ฟ Flag for ลปurrieq (MT-68)
๐ด๓ ญ๓ ด๓ ด๓ น๓ ฟ Flag for San ฤ wann (MT-49)
๐ด๓ ญ๓ ต๓ ง๓ ฐ๓ ฟ Flag for Grand Port (MU-GP)
๐ด๓ ญ๓ ต๓ ฃ๓ ฃ๓ ฟ Flag for Cargados Carajos (MU-CC)
๐ด๓ ญ๓ ด๓ ด๓ ด๓ ฟ Flag for Qrendi (MT-44)
๐ด๓ ญ๓ ด๓ ถ๓ ฐ๓ ฟ Flag for Valletta (MT-60)
๐ด๓ ญ๓ ต๓ ฐ๓ ก๓ ฟ Flag for Pamplemousses (MU-PA)
๐ด๓ ญ๓ ด๓ ด๓ ณ๓ ฟ Flag for Qormi (MT-43)
๐ด๓ ญ๓ ต๓ ฐ๓ ต๓ ฟ Flag for Port Louis (MU-PU)
๐ด๓ ญ๓ ด๓ ต๓ น๓ ฟ Flag for Tarxien (MT-59)
๐ด๓ ญ๓ ด๓ ถ๓ ต๓ ฟ Flag for ลปebbuฤก Gozo (MT-65)
๐ด๓ ญ๓ ด๓ ต๓ ฐ๓ ฟ Flag for Saint Lawrence (MT-50)
๐ด๓ ญ๓ ด๓ ถ๓ ท๓ ฟ Flag for ลปejtun (MT-67)
๐ด๓ ญ๓ ด๓ ต๓ ฑ๓ ฟ Flag for St. Paulโs Bay (MT-51)
๐ด๓ ญ๓ ด๓ ต๓ ณ๓ ฟ Flag for Santa Luฤija (MT-53)
๐ด๓ ญ๓ ด๓ ถ๓ ถ๓ ฟ Flag for ลปebbuฤก (MT-66)
๐ด๓ ญ๓ ด๓ ด๓ ถ๓ ฟ Flag for Rabat (MT-46)
๐ด๓ ญ๓ ด๓ ต๓ ต๓ ฟ Flag for Siฤกฤกiewi (MT-55)
๐ฉ๐ฝโ๐ฉ๐ฝโ๐ง๐ฝ Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ญ๓ ด๓ ต๓ ด๓ ฟ Flag for Santa Venera (MT-54)
๐ด๓ ญ๓ ด๓ ถ๓ ณ๓ ฟ Flag for Xgฤงajra (MT-63)
๐ด๓ ญ๓ ต๓ ญ๓ ฏ๓ ฟ Flag for Moka (MU-MO)
๐ด๓ ญ๓ ธ๓ ญ๓ ฉ๓ ฃ๓ ฟ Flag for Michoacรกn (MX-MIC)
๐ด๓ ญ๓ ท๓ ฎ๓ ฟ Flag for Northern (MW-N)
๐ด๓ ญ๓ ถ๓ ต๓ ฎ๓ ฟ Flag for Upper North Province (MV-UN)
๐ด๓ ญ๓ ธ๓ ฃ๓ ฏ๓ ฌ๓ ฟ Flag for Colima (MX-COL)
๐ด๓ ญ๓ ต๓ ฒ๓ ฏ๓ ฟ Flag for Rodrigues (MU-RO)
๐ด๓ ญ๓ ธ๓ ง๓ ต๓ ก๓ ฟ Flag for Guanajuato (MX-GUA)
๐ด๓ ญ๓ ธ๓ ฃ๓ ญ๓ ธ๓ ฟ Flag for Ciudad de Mexico (MX-CMX)
๐ด๓ ญ๓ ธ๓ ฐ๓ ต๓ ฅ๓ ฟ Flag for Puebla (MX-PUE)
๐ด๓ ญ๓ ต๓ ฑ๓ ข๓ ฟ Flag for Quatre Bornes (MU-QB)
๐ด๓ ญ๓ ธ๓ ฏ๓ ก๓ ธ๓ ฟ Flag for Oaxaca (MX-OAX)
๐ด๓ ญ๓ ท๓ ฃ๓ ฟ Flag for Central (MW-C)
๐ด๓ ญ๓ ต๓ ณ๓ ก๓ ฟ Flag for Savanne (MU-SA)
๐ด๓ ญ๓ ธ๓ ญ๓ ฏ๓ ฒ๓ ฟ Flag for Morelos (MX-MOR)
๐ด๓ ญ๓ ธ๓ จ๓ ฉ๓ ค๓ ฟ Flag for Hidalgo (MX-HID)
๐ด๓ ญ๓ ธ๓ ก๓ ง๓ ต๓ ฟ Flag for Aguascalientes (MX-AGU)
๐ด๓ ญ๓ ธ๓ ฃ๓ ก๓ ญ๓ ฟ Flag for Campeche (MX-CAM)
๐ด๓ ญ๓ ธ๓ ฎ๓ ฌ๓ ฅ๓ ฟ Flag for Nuevo Leรณn (MX-NLE)
๐ด๓ ญ๓ ถ๓ ญ๓ ฌ๓ ฅ๓ ฟ Flag for Malรฉ (MV-MLE)
๐ด๓ ญ๓ ธ๓ ง๓ ฒ๓ ฏ๓ ฟ Flag for Guerrero (MX-GRO)
๐ด๓ ญ๓ ต๓ ถ๓ ฐ๓ ฟ Flag for Vacoas-Phoenix (MU-VP)
๐จ๐ปโ๐จ๐ปโ๐ฆ๐ปโ๐ง๐ป Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
๐ด๓ ญ๓ ถ๓ ฎ๓ ฃ๓ ฟ Flag for North Central Province (MV-NC)
๐ด๓ ญ๓ ธ๓ ญ๓ ฅ๓ ธ๓ ฟ Flag for Mexico State (MX-MEX)
๐ด๓ ญ๓ ต๓ ฐ๓ ท๓ ฟ Flag for Plaines Wilhems (MU-PW)
๐ด๓ ญ๓ ถ๓ ฃ๓ ฅ๓ ฟ Flag for Central Province (MV-CE)
๐ด๓ ญ๓ ธ๓ ฃ๓ ฏ๓ ก๓ ฟ Flag for Coahuila (MX-COA)
๐ด๓ ญ๓ ถ๓ ณ๓ ต๓ ฟ Flag for South Province (MV-SU)
๐ด๓ ญ๓ ธ๓ ฃ๓ จ๓ ฐ๓ ฟ Flag for Chiapas (MX-CHP)
๐ด๓ ญ๓ ท๓ ณ๓ ฟ Flag for Southern (MW-S)
๐ด๓ ญ๓ บ๓ ณ๓ ฟ Flag for Sofala (MZ-S)
๐ด๓ ญ๓ น๓ ฐ๓ น๓ ฟ Flag for Perlis (MY-09)
๐ด๓ ญ๓ ธ๓ ถ๓ ฅ๓ ฒ๓ ฟ Flag for Veracruz (MX-VER)
๐ด๓ ญ๓ น๓ ฑ๓ ณ๓ ฟ Flag for Sarawak (MY-13)
๐ด๓ ญ๓ น๓ ฐ๓ ณ๓ ฟ Flag for Kelantan (MY-03)
๐ด๓ ฎ๓ ก๓ ฃ๓ ก๓ ฟ Flag for Zambezi (NA-CA)
๐ด๓ ญ๓ บ๓ ข๓ ฟ Flag for Manica (MZ-B)
๐ด๓ ญ๓ น๓ ฑ๓ ต๓ ฟ Flag for Labuan (MY-15)
๐ด๓ ญ๓ บ๓ ฐ๓ ฟ Flag for Cabo Delgado (MZ-P)
๐ด๓ ฎ๓ ก๓ จ๓ ก๓ ฟ Flag for Hardap (NA-HA)
๐ด๓ ญ๓ บ๓ ด๓ ฟ Flag for Tete (MZ-T)
๐ด๓ ญ๓ น๓ ฐ๓ ฒ๓ ฟ Flag for Kedah (MY-02)
๐ด๓ ญ๓ น๓ ฐ๓ ถ๓ ฟ Flag for Pahang (MY-06)
๐ด๓ ญ๓ น๓ ฐ๓ ท๓ ฟ Flag for Penang (MY-07)
๐ด๓ ญ๓ น๓ ฐ๓ ธ๓ ฟ Flag for Perak (MY-08)
๐ด๓ ญ๓ บ๓ ฌ๓ ฟ Flag for Maputo Province (MZ-L)
๐ด๓ ข๓ ฒ๓ ง๓ ฏ๓ ฟ Flag for Goiรกs (BR-GO)
๐ด๓ ญ๓ น๓ ฑ๓ ฑ๓ ฟ Flag for Terengganu (MY-11)
๐ด๓ ญ๓ บ๓ ฉ๓ ฟ Flag for Inhambane (MZ-I)
๐ด๓ ญ๓ น๓ ฐ๓ ด๓ ฟ Flag for Malacca (MY-04)
๐ด๓ ฎ๓ ก๓ ฅ๓ ฒ๓ ฟ Flag for Erongo (NA-ER)
๐ด๓ ญ๓ ธ๓ ด๓ ฌ๓ ก๓ ฟ Flag for Tlaxcala (MX-TLA)
๐ด๓ ญ๓ น๓ ฐ๓ ต๓ ฟ Flag for Negeri Sembilan (MY-05)
๐ด๓ ญ๓ ธ๓ บ๓ ก๓ ฃ๓ ฟ Flag for Zacatecas (MX-ZAC)
๐ด๓ ญ๓ ธ๓ ด๓ ก๓ ญ๓ ฟ Flag for Tamaulipas (MX-TAM)
๐ด๓ ญ๓ บ๓ ก๓ ฟ Flag for Niassa (MZ-A)
๐ด๓ ญ๓ บ๓ ญ๓ ฐ๓ ญ๓ ฟ Flag for Maputo (MZ-MPM)
๐ด๓ ญ๓ บ๓ ฎ๓ ฟ Flag for Nampula (MZ-N)
๐ด๓ ญ๓ น๓ ฑ๓ ถ๓ ฟ Flag for Putrajaya (MY-16)
๐ด๓ ญ๓ ธ๓ ณ๓ ฉ๓ ฎ๓ ฟ Flag for Sinaloa (MX-SIN)
๐ด๓ ญ๓ ธ๓ น๓ ต๓ ฃ๓ ฟ Flag for Yucatรกn (MX-YUC)
๐ด๓ ญ๓ น๓ ฑ๓ ฒ๓ ฟ Flag for Sabah (MY-12)
๐ฉ๐ผโ๐ฉ๐ผโ๐ง๐ผโ๐ง๐ผ Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
๐ด๓ ญ๓ บ๓ ฑ๓ ฟ Flag for Zambezia (MZ-Q)
๐ด๓ ญ๓ ธ๓ ฑ๓ ต๓ ฅ๓ ฟ Flag for Querรฉtaro (MX-QUE)
๐ด๓ ญ๓ บ๓ ง๓ ฟ Flag for Gaza (MZ-G)
๐ด๓ ฎ๓ ก๓ ฏ๓ ค๓ ฟ Flag for Otjozondjupa (NA-OD)
๐ด๓ ฎ๓ ฅ๓ ด๓ ฟ Flag for Maradi (NE-4)
๐ด๓ ฎ๓ ก๓ ซ๓ ต๓ ฟ Flag for Kunene (NA-KU)
๐ด๓ ฎ๓ ง๓ ก๓ ซ๓ ฟ Flag for Akwa Ibom (NG-AK)
๐ด๓ ฎ๓ ฅ๓ ต๓ ฟ Flag for Tahoua (NE-5)
๐ด๓ ญ๓ ต๓ ฒ๓ ฒ๓ ฟ Flag for Riviรจre du Rempart (MU-RR)
๐ด๓ ฎ๓ ง๓ ฉ๓ ญ๓ ฟ Flag for Imo (NG-IM)
๐ด๓ ฎ๓ ง๓ ซ๓ ด๓ ฟ Flag for Katsina (NG-KT)
๐ด๓ ฎ๓ ฅ๓ ณ๓ ฟ Flag for Dosso (NE-3)
๐ด๓ ฎ๓ ฅ๓ ถ๓ ฟ Flag for Tillabรฉri (NE-6)
๐ด๓ ฎ๓ ง๓ ฅ๓ ซ๓ ฟ Flag for Ekiti (NG-EK)
๐ด๓ ฎ๓ ก๓ ฏ๓ จ๓ ฟ Flag for Omaheke (NA-OH)
๐ด๓ ฎ๓ ง๓ ข๓ ก๓ ฟ Flag for Bauchi (NG-BA)
๐ด๓ ฎ๓ ก๓ ซ๓ ก๓ ฟ Flag for Karas (NA-KA)
๐ด๓ ฎ๓ ง๓ ข๓ น๓ ฟ Flag for Bayelsa (NG-BY)
๐ด๓ ฎ๓ ก๓ ฏ๓ ท๓ ฟ Flag for Ohangwena (NA-OW)
๐ด๓ ฎ๓ ง๓ ข๓ ฅ๓ ฟ Flag for Benue (NG-BE)
๐ด๓ ฎ๓ ง๓ ฅ๓ ฎ๓ ฟ Flag for Enugu (NG-EN)
๐ด๓ ฎ๓ ก๓ ฏ๓ ฎ๓ ฟ Flag for Oshana (NA-ON)
๐ด๓ ฎ๓ ง๓ ซ๓ ค๓ ฟ Flag for Kaduna (NG-KD)
๐จ๐ปโ๐ถ๐ปโ๐ฆ๐ป Family - Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
๐ด๓ ฎ๓ ง๓ ซ๓ ฅ๓ ฟ Flag for Kebbi (NG-KE)
๐ด๓ ฎ๓ ง๓ ช๓ ฉ๓ ฟ Flag for Jigawa (NG-JI)
๐ด๓ ฎ๓ ฅ๓ ธ๓ ฟ Flag for Niamey (NE-8)
๐ด๓ ฎ๓ ง๓ ก๓ ฎ๓ ฟ Flag for Anambra (NG-AN)
๐ด๓ ฎ๓ ง๓ ง๓ ฏ๓ ฟ Flag for Gombe (NG-GO)
๐ด๓ ฎ๓ ฅ๓ ฑ๓ ฟ Flag for Agadez (NE-1)
๐ด๓ ฎ๓ ก๓ ซ๓ จ๓ ฟ Flag for Khomas (NA-KH)
๐ด๓ ฎ๓ ฅ๓ ฒ๓ ฟ Flag for Diffa (NE-2)
๐ด๓ ญ๓ น๓ ฐ๓ ฑ๓ ฟ Flag for Johor (MY-01)
๐ด๓ ฎ๓ ง๓ ซ๓ ฎ๓ ฟ Flag for Kano (NG-KN)
๐ด๓ ฎ๓ ก๓ ฏ๓ ณ๓ ฟ Flag for Omusati (NA-OS)
๐ด๓ ฎ๓ ง๓ ซ๓ ฏ๓ ฟ Flag for Kogi (NG-KO)
๐ด๓ ฎ๓ ง๓ ฅ๓ ค๓ ฟ Flag for Edo (NG-ED)
๐ด๓ ฎ๓ ง๓ ก๓ ข๓ ฟ Flag for Abia (NG-AB)
๐ด๓ ฎ๓ ก๓ ฏ๓ ด๓ ฟ Flag for Oshikoto (NA-OT)
๐ด๓ ฎ๓ ก๓ ซ๓ ท๓ ฟ Flag for Kavango West (NA-KW)
๐ด๓ ฎ๓ ง๓ ฅ๓ ข๓ ฟ Flag for Ebonyi (NG-EB)
๐ด๓ ฎ๓ ฅ๓ ท๓ ฟ Flag for Zinder (NE-7)
๐ด๓ ฎ๓ ฉ๓ ช๓ ฉ๓ ฟ Flag for Jinotega (NI-JI)
๐ด๓ ฎ๓ ง๓ ฎ๓ ก๓ ฟ Flag for Nasarawa (NG-NA)
๐ด๓ ฎ๓ ฌ๓ ฆ๓ ฒ๓ ฟ Flag for Friesland (NL-FR)
๐ด๓ ฎ๓ ง๓ ณ๓ ฏ๓ ฟ Flag for Sokoto (NG-SO)
๐ด๓ ฎ๓ ฉ๓ ฒ๓ ฉ๓ ฟ Flag for Rivas (NI-RI)
๐ด๓ ฎ๓ ฉ๓ ฎ๓ ณ๓ ฟ Flag for Nueva Segovia (NI-NS)
๐ด๓ ฎ๓ ง๓ ฐ๓ ฌ๓ ฟ Flag for Plateau (NG-PL)
๐ด๓ ฎ๓ ง๓ น๓ ฏ๓ ฟ Flag for Yobe (NG-YO)
๐ด๓ ฎ๓ ฌ๓ ข๓ ฑ๓ ฑ๓ ฟ Flag for Bonaire (NL-BQ1)
๐ด๓ ฎ๓ ฉ๓ ก๓ ฎ๓ ฟ Flag for Atlรกntico Norte (NI-AN)
๐ด๓ ฎ๓ ง๓ บ๓ ก๓ ฟ Flag for Zamfara (NG-ZA)
๐ด๓ ฎ๓ ฌ๓ ง๓ ฅ๓ ฟ Flag for Gelderland (NL-GE)
๐ด๓ ฎ๓ ง๓ ฏ๓ น๓ ฟ Flag for Oyo (NG-OY)
๐ด๓ ฎ๓ ฉ๓ ญ๓ ค๓ ฟ Flag for Madriz (NI-MD)
๐ด๓ ฎ๓ ฉ๓ ฃ๓ ฉ๓ ฟ Flag for Chinandega (NI-CI)
๐ด๓ ฎ๓ ง๓ ฏ๓ ฎ๓ ฟ Flag for Ondo (NG-ON)
๐จ๐ฝโ๐จ๐ฝโ๐ฆ๐ฝโ๐ง๐ฝ Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
๐ด๓ ค๓ ฅ๓ ฎ๓ ท๓ ฟ Flag for North Rhine-Westphalia (DE-NW)
๐ด๓ ฎ๓ ง๓ ฌ๓ ก๓ ฟ Flag for Lagos (NG-LA)
๐ด๓ ฎ๓ ฉ๓ ญ๓ ฎ๓ ฟ Flag for Managua (NI-MN)
๐ด๓ ฎ๓ ฉ๓ ก๓ ณ๓ ฟ Flag for Atlรกntico Sur (NI-AS)
๐ด๓ ฎ๓ ฌ๓ ฃ๓ ท๓ ฟ Flag for Curaรงao (NL-CW)
๐ด๓ ฎ๓ ฉ๓ ข๓ ฏ๓ ฟ Flag for Boaco (NI-BO)
๐ด๓ ฎ๓ ง๓ ฒ๓ ฉ๓ ฟ Flag for Rivers (NG-RI)
๐ด๓ ฎ๓ ฉ๓ ง๓ ฒ๓ ฟ Flag for Granada (NI-GR)
๐ด๓ ฎ๓ ฉ๓ ฃ๓ ฏ๓ ฟ Flag for Chontales (NI-CO)
๐ด๓ ฎ๓ ฌ๓ ง๓ ฒ๓ ฟ Flag for Groningen (NL-GR)
๐ด๓ ฎ๓ ฌ๓ ข๓ ฑ๓ ณ๓ ฟ Flag for Sint Eustatius (NL-BQ3)
๐ด๓ ฎ๓ ฉ๓ ณ๓ ช๓ ฟ Flag for Rรญo San Juan (NI-SJ)
๐ด๓ ฎ๓ ง๓ ฏ๓ ณ๓ ฟ Flag for Osun (NG-OS)
๐ด๓ ฎ๓ ง๓ ด๓ ก๓ ฟ Flag for Taraba (NG-TA)
๐ด๓ ฎ๓ ฌ๓ ฆ๓ ฌ๓ ฟ Flag for Flevoland (NL-FL)
๐ด๓ ฎ๓ ฉ๓ ญ๓ ด๓ ฟ Flag for Matagalpa (NI-MT)
๐ด๓ ฎ๓ ฌ๓ ค๓ ฒ๓ ฟ Flag for Drenthe (NL-DR)
๐ด๓ ฎ๓ ฉ๓ ฃ๓ ก๓ ฟ Flag for Carazo (NI-CA)
๐ด๓ ฎ๓ ง๓ ซ๓ ท๓ ฟ Flag for Kwara (NG-KW)
๐ด๓ ฎ๓ ง๓ ฎ๓ ฉ๓ ฟ Flag for Niger (NG-NI)
๐ด๓ ฎ๓ ฉ๓ ฅ๓ ณ๓ ฟ Flag for Estelรญ (NI-ES)
๐ด๓ ฎ๓ ฌ๓ บ๓ จ๓ ฟ Flag for South Holland (NL-ZH)
"""
# Emit one "description<TAB>:emoji" record per entry of the emojis table
for entry in emojis.splitlines():
    tokens = entry.split()
    glyph = tokens[0]
    description = " ".join(tokens[1:])
    print("{}\t:{}".format(description, glyph))
| 257,168 | 215,277 |
#!/usr/bin/env python
""" ngc - n-grams count
License: 3-clause BSD (see https://opensource.org/licenses/BSD-3-Clause)
Author: Hubert Tournier
"""
import getopt
import logging
import os
import re
import string
import sys
import unicode2ascii
# Version string used by the what(1) and ident(1) commands:
ID = "@(#) $Id: ngc - n-grams count v1.0.2 (September 26, 2021) by Hubert Tournier $"
# Default parameters. Can be superseded by command line options
parameters = {
    # Character conversions applied to each input line (see process_line):
    "Convert": {
        "Unicode to ASCII": False,
        "Upper to lower case": False,
        "Lower to upper case": False,
        "Spaces to one space": False,
    },
    # Character classes removed from each input line before analysis:
    "Discard": {
        "Unicode characters": False,
        "Upper case letters": False,
        "Lower case letters": False,
        "Connection symbols": False,  # ' -
        "Digits": False,
        "Punctuation": False,  # . , ; : ! ?
        "Other printable symbols": False,
        "Spaces": False,  # space tab return formfeed vtab
        "Control characters": False,
    },
    # Size of the n-grams to count:
    "Length": 1,
    "Fixed block": False,  # Sliding-window mode by default
    # When True, n-grams never span across whitespace-delimited words:
    "Word boundary": False,
    # Policy for blocks shorter than "Length" (exactly one of these is applied):
    "Partial": {
        "Discard": False,
        "Keep": True,
        "Justify": False,
    },
    # Which outputs to produce:
    "Show": {
        "Text": False,
        "N-grams": True,
        "Summary": False,
    },
}
# Occurrence count per n-gram, filled by frequency_analysis()
occurrences = {}
# Per-character-class counters, filled by frequency_analysis() and process_line()
summary = {
    "Upper case letters": 0,
    "Lower case letters": 0,
    "Connection symbols": 0,
    "Digits": 0,
    "Punctuation": 0,
    "Other printable symbols": 0,
    "Spaces": 0,
    "Other spaces": 0,
    "Control characters": 0,
    "Unicode letters": 0,
    "Unicode marks": 0,
    "Unicode numbers": 0,
    "Unicode punctuations": 0,
    "Unicode symbols": 0,
    "Unicode separators": 0,
    "Unicode others": 0,
    "All unicode characters": 0,
    "All characters": 0,
    "All n-grams": 0
}
################################################################################
def initialize_debugging(program_name):
    """Set up console logging for *program_name* and mute INFO and below."""
    log_format = "{}: %(levelname)s: %(message)s".format(program_name)
    logging.basicConfig(format=log_format, level=logging.DEBUG)
    # Debug output stays off until --debug or NGC_DEBUG re-enables it
    logging.disable(logging.INFO)
################################################################################
def display_help():
    """Print the usage message and option reference on standard error."""
    usage_lines = (
        "usage: ngc [-b|--block] [-c|--convert ARGS] [--debug]",
        " [-d|--discard ARGS] [--help|-?] [-l|--length ARG]",
        " [-p|--partial ARG] [-q|--quiet] [-s|--summary] [-t|--text]",
        " [--version] [-w|--word] [--] [filename ...]",
        " ----------------- ----------------------------------------------------",
        " -b|--block Use fixed- instead of sliding-windows blocks",
        " -c|--convert ARGS Convert text input. A combination of:",
        " ARG = a - Unicode characters to ASCII (remove accents)",
        " ARG = l - Upper case letters to lower",
        " ARG = u - Lower case letters to upper",
        " ARG = s - Spaces-like characters to 1 space",
        " ARGS l and u can't be used at the same time",
        " -d|--discard ARGS Discard characters. A combination of:",
        " ARG = U - Unicode characters",
        " ARG = u - Upper case letters",
        " ARG = l - Lower case letters",
        " ARG = L - All letters",
        " ARG = c - Connection symbols ('-)",
        " ARG = d - Digits",
        " ARG = p - Punctuation (.,;:!?)",
        " ARG = o - Other printable symbols",
        " ARG = s - Spaces (space, tab, return, formfeed, vtab)",
        " ARG = n - Non printable Control characters",
        " -l|--length ARG Length of the n-gram. Defaults to 1",
        " -p|--partial ARG What to do with partial blocks? One among:",
        " ARG = d - Discard",
        " ARG = k - Keep as-is",
        " ARG = j - Keep but right-justify with spaces",
        " -q|--quiet Don't show occurrences and frequency by n-gram",
        " -s|--summary Show a summary of what was processed",
        " -t|--text Show modified text input",
        " -w|--word Respect Word boundaries (delimited by spaces)",
        " --debug Enable debug mode",
        " --help|-? Print usage and this help message and exit",
        " --version Print version and exit",
        " -- Options processing terminator",
        "",  # trailing blank line, as in the original output
    )
    for usage_line in usage_lines:
        print(usage_line, file=sys.stderr)
################################################################################
def process_environment_variables():
    """Re-enable debug logging when the NGC_DEBUG environment variable is set.

    Only the variable's presence matters; its value is ignored.
    """
    # Membership test directly on the mapping -- no need for .keys()
    if "NGC_DEBUG" in os.environ:
        logging.disable(logging.NOTSET)
################################################################################
def process_command_line():
    """Parse command-line options into the global parameters dictionary.

    Returns the list of remaining (non-option) arguments, i.e. filenames.
    Exits with status 1 on invalid options/arguments, and with status 0
    after --help or --version.
    """
    # pylint: disable=C0103
    global parameters
    # pylint: enable=C0103
    # option letters followed by : expect an argument
    # same for option strings followed by =
    character_options = "bc:d:l:p:qstw?"
    string_options = [
        "block",
        "convert=",
        "debug",
        "discard=",
        "help",
        "length=",
        "partial=",
        "quiet",
        "summary",
        "text",
        "version",
        "word",
    ]
    try:
        options, remaining_arguments = getopt.getopt(
            sys.argv[1:], character_options, string_options
        )
    except getopt.GetoptError as error:
        logging.critical(error)
        display_help()
        sys.exit(1)
    for option, argument in options:
        if option in ("-b", "--block"):
            parameters["Fixed block"] = True
        elif option in ("-c", "--convert"):
            # l and u are mutually exclusive conversions:
            if 'l' in argument and 'u' in argument:
                logging.critical("-c|--convert parameter can't contain [lu] at the same time")
                sys.exit(1)
            if 'a' in argument:
                parameters["Convert"]["Unicode to ASCII"] = True
            if 'l' in argument:
                parameters["Convert"]["Upper to lower case"] = True
            if 'u' in argument:
                parameters["Convert"]["Lower to upper case"] = True
            if 's' in argument:
                parameters["Convert"]["Spaces to one space"] = True
        elif option in ("-d", "--discard"):
            if 'U' in argument:
                parameters["Discard"]["Unicode characters"] = True
            if 'u' in argument:
                parameters["Discard"]["Upper case letters"] = True
            if 'l' in argument:
                parameters["Discard"]["Lower case letters"] = True
            if 'L' in argument:
                # L is shorthand for discarding letters of both cases
                parameters["Discard"]["Upper case letters"] = True
                parameters["Discard"]["Lower case letters"] = True
            if 'c' in argument:
                parameters["Discard"]["Connection symbols"] = True
            if 'd' in argument:
                parameters["Discard"]["Digits"] = True
            if 'p' in argument:
                parameters["Discard"]["Punctuation"] = True
            if 'o' in argument:
                parameters["Discard"]["Other printable symbols"] = True
            if 's' in argument:
                parameters["Discard"]["Spaces"] = True
            if 'n' in argument:
                parameters["Discard"]["Control characters"] = True
        elif option in ("-l", "--length"):
            # BUG FIX: the old test was int(argument) >= 0, which accepted 0
            # although the error message promises a *strictly* positive
            # integer (and a 0 length would yield empty n-grams downstream).
            if argument.isdigit() and int(argument) > 0:
                parameters["Length"] = int(argument)
            else:
                logging.critical("-l|--length parameter must be a strictly positive integer")
                sys.exit(1)
        elif option in ("-p", "--partial"):
            if len(argument) > 1 or argument not in ('d', 'k', 'j'):
                logging.critical("-p|--partial parameter must be a single character among [dkj]")
                sys.exit(1)
            # "Keep" is the default; d and j override it:
            if argument == 'd':
                parameters["Partial"]["Discard"] = True
                parameters["Partial"]["Keep"] = False
            elif argument == 'j':
                parameters["Partial"]["Justify"] = True
                parameters["Partial"]["Keep"] = False
        elif option in ("-q", "--quiet"):
            parameters["Show"]["N-grams"] = False
        elif option in ("-s", "--summary"):
            parameters["Show"]["Summary"] = True
        elif option in ("-t", "--text"):
            parameters["Show"]["Text"] = True
        elif option in ("-w", "--word"):
            parameters["Word boundary"] = True
        elif option == "--debug":
            logging.disable(logging.NOTSET)
        elif option in ("--help", "-?"):
            display_help()
            sys.exit(0)
        elif option == "--version":
            # The marker is split so that ident(1) doesn't match this line itself
            print(ID.replace("@(" + "#)" + " $" + "Id" + ": ", "").replace(" $", ""))
            sys.exit(0)
    logging.debug("process_command_line(): parameters:")
    logging.debug(parameters)
    logging.debug("process_command_line(): remaining_arguments:")
    logging.debug(remaining_arguments)
    return remaining_arguments
################################################################################
def handle_partial_n_gram(text):
    """Record a partial n-gram (shorter than the configured length).

    Applies the Partial policy: discard it, keep it as-is, or right-pad
    it with spaces to the full n-gram length, then count it in the
    global occurrences and summary tallies.
    """
    # pylint: disable=C0103
    global occurrences, summary
    # pylint: enable=C0103
    if parameters["Partial"]["Discard"]:
        return
    if parameters["Partial"]["Justify"]:
        # Right-pad with spaces up to the configured n-gram length
        text = text.ljust(parameters["Length"])
    occurrences[text] = occurrences.get(text, 0) + 1
    summary["All n-grams"] += 1
################################################################################
def frequency_analysis(text):
    """Analyze n-grams frequency in a string.

    Updates the global occurrences and summary counters with every n-gram
    of parameters["Length"] characters found in text: sliding window by
    default, fixed consecutive blocks when parameters["Fixed block"] is set.
    """
    # pylint: disable=C0103
    global occurrences, summary
    # pylint: enable=C0103
    if parameters["Show"]["Summary"]:
        # Classify each character into the summary counters used later for
        # reporting and for estimating the alphabet size:
        for character in text:
            if ord(character) < 128:
                if character in string.ascii_uppercase:
                    summary["Upper case letters"] += 1
                elif character in string.ascii_lowercase:
                    summary["Lower case letters"] += 1
                elif character in ("'", "-"):
                    summary["Connection symbols"] += 1
                elif character in string.digits:
                    summary["Digits"] += 1
                elif character in (".", ",", ";", ":", "!", "?"):
                    summary["Punctuation"] += 1
                elif character == " ":
                    summary["Spaces"] += 1
                elif character in string.whitespace:
                    # tab, newline, return, formfeed, vtab (plain space above)
                    summary["Other spaces"] += 1
                elif (ord(character) < 32 and ord(character) not in (9, 11, 12, 13)) \
                        or ord(character) == 127:
                    # C0 control codes not already counted as whitespace, plus DEL
                    summary["Control characters"] += 1
                else:
                    summary["Other printable symbols"] += 1
            else:
                summary["All unicode characters"] += 1
                if unicode2ascii.is_unicode_letter(character):
                    summary["Unicode letters"] += 1
                elif unicode2ascii.is_unicode_mark(character):
                    summary["Unicode marks"] += 1
                elif unicode2ascii.is_unicode_number(character):
                    summary["Unicode numbers"] += 1
                elif unicode2ascii.is_unicode_punctuation(character):
                    summary["Unicode punctuations"] += 1
                elif unicode2ascii.is_unicode_symbol(character):
                    summary["Unicode symbols"] += 1
                elif unicode2ascii.is_unicode_separator(character):
                    summary["Unicode separators"] += 1
                else:
                    summary["Unicode others"] += 1
    if len(text) <= parameters["Length"]:
        # Text no longer than one n-gram: route it through the partial-block
        # policy (a text of exactly Length characters is counted as-is there,
        # since no padding is needed)
        if text:
            handle_partial_n_gram(text)
    else:
        i = 0
        while i < len(text) + 1 - parameters["Length"]:
            sequence = text[i:i + parameters["Length"]]
            if sequence in occurrences:
                occurrences[sequence] += 1
            else:
                occurrences[sequence] = 1
            summary["All n-grams"] += 1
            # Fixed-block mode advances a full n-gram; sliding mode one char:
            if parameters["Fixed block"]:
                i += parameters["Length"]
            else:
                i += 1
        if i < len(text):
            # Leftover tail shorter than Length (fixed-block remainder, or the
            # final Length-1 characters in sliding mode) follows the partial
            # policy -- NOTE(review): in sliding mode this counts a trailing
            # partial window even though every character was already covered;
            # presumably intentional, verify against upstream ngc
            handle_partial_n_gram(text[i:])
################################################################################
def process_line(line):
    """Process a text line.

    Applies the configured conversions and discards in a fixed order, then
    feeds the result (whole line, or word by word when Word boundary is set)
    to frequency_analysis() and updates the global character count.
    """
    # pylint: disable=C0103
    global summary
    # pylint: enable=C0103
    # Strip only the trailing end-of-line characters:
    line = line.rstrip(os.linesep)
    # Conversions:
    if parameters["Convert"]["Unicode to ASCII"]:
        line = unicode2ascii.unicode_to_ascii_string(line)
    if parameters["Convert"]["Upper to lower case"]:
        line = line.lower()
    if parameters["Convert"]["Lower to upper case"]:
        line = line.upper()
    # Discards (order matters: case discards run after case conversions):
    if parameters["Discard"]["Unicode characters"]:
        line = "".join([c for c in line if ord(c) < 128])
    if parameters["Discard"]["Upper case letters"]:
        line = re.sub(r"[A-Z]+", "", line)
    if parameters["Discard"]["Lower case letters"]:
        line = re.sub(r"[a-z]+", "", line)
    if parameters["Discard"]["Connection symbols"]:
        line = re.sub(r"[-']+", "", line)
    if parameters["Discard"]["Digits"]:
        line = re.sub(r"[0-9]+", "", line)
    if parameters["Discard"]["Punctuation"]:
        line = re.sub(r"[\.,;:!\?]+", "", line)
    if parameters["Discard"]["Other printable symbols"]:
        line = re.sub(r"[\"#$&@\[\\\]_`{|}~%()\*+/<=>^]+", "", line)
    if parameters["Discard"]["Spaces"]:
        line = re.sub(r"[" + string.whitespace + r"]+", "", line)
    if parameters["Discard"]["Control characters"]:
        # Drop codes 0-8, 14-31 and DEL (127); tab/newline/vtab/formfeed/return
        # are handled by the Spaces discard above
        line = "".join(
            [c for c in line if not (ord(c) < 9 or (ord(c) > 13 and ord(c) < 32) or ord(c) == 127)]
        )
    # Late conversions:
    if parameters["Convert"]["Spaces to one space"]:
        line = re.sub(r"[" + string.whitespace + r"]+", " ", line)
    if parameters["Show"]["Text"]:
        print(line)
    if parameters["Word boundary"]:
        # Splitting words on all kind of whitespaces:
        for word in line.split():
            if word:
                frequency_analysis(word)
                summary["All characters"] += len(word)
    else:
        frequency_analysis(line)
        summary["All characters"] += len(line)
################################################################################
def process_file(filename):
    """Process the file designated by filename, line by line."""
    with open(filename, "r") as file:
        # Iterate lazily instead of materializing the whole file with
        # readlines() -- same lines, constant memory
        for line in file:
            process_line(line)
################################################################################
def compute_kappa_plaintext():
    """Return kappa-plaintext for the processed input stream.

    Computed from the module-level ``occurrences`` counts and the
    "All n-grams" total in ``summary``. Returns 0.0 when fewer than two
    n-grams were seen — the original formula would otherwise divide by zero.
    """
    # pylint: disable=C0103
    global occurrences, summary
    # pylint: enable=C0103
    # See https://en.wikipedia.org/wiki/Index_of_coincidence
    total = summary["All n-grams"]
    if total < 2:
        # Degenerate input (empty or single n-gram): denominator n*(n-1) is 0.
        return 0.0
    index = sum(occurrence * (occurrence - 1) for occurrence in occurrences.values())
    return index / (total * (total - 1))
################################################################################
def compute_coincidence_index(kappa_plaintext):
    """Return the coincidence index for kappa_plaintext and the observed alphabet.

    The alphabet size is inferred from which character categories were seen
    (recorded in the module-level ``summary``). Returns 0 when Unicode
    separators were encountered, because the alphabet size is then unknown.
    """
    # pylint: disable=C0103
    global summary
    # pylint: enable=C0103
    if summary["Unicode separators"]:
        # Unknown alphabet size
        return 0
    # Each observed category contributes a fixed number of symbols:
    contributions = (
        ("Upper case letters", len(string.ascii_uppercase)),
        ("Lower case letters", len(string.ascii_lowercase)),
        ("Digits", len(string.digits)),
        ("Connection symbols", len("'-")),
        ("Punctuation", len(".,;:?!")),
        ("Other printable symbols", len("\"#$&@[\\]_`{|}~%()*+/<=>^")),
        ("Spaces", 1),
        ("Other spaces", len(string.whitespace) - 1),
        ("Control characters", 29),
    )
    alphabet_size = sum(size for key, size in contributions if summary[key])
    return kappa_plaintext * alphabet_size
################################################################################
def main():
    """The program's main entry point.

    Reads text from the files given as arguments (or stdin when none),
    then optionally prints per-n-gram statistics and a summary including
    kappa-plaintext and the index of coincidence.
    """
    program_name = os.path.basename(sys.argv[0])
    initialize_debugging(program_name)
    process_environment_variables()
    arguments = process_command_line()
    exit_status = 0
    # Reading from files whose name were given as arguments:
    if len(arguments):
        for filename in arguments:
            if os.path.isfile(filename):
                process_file(filename)
            else:
                # A bad argument is reported but does not stop the run.
                logging.error("The argument '%s' is not a filename", filename)
                exit_status = 1
    # Reading from standard input as there are no arguments:
    else:
        for line in sys.stdin:
            process_line(line)
    # Displaying occurrences and frequency by n-gram:
    if parameters["Show"]["N-grams"]:
        if parameters["Show"]["Text"]:
            print("--")
        # Sort n-grams by decreasing occurrence count.
        decreasing_occurrences = dict(sorted(occurrences.items(), key=lambda t: t[1], reverse=True))
        for key, value in decreasing_occurrences.items():
            print("'{}'\t{}\t{:.2f}%".format(key, value, (value/summary["All n-grams"])*100))
    # Displaying summary:
    if parameters["Show"]["Summary"]:
        print("==")
        for key, value in summary.items():
            print("{:23s}\t{:d}".format(key, value))
        print()
        kappa_plaintext = compute_kappa_plaintext()
        coincidence_index = compute_coincidence_index(kappa_plaintext)
        print("{:23s}\t{}".format("Kappa-plaintext", kappa_plaintext))
        print("{:23s}\t{}".format("Index of coincidence", coincidence_index))
    sys.exit(exit_status)
if __name__ == "__main__":
    main()
| 19,186 | 5,560 |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from openquake.hazardlib.gsim.campbell_2003 import (
Campbell2003,
Campbell2003SHARE,
Campbell2003MblgAB1987NSHMP2008,
Campbell2003MblgJ1996NSHMP2008,
Campbell2003MwNSHMP2008
)
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
import numpy
# Test data generated from OpenSHA implementation.
class Campbell2003TestCase(BaseGSIMTestCase):
    """Regression tests for Campbell (2003) against OpenSHA-generated tables."""

    GSIM_CLASS = Campbell2003

    def test_mean(self):
        # Mean ground motion, 0.1% allowed discrepancy.
        self.check('C03/C03_MEAN.csv', max_discrep_percentage=0.1)

    def test_std_total(self):
        # Total standard deviation, 0.1% allowed discrepancy.
        self.check('C03/C03_STD_TOTAL.csv', max_discrep_percentage=0.1)
class Campbell2003SHARETestCase(BaseGSIMTestCase):
    """Regression tests for the SHARE adjustment of Campbell (2003)."""

    GSIM_CLASS = Campbell2003SHARE

    def test_mean(self):
        self.check('C03/C03SHARE_MEAN.csv', max_discrep_percentage=0.1)

    def test_std_total(self):
        self.check('C03/C03SHARE_STD_TOTAL.csv', max_discrep_percentage=0.1)
class Campbell2003MblgAB1987NSHMP2008TestCase(BaseGSIMTestCase):
    """Regression tests for the Mblg (Atkinson & Boore 1987) NSHMP 2008 variant.

    Expected values generated from ``subroutine getCampCEUS`` in
    ``hazgridXnga2.f``.
    """

    GSIM_CLASS = Campbell2003MblgAB1987NSHMP2008

    def test_mean(self):
        self.check('C03/C03MblgAB1987NSHMP2008_MEAN.csv', max_discrep_percentage=0.1)

    def test_std_total(self):
        self.check('C03/C03MblgAB1987NSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1)
class Campbell2003MblgJ1996NSHMP2008TestCase(BaseGSIMTestCase):
    """Regression tests for the Mblg (Johnston 1996) NSHMP 2008 variant.

    Expected values generated from ``subroutine getCampCEUS`` in
    ``hazgridXnga2.f``.
    """

    GSIM_CLASS = Campbell2003MblgJ1996NSHMP2008

    def test_mean(self):
        self.check('C03/C03MblgJ1996NSHMP2008_MEAN.csv', max_discrep_percentage=0.1)

    def test_std_total(self):
        self.check('C03/C03MblgJ1996NSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1)
class Campbell2003MwNSHMP2008TestCase(BaseGSIMTestCase):
    """Regression tests for the Mw NSHMP 2008 variant of Campbell (2003).

    Expected values generated from ``subroutine getCampCEUS`` in
    ``hazgridXnga2.f``.
    """

    GSIM_CLASS = Campbell2003MwNSHMP2008

    def test_mean(self):
        self.check('C03/C03MwNSHMP2008_MEAN.csv', max_discrep_percentage=0.1)

    def test_std_total(self):
        self.check('C03/C03MwNSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1)
| 3,104 | 1,293 |
# -*- coding: utf-8 -*-
"""Time Series Forest Regressor (TSF)."""
__author__ = ["Tony Bagnall", "kkoziara", "luiszugasti", "kanand77", "Markus Lรถning"]
__all__ = ["TimeSeriesForestRegressor"]
import numpy as np
from joblib import Parallel, delayed
from sklearn.ensemble._forest import ForestRegressor
from sklearn.tree import DecisionTreeRegressor
from sktime.regression.base import BaseRegressor
from sktime.series_as_features.base.estimators.interval_based._tsf import (
BaseTimeSeriesForest,
_transform,
)
class TimeSeriesForestRegressor(BaseTimeSeriesForest, ForestRegressor, BaseRegressor):
    """Time series forest regressor.
    A time series forest is an ensemble of decision trees built on random intervals.
    Overview: For input data with n series of length m, for each tree:
    - sample sqrt(m) intervals,
    - find mean, std and slope for each interval, concatenate to form new data set,
    - build decision tree on new data set.
    Ensemble the trees by averaging their predictions.
    This implementation deviates from the original in minor ways. It samples
    intervals with replacement and does not use the splitting criteria tiny
    refinement described in [1]_. This is an intentionally stripped down, non
    configurable version for use as a HIVE-COTE component.
    Parameters
    ----------
    n_estimators : int, default=200
        Number of estimators.
    min_interval : int, default=3
        Minimum width of an interval.
    n_jobs : int, default=1
        The number of jobs to run in parallel for both `fit` and `predict`.
        ``-1`` means using all processors.
    random_state : int, default=None
    Attributes
    ----------
    n_intervals : int
        Number of intervals sampled per tree (set during fitting).
    See Also
    --------
    TimeSeriesForestClassifier
    References
    ----------
    .. [1] H.Deng, G.Runger, E.Tuv and M.Vladimir, "A time series forest for
       classification and feature extraction", Information Sciences, 239, 2013
    .. [2] Java implementation https://github.com/uea-machine-learning/tsml
    .. [3] Arxiv paper: https://arxiv.org/abs/1302.2277
    """
    # sktime estimator tags: univariate input only, stored as a 3D numpy panel.
    _tags = {
        "capability:multivariate": False,
        "X_inner_mtype": "numpy3D",
    }
    # Prototype tree cloned by the forest machinery for each ensemble member.
    _base_estimator = DecisionTreeRegressor()
    def fit(self, X, y):
        """Override sklearn forest fit with BaseRegressor fit."""
        # Route fitting through sktime's BaseRegressor (input checks/conversion),
        # bypassing ForestRegressor.fit from the sklearn MRO.
        return BaseRegressor.fit(self, X, y)
    def _fit(self, X, y):
        """Wrap BaseForest._fit.
        This is a temporary measure prior to the BaseRegressor refactor.
        """
        return BaseTimeSeriesForest._fit(self, X, y)
    def predict(self, X):
        """Override sklearn forest predict with BaseRegressor predict."""
        return BaseRegressor.predict(self, X)
    def _predict(self, X):
        """Predict.
        Parameters
        ----------
        X : pd.DataFrame or np.ndarray
            Panel data
        Returns
        -------
        np.ndarray
            Predictions, averaged over all trees in the ensemble.
        Raises
        ------
        TypeError
            If the series length differs from the one seen during fitting.
        """
        # Drop the singleton channel axis: (n_instances, 1, m) -> (n_instances, m).
        X = X.squeeze(1)
        _, series_length = X.shape
        if series_length != self.series_length:
            raise TypeError(
                "The number of time points in the training data does not match "
                "that in the test data."
            )
        # NOTE: `_predict` below resolves to the module-level helper function,
        # not to this method (methods are not in scope inside a method body).
        y_pred = Parallel(n_jobs=self.n_jobs)(
            delayed(_predict)(X, self.estimators_[i], self.intervals_[i])
            for i in range(self.n_estimators)
        )
        # Average the per-tree predictions.
        return np.mean(y_pred, axis=0)
def _predict(X, estimator, intervals):
    """Extract interval features from X, then predict with the fitted tree."""
    features = _transform(X, intervals)
    return estimator.predict(features)
| 3,713 | 1,126 |
"""
Django settings for vectorc2 project.
Copyright 2019 Sebastian Ryszard Kruk <vectorc2@kruk.me>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from the environment (e.g. os.environ) before any production deployment.
SECRET_KEY = '#9iml9@=i%x#i57qi1zm)&)p46hrf(g=pn7jioagsh*))6+z9('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Hosts the development server will answer for; extend for real deployments.
ALLOWED_HOSTS = [
    "localhost",
    "127.0.0.1",
    "0.0.0.0"
]
# Application definition
INSTALLED_APPS = [
    # Project applications:
    'space',
    'command',
    'bootstrap4',
    'octicons',
    'nonicons',
    'blocks',
    'photos',
    'morse',
    'webview.apps.WebviewConfig',
    # Django contrib applications:
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # WebSocket support (see CHANNEL_LAYERS / ASGI_APPLICATION below):
    'channels',
    # 'compressor',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'vectorc2.urls'
# Extra locations searched by collectstatic / the staticfiles finders.
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static")
]
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'webview', 'templates')
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.request',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'vectorc2.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
# SQLite file database — fine for development; swap for a server DB in production.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# languages
# Imported here (rather than at the top) so the translation machinery is only
# pulled in once the settings above are defined.
from django.utils.translation import gettext_lazy as _
LANGUAGES = [
    ('pl', _('Polish')),
    ('en', _('English')),
]
# Default settings
# django-bootstrap4 configuration: all assets are served locally from /static.
BOOTSTRAP4 = {
    # The complete URL to the Bootstrap CSS file
    # Note that a URL can be either a string,
    # e.g. "https://stackpath.bootstrapcdn.com/bootstrap/4.1.1/css/bootstrap.min.css",
    # or a dict like the default value below.
    "css_url": {
        "href": "/static/style/bootstrap/bootstrap.min.css",
        # "integrity": "sha384-WskhaSGFgHYWDcbwN70/dfYBj47jz9qbsMId/iRN3ewGhXQFZCSftd1LZCfmhktB",
        "crossorigin": "anonymous",
    },
    # The complete URL to the Bootstrap JavaScript file
    "javascript_url": {
        "url": "/static/script/bootstrap/bootstrap.min.js",
        # "integrity": "sha384-smHYKdLADwkXOn1EmN1qk/HfnUcbVRZyYmZ4qpPea6sjB/pTJ0euyQp0Mk8ck+5T",
        "crossorigin": "anonymous",
    },
    # The complete URL to the Bootstrap CSS file (None means no theme)
    "theme_url": None,
    # The URL to the jQuery JavaScript file (full)
    "jquery_url": {
        "url": "/static/script/bootstrap/jquery-3.3.1.min.js",
        # "integrity": "sha384-tsQFqpEReu7ZLhBV2VZlAu7zcOV+rXbYlF2cqB8txI/8aZajjp4Bqd+V6D5IgvKT",
        "crossorigin": "anonymous",
    },
    # The URL to the jQuery JavaScript file (slim)
    "jquery_slim_url": {
        "url": "/static/script/bootstrap/jquery-3.3.1.slim.min.js",
        # "integrity": "sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo",
        "crossorigin": "anonymous",
    },
    # The URL to the Popper.js JavaScript file (slim)
    "popper_url": {
        "url": "/static/script/bootstrap/popper.min.js",
        # "integrity": "sha384-ZMP7rVo3mIykV+2+9J3UJ46jBk0WLaUAdn689aCwoqbBJiSnjAK/l8WvCWPIPm49",
        "crossorigin": "anonymous",
    },
    # Put JavaScript in the HEAD section of the HTML document (only relevant if you use bootstrap4.html)
    'javascript_in_head': False,
    # Include jQuery with Bootstrap JavaScript False|falsy|slim|full (default=False)
    # False - means tag bootstrap_javascript use default value - `falsy` and does not include jQuery)
    'include_jquery': False,
    # Label class to use in horizontal forms
    'horizontal_label_class': 'col-md-3',
    # Field class to use in horizontal forms
    'horizontal_field_class': 'col-md-9',
    # Set placeholder attributes to label if no placeholder is provided
    'set_placeholder': True,
    # Class to indicate required (better to set this in your Django form)
    'required_css_class': '',
    # Class to indicate error (better to set this in your Django form)
    'error_css_class': 'has-error',
    # Class to indicate success, meaning the field has valid input (better to set this in your Django form)
    'success_css_class': 'has-success',
    # Renderers (only set these if you have studied the source and understand the inner workings)
    'formset_renderers':{
        'default': 'bootstrap4.renderers.FormsetRenderer',
    },
    'form_renderers': {
        'default': 'bootstrap4.renderers.FormRenderer',
    },
    'field_renderers': {
        'default': 'bootstrap4.renderers.FieldRenderer',
        'inline': 'bootstrap4.renderers.InlineFieldRenderer',
    },
}
# Channels (WebSocket) routing entry point.
ASGI_APPLICATION = "vectorc2.routing.application"
# Channel layer backed by a local Redis instance on the default port.
CHANNEL_LAYERS = {
    'default': {
        'BACKEND': 'channels_redis.core.RedisChannelLayer',
        'CONFIG': {
            "hosts": [('127.0.0.1', 6379)],
        },
    },
}
# Placeholder for Vector-robot specific configuration.
VECTOR = { }
# #TODO
# STATICFILES_FINDERS = [
#     'compressor.finders.CompressorFinder'
# ]
# COMPRESS_ENABLED = False
# COMPRESS_ROOT = os.path.join(BASE_DIR, 'static_collected')
from torchvision.datasets.folder import pil_loader, accimage_loader, default_loader
from torch import Tensor
from pathlib import Path
from enum import Enum
from collections import namedtuple
from torchvision import transforms as T
import os
import numpy as np
import pdb
import functools
import torch.utils.data as data
import torch
class ConversionType(Enum):
    """Supported bounding-box format conversions (see convert_bbox_format)."""
    # (Cx, Cy, W, H) center format -> (X0, Y0, X1, Y1) corner format
    centerToVert = 1
def convert_bbox_format(boxes: Tensor, conversionType: int) -> Tensor:
    """Convert bounding-box annotations between formats.

    Args:
        boxes: (N, 4) tensor of boxes.
        conversionType: a ``ConversionType`` value. Only
            ``ConversionType.centerToVert`` (1) is supported, converting
            (Cx, Cy, W, H) center boxes to (X0, Y0, X1, Y1) corner boxes.

    Returns:
        (N, 4) tensor of converted boxes.

    Raises:
        ValueError: if ``conversionType`` is not a supported conversion.
    """
    if conversionType > ConversionType.centerToVert.value:
        # BUG FIX: the original message concatenated a non-f-string fragment,
        # so "{...}" placeholders were emitted literally; use one real f-string.
        raise ValueError(
            f"conversionType must be less than or equal to "
            f"{ConversionType.centerToVert.value}, received {conversionType}")
    if conversionType == ConversionType.centerToVert.value:
        # Corners are the center -/+ half the extents on each axis.
        box_centers = boxes[:, [0, 1, 0, 1]]
        half_wh = 0.5 * boxes[:, [2, 3, 2, 3]]
        half_wh[:, :2] *= -1
        return box_centers + half_wh
    # Remaining values (0 or negative) are unsupported; say so instead of
    # raising a bare ValueError.
    raise ValueError(f"unsupported conversionType {conversionType}")
class Wgisd(data.Dataset):
    """WGISD detection dataset (grape-bunch bounding boxes).

    Loads images and box targets laid out as in the wgisd repository
    (``url`` below): a ``data/`` directory with ``<name>.jpg`` /
    ``<name>.txt`` pairs plus ``train.txt`` / ``test.txt`` split files.
    (The previous docstring was copied from an FGVC-Aircraft loader.)

    Args:
        root (string): Root directory path to dataset.
        split (string): which split file to read, 'train' or 'test'.
        transform (callable, optional): A function/transform that takes in a PIL image
            and a target dict and returns transformed versions.
        loader (callable, optional): A function to load an image given its path.
        download (bool, optional): If true, clones the dataset repository into
            the root directory. If the dataset is already present, it is not
            downloaded again.
        val_size (float): fraction of the training images reserved for the
            'val' mode (there is no separate validation split file).
    """
    url = 'https://github.com/thsant/wgisd.git'
    splits = ('train', 'test')
    def __init__(self, root, split='train', transform=None,
                 loader=default_loader, download=False,
                 val_size=0.2):
        if split not in self.splits:
            raise ValueError(
                'Split "{}" not found. Valid splits are: {}'.format(
                    split, ', '.join(
                        self.splits), ))
        if val_size < 0 or val_size > 1:
            raise ValueError('val_size should be a fraction between 0 and 1')
        self.root = Path(root)
        self.split = split
        # There's no file specifying a validation dataset, so use a subset of the
        # training dataset
        dset_file = self.split
        self.classes_file = self.root / f'{dset_file}.txt'
        if download:
            self.download()
        self.transform = transform
        self.loader = loader
        # Maps image_id (int) -> image file name; filled by create_dataset().
        self.id_to_fname = {}
        self.val_size = val_size
        self.total_set = None
        self.samples = None
        self.create_dataset()
        # Setting mode also partitions the dataset (see the mode setter).
        self.mode = 'test' if self.split == 'test' else 'train'
    @property
    def mode(self):
        # Current partition selector: 'train', 'val', 'trainval' or 'test'.
        return self._mode
    @mode.setter
    def mode(self, mode):
        if self.split == 'test':
            # The test split ignores the requested mode entirely.
            self._mode = 'test'
            self.partition_dset()
            return
        supported_modes = ['train', 'val', 'trainval']
        if mode not in supported_modes:
            raise ValueError(f'mode must be one of {supported_modes}')
        self._mode = mode
        self.partition_dset()
    def create_dataset(self):
        """Read the split file and build (image_path, target_dict) samples."""
        image_names = []
        samples = []
        with open(self.classes_file, 'r') as f:
            for line in f:
                image_names.append(line.rstrip())
        data_dir = self.root / 'data'
        # Read bbox annotations from file
        for idx, img_name in enumerate(image_names):
            target = {}
            gt_boxes = []
            annotations = data_dir / f'{img_name}.txt'
            img_path = data_dir / f'{img_name}.jpg'
            with annotations.open() as f:
                for line in f:
                    # Column 0 is the class label; the rest is the box.
                    gt_boxes.append(line.split()[1:])
            gt_np = np.array(gt_boxes, dtype=np.float32)
            gt_tensor = torch.as_tensor(gt_np, dtype=torch.float32)
            # Annotations are (Cx,Cy,W,H); convert to (X0,Y0,X1,Y1) corners.
            boxes = convert_bbox_format(gt_tensor, conversionType=1)
            # Boxes are stored normalized; scale to pixel coordinates.
            img = self.loader(img_path)
            width, height = img.size
            boxes[:, [0, 2]] = boxes[:, [0, 2]] * width
            boxes[:, [1, 3]] = boxes[:, [1, 3]] * height
            boxes = boxes.to(dtype=torch.int32)
            numObjs = boxes.shape[0]
            # Single foreground class: every object gets label 1.
            labels = torch.ones((numObjs,), dtype=torch.int64)
            iscrowd = torch.zeros((numObjs,), dtype=torch.int64)
            image_id = torch.tensor([idx])
            self.id_to_fname[image_id.item()] = img_path.parts[-1]
            area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0])
            target['boxes'] = boxes
            target['labels'] = labels
            target['image_id'] = image_id
            target['area'] = area
            target['iscrowd'] = iscrowd
            samples.append((img_path, target))
        self.total_set = samples
    def partition_dset(self):
        """Select self.samples from total_set according to the current mode."""
        num_images = len(self.total_set)
        # First `split` samples are the validation subset.
        split = int(np.floor(self.val_size * num_images))
        if self.mode == 'trainval':
            self.samples = self.total_set
        elif self.mode == 'train':
            self.samples = self.total_set[split:]
        elif self.mode == 'val':
            self.samples = self.total_set[:split]
        else:
            # 'test' mode: use everything read from the test split file.
            self.samples = self.total_set
    @functools.cached_property
    def mean(self):
        # Per-channel mean over every pixel of the whole set (computed once,
        # then cached; this reloads every image, so the first access is slow).
        n_pixels = 0
        pix_sum = torch.zeros([3])
        for img_path, _ in self.total_set:
            img = self.loader(img_path)
            w,h = img.size
            im_tensor = T.ToTensor()(img)
            pix_sum += im_tensor.sum([1,2])
            n_pixels += (w*h)
        pix_avg = pix_sum / n_pixels
        return pix_avg
    @functools.cached_property
    def stddev(self):
        # Per-channel population standard deviation; depends on self.mean.
        avg = self.mean
        avg = avg.reshape([3, 1, 1])
        var_sum = torch.zeros([3])
        n_pixels = 0
        for img_path, _ in self.total_set:
            img = self.loader(img_path)
            w,h = img.size
            im_tensor = T.ToTensor()(img)
            var_sum += ((im_tensor - avg)**2).sum([1,2])
            n_pixels += (w*h)
        var = var_sum / n_pixels
        return torch.sqrt(var)
    def get_fname(self, img_id):
        # img_id is a one-element tensor (as stored in the target dict).
        return self.id_to_fname[img_id.item()]
    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (sample, target) where target is the annotation dict
            (boxes, labels, image_id, area, iscrowd) for the image.
        """
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform is not None:
            # NOTE: the transform receives and returns (image, target) pairs.
            sample, target = self.transform(sample, target)
        return sample, target
    def __len__(self):
        return len(self.samples)
    def __repr__(self):
        fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
        fmt_str += '    Number of datapoints: {}\n'.format(self.__len__())
        fmt_str += '    Root Location: {}\n'.format(self.root)
        tmp = '    Transforms (if any): '
        fmt_str += '{0}{1}\n'.format(tmp,
                                     self.transform.__repr__().replace('\n',
                                                                       '\n' + ' ' * len(tmp)))
        return fmt_str
    def _check_exists(self):
        # Consider the dataset present when both the root and split file exist.
        return self.root.exists() and self.classes_file.exists()
    def download(self):
        """Download the wgisd data if it doesn't exist already."""
        # NOTE(review): `requests` and `tarfile` are imported but unused here;
        # only GitPython's clone is actually needed.
        import requests
        import tarfile
        from git import Repo
        if self._check_exists():
            return
        print('Downloading %s ... (may take a few minutes)' % self.url)
        self.root.mkdir()
        Repo.clone_from(self.url, str(self.root))
        print('Done!')
| 7,802 | 2,432 |
#!/usr/bin/python
#
# So this script is in a bit of a hack state right now.
# This script reads
#
#
#
# Graciously copied and modified from:
# http://graphics.cs.cmu.edu/projects/im2gps/flickr_code.html
#Image querying script written by Tamara Berg,
#and extended heavily James Hays
#9/26/2007 added dynamic timeslices to query more efficiently.
#8/18/2008 added new fields and set maximum time slice.
#8/19/2008 this is a much simpler function which gets ALL geotagged photos of
# sufficient accuracy. No queries, no negative constraints.
# divides up the query results into multiple files
# 1/5/2009
# now uses date_taken instead of date_upload to get more diverse blocks of images
# 1/13/2009 - uses the original im2gps keywords, not as negative constraints though
import sys, string, math, time, socket
from flickrapi2 import FlickrAPI
from datetime import datetime
import pycurl
import os
import shutil
# Global socket timeout so hung Flickr API calls fail instead of blocking forever.
socket.setdefaulttimeout(30) #30 second time out on sockets before they throw
#an exception. I've been having trouble with urllib.urlopen hanging in the
#flickr API. This will show up as exceptions.IOError.
#the time out needs to be pretty long, it seems, because the flickr servers can be slow
#to respond to our big searches.
#returns a query and the search times to attempt to get a desired number of photos
#this needs serious refactoring -KAS
def DoSearch(fapi,query_string,desired_photos):
    """Binary-search an upload-time window that yields roughly desired_photos.

    Starting from a hard-coded mintime, repeatedly queries the Flickr API and
    adjusts the window's upper bound until the result count is close to
    desired_photos (or a fixed number of iterations is spent), then returns
    [mintime, maxtime, total_images, rsp] for that window.

    NOTE(review): if every API call in the inner loop raises, total_images and
    rsp are never bound and the return statement itself raises NameError.
    """
    # number of seconds to skip per query
    #timeskip = 62899200 #two years
    #timeskip = 604800 #one week
    timeskip = 172800 #two days
    #timeskip = 86400 #one day
    #timeskip = 3600 #one hour
    #timeskip = 2257 #for resuming previous query
    #mintime = 1121832000 #from im2gps
    #mintime = 1167407788 # resume crash england
    #mintime = 1177828976 #resume crash japan
    #mintime = 1187753798 #resume crash greece
    mintime = 1171416400 #resume crash WashingtonDC
    maxtime = mintime+timeskip
    endtime = 1192165200 #10/12/2007, at the end of im2gps queries
    print datetime.fromtimestamp(mintime)
    print datetime.fromtimestamp(endtime)
    while (maxtime < endtime):
        #new approach - adjust maxtime until we get the desired number of images
        #within a block. We'll need to keep upper bounds and lower
        #lower bound is well defined (mintime), but upper bound is not. We can't
        #search all the way from endtime.
        lower_bound = mintime + 900 #lower bound OF the upper time limit. must be at least 15 minutes or zero results
        upper_bound = mintime + timeskip * 20 #upper bound of the upper time limit
        # Start near the lower bound: most windows are dense enough.
        maxtime = .95 * lower_bound + .05 * upper_bound
        print '\nBinary search on time range upper bound'
        print 'Lower bound is ' + str(datetime.fromtimestamp(lower_bound))
        print 'Upper bound is ' + str(datetime.fromtimestamp(upper_bound))
        keep_going = 6 #search stops after a fixed number of iterations
        while( keep_going > 0 and maxtime < endtime):
            try:
                rsp = fapi.photos_search(api_key=flickrAPIKey,
                                         ispublic="1",
                                         media="photos",
                                         per_page="250",
                                         page="1",
                                         has_geo = "0", #bbox="-180, -90, 180, 90",
                                         text=query_string,
                                         accuracy="6", #6 is region level.
                                         min_upload_date=str(mintime),
                                         max_upload_date=str(maxtime))
                #we want to catch these failures somehow and keep going.
                time.sleep(1)
                fapi.testFailure(rsp)
                total_images = rsp.photos[0]['total'];
                null_test = int(total_images); #want to make sure this won't crash later on for some reason
                null_test = float(total_images);
                print '\nnumimgs: ' + total_images
                print 'mintime: ' + str(mintime) + ' maxtime: ' + str(maxtime) + ' timeskip: ' + str(maxtime - mintime)
                if( int(total_images) > desired_photos ):
                    print 'too many photos in block, reducing maxtime'
                    upper_bound = maxtime
                    maxtime = (lower_bound + maxtime) / 2 #midpoint between current value and lower bound.
                if( int(total_images) < desired_photos):
                    print 'too few photos in block, increasing maxtime'
                    lower_bound = maxtime
                    maxtime = (upper_bound + maxtime) / 2
                print 'Lower bound is ' + str(datetime.fromtimestamp(lower_bound))
                print 'Upper bound is ' + str(datetime.fromtimestamp(upper_bound))
                if( int(total_images) > 0): #only if we're not in a degenerate case
                    keep_going = keep_going - 1
                else:
                    # Empty window: widen the search range instead of spending
                    # one of the remaining iterations.
                    upper_bound = upper_bound + timeskip;
            except KeyboardInterrupt:
                print('Keyboard exception while querying for images, exiting\n')
                raise
            except:
                print sys.exc_info()[0]
                #print type(inst)     # the exception instance
                #print inst.args      # arguments stored in .args
                #print inst           # __str__ allows args to printed directly
                print ('Exception encountered while querying for images\n')
        #end of while binary search
        print 'finished binary search'
        return([mintime,maxtime,total_images,rsp])
###########################################################################
# Modify this section to reflect your data and specific search
###########################################################################
# flickr auth information:
# change these to your flickr api keys and secret
flickrAPIKey = "fa33550d413b36b3fddc473a931a3b3b" # API key
flickrSecret = "7fd481bff0916055" # shared "secret"
rootpath = "../data/" #where do you want the data
desired_photos = 1000 #how many photos do you want to try and get
query_file_name = 'query.dat' #The file to get the queries from
#query_file_name = 'place_rec_queries_fall08.txt'
query_file = open(query_file_name, 'r')
#aggregate all of the positive and negative queries together.
pos_queries = [] #an empty list
neg_queries = '' #a string
num_queries = 0
for line in query_file:
if line[0] != '#' and len(line) > 1: #line end character is 2 long?
print line[0:len(line)-1]
if line[0] != '-':
pos_queries = pos_queries + [line[0:len(line)-1]]
num_queries = num_queries + 1
if line[0] == '-':
neg_queries = neg_queries + ' ' + line[0:len(line)-1]
query_file.close()
print 'positive queries: '
print pos_queries
print 'negative queries: ' + neg_queries
print 'num_queries = ' + str(num_queries)
#this is the desired number of photos in each block
# make a new FlickrAPI instance
fapi = FlickrAPI(flickrAPIKey, flickrSecret)
for current_tag in range(0, num_queries):
print('TOP OF LOOP')
# change this to the location where you want to put your output file
try:
stats = os.stat(rootpath)
except OSError:
os.mkdir(rootpath)
outpath = rootpath+pos_queries[current_tag]+'/'
try:
os.mkdir(outpath)
except OSError:
shutil.rmtree(outpath,True)
os.mkdir(outpath)
out_file = open(rootpath + pos_queries[current_tag] + '.txt','w')
###########################################################################
#form the query string.
query_string = pos_queries[current_tag] + ' ' + neg_queries
print '\n\nquery_string is ' + query_string
total_images_queried = 0;
[mintime,maxtime,total_images,rsp] = DoSearch(fapi,query_string,desired_photos)
print('GETTING TOTATL IMAGES:'+str(total_images))
s = '\nmintime: ' + str(mintime) + ' maxtime: ' + str(maxtime)
print s
out_file.write(s + '\n')
i = getattr(rsp,'photos',None)
if i:
s = 'numimgs: ' + total_images
print s
out_file.write(s + '\n')
current_image_num = 1;
num = 4 # CHANGE THIS BACK int(rsp.photos[0]['pages'])
s = 'total pages: ' + str(num)
print s
out_file.write(s + '\n')
#only visit 16 pages max, to try and avoid the dreaded duplicate bug
#16 pages = 4000 images, should be duplicate safe. Most interesting pictures will be taken.
num_visit_pages = min(16,num)
s = 'visiting only ' + str(num_visit_pages) + ' pages ( up to ' + str(num_visit_pages * 250) + ' images)'
print s
out_file.write(s + '\n')
total_images_queried = total_images_queried + min((num_visit_pages * 250), int(total_images))
#print 'stopping before page ' + str(int(math.ceil(num/3) + 1)) + '\n'
pagenum = 1;
counter = -1
while( pagenum <= num_visit_pages ):
#for pagenum in range(1, num_visit_pages + 1): #page one is searched twice
print ' page number ' + str(pagenum)
try:
print("PAGE")
print(pagenum)
# WARNING THIS QUERY HAS TO MATCH THE SEARCH QUERY!!!!
rsp = fapi.photos_search(api_key=flickrAPIKey,
ispublic="1",
media="photos",
per_page="250",
page=str(pagenum),
has_geo = "0",
text=query_string,
#extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views",
#accuracy="6", #6 is region level.
min_upload_date=str(1121832000),#mintime),
max_upload_date=str(1192165200))#maxtime))
#rsp = fapi.photos_search(api_key=flickrAPIKey,
# ispublic="1",
# media="photos",
# per_page="250",
# page='0', #str(pagenum),
# sort="interestingness-desc",
# has_geo = "0", #bbox="-180, -90, 180, 90",
# text=query_string,
# #accuracy="6", #6 is region level. most things seem 10 or better.
# extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views",
# min_upload_date=str(mintime),
# max_upload_date=str(maxtime))
##min_taken_date=str(datetime.fromtimestamp(mintime)),
##max_taken_date=str(datetime.fromtimestamp(maxtime)))
time.sleep(1)
fapi.testFailure(rsp)
except KeyboardInterrupt:
print('Keyboard exception while querying for images, exiting\n')
raise
except:
print sys.exc_info()[0]
#print type(inst) # the exception instance
#print inst.args # arguments stored in .args
#print inst # __str__ allows args to printed directly
print ('Exception encountered while querying for images\n')
else:
print('got a response')
# and print them
k = getattr(rsp,'photos',None)
if k:
print('In K')
m = getattr(rsp.photos[0],'photo',None)
if m:
print('In M')
# Download every photo returned by the current Flickr query page and log its
# metadata.  NOTE(review): indentation reconstructed from syntax -- this
# fragment sits inside a larger query/retry loop whose beginning is not
# visible here; confirm nesting against the full script.
for b in rsp.photos[0].photo:
    print('In b')
    if b!=None:
        counter = counter + 1
        ##print(http://farm{farm-id}.static.flickr.com/{server-id}/{id}_{secret}.jpg)
        # Build the static-farm image URL from the photo's farm/server/id/secret.
        myurl = 'http://farm'+b['farm']+".static.flickr.com/"+b['server']+"/"+b['id']+"_"+b['secret']+'.jpg'
        # Local filename: output dir + current query tag + running counter.
        fname = outpath+pos_queries[current_tag]+str(counter)+'.jpg' #b['id']+"_"+b['secret']+'.jpg'
        print(myurl)
        print(fname)
        # Fetch the image with pycurl, following up to 5 redirects.
        mycurl = pycurl.Curl()
        mycurl.setopt(pycurl.URL, str(myurl))
        myfile = open(fname,"wb")
        mycurl.setopt(pycurl.WRITEDATA, myfile)
        mycurl.setopt(pycurl.FOLLOWLOCATION, 1)
        mycurl.setopt(pycurl.MAXREDIRS, 5)
        mycurl.setopt(pycurl.NOSIGNAL, 1)
        mycurl.perform()
        mycurl.close()
        myfile.close()
        # Append one metadata record per downloaded photo to the log file.
        # NOTE(review): .encode("ascii","replace") returns bytes on Python 3,
        # which cannot be concatenated with str -- this code appears to
        # target Python 2; confirm before porting.
        out_file.write('URL: '+myurl+'\n')
        out_file.write('File: '+ fname+'\n')
        out_file.write('photo: ' + b['id'] + ' ' + b['secret'] + ' ' + b['server'] + '\n')
        out_file.write('owner: ' + b['owner'] + '\n')
        out_file.write('title: ' + b['title'].encode("ascii","replace") + '\n')
        out_file.write('originalsecret: ' + b['originalsecret'] + '\n')
        out_file.write('originalformat: ' + b['originalformat'] + '\n')
        out_file.write('o_height: ' + b['o_height'] + '\n')
        out_file.write('o_width: ' + b['o_width'] + '\n')
        out_file.write('datetaken: ' + b['datetaken'].encode("ascii","replace") + '\n')
        out_file.write('dateupload: ' + b['dateupload'].encode("ascii","replace") + '\n')
        out_file.write('tags: ' + b['tags'].encode("ascii","replace") + '\n')
        out_file.write('license: ' + b['license'].encode("ascii","replace") + '\n')
        out_file.write('latitude: ' + b['latitude'].encode("ascii","replace") + '\n')
        out_file.write('longitude: ' + b['longitude'].encode("ascii","replace") + '\n')
        out_file.write('accuracy: ' + b['accuracy'].encode("ascii","replace") + '\n')
        out_file.write('views: ' + b['views'] + '\n')
        out_file.write('interestingness: ' + str(current_image_num) + ' out of ' + str(total_images) + '\n');
        out_file.write('\n')
        current_image_num = current_image_num + 1;
    print('')
pagenum = pagenum + 1; #this is in the else exception block. Itwon't increment for a failure.
#this block is indented such that it will only run if there are no exceptions
#in the original query. That means if there are exceptions, mintime won't be incremented
#and it will try again
timeskip = maxtime - mintime #used for initializing next binary search
mintime = maxtime
# Record the grand total of images returned across all queries, then release
# the log file handle.
out_file.write('Total images queried: ' + str(total_images_queried) + '\n')
# BUG FIX: was `out_file.close` -- a bare attribute access that never called
# the method, leaving the file to be closed (or not) by the GC.
out_file.close()
| 16,223 | 4,704 |
import os
import sys
from pyspark.sql.types import *

# Local MovieLens 100k data directory and the Spark installation to use.
PATH = "/home/ubuntu/work/ml-resources/spark-ml/data"
SPARK_HOME = "/home/ubuntu/work/spark-2.0.0-bin-hadoop2.7/"

# Point the environment at the bundled Spark distribution and make its
# Python API importable before pulling in pyspark itself.
os.environ['SPARK_HOME'] = SPARK_HOME
sys.path.append(SPARK_HOME + "/python")

from pyspark import SparkContext
from pyspark import SparkConf
from pyspark.sql import SparkSession

# Module-level local-mode Spark context/session shared by the helpers below.
conf = SparkConf().setAppName("First Spark App").setMaster("local")
sc = SparkContext(conf=conf)
spark = SparkSession(sc)
def get_user_data():
    """Load the MovieLens ``u.user`` file into a Spark DataFrame.

    Returns:
        DataFrame with columns no, age, gender, occupation, zipCode,
        parsed from the pipe-delimited file under ``PATH``.
    """
    custom_schema = StructType([
        StructField("no", StringType(), True),
        StructField("age", IntegerType(), True),
        StructField("gender", StringType(), True),
        StructField("occupation", StringType(), True),
        StructField("zipCode", StringType(), True)
    ])
    # pyspark.sql.types is already imported at module level, so the redundant
    # in-function wildcard re-import was removed.  SQLContext is kept (rather
    # than the module-level SparkSession) to preserve the existing behavior
    # with the spark-csv reader.
    from pyspark.sql import SQLContext
    sql_context = SQLContext(sc)
    user_df = sql_context.read \
        .format('com.databricks.spark.csv') \
        .options(header='false', delimiter='|') \
        .load("%s/ml-100k/u.user" % PATH, schema=custom_schema)
    return user_df
def get_movie_data_df():
    """Load the MovieLens ``u.item`` file into a Spark DataFrame.

    Returns:
        DataFrame with the movie number, title and release date followed by
        a block of integer fields f2..f19 (presumably the per-genre
        indicator flags of the 100k dataset -- TODO confirm).
    """
    custom_schema = StructType([
        StructField("no", StringType(), True),
        StructField("moviename", StringType(), True),
        StructField("date", StringType(), True),
        StructField("f1", StringType(), True), StructField("url", StringType(), True),
        StructField("f2", IntegerType(), True), StructField("f3", IntegerType(), True),
        StructField("f4", IntegerType(), True), StructField("f5", IntegerType(), True),
        StructField("f6", IntegerType(), True), StructField("f7", IntegerType(), True),
        StructField("f8", IntegerType(), True), StructField("f9", IntegerType(), True),
        StructField("f10", IntegerType(), True), StructField("f11", IntegerType(), True),
        StructField("f12", IntegerType(), True), StructField("f13", IntegerType(), True),
        StructField("f14", IntegerType(), True), StructField("f15", IntegerType(), True),
        StructField("f16", IntegerType(), True), StructField("f17", IntegerType(), True),
        StructField("f18", IntegerType(), True), StructField("f19", IntegerType(), True)
    ])
    # pyspark.sql.types is already imported at module level, so the redundant
    # in-function wildcard re-import was removed.
    from pyspark.sql import SQLContext
    sql_context = SQLContext(sc)
    movie_df = sql_context.read \
        .format('com.databricks.spark.csv') \
        .options(header='false', delimiter='|') \
        .load("%s/ml-100k/u.item" % PATH, schema=custom_schema)
    return movie_df
def get_movie_data():
    """Return the raw MovieLens item file (``u.item``) as an RDD of lines."""
    movie_file = "%s/ml-100k/u.item" % PATH
    return sc.textFile(movie_file)
def get_rating_data():
    """Return the raw MovieLens ratings file (``u.data``) as an RDD of lines."""
    rating_file = "%s/ml-100k/u.data" % PATH
    return sc.textFile(rating_file)
| 2,695 | 853 |
"""
"""
from rest_framework import routers
from safemasks.resources.rest.serializers import SupplierViewSet, TrustedSupplierViewSet
# Routers provide an easy way of automatically determining the URL conf.
ROUTER = routers.DefaultRouter()
ROUTER.register(r"suppliers", SupplierViewSet, "suppliers")
ROUTER.register(r"suppliers-trusted", TrustedSupplierViewSet, "suppliers-trusted")
| 384 | 119 |
##############################################################################
#
# Copyright (c) 2004-2008 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Output formatting.
"""
from __future__ import print_function
try:
from collections.abc import MutableMapping
except ImportError:
from collections import MutableMapping
from contextlib import contextmanager
import doctest
import os
import re
import sys
import tempfile
import traceback
from datetime import datetime, timedelta
from zope.testrunner.exceptions import DocTestFailureException
# Python 2/3 compatibility: on Python 3 the builtin ``unicode`` is gone, so
# alias it to ``str`` and let the rest of the module call it unconditionally.
try:
    unicode
except NameError:
    unicode = str


# %-template used by format_traceback() to render doctest failures:
# (filename, lineno, test name, example source, expected output, actual output).
doctest_template = """
File "%s", line %s, in %s
%s
Want:
%s
Got:
%s
"""
class OutputFormatter(object):
    """Test runner output formatter."""

    # Implementation note: be careful about printing stuff to sys.stderr.
    # It is used for interprocess communication between the parent and the
    # child test runner, when you run some test layers in a subprocess.
    # resume_layer() reasigns sys.stderr for this reason, but be careful
    # and don't store the original one in __init__ or something.

    # Fallback terminal width; recomputed from terminfo on construction.
    max_width = 80

    def __init__(self, options):
        self.options = options
        self.last_width = 0
        self.compute_max_width()

    progress = property(lambda self: self.options.progress)
    verbose = property(lambda self: self.options.verbose)
    in_subprocess = property(
        lambda self: (
            self.options.resume_layer is not None and
            self.options.processes > 1))

    def compute_max_width(self):
        """Try to determine the terminal width."""
        # Note that doing this every time is more test friendly.
        self.max_width = tigetnum('cols', self.max_width)

    def getShortDescription(self, test, room):
        """Return a description of a test that fits in ``room`` characters."""
        room -= 1
        s = str(test)
        if len(s) > room:
            pos = s.find(" (")
            if pos >= 0:
                w = room - (pos + 5)
                if w < 1:
                    # first portion (test method name) is too long
                    s = s[:room-3] + "..."
                else:
                    pre = s[:pos+2]
                    post = s[-w:]
                    s = "%s...%s" % (pre, post)
            else:
                w = room - 4
                s = '... ' + s[-w:]
        return ' ' + s[:room]

    def info(self, message):
        """Print an informative message."""
        print(message)

    def info_suboptimal(self, message):
        """Print an informative message about losing some of the features.

        For example, when you run some tests in a subprocess, you lose the
        ability to use the debugger.
        """
        print(message)

    def error(self, message):
        """Report an error."""
        print(message)

    def error_with_banner(self, message):
        """Report an error with a big ASCII banner."""
        print()
        print('*'*70)
        self.error(message)
        print('*'*70)
        print()

    def profiler_stats(self, stats):
        """Report profiler stats."""
        stats.print_stats(50)

    def import_errors(self, import_errors):
        """Report test-module import errors (if any)."""
        if import_errors:
            print("Test-module import failures:")
            for error in import_errors:
                # FIX: removed the stray trailing comma that used to follow
                # this call -- it built a useless one-element tuple.
                self.print_traceback("Module: %s\n" % error.module,
                                     error.exc_info)
            print()

    def tests_with_errors(self, errors):
        """Report names of tests with errors (if any)."""
        if errors:
            print()
            print("Tests with errors:")
            for test, exc_info in errors:
                print(" ", test)

    def tests_with_failures(self, failures):
        """Report names of tests with failures (if any)."""
        if failures:
            print()
            print("Tests with failures:")
            for test, exc_info in failures:
                print(" ", test)

    def modules_with_import_problems(self, import_errors):
        """Report names of modules with import problems (if any)."""
        if import_errors:
            print()
            print("Test-modules with import problems:")
            for test in import_errors:
                print(" " + test.module)

    def format_seconds(self, n_seconds):
        """Format a time in seconds."""
        if n_seconds >= 60:
            n_minutes, n_seconds = divmod(n_seconds, 60)
            return "%d minutes %.3f seconds" % (n_minutes, n_seconds)
        else:
            return "%.3f seconds" % n_seconds

    def format_seconds_short(self, n_seconds):
        """Format a time in seconds (short version)."""
        return "%.3f s" % n_seconds

    def summary(self, n_tests, n_failures, n_errors, n_seconds,
                n_skipped=0):
        """Summarize the results of a single test layer."""
        print(" Ran %s tests with %s failures, %s errors and "
              "%s skipped in %s."
              % (n_tests, n_failures, n_errors, n_skipped,
                 self.format_seconds(n_seconds)))

    def totals(self, n_tests, n_failures, n_errors, n_seconds,
               n_skipped=0):
        """Summarize the results of all layers."""
        print("Total: %s tests, %s failures, %s errors and %s skipped in %s."
              % (n_tests, n_failures, n_errors, n_skipped,
                 self.format_seconds(n_seconds)))

    def list_of_tests(self, tests, layer_name):
        """Report a list of test names."""
        print("Listing %s tests:" % layer_name)
        for test in tests:
            print(' ', test)

    def garbage(self, garbage):
        """Report garbage generated by tests."""
        if garbage:
            print("Tests generated new (%d) garbage:" % len(garbage))
            print(garbage)

    def test_garbage(self, test, garbage):
        """Report garbage generated by a test."""
        if garbage:
            print("The following test left garbage:")
            print(test)
            print(garbage)

    def test_threads(self, test, new_threads):
        """Report threads left behind by a test."""
        if new_threads:
            print("The following test left new threads behind:")
            print(test)
            print("New thread(s):", new_threads)

    def refcounts(self, rc, prev):
        """Report a change in reference counts."""
        print(" sys refcount=%-8d change=%-6d" % (rc, rc - prev))

    def detailed_refcounts(self, track, rc, prev):
        """Report a change in reference counts, with extra detail."""
        print((" sum detail refcount=%-8d"
               " sys refcount=%-8d"
               " change=%-6d"
               % (track.n, rc, rc - prev)))
        track.output()

    def start_set_up(self, layer_name):
        """Report that we're setting up a layer.

        The next output operation should be stop_set_up().
        """
        print(" Set up %s" % layer_name, end=' ')
        sys.stdout.flush()

    def stop_set_up(self, seconds):
        """Report that we've set up a layer.

        Should be called right after start_set_up().
        """
        print("in %s." % self.format_seconds(seconds))

    def start_tear_down(self, layer_name):
        """Report that we're tearing down a layer.

        The next output operation should be stop_tear_down() or
        tear_down_not_supported().
        """
        print(" Tear down %s" % layer_name, end=' ')
        sys.stdout.flush()

    def stop_tear_down(self, seconds):
        """Report that we've tore down a layer.

        Should be called right after start_tear_down().
        """
        print("in %s." % self.format_seconds(seconds))

    def tear_down_not_supported(self):
        """Report that we could not tear down a layer.

        Should be called right after start_tear_down().
        """
        print("... not supported")

    def start_test(self, test, tests_run, total_tests):
        """Report that we're about to run a test.

        The next output operation should be test_success(), test_error(), or
        test_failure().
        """
        self.test_width = 0
        if self.progress:
            if self.last_width:
                # Erase the previous progress line before drawing a new one.
                sys.stdout.write('\r' + (' ' * self.last_width) + '\r')
            s = " %d/%d (%.1f%%)" % (tests_run, total_tests,
                                     tests_run * 100.0 / total_tests)
            sys.stdout.write(s)
            self.test_width += len(s)
            if self.verbose == 1:
                room = self.max_width - self.test_width - 1
                s = self.getShortDescription(test, room)
                sys.stdout.write(s)
                self.test_width += len(s)
        elif self.verbose == 1:
            sys.stdout.write('.' * test.countTestCases())
        elif self.in_subprocess:
            sys.stdout.write('.' * test.countTestCases())
            # Give the parent process a new line so it sees the progress
            # in a timely manner.
            sys.stdout.write('\n')
        if self.verbose > 1:
            s = str(test)
            sys.stdout.write(' ')
            sys.stdout.write(s)
            self.test_width += len(s) + 1
        sys.stdout.flush()

    def test_success(self, test, seconds):
        """Report that a test was successful.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            s = " (%s)" % self.format_seconds_short(seconds)
            sys.stdout.write(s)
            self.test_width += len(s) + 1

    def test_skipped(self, test, reason):
        """Report that a test was skipped.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            s = " (skipped: %s)" % reason
        elif self.verbose > 1:
            s = " (skipped)"
        else:
            return
        sys.stdout.write(s)
        self.test_width += len(s) + 1

    def test_error(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that an error occurred while running a test.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            print(" (%s)" % self.format_seconds_short(seconds))
        print()
        self.print_traceback("Error in test %s" % test, exc_info)
        self.print_std_streams(stdout, stderr)
        self.test_width = self.last_width = 0

    def test_failure(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that a test failed.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            print(" (%s)" % self.format_seconds_short(seconds))
        print()
        self.print_traceback("Failure in test %s" % test, exc_info)
        self.print_std_streams(stdout, stderr)
        self.test_width = self.last_width = 0

    def print_traceback(self, msg, exc_info):
        """Report an error with a traceback."""
        print()
        print(msg)
        print(self.format_traceback(exc_info))

    def print_std_streams(self, stdout, stderr):
        """Emit contents of buffered standard streams."""
        if stdout:
            sys.stdout.write("Stdout:\n")
            sys.stdout.write(stdout)
            if not stdout.endswith("\n"):
                sys.stdout.write("\n")
            sys.stdout.write("\n")
        if stderr:
            sys.stderr.write("Stderr:\n")
            sys.stderr.write(stderr)
            if not stderr.endswith("\n"):
                sys.stderr.write("\n")
            sys.stderr.write("\n")

    def format_traceback(self, exc_info):
        """Format the traceback."""
        v = exc_info[1]
        if isinstance(v, DocTestFailureException):
            tb = v.args[0]
        elif isinstance(v, doctest.DocTestFailure):
            tb = doctest_template % (
                v.test.filename,
                v.test.lineno + v.example.lineno + 1,
                v.test.name,
                v.example.source,
                v.example.want,
                v.got,
            )
        else:
            tb = "".join(traceback.format_exception(*exc_info))
        return tb

    def stop_test(self, test):
        """Clean up the output state after a test."""
        if self.progress:
            self.last_width = self.test_width
        elif self.verbose > 1:
            print()
        sys.stdout.flush()

    def stop_tests(self):
        """Clean up the output state after a collection of tests."""
        if self.progress and self.last_width:
            sys.stdout.write('\r' + (' ' * self.last_width) + '\r')
        if self.verbose == 1 or self.progress:
            print()
def tigetnum(attr, default=None):
    """Return a value from the terminfo database.

    Terminfo is used on Unix-like systems to report various terminal
    attributes (such as width, height or the number of supported colors).

    Returns ``default`` when the ``curses`` module is not available, or when
    sys.stdout is not a terminal.
    """
    try:
        import curses
    except ImportError:
        # avoid reimporting a broken module in python 2.3
        sys.modules['curses'] = None
        return default
    # If sys.stdout is not a real file object (e.g. in unit tests that
    # use various wrappers), you get an error, different depending on
    # Python version:
    failures = (curses.error, TypeError, AttributeError)
    if sys.version_info >= (3,):
        import io
        failures += (io.UnsupportedOperation, )
    try:
        # setupterm() raises curses.error when $TERM is an unknown name;
        # tigetnum() can raise TypeError on PyPy3 due to a bug:
        # https://bitbucket.org/pypy/pypy/issue/2016/pypy3-cursestigetnum-raises-ctype
        curses.setupterm()
        return curses.tigetnum(attr)
    except failures:
        return default
def terminal_has_colors():
    """Determine whether the terminal supports colors.

    Some terminals (e.g. the emacs built-in one) don't.
    """
    n_colors = tigetnum('colors', -1)
    return n_colors >= 8
class ColorfulOutputFormatter(OutputFormatter):
    """Output formatter that uses ANSI color codes.

    Like syntax highlighting in your text editor, colorizing
    test failures helps the developer.
    """

    # These colors are carefully chosen to have enough contrast
    # on terminals with both black and white background.
    colorscheme = {'normal': 'normal',
                   'default': 'default',
                   'info': 'normal',
                   'suboptimal-behaviour': 'magenta',
                   'error': 'brightred',
                   'number': 'green',
                   'slow-test': 'brightmagenta',
                   'ok-number': 'green',
                   'error-number': 'brightred',
                   'filename': 'lightblue',
                   'lineno': 'lightred',
                   'testname': 'lightcyan',
                   'failed-example': 'cyan',
                   'expected-output': 'green',
                   'actual-output': 'red',
                   'character-diffs': 'magenta',
                   'diff-chunk': 'magenta',
                   'exception': 'red',
                   'skipped': 'brightyellow',
                   }

    # Map prefix character to color in diff output. This handles ndiff and
    # udiff correctly, but not cdiff. In cdiff we ought to highlight '!' as
    # expected-output until we see a '-', then highlight '!' as actual-output,
    # until we see a '*', then switch back to highlighting '!' as
    # expected-output. Nevertheless, coloried cdiffs are reasonably readable,
    # so I'm not going to fix this.
    # -- mgedmin
    diff_color = {'-': 'expected-output',
                  '+': 'actual-output',
                  '?': 'character-diffs',
                  '@': 'diff-chunk',
                  '*': 'diff-chunk',
                  '!': 'actual-output',
                  }

    # Intensity prefixes recognized in color names; each maps to the ANSI
    # attribute code prepended to the base color code.
    prefixes = [('dark', '0;'),
                ('light', '1;'),
                ('bright', '1;'),
                ('bold', '1;'),
                ]

    # Base ANSI foreground color codes.
    colorcodes = {'default': 0, 'normal': 0,
                  'black': 30,
                  'red': 31,
                  'green': 32,
                  'brown': 33, 'yellow': 33,
                  'blue': 34,
                  'magenta': 35,
                  'cyan': 36,
                  'grey': 37, 'gray': 37, 'white': 37}

    slow_test_threshold = 10.0 # seconds

    def color_code(self, color):
        """Convert a color description (e.g. 'lightred') to a terminal code."""
        prefix_code = ''
        for prefix, code in self.prefixes:
            if color.startswith(prefix):
                color = color[len(prefix):]
                prefix_code = code
                break
        color_code = self.colorcodes[color]
        return '\033[%s%sm' % (prefix_code, color_code)

    def color(self, what):
        """Pick a named color from the color scheme"""
        return self.color_code(self.colorscheme[what])

    def colorize(self, what, message, normal='normal'):
        """Wrap message in color."""
        return self.color(what) + message + self.color(normal)

    def error_count_color(self, n):
        """Choose a color for the number of errors."""
        if n:
            return self.color('error-number')
        else:
            return self.color('ok-number')

    def skip_count_color(self, n):
        """Choose a color for the number of skipped tests."""
        if n:
            return self.color('skipped')
        else:
            return self.color('ok-number')

    def test_skipped(self, test, reason):
        """Report that a test was skipped.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            s = " (%sskipped: %s%s)" % (
                self.color('skipped'), reason, self.color('info'))
        elif self.verbose > 1:
            s = " (%sskipped%s)" % (
                self.color('skipped'), self.color('info'))
        else:
            return
        sys.stdout.write(s)
        self.test_width += len(s) + 1

    def info(self, message):
        """Print an informative message."""
        print(self.colorize('info', message))

    def info_suboptimal(self, message):
        """Print an informative message about losing some of the features.

        For example, when you run some tests in a subprocess, you lose the
        ability to use the debugger.
        """
        print(self.colorize('suboptimal-behaviour', message))

    def error(self, message):
        """Report an error."""
        print(self.colorize('error', message))

    def error_with_banner(self, message):
        """Report an error with a big ASCII banner."""
        print()
        print(self.colorize('error', '*'*70))
        self.error(message)
        print(self.colorize('error', '*'*70))
        print()

    def tear_down_not_supported(self):
        """Report that we could not tear down a layer.

        Should be called right after start_tear_down().
        """
        print("...", self.colorize('suboptimal-behaviour', "not supported"))

    def format_seconds(self, n_seconds, normal='normal'):
        """Format a time in seconds."""
        if n_seconds >= 60:
            n_minutes, n_seconds = divmod(n_seconds, 60)
            return "%s minutes %s seconds" % (
                self.colorize('number', '%d' % n_minutes, normal),
                self.colorize('number', '%.3f' % n_seconds, normal))
        else:
            return "%s seconds" % (
                self.colorize('number', '%.3f' % n_seconds, normal))

    def format_seconds_short(self, n_seconds):
        """Format a time in seconds (short version)."""
        # Slow tests get a distinct highlight color.
        if n_seconds >= self.slow_test_threshold:
            color = 'slow-test'
        else:
            color = 'number'
        return self.colorize(color, "%.3f s" % n_seconds)

    def summary(self, n_tests, n_failures, n_errors, n_seconds,
                n_skipped=0):
        """Summarize the results."""
        sys.stdout.writelines([
            self.color('info'), ' Ran ',
            self.color('number'), str(n_tests),
            self.color('info'), ' tests with ',
            self.error_count_color(n_failures), str(n_failures),
            self.color('info'), ' failures, ',
            self.error_count_color(n_errors), str(n_errors),
            self.color('info'), ' errors, ',
            self.skip_count_color(n_skipped), str(n_skipped),
            self.color('info'), ' skipped in ',
            self.format_seconds(n_seconds, 'info'), '.',
            self.color('normal'), '\n',
            ])

    def totals(self, n_tests, n_failures, n_errors, n_seconds,
               n_skipped=0):
        """Report totals (number of tests, failures, and errors)."""
        sys.stdout.writelines([
            self.color('info'), 'Total: ',
            self.color('number'), str(n_tests),
            self.color('info'), ' tests, ',
            self.error_count_color(n_failures), str(n_failures),
            self.color('info'), ' failures, ',
            self.error_count_color(n_errors), str(n_errors),
            self.color('info'), ' errors, ',
            self.skip_count_color(n_skipped), str(n_skipped),
            self.color('info'), ' skipped in ',
            self.format_seconds(n_seconds, 'info'), '.',
            self.color('normal'), '\n'])

    def print_traceback(self, msg, exc_info):
        """Report an error with a traceback."""
        print()
        print(self.colorize('error', msg))
        v = exc_info[1]
        if isinstance(v, DocTestFailureException):
            self.print_doctest_failure(v.args[0])
        elif isinstance(v, doctest.DocTestFailure):
            # I don't think these are ever used... -- mgedmin
            tb = self.format_traceback(exc_info)
            print(tb)
        else:
            tb = self.format_traceback(exc_info)
            self.print_colorized_traceback(tb)

    def print_doctest_failure(self, formatted_failure):
        """Report a doctest failure.

        ``formatted_failure`` is a string -- that's what
        DocTestSuite/DocFileSuite gives us.
        """
        # Simple line-by-line state machine: headers like 'Expected:' set the
        # color used for the indented lines that follow them.
        color_of_indented_text = 'normal'
        colorize_diff = False
        for line in formatted_failure.splitlines():
            if line.startswith('File '):
                m = re.match(r'File "(.*)", line (\d*), in (.*)$', line)
                if m:
                    filename, lineno, test = m.groups()
                    sys.stdout.writelines([
                        self.color('normal'), 'File "',
                        self.color('filename'), filename,
                        self.color('normal'), '", line ',
                        self.color('lineno'), lineno,
                        self.color('normal'), ', in ',
                        self.color('testname'), test,
                        self.color('normal'), '\n'])
                else:
                    print(line)
            elif line.startswith(' ') or line.strip() == '':
                if colorize_diff and len(line) > 4:
                    # In diff sections the per-line prefix char picks the color.
                    color = self.diff_color.get(
                        line[4], color_of_indented_text)
                    print(self.colorize(color, line))
                else:
                    if line.strip() != '':
                        print(self.colorize(color_of_indented_text, line))
                    else:
                        print(line)
            else:
                colorize_diff = False
                if line.startswith('Failed example'):
                    color_of_indented_text = 'failed-example'
                elif line.startswith('Expected:'):
                    color_of_indented_text = 'expected-output'
                elif line.startswith('Got:'):
                    color_of_indented_text = 'actual-output'
                elif line.startswith('Exception raised:'):
                    color_of_indented_text = 'exception'
                elif line.startswith('Differences '):
                    color_of_indented_text = 'normal'
                    colorize_diff = True
                else:
                    color_of_indented_text = 'normal'
                print(line)
        print()

    def print_colorized_traceback(self, formatted_traceback):
        """Report a test failure.

        ``formatted_traceback`` is a string.
        """
        for line in formatted_traceback.splitlines():
            if line.startswith(' File'):
                m = re.match(r' File "(.*)", line (\d*), in (.*)$', line)
                if m:
                    filename, lineno, test = m.groups()
                    sys.stdout.writelines([
                        self.color('normal'), ' File "',
                        self.color('filename'), filename,
                        self.color('normal'), '", line ',
                        self.color('lineno'), lineno,
                        self.color('normal'), ', in ',
                        self.color('testname'), test,
                        self.color('normal'), '\n'])
                else:
                    print(line)
            elif line.startswith(' '):
                print(self.colorize('failed-example', line))
            elif line.startswith('Traceback (most recent call last)'):
                print(line)
            else:
                print(self.colorize('exception', line))
        print()
class FakeTest(object):
    """A stand-in test object carrying nothing but an id."""

    # No failure exception type -- this object never actually runs.
    failureException = None

    def __init__(self, test_id):
        self._test_id = test_id

    def id(self):
        """Return the identifier this fake test was created with."""
        return self._test_id
# Conditional imports: we don't want zope.testrunner to have a hard
# dependency on subunit.
try:
import subunit
from subunit.iso8601 import Utc
subunit.StreamResultToBytes
except (ImportError, AttributeError):
subunit = None
# testtools is a hard dependency of subunit itself, but we guard it
# separately for richer error messages.
try:
import testtools
from testtools.content import (
Content,
ContentType,
content_from_file,
text_content,
)
testtools.StreamToExtendedDecorator
except (ImportError, AttributeError):
testtools = None
class _RunnableDecorator(object):
"""Permit controlling the runnable annotation on tests.
This decorates a StreamResult, adding a setRunnable context manager to
indicate whether a test is runnable. (A context manager is unidiomatic
here, but it's just about the simplest way to stuff the relevant state
through the various layers of decorators involved without accidentally
affecting later test results.)
"""
def __init__(self, decorated):
self.decorated = decorated
self._runnable = True
def __getattr__(self, name):
return getattr(self.decorated, name)
@contextmanager
def setRunnable(self, runnable):
orig_runnable = self._runnable
try:
self._runnable = runnable
yield
finally:
self._runnable = orig_runnable
def status(self, **kwargs):
kwargs = dict(kwargs)
kwargs['runnable'] = self._runnable
self.decorated.status(**kwargs)
class _SortedDict(MutableMapping, object):
"""A dict that always returns items in sorted order.
This differs from collections.OrderedDict in that it returns items in
*sorted* order, not in insertion order.
We use this as a workaround for the fact that
testtools.ExtendedToStreamDecorator doesn't sort the details dict when
encoding it, which makes it difficult to write stable doctests for
subunit v2 output.
"""
def __init__(self, items):
self._dict = dict(items)
def __getitem__(self, key):
return self._dict[key]
def __setitem__(self, key, value):
self._dict[key] = value
def __delitem__(self, key):
del self._dict[key]
def __iter__(self):
return iter(sorted(self._dict))
def __len__(self):
return len(self._dict)
class SubunitOutputFormatter(object):
"""A subunit output formatter.
This output formatter generates subunit-compatible output (see
https://launchpad.net/subunit). Subunit output is essentially a stream
of results of unit tests.
In this formatter, non-test events (such as layer set up) are encoded as
specially-tagged tests. In particular, for a layer 'foo', the fake
tests related to layer setup and teardown are tagged with 'zope:layer'
and are called 'foo:setUp' and 'foo:tearDown'. Any tests within layer
'foo' are tagged with 'zope:layer:foo'.
Note that all tags specific to this formatter begin with 'zope:'.
"""
# subunit output is designed for computers, so displaying a progress bar
# isn't helpful.
progress = False
verbose = property(lambda self: self.options.verbose)
TAG_INFO_SUBOPTIMAL = 'zope:info_suboptimal'
TAG_ERROR_WITH_BANNER = 'zope:error_with_banner'
TAG_LAYER = 'zope:layer'
TAG_IMPORT_ERROR = 'zope:import_error'
TAG_PROFILER_STATS = 'zope:profiler_stats'
TAG_GARBAGE = 'zope:garbage'
TAG_THREADS = 'zope:threads'
TAG_REFCOUNTS = 'zope:refcounts'
def __init__(self, options, stream=None):
if subunit is None:
raise Exception('Requires subunit 0.0.11 or better')
if testtools is None:
raise Exception('Requires testtools 0.9.30 or better')
self.options = options
if stream is None:
stream = sys.stdout
self._stream = stream
self._subunit = self._subunit_factory(self._stream)
# Used to track the last layer that was set up or torn down. Either
# None or (layer_name, last_touched_time).
self._last_layer = None
self.UTC = Utc()
# Content types used in the output.
self.TRACEBACK_CONTENT_TYPE = ContentType(
'text', 'x-traceback', {'language': 'python', 'charset': 'utf8'})
self.PROFILE_CONTENT_TYPE = ContentType(
'application', 'x-binary-profile')
self.PLAIN_TEXT = ContentType('text', 'plain', {'charset': 'utf8'})
@classmethod
def _subunit_factory(cls, stream):
"""Return a TestResult attached to the given stream."""
return _RunnableDecorator(subunit.TestProtocolClient(stream))
def _emit_timestamp(self, now=None):
"""Emit a timestamp to the subunit stream.
If 'now' is not specified, use the current time on the system clock.
"""
if now is None:
now = datetime.now(self.UTC)
self._subunit.time(now)
return now
def _emit_fake_test(self, message, tag, details=None):
"""Emit a successful fake test to the subunit stream.
Use this to print tagged informative messages.
"""
test = FakeTest(message)
with self._subunit.setRunnable(False):
self._subunit.startTest(test)
self._subunit.tags([tag], [])
self._subunit.addSuccess(test, details=details)
self._subunit.stopTest(test)
def _emit_error(self, error_id, tag, exc_info, runnable=False):
"""Emit an error to the subunit stream.
Use this to pass on information about errors that occur outside of
tests.
"""
test = FakeTest(error_id)
with self._subunit.setRunnable(runnable):
self._subunit.startTest(test)
self._subunit.tags([tag], [])
self._subunit.addError(test, exc_info)
self._subunit.stopTest(test)
def _emit_failure(self, failure_id, tag, exc_info):
"""Emit an failure to the subunit stream.
Use this to pass on information about failures that occur outside of
tests.
"""
test = FakeTest(failure_id)
self._subunit.addFailure(test, exc_info)
def _enter_layer(self, layer_name):
"""Tell subunit that we are entering a layer."""
self._subunit.tags(['zope:layer:%s' % (layer_name,)], [])
def _exit_layer(self, layer_name):
"""Tell subunit that we are exiting a layer."""
self._subunit.tags([], ['zope:layer:%s' % (layer_name,)])
def info(self, message):
"""Print an informative message."""
# info() output is not relevant to actual test results. It only
# says things like "Running tests" or "Tearing down left over
# layers", things that are communicated already by the subunit
# stream. Just suppress the info() output.
pass
def info_suboptimal(self, message):
"""Print an informative message about losing some of the features.
For example, when you run some tests in a subprocess, you lose the
ability to use the debugger.
"""
# Used _only_ to indicate running in a subprocess.
self._emit_fake_test(message.strip(), self.TAG_INFO_SUBOPTIMAL)
def error(self, message):
"""Report an error."""
# XXX: Mostly used for user errors, sometimes used for errors in the
# test framework, sometimes used to record layer setUp failure (!!!).
self._stream.write('%s\n' % (message,))
def error_with_banner(self, message):
"""Report an error with a big ASCII banner."""
# Either "Could not communicate with subprocess"
# Or "Can't post-mortem debug when running a layer as a subprocess!"
self._emit_fake_test(message, self.TAG_ERROR_WITH_BANNER)
def profiler_stats(self, stats):
"""Report profiler stats."""
fd, filename = tempfile.mkstemp(prefix='zope.testrunner-')
os.close(fd)
try:
stats.dump_stats(filename)
profile_content = content_from_file(
filename, content_type=self.PROFILE_CONTENT_TYPE)
details = {'profiler-stats': profile_content}
# Name the test 'zope:profiler_stats' just like its tag.
self._emit_fake_test(
self.TAG_PROFILER_STATS, self.TAG_PROFILER_STATS, details)
finally:
os.unlink(filename)
def import_errors(self, import_errors):
"""Report test-module import errors (if any)."""
if import_errors:
for error in import_errors:
self._emit_error(
error.module, self.TAG_IMPORT_ERROR, error.exc_info,
runnable=True)
def tests_with_errors(self, errors):
"""Report names of tests with errors (if any).
Simply not supported by the subunit formatter. Fancy summary output
doesn't make sense.
"""
pass
def tests_with_failures(self, failures):
"""Report names of tests with failures (if any).
Simply not supported by the subunit formatter. Fancy summary output
doesn't make sense.
"""
pass
def modules_with_import_problems(self, import_errors):
"""Report names of modules with import problems (if any)."""
# This is simply a summary method, and subunit output doesn't
# benefit from summaries.
pass
def summary(self, n_tests, n_failures, n_errors, n_seconds,
n_skipped=0):
"""Summarize the results of a single test layer.
Since subunit is a stream protocol format, it has no need for a
summary. When the stream is finished other tools can generate a
summary if so desired.
"""
pass
def totals(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0):
"""Summarize the results of all layers.
Simply not supported by the subunit formatter. Fancy summary output
doesn't make sense.
"""
pass
def _emit_exists(self, test):
"""Emit an indication that a test exists.
With the v1 protocol, we just emit a fake success line.
"""
self._subunit.addSuccess(test)
def list_of_tests(self, tests, layer_name):
"""Report a list of test names."""
self._enter_layer(layer_name)
for test in tests:
self._subunit.startTest(test)
self._emit_exists(test)
self._subunit.stopTest(test)
self._exit_layer(layer_name)
    def garbage(self, garbage):
        """Report garbage generated by tests."""
        # XXX: Really, 'garbage', 'profiler_stats' and the 'refcounts' twins
        # ought to add extra details to a fake test that represents the
        # summary information for the whole suite. However, there's no event
        # on output formatters for "everything is really finished, honest". --
        # jml, 2010-02-14
        # NOTE(review): `unicode` suggests a Py2-era module or a compat alias
        # defined elsewhere in this file -- confirm before running on Py3.
        details = {'garbage': text_content(unicode(garbage))}
        self._emit_fake_test(self.TAG_GARBAGE, self.TAG_GARBAGE, details)
    def test_garbage(self, test, garbage):
        """Report garbage generated by a test.

        Encoded in the subunit stream as a test error. Clients can filter
        out these tests based on the tag if they don't think garbage should
        fail the test run.
        """
        # XXX: Perhaps 'test_garbage' and 'test_threads' ought to be within
        # the output for the actual test, appended as details to whatever
        # result the test gets. Not an option with the present API, as there's
        # no event for "no more output for this test". -- jml, 2010-02-14
        # Emitted as its own start/tags/error/stop sequence so the tag is
        # scoped to just this synthetic error.
        self._subunit.startTest(test)
        self._subunit.tags([self.TAG_GARBAGE], [])
        self._subunit.addError(
            test, details={'garbage': text_content(unicode(garbage))})
        self._subunit.stopTest(test)
    def test_threads(self, test, new_threads):
        """Report threads left behind by a test.

        Encoded in the subunit stream as a test error. Clients can filter
        out these tests based on the tag if they don't think left-over
        threads should fail the test run.
        """
        # Same synthetic start/tags/error/stop shape as test_garbage above.
        self._subunit.startTest(test)
        self._subunit.tags([self.TAG_THREADS], [])
        self._subunit.addError(
            test, details={'threads': text_content(unicode(new_threads))})
        self._subunit.stopTest(test)
    def refcounts(self, rc, prev):
        """Report a change in reference counts.

        :param rc: current refcount total; `prev` is the previous total.
        """
        details = _SortedDict({
            'sys-refcounts': text_content(str(rc)),
            'changes': text_content(str(rc - prev)),
        })
        # XXX: Emit the details dict as JSON?
        self._emit_fake_test(self.TAG_REFCOUNTS, self.TAG_REFCOUNTS, details)
    def detailed_refcounts(self, track, rc, prev):
        """Report a change in reference counts, with extra detail.

        `track.delta` is assumed to carry the tracked object delta -- the
        tracker type is defined elsewhere; confirm there.
        """
        details = _SortedDict({
            'sys-refcounts': text_content(str(rc)),
            'changes': text_content(str(rc - prev)),
            'track': text_content(str(track.delta)),
        })
        self._emit_fake_test(self.TAG_REFCOUNTS, self.TAG_REFCOUNTS, details)
    def start_set_up(self, layer_name):
        """Report that we're setting up a layer.

        We do this by emitting a fake test of the form '$LAYER_NAME:setUp'
        and adding a tag of the form 'zope:layer:$LAYER_NAME' to the current
        tag context.

        The next output operation should be stop_set_up().
        """
        test = FakeTest('%s:setUp' % (layer_name,))
        now = self._emit_timestamp()
        # Layer setUp is not a runnable test; mark it so in the stream.
        with self._subunit.setRunnable(False):
            self._subunit.startTest(test)
            self._subunit.tags([self.TAG_LAYER], [])
        # Remembered so stop_set_up()/layer_failure() can reference it.
        self._last_layer = (layer_name, now)
    def stop_set_up(self, seconds):
        """Report that we've set up a layer.

        Should be called right after start_set_up().
        """
        layer_name, start_time = self._last_layer
        self._last_layer = None
        test = FakeTest('%s:setUp' % (layer_name,))
        # Emit end time as start + duration so the stream carries timing.
        self._emit_timestamp(start_time + timedelta(seconds=seconds))
        with self._subunit.setRunnable(False):
            self._subunit.addSuccess(test)
            self._subunit.stopTest(test)
        self._enter_layer(layer_name)
    def layer_failure(self, failure_type, exc_info):
        """Report a layer setUp/tearDown failure as a fake failing test.

        Uses the layer recorded by the preceding start_set_up() /
        start_tear_down() call.
        """
        layer_name, start_time = self._last_layer
        self._emit_failure(
            '%s:%s' % (layer_name, failure_type), self.TAG_LAYER, exc_info)
    def start_tear_down(self, layer_name):
        """Report that we're tearing down a layer.

        We do this by emitting a fake test of the form
        '$LAYER_NAME:tearDown' and removing a tag of the form
        'layer:$LAYER_NAME' from the current tag context.

        The next output operation should be stop_tear_down() or
        tear_down_not_supported().
        """
        test = FakeTest('%s:tearDown' % (layer_name,))
        self._exit_layer(layer_name)
        now = self._emit_timestamp()
        with self._subunit.setRunnable(False):
            self._subunit.startTest(test)
            self._subunit.tags([self.TAG_LAYER], [])
        # Remembered so stop_tear_down()/tear_down_not_supported() can use it.
        self._last_layer = (layer_name, now)
    def stop_tear_down(self, seconds):
        """Report that we've torn down a layer.

        Should be called right after start_tear_down().
        """
        layer_name, start_time = self._last_layer
        self._last_layer = None
        test = FakeTest('%s:tearDown' % (layer_name,))
        self._emit_timestamp(start_time + timedelta(seconds=seconds))
        with self._subunit.setRunnable(False):
            self._subunit.addSuccess(test)
            self._subunit.stopTest(test)
    def tear_down_not_supported(self):
        """Report that we could not tear down a layer.

        Should be called right after start_tear_down().
        """
        layer_name, start_time = self._last_layer
        self._last_layer = None
        test = FakeTest('%s:tearDown' % (layer_name,))
        self._emit_timestamp()
        with self._subunit.setRunnable(False):
            # Recorded as a skip rather than a failure.
            self._subunit.addSkip(test, 'tearDown not supported')
            self._subunit.stopTest(test)
    def start_test(self, test, tests_run, total_tests):
        """Report that we're about to run a test.

        The next output operation should be test_success(), test_error(), or
        test_failure().  `tests_run`/`total_tests` are unused: the stream
        has no progress summary.
        """
        self._emit_timestamp()
        self._subunit.startTest(test)
    def test_success(self, test, seconds):
        """Report that a test was successful.

        Should be called right after start_test().
        The next output operation should be stop_test().
        """
        self._emit_timestamp()
        self._subunit.addSuccess(test)
    def test_skipped(self, test, reason):
        """Report that a test was skipped.

        Should be called right after start_test().
        The next output operation should be stop_test().
        """
        # NOTE(review): unlike test_success/test_error, no timestamp is
        # emitted here -- confirm whether that is intentional.
        self._subunit.addSkip(test, reason)
def _exc_info_to_details(self, exc_info):
"""Translate 'exc_info' into a details dict usable with subunit."""
# In an ideal world, we'd use the pre-bundled 'TracebackContent'
# class from testtools. However, 'OutputFormatter' contains special
# logic to handle errors from doctests, so we have to use that and
# manually create an object equivalent to an instance of
# 'TracebackContent'.
formatter = OutputFormatter(None)
traceback = formatter.format_traceback(exc_info)
# We have no idea if the traceback is a unicode object or a
# bytestring with non-ASCII characters. We had best be careful when
# handling it.
if isinstance(traceback, bytes):
# Assume the traceback was UTF-8-encoded, but still be careful.
unicode_tb = traceback.decode('utf-8', 'replace')
else:
unicode_tb = traceback
return _SortedDict({
'traceback': Content(
self.TRACEBACK_CONTENT_TYPE,
lambda: [unicode_tb.encode('utf8')]),
})
def _add_std_streams_to_details(self, details, stdout, stderr):
"""Add buffered standard stream contents to a subunit details dict."""
if stdout:
if isinstance(stdout, bytes):
stdout = stdout.decode('utf-8', 'replace')
details['test-stdout'] = Content(
self.PLAIN_TEXT, lambda: [stdout.encode('utf-8')])
if stderr:
if isinstance(stderr, bytes):
stderr = stderr.decode('utf-8', 'replace')
details['test-stderr'] = Content(
self.PLAIN_TEXT, lambda: [stderr.encode('utf-8')])
    def test_error(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that an error occurred while running a test.

        Should be called right after start_test().
        The next output operation should be stop_test().
        """
        self._emit_timestamp()
        details = self._exc_info_to_details(exc_info)
        self._add_std_streams_to_details(details, stdout, stderr)
        self._subunit.addError(test, details=details)
    def test_failure(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that a test failed.

        Should be called right after start_test().
        The next output operation should be stop_test().
        """
        self._emit_timestamp()
        details = self._exc_info_to_details(exc_info)
        self._add_std_streams_to_details(details, stdout, stderr)
        self._subunit.addFailure(test, details=details)
    def stop_test(self, test):
        """Clean up the output state after a test."""
        self._subunit.stopTest(test)
    def stop_tests(self):
        """Clean up the output state after a collection of tests."""
        # subunit handles all of this itself.
        pass
class SubunitV2OutputFormatter(SubunitOutputFormatter):
    """A subunit v2 output formatter."""

    @classmethod
    def _subunit_factory(cls, stream):
        """Return a TestResult attached to the given stream."""
        stream_result = _RunnableDecorator(subunit.StreamResultToBytes(stream))
        result = testtools.ExtendedToStreamDecorator(stream_result)
        # Lift our decorating method up so that we can get at it easily.
        result.setRunnable = stream_result.setRunnable
        result.startTestRun()
        return result

    def error(self, message):
        """Report an error."""
        # XXX: Mostly used for user errors, sometimes used for errors in the
        # test framework, sometimes used to record layer setUp failure (!!!).
        # NOTE(review): `unicode` presumably comes from a Py2/3 compat alias
        # defined elsewhere in this module -- confirm.
        self._subunit.status(
            file_name='error', file_bytes=unicode(message).encode('utf-8'),
            eof=True, mime_type=repr(self.PLAIN_TEXT))

    def _emit_exists(self, test):
        """Emit an indication that a test exists."""
        # v2 has a first-class 'exists' status, unlike the v1 fallback.
        now = datetime.now(self.UTC)
        self._subunit.status(
            test_id=test.id(), test_status='exists',
            test_tags=self._subunit.current_tags, timestamp=now)
| 47,925 | 13,868 |
# Generated by Django 2.2 on 2021-09-11 04:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alter GwMonitoring.id to a BigAutoField primary key."""
    dependencies = [
        ('waterApp', '0010_auto_20210911_1041'),
    ]
    operations = [
        migrations.AlterField(
            model_name='gwmonitoring',
            name='id',
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
| 410 | 149 |
"""The Stratified Space Geometry Package."""
| 45 | 13 |
from django.core.management import BaseCommand
import logging
# These two lines enable debugging at httplib level (requests->urllib3->http.client)
# You will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA.
# The only thing missing will be the response.body which is not logged.
try:
    import http.client as http_client
except ImportError:
    # Python 2
    import httplib as http_client
# Log the full request line and headers for every HTTP connection.
http_client.HTTPConnection.debuglevel = 1
# You must initialize logging, otherwise you'll not see debug output.
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
# Turn on DEBUG logging for requests' bundled urllib3 as well.
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
class Command(BaseCommand):
    """Run the exporter's model-export task directly (synchronously)."""

    def handle(self, *args, **options):
        # Imported lazily -- presumably so Django app loading completes
        # before the task module is pulled in; confirm.
        from exporter.tasks import GenerateModelExportTask
        export_task = GenerateModelExportTask()
        export_task.run(1)
from functools import partial
import os
import pytest
import dask
import dask.array as da
from dask.utils_test import inc
from dask.highlevelgraph import HighLevelGraph, BasicLayer, Layer
from dask.blockwise import Blockwise
from dask.array.utils import assert_eq
def test_visualize(tmpdir):
    """HighLevelGraph.visualize writes output at the given path."""
    pytest.importorskip("graphviz")
    out_path = str(tmpdir)
    ones = da.ones(10, chunks=(5,))
    left = ones + 1
    right = ones + 2
    total = left + right
    total.dask.visualize(out_path)
    assert os.path.exists(out_path)
def test_basic():
    """A HighLevelGraph maps like the union of its layers."""
    layer_a = {"x": 1}
    layer_b = {"y": (inc, "x")}
    hg = HighLevelGraph(
        {"a": layer_a, "b": layer_b},
        {"a": set(), "b": {"a"}},
    )
    assert dict(hg) == {"x": 1, "y": (inc, "x")}
    assert all(isinstance(layer, Layer) for layer in hg.layers.values())
def test_keys_values_items_methods():
    """keys()/values()/items() return lists consistent with iteration."""
    base = da.ones(10, chunks=(5,))
    combined = (base + 1) + (base + 2)
    graph = combined.dask
    keys = graph.keys()
    values = graph.values()
    items = graph.items()
    assert all(isinstance(obj, list) for obj in (keys, values, items))
    assert keys == list(graph)
    assert values == [graph[key] for key in graph]
    assert items == list(zip(keys, values))
def test_cull():
    """cull() keeps only the tasks reachable from the requested keys."""
    tasks = {"x": 1, "y": (inc, "x")}
    layer = BasicLayer(
        tasks, dependencies={"x": set(), "y": {"x"}}, global_dependencies=set()
    )
    hg = HighLevelGraph({"a": layer}, {"a": set()})
    # "x" has no dependencies, so only "x" survives.
    assert dict(hg.cull({"x"})) == {"x": 1}
    # "y" depends on "x", so both survive.
    assert dict(hg.cull({"y"})) == tasks
@pytest.mark.parametrize("inject_dict", [True, False])
def test_map_basic_layers(inject_dict):
    """Check map_basic_layers() by injecting an inc() call"""
    y = da.ones(3, chunks=(3,), dtype="int") + 40
    def inject_inc(dsk):
        # Only BasicLayers should be passed through (Blockwise is skipped).
        assert isinstance(dsk, BasicLayer)
        dsk = dict(dsk)
        k = next(iter(dsk))
        dsk[k] = (inc, dsk[k])
        if inject_dict:
            return dsk  # map_basic_layers() should automatically convert it to a `BasicLayer`
        else:
            return BasicLayer(dsk)
    dsk = y.__dask_graph__()
    y.dask = dsk.map_basic_layers(inject_inc)
    layers = list(y.dask.layers.values())
    # Layer ordering is assumed: [0] basic data layer, [1] blockwise add.
    assert isinstance(layers[0], BasicLayer)
    assert isinstance(layers[1], Blockwise)
    # 1 + 40 + injected inc == 42
    assert_eq(y, [42] * 3)
@pytest.mark.parametrize("use_layer_map_task", [True, False])
def test_map_tasks(use_layer_map_task):
    """Check map_tasks() by injecting an +1 to the `40` literal"""
    y = da.ones(3, chunks=(3,), dtype="int") + 40
    def plus_one(tasks):
        # Rewrite any literal 40 into 41; leave other tasks untouched.
        ret = []
        for t in tasks:
            if t == 40:
                t += 1
            ret.append(t)
        return tuple(ret)
    dsk = y.__dask_graph__()
    if use_layer_map_task:
        # In order to test the default map_tasks() implementation on a Blockwise Layer,
        # we overwrite Blockwise.map_tasks with Layer.map_tasks
        blockwise_layer = list(dsk.layers.values())[1]
        blockwise_layer.map_tasks = partial(Layer.map_tasks, blockwise_layer)
    y.dask = dsk.map_tasks(plus_one)
    # 1 + 41 == 42 per element
    assert_eq(y, [42] * 3)
def annot_map_fn(key):
    # Drop the leading name component of a dask key, leaving the block id.
    return key[1:]
@pytest.mark.parametrize(
    "annotation",
    [
        {"worker": "alice"},
        {"block_id": annot_map_fn},
    ],
)
def test_single_annotation(annotation):
    """dask.annotate attaches annotations to layers built inside it."""
    with dask.annotate(**annotation):
        arr = da.ones((10, 10), chunks=(5, 5))
    layer = arr.__dask_graph__().layers[arr.name]
    assert layer.annotations == annotation
    # The annotation context must not leak into global config.
    assert dask.config.get("annotations", None) is None
def test_multiple_annotations():
    """Nested annotate contexts merge; inner scopes win and don't leak."""
    with dask.annotate(block_id=annot_map_fn):
        with dask.annotate(resource="GPU"):
            a = da.ones((10, 10), chunks=(5, 5))
        b = a + 1
    c = b + 1
    assert dask.config.get("annotations", None) is None
    assert a.__dask_graph__().layers[a.name].annotations == {
        "resource": "GPU",
        "block_id": annot_map_fn,
    }
    assert b.__dask_graph__().layers[b.name].annotations == {
        "block_id": annot_map_fn,
    }
    assert c.__dask_graph__().layers[c.name].annotations is None
| 4,179 | 1,566 |
# Requires pip install bitarray
from bitarray import bitarray
import argparse, math
def derive_transfer_function(pTransferFunctionString: str) -> list:
    """Parse a comma-delimited s-box into a list of ints and validate it.

    A valid transfer function is a permutation of 0..N-1 whose length N is a
    power of two (so each entry maps an n-bit input to an n-bit output).

    :param pTransferFunctionString: e.g. "3,2,0,1"
    :returns: the substitution table as a list of ints
    :raises Exception: if the table is not a permutation or N is not 2**k
    """
    lTransferFunction = list(map(int, pTransferFunctionString.split(',')))
    lLengthTransferFunction = len(lTransferFunction)
    # Permutation check in O(n) via set equality (the original per-element
    # `in` scan was O(n^2)): every value 0..N-1 present exactly once.
    if set(lTransferFunction) != set(range(lLengthTransferFunction)):
        raise Exception('Transfer function must contain all integers from 0 to N where (N - 1) is length of the substitution array.')
    # Exact power-of-two test; avoids float rounding issues of math.log.
    if lLengthTransferFunction & (lLengthTransferFunction - 1):
        raise Exception('Transfer function length must be even power of 2.')
    return lTransferFunction
def print_transfer_function_table(pTransferFunction: list) -> None:
    """Print the s-box truth table: input bits x, output bits y per row.

    Uses format()'s binary presentation directly instead of round-tripping
    through the third-party bitarray type (the bits were only ever printed).
    Output is identical to the previous implementation.
    """
    lLengthTransferFunction = len(pTransferFunction)
    lNumberBits = int(math.log(lLengthTransferFunction, 2))
    lFormat = '0' + str(lNumberBits) + 'b'
    # print column headers
    print()
    for i in range(0, lNumberBits):
        print("x=" + str(i) + "\t", end="")
    for i in range(0, lNumberBits):
        print("y=" + str(i) + "\t", end="")
    print()
    # print values for transfer function
    for lIndex, lSubstitutionValue in enumerate(pTransferFunction):
        for lBit in format(lIndex, lFormat):
            print(int(lBit), end="")
        print("\t", end="")
        for lBit in format(lSubstitutionValue, lFormat):
            print(int(lBit), end="")
        print("\t", end="")
        print()
    print()
def print_linear_approximation_table(pTransferFunction: list) -> None:
    """Print the Linear Approximation Table (LAT) of the s-box.

    Cell (a, b) counts inputs x for which a.x == b.S(x), where '.' is the
    GF(2) dot product.  The bitwise dot product is just the parity of
    popcount(a & x), so plain integer ops replace the former bitarray
    round-trip; output is identical to the previous implementation.
    """
    lLengthTransferFunction = len(pTransferFunction)
    # print column headers
    print("\t", end="")
    for i in range(0, lLengthTransferFunction):
        print("b=" + str(i) + "\t", end="")
    print()
    for lA in range(0, lLengthTransferFunction):
        # print row header
        print("a=" + str(lA) + "\t", end="")
        for lB in range(0, lLengthTransferFunction):
            lCount = 0
            for lX, lY in enumerate(pTransferFunction):
                # a.x and b.y as parity of the masked popcount
                lAXDot = bin(lA & lX).count('1') & 1
                lBYDot = bin(lB & lY).count('1') & 1
                if lAXDot ^ lBYDot == 0:
                    lCount += 1
            # end looping through transfer function
            print(str(lCount) + "\t", end="")
        # end for b
        print()
    # end for a
if __name__ == '__main__':
    # CLI entry point: parse the s-box, then print whichever tables were
    # requested.  Fixes: the '-all' help text was a copy-paste of '-lat',
    # and "linear transformation" is corrected to the standard term
    # "linear approximation" table (LAT).
    lArgParser = argparse.ArgumentParser(description='Transference: A tool to help visualize s-boxes (substitution boxes or transfer functions)')
    lArgParser.add_argument('-tft', '--transfer-function-table', help='Print the transfer function table for the s-box', action='store_true')
    lArgParser.add_argument('-lat', '--linear-approximation-table', help='Calculate the linear approximation table for the s-box', action='store_true')
    lArgParser.add_argument('-all', '--all', help='Print the transfer function table and calculate the linear approximation table for the s-box', action='store_true')
    lArgParser.add_argument('-v', '--verbose', help='Enables verbose output', action='store_true')
    lArgParser.add_argument('INPUT', action='store', type=str, help='The substitution table (s-box) represented as a comma delimted list of integers. The length of the list is the number of bits in the substitution. Required. Example: 3,2,0,1 means substitute 3 for 0, 2 for 1, 0 for 2 and 1 for 3. ')
    lArgs = lArgParser.parse_args()
    lTransferFunction = derive_transfer_function(lArgs.INPUT)
    # --all implies both tables.
    if lArgs.all:
        lArgs.transfer_function_table = lArgs.linear_approximation_table = True
    if lArgs.transfer_function_table:
        print_transfer_function_table(lTransferFunction)
    if lArgs.linear_approximation_table:
        print_linear_approximation_table(lTransferFunction)
| 4,612 | 1,451 |
import pytest
from aiospamc.client import Client
from aiospamc.exceptions import (
BadResponse,
UsageException,
DataErrorException,
NoInputException,
NoUserException,
NoHostException,
UnavailableException,
InternalSoftwareException,
OSErrorException,
OSFileException,
CantCreateException,
IOErrorException,
TemporaryFailureException,
ProtocolException,
NoPermissionException,
ConfigException,
ServerTimeoutException,
ResponseException,
)
from aiospamc.responses import Response
async def test_request_sent_to_connection(mock_client_dependency, mocker, hostname):
    """The serialized request bytes are what gets sent on the connection."""
    request = mocker.MagicMock()
    await mock_client_dependency.request(request, host=hostname)
    sent = mock_client_dependency.connection_factory().request.await_args[0][0]
    assert sent == bytes(request)
async def test_request_response_sent_to_parser(
    mock_client_dependency, mocker, hostname
):
    """The raw connection response is handed to the parser."""
    request = mocker.MagicMock()
    connection = mock_client_dependency.connection_factory()
    parser = mock_client_dependency.parser_factory()
    mocker.spy(parser, "parse")
    await mock_client_dependency.request(request, host=hostname)
    raw_response = connection.request.return_value
    assert parser.parse.call_args[0][0] == raw_response
async def test_request_returns_response(mock_client_dependency, mocker, hostname):
    """request() wraps the parsed mapping in a Response object.

    Removed the unused local `connection` (the connection factory result was
    never referenced in this test).
    """
    mock_req = mocker.MagicMock()
    parser = mock_client_dependency.parser_factory()
    parse_spy = mocker.spy(parser, "parse")
    result = await mock_client_dependency.request(mock_req, host=hostname)
    expected = Response(**parse_spy.spy_return)
    assert expected == result
async def test_request_raises_usage(mock_client_response, mocker, ex_usage, hostname):
    """An EX_USAGE reply surfaces as UsageException."""
    client = mock_client_response(ex_usage)
    request = mocker.MagicMock()
    with pytest.raises(UsageException):
        await client.request(request, host=hostname)
async def test_request_raises_data_err(
    mock_client_response, mocker, ex_data_err, hostname
):
    """An EX_DATAERR reply surfaces as DataErrorException."""
    client = mock_client_response(ex_data_err)
    request = mocker.MagicMock()
    with pytest.raises(DataErrorException):
        await client.request(request, host=hostname)
async def test_request_raises_no_input(
    mock_client_response, mocker, ex_no_input, hostname
):
    """An EX_NOINPUT reply surfaces as NoInputException."""
    client = mock_client_response(ex_no_input)
    request = mocker.MagicMock()
    with pytest.raises(NoInputException):
        await client.request(request, host=hostname)
async def test_request_raises_no_user(
    mock_client_response, mocker, ex_no_user, hostname
):
    """An EX_NOUSER reply surfaces as NoUserException."""
    client = mock_client_response(ex_no_user)
    request = mocker.MagicMock()
    with pytest.raises(NoUserException):
        await client.request(request, host=hostname)
async def test_request_raises_no_host(
    mock_client_response, mocker, ex_no_host, hostname
):
    """An EX_NOHOST reply surfaces as NoHostException."""
    client = mock_client_response(ex_no_host)
    request = mocker.MagicMock()
    with pytest.raises(NoHostException):
        await client.request(request, host=hostname)
async def test_request_raises_unavailable(
    mock_client_response, mocker, ex_unavailable, hostname
):
    """An EX_UNAVAILABLE reply surfaces as UnavailableException."""
    client = mock_client_response(ex_unavailable)
    request = mocker.MagicMock()
    with pytest.raises(UnavailableException):
        await client.request(request, host=hostname)
async def test_request_raises_software(
    mock_client_response, mocker, ex_software, hostname
):
    """An EX_SOFTWARE reply surfaces as InternalSoftwareException."""
    client = mock_client_response(ex_software)
    request = mocker.MagicMock()
    with pytest.raises(InternalSoftwareException):
        await client.request(request, host=hostname)
async def test_request_raises_os_error(
    mock_client_response, mocker, ex_os_err, hostname
):
    """An EX_OSERR reply surfaces as OSErrorException."""
    client = mock_client_response(ex_os_err)
    request = mocker.MagicMock()
    with pytest.raises(OSErrorException):
        await client.request(request, host=hostname)
async def test_request_raises_os_file(
    mock_client_response, mocker, ex_os_file, hostname
):
    """An EX_OSFILE reply surfaces as OSFileException."""
    client = mock_client_response(ex_os_file)
    request = mocker.MagicMock()
    with pytest.raises(OSFileException):
        await client.request(request, host=hostname)
async def test_request_raises_cant_create(
    mock_client_response, mocker, ex_cant_create, hostname
):
    """An EX_CANTCREAT reply surfaces as CantCreateException."""
    client = mock_client_response(ex_cant_create)
    request = mocker.MagicMock()
    with pytest.raises(CantCreateException):
        await client.request(request, host=hostname)
async def test_request_raises_io_error(
    mock_client_response, mocker, ex_io_err, hostname
):
    """An EX_IOERR reply surfaces as IOErrorException."""
    client = mock_client_response(ex_io_err)
    request = mocker.MagicMock()
    with pytest.raises(IOErrorException):
        await client.request(request, host=hostname)
async def test_request_raises_temporary_failure(
    mock_client_response, mocker, ex_temp_fail, hostname
):
    """An EX_TEMPFAIL reply surfaces as TemporaryFailureException."""
    client = mock_client_response(ex_temp_fail)
    request = mocker.MagicMock()
    with pytest.raises(TemporaryFailureException):
        await client.request(request, host=hostname)
async def test_request_raises_protocol(
    mock_client_response, mocker, ex_protocol, hostname
):
    """An EX_PROTOCOL reply surfaces as ProtocolException."""
    client = mock_client_response(ex_protocol)
    request = mocker.MagicMock()
    with pytest.raises(ProtocolException):
        await client.request(request, host=hostname)
async def test_request_raises_no_permission(
    mock_client_response, mocker, ex_no_perm, hostname
):
    """An EX_NOPERM reply surfaces as NoPermissionException."""
    client = mock_client_response(ex_no_perm)
    request = mocker.MagicMock()
    with pytest.raises(NoPermissionException):
        await client.request(request, host=hostname)
async def test_request_raises_config(mock_client_response, mocker, ex_config, hostname):
    """An EX_CONFIG reply surfaces as ConfigException."""
    client = mock_client_response(ex_config)
    request = mocker.MagicMock()
    with pytest.raises(ConfigException):
        await client.request(request, host=hostname)
async def test_request_raises_timeout(
    mock_client_response, mocker, ex_timeout, hostname
):
    """A server timeout reply surfaces as ServerTimeoutException."""
    client = mock_client_response(ex_timeout)
    request = mocker.MagicMock()
    with pytest.raises(ServerTimeoutException):
        await client.request(request, host=hostname)
async def test_request_raises_undefined(
    mock_client_response, mocker, ex_undefined, hostname
):
    """An unrecognized error status falls back to the base ResponseException."""
    client = mock_client_response(ex_undefined)
    request = mocker.MagicMock()
    with pytest.raises(ResponseException):
        await client.request(request, host=hostname)
| 6,264 | 1,987 |
import os
import tempfile
import importlib
import pytest
import astropy
import astropy.config.paths
# Force MPL to use non-gui backends for testing.
try:
    import matplotlib
except ImportError:
    pass
else:
    matplotlib.use('Agg')
# Don't actually import pytest_remotedata because that can do things to the
# entrypoints code in pytest.
remotedata_spec = importlib.util.find_spec("pytest_remotedata")
# True when the pytest-remotedata plugin is installed (checked via find_spec
# so the plugin itself is never imported here).
HAVE_REMOTEDATA = remotedata_spec is not None
# Do not collect the sample data file because this would download the sample data.
collect_ignore = ["data/sample.py"]
@pytest.fixture(scope='session', autouse=True)
def tmp_config_dir(request):
    """
    Globally set the default config for all tests.

    Points SUNPY_CONFIGDIR and astropy's temp config/cache paths at a
    session-scoped temporary directory, and restores everything afterwards.
    """
    tmpdir = tempfile.TemporaryDirectory()
    os.environ["SUNPY_CONFIGDIR"] = str(tmpdir.name)
    astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name)
    astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name)
    yield
    # Teardown order matters: drop the env var, then remove the directory,
    # then clear astropy's path overrides.
    del os.environ["SUNPY_CONFIGDIR"]
    tmpdir.cleanup()
    astropy.config.paths.set_temp_config._temp_path = None
    astropy.config.paths.set_temp_cache._temp_path = None
@pytest.fixture()
def sunpy_cache(mocker, tmp_path):
    """
    Provide a way to add local files to the cache. This can be useful when mocking
    remote requests.

    Yields a function that patches a dotted target with an in-memory Cache;
    the cache gains an `add(url, path)` helper for seeding entries.
    """
    from types import MethodType
    from sunpy.data.data_manager.cache import Cache
    from sunpy.data.data_manager.downloader import ParfiveDownloader
    from sunpy.data.data_manager.storage import InMemStorage
    cache = Cache(
        ParfiveDownloader(),
        InMemStorage(),
        tmp_path,
        None
    )
    def add(self, url, path):
        # Seed the in-memory storage directly, bypassing any download.
        self._storage.store({
            'url': url,
            'file_path': path,
            'file_hash': 'none',  # hash doesn't matter
        })
    # Bind `add` onto this cache instance only.
    cache.add = MethodType(add, cache)
    def func(mocked):
        mocker.patch(mocked, cache)
        return cache
    yield func
@pytest.fixture()
def undo_config_dir_patch():
    """
    Provide a way for certain tests to not have the config dir.
    """
    saved = os.environ.pop("SUNPY_CONFIGDIR")
    yield
    os.environ["SUNPY_CONFIGDIR"] = saved
@pytest.fixture(scope='session', autouse=True)
def hide_parfive_progress(request):
    """
    Globally set the HIDE_PARFIVE_PROGESS to hide the parfive progress bar in tests.

    Used by the parfive helper class only.
    """
    os.environ["HIDE_PARFIVE_PROGESS"] = "True"
    yield
    os.environ.pop("HIDE_PARFIVE_PROGESS")
@pytest.fixture(scope='session', autouse=True)
def tmp_dl_dir(request):
    """
    Globally set the default download directory for the test run to a tmp dir.
    """
    with tempfile.TemporaryDirectory() as download_dir:
        os.environ["SUNPY_DOWNLOADDIR"] = download_dir
        yield download_dir
        os.environ.pop("SUNPY_DOWNLOADDIR")
@pytest.fixture()
def undo_download_dir_patch():
    """
    Provide a way for certain tests to not have tmp download dir.
    """
    saved = os.environ.pop("SUNPY_DOWNLOADDIR")
    yield
    os.environ["SUNPY_DOWNLOADDIR"] = saved
def pytest_runtest_setup(item):
    """
    pytest hook to skip all tests that have the mark 'remotedata' if the
    pytest_remotedata plugin is not installed.
    """
    if not isinstance(item, pytest.Function):
        return
    if 'remote_data' in item.keywords and not HAVE_REMOTEDATA:
        pytest.skip("skipping remotedata tests as pytest-remotedata is not installed")
| 3,534 | 1,156 |
'''
Models for QtWidgets
'''
from collections import deque
from math import ceil
import datetime as dt
import calendar
class EventInCalendar__Model:
    """Model backing an event cell shown inside the calendar widget."""

    class Text:
        """Printable summary of an event: init time, end time, place."""

        @staticmethod
        def getDefault():
            return EventInCalendar__Model.Text()

        def __init__(self, event=None, overflow=False):
            if event is None:
                # Sentinel span covering all representable time.
                self.init_date = dt.datetime(1, 1, 1)
                self.end_date = dt.datetime(9999, 12, 31)
                self.place = Event__Model.Place()
            else:
                if overflow:
                    # Overflowed events restart at midnight of their day.
                    self.init_date = dt.datetime.combine(
                        event.getInitDate().date(), dt.time(0, 0, 0))
                else:
                    self.init_date = event.getInitDate()
                self.end_date = event.getEndDate()
                self.place = event.getPlace()

        def __str__(self):
            parts = [self.init_date.time(), self.end_date.time(), self.place]
            return ' '.join(str(part) for part in parts)

    @staticmethod
    def colorOf(val):
        """Map a fulfillment ratio to an rgb() CSS string (None if out of range)."""
        bands = (
            (0.0, 0.2, 'rgb(178, 0, 0)'),
            (0.2, 0.5, 'rgb(255, 40, 40)'),
            (0.5, 0.7, 'rgb(191, 165, 0)'),
            (0.7, 1.0, 'rgb(252, 224, 45)'),
            (1.0, 1.1, 'rgb(46, 234, 81)'),
        )
        for low, high, color in bands:
            if low <= val < high:
                return color

    def __init__(self, master, overflow):
        self._fulfillment = 0.0
        self._overflow = overflow
        self._master = master
        self._event = None

    def getFulFillmentStatus(self, numeric=False):
        """Return fulfillment as a float when *numeric*, else as a colour."""
        if numeric:
            return self._fulfillment
        return EventInCalendar__Model.colorOf(self._fulfillment)

    def setEvent(self, event):
        self._event = event.getModel()
        self._fulfillment = self._event.getFulFillmentStatus()

    def __str__(self):
        if self._event is None:
            return str(EventInCalendar__Model.Text())
        return str(EventInCalendar__Model.Text(self._event, self._overflow))
class Event__Model:
    """Plain data holder for a calendar event (dates, place, fulfillment)."""
    class Place:
        """A venue with a display name and a people/occupancy count."""
        def __init__(self, name='NA', people=0):
            self.name = name
            self.people = people
        def __str__(self):
            return self.name
    def __init__(self, init_date, end_date, place, fulfillment=0.0):
        # fulfillment is a ratio, presumably in [0, 1] -- confirm with callers.
        self._init_date = init_date
        self._end_date = end_date
        self._place = place
        self._fulfillment = fulfillment
    def getFulFillmentStatus(self):
        return self._fulfillment
    def getInitDate(self):
        return self._init_date
    def getEndDate(self):
        return self._end_date
    def getPlace(self):
        return self._place
class Date__Model:
    """Model for a single calendar-day cell: date, day type, and events."""

    TYPE_WEEKDAY = 0
    TYPE_WEEKEND = 1
    TYPE_HOLYDAY = 2
    TYPE_FREEDAY = 3
    TYPE_GRAYDAY = 4

    @staticmethod
    def colorOf(val):
        """Map a day-type constant to its background RGB triple."""
        palette = {
            Date__Model.TYPE_WEEKDAY: (219, 219, 219),
            Date__Model.TYPE_WEEKEND: (183, 183, 183),
            Date__Model.TYPE_HOLYDAY: (183, 183, 183),
            Date__Model.TYPE_FREEDAY: (0, 216, 255),
            Date__Model.TYPE_GRAYDAY: (255, 255, 255),
        }
        # Unknown types fall back to the weekday colour.
        return palette.get(val, (219, 219, 219))

    def __init__(self, master, date):
        self._master = master
        self._events = []
        self._date = date
        self._date_type = Date__Model.TYPE_WEEKDAY

    def setDate(self, date, datetype=TYPE_WEEKDAY):
        self._date = date
        self._date_type = datetype

    def getDate(self):
        return self._date

    def getDateType(self, numeric=False):
        """Return the day type as an RGB triple, or the raw constant if *numeric*."""
        if numeric is False:
            return Date__Model.colorOf(self._date_type)
        return self._date_type

    def addEvent(self, event):
        self._events.append(event)

    def getEvents(self):
        return self._events
class Calendar__Model:
TYPE_MONDAY_LEADING = 0
TYPE_TUESDAY_LEADING = 1
TYPE_WEDNESDAY_LEADING = 2
TYPE_THURSDAY_LEADING = 3
TYPE_FRIDAY_LEADING = 4
TYPE_SATURDAY_LEADING = 5
TYPE_SUNDAY_LEADING = 6
MAX_DIM_X = 7
MAX_DIM_Y = 6
WEEKENDS = [5, 6]
    @staticmethod
    def dayOf(date, init, datatree):
        '''
        Returns the day of the week of a given date and the position
        of that day in the calendar grid.

        The returned text value of the day is recovered from the stringer module.

        `datatree['str']['days']` is assumed to be a list of
        (weekday_index, name) pairs -- confirm against the stringer module.
        '''
        days = datatree['str']['days']
        # Get the day of the week of the selected date
        datetuple = tuple([int(s) for s in str(date).split(' ')[0].split('-')])
        day = days[list(zip(*days))[0].index(calendar.weekday(*datetuple))][1]
        # Horizontal position in the grid is deduced from the selected leading day
        days_dq = deque(days)
        days_dq.rotate(7 - init)
        pos_x = list(zip(*days_dq))[0].index(calendar.weekday(*datetuple))
        # Vertical position is deduced from the selected leading day and the
        # day of the first date of that month
        firstmonthday = (datetuple[0], datetuple[1], 1)
        fday = list(zip(*days_dq))[0].index(calendar.weekday(*firstmonthday))
        pos_y = ceil((fday + date.day) / 7) - 1
        # Return the place in the calendar grid depending on the offset
        return day, pos_x, pos_y
def __init__(self, master, ctype=TYPE_SUNDAY_LEADING, holidays=list()):
'''
Calendar constructor, a calendar is an array of dates that should
always be full, thus, initialy an array of empty dates (6x7), is
array is called holders; a second empty array of dates is created
and will replace eventually the dates of the respective holder date.
Both arrays are validated through a snapshot array, the snapshot refers
to the dates that fill the Calendar grid for a current month, be those
dates from the actual month or the adjacent months
'''
self._master = master
self._type = ctype
self._holidays = holidays
# Assume month as current month
self._month = tuple([dt.date.today().year, dt.date.today().month])
# Generate the snapshot for the current month
self._snapshot = self.generateSnapshot()
# Create empty dates from the snapshot
self._dates = self.generateDefaultDates()
def generateSnapshot(self):
    '''
    Build the list of dates that fill the calendar grid for the current
    month: starting at the date occupying the first grid cell (possibly
    in the previous month) and covering all 6x7 holder slots.
    Returns an empty list when no month is selected.
    '''
    if self._month is None:
        return []
    year, month = self._month
    # Start at the first day of the month, then shift back to whatever
    # date sits in the grid's first cell.
    cursor = dt.date(year, month, 1)
    offset = Calendar__Model.dayOf(cursor, self._type, self._master.getDataTree())[1]
    cursor -= dt.timedelta(offset)
    one_day = dt.timedelta(1)
    snapshot = []
    for _ in range(Calendar__Model.MAX_DIM_X * Calendar__Model.MAX_DIM_Y):
        snapshot.append(cursor)
        cursor += one_day
    return snapshot
def generateDefaultDates(self):
    '''Create one empty date holder per snapshot entry, typing each one.'''
    holders = []
    for snap_date in self._snapshot:
        holder = self._master.createDate(snap_date)
        self.setDateType(holder)
        holders.append(holder)
    return holders
def addDate(self, date):
    '''
    Place *date* into the grid slot matching its underlying model date,
    if that date is currently visible in the snapshot. No-op when no
    month is selected or the date falls outside the grid.
    '''
    if self._month is None:
        return
    target = date.getModel().getDate()
    if target not in self._snapshot:
        return
    self.setDateType(date)
    self._dates[self._snapshot.index(target)] = date
def addEventInCalendar(self, date, eic):
    '''
    Attach calendar event *eic* to the grid cell displaying *date*.
    No-op when no month is selected or *date* is not in the snapshot.
    '''
    if self._month is None:
        return
    if date in self._snapshot:
        slot = self._snapshot.index(date)
        self._dates[slot].addCalendarEvent(eic)
def setDateType(self, date):
    '''
    Deduce the display type of *date* — weekday, weekend, holiday or
    grayday (outside the displayed month), in increasing precedence —
    and apply the stronger of the deduced and current types.
    '''
    model = date.getModel()
    current_type = model.getDateType(numeric=True)
    d = model.getDate()
    deduced_type = Date__Model.TYPE_WEEKDAY
    if calendar.weekday(d.year, d.month, d.day) in Calendar__Model.WEEKENDS:
        deduced_type = Date__Model.TYPE_WEEKEND
    if d in self._holidays:
        deduced_type = Date__Model.TYPE_HOLYDAY
    if (d.year, d.month) != self._month:
        deduced_type = Date__Model.TYPE_GRAYDAY
    date.changeDateType(max(current_type, deduced_type))
def _update(self):
    '''Rebuild snapshot and holder dates, then re-add the events visible
    in the new snapshot through the master controller.'''
    self._snapshot = self.generateSnapshot()
    self._dates = self.generateDefaultDates()
    # Only events whose initial date falls inside the new grid are kept.
    visible = [
        event for event in self._master.getEvents()
        if event.getModel().getInitDate().date() in self._snapshot
    ]
    self._master.createEvents(visible)
def setMonth(self, month):
    '''Set the displayed month as a (year, month) tuple and rebuild the grid.'''
    self._month = month
    self._update()
def getMonth(self):
    '''Return the currently displayed (year, month) tuple.'''
    return self._month
def monthSubtract(self):
    '''
    Return the (year, month) tuple one month before the current one.
    Clamped at the lower bound: (1, 1) yields the current month unchanged.
    '''
    year, month = self._month
    if month > 1:
        return (year, month - 1)
    # January: roll back to December of the previous year, unless at year 1.
    return self._month if year == 1 else (year - 1, 12)
def monthAdd(self):
    '''
    Return the (year, month) tuple one month after the current one.
    Clamped at the upper bound: (9999, 12) yields the current month unchanged.
    '''
    year, month = self._month
    if month < 12:
        return (year, month + 1)
    # December: roll forward to January of the next year, unless at year 9999.
    return self._month if year == 9999 else (year + 1, 1)
def setDataTree(self, datatree):
    '''Store the string/data tree and rebuild the grid to reflect it.'''
    self._datatree = datatree
    self._update()
def getDataTree(self):
    '''Return the data tree previously stored via setDataTree.'''
    return self._datatree
def posInSnapshot(self, date):
    '''
    Return the (row, column) position of *date* within the snapshot grid.
    Raises ValueError if the date is not in the snapshot.
    '''
    row, col = divmod(self._snapshot.index(date), 7)
    return row, col
def getHolderDimensions(self):
    '''Return the grid dimensions as (columns, rows) = (7, 6).'''
    return Calendar__Model.MAX_DIM_X, Calendar__Model.MAX_DIM_Y
def getDates(self):
    '''Return the list of date holders currently filling the grid.'''
    return self._dates
def getType(self):
    '''Return the grid layout type (which weekday leads the grid).'''
    return self._type
| 10,165 | 3,244 |
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tensorflow as tf
from bigdl.orca.tfpark.tf_dataset import TensorMeta
from bigdl.dllib.utils import nest
from bigdl.orca.data import SparkXShards
from bigdl.dllib.utils import log4Error
class Dataset(object):
    """
    Represents a distributed set of elements backed by an RDD,
    which is created by applying tensorflow dataset transformations
    on each partitions.
    """

    def __init__(self, xshards, create_dataset_fn):
        # xshards: SparkXShards whose partitions hold the raw element data.
        # create_dataset_fn: callable mapping one partition's data object
        # to a tf.data.Dataset.
        self.xshards = xshards
        self.create_dataset_fn = create_dataset_fn

    def as_graph_rdd(self, batch_per_shard, drop_remainder=True):
        """
        Serialize this dataset for TF1-style graph execution.

        :param batch_per_shard: batch size applied per partition.
        :param drop_remainder: whether the final partial batch is dropped.
        :return: an RDD with one (serialized_graph_def_bytes, meta_info)
                 pair per non-empty partition, where meta_info carries the
                 iterator init-op/table-init-op names, output tensor names,
                 dtype enums, and the TensorMeta structure of one element.
        """
        create_dataset_fn = self.create_dataset_fn

        def to_dataset(iter):
            data_list = list(iter)
            # Imported inside the task so the worker process builds its
            # own graph in its own TF runtime.
            import tensorflow as tf
            if not data_list:
                # Empty partitions contribute nothing to the result RDD.
                return []
            datasets = [create_dataset_fn(data) for data in data_list]
            from functools import reduce
            # Concatenate the per-chunk datasets into one per-partition dataset.
            dataset = reduce(lambda x, y: x.concatenate(y), datasets)
            dataset = dataset.batch(batch_per_shard, drop_remainder)
            iterator = dataset.make_initializable_iterator()
            train_next_ops = nest.flatten(iterator.get_next())
            output_types = [t for t in nest.flatten(dataset.output_types)]
            output_types_enum = [t.as_datatype_enum for t in output_types]
            init_op_name = iterator.initializer.name
            table_init_op = tf.tables_initializer().name
            output_names = [op.name for op in train_next_ops]
            graph = train_next_ops[0].graph
            flatten_shapes = nest.flatten(dataset.output_shapes)
            # Drop the leading batch dimension; TensorMeta describes a
            # single element, not a batch.
            flatten_shapes = [shape[1:] for shape in flatten_shapes]
            flatten_tensor_structure = [TensorMeta(dtype=output_types[i],
                                                   shape=list(flatten_shapes[i]),
                                                   name="zoo_input_{}".format(i))
                                        for i in range(len(flatten_shapes))]
            structure = dataset.output_types
            if isinstance(structure, tf.DType):
                # Single-tensor dataset: wrap in a tuple so
                # nest.pack_sequence_as receives a sequence structure.
                structure = (structure,)
            tensor_structure = nest.pack_sequence_as(structure,
                                                     flatten_tensor_structure)
            meta_info = {
                "init_op_name": init_op_name,
                "table_init_op": table_init_op,
                "output_names": output_names,
                "output_types": output_types_enum,
                "tensor_structure": tensor_structure
            }
            return [(bytearray(graph.as_graph_def().SerializeToString()), meta_info)]

        graph_rdd_and_meta = self.xshards.rdd.mapPartitions(to_dataset)
        return graph_rdd_and_meta

    def as_tf_dataset_rdd(self):
        """
        Serialize this dataset for TF2-style execution: return an RDD with
        one dict per non-empty partition, holding the serialized dataset
        graph ('ds_def') and its element spec ('elem_spec').
        """
        create_dataset_fn = self.create_dataset_fn

        def to_dataset(iter):
            data_list = list(iter)
            if not data_list:
                return []
            from tensorflow.python.distribute.coordinator.values import serialize_dataset_to_graph
            datasets = [create_dataset_fn(data) for data in data_list]
            from functools import reduce
            dataset = reduce(lambda x, y: x.concatenate(y), datasets)
            ds_def = serialize_dataset_to_graph(dataset).numpy()
            elem_spec = dataset.element_spec
            return [{"ds_def": ds_def, "elem_spec": elem_spec}]

        tf_dataset_rdd = self.xshards.rdd.mapPartitions(to_dataset)
        return tf_dataset_rdd

    @staticmethod
    def from_tensor_slices(xshards):
        """Create a Dataset whose elements are the tensor slices of each
        partition of *xshards*."""
        return TensorSliceDataset(xshards)

    @staticmethod
    def from_feature_table(tbl):
        """Create a Dataset from a Friesian FeatureTable (converted to
        SparkXShards first)."""
        from bigdl.friesian.feature import FeatureTable
        from bigdl.friesian.feature.utils import featuretable_to_xshards
        log4Error.invalidInputError(isinstance(tbl, FeatureTable),
                                    "Only Friesian FeatureTable is supported")
        xshards = featuretable_to_xshards(tbl)
        return TensorSliceDataset(xshards)

    def map(self, map_func):
        """Return a new Dataset with *map_func* applied to every element."""
        return MapDataset(self, map_func)
class TensorSliceDataset(Dataset):
    """Dataset whose elements are the tensor slices of each partition's data."""

    def __init__(self, xshards):
        assert isinstance(xshards, SparkXShards), \
            "only datasets backed by a SparkXShards are supported"
        self.xshards = xshards

        # Each partition's data object becomes a from_tensor_slices dataset.
        def make_dataset(data):
            return tf.data.Dataset.from_tensor_slices(data)

        super().__init__(xshards, make_dataset)
class MapDataset(Dataset):
    """Dataset produced by mapping *map_func* over another Dataset's elements."""

    def __init__(self, input_dataset, map_func):
        upstream_fn = input_dataset.create_dataset_fn

        # Build the upstream dataset for the partition, then apply the map.
        def make_dataset(data):
            return upstream_fn(data).map(map_func)

        super().__init__(xshards=input_dataset.xshards,
                         create_dataset_fn=make_dataset)
| 5,457 | 1,586 |
#!/usr/bin/env python3
import sys
import getopt
import xml.etree.ElementTree as ET
def processVendors(outFile, vendors):
outFile.writelines(["\nconstexpr std::array<std::string_view, ", str(
len(vendors)), "> vendors = {{\n"])
for vendor in vendors:
outFile.writelines([' \"', vendor.tag, '\",\n'])
outFile.write('}};\n')
def processEnumValue(outFile, enum, value):
    """Write the numeric literal for an enum *value* element to *outFile*.

    Handles, in priority order: a plain 'value' attribute (emitted
    verbatim), a 'bitpos' attribute (emitted as a fixed-width hex mask),
    or an 'alias' attribute (resolved recursively within *enum*).
    """
    plain = value.get('value')
    bitpos = value.get('bitpos')
    alias = value.get('alias')
    if plain is not None:
        # Spitting out plain values
        outFile.write(plain)
    elif bitpos is not None:
        # Bitflag: 1 << bitpos rendered as 0xXXXXXXXX
        outFile.write('0x' + format(1 << int(bitpos), '08X'))
    elif alias is not None:
        # Defer to the element the alias refers to.
        processEnumValue(outFile, enum, enum.find(alias))
def processEnums(outFile, enums, vendors, first, last):
    """Emit one `constexpr EnumValueSet <Name>Sets[]` array per enum.

    Values are emitted grouped by the header version ('first' attribute)
    that introduced them, from *first* to *last* inclusive, so earlier
    values appear first. VkResult and empty enums are skipped (MSVC
    rejects zero-sized arrays). Value names are shortened by removing the
    deduced enum prefix, any vendor suffix, and a trailing '_BIT'.
    """
    for enum in enums:
        # Skip VkResult
        if enum.tag == 'VkResult':
            continue
        # Skip if there's no values, MSVC can't do zero-sized arrays
        if len(enum.findall('./')) == 0:
            continue
        outFile.writelines(
            ['\nconstexpr EnumValueSet ', enum.tag, 'Sets[] = {\n'])
        # Determine how much to chop off the front
        strName = enum.tag
        typeDigit = ''
        # Determine if type ends with vendor tag
        vendorName = ''
        for vendor in vendors:
            if strName.endswith(vendor.tag):
                vendorName = vendor.tag
                strName = strName[:-len(vendorName)]
        # A trailing digit (e.g. the '2' of ...FlagBits2) becomes part of
        # the prefix later ('..._2_').
        if strName[-1].isdigit():
            typeDigit = strName[-1]
            strName = strName[:-1]
        if strName.endswith('FlagBits'):
            strName = strName[:-8]
        # Construct most likely enum prefix: CamelCase -> UPPER_SNAKE_ plus
        # the optional type digit.
        mainPrefix = ''
        for char in strName:
            if mainPrefix == '':
                mainPrefix += char
            elif char.isupper():
                mainPrefix += '_'
                mainPrefix += char.upper()
            else:
                mainPrefix += char.upper()
        mainPrefix += '_'
        if typeDigit != '':
            mainPrefix += typeDigit
            mainPrefix += '_'
        current = first
        while current <= last:
            # Emit only the values introduced in version `current`.
            for value in enum.findall('./'):
                if int(value.get('first')) != current:
                    continue
                outFile.write(" {\"")
                valueStr = value.tag
                if valueStr.startswith(mainPrefix):
                    valueStr = valueStr[len(mainPrefix):]
                # Strip '_<VENDOR>' suffix (the -1 also removes the underscore).
                if vendorName != '' and valueStr.endswith(vendorName):
                    valueStr = valueStr[:-len(vendorName)-1]
                if valueStr.endswith('_BIT'):
                    valueStr = valueStr[:-4]
                outFile.write(valueStr)
                outFile.write("\", ")
                processEnumValue(outFile, enum, value)
                outFile.write("},\n")
            current += 1
        outFile.write('};\n')
def main(argv):
    """Generate a C++ serialization header from a processed Vulkan XML file.

    Command line options:
        -i <file>  processed Vulkan XML registry (must carry 'first'/'last'
                   version attributes plus 'vendors' and 'enums' sections)
        -o <file>  path of the header file to write

    Exits with status 1 on bad options, missing arguments, or an
    unreadable/unparsable input file.
    """
    inputFile = ''
    outputFile = ''
    try:
        opts, args = getopt.getopt(argv, 'i:o:', [])
    except getopt.GetoptError:
        print('Error parsing options')
        sys.exit(1)
    for opt, arg in opts:
        if opt == '-i':
            inputFile = arg
        elif opt == '-o':
            outputFile = arg
    if inputFile == '':
        print("Error: No Vulkan XML file specified")
        sys.exit(1)
    if outputFile == '':
        print("Error: No output file specified")
        sys.exit(1)
    try:
        dataXml = ET.parse(inputFile)
        dataRoot = dataXml.getroot()
    except (OSError, ET.ParseError):
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only file-open and parse errors are expected.
        print("Error: Could not open input file: ", inputFile)
        sys.exit(1)
    # Supported VK_HEADER_VERSION range described by the registry.
    firstVersion = int(dataRoot.get('first'))
    lastVersion = int(dataRoot.get('last'))
    outFile = open(outputFile, "w")
    # Common Header
    with open("common_header.txt") as fd:
        outFile.write(fd.read())
        outFile.write('\n')
    # Include guard and usage notes
    outFile.write("""#ifndef VK_VALUE_SERIALIZATION_HPP
#define VK_VALUE_SERIALIZATION_HPP
/* USAGE:
To use, include this header where the declarations for the boolean checks are required.
On *ONE* compilation unit, include the definition of `#define VK_VALUE_SERIALIZATION_CONFIG_MAIN`
so that the definitions are compiled somewhere following the one definition rule.
*/
#include <vulkan/vulkan.h>
#include <string>
#include <string_view>
""")
    # Static Asserts pinning the supported VK_HEADER_VERSION window
    outFile.writelines(["\nstatic_assert(VK_HEADER_VERSION >= ", str(
        firstVersion), ", \"VK_HEADER_VERSION is from before the supported range.\");\n"])
    outFile.writelines(["static_assert(VK_HEADER_VERSION <= ", str(
        lastVersion), ", \"VK_HEADER_VERSION is from after the supported range.\");\n"])
    # Function Declarataions
    outFile.write("""
/**
 * @brief Macro that automatically stringifies the given Vulkan type for serialization
 * @param VKTYPE Actual Vulkan type
 * @param VALUE Value to be serialized
 * @param STRPTR Pointer to the string to store the serialization in. Only modified if true is
 * returned.
 * @return True if serialization was successful. False otherwise.
 */
#define VK_SERIALIZE(VKTYPE, VALUE, STRPTR) vk_serialize<VKTYPE>(#VKTYPE, VALUE, STRPTR)
/**
 * @brief Macro that automatically stringifies the given Vulkan type for parsing
 * @param VKTYPE Actual Vulkan type
 * @param STRING String to be parsed
 * @param VALPTR Pointer to the value to store the parsed value in. Only modified if true is
 * returned.
 * @return True if serialization was successful. False otherwise.
 */
#define VK_PARSE(VKTYPE, STRING, VALPTR) vk_parse<VKTYPE>(#VKTYPE, STRING, VALPTR)
/**
 * @brief Serializes a Vulkan enumerator/flag type (32-bit)
 * @param vkType Name of the Vulkan enumerator/flag type
 * @param vkValue Value being serialized
 * @param pString Pointer to a string that will be modified with the serialized value. Only modified
 * if true is returned.
 * @return True the value was successfully serialized. False otherwise.
 */
bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString);
/**
 * @brief Parses a Vulkan enumerator/flag serialized string (32-bit)
 * @param vkType Name of the Vulkan enumerator/flag type
 * @param vkString String being parsed
 * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if
 * true is returned.
 * @return True the value was successfully serialized. False otherwise.
 */
bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue);
/**
 * @brief Serializes a Vulkan enumerator/flag type (64-bit)
 * @param vkType Name of the Vulkan enumerator/flag type
 * @param vkValue Value being serialized
 * @param pString Pointer to a string that will be modified with the serialized value. Only modified
 * if true is returned.
 * @return True the value was successfully serialized. False otherwise.
 */
bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString);
/**
 * @brief Parses a Vulkan enumerator/flag serialized string (64-bit)
 * @param vkType Name of the Vulkan enumerator/flag type
 * @param vkString String being parsed
 * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if
 * true is returned.
 * @return True the value was successfully serialized. False otherwise.
 */
bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue);
/**
 * @brief Serializes a Vulkan enumerator/flag type
 * @tparam Vulkan type being serialized
 * @param vkType Name of the Vulkan enumerator/flag type
 * @param vkValue Value being serialized
 * @param pString Pointer to a string that will be modified with the serialized value. Only modified
 * if true is returned.
 * @return True the value was successfully serialized. False otherwise.
 */
template <typename T>
bool vk_serialize(std::string_view vkType, T vkValue, std::string *pString) {
    return vk_serialize(vkType, static_cast<uint32_t>(vkValue), pString);
}
/**
 * @brief Parses a Vulkan enumerator/flag serialized string
 * @tparam Vulkan type being parsed
 * @param vkType Name of the Vulkan enumerator/flag type
 * @param vkString String being parsed
 * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if
 * true is returned.
 * @return True the value was successfully serialized. False otherwise.
 */
template <typename T>
bool vk_parse(std::string_view vkType, std::string vkString, T *pValue) {
    uint32_t retVal = 0;
    auto found = vk_parse(vkType, vkString, &retVal);
    if (found) {
        *pValue = static_cast<T>(retVal);
    }
    return found;
}
""")
    # Definition Start
    outFile.write("\n#ifdef VK_VALUE_SERIALIZATION_CONFIG_MAIN\n")
    outFile.write("\n#include <algorithm>\n")
    outFile.write("#include <array>\n")
    outFile.write("#include <cstring>\n")
    outFile.write("\nnamespace {\n")
    # Vendors
    vendors = dataRoot.findall('vendors/')
    processVendors(outFile, vendors)
    # EnumSet Declaration
    outFile.write("\nstruct EnumValueSet {\n")
    outFile.write(" std::string_view name;\n")
    outFile.write(" int64_t value;\n")
    outFile.write("};\n")
    # Enums
    enums = dataRoot.findall('enums/')
    processEnums(outFile, enums, vendors, firstVersion, lastVersion)
    # Enum Type Declaration
    outFile.write("\nstruct EnumType {\n")
    outFile.write(" std::string_view name;\n")
    outFile.write(" EnumValueSet const* data;\n")
    outFile.write(" uint32_t count;\n")
    outFile.write(" bool allowEmpty;\n")
    outFile.write("};\n")
    # Enum Pointer Array
    outFile.writelines(["\nconstexpr std::array<EnumType, ", str(
        len(enums)-1), "> enumTypes = {{\n"])  # -1 for not doing VkResult
    for enum in enums:
        if enum.tag == 'VkResult':
            continue
        valueCount = len(enum.findall('./'))
        if valueCount == 0:
            outFile.writelines(
                [" {\"", str(enum.tag), "\", nullptr, 0, true},\n"])
        else:
            # An enum may be empty only if none of its values date from the
            # enum's own introduction version.
            allowEmpty = "true"
            for enumVal in enum.findall('./'):
                if enumVal.get('first') == enum.get('first'):
                    allowEmpty = "false"
            outFile.writelines([" {\"", str(enum.tag), "\", ", str(
                enum.tag), "Sets, ", str(valueCount), ", ", allowEmpty, "},\n"])
    outFile.write('}};\n')
    # Function definitions
    outFile.write("""
/**
 * @brief Removes a vendor tag from the end of the given string view
 * @param view String view to remove the vendor tag from
 * @return A string_view without the vendor tag, if it was suffixed
 */
std::string_view stripVendor(std::string_view view) {
    for (auto const &it : vendors) {
        // Don't strip if it's all that's left
        if (view == it)
            break;
        if (strncmp(view.data() + view.size() - it.size(), it.data(), it.size()) == 0) {
            view = view.substr(0, view.size() - it.size());
            break;
        }
    }
    return view;
}
/**
 * @brief Strips '_BIT' from the end of a string, if there
 */
std::string_view stripBit(std::string_view view) {
    if (view.size() > strlen("_BIT")) {
        if (view.substr(view.size() - strlen("_BIT")) == "_BIT") {
            return view.substr(0, view.size() - strlen("_BIT"));
        }
    }
    return view;
}
bool getEnumType(std::string_view vkType,
                 EnumValueSet const **ppStart,
                 EnumValueSet const **ppEnd,
                 bool *pAllowEmpty) {
    // Check for a conversion from Flags -> FlagBits
    std::string localString;
    if (vkType.rfind("Flags") != std::string::npos) {
        localString = vkType;
        auto it = localString.rfind("Flags");
        localString = localString.replace(it, strlen("Flags"), "FlagBits");
        vkType = localString;
    }
    // Try the original name
    for (auto const &it : enumTypes) {
        if (vkType == std::string_view{it.name}) {
            *ppStart = it.data;
            *ppEnd = it.data + it.count;
            *pAllowEmpty = it.allowEmpty;
            return true;
        }
    }
    // Try a vendor-stripped name
    vkType = stripVendor(vkType);
    for (auto const &it : enumTypes) {
        if (vkType == std::string_view{it.name}) {
            *ppStart = it.data;
            *ppEnd = it.data + it.count;
            *pAllowEmpty = it.allowEmpty;
            return true;
        }
    }
    return false;
}
/**
 * @brief Converts a Vulkan Flag typename into the prefix that is used for it's enums
 * @param typeName Name of the type to generate the Vk enum prefix for
 * @return Generated prefix string
 *
 * Any capitalized letters except for the first has an underscore inserted before it, an underscore
 * is added to the end, and all characters are converted to upper case.
 *
 * It also removed the 'Flags' or 'FlagBits' suffixes.
 */
std::string processEnumPrefix(std::string_view typeName) {
    // Flag Bits
    std::size_t flagBitsSize = strlen("FlagBits");
    if (typeName.size() > flagBitsSize) {
        if (strncmp(typeName.data() + typeName.size() - flagBitsSize, "FlagBits", flagBitsSize) ==
            0) {
            typeName = typeName.substr(0, typeName.size() - strlen("FlagBits"));
        }
    }
    // Flags
    std::size_t flagsSize = strlen("Flags");
    if (typeName.size() > flagsSize) {
        if (strncmp(typeName.data() + typeName.size() - flagsSize, "Flags", flagsSize) == 0) {
            typeName = typeName.substr(0, typeName.size() - strlen("Flags"));
        }
    }
    std::string retStr;
    for (auto it = typeName.begin(); it != typeName.end(); ++it) {
        if (it == typeName.begin()) {
            retStr += ::toupper(*it);
        } else if (::isupper(*it)) {
            retStr += '_';
            retStr += *it;
        } else {
            retStr += toupper(*it);
        }
    }
    retStr += '_';
    return retStr;
}
bool findValue(std::string_view findValue,
               std::string_view prefix,
               uint64_t *pValue,
               EnumValueSet const *start,
               EnumValueSet const *end) {
    // Remove the vendor tag suffix if it's on the value
    findValue = stripVendor(findValue);
    if (findValue[findValue.size() - 1] == '_')
        findValue = findValue.substr(0, findValue.size() - 1);
    // Remove '_BIT' if it's there
    findValue = stripBit(findValue);
    // Iterate until we find the value
    while (start != end) {
        if (findValue == start->name) {
            *pValue |= start->value;
            return true;
        }
        std::string prefixedName{prefix};
        prefixedName += start->name;
        if (findValue == prefixedName) {
            *pValue |= start->value;
            return true;
        }
        ++start;
    }
    return false;
}
/**
 * @brief Takes a given string and formats it for use with parsing
 * @param str The string to format
 * @return Formatted string
 *
 * First, any non alphanumeric characters are trimmed from both ends of the string.
 * After than, any spaces are replaced with underscores, and finally all the characters are
 * capitalized. This will generate the string closest to the original ones found in the XML spec.
 */
std::string formatString(std::string str) {
    // Trim left
    std::size_t cutOffset = 0;
    for (auto c : str) {
        if (::isalnum(c))
            break;
        else
            ++cutOffset;
    }
    str = str.substr(cutOffset);
    // Trim right
    cutOffset = 0;
    for (std::size_t i = 0; i < str.size(); ++i) {
        if (::isalnum(str[i]))
            cutOffset = i + 1;
    }
    str = str.substr(0, cutOffset);
    std::replace(str.begin(), str.end(), ' ', '_');
    std::for_each(str.begin(), str.end(), [](char &c) { c = ::toupper(c); });
    return str;
}
bool serializeBitmask(EnumValueSet const *end,
                      EnumValueSet const *start,
                      bool allowEmpty,
                      uint64_t vkValue,
                      std::string *pString) {
    --end;
    --start;
    if(start == end) {
        // If this is a non-existing bitmask, then return an empty string
        *pString = {};
        return true;
    }
    std::string retStr;
    while (start != end) {
        if(vkValue == 0 && !retStr.empty()) {
            break;
        }
        if ((start->value & vkValue) == start->value) {
            // Found a compatible bit mask, add it
            if (!retStr.empty()) {
                retStr += " | ";
            }
            retStr += start->name;
            vkValue = vkValue ^ start->value;
        }
        --start;
    }
    if (vkValue != 0 || (retStr.empty() && !allowEmpty)) {
        // Failed to find a valid bitmask for the value
        return false;
    }
    *pString = retStr;
    return true;
}
bool serializeEnum(EnumValueSet const *start,
                   EnumValueSet const *end,
                   uint64_t vkValue,
                   std::string *pString) {
    while (start != end) {
        if (start->value == vkValue) {
            *pString = start->name;
            return true;
        }
        ++start;
    }
    return false;
}
bool parseBitmask(std::string_view vkString,
                  EnumValueSet const *start,
                  EnumValueSet const *end,
                  std::string_view prefix,
                  uint64_t *pValue) {
    uint64_t retVal = 0;
    auto startCh = vkString.begin();
    auto endCh = startCh;
    for (; endCh != vkString.end(); ++endCh) {
        if (*endCh == '|') {
            std::string token(startCh, endCh);
            token = formatString(token);
            bool foundVal = findValue(token, prefix, &retVal, start, end);
            if (!foundVal)
                return false;
            startCh = endCh + 1;
        }
    }
    if (startCh != endCh) {
        std::string token(startCh, endCh);
        token = formatString(token);
        bool foundVal = findValue(token, prefix, &retVal, start, end);
        if (!foundVal)
            return false;
    }
    *pValue = retVal;
    return true;
}
bool parseEnum(std::string_view vkString,
               EnumValueSet const *start,
               EnumValueSet const *end,
               std::string_view prefix,
               uint64_t *pValue) {
    uint64_t retVal = 0;
    std::string token = formatString(std::string{vkString});
    bool found = findValue(token, prefix, &retVal, start, end);
    if (found) {
        *pValue = retVal;
    }
    return found;
}
} // namespace
bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString) {
    if (vkType.empty()) {
        return false;
    }
    EnumValueSet const *start, *end;
    bool allowEmpty;
    if (!getEnumType(vkType, &start, &end, &allowEmpty)) {
        return false;
    }
    if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) {
        return serializeBitmask(start, end, allowEmpty, vkValue, pString);
    }
    return serializeEnum(start, end, vkValue, pString);
}
bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString) {
    return vk_serialize(vkType, static_cast<uint64_t>(vkValue), pString);
}
bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue) {
    if (vkType.empty()) {
        return false;
    }
    EnumValueSet const *start, *end;
    bool allowEmpty;
    if (!getEnumType(vkType, &start, &end, &allowEmpty)) {
        return false;
    }
    if (vkString.empty()) {
        if (allowEmpty) {
            *pValue = 0;
            return true;
        } else {
            return false;
        }
    }
    std::string prefix = processEnumPrefix(stripVendor(vkType));
    if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) {
        return parseBitmask(vkString, start, end, prefix, pValue);
    }
    return parseEnum(vkString, start, end, prefix, pValue);
}
bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue) {
    uint64_t tempValue;
    if (vk_parse(vkType, vkString, &tempValue)) {
        *pValue = static_cast<uint32_t>(tempValue);
        return true;
    }
    return false;
}
""")
    # endif
    outFile.write("\n#endif // VK_VALUE_SERIALIZATION_CONFIG_MAIN\n")
    outFile.write("#endif // VK_VALUE_SERIALIZATION_HPP\n")
    outFile.close()
# Script entry point: forward the CLI arguments (minus the program name).
if __name__ == "__main__":
    main(sys.argv[1:])
| 20,180 | 6,471 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File : Ampel-core/ampel/cli/AbsStockCommand.py
# License : BSD-3-Clause
# Author : vb <vbrinnel@physik.hu-berlin.de>
# Date : 25.03.2021
# Last Modified Date: 25.03.2021
# Last Modified By : vb <vbrinnel@physik.hu-berlin.de>
from typing import Dict, Any, Optional, Union, Literal
from ampel.cli.ArgParserBuilder import ArgParserBuilder
from ampel.cli.MaybeIntAction import MaybeIntAction
from ampel.cli.LoadJSONAction import LoadJSONAction
from ampel.cli.AbsCoreCommand import AbsCoreCommand
from ampel.mongo.utils import maybe_match_array
from ampel.model.UnitModel import UnitModel
from ampel.model.time.UnixTimeModel import UnixTimeModel
from ampel.model.time.TimeStringModel import TimeStringModel
from ampel.model.time.TimeLastRunModel import TimeLastRunModel
from ampel.model.time.TimeDeltaModel import TimeDeltaModel
from ampel.model.time.TimeConstraintModel import TimeConstraintModel
class AbsStockCommand(AbsCoreCommand, abstract=True):
    """
    Base class for commands selecting/matching stock(s)
    """

    @staticmethod
    def get_select_args_help() -> Dict[str, str]:
        """Return the help text for every stock-selection CLI argument."""
        return {
            # Required
            'config': 'Path to an ampel config file (yaml/json)',
            # Optional
            'secrets': 'Path to a YAML secrets store in sops format',
            'log-profile': 'One of: default, compact, headerless, verbose, debug',
            'id-mapper': 'Convert stock ids using the provided id mapper (ex: ZTFIdMapper)',
            # Selection
            'stock': 'Stock id(s) (OR matched if multi-valued)',
            'channel': 'Channel(s)',
            'created-after-ts': 'Created after unix timestamp',
            'created-after-str': 'Created after date-time iso string',
            'created-after-delta': 'Created after time delta',
            'created-after-process': 'Created after last run of process with name',
            'created-before-ts': 'Created before unix timestamp',
            'created-before-str': 'Created before date-time iso string',
            'created-before-delta': 'Created before time delta',
            'created-before-process': 'Created before last run of process with name',
            'updated-after-ts': 'Updated after unix timestamp',
            'updated-after-str': 'Updated after date-time iso string',
            'updated-after-delta': 'Updated after time delta',
            'updated-after-process': 'Updated after last run of process with name',
            'updated-before-ts': 'Updated before unix timestamp',
            'updated-before-str': 'Updated before date-time iso string',
            'updated-before-delta': 'Updated before time delta',
            'updated-before-process': 'Updated before last run of process with name',
            'custom-match': 'Custom mongodb match as JSON string (ex: {"body.aKey": {"$gt": 1}})',
        }

    def add_selection_args(self, builder: ArgParserBuilder) -> None:
        """Register the stock-selection arguments on *builder*.

        Each created/updated time bound accepts exactly one of four
        mutually exclusive flavors: iso string, unix timestamp, time
        delta, or last run of a named process.
        """
        # Selection args
        builder.add_group('match', 'Stock selection arguments')
        builder.add_arg('match', "stock", action=MaybeIntAction, nargs="+")
        builder.add_x_args('match',
            {'name': 'created-before-str'}, {'name': 'created-before-ts', 'type': int},
            {'name': 'created-before-delta', 'action': LoadJSONAction},
            {'name': 'created-before-process'}
        )
        builder.add_x_args('match',
            {'name': 'created-after-str'}, {'name': 'created-after-ts', 'type': int},
            {'name': 'created-after-delta', 'action': LoadJSONAction},
            {'name': 'created-after-process'}
        )
        builder.add_x_args('match',
            {'name': 'updated-before-str'}, {'name': 'updated-before-ts', 'type': int},
            {'name': 'updated-before-delta', 'action': LoadJSONAction},
            {'name': 'updated-before-process'}
        )
        builder.add_x_args('match',
            {'name': 'updated-after-str'}, {'name': 'updated-after-ts', 'type': int},
            {'name': 'updated-after-delta', 'action': LoadJSONAction},
            {'name': 'updated-after-process'}
        )
        builder.create_logic_args('match', "channel", "Channel")
        builder.create_logic_args('match', "with-tag", "Tag")
        builder.create_logic_args('match', "without-tag", "Tag", excl=True)
        builder.add_arg('match', "custom-match", metavar="#", action=LoadJSONAction)

    def get_tag(self, args: Dict[str, Any]) -> Optional[Dict[Union[Literal['with'], Literal['without']], Dict]]:
        """Assemble the tag constraint dict from 'with_tag'/'without_tag'
        arguments, or return None when neither is set."""
        tag: Optional[Dict[Union[Literal['with'], Literal['without']], Dict]] = None
        if args.get('with_tag'):
            tag = {'with': args['with_tag']}
        if args.get('without_tag'):
            if tag is None:
                tag = {}
            tag['without'] = args['without_tag']
        return tag

    def build_select_model(self, args: Dict[str, Any]) -> UnitModel:
        """Build the T3StockSelector unit model from parsed CLI arguments."""
        conf = {
            "created": self.get_time_model("created", args),
            "updated": self.get_time_model("updated", args),
            'channel': args['channel'],
            'custom': args['custom_match']
        }
        # BUGFIX: the tag constraint comes from 'with_tag'/'without_tag'
        # (see get_tag). The previous `if args.get('tag')` tested a key that
        # add_selection_args never produces, so the constraint was silently
        # dropped.
        if (tag := self.get_tag(args)):
            conf['tag'] = tag
        if (stock := args.get('stock')):
            conf['custom'] = {
                '_id': stock if isinstance(stock, (int, bytes, str))
                else maybe_match_array(stock)
            }
        return UnitModel(unit="T3StockSelector", config=conf)

    def get_time_model(self, prefix: str, args: Dict[str, Any]) -> TimeConstraintModel:
        """Build the time constraint for *prefix* ('created' or 'updated')
        from whichever flavor (_ts/_str/_delta/_process) was supplied for
        each of the 'after' and 'before' bounds."""
        d: Dict[str, Any] = {'after': None, 'before': None}
        for when in ('after', 'before'):
            if args.get(x := f"{prefix}_{when}_ts"):
                d[when] = UnixTimeModel(match_type='unix_time', value=args[x])
            elif args.get(x := f"{prefix}_{when}_str"):
                d[when] = TimeStringModel(match_type='time_string', dateTimeStr=args[x], dateTimeFormat="%Y%m%dT%H%M%S")
            elif args.get(x := f"{prefix}_{when}_delta"):
                d[when] = TimeDeltaModel(match_type='time_delta', **args[x])
            elif args.get(x := f"{prefix}_{when}_process"):
                d[when] = TimeLastRunModel(match_type='time_last_run', process_name=args[x])
        return TimeConstraintModel(**d)
| 5,629 | 2,035 |
from typing import List
def solution(records: List[str]):
    """Render chat join/leave messages using each user's final nickname.

    Each record is one of "Enter <uid> <name>", "Leave <uid>" or
    "Change <uid> <name>". Enter/Change update the uid's nickname;
    Enter/Leave are logged and rendered with the last nickname seen.

    :param records: raw record strings
    :return: list of rendered messages, in event order
    """
    MESSAGES = {"Enter": "๋์ด ๋ค์ด์์ต๋๋ค.", "Leave": "๋์ด ๋๊ฐ์ต๋๋ค."}
    events = []
    names = dict()
    for record in records:
        # Renamed from `id` to avoid shadowing the builtin.
        op, uid, *name = record.split()
        if name:
            # Enter and Change both carry a (new) nickname.
            names[uid] = name[0]
        if op in MESSAGES:
            events.append((uid, op))
    # Render only after all records are seen, so the final nickname wins.
    return [names[uid] + MESSAGES[op] for uid, op in events]
# Ad-hoc sanity check with the sample input from the problem statement.
if __name__ == "__main__":
    i = [
        "Enter uid1234 Muzi",
        "Enter uid4567 Prodo",
        "Leave uid1234",
        "Enter uid1234 Prodo",
        "Change uid4567 Ryan",
    ]
    print(solution(i))
"""
Implementation of REST API for nets creation
"""
from flask import Blueprint, request
from .utils import typename_to_type
from .contexts import contexts
nr = Blueprint('nets', __name__)
def _create_bool_constant(func):
    """Create a constant net via ``func(ctx)`` in the requested context.

    Expects a JSON request body with a 'context' field naming the target
    context. Returns (json, 400) when the field is missing or null, and
    (json with the created net's name, 201) otherwise.
    """
    # .get() instead of [..]: a missing key previously raised KeyError
    # (HTTP 500) instead of taking the intended 400 path.
    context = request.get_json().get('context')
    if context is None:
        return {'result': 'error'}, 400
    ctx = contexts[context]['context']
    net = func(ctx)
    return {'result': ctx.net2name[net]}, 201
def _create_unary_gate(func):
    """Create a unary gate net via ``func(ctx, x)``.

    Expects a JSON request body with 'context' (context name) and 'x'
    (operand net name). Returns (json, 400) when either field is missing
    or null, and (json with the created net's name, 201) otherwise.
    """
    # Parse the body once; .get() avoids a KeyError (HTTP 500) on a
    # missing field, taking the intended 400 path instead.
    payload = request.get_json()
    context = payload.get('context')
    x = payload.get('x')
    if context is None or x is None:
        return {'result': 'error'}, 400
    ctx = contexts[context]['context']
    x = ctx.nets[x]
    assert x is not None
    net = func(ctx, x)
    return {'result': ctx.net2name[net]}, 201
def _create_binary_gate(func):
    """Create a binary gate net via ``func(ctx, x, y)``.

    Expects a JSON request body with 'context' (context name) and 'x'/'y'
    (operand net names). Returns (json, 400) when any field is missing or
    null, and (json with the created net's name, 201) otherwise.
    """
    # Parse the body once; .get() avoids a KeyError (HTTP 500) on a
    # missing field, taking the intended 400 path instead.
    payload = request.get_json()
    context = payload.get('context')
    x = payload.get('x')
    y = payload.get('y')
    if context is None or x is None or y is None:
        return {'result': 'error'}, 400
    ctx = contexts[context]['context']
    x = ctx.nets[x]
    y = ctx.nets[y]
    assert x is not None
    assert y is not None
    net = func(ctx, x, y)
    return {'result': ctx.net2name[net]}, 201
@nr.route('', methods=['GET'])
def list_nets():
    """
    Gets the list of the available nets in the context named by the
    'context' query parameter.
    """
    context = request.args.get('context')
    ctx = contexts[context]['context']
    # Iterating the mapping directly yields its keys; no need for .items().
    return {'nets': list(ctx.nets)}, 200
@nr.route('/true', methods=['POST'])
def create_true():
    """
    Creates the constant net true in the context named by the JSON
    request body's 'context' field.
    """
    return _create_bool_constant(lambda ctx : ctx.mk_true())
@nr.route('/false', methods=['POST'])
def create_false():
    """
    Creates the constant net false in the context named by the JSON
    request body's 'context' field.
    """
    return _create_bool_constant(lambda ctx : ctx.mk_false())
@nr.route('/numbers/create', methods=['POST'])
def create_number():
    """
    Creates a number net from the JSON request body fields 'context',
    'value' and 'type'. Responds 400 when any field is missing or null,
    201 with the created net's name otherwise.
    """
    # Parse the body once; .get() avoids a KeyError (HTTP 500) on a
    # missing field, taking the intended 400 path instead.
    payload = request.get_json()
    context = payload.get('context')
    value = payload.get('value')
    typ = payload.get('type')
    if context is None or value is None or typ is None:
        return {'result': 'error'}, 400
    ctx = contexts[context]['context']
    # (The asserts that used to follow were unreachable dead checks once
    # the None test above passed.)
    net = ctx.mk_number(value, typename_to_type(ctx, typ))
    return {'result': ctx.net2name[net]}, 201
@nr.route('/nots/create', methods=['POST'])
def create_not():
    """Create a logical NOT of net 'x'."""
    return _create_unary_gate(lambda c, operand: c.mk_not(operand))
@nr.route('/minuses/create', methods=['POST'])
def create_minus():
    """Create an arithmetic negation of net 'x'."""
    return _create_unary_gate(lambda c, operand: c.mk_minus(operand))
@nr.route('/ands/create', methods=['POST'])
def create_and():
    """Create a logical AND of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_and(lhs, rhs))
@nr.route('/ors/create', methods=['POST'])
def create_or():
    """Create a logical OR of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_or(lhs, rhs))
@nr.route('/implieses/create', methods=['POST'])
def create_implies():
    """Create a logical implication from net 'x' to net 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_implies(lhs, rhs))
@nr.route('/xors/create', methods=['POST'])
def create_xor():
    """Create a logical XOR of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_xor(lhs, rhs))
@nr.route('/iffs/create', methods=['POST'])
def create_iff():
    """Create a logical IFF (biconditional) of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_iff(lhs, rhs))
@nr.route('/adds/create', methods=['POST'])
def create_add():
    """Create an addition of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_add(lhs, rhs))
@nr.route('/muls/create', methods=['POST'])
def create_mul():
    """Create a multiplication of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_mul(lhs, rhs))
@nr.route('/divs/create', methods=['POST'])
def create_div():
    """Create a division of net 'x' by net 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_div(lhs, rhs))
@nr.route('/mods/create', methods=['POST'])
def create_mod():
    """Create a modulus of net 'x' by net 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_mod(lhs, rhs))
@nr.route('/subs/create', methods=['POST'])
def create_sub():
    """Create a subtraction of net 'y' from net 'x'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_sub(lhs, rhs))
@nr.route('/eqs/create', methods=['POST'])
def create_eq():
    """Create an equality comparison of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_eq(lhs, rhs))
@nr.route('/leqs/create', methods=['POST'])
def create_leq():
    """Create a less-or-equal comparison of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_leq(lhs, rhs))
@nr.route('/geqs/create', methods=['POST'])
def create_geq():
    """Create a greater-or-equal comparison of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_geq(lhs, rhs))
@nr.route('/lts/create', methods=['POST'])
def create_lt():
    """Create a less-than comparison of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_lt(lhs, rhs))
@nr.route('/gts/create', methods=['POST'])
def create_gt():
    """Create a greater-than comparison of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_gt(lhs, rhs))
@nr.route('/neqs/create', methods=['POST'])
def create_neq():
    """Create a not-equal comparison of nets 'x' and 'y'."""
    return _create_binary_gate(lambda c, lhs, rhs: c.mk_neq(lhs, rhs))
@nr.route('/ites/create', methods=['POST'])
def create_ite():
    """
    Creates an if-then-else net from condition 'x', then 'y' and else 'z'.
    """
    # Parse once; .get() keeps missing keys on the 400 path instead of a 500.
    payload = request.get_json()
    context = payload.get('context') if payload else None
    x = payload.get('x') if payload else None
    y = payload.get('y') if payload else None
    z = payload.get('z') if payload else None
    if context is None or x is None or y is None or z is None:
        return {'result': 'error'}, 400
    ctx = contexts[context]['context']
    cond = ctx.nets[x]
    then_net = ctx.nets[y]
    else_net = ctx.nets[z]
    assert cond is not None
    assert then_net is not None
    assert else_net is not None
    net = ctx.mk_ite(cond, then_net, else_net)
    return {'result': ctx.net2name[net]}, 201
@nr.route('/casts/create', methods=['POST'])
def create_cast():
    """
    Creates a type cast of net 'x' to the fixed-width integer type 'type'.
    """
    # Parse once; .get() keeps missing keys on the 400 path instead of a 500.
    payload = request.get_json()
    context = payload.get('context') if payload else None
    x = payload.get('x') if payload else None
    t = payload.get('type') if payload else None
    if context is None or x is None or t is None:
        return {'result': 'error'}, 400
    ctx = contexts[context]['context']
    operand = ctx.nets[x]
    assert operand is not None
    # The context exposes one mk_cast_to_<type> constructor per supported
    # width; dispatch by name instead of a long if/elif chain.
    supported = ('int8', 'int16', 'int32', 'int64',
                 'uint8', 'uint16', 'uint32', 'uint64')
    if t not in supported:
        return {'result': 'unhandled type {}'.format(t)}, 400
    net = getattr(ctx, 'mk_cast_to_' + t)(operand)
    assert net is not None
    return {'result': ctx.net2name[net]}, 201
| 7,081 | 2,540 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Image util ops."""
import tensorflow as tf
def get_ndims(image):
    """Return the static rank of `image`, falling back to the dynamic rank.

    Note: mirrors the original `or` semantics — a static rank of 0 is falsy
    and also falls through to `tf.rank`.
    """
    rank = image.get_shape().ndims
    if not rank:
        rank = tf.rank(image)
    return rank
def to_4D_image(image):
    """Convert 2/3/4D image to 4D image.

    Args:
      image: 2/3/4D tensor.

    Returns:
      4D tensor with the same type.
    """
    rank_check = tf.debugging.assert_rank_in(
        image, [2, 3, 4], message="`image` must be 2/3/4D tensor"
    )
    with tf.control_dependencies([rank_check]):
        static_rank = image.get_shape().ndims
        if static_rank is None:
            # Rank unknown at graph-construction time: pad dynamically.
            return _dynamic_to_4D_image(image)
        if static_rank == 2:
            return image[None, :, :, None]
        if static_rank == 3:
            return image[None, :, :, :]
        return image
def _dynamic_to_4D_image(image):
    """Pad a dynamically-ranked 2/3/4-D image tensor's shape out to 4-D."""
    shape = tf.shape(image)
    rank = tf.rank(image)
    # 4D image => [N, H, W, C] or [N, C, H, W]
    # 3D image => [1, H, W, C] or [1, C, H, W]
    # 2D image => [1, H, W, 1]
    ones_before = tf.cast(rank <= 3, dtype=tf.int32)
    ones_after = tf.cast(tf.equal(rank, 2), dtype=tf.int32)
    padded_shape = tf.concat(
        [
            tf.ones(shape=ones_before, dtype=tf.int32),
            shape,
            tf.ones(shape=ones_after, dtype=tf.int32),
        ],
        axis=0,
    )
    return tf.reshape(image, padded_shape)
def from_4D_image(image, ndims):
    """Convert back to an image with `ndims` rank.

    Args:
      image: 4D tensor.
      ndims: The original rank of the image.

    Returns:
      `ndims`-D tensor with the same type.
    """
    rank_check = tf.debugging.assert_rank(
        image, 4, message="`image` must be 4D tensor"
    )
    with tf.control_dependencies([rank_check]):
        if isinstance(ndims, tf.Tensor):
            # Target rank only known at run time: squeeze dynamically.
            return _dynamic_from_4D_image(image, ndims)
        if ndims == 2:
            return tf.squeeze(image, [0, 3])
        if ndims == 3:
            return tf.squeeze(image, [0])
        return image
def _dynamic_from_4D_image(image, original_rank):
    """Strip the padded batch/channel dims back off a 4-D image tensor."""
    shape = tf.shape(image)
    # 4D image <= [N, H, W, C] or [N, C, H, W]
    # 3D image <= [1, H, W, C] or [1, C, H, W]
    # 2D image <= [1, H, W, 1]
    start = tf.cast(original_rank <= 3, dtype=tf.int32)
    stop = 4 - tf.cast(tf.equal(original_rank, 2), dtype=tf.int32)
    return tf.reshape(image, shape[start:stop])
| 3,091 | 1,107 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for charge.py"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import logging
from rdkit import Chem
from molvs.standardize import Standardizer, standardize_smiles
from molvs.charge import Reionizer
logging.basicConfig(level=logging.DEBUG)
def charge_parent_smiles(smiles, prefer_organic=False):
    """Utility function that returns the charge parent SMILES for given a SMILES string."""
    # Pass the SMILES string directly: under Python 3 RDKit expects str, and
    # the old py2 idiom `smiles.encode('utf8')` hands it bytes, which fails.
    mol = Chem.MolFromSmiles(smiles, sanitize=False)
    mol = Standardizer(prefer_organic=prefer_organic).charge_parent(mol)
    if mol:
        return Chem.MolToSmiles(mol, isomericSmiles=True)
def test_charge_parent():
    """Ionized acids and bases are neutralized in the charge parent."""
    result = charge_parent_smiles('C(C(=O)[O-])(Cc1n[n-]nn1)(C[NH3+])(C[N+](=O)[O-])')
    assert result == 'NCC(Cc1nn[nH]n1)(C[N+](=O)[O-])C(=O)O'
def test_charge_parent2():
    """A zwitterion is preserved as-is."""
    result = charge_parent_smiles('n(C)1cc[n+]2cccc([O-])c12')
    assert result == 'Cn1cc[n+]2cccc([O-])c12'
def test_charge_parent3():
    """Choline keeps its quaternary positive charge."""
    result = charge_parent_smiles('C[N+](C)(C)CCO')
    assert result == 'C[N+](C)(C)CCO'
def test_charge_parent4():
    """A protonated amine is deprotonated, giving deanol as the charge parent."""
    result = charge_parent_smiles('C[NH+](C)CCO')
    assert result == 'CN(C)CCO'
def test_charge_parent5():
    """Sodium benzoate reduces to benzoic acid."""
    result = charge_parent_smiles('[Na+].O=C([O-])c1ccccc1')
    assert result == 'O=C(O)c1ccccc1'
def test_charge_parent6():
    """The benzoate ion is protonated to benzoic acid."""
    result = charge_parent_smiles('O=C([O-])c1ccccc1')
    assert result == 'O=C(O)c1ccccc1'
def test_charge_parent7():
    """Histidine's charges are neutralized."""
    result = charge_parent_smiles('[NH3+]C(Cc1cnc[nH]1)C(=O)[O-]')
    assert result == 'NC(Cc1cnc[nH]1)C(=O)O'
def test_charge_parent8():
    """A trimethylammonium chloride salt reduces to the neutral amine."""
    result = charge_parent_smiles('C[NH+](C)(C).[Cl-]')
    assert result == 'CN(C)C'
def test_charge_parent9():
    """With no organic fragments, the inorganic fragment is kept."""
    result = charge_parent_smiles('[N+](=O)([O-])[O-]')
    assert result == 'O=[N+]([O-])[O-]'
def test_charge_parent10():
    """prefer_organic has no effect when no organic fragments exist."""
    result = charge_parent_smiles('[N+](=O)([O-])[O-]', prefer_organic=True)
    assert result == 'O=[N+]([O-])[O-]'
def test_charge_parent11():
    """By default the larger inorganic fragment is chosen."""
    result = charge_parent_smiles('[N+](=O)([O-])[O-].[CH2]')
    assert result == 'O=[N+]([O-])[O-]'
def test_charge_parent12():
    """With prefer_organic, a smaller organic fragment beats a larger inorganic one."""
    result = charge_parent_smiles('[N+](=O)([O-])[O-].[CH2]', prefer_organic=True)
    assert result == '[CH2]'
def test_standardize():
    """Table salt standardizes to the ionized pair."""
    result = standardize_smiles('[Na].[Cl]')
    assert result == '[Cl-].[Na+]'
def test_reionize():
    """Reionizer moves a proton to the weaker acid."""
    mol = Chem.MolFromSmiles('C1=C(C=CC(=C1)[S]([O-])=O)[S](O)(=O)=O')
    reionized = Reionizer().reionize(mol)
    assert Chem.MolToSmiles(reionized) == 'O=S(O)c1ccc(S(=O)(=O)[O-])cc1'
def test_reionize2():
    """A charged carbon is not mistaken for an alpha-carbon-hydrogen-keto site."""
    mol = Chem.MolFromSmiles('CCOC(=O)C(=O)[CH-]C#N')
    reionized = Reionizer().reionize(mol)
    assert Chem.MolToSmiles(reionized) == 'CCOC(=O)C(=O)[CH-]C#N'
def test_reionize3():
    """Reionization of a nitro-imidazoline zwitterion."""
    mol = Chem.MolFromSmiles('C[N+]1=C[CH-]N(C(=N)N)/C1=C/[N+](=O)[O-]')
    reionized = Reionizer().reionize(mol)
    assert Chem.MolToSmiles(reionized) == 'C[N+]1=CCN(C(=N)N)C1=[C-][N+](=O)[O-]'
def test_should_complete():
    """Reionization terminates on these molecules (GitHub issue #14)."""
    titanium_in = 'CCCCCCCCCCCCCCCCCC(=O)CC(=C)C(=O)O[Ti](=O)(OC(C)C)C(C)C'
    titanium_out = 'C=C(CC(=O)[CH-]CCCCCCCCCCCCCCCC)C(=O)[O-].CC(C)[O-].CCC.[O-2].[Ti+5]'
    assert standardize_smiles(titanium_in) == titanium_out
    salts_in = 'OP(=O)(O)[O-].OP(=O)([O-])[O-].[O-]S(=O)(=O)[O-].[Na+].[Na+].[Na+].[Mg+2].[Cl-].[Cl-].[K+].[K+]'
    salts_out = 'O=P([O-])(O)O.O=P([O-])([O-])O.O=S(=O)([O-])[O-].[Cl-].[Cl-].[K+].[K+].[Mg+2].[Na+].[Na+].[Na+]'
    assert standardize_smiles(salts_in) == salts_out
def test_forced_charge1():
    """Forced charge correction maintains an overall neutral charge."""
    result = standardize_smiles('[Na].O=C(O)c1ccccc1')
    assert result == 'O=C([O-])c1ccccc1.[Na+]'
def test_forced_charge2():
    """Forced charge correction with no corresponding proton for neutralization (GitHub issue #15)."""
    assert standardize_smiles('[Na].[Na]') == '[Na+].[Na+]'
    # TODO: Arguably should become selenite ion... O=[Se]([O-])[O-]. Need an AcidBasePair?
    assert standardize_smiles('[Na].[Na].O[Se](O)=O') == 'O=[Se](O)O.[Na+].[Na+]'
# def test_reionize3():
# """Test canonical ionization position when multiple equivalent possibilities."""
# mol = Chem.MolFromSmiles('CC1=CC(=CC=C1S(O)=O)S([O-])=O')
# mol2 = Chem.MolFromSmiles('CC1=CC(=CC=C1S([O-])=O)S(O)=O')
# r = Reionizer()
# mol = r.reionize(mol)
# mol2 = r.reionize(mol2)
# assert Chem.MolToSmiles(mol) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O'
# assert Chem.MolToSmiles(mol2) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O'
# assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2)
#
#
# def test_reionize4():
# """Test canonical ionization position when multiple equivalent possibilities."""
# mol = Chem.MolFromSmiles('CCOC(=O)C(=O)[CH-]C#N')
# mol2 = Chem.MolFromSmiles('[CH2-]COC(=O)C(=O)CC#N')
# r = Reionizer()
# mol = r.reionize(mol)
# mol2 = r.reionize(mol2)
# assert Chem.MolToSmiles(mol) == '[CH2-]COC(=O)C(=O)CC#N'
# assert Chem.MolToSmiles(mol2) == ''
# assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2)
| 5,624 | 2,468 |
from django.shortcuts import render
from django.core import serializers
from .models import User
from django.forms.models import model_to_dict
from rest_framework import status
from rest_framework.response import Response
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
# Sign-up endpoints: /users/auth/
# Account registration endpoint: /users/register
@api_view(['POST'])
def register(request):
    """Register a new user from 'email', 'nickname' and 'password' fields.

    Responds 400 on missing keys, 409 when the email or nickname is already
    taken, and 201 with the created user's fields on success.
    """
    data = request.data
    # Guard clause first: reject incomplete payloads before touching the DB.
    if not all(key in data for key in ('email', 'nickname', 'password')):
        return Response({"message": "key error"}, status=status.HTTP_400_BAD_REQUEST)
    if User.objects.filter(email=data['email']).exists():
        return Response({"message": "email already exists"}, status=status.HTTP_409_CONFLICT)
    if User.objects.filter(nickname=data['nickname']).exists():
        return Response({"message": "nickname already exists"}, status=status.HTTP_409_CONFLICT)
    # create_user hashes the password and persists the row; the extra
    # user.save() the old code made was redundant.
    user = User.objects.create_user(
        data['email'],
        data['nickname'],
        data['password'],
    )
    # Do not echo the (hashed) password back to the client.
    body = model_to_dict(user, exclude=['password'])
    return Response(body, status=status.HTTP_201_CREATED)
# Given a token, returns that user's info: /users/users
@api_view(['GET'])
@permission_classes((IsAuthenticated,))
def info(request):
    """Return the authenticated user's email and nickname (200), or 404."""
    user = request.user
    try:
        # BUG FIX: the old code wrote `searchU.count==0`, comparing the bound
        # method object itself to 0 — always False, so the 404 branch was dead.
        if not User.objects.filter(email=user.email).exists():
            return Response({"message": "Can't find info"}, status=status.HTTP_404_NOT_FOUND)
        data = {
            'email': user.email,
            'nickname': user.nickname,
        }
        return Response(data, status=status.HTTP_200_OK)
    except User.DoesNotExist:
        return Response({"message": "info does not exists"}, status=status.HTTP_404_NOT_FOUND)
| 1,921 | 605 |
# -*- coding: utf-8 -*-
from pytest import raises
from astral import Astral, AstralError, Location
import datetime
import pytz
def datetime_almost_equal(datetime1, datetime2, seconds=60):
    """Return True when the two datetimes differ by at most `seconds` seconds."""
    delta = datetime1 - datetime2
    # total_seconds() also accounts for microseconds, which the old
    # hand-rolled days/seconds arithmetic silently dropped.
    return abs(delta.total_seconds()) <= seconds
def test_Location_Name():
    """Default name is Greenwich and the name attribute is writable."""
    c = Location()
    assert c.name == 'Greenwich'
    c.name = 'London'
    assert c.name == 'London'
    # Fixed mojibake: the literal was UTF-8 'Köln' mis-decoded as another
    # codepage; restore the intended Unicode string.
    c.name = 'Köln'
    assert c.name == 'Köln'
def test_Location_Country():
    """Default region is England and the attribute is writable."""
    loc = Location()
    assert loc.region == 'England'
    loc.region = 'Australia'
    assert loc.region == 'Australia'
def test_Location_Elevation():
    """London's elevation in the bundled database is 24 m."""
    london = Astral()['London']
    assert london.elevation == 24
def test_Location_TimezoneName():
    """Timezone name defaults to Europe/London and can be reassigned."""
    c = Location()
    assert c.timezone == 'Europe/London'
    # BUG FIX: the original assigned and checked `c.name`, so the timezone
    # setter — the thing this test is named for — was never exercised.
    c.timezone = 'Asia/Riyadh'
    assert c.timezone == 'Asia/Riyadh'
def test_Location_TimezoneNameNoLocation():
    """With an empty location part, the timezone name is just the group."""
    loc = Location()
    loc._timezone_group = 'Europe'
    loc._timezone_location = ''
    assert loc.timezone == 'Europe'
def test_Location_TimezoneNameBad():
    """Setting an unknown timezone name raises ValueError."""
    loc = Location()
    with raises(ValueError):
        loc.timezone = 'bad/timezone'
def test_Location_TimezoneLookup():
    """tz resolves the timezone name to the matching pytz tzinfo."""
    loc = Location()
    assert loc.tz == pytz.timezone('Europe/London')
    loc.timezone = 'Europe/Stockholm'
    assert loc.tz == pytz.timezone('Europe/Stockholm')
def test_Location_TimezoneLookupBad():
    """An unresolvable timezone raises AstralError on tz access."""
    loc = Location()
    loc._timezone_group = 'bad'
    loc._timezone_location = 'timezone'
    with raises(AstralError):
        loc.tz
def test_Location_Sun():
    """sun() on the default location should not raise."""
    Location().sun()
def test_Location_Dawn():
    """dawn() on the default location should not raise."""
    Location().dawn()
def test_Location_DawnUTC():
    """dawn(local=False) should not raise."""
    Location().dawn(local=False)
def test_Location_Sunrise():
    """sunrise() on the default location should not raise."""
    Location().sunrise()
def test_Location_SunriseUTC():
    """sunrise(local=False) should not raise."""
    Location().sunrise(local=False)
def test_Location_SolarNoon():
    """solar_noon() on the default location should not raise."""
    Location().solar_noon()
def test_Location_SolarNoonUTC():
    """solar_noon(local=False) should not raise."""
    Location().solar_noon(local=False)
def test_Location_Dusk():
    """dusk() on the default location should not raise."""
    Location().dusk()
def test_Location_DuskUTC():
    """dusk(local=False) should not raise."""
    Location().dusk(local=False)
def test_Location_Sunset():
    """sunset() on the default location should not raise."""
    Location().sunset()
def test_Location_SunsetUTC():
    """sunset(local=False) should not raise."""
    Location().sunset(local=False)
def test_Location_SolarElevation():
    """Solar elevation for Riyadh at a known instant is ~17 degrees."""
    riyadh = Astral()['Riyadh']
    when = riyadh.tz.localize(datetime.datetime(2015, 12, 14, 8, 0, 0))
    elevation = riyadh.solar_elevation(when)
    assert abs(elevation - 17) < 0.5
def test_Location_SolarAzimuth():
    """Solar azimuth for Riyadh at a known instant is ~126 degrees."""
    riyadh = Astral()['Riyadh']
    when = riyadh.tz.localize(datetime.datetime(2015, 12, 14, 8, 0, 0))
    azimuth = riyadh.solar_azimuth(when)
    assert abs(azimuth - 126) < 0.5
def test_Location_TimeAtElevation():
    """time_at_elevation matches the known New Delhi value within 10 minutes."""
    delhi = Astral()['New Delhi']
    day = datetime.date(2016, 1, 5)
    expected = delhi.tz.localize(datetime.datetime(2016, 1, 5, 10, 0))
    actual = delhi.time_at_elevation(28, date=day)
    assert datetime_almost_equal(actual, expected, seconds=600)
def test_Location_SolarDepression():
    """solar_depression accepts both a named level and numeric degrees."""
    loc = Location(("Heidelberg", "Germany", 49.412, -8.71, "Europe/Berlin"))
    loc.solar_depression = 'nautical'
    assert loc.solar_depression == 12
    loc.solar_depression = 18
    assert loc.solar_depression == 18
def test_Location_Moon():
    """Moon phase for 2017-12-01 at the default location is 11."""
    when = datetime.date(2017, 12, 1)
    assert Location().moon_phase(date=when) == 11
def test_Location_TzError():
    """Assigning to the read-only tz attribute raises AttributeError."""
    loc = Location()
    with raises(AttributeError):
        loc.tz = 1
def test_Location_equality():
    """Locations with equal data compare equal and hash identically."""
    default_a = Location()
    default_b = Location()
    assert default_a == default_b
    assert len({default_a, default_b}) == 1
    oslo_a = Location(["Oslo", "Norway", 59.9, 10.7, "Europe/Oslo", 0])
    oslo_b = Location(["Oslo", "Norway", 59.9, 10.7, "Europe/Oslo", 0])
    stockholm = Location(["Stockholm", "Sweden", 59.3, 18, "Europe/Stockholm", 0])
    assert oslo_a == oslo_b
    assert len({oslo_a, oslo_b}) == 1
    assert oslo_a != stockholm
    assert len({oslo_a, stockholm}) == 2
| 4,134 | 1,668 |
from .jsonc import load, loads, dump, dumps
| 44 | 15 |
##########################################################################
#
# Copyright 2008-2009 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""d3d9caps.h"""
from winapi import *
from d3d9types import *
# Vertex/pixel shader 2.0 capability flags and cap structs (see d3d9caps.h).
# Flag ORDER is significant: position encodes the bit value.
D3DVS20CAPS = Flags(DWORD, [
    "D3DVS20CAPS_PREDICATION",
])
D3DVSHADERCAPS2_0 = Struct("D3DVSHADERCAPS2_0", [
    (D3DVS20CAPS, "Caps"),
    (INT, "DynamicFlowControlDepth"),
    (INT, "NumTemps"),
    (INT, "StaticFlowControlDepth"),
])
D3DPS20CAPS = Flags(DWORD, [
    "D3DPS20CAPS_ARBITRARYSWIZZLE",
    "D3DPS20CAPS_GRADIENTINSTRUCTIONS",
    "D3DPS20CAPS_PREDICATION",
    "D3DPS20CAPS_NODEPENDENTREADLIMIT",
    "D3DPS20CAPS_NOTEXINSTRUCTIONLIMIT",
])
D3DPSHADERCAPS2_0 = Struct("D3DPSHADERCAPS2_0", [
    (D3DPS20CAPS, "Caps"),
    (INT, "DynamicFlowControlDepth"),
    (INT, "NumTemps"),
    (INT, "StaticFlowControlDepth"),
    (INT, "NumInstructionSlots"),
])
# Device-level capability flag sets (Caps/Caps2/Caps3, presentation intervals,
# cursor and device caps). Flag order encodes the bit position.
D3DCAPS = Flags(DWORD, [
    "D3DCAPS_READ_SCANLINE",
])
D3DCAPS2 = Flags(DWORD, [
    "D3DCAPS2_FULLSCREENGAMMA",
    "D3DCAPS2_CANCALIBRATEGAMMA",
    "D3DCAPS2_RESERVED",
    "D3DCAPS2_CANMANAGERESOURCE",
    "D3DCAPS2_DYNAMICTEXTURES",
    "D3DCAPS2_CANAUTOGENMIPMAP",
    "D3DCAPS2_CANSHARERESOURCE",
])
D3DCAPS3 = Flags(DWORD, [
    "D3DCAPS3_RESERVED",
    "D3DCAPS3_ALPHA_FULLSCREEN_FLIP_OR_DISCARD",
    "D3DCAPS3_LINEAR_TO_SRGB_PRESENTATION",
    "D3DCAPS3_COPY_TO_VIDMEM",
    "D3DCAPS3_COPY_TO_SYSTEMMEM",
])
D3DPRESENT_INTERVAL = Flags(DWORD, [
    #"D3DPRESENT_INTERVAL_DEFAULT", # 0
    "D3DPRESENT_INTERVAL_ONE",
    "D3DPRESENT_INTERVAL_TWO",
    "D3DPRESENT_INTERVAL_THREE",
    "D3DPRESENT_INTERVAL_FOUR",
    "D3DPRESENT_INTERVAL_IMMEDIATE",
])
D3DCURSORCAPS = Flags(DWORD, [
    "D3DCURSORCAPS_COLOR",
    "D3DCURSORCAPS_LOWRES",
])
D3DDEVCAPS = Flags(DWORD, [
    "D3DDEVCAPS_EXECUTESYSTEMMEMORY",
    "D3DDEVCAPS_EXECUTEVIDEOMEMORY",
    "D3DDEVCAPS_TLVERTEXSYSTEMMEMORY",
    "D3DDEVCAPS_TLVERTEXVIDEOMEMORY",
    "D3DDEVCAPS_TEXTURESYSTEMMEMORY",
    "D3DDEVCAPS_TEXTUREVIDEOMEMORY",
    "D3DDEVCAPS_DRAWPRIMTLVERTEX",
    "D3DDEVCAPS_CANRENDERAFTERFLIP",
    "D3DDEVCAPS_TEXTURENONLOCALVIDMEM",
    "D3DDEVCAPS_DRAWPRIMITIVES2",
    "D3DDEVCAPS_SEPARATETEXTUREMEMORIES",
    "D3DDEVCAPS_DRAWPRIMITIVES2EX",
    "D3DDEVCAPS_HWTRANSFORMANDLIGHT",
    "D3DDEVCAPS_CANBLTSYSTONONLOCAL",
    "D3DDEVCAPS_HWRASTERIZATION",
    "D3DDEVCAPS_PUREDEVICE",
    "D3DDEVCAPS_QUINTICRTPATCHES",
    "D3DDEVCAPS_RTPATCHES",
    "D3DDEVCAPS_RTPATCHHANDLEZERO",
    "D3DDEVCAPS_NPATCHES",
])
# Primitive rendering capability flag sets: misc, line, rasterizer,
# comparison and blend caps. Flag order encodes the bit position.
D3DPMISCCAPS = Flags(DWORD, [
    "D3DPMISCCAPS_MASKZ",
    "D3DPMISCCAPS_CULLNONE",
    "D3DPMISCCAPS_CULLCW",
    "D3DPMISCCAPS_CULLCCW",
    "D3DPMISCCAPS_COLORWRITEENABLE",
    "D3DPMISCCAPS_CLIPPLANESCALEDPOINTS",
    "D3DPMISCCAPS_CLIPTLVERTS",
    "D3DPMISCCAPS_TSSARGTEMP",
    "D3DPMISCCAPS_BLENDOP",
    "D3DPMISCCAPS_NULLREFERENCE",
    "D3DPMISCCAPS_INDEPENDENTWRITEMASKS",
    "D3DPMISCCAPS_PERSTAGECONSTANT",
    "D3DPMISCCAPS_FOGANDSPECULARALPHA",
    "D3DPMISCCAPS_SEPARATEALPHABLEND",
    "D3DPMISCCAPS_MRTINDEPENDENTBITDEPTHS",
    "D3DPMISCCAPS_MRTPOSTPIXELSHADERBLENDING",
    "D3DPMISCCAPS_FOGVERTEXCLAMPED",
    "D3DPMISCCAPS_POSTBLENDSRGBCONVERT",
])
D3DLINECAPS = Flags(DWORD, [
    "D3DLINECAPS_TEXTURE",
    "D3DLINECAPS_ZTEST",
    "D3DLINECAPS_BLEND",
    "D3DLINECAPS_ALPHACMP",
    "D3DLINECAPS_FOG",
    "D3DLINECAPS_ANTIALIAS",
])
D3DPRASTERCAPS = Flags(DWORD, [
    "D3DPRASTERCAPS_DITHER",
    "D3DPRASTERCAPS_ZTEST",
    "D3DPRASTERCAPS_FOGVERTEX",
    "D3DPRASTERCAPS_FOGTABLE",
    "D3DPRASTERCAPS_MIPMAPLODBIAS",
    "D3DPRASTERCAPS_ZBUFFERLESSHSR",
    "D3DPRASTERCAPS_FOGRANGE",
    "D3DPRASTERCAPS_ANISOTROPY",
    "D3DPRASTERCAPS_WBUFFER",
    "D3DPRASTERCAPS_WFOG",
    "D3DPRASTERCAPS_ZFOG",
    "D3DPRASTERCAPS_COLORPERSPECTIVE",
    "D3DPRASTERCAPS_SCISSORTEST",
    "D3DPRASTERCAPS_SLOPESCALEDEPTHBIAS",
    "D3DPRASTERCAPS_DEPTHBIAS",
    "D3DPRASTERCAPS_MULTISAMPLE_TOGGLE",
])
D3DPCMPCAPS = Flags(DWORD, [
    "D3DPCMPCAPS_NEVER",
    "D3DPCMPCAPS_LESS",
    "D3DPCMPCAPS_EQUAL",
    "D3DPCMPCAPS_LESSEQUAL",
    "D3DPCMPCAPS_GREATER",
    "D3DPCMPCAPS_NOTEQUAL",
    "D3DPCMPCAPS_GREATEREQUAL",
    "D3DPCMPCAPS_ALWAYS",
])
D3DPBLENDCAPS = Flags(DWORD, [
    "D3DPBLENDCAPS_ZERO",
    "D3DPBLENDCAPS_ONE",
    "D3DPBLENDCAPS_SRCCOLOR",
    "D3DPBLENDCAPS_INVSRCCOLOR",
    "D3DPBLENDCAPS_SRCALPHA",
    "D3DPBLENDCAPS_INVSRCALPHA",
    "D3DPBLENDCAPS_DESTALPHA",
    "D3DPBLENDCAPS_INVDESTALPHA",
    "D3DPBLENDCAPS_DESTCOLOR",
    "D3DPBLENDCAPS_INVDESTCOLOR",
    "D3DPBLENDCAPS_SRCALPHASAT",
    "D3DPBLENDCAPS_BOTHSRCALPHA",
    "D3DPBLENDCAPS_BOTHINVSRCALPHA",
    "D3DPBLENDCAPS_BLENDFACTOR",
    "D3DPBLENDCAPS_SRCCOLOR2",
    "D3DPBLENDCAPS_INVSRCCOLOR2",
])
# Texture/filter/address/stencil/texture-op/FVF/vertex-processing/declaration
# capability flag sets. Flag order encodes the bit position.
D3DPSHADECAPS = Flags(DWORD, [
    "D3DPSHADECAPS_COLORGOURAUDRGB",
    "D3DPSHADECAPS_SPECULARGOURAUDRGB",
    "D3DPSHADECAPS_ALPHAGOURAUDBLEND",
    "D3DPSHADECAPS_FOGGOURAUD",
])
D3DPTEXTURECAPS = Flags(DWORD, [
    "D3DPTEXTURECAPS_PERSPECTIVE",
    "D3DPTEXTURECAPS_POW2",
    "D3DPTEXTURECAPS_ALPHA",
    "D3DPTEXTURECAPS_SQUAREONLY",
    "D3DPTEXTURECAPS_TEXREPEATNOTSCALEDBYSIZE",
    "D3DPTEXTURECAPS_ALPHAPALETTE",
    "D3DPTEXTURECAPS_NONPOW2CONDITIONAL",
    "D3DPTEXTURECAPS_PROJECTED",
    "D3DPTEXTURECAPS_CUBEMAP",
    "D3DPTEXTURECAPS_VOLUMEMAP",
    "D3DPTEXTURECAPS_MIPMAP",
    "D3DPTEXTURECAPS_MIPVOLUMEMAP",
    "D3DPTEXTURECAPS_MIPCUBEMAP",
    "D3DPTEXTURECAPS_CUBEMAP_POW2",
    "D3DPTEXTURECAPS_VOLUMEMAP_POW2",
    "D3DPTEXTURECAPS_NOPROJECTEDBUMPENV",
])
D3DPTFILTERCAPS = Flags(DWORD, [
    "D3DPTFILTERCAPS_MINFPOINT",
    "D3DPTFILTERCAPS_MINFLINEAR",
    "D3DPTFILTERCAPS_MINFANISOTROPIC",
    "D3DPTFILTERCAPS_MINFPYRAMIDALQUAD",
    "D3DPTFILTERCAPS_MINFGAUSSIANQUAD",
    "D3DPTFILTERCAPS_MIPFPOINT",
    "D3DPTFILTERCAPS_MIPFLINEAR",
    "D3DPTFILTERCAPS_CONVOLUTIONMONO",
    "D3DPTFILTERCAPS_MAGFPOINT",
    "D3DPTFILTERCAPS_MAGFLINEAR",
    "D3DPTFILTERCAPS_MAGFANISOTROPIC",
    "D3DPTFILTERCAPS_MAGFPYRAMIDALQUAD",
    "D3DPTFILTERCAPS_MAGFGAUSSIANQUAD",
])
D3DPTADDRESSCAPS = Flags(DWORD, [
    "D3DPTADDRESSCAPS_WRAP",
    "D3DPTADDRESSCAPS_MIRROR",
    "D3DPTADDRESSCAPS_CLAMP",
    "D3DPTADDRESSCAPS_BORDER",
    "D3DPTADDRESSCAPS_INDEPENDENTUV",
    "D3DPTADDRESSCAPS_MIRRORONCE",
])
D3DSTENCILCAPS = Flags(DWORD, [
    "D3DSTENCILCAPS_KEEP",
    "D3DSTENCILCAPS_ZERO",
    "D3DSTENCILCAPS_REPLACE",
    "D3DSTENCILCAPS_INCRSAT",
    "D3DSTENCILCAPS_DECRSAT",
    "D3DSTENCILCAPS_INVERT",
    "D3DSTENCILCAPS_INCR",
    "D3DSTENCILCAPS_DECR",
    "D3DSTENCILCAPS_TWOSIDED",
])
D3DTEXOPCAPS = Flags(DWORD, [
    "D3DTEXOPCAPS_DISABLE",
    "D3DTEXOPCAPS_SELECTARG1",
    "D3DTEXOPCAPS_SELECTARG2",
    "D3DTEXOPCAPS_MODULATE",
    "D3DTEXOPCAPS_MODULATE2X",
    "D3DTEXOPCAPS_MODULATE4X",
    "D3DTEXOPCAPS_ADD",
    "D3DTEXOPCAPS_ADDSIGNED",
    "D3DTEXOPCAPS_ADDSIGNED2X",
    "D3DTEXOPCAPS_SUBTRACT",
    "D3DTEXOPCAPS_ADDSMOOTH",
    "D3DTEXOPCAPS_BLENDDIFFUSEALPHA",
    "D3DTEXOPCAPS_BLENDTEXTUREALPHA",
    "D3DTEXOPCAPS_BLENDFACTORALPHA",
    "D3DTEXOPCAPS_BLENDTEXTUREALPHAPM",
    "D3DTEXOPCAPS_BLENDCURRENTALPHA",
    "D3DTEXOPCAPS_PREMODULATE",
    "D3DTEXOPCAPS_MODULATEALPHA_ADDCOLOR",
    "D3DTEXOPCAPS_MODULATECOLOR_ADDALPHA",
    "D3DTEXOPCAPS_MODULATEINVALPHA_ADDCOLOR",
    "D3DTEXOPCAPS_MODULATEINVCOLOR_ADDALPHA",
    "D3DTEXOPCAPS_BUMPENVMAP",
    "D3DTEXOPCAPS_BUMPENVMAPLUMINANCE",
    "D3DTEXOPCAPS_DOTPRODUCT3",
    "D3DTEXOPCAPS_MULTIPLYADD",
    "D3DTEXOPCAPS_LERP",
])
D3DFVFCAPS = Flags(DWORD, [
    "D3DFVFCAPS_TEXCOORDCOUNTMASK",
    "D3DFVFCAPS_DONOTSTRIPELEMENTS",
    "D3DFVFCAPS_PSIZE",
])
D3DVTXPCAPS = Flags(DWORD, [
    "D3DVTXPCAPS_TEXGEN",
    "D3DVTXPCAPS_MATERIALSOURCE7",
    "D3DVTXPCAPS_DIRECTIONALLIGHTS",
    "D3DVTXPCAPS_POSITIONALLIGHTS",
    "D3DVTXPCAPS_LOCALVIEWER",
    "D3DVTXPCAPS_TWEENING",
    "D3DVTXPCAPS_TEXGEN_SPHEREMAP",
    "D3DVTXPCAPS_NO_TEXGEN_NONLOCALVIEWER",
])
D3DDEVCAPS2 = Flags(DWORD, [
    "D3DDEVCAPS2_STREAMOFFSET",
    "D3DDEVCAPS2_DMAPNPATCH",
    "D3DDEVCAPS2_ADAPTIVETESSRTPATCH",
    "D3DDEVCAPS2_ADAPTIVETESSNPATCH",
    "D3DDEVCAPS2_CAN_STRETCHRECT_FROM_TEXTURES",
    "D3DDEVCAPS2_PRESAMPLEDDMAPNPATCH",
    "D3DDEVCAPS2_VERTEXELEMENTSCANSHARESTREAMOFFSET",
])
D3DDTCAPS = Flags(DWORD, [
    "D3DDTCAPS_UBYTE4",
    "D3DDTCAPS_UBYTE4N",
    "D3DDTCAPS_SHORT2N",
    "D3DDTCAPS_SHORT4N",
    "D3DDTCAPS_USHORT2N",
    "D3DDTCAPS_USHORT4N",
    "D3DDTCAPS_UDEC3",
    "D3DDTCAPS_DEC3N",
    "D3DDTCAPS_FLOAT16_2",
    "D3DDTCAPS_FLOAT16_4",
])
# Shader version fields are packed DWORDs built by the D3DPS_VERSION /
# D3DVS_VERSION macros; they are kept as plain DWORDs rather than enums.
#D3DPS_VERSION = Enum("DWORD", [
#    "D3DPS_VERSION(0,0)",
#    "D3DPS_VERSION(1,0)",
#    "D3DPS_VERSION(1,1)",
#    "D3DPS_VERSION(1,2)",
#    "D3DPS_VERSION(1,3)",
#    "D3DPS_VERSION(1,4)",
#    "D3DPS_VERSION(2,0)",
#    "D3DPS_VERSION(3,0)",
#])
D3DPS_VERSION = DWORD
#D3DVS_VERSION = Enum("DWORD", [
#    "D3DVS_VERSION(0,0)",
#    "D3DVS_VERSION(1,0)",
#    "D3DVS_VERSION(1,1)",
#    "D3DVS_VERSION(2,0)",
#    "D3DVS_VERSION(3,0)",
#])
D3DVS_VERSION = DWORD
# Top-level device capability structure; member order mirrors the C layout of
# D3DCAPS9 in d3d9caps.h exactly and must not be reordered.
D3DCAPS9 = Struct("D3DCAPS9", [
    (D3DDEVTYPE, "DeviceType"),
    (UINT, "AdapterOrdinal"),
    (D3DCAPS, "Caps"),
    (D3DCAPS2, "Caps2"),
    (D3DCAPS3, "Caps3"),
    (D3DPRESENT_INTERVAL, "PresentationIntervals"),
    (D3DCURSORCAPS, "CursorCaps"),
    (D3DDEVCAPS, "DevCaps"),
    (D3DPMISCCAPS, "PrimitiveMiscCaps"),
    (D3DPRASTERCAPS, "RasterCaps"),
    (D3DPCMPCAPS, "ZCmpCaps"),
    (D3DPBLENDCAPS, "SrcBlendCaps"),
    (D3DPBLENDCAPS, "DestBlendCaps"),
    (D3DPCMPCAPS, "AlphaCmpCaps"),
    (D3DPSHADECAPS, "ShadeCaps"),
    (D3DPTEXTURECAPS, "TextureCaps"),
    (D3DPTFILTERCAPS, "TextureFilterCaps"),
    (D3DPTFILTERCAPS, "CubeTextureFilterCaps"),
    (D3DPTFILTERCAPS, "VolumeTextureFilterCaps"),
    (D3DPTADDRESSCAPS, "TextureAddressCaps"),
    (D3DPTADDRESSCAPS, "VolumeTextureAddressCaps"),
    (D3DLINECAPS, "LineCaps"),
    (DWORD, "MaxTextureWidth"),
    (DWORD, "MaxTextureHeight"),
    (DWORD, "MaxVolumeExtent"),
    (DWORD, "MaxTextureRepeat"),
    (DWORD, "MaxTextureAspectRatio"),
    (DWORD, "MaxAnisotropy"),
    (Float, "MaxVertexW"),
    (Float, "GuardBandLeft"),
    (Float, "GuardBandTop"),
    (Float, "GuardBandRight"),
    (Float, "GuardBandBottom"),
    (Float, "ExtentsAdjust"),
    (D3DSTENCILCAPS, "StencilCaps"),
    (D3DFVFCAPS, "FVFCaps"),
    (D3DTEXOPCAPS, "TextureOpCaps"),
    (DWORD, "MaxTextureBlendStages"),
    (DWORD, "MaxSimultaneousTextures"),
    (D3DVTXPCAPS, "VertexProcessingCaps"),
    (DWORD, "MaxActiveLights"),
    (DWORD, "MaxUserClipPlanes"),
    (DWORD, "MaxVertexBlendMatrices"),
    (DWORD, "MaxVertexBlendMatrixIndex"),
    (Float, "MaxPointSize"),
    (DWORD, "MaxPrimitiveCount"),
    (DWORD, "MaxVertexIndex"),
    (DWORD, "MaxStreams"),
    (DWORD, "MaxStreamStride"),
    (D3DVS_VERSION, "VertexShaderVersion"),
    (DWORD, "MaxVertexShaderConst"),
    (D3DPS_VERSION, "PixelShaderVersion"),
    (Float, "PixelShader1xMaxValue"),
    (D3DDEVCAPS2, "DevCaps2"),
    (Float, "MaxNpatchTessellationLevel"),
    (DWORD, "Reserved5"),
    (UINT, "MasterAdapterOrdinal"),
    (UINT, "AdapterOrdinalInGroup"),
    (UINT, "NumberOfAdaptersInGroup"),
    (D3DDTCAPS, "DeclTypes"),
    (DWORD, "NumSimultaneousRTs"),
    (D3DPTFILTERCAPS, "StretchRectFilterCaps"),
    (D3DVSHADERCAPS2_0, "VS20Caps"),
    (D3DPSHADERCAPS2_0, "PS20Caps"),
    (D3DPTFILTERCAPS, "VertexTextureFilterCaps"),
    (DWORD, "MaxVShaderInstructionsExecuted"),
    (DWORD, "MaxPShaderInstructionsExecuted"),
    (DWORD, "MaxVertexShader30InstructionSlots"),
    (DWORD, "MaxPixelShader30InstructionSlots"),
])
| 12,648 | 6,530 |
# Package version string (PEP 440).
__version__ = '4.64.0'
| 23 | 14 |
import gym
from gym import spaces, error, utils
from gym.utils import seeding
import numpy as np
import configparser
from os import path
import matplotlib.pyplot as plt
from matplotlib.pyplot import gca
# Matplotlib font settings used by the rendering/plotting code.
font = {'family': 'sans-serif',
        'weight': 'bold',
        'size': 14}
class MappingEnv(gym.Env):
def __init__(self):
# config_file = path.join(path.dirname(__file__), "params_flock.cfg")
# config = configparser.ConfigParser()
# config.read(config_file)
# config = config['flock']
self.nearest_agents = 7
self.nearest_targets = 7
self.mean_pooling = True # normalize the adjacency matrix by the number of neighbors or not
self.centralized = True
# number states per agent
self.nx_system = 4
# number of actions per agent
self.nu = 2
# default problem parameters
self.n_agents = 100 # int(config['network_size'])
# self.comm_radius = 0.9 # float(config['comm_radius'])
self.dt = 0.1 # #float(config['system_dt'])
self.v_max = 5.0 # float(config['max_vel_init'])
self.v_bias = self.v_max
# intitialize state matrices
self.x = None
self.u = None
self.mean_vel = None
self.init_vel = None
self.greedy_action = None
self.diff = None
self.r2 = None
self.adj_mat = None
self.adj_mat_mean = None
self.diff_targets = None
self.r2_targets = None
self.target_observed = None
self.state_network = None
self.state_values = None
self.reward = None
self.max_accel = 1
# self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2 * self.n_agents,),
# dtype=np.float32)
#
# self.observation_space = spaces.Box(low=-np.Inf, high=np.Inf, shape=(self.n_agents, ),
# dtype=np.float32)
# target initialization
self.px_max = 100
self.py_max = 100
x = np.linspace(-1.0 * self.px_max, self.px_max, self.n_agents)
y = np.linspace(-1.0 * self.py_max, self.py_max, self.n_agents)
tx, ty = np.meshgrid(x, y)
tx = tx.reshape((-1, 1))
ty = ty.reshape((-1, 1))
self.obs_rad = 2.0
self.obs_rad2 = self.obs_rad * self.obs_rad
self.target_x = np.stack((tx, ty), axis=1).reshape((-1, 2))
self.target_unobserved = np.ones((self.n_agents * self.n_agents, 2), dtype=np.bool)
# rendering initialization
self.fig = None
self.ax = None
self.line1 = None
self.line2 = None
self.action_scalar = 10.0
self.seed()
def reset(self):
x = np.zeros((self.n_agents, self.nx_system))
self.target_unobserved = np.ones((self.n_agents * self.n_agents, 2), dtype=np.bool)
x[:, 0] = np.random.uniform(low=-self.px_max, high=self.px_max, size=(self.n_agents,))
x[:, 1] = np.random.uniform(low=-self.py_max, high=self.py_max, size=(self.n_agents,))
#bias = np.random.uniform(low=-self.v_bias, high=self.v_bias, size=(2,))
x[:, 2] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_agents,)) #+ bias[0]
x[:, 3] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_agents,)) #+ bias[1]
# keep good initialization
self.mean_vel = np.mean(x[:, 2:4], axis=0)
self.init_vel = x[:, 2:4]
self.x = x
# self.a_net = self.get_connectivity(self.x)
self.compute_helpers()
return self.state_values, self.state_network
def params_from_cfg(self, args):
    """Load environment parameters from a parsed config object.

    Currently an intentional no-op (TODO: wire up config parsing).
    """
    pass
def seed(self, seed=None):
    """Seed the environment's RNG; return the list of seeds actually used."""
    rng, used_seed = seeding.np_random(seed)
    self.np_random = rng
    return [used_seed]
def step(self, u):
    """Advance the simulation by one timestep.

    Args:
        u: Acceleration commands of shape (n_agents, nu); clipped to
           [-max_accel, max_accel] and then scaled by ``action_scalar``.

    Returns:
        Tuple ((state_values, state_network), reward, done, info). The reward
        is 10x the number of targets newly observed this step minus the total
        distance travelled; ``done`` is True once every target is observed.
    """
    # u = np.reshape(u, (-1, 2))
    assert u.shape == (self.n_agents, self.nu)
    u = np.clip(u, a_min=-self.max_accel, a_max=self.max_accel)
    self.u = u * self.action_scalar
    old_x = np.copy(self.x)
    # x position (constant-acceleration kinematics over one dt)
    self.x[:, 0] = self.x[:, 0] + self.x[:, 2] * self.dt + self.u[:, 0] * self.dt * self.dt * 0.5
    # y position
    self.x[:, 1] = self.x[:, 1] + self.x[:, 3] * self.dt + self.u[:, 1] * self.dt * self.dt * 0.5
    # x velocity
    self.x[:, 2] = self.x[:, 2] + self.u[:, 0] * self.dt
    # y velocity
    self.x[:, 3] = self.x[:, 3] + self.u[:, 1] * self.dt
    # clip velocities
    self.x[:, 2:4] = np.clip(self.x[:, 2:4], -1.0*self.v_max, self.v_max)
    # Total distance covered by all agents this step (movement penalty).
    dist_traveled = np.sum(np.linalg.norm(self.x[:, 0:2] - old_x[:, 0:2], axis=1))
    self.compute_helpers()
    # Episode ends when no target remains unobserved.
    done = (0 == np.sum(self.target_unobserved))
    return (self.state_values, self.state_network), 10.0 * self.reward - dist_traveled, done, {}
def compute_helpers(self):
    """Recompute neighbor/target observations, adjacency matrices and reward.

    Side effects: updates ``diff``, ``r2``, ``adj_mat``, ``adj_mat_mean``,
    ``diff_targets``, ``r2_targets``, ``target_observed``,
    ``target_unobserved``, ``reward``, ``state_values``, ``greedy_action``
    and ``state_network``.
    """
    # Pairwise state differences between agents, shape (n, n, nx_system).
    self.diff = self.x.reshape((self.n_agents, 1, self.nx_system)) - self.x.reshape(
        (1, self.n_agents, self.nx_system))
    # Squared planar distances between agents.
    self.r2 = np.multiply(self.diff[:, :, 0], self.diff[:, :, 0]) + np.multiply(self.diff[:, :, 1],
                                                                                self.diff[:, :, 1])
    # np.inf: the `np.Inf` alias was removed in NumPy 2.0.
    np.fill_diagonal(self.r2, np.inf)

    nearest = np.argsort(self.r2, axis=1)
    obs_neigh = np.zeros((self.n_agents, self.nearest_agents * 4))
    self.adj_mat = np.zeros((self.n_agents, self.n_agents))
    for i in range(self.nearest_agents):
        ind2, ind3 = np.meshgrid(nearest[:, i], range(4), indexing='ij')
        ind1, _ = np.meshgrid(range(self.n_agents), range(4), indexing='ij')
        obs_neigh[:, i * self.nx_system:(i + 1) * self.nx_system] = np.reshape(
            self.diff[ind1.flatten(), ind2.flatten(), ind3.flatten()], (-1, 4))
        # NOTE(review): this sets whole columns for every i-th nearest
        # neighbour; if a per-row edge was intended it would index
        # [range(n), nearest[:, i]] -- confirm before changing.
        self.adj_mat[:, nearest[:, i]] = 1.0

    # Normalize the adjacency matrix by the number of neighbors - results in
    # mean pooling, instead of sum pooling.
    n_neighbors = np.reshape(np.sum(self.adj_mat, axis=1), (self.n_agents, 1))
    n_neighbors[n_neighbors == 0] = 1
    self.adj_mat_mean = self.adj_mat / n_neighbors

    # Differences from each agent to the still-unobserved targets only.
    self.diff_targets = self.x[:, 0:2].reshape((self.n_agents, 1, 2)) - self.target_x[
        self.target_unobserved].reshape(
        (1, -1, 2))
    self.r2_targets = np.multiply(self.diff_targets[:, :, 0], self.diff_targets[:, :, 0]) + np.multiply(
        self.diff_targets[:, :, 1],
        self.diff_targets[:, :, 1])

    nearest_targets = np.argsort(self.r2_targets, axis=1)
    obs_target = np.zeros((self.n_agents, self.nearest_targets * 2))
    for i in range(min(self.nearest_targets, np.shape(nearest_targets)[1])):
        ind2, ind3 = np.meshgrid(nearest_targets[:, i], range(2), indexing='ij')
        ind1, _ = np.meshgrid(range(self.n_agents), range(2), indexing='ij')
        obs_target[:, i * 2:(i + 1) * 2] = np.reshape(
            self.diff_targets[ind1.flatten(), ind2.flatten(), ind3.flatten()], (-1, 2))

    # Targets inside any agent's observation radius become observed.
    self.target_observed = np.any(self.r2_targets < self.obs_rad2, axis=0).reshape((-1, 1))
    self.target_unobserved[self.target_unobserved] = np.tile(np.logical_not(self.target_observed), (1, 2)).flatten()

    # Builtin `int`: the `np.int` alias was removed in NumPy >= 1.24.
    self.reward = np.sum(self.target_observed.astype(int))

    self.state_values = np.hstack((obs_neigh, obs_target))
    # Greedy heuristic: head toward the nearest unobserved target.
    self.greedy_action = -1.0 * obs_target[:, 0:2]

    if self.mean_pooling:
        self.state_network = self.adj_mat_mean
    else:
        self.state_network = self.adj_mat
def controller(self):
    """
    The controller for flocking from Turner 2003.
    Returns: the optimal action
    """
    # Undo the environment's action scaling on the cached greedy action.
    scale = 10.0
    return self.greedy_action / scale
def render(self, mode='human'):
    """
    Render the environment with agents as points in 2D space
    """
    if self.fig is None:
        # First call: build the interactive figure, agent dots, target marks.
        plt.ion()
        fig = plt.figure()
        self.ax = fig.add_subplot(111)
        line1, = self.ax.plot(self.x[:, 0], self.x[:, 1], 'bo')  # agents: blue dots
        locs = self.target_x[self.target_unobserved].reshape((-1, 2))
        line2, = self.ax.plot(locs[:, 0], locs[:, 1], 'rx')  # unobserved targets: red crosses
        plt.ylim(-1.0 * self.py_max, 1.0 * self.py_max)
        plt.xlim(-1.0 * self.px_max, 1.0 * self.px_max)
        a = gca()
        # NOTE(review): `font` and `gca` are presumably module-level
        # (pyplot) names defined near the imports -- not visible here.
        a.set_xticklabels(a.get_xticks(), font)
        a.set_yticklabels(a.get_yticks(), font)
        plt.title('GNN Controller')
        self.fig = fig
        self.line1 = line1
        self.line2 = line2
        # TODO render unobserved targets
    else:
        # Subsequent calls: move existing artists instead of re-plotting.
        self.line1.set_xdata(self.x[:, 0])
        self.line1.set_ydata(self.x[:, 1])
        locs = self.target_x[self.target_unobserved].reshape((-1,2))
        self.line2.set_xdata(locs[:, 0])
        self.line2.set_ydata(locs[:, 1])
    self.fig.canvas.draw()
    self.fig.canvas.flush_events()
def close(self):
    """Release rendering resources; nothing to clean up for this env."""
    return None
| 10,188 | 3,771 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tf.Lu."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
class LuOpTest(test.TestCase):
  """Correctness tests for the LU factorization op (linalg_ops.lu)."""

  @property
  def float_types(self):
    """All floating/complex dtypes the op is exercised with."""
    return set((np.float64, np.float32, np.complex64, np.complex128))

  def _verifyLuBase(self, x, lower, upper, perm, verification,
                    output_idx_type):
    """Checks shapes, dtypes, permutation validity and P*x == L*U."""
    lower_np, upper_np, perm_np, verification_np = self.evaluate(
        [lower, upper, perm, verification])
    self.assertAllClose(x, verification_np)
    self.assertShapeEqual(x, lower)
    self.assertShapeEqual(x, upper)
    self.assertAllEqual(x.shape[:-1], perm.shape.as_list())

    # Check dtypes are as expected.
    self.assertEqual(x.dtype, lower_np.dtype)
    self.assertEqual(x.dtype, upper_np.dtype)
    self.assertEqual(output_idx_type.as_numpy_dtype, perm_np.dtype)

    # Check that the permutation is valid (each row is a permutation of
    # 0..num_rows-1).
    if perm_np.shape[-1] > 0:
      perm_reshaped = np.reshape(perm_np, (-1, perm_np.shape[-1]))
      for perm_vector in perm_reshaped:
        self.assertAllClose(np.arange(len(perm_vector)), np.sort(perm_vector))

  def _verifyLu(self, x, output_idx_type=dtypes.int64):
    # Verify that Px = LU.
    lu, perm = linalg_ops.lu(x, output_idx_type=output_idx_type)

    # Prepare the lower factor of shape num_rows x num_rows
    lu_shape = np.array(lu.shape.as_list())
    batch_shape = lu_shape[:-2]
    num_rows = lu_shape[-2]
    num_cols = lu_shape[-1]

    lower = array_ops.matrix_band_part(lu, -1, 0)

    if num_rows > num_cols:
      eye = linalg_ops.eye(
          num_rows, batch_shape=batch_shape, dtype=lower.dtype)
      lower = array_ops.concat([lower, eye[..., num_cols:]], axis=-1)
    elif num_rows < num_cols:
      lower = lower[..., :num_rows]

    # Fill the diagonal with ones.
    ones_diag = array_ops.ones(
        np.append(batch_shape, num_rows), dtype=lower.dtype)
    lower = array_ops.matrix_set_diag(lower, ones_diag)

    # Prepare the upper factor.
    upper = array_ops.matrix_band_part(lu, 0, -1)

    verification = math_ops.matmul(lower, upper)

    # Permute the rows of product of the Cholesky factors.
    if num_rows > 0:
      # Reshape the product of the triangular factors and permutation indices
      # to a single batch dimension. This makes it easy to apply
      # invert_permutation and gather_nd ops.
      perm_reshaped = array_ops.reshape(perm, [-1, num_rows])
      verification_reshaped = array_ops.reshape(verification,
                                                [-1, num_rows, num_cols])
      # Invert the permutation in each batch.
      inv_perm_reshaped = map_fn.map_fn(array_ops.invert_permutation,
                                        perm_reshaped)
      batch_size = perm_reshaped.shape.as_list()[0]
      # Prepare the batch indices with the same shape as the permutation.
      # The corresponding batch index is paired with each of the `num_rows`
      # permutation indices.
      batch_indices = math_ops.cast(
          array_ops.broadcast_to(
              math_ops.range(batch_size)[:, None], perm_reshaped.shape),
          dtype=output_idx_type)
      permuted_verification_reshaped = array_ops.gather_nd(
          verification_reshaped,
          array_ops.stack([batch_indices, inv_perm_reshaped], axis=-1))

      # Reshape the verification matrix back to the original shape.
      verification = array_ops.reshape(permuted_verification_reshaped,
                                       lu_shape)

    self._verifyLuBase(x, lower, upper, perm, verification,
                       output_idx_type)

  def testBasic(self):
    data = np.array([[4., -1., 2.], [-1., 6., 0], [10., 0., 5.]])

    for dtype in (np.float32, np.float64):
      for output_idx_type in (dtypes.int32, dtypes.int64):
        self._verifyLu(data.astype(dtype), output_idx_type=output_idx_type)

    for dtype in (np.complex64, np.complex128):
      for output_idx_type in (dtypes.int32, dtypes.int64):
        complex_data = np.tril(1j * data, -1).astype(dtype)
        complex_data += np.triu(-1j * data, 1).astype(dtype)
        complex_data += data
        self._verifyLu(complex_data, output_idx_type=output_idx_type)

  def testPivoting(self):
    # This matrix triggers partial pivoting because the first diagonal entry
    # is small.
    data = np.array([[1e-9, 1., 0.], [1., 0., 0], [0., 1., 5]])

    for dtype in (np.float32, np.float64):
      self._verifyLu(data.astype(dtype))

      # Bug fix: factor the dtype-cast matrix; the original factored the raw
      # float64 `data`, so per-dtype pivoting was never checked.
      _, p = linalg_ops.lu(data.astype(dtype))
      p_val = self.evaluate([p])
      # Make sure p_val is not the identity permutation.
      self.assertNotAllClose(np.arange(3), p_val)

    for dtype in (np.complex64, np.complex128):
      complex_data = np.tril(1j * data, -1).astype(dtype)
      complex_data += np.triu(-1j * data, 1).astype(dtype)
      complex_data += data
      self._verifyLu(complex_data)

      # Bug fix: the original factored the real `data` here, so the complex
      # pivoting path was never exercised.
      _, p = linalg_ops.lu(complex_data)
      p_val = self.evaluate([p])
      # Make sure p_val is not the identity permutation.
      self.assertNotAllClose(np.arange(3), p_val)

  def testInvalidMatrix(self):
    # LU factorization gives an error when the input is singular.
    # Note: A singular matrix may return without error but it won't be a valid
    # factorization.
    for dtype in self.float_types:
      with self.assertRaises(errors.InvalidArgumentError):
        self.evaluate(
            linalg_ops.lu(
                np.array([[1., 2., 3.], [2., 4., 6.], [2., 3., 4.]],
                         dtype=dtype)))
      with self.assertRaises(errors.InvalidArgumentError):
        self.evaluate(
            linalg_ops.lu(
                np.array([[[1., 2., 3.], [2., 4., 6.], [1., 2., 3.]],
                          [[1., 2., 3.], [3., 4., 5.], [5., 6., 7.]]],
                         dtype=dtype)))

  def testBatch(self):
    simple_array = np.array([[[1., -1.], [2., 5.]]])  # shape (1, 2, 2)
    self._verifyLu(simple_array)
    self._verifyLu(np.vstack((simple_array, simple_array)))
    odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
    self._verifyLu(np.vstack((odd_sized_array, odd_sized_array)))

    batch_size = 200

    # Generate random matrices.
    np.random.seed(42)
    matrices = np.random.rand(batch_size, 5, 5)
    self._verifyLu(matrices)

    # Generate random complex valued matrices.
    np.random.seed(52)
    matrices = np.random.rand(batch_size, 5,
                              5) + 1j * np.random.rand(batch_size, 5, 5)
    self._verifyLu(matrices)

  def testLargeMatrix(self):
    # Generate random matrices.
    n = 500
    np.random.seed(64)
    data = np.random.rand(n, n)
    self._verifyLu(data)

    # Generate random complex valued matrices.
    np.random.seed(129)
    data = np.random.rand(n, n) + 1j * np.random.rand(n, n)
    self._verifyLu(data)

  @test_util.run_v1_only("b/120545219")
  def testEmpty(self):
    self._verifyLu(np.empty([0, 2, 2]))
    self._verifyLu(np.empty([2, 0, 0]))

  @test_util.run_deprecated_v1
  def testConcurrentExecutesWithoutError(self):
    matrix1 = random_ops.random_normal([5, 5], seed=42)
    matrix2 = random_ops.random_normal([5, 5], seed=42)
    lu1, p1 = linalg_ops.lu(matrix1)
    lu2, p2 = linalg_ops.lu(matrix2)
    lu1_val, p1_val, lu2_val, p2_val = self.evaluate([lu1, p1, lu2, p2])
    self.assertAllEqual(lu1_val, lu2_val)
    self.assertAllEqual(p1_val, p2_val)
class LuBenchmark(test.Benchmark):
  """Benchmarks the LU op on CPU and, when available, GPU."""

  shapes = [
      (4, 4),
      (10, 10),
      (16, 16),
      (101, 101),
      (256, 256),
      (1000, 1000),
      (1024, 1024),
      (2048, 2048),
      (4096, 4096),
      (513, 2, 2),
      (513, 8, 8),
      (513, 256, 256),
      (4, 513, 2, 2),
  ]

  def _GenerateMatrix(self, shape):
    """Builds a well-conditioned (diagonally dominant) batch of matrices."""
    batch_shape = shape[:-2]
    shape = shape[-2:]
    assert shape[0] == shape[1]
    n = shape[0]
    matrix = np.ones(shape).astype(np.float32) / (2.0 * n) + np.diag(
        np.ones(n).astype(np.float32))
    return np.tile(matrix, batch_shape + (1, 1))

  def _BenchmarkOnDevice(self, shape, device, name_template):
    """Runs one LU benchmark for `shape` on `device` (removes the CPU/GPU
    code duplication of the original)."""
    with ops.Graph().as_default(), \
        session.Session(config=benchmark.benchmark_config()) as sess, \
        ops.device(device):
      matrix = variables.Variable(self._GenerateMatrix(shape))
      lu, p = linalg_ops.lu(matrix)
      variables.global_variables_initializer().run()
      self.run_op_benchmark(
          sess,
          control_flow_ops.group(lu, p),
          min_iters=25,
          name=name_template.format(shape=shape))

  def benchmarkLuOp(self):
    for shape in self.shapes:
      self._BenchmarkOnDevice(shape, "/cpu:0", "lu_cpu_{shape}")

      if test.is_gpu_available(True):
        self._BenchmarkOnDevice(shape, "/device:GPU:0", "lu_gpu_{shape}")
# Run all tests and benchmarks when this file is executed directly.
if __name__ == "__main__":
  test.main()
| 10,478 | 3,693 |
import multiprocessing as mp
import subprocess
import shutil
import os
from ..helper import make_path_safe, thirdparty_binary, filter_scp
from ..exceptions import CorpusError
def mfcc_func(directory, job_name, mfcc_config_path):  # pragma: no cover
    """Compute MFCC features for one job's wav scp using Kaldi binaries.

    Pipeline: (optionally ``extract-segments`` |) ``compute-mfcc-feats`` |
    ``copy-feats``, producing ``raw_mfcc.<job>.ark`` plus a
    ``feats.<job>.scp`` index; stderr of every stage goes to
    ``log/make_mfcc.<job>.log``.

    Parameters
    ----------
    directory : str
        Corpus split directory containing ``wav.<job>.scp`` and, optionally,
        ``segments.<job>`` files.
    job_name : int
        Job index used to pick the per-job input/output file names.
    mfcc_config_path : str
        Path to the Kaldi MFCC config passed via ``--config``.
    """
    log_directory = os.path.join(directory, 'log')
    raw_mfcc_path = os.path.join(directory, 'raw_mfcc.{}.ark'.format(job_name))
    raw_scp_path = os.path.join(directory, 'feats.{}.scp'.format(job_name))
    log_path = os.path.join(log_directory, 'make_mfcc.{}.log'.format(job_name))
    segment_path = os.path.join(directory, 'segments.{}'.format(job_name))
    scp_path = os.path.join(directory, 'wav.{}.scp'.format(job_name))

    with open(log_path, 'w') as f:
        if os.path.exists(segment_path):
            # Segmented corpus: cut utterances out of the wavs first.
            seg_proc = subprocess.Popen([thirdparty_binary('extract-segments'),
                                         'scp,p:' + scp_path, segment_path, 'ark:-'],
                                        stdout=subprocess.PIPE, stderr=f)
            comp_proc = subprocess.Popen([thirdparty_binary('compute-mfcc-feats'), '--verbose=2',
                                          '--config=' + mfcc_config_path,
                                          'ark:-', 'ark:-'],
                                         stdout=subprocess.PIPE, stderr=f, stdin=seg_proc.stdout)
        else:
            # Whole-file corpus: compute MFCCs directly from the scp.
            comp_proc = subprocess.Popen([thirdparty_binary('compute-mfcc-feats'), '--verbose=2',
                                          '--config=' + mfcc_config_path,
                                          'scp,p:' + scp_path, 'ark:-'],
                                         stdout=subprocess.PIPE, stderr=f)
        # Compress and index the features; waiting on the last stage drains
        # the whole pipeline.
        copy_proc = subprocess.Popen([thirdparty_binary('copy-feats'),
                                      '--compress=true', 'ark:-',
                                      'ark,scp:{},{}'.format(raw_mfcc_path, raw_scp_path)],
                                     stdin=comp_proc.stdout, stderr=f)
        copy_proc.wait()
def init(env):
    """Initializer for multiprocessing workers: adopt the parent environment.

    Bug fix: the original did ``os.environ = env``, rebinding the module
    attribute to a plain dict. That bypasses ``putenv``, so the variables are
    never exported to the C-level environment and child subprocesses (e.g.
    the Kaldi binaries) would not see them. Mutating the existing mapping in
    place exports each variable properly.
    """
    os.environ.clear()
    os.environ.update(env)
def mfcc(mfcc_directory, num_jobs, feature_config, frequency_configs):
    """
    Multiprocessing function that converts wav files into MFCCs.

    See http://kaldi-asr.org/doc/feat.html and
    http://kaldi-asr.org/doc/compute-mfcc-feats_8cc.html for more details on how
    MFCCs are computed.

    Also see https://github.com/kaldi-asr/kaldi/blob/master/egs/wsj/s5/steps/make_mfcc.sh
    for the bash script this function was based on.

    Parameters
    ----------
    mfcc_directory : str
        Directory to save MFCC feature matrices (a ``log`` subdirectory is
        created inside it)
    num_jobs : int
        The number of processes to use in calculation
    feature_config : object
        Configuration object whose ``write`` method produces the per-job
        MFCC config files
    frequency_configs : list
        Pairs of (job index, parameters), one per job

    Raises
    ------
    CorpusError
        If the files per speaker exceeds the number of files that are
        allowed to be open on the computer (for Unix-based systems)
    """
    child_env = os.environ.copy()
    os.makedirs(os.path.join(mfcc_directory, 'log'), exist_ok=True)
    paths = [feature_config.write(mfcc_directory, job, params)
             for job, params in frequency_configs]
    jobs = [(mfcc_directory, x, paths[x]) for x in range(num_jobs)]
    too_many_open_files = False
    with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool:
        try:
            results = [pool.apply_async(mfcc_func, args=i) for i in jobs]
            for result in results:
                result.get()
        except OSError as e:
            # errno 24 is EMFILE ("too many open files").  (Removed the
            # leftover debug `print(dir(e))` from the original.)
            if e.errno == 24:
                too_many_open_files = True
            else:
                raise
    if too_many_open_files:
        raise CorpusError(
            'There were too many files per speaker to process based on your OS settings. Please try to split your data into more speakers.')
def apply_cmvn_func(directory, job_name, config):
    """Apply per-speaker cepstral mean/variance normalization for one job.

    Runs Kaldi ``apply-cmvn`` with the job's utt2spk mapping and cmvn stats,
    writing the normalized features to ``<raw_feature_id>.<job>.ark/.scp``.
    Skips the work entirely if the output scp already exists.
    """
    normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name))
    normed_ark_path = os.path.join(directory, config.raw_feature_id + '.{}.ark'.format(job_name))
    with open(os.path.join(directory, 'log', 'norm.{}.log'.format(job_name)), 'w') as logf:
        utt2spkpath = os.path.join(directory, 'utt2spk.{}'.format(job_name))
        cmvnpath = os.path.join(directory, 'cmvn.{}.scp'.format(job_name))
        featspath = os.path.join(directory, 'feats.{}.scp'.format(job_name))
        if not os.path.exists(normed_scp_path):
            cmvn_proc = subprocess.Popen([thirdparty_binary('apply-cmvn'),
                                          '--utt2spk=ark:' + utt2spkpath,
                                          'scp:' + cmvnpath,
                                          'scp:' + featspath,
                                          'ark,scp:{},{}'.format(normed_ark_path, normed_scp_path)],
                                         stderr=logf
                                         )
            cmvn_proc.communicate()
def apply_cmvn(directory, num_jobs, config):
    """Run apply_cmvn_func for every job in a multiprocessing pool."""
    env_snapshot = os.environ.copy()
    job_args = [(directory, job, config) for job in range(num_jobs)]
    with mp.Pool(processes=num_jobs, initializer=init, initargs=(env_snapshot,)) as pool:
        pending = [pool.apply_async(apply_cmvn_func, args=a) for a in job_args]
        for task in pending:
            task.get()
def add_deltas_func(directory, job_name, config):
    """Append delta features for one job, optionally applying fMLLR.

    Runs Kaldi ``add-deltas`` on the normalized features; when
    ``config.fmllr_path`` exists the output is additionally piped through
    ``transform-feats`` with that transform.  Writes the final
    ``<feature_id>.<job>.ark/.scp`` pair.
    """
    normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name))
    ark_path = os.path.join(directory, config.feature_id + '.{}.ark'.format(job_name))
    scp_path = os.path.join(directory, config.feature_id + '.{}.scp'.format(job_name))
    with open(os.path.join(directory, 'log', 'add_deltas.{}.log'.format(job_name)), 'w') as logf:
        if config.fmllr_path is not None and os.path.exists(config.fmllr_path):
            # add-deltas | transform-feats (speaker-adapted features)
            deltas_proc = subprocess.Popen([thirdparty_binary('add-deltas'),
                                            'scp:' + normed_scp_path, 'ark:-'],
                                           stderr=logf,
                                           stdout=subprocess.PIPE)
            trans_proc = subprocess.Popen([thirdparty_binary('transform-feats'),
                                           'ark:' + config.fmllr_path, 'ark:-',
                                           'ark,scp:{},{}'.format(ark_path, scp_path)],
                                          stdin=deltas_proc.stdout,
                                          stderr=logf)
            trans_proc.communicate()
        else:
            # Plain deltas, written straight to the output archive.
            deltas_proc = subprocess.Popen([thirdparty_binary('add-deltas'),
                                            'scp:' + normed_scp_path, 'ark,scp:{},{}'.format(ark_path, scp_path)],
                                           stderr=logf)
            deltas_proc.communicate()
def add_deltas(directory, num_jobs, config):
    """Run add_deltas_func for every job in a multiprocessing pool."""
    env_snapshot = os.environ.copy()
    job_args = [(directory, job, config) for job in range(num_jobs)]
    with mp.Pool(processes=num_jobs, initializer=init, initargs=(env_snapshot,)) as pool:
        pending = [pool.apply_async(add_deltas_func, args=a) for a in job_args]
        for task in pending:
            task.get()
def apply_lda_func(directory, job_name, config):
    """Splice frames and apply an LDA transform for one job.

    Pipeline: ``splice-feats`` | ``transform-feats`` (with ``config.lda_path``),
    optionally pasting per-utterance i-vectors onto the result.  If the LDA
    matrix is missing, only splicing is performed and a note is written to the
    job's log.
    """
    normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name))
    ark_path = os.path.join(directory, config.feature_id + '.{}.ark'.format(job_name))
    scp_path = os.path.join(directory, config.feature_id + '.{}.scp'.format(job_name))
    ivector_scp_path = os.path.join(directory, 'ivector.{}.scp'.format(job_name))
    with open(os.path.join(directory, 'log', 'lda.{}.log'.format(job_name)), 'a') as logf:
        if os.path.exists(config.lda_path):
            splice_feats_proc = subprocess.Popen([thirdparty_binary('splice-feats'),
                                                  '--left-context={}'.format(config.splice_left_context),
                                                  '--right-context={}'.format(config.splice_right_context),
                                                  'scp:' + normed_scp_path,
                                                  'ark:-'],
                                                 stdout=subprocess.PIPE,
                                                 stderr=logf)
            if config.ivectors and os.path.exists(ivector_scp_path):
                # splice | transform | paste i-vectors onto each frame.
                transform_feats_proc = subprocess.Popen([thirdparty_binary("transform-feats"),
                                                         config.lda_path,
                                                         'ark:-',
                                                         'ark:-'],
                                                        stdin=splice_feats_proc.stdout,
                                                        stdout=subprocess.PIPE,
                                                        stderr=logf)
                paste_proc = subprocess.Popen([thirdparty_binary('paste-feats'),
                                               'ark:-',
                                               'scp:' + ivector_scp_path,
                                               'ark,scp:{},{}'.format(ark_path, scp_path)],
                                              stdin=transform_feats_proc.stdout,
                                              stderr=logf)
                paste_proc.communicate()
            else:
                # splice | transform straight to the output archive.
                transform_feats_proc = subprocess.Popen([thirdparty_binary("transform-feats"),
                                                         config.lda_path,
                                                         'ark:-',
                                                         'ark,scp:{},{}'.format(ark_path, scp_path)],
                                                        stdin=splice_feats_proc.stdout,
                                                        stderr=logf)
                transform_feats_proc.communicate()
        else:
            # No LDA matrix available: fall back to splicing only.
            logf.write('could not find "{}"\n'.format(config.lda_path))
            splice_feats_proc = subprocess.Popen([thirdparty_binary('splice-feats'),
                                                  '--left-context={}'.format(config.splice_left_context),
                                                  '--right-context={}'.format(config.splice_right_context),
                                                  'scp:' + normed_scp_path,
                                                  'ark,scp:{},{}'.format(ark_path, scp_path)],
                                                 stderr=logf)
            splice_feats_proc.communicate()
def apply_lda(directory, num_jobs, config):
    """Run apply_lda_func for every job in a multiprocessing pool."""
    job_args = [(directory, job, config) for job in range(num_jobs)]
    with mp.Pool(processes=num_jobs, initializer=init, initargs=(os.environ.copy(),)) as pool:
        pending = [pool.apply_async(apply_lda_func, args=a) for a in job_args]
        for task in pending:
            task.get()
| 10,970 | 3,245 |
"""
A number of static methods for interpretting the state of the fantasy football pitch that aren't required directly by
the client
"""
from ffai.core import Game, Action, ActionType
from ffai.core.procedure import *
from ffai.util.pathfinding import *
from typing import Optional, List, Dict
class ActionSequence:
    """An ordered list of sequential Actions to attempt to undertake.

    As the sequence is acted on, completed steps are consumed from the front
    via :meth:`popleft`, so the next move is always ``action_steps[0]``.
    (Forward references are quoted so the class is importable on its own;
    dead commented-out code from the original has been removed.)
    """

    def __init__(self, action_steps: 'List[Action]', score: float = 0, description: str = ''):
        """Creates a new ActionSequence.

        :param action_steps: Sequence of action steps that form this action.
        :param score: A score representing the attractiveness of the move (default: 0)
        :param description: A debug string (default: '')
        """
        self.action_steps = action_steps
        self.score = score
        self.description = description

    def is_valid(self, game: 'Game') -> bool:
        """Whether the sequence is still legal in `game`.  Not yet implemented."""
        pass

    def popleft(self):
        """Remove and return the next action step."""
        return self.action_steps.pop(0)

    def is_empty(self):
        """True when no action steps remain."""
        return not self.action_steps
class FfHeatMap:
    """A heat map of a Blood Bowl field.

    A class for analysing zones of control for both teams.  Forward
    references are quoted so the class is importable on its own; unreachable
    code after `return` and unused locals in the scoring methods have been
    removed.
    """

    def __init__(self, game: 'Game', team: 'Team'):
        self.game = game
        self.team = team
        # Note that the edges are not on the field, but represent crowd squares
        self.units_friendly: 'List[List[float]]' = [
            [0.0 for _ in range(game.state.pitch.height)] for _ in range(game.state.pitch.width)]
        self.units_opponent: 'List[List[float]]' = [
            [0.0 for _ in range(game.state.pitch.height)] for _ in range(game.state.pitch.width)]

    def add_unit_paths(self, player: 'Player', paths: 'List[Path]'):
        """Accumulate reachability heat at each path end, weighted (1-cost)^2."""
        grid = self.units_friendly if player.team == self.team else self.units_opponent
        for path in paths:
            end = path.steps[-1]
            grid[end.x][end.y] += (1.0 - path.cost) * (1.0 - path.cost)

    def add_unit_by_paths(self, game: 'Game', paths: 'Dict[Player, List[Path]]'):
        """Accumulate heat for every player's set of reachable paths."""
        for player, player_paths in paths.items():
            self.add_unit_paths(player, player_paths)

    def add_players_moved(self, game: 'Game', players: 'List[Player]'):
        """Add heat for players that have already moved this turn."""
        for player in players:
            adjacents = game.get_adjacent_squares(player.position, occupied=True)
            self.units_friendly[player.position.x][player.position.y] += 1.0
            for adjacent in adjacents:
                # NOTE(review): adds 0.5 at the player's OWN square once per
                # occupied neighbour; if per-neighbour heat was intended it
                # should index adjacent.x/adjacent.y -- confirm before changing.
                self.units_friendly[player.position.x][player.position.y] += 0.5

    def get_ball_move_square_safety_score(self, square: 'Square') -> float:
        """Score how safe it is to move the ball to `square` (higher = safer)."""
        # Basic idea - identify safe regions to move the ball towards.
        opponent_heat = self.units_opponent[square.x][square.y]
        return 30.0 * max(0.0, (1.0 - opponent_heat / 2))

    def get_cage_necessity_score(self, square: 'Square') -> float:
        """Score how necessary a cage is at `square` (lower = less needed)."""
        opponent_heat = self.units_opponent[square.x][square.y]
        score = 0.0
        if opponent_heat < 0.4:
            score -= 80.0
        return score
def blitz_used(game: Game) -> bool:
    """True once no START_BLITZ action remains available this turn."""
    available = (a.action_type for a in game.state.available_actions)
    return ActionType.START_BLITZ not in available
def handoff_used(game: Game) -> bool:
    """True once no START_HANDOFF action remains available this turn."""
    available = (a.action_type for a in game.state.available_actions)
    return ActionType.START_HANDOFF not in available
def foul_used(game: Game) -> bool:
    """True once no START_FOUL action remains available this turn."""
    available = (a.action_type for a in game.state.available_actions)
    return ActionType.START_FOUL not in available
def pass_used(game: Game) -> bool:
    """True once no START_PASS action remains available this turn."""
    available = (a.action_type for a in game.state.available_actions)
    return ActionType.START_PASS not in available
def get_players(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = True, include_used: bool = True, include_off_pitch: bool = False, only_blockable: bool = False, only_used: bool = False) -> List[Player]:
    """Collect players matching the given side/state filters."""
    candidates: List[Player] = []
    for cur_team in game.state.teams:
        is_own = cur_team == team
        if (is_own and include_own) or (not is_own and include_opp):
            candidates.extend(cur_team.players)

    chosen: List[Player] = []
    for player in candidates:
        # Guard clauses mirror the original nested include/only logic.
        if only_blockable and not player.state.up:
            continue
        if only_used and not player.state.used:
            continue
        if not include_stunned and player.state.stunned:
            continue
        if not include_used and player.state.used:
            continue
        off_pitch = player.position is None or game.is_out_of_bounds(player.position)
        if not include_off_pitch and off_pitch:
            continue
        chosen.append(player)
    return chosen
def caging_squares_north_east(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage squares to the north-east of the ball carrier.

    A cage needs a player in each diagonal corner (NE/SE/SW/NW) with no gap
    of 3+ squares; near the sideline the corner collapses onto the edge rows,
    which is why the offsets depend on how close y is to the pitch height.
    """
    squares: List[Square] = []
    x, y = protect_square.x, protect_square.y
    if x > game.state.pitch.width - 3:
        return squares
    if y == game.state.pitch.height - 2:
        offsets = [(1, 1), (2, 1), (1, 0), (2, 0)]
    elif y == game.state.pitch.height - 1:
        offsets = [(1, 0), (2, 0)]
    else:
        offsets = [(1, 1), (1, 2), (2, 1)]
    for dx, dy in offsets:
        squares.append(game.get_square(x + dx, y + dy))
    return squares
def caging_squares_north_west(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage squares to the north-west of the ball carrier."""
    squares: List[Square] = []
    x, y = protect_square.x, protect_square.y
    if x < 3:
        return squares
    if y == game.state.pitch.height - 2:
        offsets = [(-1, 1), (-2, 1), (-1, 0), (-2, 0)]
    elif y == game.state.pitch.height - 1:
        offsets = [(-1, 0), (-2, 0)]
    else:
        offsets = [(-1, 1), (-1, 2), (-2, 1)]
    for dx, dy in offsets:
        squares.append(game.get_square(x + dx, y + dy))
    return squares
def caging_squares_south_west(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage squares to the south-west of the ball carrier."""
    squares: List[Square] = []
    x, y = protect_square.x, protect_square.y
    if x < 3:
        return squares
    if y == 2:
        offsets = [(-1, -1), (-2, -1), (-1, 0), (-2, 0)]
    elif y == 1:
        offsets = [(-1, 0), (-2, 0)]
    else:
        offsets = [(-1, -1), (-1, -2), (-2, -1)]
    for dx, dy in offsets:
        squares.append(game.get_square(x + dx, y + dy))
    return squares
def caging_squares_south_east(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage squares to the south-east of the ball carrier."""
    squares: List[Square] = []
    x, y = protect_square.x, protect_square.y
    if x > game.state.pitch.width - 3:
        return squares
    if y == 2:
        offsets = [(1, -1), (2, -1), (1, 0), (2, 0)]
    elif y == 1:
        offsets = [(1, 0), (2, 0)]
    else:
        offsets = [(1, -1), (1, -2), (2, -1)]
    for dx, dy in offsets:
        squares.append(game.get_square(x + dx, y + dy))
    return squares
def is_caging_position(game: Game, player: Player, protect_player: Player) -> bool:
    """True if player is close enough to cage (within 2 squares) but not on
    the protected player's row or column."""
    close_enough = player.position.distance(protect_player.position) <= 2
    return close_enough and not is_castle_position_of(game, player, protect_player)
def has_player_within_n_squares(game: Game, units: List[Player], square: Square, num_squares: int) -> bool:
    """True if any of `units` stands within `num_squares` of `square`."""
    return any(unit.position.distance(square) <= num_squares for unit in units)
def has_adjacent_player(game: 'Game', square: 'Square') -> bool:
    """Return True when at least one player occupies a square adjacent to `square`.

    Bug fix: the original returned ``not game.get_adjacent_players(square)``,
    i.e. True when there were NO adjacent players, contradicting the function
    name.  (Forward references quoted so the function imports standalone.)
    """
    return bool(game.get_adjacent_players(square))
def is_castle_position_of(game: Game, player1: Player, player2: Player) -> bool:
    """True if the two players share a row or column (rook-wise alignment)."""
    same_column = player1.position.x == player2.position.x
    same_row = player1.position.y == player2.position.y
    return same_column or same_row
def is_bishop_position_of(game: Game, player1: Player, player2: Player) -> bool:
    """True if the two players lie on a common diagonal (bishop-wise)."""
    dx = abs(player1.position.x - player2.position.x)
    dy = abs(player1.position.y - player2.position.y)
    return dx == dy
def attacker_would_surf(game: Game, attacker: Player, defender: Player) -> bool:
    """True if a block from attacker could push defender into the crowd."""
    # Defenders with Stand Firm, or Side Step (unless countered by Grab),
    # cannot be pushed off the pitch.
    side_step_holds = defender.has_skill(Skill.SIDE_STEP) and not attacker.has_skill(Skill.GRAB)
    if side_step_holds or defender.has_skill(Skill.STAND_FIRM):
        return False
    if not attacker.position.is_adjacent(defender.position):
        return False
    return direct_surf_squares(game, attacker.position, defender.position)
def direct_surf_squares(game: Game, attack_square: Square, defend_square: Square) -> bool:
    """True if a push from ``attack_square`` can drive ``defend_square`` out of bounds."""
    on_side = on_sideline(game, defend_square)
    in_zone = on_endzone(game, defend_square)
    # In a corner any push direction works; otherwise the attacker must be
    # aligned so the push goes straight over the boundary.
    if on_side and in_zone:
        return True
    if on_side and defend_square.x == attack_square.x:
        return True
    return in_zone and defend_square.y == attack_square.y
def reverse_x_for_right(game: Game, team: Team, x: int) -> int:
    """Return ``x`` unchanged when ``team`` owns the left half (where Square(13, 3) sits); mirror it otherwise."""
    if game.is_team_side(Square(13, 3), team):
        return x
    return game.state.pitch.width - 1 - x
def reverse_x_for_left(game: Game, team: Team, x: int) -> int:
    """Mirror of :func:`reverse_x_for_right`: flips ``x`` when ``team`` owns the left half."""
    if game.is_team_side(Square(13, 3), team):
        return game.state.pitch.width - 1 - x
    return x
def on_sideline(game: Game, square: Square) -> bool:
    """True if ``square`` lies on the top or bottom sideline row."""
    return square.y in (1, game.state.pitch.height - 1)
def on_endzone(game: Game, square: Square) -> bool:
    """True if ``square`` lies in either endzone column."""
    return square.x in (1, game.state.pitch.width - 1)
def on_los(game: Game, team: Team, square: Square) -> bool:
    """True if ``square`` sits on the team's line of scrimmage (normalized x == 13, central rows only)."""
    if reverse_x_for_right(game, team, square.x) != 13:
        return False
    return 4 < square.y < 21
def los_squares(game: Game, team: Team) -> List[Square]:
    """Return the seven central line-of-scrimmage squares (y = 5..11) for ``team``."""
    los_x = reverse_x_for_right(game, team, 13)
    return [game.get_square(los_x, y) for y in range(5, 12)]
def distance_to_sideline(game: Game, square: Square) -> int:
    """Distance from ``square`` to the nearest sideline row (0 means on it)."""
    to_top = square.y - 1
    to_bottom = game.state.pitch.height - square.y - 2
    return min(to_top, to_bottom)
def is_endzone(game, square: Square) -> bool:
    """True if ``square`` is in either endzone column (same test as :func:`on_endzone`)."""
    width = game.state.pitch.width
    return square.x == 1 or square.x == width - 1
def last_block_proc(game) -> Optional[Block]:
    """Return the most recently pushed ``Block`` proc on the game stack, or ``None``."""
    for item in reversed(game.state.stack.items):
        if isinstance(item, Block):
            return item
    return None
def is_adjacent_ball(game: Game, square: Square) -> bool:
    """True if the ball is on the pitch and adjacent to ``square``."""
    ball_square = game.get_ball_position()
    if ball_square is None:
        return False
    return ball_square.is_adjacent(square)
def squares_within(game: Game, square: Square, distance: int) -> List[Square]:
    """All on-pitch squares within Chebyshev ``distance`` of ``square``, excluding ``square`` itself."""
    result: List[Square] = []
    for dx in range(-distance, distance + 1):
        for dy in range(-distance, distance + 1):
            candidate = game.get_square(square.x + dx, square.y + dy)
            if candidate == square:
                continue
            if game.is_out_of_bounds(candidate):
                continue
            result.append(candidate)
    return result
def distance_to_defending_endzone(game: Game, team: Team, position: Square) -> int:
    """Number of columns between ``position`` and the endzone ``team`` defends."""
    return reverse_x_for_right(game, team, position.x) - 1
def distance_to_scoring_endzone(game: Game, team: Team, position: Square) -> int:
    """Number of columns between ``position`` and the endzone ``team`` scores in."""
    return reverse_x_for_left(game, team, position.x) - 1
def players_in_scoring_endzone(game: Game, team: Team, include_own: bool = True, include_opp: bool = False) -> List[Player]:
    """Players (filtered by side) currently standing in ``team``'s scoring endzone."""
    candidates = get_players(game, team, include_own=include_own, include_opp=include_opp)
    return [p for p in candidates if in_scoring_endzone(game, team, p.position)]
def in_scoring_endzone(game: Game, team: Team, square: Square) -> bool:
    """True if ``square`` lies inside the endzone ``team`` scores in."""
    normalized_x = reverse_x_for_left(game, team, square.x)
    return normalized_x == 1
def players_in_scoring_distance(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = False) -> List[Player]:
    """Players whose remaining movement can carry them into the scoring endzone."""
    candidates = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned)
    return [p for p in candidates
            if distance_to_scoring_endzone(game, team, p.position) <= p.num_moves_left()]
def distance_to_nearest_player(game: Game, team: Team, square: Square, include_own: bool = True, include_opp: bool = True, only_used: bool = False, include_used: bool = True, include_stunned: bool = True, only_blockable: bool = False) -> int:
    """Chebyshev distance from ``square`` to the closest matching player (100 when no player matches)."""
    candidates = get_players(game, team, include_own=include_own, include_opp=include_opp, only_used=only_used, include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable)
    nearest = 100  # sentinel: effectively "no player found"
    for candidate in candidates:
        nearest = min(nearest, candidate.position.distance(square))
    return nearest
def screening_distance(game: Game, from_square: Square, to_square: Square) -> float:
    # Return the "screening distance" between 3 squares. (To complete)
    # NOTE(review): unimplemented stub — always returns 0.0. The commented
    # formula below hints at the intended metric but is not in use.
    # float dist =math.sqrt(math.pow(square.x - cur.position.x, 3) + math.pow(square.y - cur.position.y, 3))
    return 0.0
def num_opponents_can_reach(game: Game, team: Team, square: Square) -> int:
    """Count opponents that could plausibly move to ``square`` this turn.

    Compares the Chebyshev distance to a movement allowance of MA + 2
    (possible GFIs), minus 3 if the player must first stand up; stunned
    players never count.

    Bug fix: the distance was previously ``max(dx, dy)`` without ``abs``,
    which went negative for opponents north/west of ``square`` and wrongly
    counted far-away players as able to reach it.
    """
    opps: List[Player] = get_players(game, team, include_own=False, include_opp=True)
    num_opps_reach: int = 0
    for cur in opps:
        if cur.state.stunned:
            continue
        dist = max(abs(square.x - cur.position.x), abs(square.y - cur.position.y))
        move_allowed = cur.get_ma() + 2
        if not cur.state.up:
            move_allowed -= 3
        if dist < move_allowed:
            num_opps_reach += 1
    return num_opps_reach
def num_opponents_on_field(game: Game, team: Team) -> int:
    """Count opponents that currently occupy a square on the pitch."""
    opps: List[Player] = get_players(game, team, include_own=False, include_opp=True)
    return sum(1 for opp in opps if opp.position is not None)
def number_opponents_closer_than_to_endzone(game: Game, team: Team, square: Square) -> int:
    """Count opponents strictly closer to our defending endzone than ``square`` is."""
    threshold = distance_to_defending_endzone(game, team, square)
    opponents = get_players(game, team, include_own=False, include_opp=True)
    return sum(1 for opp in opponents
               if distance_to_defending_endzone(game, team, opp.position) < threshold)
def in_scoring_range(game: Game, player: Player) -> bool:
    """True if the player's remaining moves suffice to reach the scoring endzone."""
    needed = distance_to_scoring_endzone(game, player.team, player.position)
    return player.num_moves_left() >= needed
def players_in_scoring_range(game: Game, team: Team, include_own=True, include_opp=True, include_used=True, include_stunned=True) -> List[Player]:
    """All selected players that are currently within scoring range."""
    candidates = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned, include_used=include_used)
    return [p for p in candidates if in_scoring_range(game, p)]
def players_in(game: Game, team: Team, squares: List[Square], include_own=True, include_opp=True, include_used=True, include_stunned=True, only_blockable=False) -> List[Player]:
    """Players standing on any of ``squares`` that also pass the side/state filters."""
    allowed = get_players(game, team, include_own=include_own, include_opp=include_opp, include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable)
    found: List[Player] = []
    for square in squares:
        occupant: Optional[Player] = game.get_player_at(square)
        if occupant is not None and occupant in allowed:
            found.append(occupant)
    return found
| 19,640 | 6,775 |
# -*- coding: utf-8 -*-
"""sb-fastapi CLI root."""
import logging
import click
from sb_backend.cli.commands.serve import serve
@click.group()
@click.option(
    "-v",
    "--verbose",
    help="Enable verbose logging.",
    is_flag=True,
    default=False,
)
def cli(**options):
    """sb-fastapi CLI root."""
    # Configure root logging once for every subcommand; -v raises verbosity.
    level = logging.DEBUG if options["verbose"] else logging.INFO
    logging.basicConfig(
        level=level,
        format="[%(asctime)s] [%(process)s] [%(levelname)s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S %z",
    )
cli.add_command(serve)
| 601 | 222 |
import dash
from dash import html
# Single-page Dash app; the layout below is static (no callbacks registered).
app = dash.Dash(__name__)
# Layout: centered H1 title, H2 subtitle, an intro paragraph and a bullet
# list whose final item links to the source article.
app.layout = html.Div(children=[html.H1('Data Science',
                                style = {'textAlign': 'center',
                                         'color': '#0FD08D',
                                         'font-size': '50px'}),
                        html.H2('La carrera mas sexy del siglo XXI',
                                style = {'textAlign': 'center',
                                         'color' : '#009A64'}),
                        html.P('Factores clave:'),
                        html.Ul(children = [html.Li('Factor 1'),
                                            html.Li('Factor 2'),
                                            html.Li('Factor 3'),
                                            html.Li(['Source: ',
                                                    html.A('https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946',
                                                           href = 'https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946')
                                                    ])
                                            ])
                        ])
if __name__ == '__main__':
    # debug=True enables hot reloading; disable for production deployments.
    app.run_server(debug=True)
# -*- coding: utf-8 -*-
###################################################################################
from gluon import current
from helper import get_constant, execute_remote_cmd, config, get_datetime, \
log_exception, is_pingable, get_context_path
from libvirt import * # @UnusedWildImport
from log_handler import logger
from nat_mapper import create_mapping, remove_mapping
import math, shutil, libvirt, os, time, random
import xml.etree.ElementTree as etree
def _choose_datastore():
    """
    Chooses datastore from a list of available datastores

    Selects the datastore row with the most free space
    (``capacity - used``); raises when no datastore rows exist.
    """
    datastores = current.db(current.db.datastore.id >= 0).select()
    logger.debug("datastore_length" + str(len(datastores)))
    if len(datastores) == 0:
        raise Exception("No datastore found.")
    # The old implementation built a dict of rows keyed by free space and
    # sorted it; picking the maximum directly is equivalent and simpler.
    selected = max(datastores, key=lambda ds: ds.capacity - ds.used)
    logger.debug("selected datastore" + str(selected))
    return selected
def host_resources_used(host_id):
    """
    Returns resources utilization of a host in MB, Count

    Sums RAM and vCPU over every VM on the host that actually consumes
    resources (UNKNOWN and IN_QUEUE VMs are excluded), rounding each
    total up to a whole unit.
    """
    vms = current.db((current.db.vm_data.host_id == host_id) & (current.db.vm_data.status != current.VM_STATUS_UNKNOWN) & (current.db.vm_data.status != current.VM_STATUS_IN_QUEUE)).select()
    logger.debug("vms selected are: " + str(vms))
    ram_total = 0.0
    cpu_total = 0.0
    for vm in vms:
        ram_total += vm.RAM
        cpu_total += vm.vCPU
    return (math.ceil(ram_total), math.ceil(cpu_total))
def getVirshDomainConn(vm_details, host_ip=None, domain_name=None):
    """
    Generic method to establish libvirt connection

    Returns a ``(connection, domain)`` pair. When ``vm_details`` is given,
    the host IP and domain name are taken from it and the explicit
    arguments are ignored. The caller owns the returned connection and
    must close it.
    """
    if vm_details != None:
        host_ip = vm_details.host_id.host_ip.private_ip
        domain_name = vm_details.vm_identity
    connection_object = libvirt.open("qemu+ssh://root@" + host_ip + "/system")
    domain = connection_object.lookupByName(domain_name)
    return (connection_object, domain)
def getVirshDomain(vm_details):
    """
    Generic method to establish libvirt connection

    Opens a short-lived connection, resolves the domain, and closes the
    connection before handing the domain back.
    """
    conn, dom = getVirshDomainConn(vm_details)
    conn.close()
    return dom
def _set_portgroup_in_vm(domain_name, portgroup, host_ip, vlan_tag):
    """
    Set the vlan tag in network configuration of VM
    This is required to ensure that VM fetches IP of its vlan from DHCP
    """
    (connection_object, domain) = getVirshDomainConn(None, host_ip, domain_name)
    # Parse the live domain XML to find the interface's <source> element.
    xml = etree.fromstring(domain.XMLDesc(0))
    source_network_element = xml.find('.//interface/source')
    source_network_string=etree.tostring(source_network_element)
    logger.debug("Source network is " + source_network_string)
    if source_network_string.find(" bridge=") != -1:
        # Bridged interface: tag traffic by appending <vlan><tag id=...>.
        logger.debug("Source is set to bridge adding <vlan><tag_id> to the interface tag ")
        root_new = xml.find('.//interface')
        root_new_vlan= etree.SubElement(root_new, 'vlan')
        root_new_tag= etree.SubElement(root_new_vlan, 'tag')
        root_new_tag.set('id',vlan_tag)
        logger.debug("After append root_new_vlan is " + etree.tostring(root_new_vlan))
    elif source_network_string.find(" network=") != -1:
        # libvirt network interface: select the VLAN via its portgroup.
        logger.debug("Source is set to network adding portgroup to the source tag ")
        source_network_element.set('portgroup', portgroup)
        logger.debug("Changed source network is " + etree.tostring(source_network_element))
    else:
        logger.debug("Neither VM nor vlan tagId is added in the xml" )
    # Redefine the domain with the modified XML, then hard-restart it so
    # the new network configuration takes effect.
    domain = connection_object.defineXML(etree.tostring(xml))
    domain.destroy()
    domain.create()
    domain.isActive()
    connection_object.close()
def _get_private_ip_mac(security_domain_id):
    """
    Chooses a random Private IP from the pool, such that:
    - It is not assigned to any VM or host
    - It belongs to VLAN of given security domain

    Returns a ``private_ip_pool`` row; raises when the pool for the
    security domain's VLANs is exhausted.
    """
    vlans = current.db(current.db.security_domain.id == security_domain_id)._select(current.db.security_domain.vlan)
    # Exclude IPs referenced by any VM or any host, restrict to the
    # security domain's VLANs, and let the DB pick one at random.
    private_ip_pool = current.db((~current.db.private_ip_pool.id.belongs(current.db(current.db.vm_data.private_ip != None)._select(current.db.vm_data.private_ip)))
                                & (~current.db.private_ip_pool.id.belongs(current.db(current.db.host.host_ip != None)._select(current.db.host.host_ip)))
                                & (current.db.private_ip_pool.vlan.belongs(vlans))).select(current.db.private_ip_pool.ALL, orderby='<random>').first()
    if private_ip_pool:
        return private_ip_pool
    else:
        sd = current.db.security_domain[security_domain_id]
        raise Exception(("Available MACs are exhausted for security domain '%s'." % sd.name))
def _choose_random_public_ip():
    """
    Chooses a random Public IP from the pool, such that:
    - It is not assigned to any VM
    - It is not assigned to any host
    - IP is marked active.

    Returns the chosen ``public_ip_pool`` row, or ``None`` when the pool
    is exhausted (callers must handle ``None``).
    """
    public_ip_pool = current.db((~current.db.public_ip_pool.id.belongs(current.db(current.db.vm_data.public_ip != None)._select(current.db.vm_data.public_ip)))
                               & (~current.db.public_ip_pool.id.belongs(current.db(current.db.host.public_ip != None)._select(current.db.host.public_ip)))
                               & (current.db.public_ip_pool.is_active == True)) \
                               .select(current.db.public_ip_pool.ALL, orderby='<random>').first()
    return public_ip_pool
def _choose_mac_ip(vm_properties):
    """
    Chooses mac address and ip address for a vm to be installed.
    It also chooses a random public IP if requested

    Mutates ``vm_properties`` in place: fills private_ip/mac_addr/vlan_*
    when missing, and sets ``public_ip`` (or ``None`` when not requested).
    """
    if 'private_ip' not in vm_properties:
        ip_info = _get_private_ip_mac(vm_properties['security_domain'])
        vm_properties['private_ip'] = ip_info.private_ip
        vm_properties['mac_addr'] = ip_info.mac_addr
        vm_properties['vlan_name'] = ip_info.vlan.name
        vm_properties['vlan_tag'] = ip_info.vlan.vlan_tag
    if not vm_properties['public_ip_req']:
        vm_properties['public_ip'] = None
    elif 'public_ip' not in vm_properties:
        pool_entry = _choose_random_public_ip()
        if pool_entry is None:
            raise Exception("Available Public IPs are exhausted.")
        vm_properties['public_ip'] = pool_entry.public_ip
def _choose_mac_ip_vncport(vm_properties):
    """
    Chooses mac address, ip address and vncport for a vm to be installed
    """
    _choose_mac_ip(vm_properties)
    start_range = int(get_constant('vncport_start_range'))
    end_range = int(get_constant('vncport_end_range'))
    # Bug fix: the previous code tested an int for membership in the raw
    # Rows object returned by select(), which never matches a row, so an
    # already-taken port could be handed out again. Extract the column
    # values into a set first; vnc_port is stored as a string (see the
    # assignment below), so compare as strings.
    rows = current.db().select(current.db.vm_data.vnc_port)
    vnc_ports_taken = set(str(row.vnc_port) for row in rows)
    while True:
        random_vnc_port = random.randrange(start_range, end_range, 1)
        if str(random_vnc_port) not in vnc_ports_taken:
            break
    vm_properties['vnc_port'] = str(random_vnc_port)
def find_new_host(RAM, vCPU):
    """
    Select a random host from list of 3 hosts with available RAM and CPU
    Availability is checked with 200 percent over-commitment.

    Returns the id of the candidate host with the least physical RAM
    (tightest fit); raises when no active host can accommodate the VM.
    """
    hosts = current.db(current.db.host.status == 1).select()
    hosts = hosts.as_list(True,False)
    count = 3
    selected_hosts = []
    # Sample up to 3 random hosts that pass the capacity check.
    while count != 0 and hosts:
        host = random.choice(hosts)
        logger.debug("Checking host =" + host['host_name'])
        (used_ram, used_cpu) = host_resources_used(host['id'])
        logger.debug("used ram: " + str(used_ram) + " used cpu: " + str(used_cpu) + " host ram: " + str(host['RAM']) + " host cpu "+ str(host['CPUs']))
        # Host RAM is stored in GB; convert to MB, then allow 200% over-commit.
        host_ram_after_200_percent_overcommitment = math.floor((host['RAM'] * 1024) * 2)
        host_cpu_after_200_percent_overcommitment = math.floor(host['CPUs'] * 2)
        logger.debug("ram available: %s cpu available: %s cpu < max cpu: %s" % ((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM), ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU), (vCPU <= host['CPUs']) ))
        # The VM's vCPU count must also not exceed the host's physical CPUs.
        if((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU) and (vCPU <= host['CPUs'])):
            selected_hosts.append(host)
        count = count -1
        hosts.remove(host)
    if selected_hosts:
        #Sort selected host list by Ram first then Cpu
        selected_host = sorted(selected_hosts,key=lambda k: k['RAM'])[0]
        return selected_host['id']
    #If no suitable host found
    raise Exception("No active host is available for a new vm.")
def allocate_vm_properties(vm_details):
    """
    Allocates vm properties ( datastore, host, ip address, mac address, vnc port, ram, vcpus)

    Returns a dict consumed by the install pipeline; raises when a
    datastore, host, IP or port cannot be allocated.
    """
    logger.debug("Inside allocate_vm_properties()...")
    vm_properties = {}
    vm_properties['datastore'] = _choose_datastore()
    logger.debug("Datastore selected is: " + str(vm_properties['datastore']))
    vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU)
    logger.debug("Host selected is: " + str(vm_properties['host']))
    # public_ip on the request row doubles as the "wants a public IP" flag.
    vm_properties['public_ip_req'] = False if (vm_details.public_ip == None) else True
    vm_properties['security_domain'] = vm_details.security_domain
    _choose_mac_ip_vncport(vm_properties)
    logger.debug("MAC is : " + str(vm_properties['mac_addr']) + " IP is : " + str(vm_properties['private_ip']) + " VNCPORT is : " \
                 + str(vm_properties['vnc_port']) + " Vlan tag is " + str(vm_properties['vlan_tag']) )
    vm_properties['ram'] = vm_details.RAM
    vm_properties['vcpus'] = vm_details.vCPU
    return vm_properties
def create_vm_image(vm_details, datastore):
    """
    Create a VM image
    - Creates a directory for the new VM using vm_identity
    - Find the location of template image requested for
    - Copy the template image from its location to new vm directory

    Returns ``(template_row, vm_image_path)``. Raises when the VM
    directory already exists or the copy/rename fails.
    """
    # Creates a directory for the new vm
    vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
    logger.debug("Creating vm directory...")
    if not os.path.exists (vm_directory_path):
        os.makedirs(vm_directory_path)
    else:
        raise Exception("Directory with same name as vmname already exists.")
    # Finds the location of template image that the user has requested for its vm.
    template = current.db.template[vm_details.template_id]
    vm_image_name = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2'
    # Copies the template image from its location to new vm directory
    # NetApp NFS storage uses ndmpcopy (runs on the filer); anything else
    # falls back to a plain local cp.
    storage_type = config.get("GENERAL_CONF","storage_type")
    copy_command = 'ndmpcopy ' if storage_type == current.STORAGE_NETAPP_NFS else 'cp '
    #template_dir = get_constant('vm_templates_datastore')
    if copy_command == 'cp ':
        template_location = datastore.system_mount_point + '/' + get_constant('templates_dir') + '/' + template.hdfile
        logger.debug("cp %s %s" % (template_location, vm_image_name))
        rc = os.system("cp %s %s" % (template_location, vm_image_name))
        if rc != 0:
            logger.error("Copy not successful")
            raise Exception("Copy not successful")
        else:
            logger.debug("Copied successfully")
    elif copy_command == 'ndmpcopy ':
        template_dir = template.datastore_id.path
        logger.debug(template_dir)
        logger.debug("Copy in progress when storage type is " + str(storage_type))
        # ndmpcopy is executed remotely on the datastore appliance.
        command_to_execute = copy_command + template_dir + '/' + get_constant("templates_dir") + '/' + \
                             template.hdfile + ' ' + datastore.path + '/' + get_constant('vms') + '/' + \
                             vm_details.vm_identity
        logger.debug("ndmpcopy command: " + str(command_to_execute))
        command_output = execute_remote_cmd(datastore.ds_ip, datastore.username, command_to_execute, datastore.password)
        logger.debug(command_output)
        logger.debug("Copied successfully.")
        # ndmpcopy keeps the template's filename; rename it to <vm>.qcow2.
        try:
            vm_template_name = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity + '/' + template.hdfile
            os.rename(vm_template_name, vm_image_name)
            logger.debug("Template renamed successfully")
        except:
            logger.debug("Template rename not successful")
            raise Exception("Template rename not successful")
    return (template, vm_image_name)
def _get_install_command(vm_details, vm_image_location, vm_properties):
    """
    Generates install command for vm

    Builds the ``virt-install`` command line. Windows guests skip the
    virtio bus/model options; non-amd64 Linux guests get an explicit
    ``--arch`` flag; QCOW2 templates add ``format=qcow2`` to the disk.
    """
    template = vm_properties['template']
    bus = ',bus=virtio'
    optional = ' --import --os-type=' + template.os
    model = ',model=virtio'
    if (template.arch != 'amd64' and template.os == 'Linux'):
        optional = optional + ' --arch=' + template.arch + ' '
    format_command = ''
    if (template.type == 'QCOW2'):
        format_command = ',format=qcow2'
    if (template.os == 'Windows'):
        # Windows templates lack virtio drivers; use default bus/model.
        bus = ''
        model = ''
    install_command = 'virt-install \
                       --name=' + vm_details.vm_identity + ' \
                       --ram=' + str(vm_properties['ram']) + ' \
                       --vcpus=' + str(vm_properties['vcpus']) + optional + ' \
                       --disk path=' + vm_image_location + format_command + bus + ',cache=none' + ' \
                       --network network='+current.LIBVIRT_NETWORK + model + ',mac=' + vm_properties['mac_addr'] + ' \
                       --graphics vnc,port=' + vm_properties['vnc_port'] + ',listen=0.0.0.0,password=duolc \
                       --noautoconsole \
                       --autostart \
                       --force'
    return install_command
def _generate_disk_xml(diskpath,target_disk):
"""
Generates xml for defining new disk
"""
root_element = etree.Element('disk',attrib = {'type':'block','device':'disk'})
etree.SubElement(root_element, 'driver',attrib = {'name':'qemu','cache':'none', 'type':'qcow2'})
etree.SubElement(root_element, 'source', attrib = {'dev':diskpath})
etree.SubElement(root_element, 'target', attrib = {'dev': target_disk})
return (etree.tostring(root_element))
def create_extra_disk_image(vm_details, disk_name, size, datastore):
    """
    Create extra disk image

    Creates a qcow2 image of ``size`` GB under the datastore's extra-disks
    directory. Returns True on success, False when qemu-img fails.
    """
    vm_extra_disks_directory_path = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                    datastore.ds_name + '/' + vm_details.vm_identity
    if not os.path.exists (vm_extra_disks_directory_path):
        logger.debug("Making Directory")
        os.makedirs(vm_extra_disks_directory_path)
    diskpath = vm_extra_disks_directory_path + '/' + disk_name
    command= "qemu-img create -f qcow2 "+ diskpath + " " + str(size) + "G"
    output = os.system(command)
    return False if output != 0 else True
def attach_disk(vm_details, disk_name, hostip, already_attached_disks, new_vm):
    """
    Attach given disk to the VM

    Attaches the named extra disk to the domain's persistent config. New
    VMs are stopped, modified and restarted; existing VMs must already be
    shut down. Returns the disk size on success, 0 on any failure (all
    exceptions are swallowed and logged).
    """
    try:
        (connection_object, domain) = getVirshDomainConn(None, hostip, vm_details.vm_identity)
        #already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm.id).select())
        logger.debug("Value of alreadyattached is : " + str(already_attached_disks))
        (diskpath, device_present, disk_size) = get_extra_disk_location(vm_details.datastore_id, vm_details.vm_identity, disk_name, True)
        if not device_present:
            raise Exception("Device to be attached %s missing" %(diskpath))
        # Attaching disk to vm using libvirt API
        # Next free device name: vda is the system disk, so the Nth extra
        # disk becomes vdb, vdc, ...
        target_disk = "vd" + chr(97 + already_attached_disks + 1)
        logger.debug(target_disk)
        logger.debug("...................")
        xmlDescription = _generate_disk_xml(diskpath, target_disk)
        logger.debug(xmlDescription)
        logger.debug("new vm is %s " % new_vm)
        if new_vm:
            # Fresh install: stop the domain, attach persistently, restart.
            logger.debug("Starting to attach disk on new vm request.")
            domain.destroy()
            logger.debug("VM destroyed")
            domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG)
            logger.debug("Disk attached")
            logger.debug("Turn on vm")
            domain.create()
            logger.debug("VM started")
            domain.isActive()
        elif vm_details.status == current.VM_STATUS_SHUTDOWN:
            logger.debug("Starting to attach disk while vm is shutdown.")
            domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG)
            logger.debug("Disk attached")
        else:
            raise Exception("VM is not in shutdown state. Check its status on host")
        # Re-define the domain from its own XML so the change persists.
        xmlfile = domain.XMLDesc(0)
        domain = connection_object.defineXML(xmlfile)
        logger.debug("VM XML redefined")
        connection_object.close()
        return disk_size
    except:
        # Best-effort: callers treat 0 as "attach failed".
        logger.exception('Exception: ')
        return 0
def serve_extra_disk_request(vm_details, disk_size, host_ip, new_vm = False):
    """
    Serves extra disk request and updates db

    Creates a new qcow2 disk on the best datastore, attaches it to the VM
    and records the attachment plus datastore usage. Returns True on
    success, False when creation or attach failed.
    """
    logger.debug("Starting to serve extra disk request...")
    logger.debug("new vm is %s " % new_vm)
    datastore = _choose_datastore()
    already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm_details.id).select())
    disk_name = vm_details.vm_identity + "_disk" + str(already_attached_disks + 1) + ".qcow2"
    disk_created = create_extra_disk_image(vm_details, disk_name, disk_size, datastore)
    # NOTE(review): mutates the in-memory row only; the attach path reads
    # vm_details.datastore_id when locating the disk.
    vm_details.datastore_id = datastore.id
    if disk_created:
        # attach_disk returns the disk size (truthy) on success, 0 on failure.
        if (attach_disk(vm_details, disk_name, host_ip, already_attached_disks, new_vm)):
            current.db.attached_disks.insert(vm_id = vm_details.id, datastore_id = datastore.id , attached_disk_name = disk_name, capacity = disk_size)
            current.db(current.db.datastore.id == datastore.id).update(used = int(datastore.used) + int(disk_size))
            return True
    return False
def launch_vm_on_host(vm_details, vm_image_location, vm_properties):
    """
    Launches a vm image on host

    Runs virt-install remotely on the chosen host, applies the VLAN
    portgroup and serves any extra-HDD request. Returns a status message
    describing the extra-disk outcome (empty when none was requested).
    """
    attach_disk_status_message = ''
    install_command = _get_install_command(vm_details, vm_image_location, vm_properties)
    # Starts installing a vm
    host_ip = current.db.host[vm_properties['host']].host_ip.private_ip
    logger.debug("Installation started...")
    logger.debug("Host is "+ host_ip)
    logger.debug("Installation command : " + install_command)
    command_output = execute_remote_cmd(host_ip, 'root', install_command)
    logger.debug(command_output)
    logger.debug("Starting to set portgroup in vm...")
    _set_portgroup_in_vm(vm_details['vm_identity'], vm_properties['vlan_name'], host_ip, vm_properties['vlan_tag'])
    logger.debug("Portgroup set in vm")
    # Serving HDD request
    if (int(vm_details.extra_HDD) != 0):
        if (serve_extra_disk_request(vm_details, vm_details.extra_HDD, host_ip, new_vm = True)):
            message = "Attached extra disk successfully."
            attach_disk_status_message += message
            logger.debug(message)
        else:
            attach_disk_status_message += "Attached extra disk failed."
    return attach_disk_status_message
def check_if_vm_defined(hostip, vmname):
    """
    Checks if a newly created vm is successfully defined

    Returns True only when the domain exists on the host AND is currently
    running (its ID appears in the active-domain list); any libvirt error
    is treated as "not defined".
    """
    vm_defined = False
    try:
        connection_object = libvirt.openReadOnly('qemu+ssh://root@'+ hostip +'/system')
        domain = connection_object.lookupByName(vmname)
        if domain.ID() in connection_object.listDomainsID():
            vm_defined = True
        connection_object.close()
        return vm_defined
    except:
        # lookupByName raises when the domain does not exist; also covers
        # connection failures — both mean "not defined" to callers.
        return False
def _free_vm_properties(vm_details, vm_properties):
    """
    Frees vm properties in-case installation has failed mid-way

    Rollback for a failed install: destroys/undefines the half-created
    domain on its host, deletes attached-disk rows, and removes the VM and
    extra-disk directories from the datastore.
    """
    logger.debug("VM installation fails..Starting to free vm properties")
    if vm_properties:
        host_ip_of_vm = current.db.host[vm_properties['host']].host_ip.private_ip
        logger.debug("Host IP of vm is " + str(host_ip_of_vm))
        if check_if_vm_defined(host_ip_of_vm, vm_details.vm_identity):
            connection_object = libvirt.open('qemu+ssh://root@'+ host_ip_of_vm +'/system')
            domain = connection_object.lookupByName(vm_details.vm_identity)
            logger.debug("Starting to delete vm from host..")
            domain.destroy()
            domain.undefine()
            connection_object.close()
            logger.debug("VM deleted.")
        # Remove any disk-attachment rows created before the failure.
        current.db(current.db.attached_disks.vm_id == vm_details.id).delete()
        if 'datastore' in vm_properties:
            vm_directory_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
            vm_extra_disk_dir_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + vm_properties['datastore'].ds_name + '/' + vm_details.vm_identity
            if os.path.exists (vm_directory_path):
                logger.debug("Starting to delete vm directory.")
                shutil.rmtree(vm_directory_path)
            if os.path.exists (vm_extra_disk_dir_path):
                logger.debug("Starting to delete vm extra disk directory.")
                shutil.rmtree(vm_extra_disk_dir_path)
    return
def update_db_after_vm_installation(vm_details, vm_properties, parent_id = None):
    """
    Updates db after a vm is installed successfully

    Bumps the datastore usage, resolves IP pool row ids, and writes the
    final vm_data row. A non-None ``parent_id`` marks a clone and leaves
    the VM in SHUTDOWN state instead of RUNNING.
    """
    logger.debug("Starting to update db after vm installation..")
    hostid = vm_properties['host']
    datastore = vm_properties['datastore']
    template_hdd = vm_properties['template'].hdd
    logger.debug("Inside update db after installation")
    logger.debug(vm_properties)
    # Updating the used entry of datastore
    current.db(current.db.datastore.id == datastore.id).update(used = int(datastore.used) + int(vm_details.extra_HDD) +
                                                               int(template_hdd))
    # vm_data stores pool-row ids, not the IP strings themselves.
    private_ip_id = current.db.private_ip_pool(private_ip=vm_properties['private_ip']).id
    public_ip_id = None
    if vm_properties['public_ip'] != None:
        public_ip_id = current.db.public_ip_pool(public_ip=vm_properties['public_ip']).id
    if parent_id:
        vm_status = current.VM_STATUS_SHUTDOWN
    else:
        vm_status = current.VM_STATUS_RUNNING
    # Update vm_data table
    current.db(current.db.vm_data.id == vm_details.id).update( host_id = hostid,
                                                               extra_HDD = vm_details.extra_HDD,
                                                               datastore_id = datastore.id,
                                                               vnc_port = vm_properties['vnc_port'],
                                                               private_ip = private_ip_id,
                                                               public_ip = public_ip_id,
                                                               start_time = get_datetime(),
                                                               parent_id = parent_id,
                                                               status = vm_status)
    logger.debug("Updated db")
    return
def create_object_store(parameters,object_data):
    """Create an object store via a shell script and record its access keys.

    Copies the generated S3/Swift credentials from /home/key.txt into a
    per-store key file and into the ``object_data`` record. Returns a
    (task-status, message) tuple; failures are caught and reported.
    """
    try:
        logger.debug("In create_object_store() function...")
        object_name=object_data['object_store_name']
        size_limit=object_data['object_store_size']
        sh_path = os.path.join(get_context_path(), 'private/object_storage.sh')
        # NOTE(review): command is built but never executed here — presumably
        # run elsewhere or dead code; confirm.
        command = 'sh %s %s %s' %(sh_path, object_name, str(size_limit))
        logger.debug("command :%s" %command)
        file_name= object_data['object_store_name'] + "_key.txt"
        file_path = os.path.join(get_context_path(), 'private/Object_keys/' + file_name)
        cp = os.open(file_path,os.O_RDWR|os.O_CREAT)
        # NOTE(review): mode "rw+" is not a valid fdopen mode under Python 3
        # (Python 2 tolerated it) — would need "r+"/"w+" on migration.
        co = os.fdopen(cp,"rw+")
        fd = os.open('/home/key.txt',os.O_RDWR|os.O_CREAT)
        fo = os.fdopen(fd,"r+")
        # key.txt line order: swift secret, s3 secret, s3 access (each
        # "label value"); mirror them into the per-store key file.
        key_s3_secret= fo.readline();
        co.write(key_s3_secret);
        key_s3_access= fo.readline();
        co.write(key_s3_access);
        key_swift_secret= fo.readline();
        co.write(key_swift_secret);
        swift_user= 'Swift_user: ' + object_name + ':swift'
        co.write(swift_user)
        co.close()
        # Strip the "label " prefix from each line, keeping only the key.
        a,b,key_swift_secret= key_swift_secret.partition(' ') # @UnusedVariable
        a,b,key_s3_secret= key_s3_secret.partition(' ') # @UnusedVariable
        a,b,key_s3_access= key_s3_access.partition(' ') # @UnusedVariable
        #print key_s3_secret, key_s3_access , key_swift_secret
        object_data.update_record(swift_access_key= key_swift_secret.strip() , s3_secret_key= key_s3_secret.strip(), s3_access_key= key_s3_access.strip(), status=3)
        fo.close()
        message = "Object Store is created successfully."
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
# Installs a vm
def install(parameters):
    """
    Installs a vm

    Full install pipeline: allocate properties, create the disk image,
    launch via virt-install, optionally NAT-map a public IP, then persist
    to the db. On any failure everything allocated so far is rolled back.
    Returns a (task-status, message) tuple.
    """
    vmid = parameters['vm_id']
    logger.debug("In install() function...")
    vm_details = current.db.vm_data[vmid]
    vm_properties = None
    try:
        # Fetches vm details from vm_data table
        logger.debug("VM details are: " + str(vm_details))
        # Calling allocate_vm_properties function
        vm_properties = allocate_vm_properties(vm_details)
        # Calling create_vm_image function
        (vm_properties['template'], vm_image_location) = create_vm_image(vm_details, vm_properties['datastore'])
        # Calling launch_vm_on_host
        attach_disk_status_message = launch_vm_on_host(vm_details, vm_image_location, vm_properties)
        # Checking if vm has been installed successfully
        assert(check_if_vm_defined(current.db.host[vm_properties['host']].host_ip.private_ip, vm_details.vm_identity)), "VM is not installed. Check logs."
        if vm_properties['public_ip_req']:
            create_mapping(vm_properties['public_ip'], vm_properties['private_ip'])
        # Update database after vm installation
        update_db_after_vm_installation(vm_details, vm_properties)
        message = "VM is installed successfully." + attach_disk_status_message
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        # Roll back host/datastore artifacts before reporting failure.
        if vm_properties != None:
            _free_vm_properties(vm_details, vm_properties)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def start(parameters):
    """
    Starts a vm

    Refuses if libvirt already reports the domain running; otherwise boots
    it and marks the db row RUNNING. Returns a (task-status, message) tuple.
    """
    logger.debug("Inside start() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        state = domain.info()[0]
        if state == VIR_DOMAIN_RUNNING:
            raise Exception("VM is already running. Check vm status on host.")
        domain.create()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_RUNNING)
        message = vm_details.vm_identity + " is started successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def suspend(parameters):
    """
    Pause a running VM (task-queue entry point).

    Pauses the libvirt domain for parameters['vm_id'] and records
    VM_STATUS_SUSPENDED in vm_data.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside suspend() function")
    vmid = parameters['vm_id']
    vm_record = current.db.vm_data[vmid]
    try:
        dom = getVirshDomain(vm_record)
        state = dom.info()[0]
        # Guard: pausing an already-paused domain would mask a stale DB status.
        if state == VIR_DOMAIN_PAUSED:
            raise Exception("VM is already paused. Check vm status on host.")
        dom.suspend()
        current.db(current.db.vm_data.id == vmid).update(status = current.VM_STATUS_SUSPENDED)
        message = vm_record.vm_identity + " is suspended successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def resume(parameters):
    """
    Resume a paused VM (task-queue entry point).

    Resumes the libvirt domain for parameters['vm_id'] and records
    VM_STATUS_RUNNING in vm_data.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside resume() function")
    vmid = parameters['vm_id']
    vm_record = current.db.vm_data[vmid]
    try:
        dom = getVirshDomain(vm_record)
        state = dom.info()[0]
        # Guard: resuming a domain that is already running is a status mismatch.
        if state == VIR_DOMAIN_RUNNING:
            raise Exception("VM is already running. Check vm status on host.")
        dom.resume()
        current.db(current.db.vm_data.id == vmid).update(status = current.VM_STATUS_RUNNING)
        message = vm_record.vm_identity + " is resumed successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def destroy(parameters):
    """
    Forcefully power off a VM (task-queue entry point).

    Hard-stops the libvirt domain via destroy() and records
    VM_STATUS_SHUTDOWN in vm_data.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside destroy() function")
    vmid = parameters['vm_id']
    vm_record = current.db.vm_data[vmid]
    logger.debug(str(vm_record))
    try:
        dom = getVirshDomain(vm_record)
        state = dom.info()[0]
        if state == VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is already shutoff. Check vm status on host.")
        dom.destroy()
        current.db(current.db.vm_data.id == vmid).update(status = current.VM_STATUS_SHUTDOWN)
        message = vm_record.vm_identity + " is destroyed successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def shutdown(parameters):
    """
    Gracefully stop a VM (task-queue entry point).

    Saves the domain state to disk via managedSave() and records
    VM_STATUS_SHUTDOWN in vm_data.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside shutdown() function")
    vmid = parameters['vm_id']
    vm_record = current.db.vm_data[vmid]
    logger.debug(str(vm_record))
    try:
        dom = getVirshDomain(vm_record)
        state = dom.info()[0]
        if state == VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is already shutoff. Check vm status on host.")
        # NOTE(review): managedSave() suspends-to-disk (state restored on
        # next start) rather than issuing an ACPI shutdown -- presumably
        # intentional; confirm.
        dom.managedSave()
        current.db(current.db.vm_data.id == vmid).update(status = current.VM_STATUS_SHUTDOWN)
        message = vm_record.vm_identity + " is shutdown successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def _clean_up_database_after_vm_deletion(vm_details):
    """
    Cleans up database after vm deletion

    Archives the VM's image directory, removes its extra-disk directory,
    returns the freed space to the datastore 'used' counter, detaches the
    VM from task_queue_event rows and deletes its attached_disks rows.
    NOTE: despite the name, this also moves files on disk.
    """
    logger.debug("Inside clean up database after vm deletion () function...")
    # moving vm image folder to archives folder
    archive_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('archives_dir')
    if not os.path.exists(archive_directory_path):
        os.makedirs(archive_directory_path)
    source_file = vm_details.datastore_id.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
    # Timestamp suffix keeps archive names unique across repeated delete/recreate cycles.
    archive_filename = vm_details.vm_identity + str(get_datetime())
    logger.debug(archive_filename)
    destination_file = archive_directory_path + '/' + archive_filename
    shutil.move(source_file, destination_file)
    # removing hdd
    vm_extra_disks_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                    vm_details.datastore_id.ds_name + "/" + vm_details.vm_identity
    if os.path.exists(vm_extra_disks_directory_path):
        shutil.rmtree(vm_extra_disks_directory_path)
    # updating the used entry of database
    # Freed space = this VM's extra disks plus its template's base HDD.
    current.db(current.db.datastore.id == vm_details.datastore_id).update(used = int(vm_details.datastore_id.used) - \
                                                                        (int(vm_details.extra_HDD) + int(vm_details.template_id.hdd)))
    # updating task_queue_event entry to remove reference of VM
    current.db(current.db.task_queue_event.vm_id == vm_details.id).update(vm_id = None)
    # deleting entry of extra disk of vm
    current.db(current.db.attached_disks.vm_id == vm_details.id).delete()
    logger.debug("Database cleaned")
def vm_has_snapshots(vm_id):
    """
    Checks if a vm has snapshot(s)

    Returns True when at least one snapshot row exists for the VM.
    Uses count() so the DAL issues a COUNT query instead of fetching
    every snapshot row just to test truthiness.
    """
    return current.db(current.db.snapshot.vm_id == vm_id).count() > 0
def delete(parameters):
    """
    Deletes a vm

    Force-stops the domain if it is running or suspended, undefines it
    (dropping snapshot metadata), releases its NAT mapping, archives its
    image via _clean_up_database_after_vm_deletion() and removes the
    vm_data row.  Returns a (task_status, message) tuple.
    """
    logger.debug("Inside delete() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        logger.debug(str(vm_details.status))
        if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED):
            logger.debug("Vm is not shutoff. Shutting it off first.")
            domain.destroy()
        logger.debug("Starting to delete it...")
        # Undefine together with snapshot metadata so libvirt does not
        # refuse to remove a domain that still carries snapshots.
        domain.undefineFlags(VIR_DOMAIN_UNDEFINE_SNAPSHOTS_METADATA )
        if vm_details.public_ip:
            # Drop the public->private NAT mapping before the rows go away.
            remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
        message = vm_details.vm_identity + " is deleted successfully."
        logger.debug(message)
        _clean_up_database_after_vm_deletion(vm_details)
        current.db(current.db.vm_data.id == vm_id).delete()
        current.db.commit()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration):
    """
    Migrate domain with snapshots

    A QEMU domain carrying snapshot metadata cannot be migrated directly,
    so this:
      1. dumps each snapshot's XML into vm_backup_during_migration via
         'virsh snapshot-dumpxml' on the source host,
      2. deletes the snapshots from the source domain,
      3. migrates the domain to destination_host_ip,
      4. redefines the snapshots from the dumps on the destination and
         restores the current-snapshot marker.
    On failure the caller (migrate_domain) runs undo_migration() using
    the same dump directory.
    """
    # XML dump of snapshot(s) of the vm
    logger.debug("Starting to take xml dump of the snapshot(s) of the vm... ")
    if not os.path.exists(vm_backup_during_migration):
        os.makedirs(vm_backup_during_migration)
    for domain_snapshot in domain_snapshots_list:
        logger.debug("snapshot name is " + str(domain_snapshot))
        # Each dump file is named 'dump_<snapshot>' so undo/redefine can find it.
        dump_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_dumpxml_command = 'virsh snapshot-dumpxml %s %s > %s' % ( vm_details.vm_identity, domain_snapshot, dump_xml_path)
        logger.debug("Taking xml dump of" + str(domain_snapshot))
        command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_dumpxml_command)
        logger.debug(command_output)
        logger.debug("XML dump of " + str(domain_snapshot) + "succeeded.")
    # Delete snapshot(s) of the vm and migrate it to destination host
    logger.debug("Starting to delete snapshots of the vm....")
    for domain_snapshot in domain_snapshots_list:
        snapshot = domain.snapshotLookupByName(domain_snapshot, 0)
        snapshot.delete(0)
    logger.debug("Migrating the vm to destination host...")
    domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0)
    # Redefine all the snapshot(s) of the vm on the destination host and set current snapshot
    logger.debug("Starting to redefine all the snapshot(s) of the domain...")
    for domain_snapshot in domain_snapshots_list:
        redefine_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_redefine_command = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, redefine_xml_path)
        command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_redefine_command)
        logger.debug(command_output)
    snapshot_current_command = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
    command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_current_command)
    logger.debug(command_output)
    return
def _clean_migration_directory(vm_backup_during_migration):
"""
Delete directory created for storing dumpxml of vm snapshots
"""
if os.path.exists(vm_backup_during_migration):
shutil.rmtree(vm_backup_during_migration)
return
def undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration):
    """
    Undo the migration

    Called from the failure path of migrate_domain(): the snapshots were
    deleted from the source domain before migrating, so redefine them on
    the source host from the saved XML dumps, restore the
    current-snapshot marker, and remove the temporary dump directory.
    """
    if domain_snapshots_list:
        # Redefine the snapshots of the vm on the source host
        logger.debug("Starting to redefine all the snapshot(s) of the vm on the source host...")
        for domain_snapshot in domain_snapshots_list:
            redefine_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
            snapshot_redefine_command = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, redefine_xml_path)
            # Last two args: env=None, best-effort=True -- keep rolling back even if one snapshot fails.
            command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_redefine_command, None, True)
            logger.debug(command_output)
        snapshot_current_command = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
        command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_current_command, None, True)
        logger.debug(command_output)
    # Delete directory created for storing dumpxml of vm snapshots
    _clean_migration_directory(vm_backup_during_migration)
    return
def migrate_domain(vm_id, destination_host_id=None, live_migration=False):
    """
    Migrate domain

    Migrates the VM to destination_host_id (auto-selected via
    find_new_host() when None).  Migration flags are derived from the
    VM's status; VMs with DB-recorded snapshots are routed through
    migrate_domain_with_snapshots().  On failure undo_migration()
    restores snapshots on the source host.
    Returns a (task_status, message) tuple.
    """
    vm_details = current.db.vm_data[vm_id]
    domain_snapshots_list = []
    current_snapshot_name = ''
    vm_migration_directory = get_constant('vm_migration_data')
    # Scratch directory for snapshot XML dumps taken during migration.
    vm_backup_during_migration = vm_details.datastore_id.system_mount_point + '/' + vm_migration_directory + '/' + \
                                 vm_details.vm_identity
    if destination_host_id == None:
        destination_host_id = find_new_host(vm_details.RAM, vm_details.vCPU)
    destination_host_ip = current.db.host[destination_host_id].host_ip.private_ip
    flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_UNSAFE
    if live_migration:
        flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_LIVE
    if vm_details.status == current.VM_STATUS_SUSPENDED:
        logger.debug("Vm is suspended")
        flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_PAUSED
    elif vm_details.status == current.VM_STATUS_SHUTDOWN:
        logger.debug("Vm is shut off")
        flags |= VIR_MIGRATE_OFFLINE
    logger.debug("Flags: " + str(flags))
    try:
        domain = getVirshDomain(vm_details)
        dom_snapshot_names = domain.snapshotListNames(0)
        # Partition libvirt snapshots: those tracked in the DB are migrated,
        # anything left over in dom_snapshot_names is an orphan.
        for snapshot in current.db(current.db.snapshot.vm_id == vm_id).select():
            logger.debug("snapshot:" + str(snapshot.snapshot_name))
            domain_snapshots_list.append(snapshot.snapshot_name)
            dom_snapshot_names.remove(snapshot.snapshot_name)
        logger.debug("domain snapshot list is " + str(domain_snapshots_list))
        for dom_snapshot in dom_snapshot_names:
            logger.debug("Deleting orphan snapshot %s" %(dom_snapshot))
            snapshot = domain.snapshotLookupByName(dom_snapshot, 0)
            snapshot.delete(0)
        if domain_snapshots_list:
            # Remember the current snapshot so it can be restored after migration.
            current_snapshot = domain.snapshotCurrent(0)
            current_snapshot_name = current_snapshot.getName()
            migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration)
        else:
            domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0)
        vm_details.update_record(host_id = destination_host_id)
        current.db.commit()
        # Delete directory created for storing dumpxml of vm snapshot
        _clean_migration_directory(vm_backup_during_migration)
        message = vm_details.vm_identity + " is migrated successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def migrate_domain_datastore(vmid, destination_datastore_id, live_migration=False):
    """
    Migrate VM domain from one datastore to another.
    - Copy VM Image to new datastore (plain cp when offline, libvirt
      blockRebase + pivot when live)
    - Update VM XML definition to reference the new image path
    - Update database and remove the old image directory
    Returns a (task_status, message) tuple.
    """
    logger.debug(sys.path)
    vm_details = current.db.vm_data[vmid]
    # datastore_id = vm_details["datastore_id"]
    logger.debug("Inside live disk migration block")
    connection_object = None  # so the except handler can close() safely
    try:
        (connection_object, domain) = getVirshDomainConn(vm_details)
        datastore = current.db.datastore[destination_datastore_id]
        vm_directory_path = datastore.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        logger.debug("Creating vm directory on other datastore...")
        if not os.path.exists (vm_directory_path):
            os.makedirs(vm_directory_path)
        diskpath = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2'
        current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        current_disk_file = current_disk_path + '/' + vm_details.vm_identity + '.qcow2'
        logger.debug(current_disk_file)
        xmlfile = domain.XMLDesc(0)
        # Parse the domain XML up front: BOTH branches need `root` when the
        # disk <source> is rewritten below.  (Previously it was parsed only
        # in the live branch, so offline migration hit a NameError.)
        root = etree.fromstring(xmlfile)
        if(live_migration==False):
            # Offline: plain file copy of the qcow2 image.
            rc = os.system("cp %s %s" % (current_disk_file, diskpath))
            if rc != 0:
                logger.error("Copy not successful")
                raise Exception("Copy not successful")
            else:
                logger.debug("Copied successfully")
        else:
            # Live: block-copy the disk while the domain keeps running.
            # blockRebase COPY requires a transient domain, so undefine it
            # first; it is redefined below with the updated XML.
            if domain.isActive():  # BUG FIX: was `domain.isActive` (bound method, always truthy)
                domain.undefine()
            target_elem = root.find("devices/disk/target")
            target_disk = target_elem.get('dev')
            flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY
            domain.blockRebase(target_disk, diskpath, 0, flag)
            # Poll until the copy job reaches steady state (cur == end),
            # then pivot the domain onto the new image.
            block_info_list = domain.blockJobInfo(current_disk_file,0)
            while(block_info_list['end'] != block_info_list['cur']):
                logger.debug("time to sleep")
                time.sleep(60)
                block_info_list = domain.blockJobInfo(current_disk_file,0)
            domain.blockJobAbort(current_disk_file, VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT)
        # Rewrite the disk <source> to the new path and persist the definition.
        source_elem = root.find("devices/disk/source")
        source_elem.set('file',diskpath)
        newxml_file = etree.tostring(root)
        domain = connection_object.defineXML(newxml_file)
        vm_details.update_record(datastore_id=destination_datastore_id)
        # Remove the old image (and its directory) only once the new copy exists.
        if os.path.exists (diskpath):
            os.remove(current_disk_file)
            restore_symboltable_path = current_disk_path+"/restore_symboltable"
            if os.path.exists (restore_symboltable_path):
                logger.debug(restore_symboltable_path)
                os.remove(restore_symboltable_path)
            os.rmdir(current_disk_path)
        connection_object.close()
        message = vm_details.vm_identity + " is migrated successfully to new datastore."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        #undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id)
        # connection_object may be None if getVirshDomainConn itself failed.
        if connection_object is not None:
            connection_object.close()
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id):
    """
    Undo migration in case of any issue

    Reverts the vm_data record to the original datastore, waits out and
    aborts any outstanding block-copy job (without pivoting, leaving the
    domain on its original image), and removes the partially copied disk
    image together with its directory.
    """
    # undo database changes
    vm_details.update_record(datastore_id=datastore_id)
    if domain.isActive():  # BUG FIX: was `domain.isActive` (bound method, always truthy)
        logger.debug("domain is active")
        block_info_list = domain.blockJobInfo(current_disk_file,0)
        if block_info_list:
            # A block job is still running: let it settle, then abort with
            # no pivot flag so the domain stays on the original image.
            while(block_info_list['end'] != block_info_list['cur']):
                logger.debug("time to sleep")
                time.sleep(60)
                block_info_list = domain.blockJobInfo(current_disk_file,0)
            if(block_info_list['end'] == block_info_list['cur']):
                domain.blockJobAbort(current_disk_file)
                block_info_list = domain.blockJobInfo(current_disk_file,0)
    if os.path.exists (diskpath):
        os.remove(diskpath)
    os.rmdir(vm_directory_path)
def migrate(parameters):
    """
    Migrate a VM to a new host (task-queue entry point).

    Delegates to migrate_domain(); live migration is requested when the
    'live_migration' parameter is the string 'on'.
    """
    vmid = parameters['vm_id']
    logger.debug("Inside migrate() function for vm_id: "+str(vmid))
    destination_host_id = parameters['destination_host']
    live_migration = parameters['live_migration'] == 'on'
    return migrate_domain(vmid, destination_host_id, live_migration)
def migrate_datastore(parameters):
    """
    Migrate a VM's disk image to a new datastore (task-queue entry point).

    Delegates to migrate_domain_datastore(); live migration is requested
    when the 'live_migration' parameter is the string 'on'.
    """
    logger.debug("Inside migrate_datastore() function")
    vmid = parameters['vm_id']
    destination_ds_id = parameters['destination_ds']
    live_migration = parameters['live_migration'] == 'on'
    return migrate_domain_datastore(vmid, destination_ds_id, live_migration)
def snapshot(parameters):
    """
    Snapshots a vm

    Creates a libvirt snapshot named after the current timestamp and
    records it in the snapshot table.  For cron-driven types (anything
    other than SNAPSHOT_USER) the previous snapshot of the same type is
    deleted first.  The VM must answer ping before snapshotting.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside snapshot() function")
    vm_id = parameters['vm_id']
    snapshot_type = parameters['snapshot_type']
    try:
        vm_details = current.db.vm_data[vm_id]
        if is_pingable(str(vm_details.private_ip.private_ip)):
            logger.debug("VM is pingable. Starting to start with snapshotting...")
            if snapshot_type != current.SNAPSHOT_USER:
                snapshots = current.db((current.db.snapshot.vm_id == vm_id) & (current.db.snapshot.type == snapshot_type)).select()
                #Delete the existing Daily/Monthly/Yearly snapshot
                for snapshot_cron in snapshots:
                    logger.debug(snapshot_cron)
                    delete_snapshot({'vm_id':vm_id, 'snapshot_id':snapshot_cron.id})
            # Human-readable timestamp doubles as the snapshot name.
            snapshot_name = get_datetime().strftime("%I:%M%p_%B%d,%Y")
            domain = getVirshDomain(vm_details)
            xmlDesc = "<domainsnapshot><name>%s</name></domainsnapshot>" % (snapshot_name)
            domain.snapshotCreateXML(xmlDesc, 0)
            message = "Snapshotted successfully."
            current.db.snapshot.insert(vm_id = vm_id, datastore_id = vm_details.datastore_id, snapshot_name = snapshot_name, type = snapshot_type)
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            # NOTE(review): this assignment is dead -- the raised exception
            # carries the same text and the except path rebuilds the message.
            message = "Unable to ping VM before snapshoting: %s" % (vm_details.private_ip.private_ip)
            raise Exception("Unable to ping VM before snapshoting: %s" % (vm_details.private_ip.private_ip))
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def revert(parameters):
    """
    Revert a VM to a previously taken snapshot (task-queue entry point).

    Looks up the snapshot row by parameters['snapshot_id'] and reverts
    the libvirt domain to it.  Returns a (task_status, message) tuple.
    """
    logger.debug("Inside revert snapshot() function")
    vmid = parameters['vm_id']
    snap_id = parameters['snapshot_id']
    vm_record = current.db.vm_data[vmid]
    try:
        dom = getVirshDomain(vm_record)
        snap_row = current.db(current.db.snapshot.id == snap_id).select().first()
        target = dom.snapshotLookupByName(snap_row['snapshot_name'], 0)
        dom.revertToSnapshot(target, 0)
        message = "Reverted to snapshot successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def delete_snapshot(parameters):
    """
    Delete a VM snapshot (task-queue entry point).

    Removes the snapshot from libvirt when it still exists there, then
    deletes the corresponding DB row.  A snapshot already missing on the
    host is tolerated so the DB record can still be cleaned up.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside delete snapshot() function")
    vmid = parameters['vm_id']
    snap_id = parameters['snapshot_id']
    vm_record = current.db.vm_data[vmid]
    logger.debug(str(vm_record))
    try:
        dom = getVirshDomain(vm_record)
        snap_row = current.db(current.db.snapshot.id == snap_id).select().first()
        snap_name = snap_row['snapshot_name']
        snap = None
        try:
            snap = dom.snapshotLookupByName(snap_name, 0)
        except libvirtError:
            # Already gone on the host; fall through and clean the DB row.
            logger.debug("Snapshot %s not found" %(snap_name))
        if snap is not None:
            snap.delete(0)
        message = "Deleted snapshot successfully."
        logger.debug(message)
        current.db(current.db.snapshot.id == snap_id).delete()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def update_security_domain(vm_details, security_domain_id, xmlDesc=None):
    """
    Get new IP for given security domain.
    Update the VM XML with new mac_address and update the information in DB

    Returns the updated domain XML serialised via etree.tostring().
    NOTE(review): xmlDesc defaults to None but is passed straight to
    etree.fromstring -- callers are expected to always supply it; confirm.
    """
    # fetch new private IP from db from given security domain
    private_ip_info = _get_private_ip_mac(security_domain_id)
    # update vm config to add new mac address.
    root = etree.fromstring(xmlDesc)
    mac_elem = root.find("devices/interface[@type='bridge']/mac")
    mac_elem.set('address', private_ip_info.mac_addr)
    vlan_tag_elem = root.find("devices/interface[@type='bridge']/vlan/tag")
    vlan_tag_elem.set('id', private_ip_info.vlan.vlan_tag)
    # update NAT IP mapping, if public IP present
    if vm_details.public_ip:
        remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
        create_mapping(vm_details.public_ip.public_ip, private_ip_info.private_ip)
    # update vm_data
    current.db(current.db.vm_data.id == vm_details.id).update(security_domain = security_domain_id,
                                                              private_ip = private_ip_info.id)
    return etree.tostring(root)
def edit_vm_config(parameters):
    """
    Edits vm configuration

    Applies any combination of edits present in parameters: 'vcpus',
    'ram' (MB), 'public_ip' (truthy to assign a public IP, falsy to
    release it) and 'security_domain'.  Changes target the persistent
    domain configuration; a security-domain change reboots a running VM.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside edit vm config() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    message = ""
    try:
        connection_object, domain = getVirshDomainConn(vm_details)
        if 'vcpus' in parameters:
            new_vcpus = int(parameters['vcpus'])
            # Raise the maximum first, then set the persistent-config value.
            domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_VCPU_MAXIMUM)
            domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_AFFECT_CONFIG)
            message += "Edited vCPU successfully."
            current.db(current.db.vm_data.id == vm_id).update(vCPU = new_vcpus)
        if 'ram' in parameters:
            # DB stores MB; libvirt setMemoryFlags expects KiB.
            new_ram = int(parameters['ram']) * 1024
            logger.debug(str(new_ram))
            domain.setMemoryFlags(new_ram, VIR_DOMAIN_MEM_MAXIMUM)
            domain.setMemoryFlags(new_ram, VIR_DOMAIN_AFFECT_CONFIG)
            message += " And edited RAM successfully."
            current.db(current.db.vm_data.id == vm_id).update(RAM = int(parameters['ram']))
        if 'public_ip' in parameters:
            enable_public_ip = parameters['public_ip']
            if enable_public_ip:
                public_ip_pool = _choose_random_public_ip()
                if public_ip_pool:
                    create_mapping(public_ip_pool.public_ip, vm_details.private_ip.private_ip)
                    current.db.vm_data[vm_id] = dict(public_ip=public_ip_pool.id)
                    message += "Edited Public IP successfully."
                else:
                    raise Exception("Available Public IPs are exhausted.")
            else:
                # Release: drop NAT mapping and clear the FK on vm_data.
                remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
                current.db.vm_data[vm_id] = dict(public_ip = None)
        if 'security_domain' in parameters:
            logger.debug('Updating security domain')
            # Rewrites MAC/VLAN in the XML and redefines the domain with it.
            xmlfile = update_security_domain(vm_details, parameters['security_domain'], domain.XMLDesc(0))
            domain = connection_object.defineXML(xmlfile)
            if domain.isActive():
                # Reboot so the guest picks up the new network identity.
                domain.reboot(0)
            message += "Edited security domain successfully"
        connection_object.close()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def _get_clone_properties(vm_details, cloned_vm_details, vm_properties):
    """
    Get properties for Cloned VM.

    Fills vm_properties in place (datastore, security domain, MAC/IP/VNC,
    template, host) and creates on-disk directories for the clone's image
    and its extra disks.  Extra-disk rows are inserted into
    attached_disks mirroring the parent VM's disks.
    Returns the '--file ...' parameter string for virt-clone.
    Raises when a directory with the clone's name already exists.
    """
    datastore = _choose_datastore()
    vm_properties['datastore'] = datastore
    logger.debug("Datastore selected is: " + str(datastore))
    # Clone inherits the parent's security domain; no public IP by default.
    vm_properties['security_domain'] = vm_details.security_domain
    vm_properties['public_ip_req'] = False
    # Finds mac address, ip address and vnc port for the cloned vm
    _choose_mac_ip_vncport(vm_properties)
    logger.debug("MAC is : " + str(vm_properties['mac_addr']) + " IP is : " + str(vm_properties['private_ip']) + \
                " VNCPORT is : " + str(vm_properties['vnc_port']))
    # Template and host of parent vm
    vm_properties['template'] = current.db(current.db.template.id == vm_details.template_id).select()[0]
    vm_properties['vm_host_details'] = current.db.host[vm_details.host_id]
    vm_properties['host'] = vm_properties['vm_host_details'].id
    # Creates a directory for the cloned vm
    logger.debug("Creating directory for cloned vm...")
    cloned_vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + cloned_vm_details.vm_identity
    if not os.path.exists (cloned_vm_directory_path):
        os.makedirs(cloned_vm_directory_path)
        clone_file_parameters = ' --file ' + cloned_vm_directory_path + '/' + cloned_vm_details.vm_identity + '.qcow2'
    else:
        raise Exception("Directory with same name as vmname already exists.")
    # Creates a folder for additional disks of the cloned vm
    vm = current.db(current.db.vm_data.vm_identity == vm_details.vm_identity).select().first()
    disk_details_of_cloning_vm = current.db(current.db.attached_disks.vm_id == vm.id).select(orderby=current.db.attached_disks.attached_disk_name)
    logger.debug(disk_details_of_cloning_vm)
    already_attached_disks = len(disk_details_of_cloning_vm)
    cloned_vm_extra_disks_directory = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                      datastore.ds_name + '/' + cloned_vm_details.vm_identity
    if already_attached_disks > 0:
        if not os.path.exists (cloned_vm_extra_disks_directory):
            logger.debug("Making Directory")
            os.makedirs(cloned_vm_extra_disks_directory)
        # One '--file' entry and one attached_disks row per parent disk,
        # numbered _disk1.._diskN in the parent's disk order.
        count = already_attached_disks
        while already_attached_disks > 0:
            disk_name = cloned_vm_details.vm_identity + '_disk' + str(count - already_attached_disks + 1) + '.qcow2'
            clone_file_parameters += ' --file ' + cloned_vm_extra_disks_directory + '/' + disk_name
            current.db.attached_disks.insert(vm_id = cloned_vm_details.id,
                                             datastore_id = datastore.id ,
                                             attached_disk_name = disk_name,
                                             capacity = disk_details_of_cloning_vm[count - already_attached_disks].capacity)
            already_attached_disks -= 1
    return (clone_file_parameters)
def migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties):
    """
    Migrates cloned vm to new host

    The clone initially resides on the parent VM's host; this performs an
    offline peer-to-peer migration to new_host_id_for_cloned_vm, updates
    the clone's host_id in the DB and vm_properties['host'].
    Returns True on success, False on a libvirt failure.
    """
    try:
        new_host_ip_for_cloned_vm = current.db.host[new_host_id_for_cloned_vm].host_ip.private_ip
        logger.debug("New host ip for cloned vm is: " + str(new_host_ip_for_cloned_vm))
        # Offline migration: the freshly cloned domain is not running.
        flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_OFFLINE|VIR_MIGRATE_UNSAFE
        logger.debug("Clone currently on: " + str(vm_details.host_id.host_ip))
        (current_host_connection_object, domain) = getVirshDomainConn(None, vm_details.host_id.host_ip, cloned_vm_details.vm_identity)
        logger.debug("Starting to migrate cloned vm to host " + str(new_host_ip_for_cloned_vm))
        domain.migrateToURI("qemu+ssh://root@" + new_host_ip_for_cloned_vm + "/system", flags , None, 0)
        current_host_connection_object.close()
        logger.debug("Successfully migrated cloned vm to host " + str(new_host_ip_for_cloned_vm))
        cloned_vm_details.update_record(host_id = new_host_id_for_cloned_vm)
        vm_properties['host'] = new_host_id_for_cloned_vm
        return True
    except libvirt.libvirtError as e:
        # 'as' form is valid on Python 2.6+ and Python 3; the original
        # comma form (`except libvirt.libvirtError,e`) is Py2-only syntax.
        message = e.get_error_message()
        logger.debug("Error: " + message)
        return False
def clone(vmid):
    """
    Clones vm

    vmid identifies the CLONE's vm_data row; its parent_id points at the
    source VM, which must be shut off.  Builds clone properties, checks
    the parent's host has capacity under 200% RAM/CPU overcommitment,
    runs virt-clone remotely, updates the DB, then tries to migrate the
    clone to a better host (best-effort).  On failure the allocated
    properties are released via _free_vm_properties().
    Returns a (task_status, message) tuple.
    """
    vm_properties = {}
    logger.debug("Inside clone() function")
    cloned_vm_details = current.db.vm_data[vmid]
    vm_details = current.db(current.db.vm_data.id == cloned_vm_details.parent_id).select().first()
    try:
        domain = getVirshDomain(vm_details)
        if domain.info()[0] != VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is not shutoff. Check vm status.")
        clone_file_parameters = _get_clone_properties(vm_details, cloned_vm_details, vm_properties)
        logger.debug("cloned vm properties after clone_file_parameters" + str(vm_properties))
        host = vm_properties['vm_host_details']
        logger.debug("host is: " + str(host))
        logger.debug("host details are: " + str(host))
        (used_ram, used_cpu) = host_resources_used(host.id)
        logger.debug("uram: " + str(used_ram) + " used_cpu: " + str(used_cpu) + " host ram: " + str(host.RAM) +" host cpu: " + str(host.CPUs))
        # Capacity check at 200% overcommitment (host.RAM is in GB -> MB).
        host_ram_after_200_percent_overcommitment = math.floor((host.RAM * 1024) * 2)
        host_cpu_after_200_percent_overcommitment = math.floor(host.CPUs * 2)
        logger.debug("host_ram_after_200_percent_overcommitment in MB " + str(host_ram_after_200_percent_overcommitment))
        logger.debug("host_cpu_after_200_percent_overcommitment " + str(host_cpu_after_200_percent_overcommitment))
        logger.debug("Available RAM on host: %s, Requested RAM: %s" % ((host_ram_after_200_percent_overcommitment - used_ram), vm_details.RAM))
        logger.debug("Available CPUs on host: %s, Requested CPU: %s " % ((host_cpu_after_200_percent_overcommitment - used_cpu), vm_details.vCPU))
        if((( host_ram_after_200_percent_overcommitment - used_ram) >= vm_details.RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vm_details.vCPU) and (vm_details.vCPU <= host.CPUs)):
            clone_command = "virt-clone --original " + vm_details.vm_identity + " --name " + cloned_vm_details.vm_identity + \
                            clone_file_parameters + " --mac " + vm_properties['mac_addr']
            command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', clone_command, None, True)
            logger.debug(command_output)
            logger.debug("Updating db after cloning")
            update_db_after_vm_installation(cloned_vm_details, vm_properties, parent_id = vm_details.id)
            message = "Cloned successfully. "
            # Best-effort rebalancing: failure to find/migrate to a new
            # host only annotates the success message.
            try:
                new_host_id_for_cloned_vm = find_new_host(cloned_vm_details.RAM, cloned_vm_details.vCPU)
                if new_host_id_for_cloned_vm != host.id:
                    if migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties):
                        message += "Found new host and migrated successfully."
                    else:
                        message += "Found new host but not migrated successfully."
                else:
                    message += "New host selected to migrate cloned vm is same as the host on which it currently resides."
            except:
                message += "Could not find host to migrate cloned vm."
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            raise Exception("Host resources exhausted. Migrate the host vms and then try.")
    except:
        _free_vm_properties(cloned_vm_details, vm_properties)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def attach_extra_disk(parameters):
    """
    Attach an additional disk of the requested size to a VM
    (task-queue entry point).

    On success the VM's extra_HDD total is increased by disk_size in
    vm_data.  Returns a (task_status, message) tuple.
    """
    logger.debug("Inside attach extra disk() function")
    vmid = parameters['vm_id']
    disk_size = parameters['disk_size']
    vm_record = current.db.vm_data[vmid]
    logger.debug(str(vm_record))
    try:
        host_ip = vm_record.host_id.host_ip.private_ip
        if not serve_extra_disk_request(vm_record, disk_size, host_ip):
            message = " Your request for additional HDD could not be completed at this moment. Check logs."
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_FAILED, message)
        current.db(current.db.vm_data.id == vmid).update(extra_HDD = vm_record.extra_HDD + disk_size)
        message = "Attached extra disk successfully"
        logger.debug(message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def get_vm_image_location(datastore_id, vm_identity):
    """
    Get the file path for qcow2 image of a VM

    Returns (vm_image_path, image_present) where image_present is True
    when the image file exists under the datastore mount point.
    """
    datastore = current.db.datastore[datastore_id]
    vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_identity
    vm_image_name = vm_directory_path + '/' + vm_identity + '.qcow2'
    # os.path.exists already returns a bool; no conditional expression needed.
    image_present = os.path.exists(vm_image_name)
    return (vm_image_name, image_present)
def get_extra_disk_location(datastore_id, vm_identity, disk_name, get_disk_size=False):
    """
    Get the file path for qcow2 image of the extra disk

    Returns (disk_image_path, image_present, disk_size_gb); when the
    datastore row is missing, returns (None, False, 0).  disk_size is
    only computed (via qemu-img on this machine) when get_disk_size is
    truthy and the image exists.
    """
    datastore = current.db.datastore[datastore_id]
    if not datastore:
        return (None, False, 0)
    vm_extra_disks_directory_path = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                    datastore.ds_name + '/' + vm_identity
    # Accept disk names given with or without the .qcow2 extension.
    ext = '' if disk_name.endswith('.qcow2') else '.qcow2'
    disk_image_path = vm_extra_disks_directory_path + '/' + disk_name + ext
    image_present = os.path.exists(disk_image_path)
    disk_size = 0
    # Logical `and`, not bitwise `&` -- the original relied on both
    # operands being bools for `&` to act like a logical AND.
    if image_present and get_disk_size:
        # Parse e.g. "virtual size: 40G (42949672960 bytes)" from qemu-img.
        command = "qemu-img info " + disk_image_path + " | grep 'virtual size'"
        ret = os.popen(command).read()
        disk_size = int(ret[ret.index(':')+1:ret.index('G ')].strip())
    return (disk_image_path, image_present, disk_size)
def launch_existing_vm_image(vm_details):
    """
    Launch existing VM image
    - Choose new private_ip & mac_addr if not provided
    - Get location for VM image
    - Launch VM on given host
    - Attach extra disk to VM if defined
    - Create mapping between public IP and private IP if required

    Only proceeds when the qcow2 image is actually present on the
    datastore; nothing happens otherwise.
    """
    logger.debug('Launch existing VM image')
    vm_properties = {}
    vm_properties['ram'] = vm_details.RAM
    vm_properties['vcpus'] = vm_details.vCPU
    vm_properties['security_domain'] = vm_details.security_domain
    #If Private IP was already chosen previously and DHCP entry is done
    if vm_details.private_ip != None:
        private_ip_info = current.db.private_ip_pool[vm_details.private_ip]
        if private_ip_info:
            # Reuse the previously assigned network identity.
            vm_properties['private_ip'] = private_ip_info.private_ip
            vm_properties['mac_addr'] = private_ip_info.mac_addr
            vm_properties['vlan_name'] = private_ip_info.vlan.name
            vm_properties['vlan_tag'] = private_ip_info.vlan.vlan_tag
    if vm_details.public_ip == None:
        vm_properties['public_ip_req'] = False
    else:
        vm_properties['public_ip_req'] = True
        if vm_details.public_ip.is_active:
            vm_properties['public_ip'] = vm_details.public_ip.public_ip
    # Fills in any MAC/IP/VNC-port fields not already set above.
    _choose_mac_ip_vncport(vm_properties)
    vm_properties['template'] = current.db.template[vm_details.template_id]
    vm_properties['datastore'] = current.db.datastore[vm_details.datastore_id]
    vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU)
    (vm_image_name, image_present) = get_vm_image_location(vm_details.datastore_id, vm_details.vm_identity)
    if image_present:
        launch_vm_on_host(vm_details, vm_image_name, vm_properties)
        #Check if extra disk needs to be attached
        attached_disks = current.db((current.db.attached_disks.vm_id == vm_details.id)).select()
        if attached_disks:
            #Extra disk to be attached to the VM
            host_ip = current.db.host[vm_properties['host']].host_ip.private_ip
            disk_counter = 1
            for attached_disk in attached_disks:
                # attach_disk returns the disk size, which refreshes the DB capacity.
                disk_size = attach_disk(vm_details, attached_disk.attached_disk_name, host_ip, disk_counter, True)
                current.db(current.db.attached_disks.vm_id == attached_disk.vm_id and
                           current.db.attached_disks.attached_disk_name==attached_disk.attached_disk_name
                           ).update(capacity = disk_size)
                vm_details.extra_HDD += disk_size
                disk_counter += 1
        #Create mapping of Private_IP and Public_IP
        if vm_properties['public_ip_req']:
            create_mapping(vm_properties['public_ip'], vm_properties['private_ip'])
        update_db_after_vm_installation(vm_details, vm_properties)
def save_vm_as_template(parameters):
    """
    Save VM as template
    If template for given VM already exists, replace with new template.

    Args:
        parameters: dict containing 'vm_id' -- id of the VM to template.

    Returns:
        (task_status, message) tuple for the task queue.
    """
    logger.debug("Inside save_as_template() function")
    vm_id = parameters['vm_id']
    vm_data = current.db.vm_data[vm_id]
    user_list = []
    # NOTE(review): vm_details is the same row as vm_data (duplicate lookup).
    vm_details = current.db.vm_data[vm_id]
    logger.debug(str(vm_details))
    try:
        # create_new_template copies the VM disk into the templates directory
        # and returns (created?, new template location, old template path).
        (is_templated_created, new_template, old_template) = create_new_template(vm_details)
        if (is_templated_created):
            #remove old template
            # If an old template file exists, this VM was templated before and
            # a template DB row already points at the (unchanged) path -- only
            # the stale file is removed. Otherwise this is a first-time save:
            # register a new template row owned by all users mapped to the VM.
            if os.path.exists (old_template):
                os.remove(old_template)
            else:
                for user in current.db(current.db.user_vm_map.vm_id == vm_id).select(current.db.user_vm_map.user_id):
                    user_list.append(user.user_id)
                # New template inherits OS/arch/type metadata from the VM's
                # original template row.
                new_template_id = current.db.template.insert(name = vm_data.vm_name + "_template" ,
                                                             os = vm_data.template_id.os ,
                                                             os_name = vm_data.template_id.os_name ,
                                                             os_version = vm_data.template_id.os_version ,
                                                             os_type = vm_data.template_id.os_type ,
                                                             arch = vm_data.template_id.arch ,
                                                             hdd = vm_data.template_id.hdd ,
                                                             hdfile = new_template ,
                                                             type = vm_data.template_id.type ,
                                                             tag = vm_data.vm_name + "_template" ,
                                                             datastore_id = vm_data.template_id.datastore_id,
                                                             owner = user_list)
                # Link the VM row to the template it was saved as.
                current.db.vm_data[vm_id] = dict(saved_template = new_template_id)
            message = "User Template saved successfully"
            logger.debug(message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            message = " Vm Template not saved "
            logger.debug("Task Status: %s " % message)
            return (current.TASK_QUEUE_STATUS_FAILED, message)
    except:
        # Broad catch so the task queue always receives a status tuple.
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def delete_template(parameters):
    """Remove a template: delete its disk file, detach it from any VM that
    saved it, then drop the template DB row.

    Args:
        parameters: dict containing 'template_id'.

    Returns:
        (task_status, message) tuple for the task queue.
    """
    logger.debug("Inside delete_template() function")
    tmpl_id = parameters['template_id']
    tmpl_row = current.db.template[tmpl_id]
    disk_file = tmpl_row["hdfile"]
    # Delete the backing qcow2 file if it is still on disk.
    if os.path.exists(disk_file):
        os.remove(disk_file)
    # set value in db also
    owner_vm = current.db.vm_data(saved_template = tmpl_id)
    if owner_vm:
        owner_vm.update_record(saved_template = None)
    # Finally drop the template record itself.
    del current.db.template[tmpl_id]
    return (current.TASK_QUEUE_STATUS_SUCCESS, "")
def create_new_template(vm_details):
    """
    Create a new template from the VM image
    - Create template directory
    - Copy VM Image to directory(Live copy if VM is running)
    - Update database to define new template

    Args:
        vm_details: vm_data DB row of the VM to snapshot.

    Returns:
        Tuple (created, template_location, old_template) where created is a
        bool, template_location is the datastore-relative path of the new
        template and old_template is the path any previous template was
        moved to.
    """
    try:
        (connection_object, domain) = getVirshDomainConn(vm_details)
        # Capture the domain XML up front; it is restored on failure below.
        xmlfile = domain.XMLDesc(0)
        logger.debug("connection object created")
        datastore = _choose_datastore()
        logger.debug(datastore)
        # Per-user template directory: <mount>/<templates_dir>/<first_name>
        new_template_dir = datastore.system_mount_point + '/' +get_constant('templates_dir') + '/' + vm_details.requester_id.first_name
        logger.debug("Creating user template directory...")
        if not os.path.exists (new_template_dir):
            os.makedirs(new_template_dir)
        template = new_template_dir + '/' + vm_details.vm_identity + '_template.qcow2'
        template_location = '/' + vm_details.requester_id.first_name + '/' + vm_details.vm_identity + '_template.qcow2'
        old_template = new_template_dir + '/' + vm_details.vm_identity + '_template_old.qcow2'
        if os.path.exists (template):
            # move template to some other path
            logger.debug("move template to some other file")
            shutil.move(template, old_template)
        logger.debug("template " + template)
        current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        current_disk_file = current_disk_path + '/' + vm_details.vm_identity + '.qcow2'
        if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED):
            logger.debug("vm is active in db")
            if domain.isActive():
                # Live copy: undefine the domain (blockRebase with the COPY
                # flag requires a transient domain), mirror the disk to the
                # template file, then re-define the domain from the saved XML.
                domain.undefine()
                root = etree.fromstring(xmlfile)
                target_elem = root.find("devices/disk/target")
                target_disk = target_elem.get('dev')
                flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY
                domain.blockRebase(target_disk, template, 0, flag)
                # Poll until the block-copy job reaches the end of the disk.
                block_info_list = domain.blockJobInfo(current_disk_file,0)
                while(block_info_list['end'] != block_info_list['cur']):
                    logger.debug("time to sleep")
                    time.sleep(60)
                    block_info_list = domain.blockJobInfo(current_disk_file,0)
                domain.blockJobAbort(current_disk_file)
                domain = connection_object.defineXML(xmlfile)
                connection_object.close()
                return (True, template_location, old_template)
            else:
                logger.debug("domain is not running on host")
                return (False, template_location, old_template)
        elif(vm_details.status == current.VM_STATUS_SHUTDOWN):
            if domain.isActive():
                logger.debug("Domain is still active...Please try again after some time!!!")
                return (False, template_location, old_template)
            else:
                # Offline copy: plain cp of the qcow2 file, executed over SSH
                # on the host that owns the disk.
                logger.debug("copying")
                copy_command = "cp "+current_disk_file+" "+template
                logger.debug("copy_command"+copy_command)
                #rc = os.system("cp %s %s" % (current_disk_file, template))
                logger.debug("copy command running on " + vm_details.host_id.host_ip.private_ip + " host")
                command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', copy_command)
                logger.debug(command_output)
                return (True, template_location, old_template)
    except:
        # Best-effort recovery: re-define the domain if the live copy left it
        # transient. NOTE(review): if the failure happened before 'domain',
        # 'connection_object' or 'template_location' were bound, this handler
        # itself raises NameError -- confirm against callers.
        if not domain.isPersistent():
            domain = connection_object.defineXML(xmlfile)
        connection_object.close()
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (False, template_location, old_template)
| 75,380 | 23,502 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module wraps the Android Asset Packaging Tool."""
import os
from devil.utils import cmd_helper
from pylib import constants
_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
def _RunAaptCmd(args):
  """Invoke aapt with the given arguments and return its output.

  Args:
    args: A list of arguments for aapt.
  Returns:
    The output of the command.
  Raises:
    Exception: If aapt exits with a non-zero status.
  """
  full_cmd = [_AAPT_PATH]
  full_cmd.extend(args)
  exit_code, output = cmd_helper.GetCmdStatusAndOutput(full_cmd)
  if exit_code:
    raise Exception('Failed running aapt command: "%s" with output "%s".' %
                    (' '.join(full_cmd), output))
  return output
def Dump(what, apk, assets=None):
  """Returns the output of the aapt dump command.

  Args:
    what: What you want to dump.
    apk: Path to apk you want to dump information for.
    assets: List of assets in apk you want to dump information for.
  """
  if not assets:
    assets = []
  if isinstance(assets, basestring):
    # A single asset name was passed in; wrap it in a list.
    assets = [assets]
  return _RunAaptCmd(['dump', what, apk] + assets).splitlines()
| 1,152 | 380 |
setvar('nsamples', getvar('a') + getvar('b'))
| 46 | 19 |
"""Core recipes for Psi4"""
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict
from ase.atoms import Atoms
from ase.calculators.psi4 import Psi4
from jobflow import Maker, job
from monty.dev import requires
# Psi4 is an optional dependency; fall back to None so @requires can raise a
# helpful error at call time. Catch only ImportError -- the previous bare
# 'except:' would also swallow KeyboardInterrupt/SystemExit.
try:
    import psi4
except ImportError:
    psi4 = None
from quacc.schemas.calc import summarize_run
from quacc.util.basics import merge_dicts
from quacc.util.calc import run_calc
@dataclass
class StaticMaker(Maker):
    """
    Class to carry out a single-point calculation.

    Parameters
    ----------
    name
        Name of the job.
    method
        The level of theory to use.
    basis
        Basis set
    swaps
        Dictionary of custom kwargs for the calculator.
    """

    name: str = "Psi4-Static"
    method: str = "wb97x-v"
    basis: str = "def2-tzvp"
    swaps: Dict[str, Any] = None

    @job
    @requires(psi4, "Psi4 be installed. Try conda install -c psi4 psi4")
    def make(
        self, atoms: Atoms, charge: int = None, mult: int = None
    ) -> Dict[str, Any]:
        """
        Make the run.

        Parameters
        ----------
        atoms
            .Atoms object`
        charge
            Charge of the system. If None, this is determined from the sum of
            atoms.get_initial_charges().
        mult
            Multiplicity of the system. If None, this is determined from 1+ the sum
            of atoms.get_initial_magnetic_moments().

        Returns
        -------
        Dict
            Summary of the run.
        """
        swaps = self.swaps or {}
        defaults = {
            "mem": "16GB",
            "num_threads": "max",
            "method": self.method,
            "basis": self.basis,
            # BUGFIX: use `is not None` so an explicit charge of 0 (a common,
            # valid value) is honoured instead of being recomputed; same for
            # an explicit multiplicity.
            "charge": charge
            if charge is not None
            else round(sum(atoms.get_initial_charges())),
            "multiplicity": mult
            if mult is not None
            else round(1 + sum(atoms.get_initial_magnetic_moments())),
        }
        # User-supplied swaps override the defaults; None values are dropped.
        flags = merge_dicts(defaults, swaps, remove_none=True)
        atoms.calc = Psi4(**flags)
        new_atoms = run_calc(atoms)
        summary = summarize_run(
            new_atoms, input_atoms=atoms, additional_fields={"name": self.name}
        )
        return summary
| 2,218 | 689 |
#Main Program
from Class import Barang
import Menu

# Transaction history and the initial stock list.
histori = list()
listBarang = [
    Barang('Rinso', 5000, 20),
    Barang('Sabun', 3000, 20),
    Barang('Pulpen', 2500, 20),
    Barang('Tisu', 10000, 20),
    Barang('Penggaris', 1000, 20)
]

MENU_TEXT = '''
    Menu
    1. Tampilkan Barang
    2. Tambahkan Barang
    3. Tambah Stock Barang
    4. Hapus Barang
    5. Cari Barang Berdasarkan Keyword
    6. Hitung Barang Belanjaan
    7. Histori Keluar Masuk Barang
    0. Keluar Program
    '''

# Dispatch table: menu number -> zero-argument action.
actions = {
    '1': lambda: Menu.menu1(listBarang),
    '2': lambda: Menu.menu2(listBarang, histori),
    '3': lambda: Menu.menu3(listBarang, histori),
    '4': lambda: Menu.menu4(listBarang, histori),
    '5': lambda: Menu.menu5(listBarang),
    '6': lambda: Menu.menu6(listBarang, histori),
    '7': lambda: Menu.menu7(histori),
}

while True:
    print(MENU_TEXT)
    choice = input('Masukan No Menu: ')
    if choice == '0':
        print('Keluar Program')
        break
    action = actions.get(choice)
    if action is not None:
        action()
    else:
        print('Invalid Input!')
#coding:utf-8
import numpy as np
import tensorflow as tf
import os
import time
import datetime
import ctypes
import threading
import json
# Load the native helper libraries via ctypes: init_cnn.so serves CNN training
# instances/word vectors, init_know.so serves knowledge-graph triples.
ll1 = ctypes.cdll.LoadLibrary
lib_cnn = ll1("./init_cnn.so")
ll2 = ctypes.cdll.LoadLibrary
lib_kg = ll2("./init_know.so")
class Config(object):
    """Dataset dimensions (read from the C helper library) and fixed training
    hyper-parameters for the joint CNN + knowledge-graph model."""
    def __init__(self):
        # Sizes reported by the CNN data library.
        self.instanceTot = lib_cnn.getInstanceTot()
        self.sequence_size = lib_cnn.getLenLimit()
        self.num_classes = lib_cnn.getRelationTotal()
        self.num_words = lib_cnn.getWordTotal()
        self.num_positions = 2 * lib_cnn.getPositionLimit() + 1
        self.word_size = lib_cnn.getWordDimension()
        self.position_size = 5
        # Each token embedding = word vector + head/tail position vectors.
        self.embedding_size = self.word_size + self.position_size * 2
        self.filter_size = 3
        self.num_filters = 230
        self.relation_size = self.word_size  # originally 230
        self.dropout_keep_prob = 0.5
        self.l2_lambda = 0.0001
        # Index of the NA ("no relation") class, pushed into the C library.
        self.NA = 51
        lib_cnn.setNA(self.NA)
        lib_cnn.setRate(3)
        # TransE margin and batching/epoch settings.
        self.margin = 1.0
        self.nbatches = 100
        self.trainTimes = 15
        # Filled in by __main__ after lib_kg.init().
        self.entityTotal = 0
        self.relationTotal = 0
class Model(object):
    """Joint graph: a CNN relation classifier over sentences plus a
    TransE-style margin loss over knowledge-graph triples, sharing the word
    embedding table.

    Written against the legacy TF<=0.x API (positional ``tf.concat``,
    positional ``softmax_cross_entropy_with_logits``).

    NOTE(review): the embedding table is initialised from the module-level
    ``word_embeddings`` array created in ``__main__`` -- this class cannot be
    instantiated before that array exists.
    """
    def __init__(self, config):
        # Unpack configuration into locals for brevity.
        sequence_size = config.sequence_size
        num_classes = config.num_classes
        num_words = config.num_words
        num_positions = config.num_positions
        embedding_size = config.embedding_size
        word_size = config.word_size
        position_size = config.position_size
        relation_size = config.relation_size
        filter_size = config.filter_size
        num_filters = config.num_filters
        dropout_keep_prob = config.dropout_keep_prob
        margin = config.margin
        l2_lambda = config.l2_lambda
        # CNN inputs: token ids and head/tail relative-position ids for one
        # bag of sentences; the label/relation inputs are per-bag ([1, ...]).
        self.input_x = tf.placeholder(tf.int32, [None, sequence_size], name = "input_x")
        self.input_p_h = tf.placeholder(tf.int32, [None, sequence_size], name = "input_p_h")
        self.input_p_t = tf.placeholder(tf.int32, [None, sequence_size], name = "input_p_t")
        self.input_r = tf.placeholder(tf.float32, [1, 1], name = "input_r")
        self.input_r_n = tf.placeholder(tf.float32, [1, 1], name = "input_r_n")
        self.input_h = tf.placeholder(tf.int32, [1, 1], name = "input_h")
        self.input_t = tf.placeholder(tf.int32, [1, 1], name = "input_t")
        self.input_y = tf.placeholder(tf.float32, [1, num_classes], name = "input_y")
        # KG inputs: positive triples and corrupted (negative) triples.
        self.pos_h = tf.placeholder(tf.int32, [None])
        self.pos_t = tf.placeholder(tf.int32, [None])
        self.pos_r = tf.placeholder(tf.int32, [None])
        self.neg_h = tf.placeholder(tf.int32, [None])
        self.neg_t = tf.placeholder(tf.int32, [None])
        self.neg_r = tf.placeholder(tf.int32, [None])
        l2_loss = tf.constant(0.0)
        with tf.name_scope("embedding-lookup"):
            # Word table is seeded from the pre-trained vectors; entities are
            # looked up in the same table so the two objectives share it.
            self.word_embeddings = tf.Variable(word_embeddings, name="word_embeddings")
            self.relation_embeddings = tf.get_variable("relation_embeddings", [config.relationTotal, word_size])
            self.position_embeddings = tf.get_variable("position_embeddings", [num_positions, position_size])
            self.relation_attention = tf.get_variable("relation_attention", [num_classes, relation_size])
            self.NAattention = tf.get_variable("NAattention", [relation_size, 1])
            self.attention = tf.get_variable("attention", [num_filters, relation_size])
            #know
            pos_h_e = tf.nn.embedding_lookup(self.word_embeddings, self.pos_h)
            pos_t_e = tf.nn.embedding_lookup(self.word_embeddings, self.pos_t)
            pos_r_e = tf.nn.embedding_lookup(self.relation_embeddings, self.pos_r)
            neg_h_e = tf.nn.embedding_lookup(self.word_embeddings, self.neg_h)
            neg_t_e = tf.nn.embedding_lookup(self.word_embeddings, self.neg_t)
            neg_r_e = tf.nn.embedding_lookup(self.relation_embeddings, self.neg_r)
            #cnn
            self.x_initial = tf.nn.embedding_lookup(self.word_embeddings, self.input_x)
            self.x_p_h = tf.nn.embedding_lookup(self.position_embeddings, self.input_p_h)
            self.x_p_t = tf.nn.embedding_lookup(self.position_embeddings, self.input_p_t)
            # Concatenate word + position channels, add a channel dim for conv2d
            # (legacy tf.concat signature: axis first).
            self.x = tf.expand_dims(tf.concat(2, [self.x_initial, self.x_p_h, self.x_p_t]), -1)
            self.head = tf.nn.embedding_lookup(self.word_embeddings, self.input_h)
            self.tail = tf.nn.embedding_lookup(self.word_embeddings, self.input_t)
            l2_loss += tf.nn.l2_loss(self.attention)
        with tf.name_scope("conv-maxpool"):
            # Single convolution layer followed by max-over-time pooling.
            self.W = tf.get_variable("W", [filter_size, embedding_size, 1, num_filters])
            self.b = tf.get_variable("b", [num_filters])
            conv = tf.nn.conv2d(self.x, self.W, strides=[1, 1, 1, 1], padding="VALID", name="conv")
            h = tf.nn.tanh(tf.nn.bias_add(conv, self.b), name="tanh")
            self.y = tf.nn.max_pool(h, ksize=[1, sequence_size - filter_size + 1, 1, 1], strides=[1, 1, 1, 1], padding='VALID', name="pool")
            l2_loss += tf.nn.l2_loss(self.W)
            l2_loss += tf.nn.l2_loss(self.b)
            self.y = tf.reshape(self.y, [-1, num_filters])
        with tf.name_scope('attention'):
            # Max over the sentences of the bag -> one feature vector per bag.
            self.y_attention = tf.reduce_max(self.y, 0 , keep_dims = True)
        with tf.name_scope("dropout"):
            self.y_attention = tf.nn.l2_normalize(self.y_attention, 1)
            self.h_drop = tf.nn.dropout(self.y_attention, dropout_keep_prob)
            self.transfer_w = tf.get_variable("transfer_w", [num_filters, num_classes])
            self.scores = tf.matmul(self.h_drop, self.transfer_w)
            l2_loss += tf.nn.l2_loss(self.transfer_w)
        with tf.name_scope("loss"):
            # CNN objective: cross-entropy + L2; KG objective: TransE margin
            # loss on L1 distances of (h + r - t).
            cross_entropy = tf.nn.softmax_cross_entropy_with_logits(self.scores, self.input_y)
            self.loss_cnn = tf.reduce_mean(cross_entropy) + l2_lambda * l2_loss
            pos = tf.reduce_sum(abs(pos_h_e + pos_r_e - pos_t_e), 1, keep_dims = True)
            neg = tf.reduce_sum(abs(neg_h_e + neg_r_e - neg_t_e), 1, keep_dims = True)
            self.loss_kg = tf.reduce_sum(tf.maximum(pos - neg + margin, 0))
        with tf.name_scope("accuracy"):
            self.predictions = tf.argmax(self.scores, 1, name="predictions")
            correct_predictions = tf.equal(self.predictions, tf.argmax(self.input_y, 1))
            self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
# Global training counters shared with train_step_cnn inside the CNN training
# thread (reset after each epoch): processed bags, hits on NA / non-NA bags,
# counts of NA / non-NA bags seen, and the running loss.
bags_sum = 0.0
bags_hit_NA = 0.0
sum_NA = 0.0
sum_fNA = 0.0
bags_hit = 0.0
loss_sum = 0.0
if __name__ == "__main__":
    # Load word vectors and training data through the C helper libraries.
    lib_cnn.readWordVec()
    lib_cnn.readFromFile()
    lib_kg.init()
    np.random.seed(0)
    tf.set_random_seed(0)
    config = Config()
    # Copy the pre-trained word vectors out of the C library into numpy by
    # passing the raw buffer address, then reshape to (num_words, word_size).
    word_embeddings = np.zeros(config.num_words * config.word_size, dtype = np.float32)
    lib_cnn.getWordVec.argtypes = [ctypes.c_void_p]
    lib_cnn.getWordVec(word_embeddings.__array_interface__['data'][0])
    word_embeddings.resize((config.num_words,config.word_size))
    # Python 2 integer division: number of triples per KG mini-batch.
    config.batch_size = lib_kg.getTripleTotal() / config.nbatches
    config.entityTotal = lib_kg.getEntityTotal()
    config.relationTotal = lib_kg.getRelationTotal()
    with tf.Graph().as_default():
        conf = tf.ConfigProto()
        sess = tf.Session(config=conf)
        with sess.as_default():
            initializer = tf.contrib.layers.xavier_initializer()
            with tf.variable_scope("model", reuse=None, initializer = initializer):
                m = Model(config = config)
            # Separate SGD optimizers/step counters for the two objectives.
            global_step_cnn = tf.Variable(0, name="global_step_cnn", trainable=False)
            optimizer_cnn = tf.train.GradientDescentOptimizer(0.01)
            grads_and_vars_cnn = optimizer_cnn.compute_gradients(m.loss_cnn)
            train_op_cnn = optimizer_cnn.apply_gradients(grads_and_vars_cnn, global_step = global_step_cnn)
            global_step_kg = tf.Variable(0, name="global_step_kg", trainable=False)
            optimizer_kg = tf.train.GradientDescentOptimizer(0.001)
            grads_and_vars_kg = optimizer_kg.compute_gradients(m.loss_kg)
            train_op_kg = optimizer_kg.apply_gradients(grads_and_vars_kg, global_step=global_step_kg)
            sess.run(tf.initialize_all_variables())
            def outEmbedding(str1):
                # Dump the learned parameters to log<str1>.txt, one JSON line
                # per tensor.
                # NOTE(review): fetches m.transfer_b, m.softmax_w and
                # m.softmax_b, which the Model class above never defines --
                # this looks like it would raise AttributeError when called;
                # confirm against the full file.
                word_embeddings, relation_embeddings, position_embeddings, relation_attention, attention, W, B, transfer_w, transfer_b, softmax_w, softmax_b = sess.run([m.word_embeddings, m.relation_embeddings, m.position_embeddings, m.relation_attention, m.attention, m.W, m.b, m.transfer_w, m.transfer_b, m.softmax_w, m.softmax_b])
                log = open("log"+str1+".txt", "w")
                log.write(json.dumps(word_embeddings.tolist())+"\n")
                log.write(json.dumps(relation_embeddings.tolist())+"\n")
                log.write(json.dumps(position_embeddings.tolist())+"\n")
                log.write(json.dumps(relation_attention.tolist())+"\n")
                log.write(json.dumps(attention.tolist())+"\n")
                log.write(json.dumps(W.tolist())+"\n")
                log.write(json.dumps(B.tolist())+"\n")
                log.write(json.dumps(transfer_w.tolist())+"\n")
                NAattention = sess.run(m.NAattention)
                log.write(json.dumps(NAattention.tolist()) + "\n")
                log.close()
            # Pre-allocated buffers the C library fills in-place for each bag
            # (addresses of the numpy data are handed to ctypes below).
            x_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32)
            p_t_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32)
            p_h_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32)
            r_batch = np.zeros((1, 1), dtype = np.int32)
            y_batch = np.zeros((1, config.num_classes), dtype = np.int32)
            r_n_batch = np.zeros((1, 1), dtype = np.float32)
            h_batch = np.zeros((1, 1), dtype = np.int32)
            t_batch = np.zeros((1, 1), dtype = np.int32)
            x_batch_addr = x_batch.__array_interface__['data'][0]
            p_t_batch_addr = p_t_batch.__array_interface__['data'][0]
            p_h_batch_addr = p_h_batch.__array_interface__['data'][0]
            y_batch_addr = y_batch.__array_interface__['data'][0]
            r_batch_addr = r_batch.__array_interface__['data'][0]
            r_n_batch_addr = r_n_batch.__array_interface__['data'][0]
            h_batch_addr = h_batch.__array_interface__['data'][0]
            t_batch_addr = t_batch.__array_interface__['data'][0]
            lib_cnn.batch_iter.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
            tipTotal = lib_cnn.getTipTotal()
            loop = 0
            def train_cnn(coord):
                # CNN training thread: one epoch per outer iteration; stops
                # the coordinator after config.trainTimes epochs.
                def train_step_cnn(x_batch, p_h_batch, p_t_batch, y_batch, r_batch, r_n_batch, h_batch, t_batch):
                    global bags_sum, bags_hit, loss_sum, bags_hit_NA, bags_hit, sum_fNA, sum_NA
                    feed_dict = {
                        m.input_x: x_batch,
                        m.input_p_h: p_h_batch,
                        m.input_p_t: p_t_batch,
                        m.input_r: r_batch,
                        m.input_r_n: r_n_batch,
                        m.input_y: y_batch,
                        m.input_h: h_batch,
                        m.input_t: t_batch
                    }
                    _, step, loss, accuracy = sess.run(
                        [train_op_cnn, global_step_cnn, m.loss_cnn, m.accuracy], feed_dict)
                    time_str = datetime.datetime.now().isoformat()
                    loss_sum += loss
                    bags_sum += 1
                    # Track accuracy separately for NA and non-NA relations.
                    if (r_batch[0]!=config.NA):
                        sum_fNA += 1
                        if accuracy > 0.5:
                            bags_hit += 1.0
                    else:
                        sum_NA += 1
                        if accuracy > 0.5:
                            bags_hit_NA += 1.0
                    if bags_sum % 1000 == 0:
                        # Guard against division by zero in the progress line.
                        if (sum_NA == 0):
                            sum_NA+=1
                        if (sum_fNA == 0):
                            sum_fNA+=1
                        print("{}: step {}, loss {:g}, acc {:g} acc {:g} {} {}".format(time_str, step, loss_sum/bags_sum, bags_hit_NA/sum_NA, bags_hit/sum_fNA, sum_NA, sum_fNA))
                global loop
                while not coord.should_stop():
                    print 'Looping ', loop
                    outEmbedding(str(loop))
                    for i in range(tipTotal):
                        # The C library fills the buffers and returns the
                        # number of sentences in the bag.
                        length = lib_cnn.batch_iter(x_batch_addr, p_h_batch_addr, p_t_batch_addr, y_batch_addr, r_batch_addr, r_n_batch_addr, h_batch_addr, t_batch_addr)
                        train_step_cnn(x_batch[0:length,], p_h_batch[0:length,], p_t_batch[0:length,], y_batch, r_batch, r_n_batch, h_batch, t_batch)
                    global bags_sum, bags_hit, loss_sum, bags_hit_NA, bags_hit, sum_fNA, sum_NA
                    # Reset epoch statistics.
                    bags_sum = 0
                    bags_hit = 0
                    bags_hit_NA = 0
                    loss_sum = 0
                    sum_fNA = 0
                    sum_NA = 0
                    loop += 1
                    if loop == config.trainTimes:
                        coord.request_stop()
            # Buffers for positive/negative triple batches filled by lib_kg.
            ph = np.zeros(config.batch_size * 2, dtype = np.int32)
            pt = np.zeros(config.batch_size * 2, dtype = np.int32)
            pr = np.zeros(config.batch_size * 2, dtype = np.int32)
            nh = np.zeros(config.batch_size * 2, dtype = np.int32)
            nt = np.zeros(config.batch_size * 2, dtype = np.int32)
            nr = np.zeros(config.batch_size * 2, dtype = np.int32)
            ph_addr = ph.__array_interface__['data'][0]
            pt_addr = pt.__array_interface__['data'][0]
            pr_addr = pr.__array_interface__['data'][0]
            nh_addr = nh.__array_interface__['data'][0]
            nt_addr = nt.__array_interface__['data'][0]
            nr_addr = nr.__array_interface__['data'][0]
            lib_kg.getBatch.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int]
            times_kg = 0
            def train_kg(coord):
                # KG training thread: keeps running TransE batches until the
                # CNN thread stops the coordinator.
                def train_step_kg(pos_h_batch, pos_t_batch, pos_r_batch, neg_h_batch, neg_t_batch, neg_r_batch):
                    feed_dict = {
                        m.pos_h: pos_h_batch,
                        m.pos_t: pos_t_batch,
                        m.pos_r: pos_r_batch,
                        m.neg_h: neg_h_batch,
                        m.neg_t: neg_t_batch,
                        m.neg_r: neg_r_batch
                    }
                    _, step, loss = sess.run(
                        [train_op_kg, global_step_kg, m.loss_kg], feed_dict)
                    return loss
                global times_kg
                while not coord.should_stop():
                    times_kg += 1
                    res = 0.0
                    for batch in range(config.nbatches):
                        lib_kg.getBatch(ph_addr, pt_addr, pr_addr, nh_addr, nt_addr, nr_addr, config.batch_size)
                        res += train_step_kg(ph, pt, pr, nh, nt, nr)
            # Run both objectives concurrently against the shared session.
            coord = tf.train.Coordinator()
            threads = []
            threads.append(threading.Thread(target=train_kg, args=(coord,)))
            threads.append(threading.Thread(target=train_cnn, args=(coord,)))
            for t in threads: t.start()
            coord.join(threads)
| 13,010 | 6,029 |
#!/usr/bin/env python3
"""get tag from http://demo.illustration2vec.net/."""
# note:
# - error 'ERROR: Request Entity Too Large' for file 1.1 mb
# <span style="color:red;">ERROR: Request Entity Too Large</span>
from collections import OrderedDict
from pathlib import Path
from pprint import pformat
import imghdr
import logging
import os
import shutil
import time
import urllib
import hashlib
import click
import requests
import structlog
import peewee
from PIL import Image
from i2vec_cli import models
from i2vec_cli.requests_session import Session, convert_raw_to_hydrus
from i2vec_cli.sha256 import sha256_checksum
from i2vec_cli.utils import user_data_dir, thumb_folder
def is_url(path):
    """Return True if path is url, False otherwise.

    Only http and https schemes count as URLs.
    """
    return urllib.parse.urlparse(path).scheme in ('http', 'https')
def is_ext_equal(file_ext, imghdr_ext):
    """compare file extension with result from imghdr_ext.

    A missing/empty imghdr result never matches; '.jpg' and '.jpeg' are both
    accepted for imghdr's 'jpeg'.
    """
    if not imghdr_ext:
        return False
    ext = file_ext.lower()
    if ext == '.' + imghdr_ext:
        return True
    # imghdr reports 'jpeg' for both .jpg and .jpeg files.
    return ext in ('.jpg', '.jpeg') and imghdr_ext == 'jpeg'
def download(url, no_clobber):
    """download url.

    Saves the URL under its basename in the current directory, then renames
    the file so its extension matches the image type detected by imghdr.

    Args:
        url: URL to be downloaded.
        no_clobber: Skip download if file already exist.
    Returns:
        Downloaded filename or existing file if `no_clobber` is `True`
    """
    log = structlog.getLogger()
    basename = os.path.basename(url)
    # Reuse an already-downloaded file when asked not to clobber.
    if os.path.isfile(basename) and no_clobber:
        return basename
    response = requests.get(url, stream=True)
    with open(basename, 'wb') as out_file:
        shutil.copyfileobj(response.raw, out_file)
    name, ext = os.path.splitext(basename)
    # Detect the real image type from the file contents.
    imghdr_ext = imghdr.what(basename)
    ext_equal = is_ext_equal(file_ext=ext, imghdr_ext=imghdr_ext)
    if not imghdr_ext:
        log.debug("imghdr can't recognize file", file=basename)
        return basename
    else:
        new_basename = '{}.{}'.format(name, imghdr_ext)
        new_basename_exist = os.path.isfile(new_basename)
        if ext_equal:
            log.debug('Extension is equal', file_ext=ext, imghdr_ext=imghdr_ext)
            return basename
        elif not ext_equal:
            # Rename (or replace) so the extension matches the detected type;
            # an existing target is only overwritten when clobbering is allowed.
            if new_basename_exist and not no_clobber:
                log.debug('Replace existing file', old=basename, new=new_basename)
                shutil.move(basename, new_basename)
            elif not new_basename_exist:
                log.debug('Rename file ext', file=basename, new_ext=imghdr_ext)
                shutil.move(basename, new_basename)
            else:
                log.debug('Not replace/rename file', no_clobber=no_clobber, new_basename=new_basename)
            return new_basename
        else:
            # Unreachable in practice: ext_equal is a bool, so one of the two
            # branches above always runs; kept for defensive logging.
            log.debug(
                'Unknown condition',
                file=basename,
                ext_equal=ext_equal,
                new_basename_exist=new_basename_exist,
                imghdr_ext=imghdr_ext
            )
    # just return base name if any error happen
    return basename
def validate_close_delay(ctx, param, value):
    """Click callback: coerce the close-delay option to int and require >= -1."""
    try:
        delay = int(value)
    except Exception as e:
        raise click.BadParameter(
            'Error when validate close delay: value={}, error={}'.format(value, e))
    if delay < -1:
        raise click.BadParameter('Close delay have to be bigger or equal than -1')
    return delay
def delay_close(close_delay):
    """delay when closing the program.

    -1 waits for a keypress, 0 exits immediately, a positive value sleeps
    that many seconds; anything else is logged as an error.
    """
    log = structlog.getLogger()
    if close_delay == -1:
        click.pause()
        return
    if close_delay == 0:
        log.debug('No close delay')
    elif close_delay > 0:
        time.sleep(close_delay)
    else:
        log.error('Invalid close delay', v=close_delay)
def md5_checksum(fname):
    """Return the hex MD5 digest of the file at *fname*, read in 4 KiB chunks."""
    digest = hashlib.md5()
    with open(fname, "rb") as fp:
        while True:
            chunk = fp.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def create_thumbnail(path, thumb_path):
    """Create a JPEG thumbnail (bounded to 320x320) of *path* at *thumb_path*.

    Raises:
        IOError: If the image cannot be opened or saved. The original PIL
            error is chained as the cause instead of being discarded.
    """
    size = 320, 320
    try:
        im = Image.open(path)
        im.thumbnail(size)
        im.save(thumb_path, "JPEG")
    except IOError as err:
        # Chain the underlying PIL error so debugging keeps the real cause.
        raise IOError("cannot create thumbnail for", path) from err
def get_print_result(path, db_path, format, session):
    """get print result.

    Looks up cached prediction tags for the image (keyed by sha256/md5) or
    fetches them via *session*, caches new results, ensures a thumbnail
    exists, and formats the tags according to *format* ('dict', 'hydrus',
    or pretty-printed text otherwise).
    """
    # BUGFIX: `log` was used below without ever being defined in this scope.
    log = structlog.getLogger()
    # compatibility
    p = path
    sha256 = sha256_checksum(p)
    md5 = md5_checksum(p)
    thumb_path = os.path.join(user_data_dir, 'thumb', '{}.jpg'.format(sha256))
    try:
        load_res = models.load_result(db=db_path, sha256=sha256, md5=md5)
    except models.Image.DoesNotExist:
        load_res = None
    if load_res:
        tags = {'prediction': load_res}
    else:
        tags = session.get_tags(path=p)
        try:
            models.save_result(
                db=db_path, sha256=sha256, md5=md5, prediction=tags['prediction'])
        except peewee.IntegrityError as e:
            log.debug(str(e))
        except KeyError as e:
            # BUGFIX: was `except keyError` (undefined name -> NameError).
            log.debug(str(tags))
    if not os.path.isfile(thumb_path):
        create_thumbnail(p, thumb_path)
    if format == 'dict':
        return tags
    if format == 'hydrus':
        return convert_raw_to_hydrus(tags)
    else:
        return pformat(tags['prediction'])
@click.command()
@click.option('--format', type=click.Choice(['raw', 'hydrus']), default='raw')
@click.option('-d', '--debug', is_flag=True, help="Enable debug.")
@click.option('-nc', '--no-clobber', is_flag=True, help="Skip download url when file exist.")
@click.option(
    '--close-delay', default=0, help="Close delay of the program.", callback=validate_close_delay)
@click.option(
    '--driver', default=None, help="Driver for browser (deprecated).",
    type=click.Choice(['firefox', 'phantomjs', 'chrome', 'zope.testbrowser', 'django']))
@click.option('--dump-html', is_flag=True, help="Dump html table for debugging (deprecated).")
@click.argument('path', nargs=-1)
def main(format, path, debug, no_clobber, close_delay, driver=None, dump_html=False):
    """get tag from illustration2vec.

    Each PATH may be a local file or an http(s) URL (downloaded first);
    tags are printed for every item.
    """
    # Configure logging before any helper logs.
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    structlog.configure_once(logger_factory=structlog.stdlib.LoggerFactory())
    log = structlog.getLogger()
    if not path:
        raise ValueError('PATH required.')
    # init folder
    os.makedirs(user_data_dir, exist_ok=True)
    os.makedirs(thumb_folder, exist_ok=True)
    # database
    db_path = os.path.join(user_data_dir, 'main.db')
    if not os.path.isfile(db_path):
        Path(db_path).touch()
    models.database.init(db_path)
    try:
        models.init_all_tables()
    except peewee.OperationalError:
        # Tables already exist from a previous run; safe to continue.
        log.debug('Table already created')
    session = Session(driver=driver)
    try:
        for p in path:
            # Accept local files directly; download URLs first.
            if os.path.isfile(p):
                print('path:{}'.format(os.path.basename(p)))
            elif is_url(p):
                print('url:{}'.format(p))
                p = download(p, no_clobber=no_clobber)
            else:
                log.error('Unknown path format or path is not exist', path=p)
                continue
            result = get_print_result(
                path=p, db_path=db_path, format=format, session=session)
            print(result)
    finally:
        # Honour --close-delay and shut the browser down even on errors.
        delay_close(close_delay)
        if hasattr(session, 'browser'):
            session.browser.quit()
| 7,468 | 2,462 |
"""Functions for builtin CherryPy tools."""
import logging
import re
from hashlib import md5
import six
from six.moves import urllib
import cherrypy
from cherrypy._cpcompat import text_or_bytes
from cherrypy.lib import httputil as _httputil
from cherrypy.lib import is_iterator
# Conditional HTTP request support #
def validate_etags(autotags=False, debug=False):
    """Validate the current ETag against If-Match, If-None-Match headers.

    If autotags is True, an ETag response-header value will be provided
    from an MD5 hash of the response body (unless some other code has
    already provided an ETag header). If False (the default), the ETag
    will not be automatic.

    WARNING: the autotags feature is not designed for URL's which allow
    methods other than GET. For example, if a POST to the same URL returns
    no content, the automatic ETag will be incorrect, breaking a fundamental
    use for entity tags in a possibly destructive fashion. Likewise, if you
    raise 304 Not Modified, the response body will be empty, the ETag hash
    will be incorrect, and your application will break.

    See :rfc:`2616` Section 14.24.
    """
    response = cherrypy.serving.response
    # Guard against being run twice.
    if hasattr(response, 'ETag'):
        return
    status, reason, msg = _httputil.valid_status(response.status)
    etag = response.headers.get('ETag')
    # Automatic ETag generation. See warning in docstring.
    if etag:
        if debug:
            cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS')
    elif not autotags:
        if debug:
            cherrypy.log('Autotags off', 'TOOLS.ETAGS')
    elif status != 200:
        if debug:
            cherrypy.log('Status not 200', 'TOOLS.ETAGS')
    else:
        # Hash the (collapsed) response body to produce a strong ETag.
        etag = response.collapse_body()
        etag = '"%s"' % md5(etag).hexdigest()
        if debug:
            cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS')
        response.headers['ETag'] = etag
    # Mark as processed so a second invocation returns early (see guard above).
    response.ETag = etag
    # "If the request would, without the If-Match header field, result in
    # anything other than a 2xx or 412 status, then the If-Match header
    # MUST be ignored."
    if debug:
        cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS')
    if status >= 200 and status <= 299:
        request = cherrypy.serving.request
        conditions = request.headers.elements('If-Match') or []
        conditions = [str(x) for x in conditions]
        if debug:
            cherrypy.log('If-Match conditions: %s' % repr(conditions),
                         'TOOLS.ETAGS')
        if conditions and not (conditions == ['*'] or etag in conditions):
            raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did '
                                     'not match %r' % (etag, conditions))
        conditions = request.headers.elements('If-None-Match') or []
        conditions = [str(x) for x in conditions]
        if debug:
            cherrypy.log('If-None-Match conditions: %s' % repr(conditions),
                         'TOOLS.ETAGS')
        if conditions == ['*'] or etag in conditions:
            if debug:
                cherrypy.log('request.method: %s' %
                             request.method, 'TOOLS.ETAGS')
            # Safe methods get 304 Not Modified; others get 412 per RFC 2616.
            if request.method in ('GET', 'HEAD'):
                raise cherrypy.HTTPRedirect([], 304)
            else:
                raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r '
                                         'matched %r' % (etag, conditions))
def validate_since():
    """Validate the current Last-Modified against If-Modified-Since headers.

    If no code has set the Last-Modified response header, then no validation
    will be performed.
    """
    response = cherrypy.serving.response
    lastmod = response.headers.get('Last-Modified')
    if not lastmod:
        # Nothing to validate against.
        return
    status, reason, msg = _httputil.valid_status(response.status)
    request = cherrypy.serving.request
    since = request.headers.get('If-Unmodified-Since')
    if since and since != lastmod:
        # Resource changed since the client's snapshot: precondition failed.
        if 200 <= status <= 299 or status == 412:
            raise cherrypy.HTTPError(412)
    since = request.headers.get('If-Modified-Since')
    if since and since == lastmod:
        if 200 <= status <= 299 or status == 304:
            if request.method in ('GET', 'HEAD'):
                # Safe methods may be answered with 304 Not Modified.
                raise cherrypy.HTTPRedirect([], 304)
            else:
                raise cherrypy.HTTPError(412)
# Tool code #
def allow(methods=None, debug=False):
    """Raise 405 if request.method is not among the allowed methods.

    The default allowed methods are ['GET', 'HEAD']. Method names are
    case-insensitive and may be supplied in any order; a single string is
    accepted in place of a list. Whether or not the current method is
    allowed, an 'Allow' response header listing the given methods is
    emitted.
    """
    if not isinstance(methods, (tuple, list)):
        methods = [methods]
    normalized = [m.upper() for m in methods if m]
    if not normalized:
        normalized = ['GET', 'HEAD']
    elif 'GET' in normalized and 'HEAD' not in normalized:
        # HEAD is implied whenever GET is allowed.
        normalized.append('HEAD')
    cherrypy.response.headers['Allow'] = ', '.join(normalized)
    if cherrypy.request.method not in normalized:
        if debug:
            cherrypy.log('request.method %r not in methods %r' %
                         (cherrypy.request.method, normalized), 'TOOLS.ALLOW')
        raise cherrypy.HTTPError(405)
    if debug:
        cherrypy.log('request.method %r in methods %r' %
                     (cherrypy.request.method, normalized), 'TOOLS.ALLOW')
def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
          scheme='X-Forwarded-Proto', debug=False):
    """Change the base URL (scheme://host[:port][/path]).

    For running a CP server behind Apache, lighttpd, or another HTTP server.
    Leave 'local' at its default of 'X-Forwarded-Host' for Apache/lighttpd;
    for Squid you probably want tools.proxy.local = 'Origin'.

    To make request.base include path info (not just the host), explicitly
    set 'base' to the full base path AND set 'local' to '' so that the
    X-Forwarded-Host request header (which never includes path info) cannot
    override it. In all cases the value for 'base' MUST NOT end in a slash.

    cherrypy.request.remote.ip (the client's IP address) is rewritten from
    the header named by the 'remote' arg when that header is present.
    'remote' defaults to 'X-Forwarded-For'; pass an empty string to leave
    remote.ip untouched.
    """
    request = cherrypy.serving.request
    if scheme:
        forwarded_scheme = request.headers.get(scheme, None)
        if debug:
            cherrypy.log('Testing scheme %r:%r' % (scheme, forwarded_scheme),
                         'TOOLS.PROXY')
        if forwarded_scheme == 'on' and 'ssl' in scheme.lower():
            # This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header
            scheme = 'https'
        else:
            # This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https'
            scheme = forwarded_scheme
    if not scheme:
        # Fall back to the scheme of the current request.base.
        scheme = request.base[:request.base.find('://')]
    if local:
        forwarded_host = request.headers.get(local, None)
        if debug:
            cherrypy.log('Testing local %r:%r' % (local, forwarded_host),
                         'TOOLS.PROXY')
        if forwarded_host is not None:
            base = forwarded_host.split(',')[0]
    if not base:
        default_host = urllib.parse.urlparse(request.base).netloc
        base = request.headers.get('Host', default_host)
    if base.find('://') == -1:
        # add http:// or https:// if needed
        base = scheme + '://' + base
    request.base = base
    if remote:
        xff = request.headers.get(remote)
        if debug:
            cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY')
        if xff:
            if remote == 'X-Forwarded-For':
                # Grab the first IP in a comma-separated list. Ref #1268.
                xff = xff.split(',')[0].strip()
            request.remote.ip = xff
def ignore_headers(headers=('Range',), debug=False):
    """Delete request headers whose field names are included in 'headers'.

    This is a useful tool for working behind certain HTTP servers;
    for example, Apache duplicates the work that CP does for 'Range'
    headers, and will doubly-truncate the response.
    """
    request = cherrypy.serving.request
    for field in headers:
        if field not in request.headers:
            continue
        if debug:
            cherrypy.log('Ignoring request header %r' % field,
                         'TOOLS.IGNORE_HEADERS')
        del request.headers[field]
def response_headers(headers=None, debug=False):
    """Set headers on the response."""
    if debug:
        cherrypy.log('Setting response headers: %s' % repr(headers),
                     'TOOLS.RESPONSE_HEADERS')
    response = cherrypy.serving.response
    for name, value in (headers or []):
        response.headers[name] = value
# Run this tool even when an earlier tool in the chain has failed.
response_headers.failsafe = True
def referer(pattern, accept=True, accept_missing=False, error=403,
            message='Forbidden Referer header.', debug=False):
    """Raise HTTPError if Referer header does/does not match the given pattern.

    pattern
        A regular expression pattern to test against the Referer.

    accept
        If True, the Referer must match the pattern; if False,
        the Referer must NOT match the pattern.

    accept_missing
        If True, permit requests with no Referer header.

    error
        The HTTP error code to return to the client on failure.

    message
        A string to include in the response body on failure.
    """
    try:
        ref = cherrypy.serving.request.headers['Referer']
    except KeyError:
        if debug:
            cherrypy.log('No Referer header', 'TOOLS.REFERER')
        if accept_missing:
            return
    else:
        matched = bool(re.match(pattern, ref))
        if debug:
            cherrypy.log('Referer %r matches %r' % (ref, pattern),
                         'TOOLS.REFERER')
        if accept == matched:
            return
    raise cherrypy.HTTPError(error, message)
class SessionAuth(object):
    """Assert that the user is logged in.

    Subclasses customize behavior by overriding the hook methods
    (check_username_and_password, anonymous, on_login, on_logout, on_check)
    and, typically, the login_screen renderer.
    """
    # Key under which the authenticated username is stored in the session.
    session_key = 'username'
    # When True, _debug_message logs routing decisions to TOOLS.SESSAUTH.
    debug = False
    def check_username_and_password(self, username, password):
        """Return an error-message string on failure, or a falsy value on
        success. The default implementation accepts any credentials."""
        pass
    def anonymous(self):
        """Provide a temporary user name for anonymous users."""
        pass
    def on_login(self, username):
        """Hook called after a successful login."""
        pass
    def on_logout(self, username):
        """Hook called after a logout."""
        pass
    def on_check(self, username):
        """Hook called on every request once a username is asserted."""
        pass
    def login_screen(self, from_page='..', username='', error_msg='',
                     **kwargs):
        """Render the HTML login form as UTF-8 encoded bytes."""
        return (six.text_type("""<html><body>
Message: %(error_msg)s
<form method="post" action="do_login">
Login: <input type="text" name="username" value="%(username)s" size="10" />
<br />
Password: <input type="password" name="password" size="10" />
<br />
<input type="hidden" name="from_page" value="%(from_page)s" />
<br />
<input type="submit" />
</form>
</body></html>""") % vars()).encode('utf-8')
    def do_login(self, username, password, from_page='..', **kwargs):
        """Login. May raise redirect, or return True if request handled."""
        response = cherrypy.serving.response
        error_msg = self.check_username_and_password(username, password)
        if error_msg:
            # Bad credentials: re-render the form with the error message.
            body = self.login_screen(from_page, username, error_msg)
            response.body = body
            if 'Content-Length' in response.headers:
                # Delete Content-Length header so finalize() recalcs it.
                del response.headers['Content-Length']
            return True
        else:
            cherrypy.serving.request.login = username
            cherrypy.session[self.session_key] = username
            self.on_login(username)
            raise cherrypy.HTTPRedirect(from_page or '/')
    def do_logout(self, from_page='..', **kwargs):
        """Logout. May raise redirect, or return True if request handled."""
        sess = cherrypy.session
        username = sess.get(self.session_key)
        sess[self.session_key] = None
        if username:
            cherrypy.serving.request.login = None
            self.on_logout(username)
        raise cherrypy.HTTPRedirect(from_page)
    def do_check(self):
        """Assert username. Raise redirect, or return True if request handled.
        """
        sess = cherrypy.session
        request = cherrypy.serving.request
        response = cherrypy.serving.response
        username = sess.get(self.session_key)
        if not username:
            # Give anonymous() a chance to supply a temporary user name.
            sess[self.session_key] = username = self.anonymous()
            self._debug_message('No session[username], trying anonymous')
        if not username:
            # Still anonymous: replace the page body with the login form.
            url = cherrypy.url(qs=request.query_string)
            self._debug_message(
                'No username, routing to login_screen with from_page %(url)r',
                locals(),
            )
            response.body = self.login_screen(url)
            if 'Content-Length' in response.headers:
                # Delete Content-Length header so finalize() recalcs it.
                del response.headers['Content-Length']
            return True
        self._debug_message('Setting request.login to %(username)r', locals())
        request.login = username
        self.on_check(username)
    def _debug_message(self, template, context={}):
        """Log ``template % context`` when self.debug is set.

        NOTE(review): the mutable default ``context={}`` is shared across
        calls; harmless here because it is only read, never mutated.
        """
        if not self.debug:
            return
        cherrypy.log(template % context, 'TOOLS.SESSAUTH')
    def run(self):
        """Dispatch by path suffix to login_screen/do_login/do_logout,
        falling back to do_check for ordinary pages."""
        request = cherrypy.serving.request
        response = cherrypy.serving.response
        path = request.path_info
        if path.endswith('login_screen'):
            self._debug_message('routing %(path)r to login_screen', locals())
            response.body = self.login_screen()
            return True
        elif path.endswith('do_login'):
            if request.method != 'POST':
                # Credential submission must be POSTed.
                response.headers['Allow'] = 'POST'
                self._debug_message('do_login requires POST')
                raise cherrypy.HTTPError(405)
            self._debug_message('routing %(path)r to do_login', locals())
            return self.do_login(**request.params)
        elif path.endswith('do_logout'):
            if request.method != 'POST':
                response.headers['Allow'] = 'POST'
                raise cherrypy.HTTPError(405)
            self._debug_message('routing %(path)r to do_logout', locals())
            return self.do_logout(**request.params)
        else:
            self._debug_message('No special path, running do_check')
            return self.do_check()
def session_auth(**kwargs):
    # Build a SessionAuth configured from the keyword args, then run it.
    auth = SessionAuth()
    for name, value in kwargs.items():
        setattr(auth, name, value)
    return auth.run()
session_auth.__doc__ = (
    """Session authentication hook.
Any attribute of the SessionAuth class may be overridden via a keyword arg
to this function:
""" + '\n'.join(['%s: %s' % (k, type(getattr(SessionAuth, k)).__name__)
                 for k in dir(SessionAuth) if not k.startswith('__')])
)
def log_traceback(severity=logging.ERROR, debug=False):
    """Write the last error's traceback to the cherrypy error log."""
    # traceback=True makes cherrypy.log pull the current exception's
    # traceback into the log entry; the empty msg adds nothing else.
    cherrypy.log('', 'HTTP', severity=severity, traceback=True)
def log_request_headers(debug=False):
    """Write request headers to the cherrypy error log."""
    lines = []
    for field, value in cherrypy.serving.request.header_list:
        lines.append(' %s: %s' % (field, value))
    cherrypy.log('\nRequest Headers:\n' + '\n'.join(lines), 'HTTP')
def log_hooks(debug=False):
    """Write request.hooks to the cherrypy error log."""
    request = cherrypy.serving.request
    lines = []
    # Sort by the standard points if possible.
    from cherrypy import _cprequest
    points = _cprequest.hookpoints
    for point in request.hooks.keys():
        if point not in points:
            points.append(point)
    for point in points:
        lines.append(' %s:' % point)
        hooks = request.hooks.get(point, [])
        hooks.sort()
        lines.extend(' %r' % hook for hook in hooks)
    cherrypy.log('\nRequest Hooks for ' + cherrypy.url() +
                 ':\n' + '\n'.join(lines), 'HTTP')
def redirect(url='', internal=True, debug=False):
    """Raise InternalRedirect or HTTPRedirect to the given url."""
    if debug:
        prefix = {True: 'internal ', False: ''}[internal]
        cherrypy.log('Redirecting %sto: %s' % (prefix, url),
                     'TOOLS.REDIRECT')
    if internal:
        raise cherrypy.InternalRedirect(url)
    raise cherrypy.HTTPRedirect(url)
def trailing_slash(missing=True, extra=False, status=None, debug=False):
    """Redirect if path_info has (missing|extra) trailing slash."""
    request = cherrypy.serving.request
    path = request.path_info
    if debug:
        cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' %
                     (request.is_index, missing, extra, path),
                     'TOOLS.TRAILING_SLASH')
    if request.is_index is True:
        # Index pages should end in '/'.
        if missing and not path.endswith('/'):
            new_url = cherrypy.url(path + '/', request.query_string)
            raise cherrypy.HTTPRedirect(new_url, status=status or 301)
    elif request.is_index is False:
        # Non-index pages should not end in '/'; never redirect '/' to ''.
        if extra and path.endswith('/') and path != '/':
            new_url = cherrypy.url(path[:-1], request.query_string)
            raise cherrypy.HTTPRedirect(new_url, status=status or 301)
def flatten(debug=False):
    """Wrap response.body in a generator that recursively iterates over body.

    This allows cherrypy.response.body to consist of 'nested generators';
    that is, a set of generators that yield generators.
    """
    def walk(chunks):
        count = 0
        for chunk in chunks:
            if is_iterator(chunk):
                # Recurse into nested iterators.
                for sub in walk(chunk):
                    count += 1
                    yield sub
            else:
                count += 1
                yield chunk
        if debug:
            cherrypy.log('Flattened %d chunks' % count, 'TOOLS.FLATTEN')
    response = cherrypy.serving.response
    response.body = walk(response.body)
def accept(media=None, debug=False):
    """Return the client's preferred media-type (from the given Content-Types).

    If 'media' is None (the default), no test will be performed.

    If 'media' is provided, it should be the Content-Type value (as a string)
    or values (as a list or tuple of strings) which the current resource can
    emit. The client's acceptable media ranges (as declared in the Accept
    request header) are matched in order against these Content-Type values;
    the first match is returned, so the return value is always one of the
    strings in 'media' (or None if 'media' is None).

    If no match is found, HTTPError 406 (Not Acceptable) is raised. Note
    that most web browsers send */* as a (low-quality) acceptable media
    range, which matches any Content-Type; and per the HTTP spec, a request
    with no Accept header accepts all media types.

    Matching types are checked in order of client preference first, and
    then in the order of the given 'media' values. Accept-params other
    than "q" are not honored.
    """
    if not media:
        return
    if isinstance(media, text_or_bytes):
        media = [media]
    request = cherrypy.serving.request
    # Parse the Accept request header and try each media-range in order
    # of client preference.
    ranges = request.headers.elements('Accept')
    if not ranges:
        # Any media type is acceptable.
        if debug:
            cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT')
        return media[0]
    for element in ranges:
        if element.qvalue <= 0:
            # Explicitly unacceptable range.
            continue
        if element.value == '*/*':
            # Matches any type or subtype
            if debug:
                cherrypy.log('Match due to */*', 'TOOLS.ACCEPT')
            return media[0]
        if element.value.endswith('/*'):
            # Matches any subtype
            mtype = element.value[:-1]  # Keep the slash
            for candidate in media:
                if candidate.startswith(mtype):
                    if debug:
                        cherrypy.log('Match due to %s' % element.value,
                                     'TOOLS.ACCEPT')
                    return candidate
        elif element.value in media:
            # Matches exact value
            if debug:
                cherrypy.log('Match due to %s' % element.value,
                             'TOOLS.ACCEPT')
            return element.value
    # No suitable media-range found.
    ah = request.headers.get('Accept')
    if ah is None:
        msg = 'Your client did not send an Accept header.'
    else:
        msg = 'Your client sent this Accept header: %s.' % ah
    msg += (' But this resource only emits these media types: %s.' %
            ', '.join(media))
    raise cherrypy.HTTPError(406, msg)
class MonitoredHeaderMap(_httputil.HeaderMap):
    """Header map that records every header name looked up in it."""

    def __init__(self):
        self.accessed_headers = set()
        super(MonitoredHeaderMap, self).__init__()

    def transform_key(self, key):
        # Record the access before delegating to the base transformation.
        self.accessed_headers.add(key)
        return super(MonitoredHeaderMap, self).transform_key(key)
def autovary(ignore=None, debug=False):
    """Auto-populate the Vary response header based on request.header access.
    """
    request = cherrypy.serving.request
    # Swap in a monitoring map so every header lookup is recorded.
    monitored = MonitoredHeaderMap()
    monitored.update(request.headers)
    request.headers = monitored
    if ignore is None:
        ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type'])

    def set_response_header():
        resp_h = cherrypy.serving.response.headers
        vary = set([e.value for e in resp_h.elements('Vary')])
        if debug:
            cherrypy.log(
                'Accessed headers: %s' % request.headers.accessed_headers,
                'TOOLS.AUTOVARY')
        vary = vary.union(request.headers.accessed_headers)
        vary = vary.difference(ignore)
        resp_h['Vary'] = ', '.join(sorted(vary))
    request.hooks.attach('before_finalize', set_response_header, 95)
def convert_params(exception=ValueError, error=400):
    """Convert request params based on function annotations, with error handling.

    exception
        Exception class to catch.

    error
        The HTTP error code to return to the client on failure.
    """
    # Fix: the docstring previously documented a nonexistent 'status'
    # parameter; the actual parameter is 'error'.
    request = cherrypy.serving.request
    # Annotations on the page handler name the converter for each param.
    types = request.handler.callable.__annotations__
    with cherrypy.HTTPError.handle(exception, error):
        for key in set(types).intersection(request.params):
            request.params[key] = types[key](request.params[key])
| 23,443 | 6,851 |
'''Utility functions'''
import multiprocessing
from .globalVariables import *
def readMathIOmicaData(fileName):
    '''Read text files exported by MathIOmica and convert to Python data

    Parameters:
        fileName: str
            Path of directories and name of the file containing data

    Returns:
        data
            Python data, or None if the file is missing or cannot be converted

    Usage:
        data = readMathIOmicaData("../../MathIOmica/MathIOmica/MathIOmicaData/ExampleData/rnaExample")
    '''
    # Fix: previously a missing file fell through to eval() on an undefined
    # name, and the resulting NameError was swallowed by a bare except that
    # printed a misleading "Error occured while converting" message.
    if not os.path.isfile(fileName):
        print('File not found (%s)'%(fileName))
        return None

    with open(fileName, 'r') as tempFile:
        data = tempFile.read()

    # Translate Wolfram Language syntax into Python literal syntax:
    # lists {..} -> tuples (..), associations <|..|> -> dicts {..},
    # rules -> become key : value pairs, precision marks ^ and ` -> '*'.
    data = data.replace('\n','').replace('{','(').replace('}',')').replace('->',':').replace('|>','}')
    data = data.replace('<|','{').replace('^','*').replace('`','*').replace('Missing[]','"Missing[]"')
    data = data.replace("\\",'')

    try:
        # NOTE(security): eval of file contents -- only use on trusted
        # MathIOmica exports, never on untrusted input.
        return eval(data)
    except Exception:
        print('Error occured while converting data (%s)'%(fileName))
        return None
def runCPUs(NumberOfAvailableCPUs, func, list_of_tuples_of_func_params):
    """Parallelize function call with multiprocessing.Pool.

    Parameters:
        NumberOfAvailableCPUs: int
            Number of processes to create

        func: function
            Function to apply, must take at most one argument

        list_of_tuples_of_func_params: list
            Function parameters

    Returns:
        2d numpy.array
            Results of func in a numpy array

    Usage:
        results = runCPUs(4, pAutocorrelation, [(times[i], data[i], allTimes) for i in range(10)])
    """
    pool = multiprocessing.Pool(processes=NumberOfAvailableCPUs)
    results = pool.map(func, list_of_tuples_of_func_params)
    # Shut the workers down cleanly before stacking the results.
    pool.close()
    pool.join()
    return np.vstack(results)
def createReverseDictionary(inputDictionary):
    """Efficient way to create a reverse dictionary from a dictionary.
    Utilizes Pandas.Dataframe.groupby and Numpy arrays indexing.

    Parameters:
        inputDictionary: dictionary
            Dictionary to reverse (maps each key to an iterable of values)

    Returns:
        dictionary
            Reversed dictionary (maps each value to the list of keys)

    Usage:
        revDict = createReverseDictionary(Dict)
    """
    keys = np.array(list(inputDictionary.keys()))
    values = np.array(list(inputDictionary.values()))
    # One (key, value) row per member of each key's value list.
    pairs = np.array([[keys[i], item] for i in range(len(keys)) for item in values[i]])
    df = pd.DataFrame(pairs)
    grouped = df.groupby(df.columns[1])
    groupLabels = list(grouped.indices.keys())
    groupRows = list(grouped.indices.values())
    originalKeys = df.values.T[0]
    return dict(zip(groupLabels, [originalKeys[rows].tolist() for rows in groupRows]))
def createDirectories(path):
    """Create a path of directories, unless the path already exists.

    Parameters:
        path: str
            Path directory

    Returns:
        None

    Usage:
        createDirectories("/pathToFolder1/pathToSubFolder2")
    """
    if path=='':
        return None
    # exist_ok=True avoids the check-then-create race of the original
    # os.path.exists() + os.makedirs() sequence (another process creating
    # the directory between the two calls would have raised FileExistsError).
    os.makedirs(path, exist_ok=True)
    return None
| 3,216 | 1,036 |
#! /usr/bin/env python
#adam-does# runs SeeingClearly to get the seeing and rms of the image, then uses those to get sextractor thresholds for CR detection
#adam-use# use with CRNitschke pipeline
#adam-call_example# call it like ./get_sextract_thresholds.py /path/flname.fits output_file.txt
#IO stuff:
import sys ; sys.path.append('/u/ki/awright/InstallingSoftware/pythons')
###saveout = sys.stdout
# Keep a handle on the original stdout so machine-readable output could be
# restored later (see the commented-out code at the bottom of the script).
saveout = sys.stdout
###logout = open('SeeingClearly_stdout.log','w')
###sys.stdout = logout
# Likewise keep the original stderr handle before any redirection.
saveerr = sys.stderr
###logerr = open('SeeingClearly_stderr.log','w')
###sys.stderr = logerr
# Route all prints to stderr so diagnostics do not pollute stdout.
sys.stdout = sys.stderr
#the basics
import hashlib
import os
import SeeingClearly
from copy import deepcopy
def seeing_to_ft_dt(x):
    """Map a seeing value x to SExtractor (filter_thresh, detect_thresh).

    Each threshold is linear in the seeing, anchored at x1 = 0.48, and
    clamped to a fixed range: detect_thresh in [3500, 6000] with slope
    -16551.7 and value 5900 at the anchor; filter_thresh in [450, 1000]
    with slope -7000.0 and value 850 at the anchor.

    Returns (filter_thresh, detect_thresh).
    """
    # Fix: the original assigned min_ft twice (dead value in the tuple
    # unpack) and duplicated the clamp logic; both lines now share one
    # helper.
    def clamped_line(anchor_value, slope, lo, hi):
        # Line through (0.48, anchor_value), clipped to [lo, hi].
        y = anchor_value + slope*(x - 0.48)
        return min(max(y, lo), hi)

    yy_dts = clamped_line(5900, -16551.7, 3500, 6000)
    yy_fts = clamped_line(850, -7000.0, 450, 1000)
    return yy_fts, yy_dts
import imagetools
import glob
import astropy
from astropy.io import ascii
from numpy import asarray
if __name__ == "__main__":
    # Work on a copy of argv so the original list is left untouched.
    args=deepcopy(sys.argv[1:])
    for false_arg in ['-i', '--']:
        if false_arg in args: args.remove(false_arg)
    if len(args)<1:
        sys.exit()
    if not os.path.isfile(args[0]):
        print "sys.argv[1]=",args[0]
        raise Exception(args[0]+" is not a file!")
    else:
        # args[0]: input FITS image; args[1]: output table file.
        fl=args[0]
        fl2save=args[1]
    #start tmp
    print "Using SeeingClearly to get seeing for: "+fl
    print "saving output to: " +fl2save
    try:
        FILTER=astropy.io.fits.open(fl)[0].header['FILTER']
    except:
        # Missing/unreadable FILTER keyword: fall back to a placeholder.
        FILTER="UnknownFilt"
    # Split the basename around 'OCF' to recover the exposure base name.
    BASE,ending=os.path.basename(fl).split('OCF')
    ending="OCF"+ending
    ending=ending.replace('.fits','')
    fls_dir=os.path.dirname(fl)
    basename=os.path.basename(fl)
    # Collect the sibling files of this exposure from all 10 CCDs by
    # replacing this file's CCD number with a glob wildcard.
    CCDnum=imagetools.GetCCD(fl)
    globthis='_'+str(CCDnum)
    glob_basename=basename.replace(globthis,'_*')
    fls=sorted(glob.glob(fls_dir+"/"+glob_basename))
    if not len(fls)==10:
        raise Exception('cannot find 10 files like this from different CCDs')
    #adam-old# seeing,back_rms=SeeingClearly.seeing_clearly_withplot(fls,checkplots=1,saveas='pltSeeingClearly_%s_%s' % (FILTER,BASE[:-1]+"ALL"))
    import adam_stars_from_cat
    import numpy
    # One seeing value for the exposure; one background rms per CCD.
    seeing,back_rms=adam_stars_from_cat.get_seeing_backrms(fls)
    back_rms=numpy.array(back_rms)
    # Convert seeing to absolute thresholds, then to S/N per CCD.
    ft,dt=seeing_to_ft_dt(seeing)
    detect_thresh=dt/back_rms #convert to S2N ratio
    filter_thresh=ft/back_rms #convert to S2N ratio
    if FILTER=='W-J-B':
        # B band: cap the per-CCD thresholds.
        detect_thresh=asarray([min(170.0,detect_thresh[i]) for i in range(len(detect_thresh))])
        filter_thresh=asarray([min(20.0,filter_thresh[i]) for i in range(len(filter_thresh))])
    elif (detect_thresh>170.0).any() or (filter_thresh>20.0).any():
        print 'checkit: filter=%s and %.2f %% of the detection thresholds are above 170.0 and %.2f %% of the filter thresholds are above 20.0' % (FILTER,(detect_thresh>170.0).mean()*100, (filter_thresh>20.0).mean()*100)
    # Write one row per CCD file to the output ASCII table.
    dict_out={}
    dict_out['seeing']=[seeing]*10
    dict_out['rms']=back_rms
    dict_out['dt']=detect_thresh
    dict_out['ft']=filter_thresh
    dict_out['#files']=fls
    t=astropy.table.Table(data=dict_out,names=['#files','rms','seeing','dt','ft'],dtype=[str,float,float,float,float])
    t.write(fl2save,format="ascii.basic")
    #adam-2014#detect_thresh_cap=min(detect_thresh,150.0) #cap is now set in the function seeing_to_ft_dt
    #PIXSCALE=float(os.environ['PIXSCALE'])
    #if seeing>PIXSCALE*2.5: #I have no check for being undersampled, should I?
    #if seeing>.4:
    #    sys.stdout=saveout #back to printing to terminal
    #    ###sys.stdout.write(str(seeing))
    #    print "'0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)+"'"
    #
    #else:
    #    #print "exit 1;"
    #    #raise Exception('Seeing less than 2.5xPIXSCALE. The image is undersampled')
    #    #sys.stderr=saveerr #back to printing to terminal
    #    #sys.stderr.write('1')
    #    sys.stdout=saveout #back to printing to terminal
    #    print "0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)
| 4,041 | 1,825 |
import time
import multiprocessing
class Session:
    """Owns a worker subprocess and relays messages to and from it."""

    def __init__(self, *, labbox_config, default_feed_name: str):
        self._labbox_config = labbox_config
        parent_end, child_end = multiprocessing.Pipe()
        self._worker_process = multiprocessing.Process(
            target=_run_worker_session,
            args=(parent_end, labbox_config, default_feed_name))
        self._worker_process.start()
        self._pipe_to_worker_process = child_end
        self._incoming_keepalive_timestamp = time.time()

    def elapsed_sec_since_incoming_keepalive(self):
        """Seconds since the most recent keepAlive message arrived."""
        return time.time() - self._incoming_keepalive_timestamp

    def cleanup(self):
        """Ask the worker process to shut down."""
        self._pipe_to_worker_process.send('exit')

    def check_for_outgoing_messages(self):
        """Drain the worker pipe; return all accumulated outgoing messages."""
        collected = []
        while self._pipe_to_worker_process.poll():
            msg = self._pipe_to_worker_process.recv()
            if not (isinstance(msg, dict) and msg['type'] == 'outgoing_messages'):
                print(msg)
                raise Exception('Unexpected message from worker session')
            collected.extend(msg['messages'])
        return collected

    def handle_message(self, msg):
        """Handle keepalives locally; forward everything else to the worker."""
        if msg['type'] == 'keepAlive':
            self._handle_keepalive()
        else:
            self._pipe_to_worker_process.send(dict(
                type='incoming_message',
                message=msg
            ))

    def _handle_keepalive(self):
        # Record when the client last proved it is still connected.
        self._incoming_keepalive_timestamp = time.time()
def _run_worker_session(pipe_to_parent, labbox_config, default_feed_name: str):
    """Entry point of the worker subprocess spawned by Session.

    Pumps messages between the parent pipe and a WorkerSession until an
    'exit' string arrives from the parent.
    """
    # Imported here because this runs in the child process.
    from ._workersession import WorkerSession
    WS = WorkerSession(labbox_config=labbox_config, default_feed_name=default_feed_name)
    def handle_messages(msgs):
        # Forward outgoing messages from the worker session to the parent.
        pipe_to_parent.send(dict(
            type='outgoing_messages',
            messages=msgs
        ))
    WS.on_messages(handle_messages)
    WS.initialize()
    while True:
        # Drain every pending parent message before the next iterate().
        while pipe_to_parent.poll():
            x = pipe_to_parent.recv()
            if isinstance(x, str):
                if x == 'exit':
                    # Clean shutdown requested by the parent.
                    WS.cleanup()
                    return
                else:
                    print(x)
                    raise Exception('Unexpected message in _run_worker_session')
            elif isinstance(x, dict):
                if x['type'] == 'incoming_message':
                    WS.handle_message(x['message'])
                else:
                    print(x)
                    raise Exception('Unexpected message in _run_worker_session')
            else:
                print(x)
                raise Exception('Unexpected message in _run_worker_session')
        WS.iterate()
        # Small sleep keeps the pump from busy-waiting.
        time.sleep(0.05)
| 2,843 | 800 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import skipIf
try:
from django.core.urlresolvers import reverse
except ModuleNotFoundError:
from django.urls import reverse
from django.db import transaction
from aldryn_reversion.core import create_revision as aldryn_create_revision
from parler.utils.context import switch_language
import six
from . import NewsBlogTestCase
from aldryn_newsblog.cms_appconfig import NewsBlogConfig
from ..settings import ENABLE_REVERSION
if ENABLE_REVERSION:
try:
from reversion import create_revision
from reversion import default_revision_manager
except ImportError:
from reversion.revisions import create_revision
from reversion.revisions import default_revision_manager
@skipIf(not ENABLE_REVERSION, 'django-reversion not enabled')
class TestVersioning(NewsBlogTestCase):
    """Tests creating, translating and reverting article revisions."""

    def create_revision(self, article, content=None, language=None, **kwargs):
        """Save the article inside a new revision.

        Extra kwargs are set as attributes on the article; if 'content' is
        given, the body of the article's first text plugin is replaced.
        """
        with transaction.atomic():
            with create_revision():
                for k, v in six.iteritems(kwargs):
                    setattr(article, k, v)
                if content:
                    plugins = article.content.get_plugins()
                    plugin = plugins[0].get_plugin_instance()[0]
                    plugin.body = content
                    plugin.save()
                # TODO: Cover both cases (plugin modification/recreation)
                # if content:
                #     article.content.get_plugins().delete()
                #     api.add_plugin(article.content, 'TextPlugin',
                #         self.language, body=content)
                article.save()

    def revert_to(self, article, revision):
        """Revert the article to its revision-th most recent revision
        (0 is the latest)."""
        (default_revision_manager.get_for_object(article)[revision]
            .revision.revert())

    def test_revert_revision(self):
        """Reverting restores both the title and the plugin content."""
        title1 = self.rand_str(prefix='title1_')
        title2 = self.rand_str(prefix='title2_')
        content0 = self.rand_str(prefix='content0_')
        content1 = self.rand_str(prefix='content1_')
        content2 = self.rand_str(prefix='content2_')
        article = self.create_article(content=content0)
        # Revision 1
        self.create_revision(article, title=title1, content=content1)
        response = self.client.get(article.get_absolute_url())
        self.assertContains(response, title1)
        self.assertContains(response, content1)
        self.assertNotContains(response, content0)
        # Revision 2
        self.create_revision(article, title=title2, content=content2)
        response = self.client.get(article.get_absolute_url())
        self.assertContains(response, title2)
        self.assertContains(response, content2)
        self.assertNotContains(response, content1)
        # Revert to revision 1
        self.revert_to(article, 1)
        response = self.client.get(article.get_absolute_url())
        self.assertContains(response, title1)
        self.assertContains(response, content1)
        self.assertNotContains(response, content0)
        self.assertNotContains(response, content2)

    def test_revert_translated_revision(self):
        """Reverting affects only the translations saved in that revision."""
        title1_en = self.rand_str(prefix='title1_en_')
        title1_de = self.rand_str(prefix='title1_de_')
        title2_en = self.rand_str(prefix='title2_en_')
        title2_de = self.rand_str(prefix='title2_de_')
        article = self.create_article()
        # Revision 1
        article.set_current_language('en')
        self.create_revision(article, title=title1_en)
        article.set_current_language('de')
        self.create_revision(article, title=title1_de)
        with switch_language(article, 'en'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title1_en)
        with switch_language(article, 'de'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title1_de)
        # Revision 2a (modify just EN)
        article.set_current_language('en')
        self.create_revision(article, title=title2_en)
        response = self.client.get(article.get_absolute_url())
        self.assertContains(response, title2_en)
        with switch_language(article, 'de'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title1_de)
        # Revision 2b (modify just DE)
        article.set_current_language('de')
        self.create_revision(article, title=title2_de)
        with switch_language(article, 'en'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title2_en)
        with switch_language(article, 'de'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title2_de)
        # Revert to revision 2a (EN=2, DE=1)
        self.revert_to(article, 1)
        with switch_language(article, 'en'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title2_en)
        with switch_language(article, 'de'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title1_de)
        # Revert to revision 1 (EN=1, DE=1)
        self.revert_to(article, 2)
        with switch_language(article, 'en'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title1_en)
        with switch_language(article, 'de'):
            response = self.client.get(article.get_absolute_url())
            self.assertContains(response, title1_de)

    def test_edit_plugin_directly(self):
        """A plugin edit wrapped in aldryn_create_revision is revertable."""
        content0 = self.rand_str(prefix='content0_')
        content1 = self.rand_str(prefix='content1_')
        content2 = self.rand_str(prefix='content2_')
        article = self.create_article(content=content0)
        # Revision 1
        self.create_revision(article, content=content1)
        self.assertEqual(
            len(default_revision_manager.get_for_object(article)), 1)
        # Revision 2
        with transaction.atomic():
            plugins = article.content.get_plugins()
            plugin = plugins[0].get_plugin_instance()[0]
            plugin.body = content2
            plugin.save()
            aldryn_create_revision(article)
        self.assertEqual(
            len(default_revision_manager.get_for_object(article)), 2)
        response = self.client.get(article.get_absolute_url())
        self.assertContains(response, content2)
        self.assertNotContains(response, content1)
        # Revert to revision 1
        self.revert_to(article, 1)
        response = self.client.get(article.get_absolute_url())
        self.assertContains(response, content1)
        self.assertNotContains(response, content2)

    def test_blog_config_recovery_accessible(self):
        """A deleted NewsBlogConfig exposes an admin recovery URL."""
        with transaction.atomic():
            with create_revision():
                new_conf = NewsBlogConfig(
                    namespace='test_revocery_admin_url', paginate_by=15)
                new_conf.save()
        new_config_version = (default_revision_manager
                              .get_for_object(new_conf)[0])
        new_config_pk = new_conf.pk
        self.assertEqual(NewsBlogConfig.objects.filter(
            pk=new_config_pk).count(), 1)
        new_conf.delete()
        self.assertEqual(NewsBlogConfig.objects.filter(
            pk=new_config_pk).count(), 0)
        # check that there is a way to access the recovery view
        obj = new_config_version.object_version.object
        opts = obj._meta
        url = reverse(
            'admin:{0}_{1}_{2}'.format(
                opts.app_label,
                obj._meta.model_name,
                'recover'),
            args=[new_config_version.pk])
        # Just in case, check the length; at this step either a
        # NoReverseMatch or another error would have occurred.
        # If no exception is raised, it is a good sign.
        self.assertGreater(len(url), 4)
| 8,114 | 2,338 |
# Library for the dynamics of a lumen network
# The lumen are 2 dimensional and symmetric and connected with 1 dimensional tubes
#
# Created by A. Mielke, 2018
# Modified by M. Le Verge--Serandour on 8/04/2019
"""
network.py conf.init
Defines the class network and associated functions
Imports
-------
Libraries : numpy, os, math
Created by A. Mielke
Modified by H. Turlier on 8/06/2018
Modified by M. Le Verge--Serandour on 8/04/2019
"""
import numpy as np
import math
import os
class network:
    def __init__(self, network_folder, out_path, t_step, tube_radius = 0.01, friction = 1, swelling = False, swelling_rate=0., save_area_dat=False):
        """
        Initialization of the object network
        All properties needed for the simulation are read and initialized
        Input
        -----
        network_folder : str
            Folder containing lumen.dat, lumen_lumen.dat, bridge_lumen.dat and bridge_bridge.dat
        out_path : str, path-like
            Output folder receiving event.dat, error.dat and area.dat
        t_step : float
            Time step of the simulation. Note that if the simulation is adaptative, this time step will change.
        tube_radius : float, optional, default = 0.01
            Radius of the tube connecting lumens. Define the condition for empty lumens.
        friction : float, optional, default = 1
            Friction constant for the fluid circulating through pipes.
        swelling : bool, optional, default = False
            Swelling option for the simulation. True if swelling is included, False otherwise.
        swelling_rate : float, optional, default = 0.
            Swelling rate value in case the swelling is considered. Make sure the rate is not to big to avoid non-converging simulations.
        save_area_dat : bool, optional, default = False
            Save area option. True if areas are saved in area.dat, False otherwise.
        """
        self.network_folder = network_folder
        # Reading properties of the lumen
        # NOTE(review): lumen.dat columns 0, 2, 3 are read as (gamma_lumen, gamma_contact, area); column 1 is skipped - confirm the file layout.
        self.gamma_lumen, self.gamma_contact, self.area = np.loadtxt(os.path.join(network_folder, 'lumen.dat'), dtype = float, usecols = [0,2,3], unpack = True)
        # Reading links between two lumen
        self.lumen_lumen = self.read_lumen_lumen(os.path.join(network_folder, 'lumen_lumen.dat'))
        # Reading links between bridge and lumen
        self.bridge_lumen, self.num_bridges = self.read_bridge_lumen(os.path.join(network_folder, 'bridge_lumen.dat'))
        # Reading links between two bridges
        self.bridge_bridge, self.num_bridges = self.read_bridge_bridge(os.path.join(network_folder, 'bridge_bridge.dat'), self.num_bridges)
        # Surface tension ratio
        self.alpha = self.gamma_contact/(2*self.gamma_lumen)
        self.delta = np.full(len(self.alpha), 1) # Possibility of asymmetric lumen is not included
        # Resistances
        self.tube_radius = tube_radius # Radius of the tube connecting the lumen and the bridges
        self.friction = friction # Friction coefficient; friction * length = resistance
        # Opening angle of the lumen (angle between curvature and tube)
        self.theta = self.set_theta()
        # Area factor for expressing the pressure in terms of the area instead of the radius
        self.area_factor = self.set_area_factor()
        # Ending time: time at which only one lumen is remaining
        self.end_time = 0
        # Time step for the output of the area evolution
        self.time_step = t_step
        # Creating output file for the area evolution, events, error messages
        self.save_area(start = True, out_path = out_path)
        self.save_event('', start = True, out_path = out_path)
        self.save_error('', start = True, out_path = out_path)
        # Area distribution after only one lumen is remaining
        self.final_area = []
        # Current time step of the simulation
        self.current_time = 0
        # List of empty lumen (area < tube_radius **2)
        self.empty_list = np.zeros(len(self.alpha))
        # Swelling
        self.swelling_bool = swelling
        self.swelling_rate = swelling_rate
        # Save area
        self.save_area_dat = save_area_dat
############################################################################################################################
########################################################## Dynamics ########################################################
############################################################################################################################
    def flux(self, t, state):
        """
        Determines the flux/ area change for each lumen of the network, main function of network.py
        Input
        -----
        self : network object
            Needs to be called by a class object
        t : float
            Actual time step (not needed for the calculation of the flux, but required for the used integration method in network_simulation.py
        state : float array
            The current area of the lumens
        Returns
        -------
        flux : float array
            Contains the area change for each lumen in dt
        """
        # Initialization of the array containing the area change (index == lumen ID)
        flux = []
        self.current_time = t
        for i in range(len(self.alpha)):
            flux.append(0)
        # If only one lumen remains -> End of simulation, flux is zero (needed as for the integration method used, no dynamic stop is possible)
        if(np.sum(self.empty_list) >= len(self.alpha) - 1):
            if(self.end_time == 0):
                # Setting the end time for the output file area.log
                self.end_time = t
        # more than one lumen remaining: calculation of the flux
        else:
            # Adapting network to new state: Empty lumen are removed and graph is reconnected
            self.area = state
            self.remove_empty_lumen()
            # Area change between directly connected lumen
            flux = self.flux_lumen(flux)
            # Calculating artificial pressure at each bridge; linear system of equations, with flux(bridge) = 0, the bridge does not gain or loose area
            pressure_bridges = self.pressure_bridges()
            # Area change between lumen-bridges
            flux = self.flux_bridges(flux, pressure_bridges)
            # Area change due to swelling
            if self.swelling_bool :
                flux = self.flux_swelling(flux)
        # Saving area for the time step given in the configuration file
        if self.save_area_dat :
            self.save_area()
        self.t_old = t
        # Sanity check: the network only redistributes area (swelling aside), so the summed flux should vanish.
        if(np.abs(np.sum(flux)) > self.tube_radius ** 2):
            error = 'total flux is non-zero: total flux = %f' % (np.sum(flux))
            self.save_error(error)
        return flux
def flux_lumen(self,flux):
"""
Determines the flux/ area change for each lumen due to the connection between lumen and lumen
Input
-----
self network object
needs to be called by a class object
flux float array
vector containing the area change for each lumen; index = lumen ID
Returns
-------
flux float array
area changes due to lumen-lumen connection added to the vector passed
"""
# for each connection between two lumen
for line in range(len(self.lumen_lumen)):
lumen_1 = int (self.lumen_lumen[line][0]) # first lumen
lumen_2 = int (self.lumen_lumen[line][1]) # second lumen
# flux from lumen 2 to lumen 1
fl = (self.pressure(lumen_2) - self.pressure(lumen_1))*self.friction/self.lumen_lumen[line][2]
flux[lumen_1] += fl
flux[lumen_2] -= fl
return flux
    def pressure_bridges(self):
        """
        Determines the pressure at each bridge
        for each bridge the total flux is 0, meaning that the bridge does not gain or loose area
        this gives a linear equation system, which can be solved
        The connections are taken from the files bridge_lumen.dat and bridge_bridge.dat
        For Information about the equations see the documentation to the code
        Input
        -----
        self : network object
            Needs to be called by a class object
        Returns
        -------
        pressure_bridges : float array
            Pressure at each bridge
        """
        R_sum = np.zeros(self.num_bridges, dtype = float) # sum of the resistences around one bridge
        P_over_R_sum = np.zeros(self.num_bridges, dtype = float) # sum of pressure over resistance between one bridge and all directly connected lumen
        matrix_bridges = np.zeros([self.num_bridges, self.num_bridges], dtype= float) # matrix to calculate the pressure at each bridge
        # NOTE(review): conductance is taken as friction/length here and in the flux methods,
        # while the __init__ comment states resistance = friction * length - confirm the intended convention.
        # For each connection between bridge and lumen
        for line in self.bridge_lumen:
            bridge = int(line[0])
            lumen = int(line[1])
            R_sum[bridge] += 1./line[2]*self.friction
            P_over_R_sum[bridge] += self.pressure(lumen)/line[2]*self.friction
        # For each connection between bridge and bridge
        for line in self.bridge_bridge:
            bridge1 = int(line[0])
            bridge2 = int(line[1])
            matrix_bridges[bridge1][bridge2] = 1./line[2]*self.friction
            matrix_bridges[bridge2][bridge1] = 1./line[2]*self.friction
            R_sum[bridge1] += 1./line[2]*self.friction
            R_sum[bridge2] += 1./line[2]*self.friction
        # Diagonal entries: minus the total conductance attached to each bridge (flux balance).
        for line in range(self.num_bridges):
            matrix_bridges[line][line] = -R_sum[line]
        # Solving linear problem with the pressure at each bridge as solution
        pressure_bridges = np.linalg.solve(matrix_bridges, -P_over_R_sum)
        return pressure_bridges;
    def flux_bridges(self, flux, pressure_bridges):
        """
        Determines the flux/ area change for each lumen due to the connection between lumen and bridge
        Input
        -----
        self : network object
            Needs to be called by a class object
        flux : float array
            Area change per lumen, updated in place and returned
        pressure_bridges : float array
            Pressure at each bridge, as computed by pressure_bridges()
        Returns
        -------
        flux : float array
            Area changes due to bridge-lumen connection added to the vector passed
        """
        # Area change in one bridge; should be 0; calculated as control value
        flux_bridge = np.zeros(self.num_bridges, dtype = float)
        # For each connection between bridge and bridge
        for line in self.bridge_bridge:
            bridge1 = int(line[0])
            bridge2 = int(line[1])
            fb = (pressure_bridges[bridge2] - pressure_bridges[bridge1])*self.friction/line[2]
            flux_bridge[bridge1] += fb
            flux_bridge[bridge2] -= fb
        # For each connection between bridge and lumen
        for line in self.bridge_lumen:
            bridge = int(line[0])
            lumen = int(line[1])
            fl = (pressure_bridges[bridge] - self.pressure(lumen))*self.friction/line[2]
            flux[lumen] += fl
            flux_bridge[bridge] -= fl
        # Consistency check: each bridge should neither gain nor lose area; log any violation.
        for i in range(len(flux_bridge)):
            if (np.abs(flux_bridge[i]) > self.tube_radius ** 2):
                error = 'total flux of bridge %d is non-zero: total flux = %f' % (i,flux_bridge[i])
                self.save_error(error)
        return flux
def flux_swelling(self, flux) :
"""
Determines the flux/ area change for each lumen due to sewlling
Input
-----
self : network object
Needs to be called by a class object
Returns
-------
flux : float array
Area changes due to bridge-lumen connection added to the vector passed
"""
# for each lumen (lumen is the index of the lumen's area)
for lumen in range(len(self.area)) :
# if not empty
if not self.area[lumen] < 2*self.tube_radius ** 2 :
# then add the swelling contribution
flux[lumen] += self.swelling(lumen)
return flux
############################################################################################################################
###################################################### Removing Functions #####################################################
############################################################################################################################
def remove_empty_lumen(self):
"""
Determines and removes empty lumen
Calls a function to obtain a list of empty lumen and passes the list to a function to remove them and reconnect the network
Input
-----
self : network object
Needs to be called by a class object
Returns
-------
no return
"""
empty_lumen_list = []
# Creating a list of empty lumen
empty_lumen_list = self.get_empty_lumen()
# Removing empty lumen and reconnecting the network
if (len(empty_lumen_list) > 0 ):
event = 'empty lumen: ' + ' '.join(map(str, empty_lumen_list))
#print event
self.save_event(event)
self.remove_lumen(empty_lumen_list)
return;
def remove_lumen(self, lumen_to_remove):
"""
Removes the lumen that are passed and connects the neighbors of these lumen
Input
-----
self : network object
Needs to be called by a class object
lumen_to_remove : int list
List of lumen to be removed
Returns
-------
no return
"""
# For each lumen that has to be removed
for lumen in lumen_to_remove:
neighbours = self.get_neighbours(lumen) # List of connected lumen
bridges = self.get_bridges(lumen) # List of connected bridges
self.save_event('lumen ' + str(lumen) + ' neighbours ' + str(neighbours))
self.save_event('lumen ' + str(lumen) + ' bridges ' + str(bridges))
# Lumen had two connections, this means that it disappears and the two connected parts get directly connected, the resistance for the new link is the sum of the resistance of the two previous connections
test=True
if(len(neighbours) + len(bridges) == 2):
# Lumen was connected to two lumen -> new connection between lumen and lumen
if(len(neighbours) == 2):
self.create_link([neighbours[0][0], neighbours[1][0], neighbours[0][1] + neighbours[1][1]])
#print 'lumen_lumen connexion (' + str(neighbours[0][0]) + ', ' + str(neighbours[1][0]) + ')'
# Lumen was connected to a lumen and a bridge -> new connection between lumen and bridge
if(len(neighbours) == 1 and len(bridges)==1):
self.create_bridge_lumen([bridges[0][0], neighbours[0][0], bridges[0][1] + neighbours[0][1]])
#print 'lumen_bridge connexion (' + str(bridges[0][0]) + ', ' + str(neighbours[0][0]) + ')'
# Lumen was connected to two bridges -> new connection between bridge and bridge
if(len(bridges)==2):
self.create_bridge_bridge([bridges[0][0], bridges[1][0], bridges[0][1] + bridges[1][1]])
#print 'bridge_bridge connexion (' + str(bridges[0][0]) + ', ' + str(bridges[1][0]) + ')'
self.create_bridge(neighbours, bridges, lumid=lumen)
# Lumen had more than two connections -> becomes a bridge, the resistances remain the same but the connections are changed to connections to a bridge
if(len(neighbours) + len(bridges) > 2):
self.create_bridge(neighbours, bridges, lumid=lumen)
return;
def remove_link(self, lumen_1, lumen_2):
"""
Removes a connection between two lumen
Input
-----
self : network object
Needs to be called by a class object
lumen_1 : int
First lumen of the connection
lumen_2 :
Second lumen of the connection
Returns
-------
no return
"""
# Due to data structure first lumen must be smaller than second lumen
if(lumen_1 > lumen_2):
n = lumen_1
lumen_1 = lumen_2
lumen_2 = n
# Find connection in lumen_lumen file and remove it
line = 0
# For each line in lumen_lumen until connection is found
while (line < len(self.lumen_lumen)):
# If connection is found removing it
if(self.lumen_lumen[line][0] == lumen_1 and self.lumen_lumen[line][1] == lumen_2):
event = 'link lumen %d to lumen %d removed' % (lumen_1, lumen_2)
#print event
self.save_event(event)
link = [lumen_1, lumen_2, self.lumen_lumen[line][2]]
self.lumen_lumen.remove(link)
break;
# Look at next line
else: line += 1
############################################################################################################################
###################################################### Get Functions #####################################################
############################################################################################################################
def get_empty_lumen(self):
"""
Gets the IDs of the empty lumen
Empty means that the area is smaller than the tube_radius^2
Input
-----
self : network object
Needs to be called by a class object
Returns
-------
empty_lumen_list : int list
Contains the IDs of the empty lumens
"""
empty_lumen_list = []
# For each lumen ID
for i in range(len(self.area)):
# If area is smaller than the treshhold
if(self.area[i] < self.tube_radius ** 2 and self.empty_list[i] == 0):
self.empty_list[i] = 1
self.area[i] = 0
empty_lumen_list.append(i)
return empty_lumen_list
    def get_neighbours(self, lumen):
        """
        Gets the lumen that are directly connected to the lumen passed on and deletes the connections
        Input
        -----
        self : network object
            Needs to be called by a class object
        lumen : int
            ID of a lumen
        Returns
        -------
        neighbour_list : list of [ID, length]
            ID and link length of all lumen that are directly connected to the lumen passed on
        """
        neighbour_list = []
        line = 0
        # Links are sorted by their first column; entries with first ID < lumen may hold `lumen` in the second column.
        while line < len(self.lumen_lumen) and self.lumen_lumen[line][0] < lumen :
            if self.lumen_lumen[line][1] == lumen :
                neighbour_list.append([self.lumen_lumen[line][0], self.lumen_lumen[line][2]])
                event = 'link lumen %d to lumen %d removed' % (self.lumen_lumen[line][0], lumen)
                self.save_event(event)
                link = [self.lumen_lumen[line][0], self.lumen_lumen[line][1], self.lumen_lumen[line][2]]
                self.lumen_lumen.remove(link)
            else : line += 1
        # NOTE(review): this loop can never advance - the loop above only terminates once its
        # condition is false, so the same condition is immediately false here (dead code kept as-is).
        while line < len(self.lumen_lumen) and self.lumen_lumen[line][0] < lumen :
            line += 1
        # Entries whose first column equals `lumen` hold the remaining neighbours.
        while(line < len(self.lumen_lumen) and self.lumen_lumen[line][0] == lumen):
            neighbour_list.append([self.lumen_lumen[line][1], self.lumen_lumen[line][2]])
            event = 'link lumen %d to lumen %d removed' % (lumen, self.lumen_lumen[line][1])
            self.save_event(event)
            link = [self.lumen_lumen[line][0], self.lumen_lumen[line][1], self.lumen_lumen[line][2]]
            self.lumen_lumen.remove(link)
        return neighbour_list
def get_bridges(self, lumen):
"""
Gets the bridges that are directly connected to the lumen passed on
Input
-----
self : network object
Needs to be called by a class object
lumen : int
ID of a lumen
Returns
-------
neighbour_list : int list
ID of all lumen that are directly connected to the lumen passed on
"""
bridge_list = []
line = 0
# Going through the links in bridge_lumen.dat
while(line < len(self.bridge_lumen)):
if (self.bridge_lumen[line][1] == lumen):
bridge_list.append([self.bridge_lumen[line][0], self.bridge_lumen[line][2]])
event = 'link bridge %d to lumen %d removed' % (self.bridge_lumen[line][0], lumen)
self.save_event(event)
self.bridge_lumen.remove(self.bridge_lumen[line])
else: line += 1
return bridge_list
############################################################################################################################
#################################################### Creating Functions ###################################################
############################################################################################################################
def create_link(self, link):
"""
Creates a link between two lumen in lumen_lumen.dat
Input
-----
self : network object
Needs to be called by a class object
link : float array
[ID lumen1, ID lumen2, length]
Returns
-------
no return
"""
# no self-loops allowed
if(len(link) == 4 and link[0] != link[1]):
# Ensuring: lumen_1 < lumen_2
if(link[0] < link[2]):
lumen_1 = link[0]
lumen_2 = link[1]
else:
lumen_1 = link[1]
lumen_2 = link[0]
length = link[2]
line = 0
# Finding line in lumen_lumen.dat, to keep the sorting
while(line < len(self.lumen_lumen) and lumen_1 > self.lumen_lumen[line][0]): line += 1
if(line < len(self.lumen_lumen) - 1):
while(line < len(self.lumen_lumen) and lumen_2 > self.lumen_lumen[line][1] and lumen_1 == self.lumen_lumen[line][0]): line += 1
# Creating the link in lumen_lumen.dat
self.lumen_lumen.append([lumen_1,lumen_2, length])
self.lumen_lumen.sort()
event = 'link lumen %d to lumen %d created' % (lumen_1,lumen_2)
self.save_event(event)
return;
def create_bridge_lumen(self, link):
"""
Creates a link between a lumen and a bridge in bridge_lumen.dat
Input
-----
self : network object
Needs to be called by a class object
link : float array
[ID bridge, ID lumen, length]
Returns
-------
no return
"""
bridge = link[0]
lumen = link[1]
length = link[2]
line = 0
# Creating the link in bridge_lumen.dat
self.bridge_lumen.append(link)
self.bridge_lumen.sort()
event = 'link bridge %d to lumen %d created' % (bridge,lumen)
self.save_event(event)
return;
def create_bridge_bridge(self, link):
"""
Creates a link between two bridges in bridge_bridge.dat
Input
-----
self : network object
Needs to be called by a class object
link : float array
[ID bridge1, ID bridge2, length]
Returns
-------
no return
"""
if(link[0] == link[1]): return;
if(link[0] < link[1]):
bridge_1 = link[0]
bridge_2 = link[1]
else:
bridge_1 = link[1]
bridge_2 = link[0]
length = link[2]
line = 0
# Creating the link in bridge_bridge.dat
self.bridge_bridge.append([bridge_1,bridge_2, length])
self.bridge_bridge.sort()
event = 'link bridge %d to bridge %d created' % (bridge_1,bridge_2)
self.save_event(event)
return;
    def create_bridge(self, lumen, bridge, lumid):
        """
        Creates a new bridge connected with the lumen and bridges passed on
        Input
        -----
        self : network object
            Needs to be called by a class object
        lumen : int list
            [[lumen ID, length], [lumen ID, length],.....]
            lumen IDs to which the new bridge should be connected to
        bridge : int list
            [[bridge ID, length], [bridge ID, length],.....]
            bridge IDs to which the new bridge should be connected to
        lumid : int
            ID of the lumen this bridge replaces (recorded in bridgesconversion.txt)
        Returns
        -------
        no return
        """
        # Mapping file recording which lumen each new bridge originated from.
        bridge_conversionfile = os.path.join(self.network_folder,'bridgesconversion.txt')
        # ID of the new bridge
        bridge_number = self.num_bridges
        # Bridge ID counter, contains the ID of the next new bridge
        self.num_bridges += 1
        event = 'new bridge %d' % (bridge_number) + ' (' + str(lumid) + ')'
        self.save_event(event)
        line = 0
        lumen.sort()
        bridge.sort()
        # For each lumen that should be connected to the new bridge
        for i in range(len(lumen)):
            new_link = [bridge_number, lumen[i][0], lumen[i][1]]
            # Create link in bridge_lumen.dat
            self.create_bridge_lumen(new_link)
        # For each bridge that should be connected to the new bridge
        for i in range(len(bridge)):
            new_link = [bridge[i][0], bridge_number, bridge[i][1]]
            # Create link in bridge_bridge.dat
            self.create_bridge_bridge(new_link)
        # NOTE(review): the file handle returned by open() is never closed explicitly;
        # consider a with-statement if this is ever refactored.
        open(bridge_conversionfile, 'a').write(str(bridge_number) + ' ' + str(lumid)+ '\n')
        return;
############################################################################################################################
################################ Geometric Functions for area and Pressure ###############################################
############################################################################################################################
def set_theta(self):
"""
Sets the angle theta
Calculates the angle theta, angle between the lumen and the tube
Input
-----
self : network object
Needs to be called by a class object
Returns
-------
theta : float list
Theta value for each lumen
"""
theta = []
for i in range(len(self.alpha)):
#cos = (2*self.alpha[i]-(4*self.alpha[i]**2-self.delta[i]**2+1)/(4*self.alpha[i]))/self.delta[i] ## Old version, for assymmetric lumen
#theta.append(math.acos(cos))
theta.append(np.arccos(self.alpha[i]))
return theta;
def set_area_factor(self):
"""
Sets the area factor, needed to express the pressure in terms of the area instead of the curvature radius
Input
-----
self : network object
Needs to be called by a class object
Returns
-------
area_factor : float list
Area factor for each lumen
"""
area_factor = []
for i in range(len(self.alpha)):
area_factor.append(np.sqrt((2*self.theta[i]-np.sin(2*self.theta[i]))))
return area_factor;
def opening_radius(self, lumen):
"""
Calculates the length/2 parallel to the 'tube' where the membrane is not attached for a given lumen
Input
-----
lumen : int
ID of the lumen
Returns
-------
radius : float
Length/2 of the opening radius
"""
return np.sqrt(2*self.area[lumen]/(2*self.theta[lumen]-np.sin(2*self.theta[lumen])))*np.sin(self.theta[lumen])
def get_area(self, lumen):
"""
Calculates the area in one half of the lumen (for symmetric lumen)
Input
-----
lumen : int
ID of the lumen
Returns
-------
area : float
Area/2 of the lumen
"""
area = self.area[lumen]
return area
def pressure(self,lumen):
"""
Calculates the pressure inside the lumen (for symmetric lumen)
Input
-----
lumen : int
ID of the lumen
Returns
-------
pressure : float
Pressure of the lumen
"""
area = self.get_area(lumen)
# Avoid dividing by zero
if(area < 0.1 * self.tube_radius**2 ):
error = 'division by zero in pressure: lumen ID: %d' % (lumen)
self.save_error(error)
pressure = self.gamma_lumen[lumen]*self.area_factor[lumen]/np.sqrt(area)
return pressure
############################################################################################################################
################################################# Reading Functions ########################################################
############################################################################################################################
    def read_lumen_lumen(self, lumen_lumen_file):
        """
        Reading the file with links between two lumens
        Input
        -----
        lumen_lumen_file : str
            File path to file with the links between two lumens
        Returns
        -------
        lumen_lumen : float list [lumen1, lumen2, length]
            Information about the links between two lumens
        """
        if (os.path.getsize(lumen_lumen_file)>0): # If the file is not empty
            lumen_1, lumen_2 = np.loadtxt(lumen_lumen_file, dtype = int, usecols = [0,1], unpack = True)
            length = np.loadtxt(lumen_lumen_file, dtype = float, usecols = [2])
            # NOTE(review): with exactly one data row np.loadtxt returns 0-d arrays;
            # np.column_stack still produces a single row, so .tolist() stays well-formed.
            lumen_lumen = np.column_stack([lumen_1, lumen_2, length]).tolist()
        else:
            lumen_lumen = []
        return lumen_lumen
    def read_bridge_lumen(self, bridge_lumen_file):
        """
        Reading the file with links between bridge and lumen
        Input
        -----
        bridge_lumen_file : str
            File path to file with the links between bridge and lumen
        Returns
        -------
        bridge_lumen : float list [bridge, lumen, length]
            Information about the links between bridge and lumen
        num_bridges : int
            Number of existing bridges (max bridge ID + 1), 0 if there are none
        """
        with open(bridge_lumen_file, 'r') as f:
            lines = f.read().splitlines()
            last_line = lines[-1]
        if ('#' in last_line): # Only the header comment is present -> no data rows
            bridge_lumen = []
            num_bridges = 0 # number of existing bridges
        else:
            bridge, lumen = np.loadtxt(bridge_lumen_file, dtype = int, usecols = [0,1], unpack = True)
            length = np.loadtxt(bridge_lumen_file, dtype = float, usecols = [2])
            # NOTE(review): with exactly one data row np.loadtxt returns 0-d arrays; column_stack still yields one row.
            bridge_lumen = np.column_stack([bridge, lumen, length]).tolist()
            num_bridges = max(bridge)+1 # number of existing bridges
        return bridge_lumen, num_bridges
def read_bridge_bridge(self, bridge_bridge_file, num_bridges):
"""
Reading the file with links between two bridge
Input
-----
bridge_bridge_file : str
File path to file with the links between two bridge
Returns
-------
bridge_bridge : float list [bridge1, bridge2, length]
Information about the links between two bridge
num : int
Number of bridge_bridge links
"""
with open(bridge_bridge_file, 'r') as f:
lines = f.read().splitlines()
last_line = lines[-1]
if ('#' in last_line>0): # If the file is empty
bridge_bridge = []
num = num_bridges
else:
bridge1, bridge2 = np.loadtxt(bridge_bridge_file, dtype = int, usecols = [0,1], unpack = True)
length = np.loadtxt(bridge_bridge_file, dtype = float, usecols = [2])
bridge_bridge = np.column_stack([bridge1, bridge2, length]).tolist()
if (max(bridge2)+1 > num_bridges): num = max(bridge2)+1
return bridge_bridge, num
############################################################################################################################
################################################# Output functions #########################################################
############################################################################################################################
def save_event(self, event, start = False, out_path = ''):
"""
Saves each event in the output folder in the file event.dat
Events like a lumen disappearing, reconnections in the graph
Input
-----
event : str
Message of the event
start : boolean
True: File is created
False: the message is stored in the file
Returns
------
no return
"""
if(start):
header_event = '# Saves each event during the simulation; event is a disappearing lumen, graph reconnection \n'
self.file_event = os.path.join(out_path, 'event.dat')
fevent = open(self.file_event, 'w')
fevent.write(header_event)
fevent.close()
else:
fevent = open(self.file_event, 'a')
fevent.write('%.5f' % self.current_time)
fevent.write(' ')
fevent.write(event)
fevent.write('\n')
fevent.close()
return;
def save_error(self, error, start = False, out_path = ''):
"""
Saves errors in the output folder in the file error.dat
Errors like volume loss
Input
-----
error : string
Message of the event
start : boolean
True: File is created
False: the message is stored in the file
Returns
------
no return
"""
if(start):
header_error = '# Saves each warning like volume loss \n'
self.file_error = os.path.join(out_path, 'error.dat')
ferror = open(self.file_error, 'w')
ferror.write(header_error)
ferror.close()
else:
ferror = open(self.file_error, 'a')
ferror.write('%.5f' % self.current_time)
ferror.write(' ')
ferror.write(error)
ferror.write('\n')
ferror.close()
return;
def save_area(self, start = False, out_path = ''):
"""
Saves the volume evolution in the output folder in the file area.dat
Input
-----
start : boolean
True: File is created
False: the message is stored in the file
Returns
------
no return
"""
if(start):
header_volume = '# Saves the volume evolution of each lumen for the time step %f \n' %(self.time_step)
self.file_area = os.path.join(out_path, 'area.dat')
farea = open(self.file_area, 'w')
farea.write(header_volume)
farea.close()
self.t_old = 0
else:
farea = open(self.file_area, 'a')
farea.write('%.5f' % self.current_time)
farea.write(' ')
farea.write(' '.join(map(str, self.area)))
farea.write('\n')
farea.close()
return;
############################################################################################################################
################################################# Swelling functions #######################################################
############################################################################################################################
def swelling(self, lumen) :
"""
self.swelling(lumen)
Calculates the input flux for the area fo a given lumen, due to swelling.
Input
-----
lumen : int
Index of the lumen
"""
area = self.get_area(lumen)
theta = self.theta[lumen]
flux_swelling = self.swelling_rate * 4 * theta * np.sqrt(area)/ self.area_factor[lumen]
#print flux_swelling
return flux_swelling
| 38,971 | 10,163 |
# Illustrate upsampling in 2d
# Code from Jason Brownlee
# https://machinelearningmastery.com/generative_adversarial_networks/
import tensorflow as tf
from tensorflow import keras
from numpy import asarray
#from keras.models import Sequential
from tensorflow.keras.models import Sequential
#from keras.layers import UpSampling2D
from tensorflow.keras.layers import UpSampling2D
# Input sample: a 3x3 image. (The earlier 2x2 example assignment was dead code -
# it was immediately overwritten - and has been removed.)
X = asarray([[1, 2, 3],
             [4, 5, 6],
             [7, 8, 9]])
print(X)
nr = X.shape[0]
nc = X.shape[1]
# reshape input data into one sample with a single channel: (batch, rows, cols, channels)
X = X.reshape((1, nr, nc, 1))

# Nearest-neighbour upsampling: doubles each spatial dimension by repeating pixels.
model = Sequential()
model.add(UpSampling2D(input_shape=(nr, nc, 1)))  # nearest neighbor
yhat = model.predict(X)
yhat = yhat.reshape((2 * nr, 2 * nc))
print(yhat)

# Bilinear upsampling: interpolates between neighbouring pixels.
# BUG FIX: input_shape previously used (nc, nc, 1); the row dimension must be nr,
# which only coincidentally worked because the demo input is square.
model = Sequential()
model.add(UpSampling2D(input_shape=(nr, nc, 1), interpolation='bilinear'))
yhat = model.predict(X)
yhat = yhat.reshape((2 * nr, 2 * nc))
print(yhat)
__all__ = ['scaffold', 'command_set']
from gevent import monkey
monkey.patch_all()
import csv
import os
import sys
import time
import shutil
from typing import List
import gevent
from src.BusinessCentralLayer.setting import logger, DEFAULT_POWER, CHROMEDRIVER_PATH, \
REDIS_MASTER, SERVER_DIR_DATABASE_CACHE, SERVER_DIR_CLIENT_DEPORT, SERVER_PATH_DEPOT_VCS, SERVER_DIR_CACHE_BGPIC, \
REDIS_SLAVER_DDT, CRAWLER_SEQUENCE, terminal_echo, SERVER_DIR_DATABASE_LOG, SERVER_DIR_SSPANEL_MINING
# Maps each CLI command name to its (mojibake-encoded) help text shown to the user.
command_set = {
    # ---------------------------------------------
    # Deployment interface
    # ---------------------------------------------
    'deploy': "้จ็ฝฒ้กน็ฎ๏ผๅฎๆถไปปๅก/Flask ๅผๅฏไธๅฆๅๅณไบyaml้
็ฝฎๆไปถ๏ผ",
    # ---------------------------------------------
    # Debugging interface
    # ---------------------------------------------
    "clear": "ๆธ
็็ณป็ป่ฟ่ก็ผๅญ",
    "decouple": "็ซๅณๅค้ไธๆฌกsubs_ddt้พๆฅ่งฃ่ฆไปปๅก",
    "overdue": "็ซๅณๆง่กไธๆฌก่ฟๆถ้พๆฅๆธ
ๆดไปปๅก",
    "run": "[่ฏทไฝฟ็จspawnๅฝไปคๆฟไปฃ]็ซๅณๆง่กไธๆฌก้้ไปปๅก๏ผๅผบๅถไฝฟ็จๅ็จๅ ้๏ผ",
    "force_run": "[่ฏทไฝฟ็จspawnๅฝไปคๆฟไปฃ]ๅผบๅถๆง่ก้้ไปปๅก",
    "remain": "่ฏปๅๅฉไฝ่ฎข้
ๆฐ้",
    "ping": "ๆต่ฏๆฐๆฎๅบ่ฟๆฅ",
    "entropy": "ๆๅฐ้้้ๅ",
    "exile": "ๆง่ก้ๅ่ฟ็ปด่ๆฌ๏ผ้ซ้ฅฑๅๅผบ้ปๅกไปปๅก๏ผ",
    "spawn": "ๅนถๅๆง่กๆๆๅจๅ็้้ไปปๅก",
    "mining": "ๅฏๅจไธๆฌก้ๅฏนSTAFF host็SEOๅ
จ็ซๆๆไปปๅก",
    # ---------------------------------------------
    # Quarantined debugging interface
    # ---------------------------------------------
    # usage: parse a single subscription link: python main.py --parse https://domain/link/token?sub=3
    # usage: parse several subscription links: python main.py --parse https://domain/link/token?sub=3 https://domain/link/token2?sub=3
    # "--parse": """Parse the link; if it is a subscription link, count the nodes and ping-test the latency""",
    # ---------------------------------------------
    # Windows feature interface
    # ---------------------------------------------
    "panel": "[for Windows] ๆๅผๆก้ขๅ็ซฏ้ขๆฟ",
    "ash": "[for Windows] ไธ้ฎๆธ
ๆด่ฎข้
ๆฑ ,ๅนถๅฐๆๆ็ฑปๅ่ฎข้
่ฝฌๆขไธบClash yaml้
็ฝฎๆไปถ,"
           "ๅ็ฑURL Scheme่ชๅจๆๅผClashๅนถไธ่ฝฝ้
็ฝฎๆไปถ",
    # ---------------------------------------------
    # Invocation example
    # ---------------------------------------------
    "example": "python main.py ping"
}
class _ConfigQuarantine:
    """Ensures the project's directory/file tree and critical configuration exist before startup."""
    def __init__(self):
        # Directories/files the project requires at runtime.
        self.root = [
            SERVER_DIR_CLIENT_DEPORT, SERVER_PATH_DEPOT_VCS,
            SERVER_DIR_DATABASE_CACHE, SERVER_DIR_CACHE_BGPIC
        ]
        # Set to True when anything had to be (re)created; run() then asks for a restart.
        self.flag = False
    def set_up_file_tree(self, root):
        """
        Create any missing directories/files of the expected project tree:
        --/qinse/V2RaycSpider{verNum}
            --BCL
            --BLL
            --BVL
            --Database
                --client_depot
                    --vcs.csv
                --logs
                    --*error.log
                    --*runtime.log
                --temp_cache
                    --*AnyTempCacheFile...
                    --*CrawlFetchHistory.txt
                --fake_useragent_0.1.11.json
            --*tests
        """
        # Check whether any of the default paths are missing and initialize them if so.
        for child_ in root:
            if not os.path.exists(child_):
                self.flag = True
                try:
                    # Initialize a directory (a path without a file extension is treated as a folder)
                    if os.path.isdir(child_) or not os.path.splitext(child_)[-1]:
                        os.mkdir(child_)
                        logger.success(f"็ณป็ปๆไปถ้พๆฅๆๅ->{child_}")
                    # Initialize a file
                    else:
                        if child_ == SERVER_PATH_DEPOT_VCS:
                            try:
                                with open(child_, 'w', encoding='utf-8', newline='') as fpx:
                                    csv.writer(fpx).writerow(['version', 'title'])
                                logger.success(f"็ณป็ปๆไปถ้พๆฅๆๅ->{child_}")
                            except Exception as ep:
                                logger.exception(f"Exception{child_}{ep}")
                except Exception as ep:
                    logger.exception(ep)
    @staticmethod
    def check_config(call_driver: bool = False):
        """Validate critical runtime configuration; exits the process on fatal gaps."""
        chromedriver_not_found_error = "<ScaffoldGuider> ForceRun || ChromedriverNotFound ||" \
                                       "ๆชๆฅๆพๅฐchromedriver้ฉฑๅจ๏ผ่ฏทๆ นๆฎๆๆฏๆๆกฃๆญฃ็กฎ้
็ฝฎ\n" \
                                       ">>> https://github.com/QIN2DIM/V2RayCloudSpider"
        # if not all(SMTP_ACCOUNT.values()):
        #     logger.warning('The notification mailbox (SMTP_ACCOUNT) is not configured correctly')
        # if not SERVERCHAN_SCKEY:
        #     logger.warning("The ServerChan SCKEY is not configured correctly")
        if not all([REDIS_SLAVER_DDT.get("host"), REDIS_SLAVER_DDT.get("password")]):
            logger.warning('ๆจๆชๆญฃ็กฎ้
็ฝฎ<Redis-Slave> ๆฌ้กน็ฎ่ตๆบๆท่ดๅ่ฝๆ ๆณไฝฟ็จ๏ผไฝไธๅฝฑๅ็ณป็ปๆญฃๅธธ่ฟ่กใ')
        if not all([REDIS_MASTER.get("host"), REDIS_MASTER.get("password")]):
            logger.error("ๆจๆชๆญฃ็กฎ้
็ฝฎ<Redis-Master> ๆญค้
็ฝฎไธบโไบๅฝฉๅงฌโ็ๆ ธๅฟ็ปไปถ๏ผ่ฏท้
็ฝฎๅ้ๅฏ้กน็ฎ๏ผ")
            sys.exit()
        # Only require chromedriver when the requested interface actually drives a browser.
        if call_driver and not os.path.exists(CHROMEDRIVER_PATH):
            logger.error(chromedriver_not_found_error)
            sys.exit()
    def run(self):
        """Run the file-tree check and the config check; request a restart if anything was created."""
        try:
            if [cq for cq in reversed(self.root) if not os.path.exists(cq)]:
                logger.warning('็ณป็ปๆไปถๆฎ็ผบ๏ผ')
                logger.debug("ๅฏๅจ<ๅทฅ็จ้ๆ>ๆจกๅ...")
                self.set_up_file_tree(self.root)
            self.check_config()
        finally:
            if self.flag:
                logger.success(">>> ่ฟ่ก็ฏๅข้พๆฅๅฎๆ๏ผ่ฏท้ๅฏ้กน็ฎ")
                logger.warning(">>> ๆ้ๆจๆญฃ็กฎ้
็ฝฎChromeๅๅฏนๅบ็ๆฌ็ChromeDriver")
                sys.exit()
_ConfigQuarantine().run()
class _ScaffoldGuider:
    """Dispatches scaffold CLI commands (deploy/ping/parse/...) to their
    implementations; queue-able commands are executed concurrently with gevent."""
    # __slots__ = list(command_set.keys())

    def __init__(self):
        # Public scaffold entry points, discovered by naming convention.
        self.scaffold_ruler = [i for i in self.__dir__() if i.startswith('_scaffold_')]
        # Command id -> handler, for commands that may run as greenlets.
        self.command2solution = {
            'deploy': self._scaffold_deploy,
            'decouple': self._scaffold_decouple,
            'overdue': self._scaffold_overdue,
            'spawn': self._scaffold_spawn,
            # 'run': self._scaffold_run,
            # 'force_run': self._scaffold_force_run,
            'remain': self._scaffold_remain,
            'ping': self._scaffold_ping,
            'panel': self._scaffold_panel,
            'entropy': self._scaffold_entropy,
            'ash': self._scaffold_ash,
            'mining': self._scaffold_mining,
        }

    def startup(self, driver_command_set: List[str]):
        """
        Entry point; intended for the main process only.
        @param driver_command_set: argv-style list; when no command is given it
            contains a single element (the launch path) and the menu is shown.
        @return:
        """
        # logger.info(f">>> {' '.join(driver_command_set)}")
        # -------------------------------
        # TODO Priority 0: pre-process the command set
        # -------------------------------
        # CommandId or List[CommandId]
        driver_command: List[str] = []
        # No command entered -> print the scaffold menu.
        if len(driver_command_set) == 1:
            print("\n".join([f">>> {menu[0].ljust(20, '-')}|| {menu[-1]}" for menu in command_set.items()]))
            return True
        # One immediate command -> translate it.
        if len(driver_command_set) == 2:
            driver_command = [driver_command_set[-1].lower(), ]
        # Several commands -> translate the de-duplicated set.
        elif len(driver_command_set) > 2:
            driver_command = list({command.lower() for command in driver_command_set[1:]})
        # Catch any unexpected state.
        if not isinstance(driver_command, list):
            return True
        # -------------------------------
        # TODO Priority 1: parse runtime parameters
        # -------------------------------
        # TODO --help menu (related features still being completed)
        # When --help is present the remaining commands are described, not run.
        if '--help' in driver_command:
            logger.info(">>>GuiderHelp || ๅธฎๅฉ่ๅ")
            driver_command.remove("--help")
            for command_ in driver_command:
                introduction = command_set.get(command_)
                if introduction:
                    print(f"> {command_.ljust(20, '-')}|| {introduction}")
                else:
                    print(f"> {command_}ๆไปคไธๅญๅจ")
            return True
        # Smart channel: parse every subscription-like target concurrently.
        if '--parse' in driver_command:
            driver_command.remove('--parse')
            task_list = []
            for url_ in reversed(driver_command):
                if url_.startswith("http") or url_.startswith("ssr") or url_.startswith("vmess"):
                    task_list.append(gevent.spawn(self._scaffold_parse, url=url_))
            gevent.joinall(task_list)
            return True
        # Clear system caches.
        if 'clear' in driver_command:
            driver_command.remove('clear')
            self._scaffold_clear()
            return True
        # -------------------------------
        # TODO Priority 2: run single-thread commands
        # -------------------------------
        # Coroutine task queue.
        task_list = []
        # Translate each remaining command into a greenlet.
        while driver_command.__len__() > 0:
            _pending_command = driver_command.pop()
            try:
                task_list.append(gevent.spawn(self.command2solution[_pending_command]))
            except KeyError as e:
                logger.warning(f'่ๆๆถๆๆชๆๆๆไปค<{_pending_command}> {e}')
        # Run the collected commands concurrently.
        gevent.joinall(task_list)
        # -------------------------------
        # TODO Priority 3: custom deployment (blocking thread)
        # -------------------------------
        # NOTE(review): driver_command has been fully drained by the loop
        # above, so this branch can never fire -- confirm intent.
        if 'deploy' in driver_command:
            self._scaffold_deploy()

    @staticmethod
    def _scaffold_deploy():
        """Launch the main deployment process."""
        # logger.info("<ScaffoldGuider> Deploy || MainProcess")
        from src.BusinessCentralLayer.middleware.interface_io import SystemInterface
        SystemInterface.run(deploy_=True)

    @staticmethod
    def _scaffold_clear():
        """Interactively wipe run logs and cache directories."""
        _permission = {
            "logs": input(terminal_echo("ๆฏๅฆๆธ
้คๆๆ่ฟ่กๆฅๅฟ[y]?", 2)),
            "cache": input(terminal_echo("ๆฏๅฆๆธ
้คๆๆ่ฟ่ก็ผๅญ[y]?", 2))
        }
        # Clear logs ~/database/logs (only rotated files, i.e. names with >2 dots).
        if os.path.exists(SERVER_DIR_DATABASE_LOG) and _permission['logs'].startswith("y"):
            history_logs = os.listdir(SERVER_DIR_DATABASE_LOG)
            for _log_file in history_logs:
                if len(_log_file.split('.')) > 2:
                    _log_path = os.path.join(SERVER_DIR_DATABASE_LOG, _log_file)
                    os.remove(_log_path)
                    terminal_echo(f"ๆธ
้ค่ฟ่กๆฅๅฟ-->{_log_path}", 3)
        # Clear runtime caches ~/database/
        if _permission['cache'].startswith("y"):
            cache_blocks = {
                # ~/database/temp_cache/
                SERVER_DIR_DATABASE_CACHE,
                # ~/database/staff_hosts/
                SERVER_DIR_SSPANEL_MINING,
            }
            for block in cache_blocks:
                # Scan entries
                if os.path.exists(block):
                    _files = [os.path.join(block, i) for i in os.listdir(block)]
                    # Remove files; recreate wiped sub-directories.
                    for _file in _files:
                        if os.path.isfile(_file):
                            os.remove(_file)
                        else:
                            shutil.rmtree(_file)
                            os.mkdir(_file)
                        terminal_echo(f"ๆธ
้ค่ฟ่ก็ผๅญ-->{_file}", 3)
        terminal_echo("็ณป็ป็ผๅญๆไปถๆธ
็ๅฎๆฏ", 1)

    @staticmethod
    def _scaffold_decouple():
        """Remove dead/duplicate links from the subscribe pool."""
        logger.info("<ScaffoldGuider> Decouple || General startup")
        from src.BusinessLogicLayer.plugins.accelerator import SubscribesCleaner
        SubscribesCleaner(debug=True).interface(power=DEFAULT_POWER)

    @staticmethod
    def _scaffold_overdue():
        """Run the Redis DDT sweep over timed-out subscriptions."""
        logger.info("<ScaffoldGuider> Overdue || Redis DDT")
        from src.BusinessCentralLayer.middleware.interface_io import SystemInterface
        SystemInterface.ddt()

    @staticmethod
    def _scaffold_spawn():
        """Run all registered collectors concurrently (needs chromedriver)."""
        _ConfigQuarantine.check_config(call_driver=True)
        logger.info("<ScaffoldGuider> Spawn || MainCollector")
        from src.BusinessLogicLayer.cluster.slavers import __entropy__
        from src.BusinessLogicLayer.plugins.accelerator import booster
        booster(docker=__entropy__, silence=True, power=DEFAULT_POWER, assault=True)

    @staticmethod
    def _scaffold_run():
        """Run the main collector without deployment."""
        _ConfigQuarantine.check_config(call_driver=True)
        logger.info("<ScaffoldGuider> Run || MainCollector")
        from src.BusinessCentralLayer.middleware.interface_io import SystemInterface
        SystemInterface.run(deploy_=False)

    @staticmethod
    def _scaffold_force_run():
        """Force-release the crawler task queue and run it."""
        _ConfigQuarantine.check_config(call_driver=True)
        logger.info("<ScaffoldGuider> ForceRun || MainCollector")
        from src.BusinessLogicLayer.plugins.accelerator import ForceRunRelease
        ForceRunRelease(task_docker=CRAWLER_SEQUENCE).interface()

    @staticmethod
    def _scaffold_remain():
        """Print remaining (alive) subscriptions grouped per channel."""
        from src.BusinessCentralLayer.middleware.subscribe_io import select_subs_to_admin
        tracer = [f"{tag[0]}\n้้็ฑปๅ๏ผ{info_[0]}\nๅญๆดปๆฐ้๏ผ{tag[-1]}" for info_ in
                  select_subs_to_admin(select_netloc=None, _debug=False)['info'].items() for tag in info_[-1].items()]
        for i, tag in enumerate(tracer):
            print(f">>> [{i + 1}/{tracer.__len__()}]{tag}")

    @staticmethod
    def _scaffold_ping():
        """Test the Redis database connection."""
        from src.BusinessCentralLayer.middleware.redis_io import RedisClient
        logger.info(f"<ScaffoldGuider> Ping || {RedisClient().test()}")

    @staticmethod
    def _scaffold_parse(url, _unused_mode: str = "subscribe"):
        """Parse one subscription link: dump its nodes to a cache file and
        print details of the first node."""
        logger.info(f">>> PARSE --> {url}")
        from src.BusinessLogicLayer.plugins.accelerator import cleaner
        # Ensure the cache directory exists.
        if not os.path.exists(SERVER_DIR_DATABASE_CACHE):
            os.mkdir(SERVER_DIR_DATABASE_CACHE)
        # Call the API to parse the link.
        result = cleaner.subs2node(url)
        if result and isinstance(result, dict):
            _, info, nodes = result.values()
            # Node count minus trailing informational entries (floored at 0).
            _unused_node_num = nodes.__len__() - 2 if nodes.__len__() - 2 >= 0 else 0
            token_ = '' if info.get('token') is None else info.get('token')
            # Cache parsed nodes, one per line.
            cache_sub2node = os.path.join(SERVER_DIR_DATABASE_CACHE, f'sub2node_{token_}.txt')
            with open(cache_sub2node, 'w', encoding="utf8") as f:
                for node in nodes:
                    f.write(f"{node}\n")
            # Auto-open the cache file; only sensible when parsing one link.
            # os.startfile(cache_sub2node)
            cleaner.node2detail(nodes[0])
        else:
            return False

    @staticmethod
    def _scaffold_panel():
        """[Windows] Open the desktop front-end panel."""
        from src.BusinessCentralLayer.middleware.interface_io import SystemInterface
        SystemInterface.system_panel()

    @staticmethod
    def _scaffold_entropy(_debug=False):
        """Print the registered collector queue (name, URL, lifetime, channels)."""
        from src.BusinessLogicLayer.cluster.slavers import __entropy__
        for i, host_ in enumerate(__entropy__):
            print(f">>> [{i + 1}/{__entropy__.__len__()}]{host_['name']}")
            print(f"ๆณจๅ้พๆฅ: {host_['register_url']}")
            print(f"ๅญๆดปๅจๆ: {host_['life_cycle']}ๅคฉ")
            print(f"้้็ฑปๅ: {'&'.join([f'{j[0].lower()}' for j in host_['hyper_params'].items() if j[-1]])}\n")

    @staticmethod
    def _scaffold_exile(task_sequential=4):
        """Run the high-saturation maintenance pipeline (entropy -> decouple ->
        overdue), then print the remaining state."""
        logger.debug(f"<ScaffoldGuider> Exile[0/{task_sequential}] || Running scaffold exile...")
        time.sleep(0.3)
        # task1: check the task queue
        logger.debug(f"<ScaffoldGuider> Exile[1/{task_sequential}] || Checking the task queue...")
        time.sleep(0.3)
        _ScaffoldGuider._scaffold_entropy(_debug=True)
        # logger.success(f">>> [Mission Completed] || entropy")
        # task2: decouple
        logger.debug(f"<ScaffoldGuider> Exile[2/{task_sequential}] || Cleaning the subscribe pool...")
        time.sleep(0.3)
        _ScaffoldGuider._scaffold_decouple()
        # logger.success(f">>> [Mission Completed] || decouple")
        # task3: overdue
        logger.debug(f"<ScaffoldGuider> Exile[3/{task_sequential}] || Cleaning timed out subscribes...")
        time.sleep(0.3)
        _ScaffoldGuider._scaffold_overdue()
        # logger.success(">>> [Mission Completed] || overdue")
        # finally: print task-queue, remaining subscribes
        logger.debug(f"<ScaffoldGuider> Exile[{task_sequential}/{task_sequential}] || Outputting debug data...")
        _ScaffoldGuider._scaffold_entropy()
        _ScaffoldGuider._scaffold_remain()
        logger.success("<ScaffoldGuider> Exile[Mission Completed] || exile")

    @staticmethod
    @logger.catch()
    def _scaffold_ash():
        """One-click Clash subscription bootstrap (Windows only)."""
        from src.BusinessLogicLayer.apis import scaffold_api
        logger.info("<ScaffoldGuider> ash | Clash่ฎข้
ๅ ไธ้ฎ็ๆ่ๆฌ")
        # --------------------------------------------------
        # Parameter cleaning
        # --------------------------------------------------
        if 'win' not in sys.platform:
            return
        # --------------------------------------------------
        # Run the script
        # --------------------------------------------------
        return scaffold_api.ash(debug=True, decouple=True)

    @staticmethod
    def _scaffold_mining():
        """
        "Overseas" servers: run directly.
        Mainland hosts: enable a proxy before running.
        :return:
        """
        from src.BusinessLogicLayer.apis.staff_mining import staff_api
        use_collector = staff_api.is_first_run()
        classify_dir, staff_info = staff_api.go(
            debug=False,
            silence=True,
            power=os.cpu_count() * 2,
            identity_recaptcha=False,
            use_collector=use_collector,
            use_checker=True,
            use_generator=False,
        )
        staff_api.refresh_cache(mode='de-dup')
        print(f"\n\nSTAFF INFO\n{'_' * 32}")
        for element in staff_info.items():
            for i, tag in enumerate(element[-1]):
                print(f">>> [{i + 1}/{len(element[-1])}]{element[0]}: {tag}")
        print(f">>> ๆไปถๅฏผๅบ็ฎๅฝ: {classify_dir}")


scaffold = _ScaffoldGuider()
| 17,334 | 6,370 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2016 Daniel Estevez <daniel@destevez.net>.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org>
#
import numpy
from gnuradio import gr
import pmt
import array
class swap_header(gr.basic_block):
    """GNU Radio message block that reverses the first four bytes (the
    header) of each incoming u8vector PDU and republishes it on 'out'.

    Messages that are not u8vectors are reported and dropped.
    """

    def __init__(self):
        gr.basic_block.__init__(self,
            # NOTE(review): block name "swap_crc" differs from the class
            # name swap_header -- looks copy-pasted; confirm before renaming,
            # as flowgraphs may reference it.
            name="swap_crc",
            in_sig=[],
            out_sig=[])

        self.message_port_register_in(pmt.intern('in'))
        self.set_msg_handler(pmt.intern('in'), self.handle_msg)
        self.message_port_register_out(pmt.intern('out'))

    def handle_msg(self, msg_pmt):
        """Reverse bytes 0-3 of the PDU payload and publish the result."""
        msg = pmt.cdr(msg_pmt)
        if not pmt.is_u8vector(msg):
            # print() call form works on both Python 2 and 3
            # (the bare print statement was Python-2-only).
            print("[ERROR] Received invalid message type. Expected u8vector")
            return
        packet = array.array("B", pmt.u8vector_elements(msg))
        header = packet[:4]
        header.reverse()
        packet = header + packet[4:]
        msg_pmt = pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(len(packet), bytearray(packet)))
        self.message_port_pub(pmt.intern('out'), msg_pmt)
| 2,336 | 786 |
#!/usr/bin/python
#
# Start dfplayer.
import argparse
import os
import shutil
import subprocess
import sys
import time
_PROJ_DIR = os.path.dirname(__file__)
def main():
    """Configure the environment and launch the dfplayer binary (optionally
    under gdb). In --prod mode, delay start-up by 30s and sleep for an hour
    after exit so a supervisor does not respawn it in a tight loop."""
    os.chdir(_PROJ_DIR)
    os.environ['LD_LIBRARY_PATH'] = '/lib:/usr/lib:/usr/local/lib'

    arg_parser = argparse.ArgumentParser(description='Start player')
    arg_parser.add_argument('--gdb', action='store_true')
    arg_parser.add_argument('--no-reset', action='store_true')
    arg_parser.add_argument('--disable-net', action='store_true')
    arg_parser.add_argument('--mpd', action='store_true')
    arg_parser.add_argument('--disable-fin', action='store_true')
    arg_parser.add_argument('--max', action='store_true')
    arg_parser.add_argument('--no-sound', action='store_true')
    arg_parser.add_argument('--no-sound-config', action='store_true')
    arg_parser.add_argument('--prod', action='store_true')
    arg_parser.add_argument('--enable-kinect', action='store_true')
    args = arg_parser.parse_args()

    if args.prod:
        print 'dfplayer is sleeping for 30 seconds before startup'
        time.sleep(30)

    # Install the ALSA config for the current user unless sound is disabled.
    if not args.no_sound_config and not args.no_sound:
        shutil.copyfile(
            'dfplayer/asoundrc.sample', '/home/' + os.getlogin() + '/.asoundrc')

    # Translate parsed flags back into the player's own CLI arguments.
    params = ['env/bin/dfplayer', '--listen=0.0.0.0:8081']
    if args.no_reset:
        params.append('--no-reset')
    if args.no_sound:
        params.append('--no-sound')
    if args.disable_net:
        params.append('--disable-net')
    if args.disable_fin:
        params.append('--disable-fin')
    if args.enable_kinect or args.prod:
        params.append('--enable-kinect')
    if args.mpd:
        params.append('--mpd')
    if args.max or args.prod:
        params.append('--max')

    try:
        if args.gdb:
            subprocess.check_call(
                ['gdb', '-ex', 'run', '--args', 'env/bin/python'] + params)
            #['gdb', '--args', 'env/bin/python'] + params)
        else:
            subprocess.check_call(params)
    except KeyboardInterrupt:
        print 'Player is exiting via KeyboardInterrupt'
    except Exception, err:
        # NOTE(review): `err` is unused; only the exception type is printed.
        print sys.exc_info()[0]

    if args.prod:
        print 'dfplayer has exited and start.py script is now sleeping'
        time.sleep(3600)


main()
| 2,149 | 776 |
def attack():
    """Perform the unit's basic attack. Placeholder -- not yet implemented."""
    pass
def defend():
    """Take a defensive stance. Placeholder -- not yet implemented."""
    pass
def pass_turn():
    """Skip the current turn. Placeholder -- not yet implemented."""
    pass
def use_ability_One(kit):
    """Use the first ability of *kit*. Placeholder -- not yet implemented."""
    pass
def use_ability_Two(kit):
    """Use the second ability of *kit*. Placeholder -- not yet implemented."""
    pass
def end_Of_Battle():
    """Run end-of-battle cleanup. Placeholder -- not yet implemented."""
    pass
#!/usr/bin/env python3
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A demo of the Google CloudSpeech recognizer."""
import aiy.audio
import aiy.cloudspeech
import aiy.voicehat
import aiy.i18n
import aiy.audio
CONFIRM_SOUND_PATH = '/home/pi/Music/R2D2/R2_Understood.wav'
CONFUSED_SOUND_PATH = '/home/pi/Music/R2D2/R2_Confused.wav'
UNRECOGNISED_SOUND_PATH = '/home/pi/Music/R2D2/R2_FastBip.wav'
def main():
    """Voice-control loop: on button press, wait for the hotword ("R2"),
    then recognise one French command and drive the LED / audio output."""
    status_ui = aiy.voicehat.get_status_ui()
    status_ui.status('starting')
    aiy.i18n.set_language_code("fr-FR")
    recognizer = aiy.cloudspeech.get_recognizer()
    # Phrase hints improve recognition accuracy for the expected commands.
    recognizer.expect_phrase('allumer le feu')
    recognizer.expect_phrase('รฉteindre')
    recognizer.expect_phrase('clignotter')
    recognizer.expect_phrase('cuir')
    recognizer.expect_phrase('R2')
    button = aiy.voicehat.get_button()
    led = aiy.voicehat.get_led()
    aiy.audio.get_recorder().start()

    while True:
        status_ui.status('ready')
        print('Press the button and speak')
        button.wait_for_press()
        aiy.voicehat.get_status_ui().set_trigger_sound_wave('/home/pi/Music/R2D2/hotword.wav')
        status_ui.status('listening')
        WaitingForHotword = True
        # Inner loop: ignore speech until the hotword is heard.
        while WaitingForHotword == True:
            print('Say the hotword to start')
            hotword = recognizer.recognize()
            if not hotword:
                print('I recognised nothing ... looping')
            else:
                if ('R2') in hotword:
                    WaitingForHotword = False
                    print('Playing a test sound...')
                    aiy.audio.play_wave(CONFIRM_SOUND_PATH)
                    print('Listening...')
                    text = recognizer.recognize()
                    if not text:
                        print('Sorry, I did not hear you.')
                        aiy.audio.play_wave(CONFUSED_SOUND_PATH)
                    else:
                        # Re-arm the hotword gate for the next command.
                        WaitingForHotword = True
                        print('You said "', text, '"')
                        if 'allumer le feu' in text:
                            led.set_state(aiy.voicehat.LED.ON)
                        elif 'รฉteindre' in text:
                            led.set_state(aiy.voicehat.LED.OFF)
                        elif 'clignotter' in text:
                            led.set_state(aiy.voicehat.LED.BLINK)
                        elif 'cuir' in text:
                            # led.set_state(aiy.voicehat.LED.BLINK)
                            aiy.audio.say('cuir cuir cuir moustache')
                        elif 'goodbye' in text:
                            break
                        else: aiy.audio.play_wave(UNRECOGNISED_SOUND_PATH)
                else: print('Hotword not detected .... looping')
if __name__ == '__main__':
main()
| 3,368 | 1,050 |
import argparse
import models
# Names of all lowercase, public, callable attributes exported by ``models``
# -- i.e. the model factory functions selectable from the command line.
model_names = sorted(name for name in models.__dict__
                     if name.islower() and not name.startswith("__")
                     and callable(models.__dict__[name]))
class BaseOptions():
    """Common command-line options shared by the experiment entry points.

    Call :meth:`initialize` once to register all arguments on ``self.parser``;
    callers then use ``self.parser`` to parse argv.
    """

    def __init__(self):
        self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        self.initialized = False

    def initialize(self):
        """Register all base arguments. Sets ``self.initialized`` to True."""
        add = self.parser.add_argument  # local alias keeps the long list readable

        # experiment specifics
        add('--name', type=str, default=None,
            help='name of the experiment. It decides where to store samples and models')
        add('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU')
        add('--model', type=str, default='errnet_model', help='chooses which model to use.')
        add('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here')
        add('--resume', '-r', action='store_true', help='resume from checkpoint')
        # fixed: help string previously had an unbalanced '(default: latest'
        add('--resume_epoch', '-re', type=int, default=None,
            help='checkpoint to use. (default: latest)')
        add('--seed', type=int, default=2018, help='random seed to use. Default=2018')
        add('--supp_eval', action='store_true', help='supplementary evaluation')
        # NOTE(review): help text looks copy-pasted from --supp_eval -- confirm
        add('--start_now', action='store_true', help='supplementary evaluation')
        add('--testr', action='store_true', help='test for reflections')
        add('--select', type=str, default=None)

        # for setting input
        add('--serial_batches', action='store_true',
            help='if true, takes images in order to make batches, otherwise takes them randomly')
        add('--nThreads', default=8, type=int, help='# threads for loading data')
        add('--max_dataset_size', type=int, default=None,
            help='Maximum number of samples allowed per dataset. If the dataset directory contains more than max_dataset_size, only a subset is loaded.')

        # for display
        add('--no-log', action='store_true', help='disable tf logger?')
        add('--no-verbose', action='store_true', help='disable verbose info?')
        add('--display_winsize', type=int, default=256, help='display window size')
        add('--display_port', type=int, default=8097, help='visdom port of the web display')
        add('--display_id', type=int, default=0,
            help='window id of the web display (use 0 to disable visdom)')
        add('--display_single_pane_ncols', type=int, default=0,
            help='if positive, display all images in a single visdom web panel with certain number of images per row.')

        self.initialized = True
| 3,159 | 878 |
from flask import Flask, Response
from flask_basicauth import BasicAuth
from flask_cors import CORS, cross_origin
import os
#from flask_admin import Admin,AdminIndexView
#from flask_admin.contrib.sqla import ModelView
from flask_sqlalchemy import SQLAlchemy as _BaseSQLAlchemy
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from werkzeug.exceptions import HTTPException
from flask_login import LoginManager
from itsdangerous import URLSafeSerializer
# import psycopg2
# import pymysql
# import logging
# import warnings
# warnings.filterwarnings("ignore")
# Initializing Flask App
app = Flask(__name__)
# NOTE(review): hard-coded secret key -- move to config/env for production.
app.secret_key="Vampire"
# This video demonstrates why we use CORS in our Flask App - https://www.youtube.com/watch?v=vWl5XcvQBx0
CORS(app)
app.config.from_object("config.DevelopmentConfig")
class SQLAlchemy(_BaseSQLAlchemy):
    """
    This class is defined so that we can set "pool_pre_ping" to True.
    pool_pre_ping is a boolean flag, which when set to True,
    will enable the connection pool 'pre-ping' feature
    that tests connections for liveness upon each checkout.
    This prevents from dropping of database connection with our app.
    This class inherits the original SQLAlchemy class,
    and nothing else is changed except pool_pre_ping flag
    https://docs.sqlalchemy.org/en/13/core/pooling.html#dealing-with-disconnects
    https://github.com/pallets/flask-sqlalchemy/issues/589
    """
    def apply_pool_defaults(self, app, options):
        # NOTE(review): newer flask_sqlalchemy releases expect this hook to
        # return the options dict -- confirm against the pinned version.
        super(SQLAlchemy, self).apply_pool_defaults(app, options)
        options["pool_pre_ping"] = True
# Creating and Initializing db object of SQLAlchemy class
db = SQLAlchemy(app)
db.init_app(app)
migrate = Migrate(app, db, render_as_batch=True)
with app.app_context():
    # SQLite's limited ALTER TABLE support requires batch mode for migrations.
    if db.engine.url.drivername == 'sqlite':
        migrate.init_app(app, db, render_as_batch=True)
    else:
        migrate.init_app(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
# Creating serializer object of URLSafeSerializer class for serializing session_token
serializer = URLSafeSerializer(app.secret_key)
# Here we set session_token as our user_loader.
# Imported late: the views modules import `app`/`db` from this module,
# so importing them earlier would create a circular import.
from bookstore.client.views import client
from bookstore.admin.views import admin
app.register_blueprint(client)
app.register_blueprint(admin)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management import BaseCommand
from cobl.lexicon.models import LanguageList, \
MeaningList, \
Meaning, \
Lexeme, \
CognateClass, \
CognateJudgement, \
LanguageClade, \
Clade
class Command(BaseCommand):
    help = "Computes statistics for https://github.com/lingdb/CoBL/issues/236"\
           "\nPossible parameters are: {1, 2, 3} for task number."

    def add_arguments(self, parser):
        parser.add_argument('task', type=int)

    missing_args_message = "Please provide a task number of {1,2,3}."

    def handle(self, *args, **options):
        """Dispatch to the requested task (1, 2 or 3) over the 'Current'
        language list and the 'Jena200' meaning list."""
        # Data to work with:
        current = LanguageList.objects.get(name='Current')
        jena200 = MeaningList.objects.get(name='Jena200')
        languageIds = set(current.languages.values_list('id', flat=True))
        meaningIds = jena200.meanings.values_list('id', flat=True)
        lexemeIds = Lexeme.objects.filter(
            language_id__in=languageIds,
            meaning_id__in=meaningIds).values_list('id', flat=True)
        cognateClassIds = CognateJudgement.objects.filter(
            lexeme_id__in=lexemeIds).values_list(
            'cognate_class_id', flat=True)
        cognateClasses = CognateClass.objects.filter(
            id__in=cognateClassIds,
            root_form='').all()  # Only without root_form is wanted.
        if options['task'] == 1:
            # Task 1: cognate sets spanning more than two clades.
            self.stdout.write('Task 1')
            self.report(self.compute(2, cognateClasses,
                                     meaningIds, languageIds), meaningIds)
        elif options['task'] == 2:
            # Task 2: sets spanning exactly two clades
            # (more than one, minus those already found by task 1).
            self.stdout.write('Task 2')
            task1 = self.compute(2, cognateClasses, meaningIds, languageIds)
            task1CCIds = set([c.id for c in task1 if c is not None])
            self.report([c for c in self.compute(
                1, cognateClasses, meaningIds, languageIds)
                if c is not None and c.id not in task1CCIds], meaningIds)
        elif options['task'] == 3:
            # Task 3: per-clade markdown checklists of single-clade sets.
            self.stdout.write('Task 3')
            unwantedCognateClassIds = set(
                [c.id for c in self.compute(1, cognateClasses,
                                            meaningIds,
                                            languageIds) if c is not None])
            cIdcladeMap = {c.id: c for c in Clade.objects.exclude(
                cladeLevel0=0).all()}
            # Computing ._cognateClasses for each clade:
            for _, clade in cIdcladeMap.items():
                inCladeLanguageIds = set(LanguageClade.objects.filter(
                    clade=clade).values_list('language_id', flat=True))
                lexemes = Lexeme.objects.filter(
                    language_id__in=languageIds & inCladeLanguageIds,
                    meaning_id__in=meaningIds,
                    not_swadesh_term=False).all()
                cognateClassIds = set(CognateJudgement.objects.filter(
                    lexeme__in=lexemes).values_list(
                    'cognate_class_id', flat=True))
                clade._cognateClassIds = set(CognateClass.objects.filter(
                    id__in=cognateClassIds - unwantedCognateClassIds,
                    root_form='').order_by('id').values_list('id', flat=True))
            # Removing cognate class IDs we don't want:
            for _, clade in cIdcladeMap.items():
                cogIdCounts = {cId: 0 for cId in clade._cognateClassIds}
                childIds = clade.queryChildren().values_list('id', flat=True)
                for childId in childIds:
                    child = cIdcladeMap[childId]
                    for cId in child._cognateClassIds:
                        if cId in cogIdCounts:
                            cogIdCounts[cId] += 1
                # Setting ._cognateClassIds for current clade:
                clade._cognateClassIds = set([cId for cId, count
                                              in cogIdCounts.items()
                                              if count != 1])
                # Updating children:
                for childId in childIds:
                    child = cIdcladeMap[childId]
                    child._cognateClassIds = child._cognateClassIds & \
                        set([cId for cId, count
                             in cogIdCounts.items()
                             if count == 1])
            # Creating .txt files:
            for _, clade in cIdcladeMap.items():
                # Grouping by meaning:
                meaningMarkdowns = {}
                for c in clade._cognateClassIds:
                    s = '- [ ] cog. class '\
                        '[%s](http://cobl.info/cognate/%s/)' % (c, c)
                    meanings = Meaning.objects.filter(
                        lexeme__cognate_class=c,
                        lexeme__language_id__in=languageIds,
                        lexeme__not_swadesh_term=False,
                        id__in=meaningIds).distinct().all()
                    s += ''.join([
                        ' = meaning [%s](http://cobl.info/meaning/%s/)' %
                        (m.gloss, m.gloss) for m in meanings])
                    for m in meanings:
                        if m.gloss not in meaningMarkdowns:
                            meaningMarkdowns[m.gloss] = []
                        meaningMarkdowns[m.gloss].append(s)
                # Composing markdown:
                markdown = []
                for k in sorted(meaningMarkdowns.keys()):
                    markdown += meaningMarkdowns[k]
                # Writing if content:
                if len(markdown) > 0:
                    fname = '/tmp/%s.txt' % clade.taxonsetName
                    self.stdout.write("Writing file '%s'." % fname)
                    with open(fname, 'w') as f:
                        f.write("\n".join(markdown)+"\n")

    def compute(self, lowerBranchBound,
                cognateClasses, meaningIds, languageIds):
        """Yield cognate classes attested in more than *lowerBranchBound*
        distinct clade combinations; a trailing None marks end-of-generator."""
        # The computation we want to perform twice
        for cognateClass in cognateClasses:
            lexemeIds = CognateJudgement.objects.filter(
                cognate_class_id=cognateClass.id).values_list(
                'lexeme_id', flat=True)
            # Need to investigate lexemes:
            cladeNamesSet = set()
            for lexeme in Lexeme.objects.filter(
                    id__in=lexemeIds,
                    language_id__in=languageIds,
                    meaning_id__in=meaningIds).all():
                # Need to investigate clades:
                clades = Clade.objects.filter(
                    id__in=LanguageClade.objects.filter(
                        language_id=lexeme.language_id,
                        language_id__in=languageIds).values_list(
                        'clade_id', flat=True),
                    cladeLevel1=0).exclude(
                    cladeLevel0=0  # Ignore PIE
                ).all()
                if len(clades) > 0:
                    cladeNamesSet.add(', '.join([
                        c.cladeName for c in clades]))
            # Yield interesting clades:
            if len(cladeNamesSet) > lowerBranchBound:
                cognateClass.bNames = ', '.join('"%s"' % n for
                                                n in cladeNamesSet)
                yield(cognateClass)
        yield(None)  # EOG

    def report(self, cognateClasses, meaningIds):
        """Print id, meanings and branch names for each given cognate class
        (None sentinels are skipped)."""
        # Print given cognateClasses:
        for cognateClass in cognateClasses:
            if cognateClass is None:
                continue
            lexemeIds = CognateJudgement.objects.filter(
                cognate_class_id=cognateClass.id).values_list(
                'lexeme_id', flat=True)
            meaningNames = Meaning.objects.filter(
                lexeme__id__in=lexemeIds,
                id__in=meaningIds).distinct().values_list('gloss', flat=True)
            meaningNames = ', '.join(['"%s"' % m for m in meaningNames])
            self.stdout.write("Cognate set id: %s "
                              "meanings: %s branches: %s" %
                              (cognateClass.id,
                               meaningNames,
                               cognateClass.bNames))
| 8,438 | 2,313 |
import pandas as pd
from icu import Collator, Locale, RuleBasedCollator
ddf = pd.read_csv("../word_frequency/unilex/din.txt", sep='\t', skiprows = range(2,5))
collator = Collator.createInstance(Locale('en_AU.UTF-8'))
# https://stackoverflow.com/questions/13838405/custom-sorting-in-pandas-dataframe/27009771#27009771
# https://gist.github.com/seanpue/e1cb846f676194ae77eb
def sort_pd(key=None, reverse=False):
    """Return a sorter usable with ``df.iloc[...]``: given a Series (or any
    iterable), it returns the positional argsort order under *key*.

    Fixes the original ``list.index``-based version, which returned the
    index of the FIRST occurrence for every element -- wrong (repeated
    positions) whenever values are duplicated -- and ran in O(n^2).
    This stable argsort is O(n log n) and identical for unique values.
    """
    def sorter(series):
        values = list(series)
        if key is None:
            return sorted(range(len(values)), key=values.__getitem__,
                          reverse=reverse)
        return sorted(range(len(values)), key=lambda i: key(values[i]),
                      reverse=reverse)
    return sorter
sort_by_custom_dict = sort_pd(key=collator.getSortKey)
#ddf.iloc[sort_by_custom_dict(ddf.index)]
# ddf.iloc[sort_by_custom_dict(ddf['Form'])]
ddf.iloc[sort_by_custom_dict(ddf['Form'])]
#https://python3.wannaphong.com/2015/03/sort-python.html
# https://pyerror.com/detail/1316/
lexemes = ddf.Form
#lexemes2 = ddf['Form']
temp = lexemes.sort_values()
collation_rules = "&A<<aa<<<aA<<<Aa<<<AA<<รค<<<ร<<รครค<<<รคร<<<รรค<<<รร\n&D<dh<<<dH<<<Dh<<<DH\n&E<<ee<<<eE<<<Ee<<<EE<<รซ<<<ร<<รซรซ<<<รซร<<<รรซ<<<รร<ษ<<<ฦ<<ษษ<<<ษฦ<<<ฦษ<<<ฦฦ<<ษฬ<<<ฦฬ<<ษฬษฬ<<<ษฬฦฬ<<<ฦฬษฬ<<<ฦฬฦฬ\n&G<ษฃ<<<ฦ\n&I<<ii<<<iI<<<Ii<<<II<<รฏ<<<ร<<รฏรฏ<<<รฏร<<<รรฏ<<<รร\n&N<nh<<<nH<<<Nh<<<NH<ny<<<nY<<<Ny<<<NH<ล<<<ล\n&O<<oo<<<oO<<<Oo<<<OO<<รถ<<<ร<<รถรถ<<<รถร<<<รรถ<<<รร<ษ<<<ฦ<<ษษ<<<ษฦ<<<ฦษ<<<ฦฦ<<ษฬ<<<ฦฬ<<ษฬษฬ<<<ษฬฦฬ<<<ฦฬษฬ<<<ฦฬฦฬ\n&T<th<<<tH<<<Th<<<TH\n&U<<uu<<<uU<<<Uu<<<UU"
custom_collator = RuleBasedCollator(collation_rules)
temp.sort_values(key=lambda x: custom_collator.getSortKey(x) )
def sort_pd(key=None, reverse=False):
    """Return a sorter usable with ``df.iloc[...]``: given a Series (or any
    iterable), it returns the positional argsort order under *key*.

    Fixes the original ``list.index``-based version, which returned the
    index of the FIRST occurrence for every element -- wrong (repeated
    positions) whenever values are duplicated -- and ran in O(n^2).
    This stable argsort is O(n log n) and identical for unique values.
    """
    def sorter(series):
        values = list(series)
        if key is None:
            return sorted(range(len(values)), key=values.__getitem__,
                          reverse=reverse)
        return sorted(range(len(values)), key=lambda i: key(values[i]),
                      reverse=reverse)
    return sorter
sort_by_custom_dict = sort_pd(key=custom_collator.getSortKey) | 1,809 | 916 |
# Python
import unittest
from copy import deepcopy
from unittest.mock import Mock
# ATS
from pyats.topology import Device
# Genie
from genie.libs.ops.dot1x.ios.dot1x import Dot1X
from genie.libs.ops.dot1x.ios.tests.dot1x_output import Dot1xOutput
# Parser
from genie.libs.parser.ios.show_dot1x import ShowDot1xAllDetail, \
ShowDot1xAllStatistics, \
ShowDot1xAllSummary, \
ShowDot1xAllCount
class test_dot1x(unittest.TestCase):
    """Ops-layer tests for the IOS Dot1X feature: the device connection is
    replaced by canned parser outputs from Dot1xOutput."""

    def setUp(self):
        self.device = Device(name='aDevice')
        self.device.os = 'ios'
        self.device.custom['abstraction'] = {'order':['os']}
        self.device.mapping={}
        self.device.mapping['cli']='cli'
        # Give the device as a connection type
        # This is done in order to call the parser on the output provided
        self.device.connectionmgr.connections['cli'] = self.device

    def test_complete_output(self):
        """All four show-command outputs present -> full info dict learned."""
        self.maxDiff = None
        dot1x = Dot1X(device=self.device)

        # Get outputs
        dot1x.maker.outputs[ShowDot1xAllDetail] = \
            {'': Dot1xOutput.ShowDot1xAllDetail}
        dot1x.maker.outputs[ShowDot1xAllStatistics] = \
            {'': Dot1xOutput.ShowDot1xAllStatistics}
        dot1x.maker.outputs[ShowDot1xAllSummary] = \
            {'': Dot1xOutput.ShowDot1xAllSummary}
        dot1x.maker.outputs[ShowDot1xAllCount] = \
            {'': Dot1xOutput.ShowDot1xAllCount}

        # Learn the feature
        dot1x.learn()

        # Verify Ops was created successfully
        self.assertEqual(dot1x.info, Dot1xOutput.Dot1x_info)

        # Check Selected Attributes
        self.assertEqual(dot1x.info['version'], 3)
        # info - mdot1x default
        self.assertEqual(dot1x.info['interfaces']['GigabitEthernet1/0/9']\
            ['max_start'], 3)

    def test_empty_output(self):
        """Empty parser outputs -> no attributes are populated at all."""
        self.maxDiff = None
        dot1x = Dot1X(device=self.device)

        dot1x.maker.outputs[ShowDot1xAllDetail] = \
            {'': {}}
        dot1x.maker.outputs[ShowDot1xAllStatistics] = \
            {'': {}}
        dot1x.maker.outputs[ShowDot1xAllSummary] = \
            {'': {}}
        dot1x.maker.outputs[ShowDot1xAllCount] = \
            {'': {}}

        # Learn the feature
        dot1x.learn()

        # Check no attribute not found
        with self.assertRaises(AttributeError):
            dot1x.info['version']

    def test_incomplete_output(self):
        """Missing 'count' output -> learned info lacks the sessions key."""
        self.maxDiff = None
        dot1x = Dot1X(device=self.device)

        # Get outputs
        dot1x.maker.outputs[ShowDot1xAllDetail] = \
            {'': Dot1xOutput.ShowDot1xAllDetail}
        dot1x.maker.outputs[ShowDot1xAllStatistics] = \
            {'': Dot1xOutput.ShowDot1xAllStatistics}
        dot1x.maker.outputs[ShowDot1xAllSummary] = \
            {'': Dot1xOutput.ShowDot1xAllSummary}
        dot1x.maker.outputs[ShowDot1xAllCount] = \
            {'': {}}

        # Learn the feature
        dot1x.learn()

        # Delete missing specific attribute values
        expect_dict = deepcopy(Dot1xOutput.Dot1x_info)
        del(expect_dict['sessions'])

        # Verify Ops was created successfully
        self.assertEqual(dot1x.info, expect_dict)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 3,495 | 1,139 |
# ======================================================================
# copyright 2020. Triad National Security, LLC. All rights
# reserved. This program was produced under U.S. Government contract
# 89233218CNA000001 for Los Alamos National Laboratory (LANL), which
# is operated by Triad National Security, LLC for the U.S. Department
# of Energy/National Nuclear Security Administration. All rights in
# the program are reserved by Triad National Security, LLC, and the
# U.S. Department of Energy/National Nuclear Security
# Administration. The Government is granted for itself and others
# acting on its behalf a nonexclusive, paid-up, irrevocable worldwide
# license in this material to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
# ======================================================================
# Authors: Oleg Korobkin (korobkin@lanl.gov)
# Purpose:
# Provides a check of whether a coordinate transformation of the metric
# from code coordinates to Kerr-Schild coordinates produces correct
# metric, consistent with the closed form (as in e.g. Eq.(3)
# McKinney & Gammie 2004, https://arxiv.org/abs/astro-ph/0404512)
#
# Functions:
# - print_matrix
# - check_transformation_matrices
#
from math import *
import numpy as np
def print_matrix(matrix, fmt="%19.11e", tostdout=True) -> str:
    """Pretty-prints a matrix to a string (optionally also to stdout)

    Parameters
    ----------
    matrix : numpy.array([N,M])
        matrix to print
    fmt : str
        C-style format of each element (default: "%19.11e")
    tostdout : bool
        output to stdout (default: true)

    Returns
    -------
    str
        formatted output string
    """
    # Format each row as "[a, b, ...]" and join the rows so that
    # continuation rows line up one character in from the opening '['.
    rows = ["[" + ", ".join(fmt % elem for elem in row) + "]"
            for row in matrix]
    s = "[" + ",\n ".join(rows) + "]"
    if tostdout:
        print(s)
    return s
def check_transformation_matrices(geom, a, ir, jth,
                                  verbose=True, tol=1e-12) -> bool:
    """Transforms the metric to spherical KS and compares with analytic formula

    Test 1: covariant metric, gcov, at A = {ir, jth}
    1.1 sample gcov and Lambda_h2bl_cov at A
    1.2 transform gcov to gks using transformation matrices
    1.3 compare to expected values at {r,th} at A

    Parameters
    ----------
    geom : dictionary
        nubhlight geom object
    a : Float
        dimensionless Kerr spin parameter
    ir : Integer
        index of sample point in radial direction
    jth : Integer
        index of sample point in angular theta-direction
    verbose : bool
        output steps to stdout
    tol : Float
        tolerance to relative error (wrt det g)

    Returns
    -------
    bool
        True if all checks passed

    Examples
    --------
    import hdf5_to_dict as io
    hdr = io.load_hdr("dump_00000010.h5")
    geom = io.load_geom(hdr,recalc=True)
    check_transformation_matrices(geom, -1, 64, 32)
    """
    # sample gcov and h2bl at point A
    gcov_A = geom['gcov'][ir,jth]
    h2bl_A = geom['Lambda_h2bl_cov'][ir,jth]
    # sample r and theta, compute BL metric-related quantities
    r = geom['r'][ir,jth,0]; r2 = r*r
    a2 = a*a
    th= geom['th'][ir,jth,0]
    sth2= sin(th)**2
    Delta= r2 - 2*r + a2
    Sigma= r2 + a2*cos(th)**2
    A = (r2 + a2)**2 - a2*Delta*sin(th)**2
    if verbose:
        print ("r = %19.11e" % r)
        print ("theta = %19.11e" % th)
        print ("a = %19.11e" % a)
        print ("Delta = %19.11e" % Delta)
        print ("Sigma = %19.11e" % Sigma)
        print ("A = %19.11e" % A)
        # output metric
        print ("gcov_A = ")
        print_matrix (gcov_A)
        print ("")
        # output transformation matrix
        print ("h2bl_A = ")
        print_matrix (h2bl_A)
        print ("")
    # compute KS metric at A via the tensor transformation law:
    # gks_{ij} = Lambda^k_i Lambda^l_j gcov_{kl}, i.e. Lambda^T g Lambda
    # (replaces the former explicit quadruple loop).
    gks_A = h2bl_A.T @ gcov_A @ h2bl_A
    if verbose:
        print ("gks_A = ")
        print_matrix (gks_A)
        print("")
    # expected values at {r, th}: closed-form Kerr-Schild metric
    # (cf. Eq.(3) of McKinney & Gammie 2004)
    g_tt = -1. + 2.*r/Sigma
    g_rr = 1. + 2.*r/Sigma
    g_ff = sth2*(Sigma + a2*g_rr*sth2)
    g_thth = Sigma
    g_tr = 2*r/Sigma
    g_tf = -2*a*r*sth2/Sigma
    g_rf = -a*g_rr*sth2
    det_g = -Sigma**2*sth2
    if verbose:
        print ("Expected:")
        print (" g_tt = %19.11e" % g_tt )
        print (" g_rr = %19.11e" % g_rr )
        print (" g_thth = %19.11e" % g_thth)
        print (" g_ff = %19.11e" % g_ff )
        print (" g_tr = %19.11e" % g_tr )
        print (" g_rf = %19.11e" % g_rf )
        print (" g_tf = %19.11e" % g_tf )
        print ("")
    # check gks_A against the analytic matrix, entry by entry, with the
    # error normalized by |det g|
    gks_expected = np.array(
        [[ g_tt, g_tr, 0.0, g_tf],
         [ g_tr, g_rr, 0.0, g_rf],
         [ 0.0, 0.0, g_thth, 0.0],
         [ g_tf, g_rf, 0.0, g_ff]]
    )
    passed = True
    for i in range(4):
        for j in range(4):
            if abs(gks_A[i,j] - gks_expected[i,j])/abs(det_g) > tol:
                passed = False
                if verbose:
                    print (f"WARNING: Significant mismatch in gks_A[{i},{j}]:")
                    print (" -- expected: %19.11e" % gks_expected[i,j])
                    print (" -- actual: %19.11e" % gks_A[i,j])
    return passed
| 5,218 | 2,070 |
from dataclasses import dataclass
@dataclass
class Channel:
    """Plain data container for a channel, identified by a string id."""
    # Unique identifier of the channel.
    id: str
| 73 | 22 |
# -*- coding: utf-8 -*-
##########################################################################
# NSAp - Copyright (C) CEA, 2020
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
"""
3D MRI Brain Generation with Generative Adversarial Networks (BGGAN) with
Variational Auto Encoder (VAE).
"""
# Imports
import logging
import collections
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as func
from pynet.utils import Networks
# Global parameters
# Module-level logger shared by all networks in this file.
logger = logging.getLogger("pynet")
@Networks.register
class BGDiscriminator(nn.Module):
    """ This is the discriminator part of the BGGAN.

    Four stride-2 convolutions (kernel 4, padding 1) each halve every
    spatial dimension while doubling the channel count; a final conv
    whose kernel equals the remaining spatial size reduces the map to
    the requested number of output channels.
    """
    def __init__(self, in_shape, in_channels=1, out_channels=1,
                 start_filts=64, with_logit=True):
        """ Init class.
        Parameters
        ----------
        in_shape: uplet
            the input tensor data shape (X, Y, Z).
        in_channels: int, default 1
            number of channels in the input tensor.
        out_channels: int, default 1
            number of channels in the output tensor.
        start_filts: int, default 64
            number of convolutional filters for the first conv.
        with_logit: bool, default True
            apply the logit function to the result.
        """
        super(BGDiscriminator, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.start_filts = start_filts
        self.with_logit = with_logit
        self.in_shape = in_shape
        # Spatial sizes after each of the 4 halving steps; the last
        # entry sizes the kernel of the final conv.
        self.shapes = _downsample_shape(
            self.in_shape, nb_iterations=4, scale_factor=2)
        self.conv1 = nn.Conv3d(
            self.in_channels, self.start_filts, kernel_size=4, stride=2,
            padding=1)
        self.conv2 = nn.Conv3d(
            self.start_filts, self.start_filts * 2, kernel_size=4, stride=2,
            padding=1)
        self.bn2 = nn.BatchNorm3d(self.start_filts * 2)
        self.conv3 = nn.Conv3d(
            self.start_filts * 2, self.start_filts * 4, kernel_size=4,
            stride=2, padding=1)
        self.bn3 = nn.BatchNorm3d(self.start_filts * 4)
        self.conv4 = nn.Conv3d(
            self.start_filts * 4, self.start_filts * 8, kernel_size=4,
            stride=2, padding=1)
        self.bn4 = nn.BatchNorm3d(self.start_filts * 8)
        # Collapses the remaining spatial extent in a single step.
        self.conv5 = nn.Conv3d(
            self.start_filts * 8, self.out_channels,
            kernel_size=self.shapes[-1], stride=1, padding=0)
    def forward(self, x):
        """ Run the discriminator; returns per-sample scores, passed
        through a sigmoid and flattened when ``with_logit`` is set.
        """
        logger.debug("BGGAN Discriminator...")
        self.debug("input", x)
        h1 = func.leaky_relu(self.conv1(x), negative_slope=0.2)
        self.debug("conv1", h1)
        h2 = func.leaky_relu(self.bn2(self.conv2(h1)), negative_slope=0.2)
        self.debug("conv2", h2)
        h3 = func.leaky_relu(self.bn3(self.conv3(h2)), negative_slope=0.2)
        self.debug("conv3", h3)
        h4 = func.leaky_relu(self.bn4(self.conv4(h3)), negative_slope=0.2)
        self.debug("conv4", h4)
        h5 = self.conv5(h4)
        self.debug("conv5", h5)
        if self.with_logit:
            output = torch.sigmoid(h5.view(h5.size(0), -1))
            self.debug("output", output)
        else:
            output = h5
        logger.debug("Done.")
        return output
    def debug(self, name, tensor):
        # Trace shape/device/dtype of an intermediate tensor.
        logger.debug("  {3}: {0} - {1} - {2}".format(
            tensor.shape, tensor.get_device(), tensor.dtype, name))
@Networks.register
class BGEncoder(nn.Module):
    """ This is the encoder part of the BGGAN.

    Shares the discriminator's downsampling trunk, then maps the
    flattened features to a latent mean and log-variance and draws a
    reparametrized sample from the resulting Gaussian.
    """
    def __init__(self, in_shape, in_channels=1, start_filts=64,
                 latent_dim=1000):
        """ Init class.
        Parameters
        ----------
        in_shape: uplet
            the input tensor data shape (X, Y, Z).
        in_channels: int, default 1
            number of channels in the input tensor.
        start_filts: int, default 64
            number of convolutional filters for the first conv.
        latent_dim: int, default 1000
            the latent variable sizes.
        """
        super(BGEncoder, self).__init__()
        self.in_channels = in_channels
        self.start_filts = start_filts
        self.latent_dim = latent_dim
        self.in_shape = in_shape
        # Spatial sizes after each of the 4 stride-2 convs below.
        self.shapes = _downsample_shape(
            self.in_shape, nb_iterations=4, scale_factor=2)
        # Voxel count of the most-downsampled map; sizes the dense heads.
        self.dense_features = np.prod(self.shapes[-1])
        logger.debug("BGGAN Encoder shapes: {0}".format(self.shapes))
        self.conv1 = nn.Conv3d(
            self.in_channels, self.start_filts, kernel_size=4, stride=2,
            padding=1)
        self.conv2 = nn.Conv3d(
            self.start_filts, self.start_filts * 2, kernel_size=4, stride=2,
            padding=1)
        self.bn2 = nn.BatchNorm3d(self.start_filts * 2)
        self.conv3 = nn.Conv3d(
            self.start_filts * 2, self.start_filts * 4, kernel_size=4,
            stride=2, padding=1)
        self.bn3 = nn.BatchNorm3d(self.start_filts * 4)
        self.conv4 = nn.Conv3d(
            self.start_filts * 4, self.start_filts * 8, kernel_size=4,
            stride=2, padding=1)
        self.bn4 = nn.BatchNorm3d(self.start_filts * 8)
        # Dense heads producing the Gaussian parameters of the code.
        self.mean = nn.Sequential(
            nn.Linear(self.start_filts * 8 * self.dense_features, 2048),
            nn.BatchNorm1d(2048),
            nn.ReLU(),
            nn.Linear(2048, self.latent_dim))
        self.logvar = nn.Sequential(
            nn.Linear(self.start_filts * 8 * self.dense_features, 2048),
            nn.BatchNorm1d(2048),
            nn.ReLU(),
            nn.Linear(2048, self.latent_dim))
    def forward(self, x):
        """ Encode a batch; returns (mean, logvar, reparametrized sample).
        """
        logger.debug("BGGAN Encoder...")
        batch_size = x.size(0)
        logger.debug("  batch_size: {0}".format(batch_size))
        self.debug("input", x)
        h1 = func.leaky_relu(self.conv1(x), negative_slope=0.2)
        self.debug("conv1", h1)
        h2 = func.leaky_relu(self.bn2(self.conv2(h1)), negative_slope=0.2)
        self.debug("conv2", h2)
        h3 = func.leaky_relu(self.bn3(self.conv3(h2)), negative_slope=0.2)
        self.debug("conv3", h3)
        h4 = func.leaky_relu(self.bn4(self.conv4(h3)), negative_slope=0.2)
        self.debug("conv4", h4)
        mean = self.mean(h4.view(batch_size, -1))
        self.debug("mean", mean)
        logvar = self.logvar(h4.view(batch_size, -1))
        self.debug("logvar", logvar)
        # Reparameterization trick: sample = mean + std * eps.
        # NOTE(review): torch.autograd.Variable is deprecated; plain
        # torch.randn(...).to(x.device) would be equivalent here.
        std = logvar.mul(0.5).exp_()
        reparametrized_noise = Variable(
            torch.randn((batch_size, self.latent_dim))).to(x.device)
        reparametrized_noise = mean + std * reparametrized_noise
        self.debug("reparametrization", reparametrized_noise)
        logger.debug("Done.")
        return mean, logvar, reparametrized_noise
    def debug(self, name, tensor):
        # Trace shape/device/dtype of an intermediate tensor.
        logger.debug("  {3}: {0} - {1} - {2}".format(
            tensor.shape, tensor.get_device(), tensor.dtype, name))
@Networks.register
class BGCodeDiscriminator(nn.Module):
    """ This is the code discriminator part of the BGGAN.

    A small MLP scoring latent codes: two hidden blocks
    (Linear -> BatchNorm1d -> LeakyReLU) followed by a linear head.
    """
    def __init__(self, out_channels=1, code_size=1000, n_units=4096):
        """ Init class.
        Parameters
        ----------
        out_channels: int, default 1
            number of channels in the output tensor.
        code_size: int, default 1000
            the code size.
        n_units: int, default 4096
            the number of hidden units.
        """
        super(BGCodeDiscriminator, self).__init__()
        self.out_channels = out_channels
        self.code_size = code_size
        self.n_units = n_units
        def _hidden_block(n_in, n_out):
            # Shared structure of both hidden layers.
            return nn.Sequential(
                nn.Linear(n_in, n_out),
                nn.BatchNorm1d(n_out),
                nn.LeakyReLU(0.2, inplace=True))
        self.layer1 = _hidden_block(self.code_size, self.n_units)
        self.layer2 = _hidden_block(self.n_units, self.n_units)
        self.layer3 = nn.Linear(self.n_units, self.out_channels)
    def forward(self, x):
        """ Score a batch of latent codes. """
        logger.debug("BGGAN Code Discriminator...")
        self.debug("input", x)
        hidden = self.layer1(x)
        self.debug("layer1", hidden)
        hidden = self.layer2(hidden)
        self.debug("layer2", hidden)
        output = self.layer3(hidden)
        self.debug("layer3", output)
        logger.debug("Done.")
        return output
    def debug(self, name, tensor):
        # Trace shape/device/dtype of an intermediate tensor.
        logger.debug("  {3}: {0} - {1} - {2}".format(
            tensor.shape, tensor.get_device(), tensor.dtype, name))
@Networks.register
class BGGenerator(nn.Module):
    """ This is the generator part of the BGGAN.

    Projects the latent code to the smallest feature map of the shape
    ladder, then alternates interpolation-upsampling and 3x3 convs back
    up to ``in_shape``, ending with a tanh.
    """
    def __init__(self, in_shape, out_channels=1, start_filts=64,
                 latent_dim=1000, mode="trilinear", with_code=False):
        """ Init class.
        Parameters
        ----------
        in_shape: uplet
            the input tensor data shape (X, Y, Z).
        out_channels: int, default 1
            number of channels in the output tensor.
        start_filts: int, default 64
            number of convolutional filters for the first conv.
        latent_dim: int, default 1000
            the latent variable sizes.
        mode: str, default 'trilinear'
            the interpolation mode.
        with_code: bool, default False
            change the architecture if code discriminator is used.
        """
        super(BGGenerator, self).__init__()
        self.out_channels = out_channels
        self.start_filts = start_filts
        self.latent_dim = latent_dim
        self.in_shape = in_shape
        self.mode = mode
        self.with_code = with_code
        # Same shape ladder as the encoder/discriminator; the generator
        # walks it in reverse to upsample back to in_shape.
        self.shapes = _downsample_shape(
            self.in_shape, nb_iterations=4, scale_factor=2)
        self.dense_features = np.prod(self.shapes[-1])
        logger.debug("BGGAN Generator shapes: {0}".format(self.shapes))
        if self.with_code:
            # Code path: project with a transposed conv instead of a
            # fully-connected layer.
            self.tp_conv1 = nn.ConvTranspose3d(
                self.latent_dim, self.start_filts * 8, kernel_size=4,
                stride=1, padding=0, bias=False)
        else:
            self.fc = nn.Linear(
                self.latent_dim, self.start_filts * 8 * self.dense_features)
        self.bn1 = nn.BatchNorm3d(self.start_filts * 8)
        self.tp_conv2 = nn.Conv3d(
            self.start_filts * 8, self.start_filts * 4, kernel_size=3,
            stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm3d(self.start_filts * 4)
        self.tp_conv3 = nn.Conv3d(
            self.start_filts * 4, self.start_filts * 2, kernel_size=3,
            stride=1, padding=1, bias=False)
        self.bn3 = nn.BatchNorm3d(self.start_filts * 2)
        self.tp_conv4 = nn.Conv3d(
            self.start_filts * 2, self.start_filts, kernel_size=3, stride=1,
            padding=1, bias=False)
        self.bn4 = nn.BatchNorm3d(self.start_filts)
        self.tp_conv5 = nn.Conv3d(
            self.start_filts, self.out_channels, kernel_size=3, stride=1,
            padding=1, bias=False)
    def forward(self, noise):
        """ Generate a batch of volumes from latent noise; output range
        is (-1, 1) due to the final tanh.
        """
        logger.debug("BGGAN Generator...")
        self.debug("input", noise)
        if self.with_code:
            noise = noise.view(-1, self.latent_dim, 1, 1, 1)
            self.debug("view", noise)
            h = self.tp_conv1(noise)
            self.debug("tp_conv1", h)
        else:
            noise = noise.view(-1, self.latent_dim)
            self.debug("view", noise)
            h = self.fc(noise)
            self.debug("dense", h)
            # Reshape the dense output to the smallest 3D feature map.
            h = h.view(-1, self.start_filts * 8, *self.shapes[-1])
            self.debug("view", h)
        h = func.relu(self.bn1(h))
        # Upsample through the recorded shapes in reverse order.
        h = nn.functional.interpolate(
            h, size=self.shapes[-2], mode=self.mode, align_corners=False)
        h = self.tp_conv2(h)
        h = func.relu(self.bn2(h))
        self.debug("tp_conv2", h)
        h = nn.functional.interpolate(
            h, size=self.shapes[-3], mode=self.mode, align_corners=False)
        h = self.tp_conv3(h)
        h = func.relu(self.bn3(h))
        self.debug("tp_conv3", h)
        h = nn.functional.interpolate(
            h, size=self.shapes[-4], mode=self.mode, align_corners=False)
        h = self.tp_conv4(h)
        h = func.relu(self.bn4(h))
        self.debug("tp_conv4", h)
        h = nn.functional.interpolate(
            h, size=self.shapes[-5], mode=self.mode, align_corners=False)
        h = self.tp_conv5(h)
        self.debug("tp_conv5", h)
        h = torch.tanh(h)
        self.debug("output", h)
        logger.debug("Done.")
        return h
    def debug(self, name, tensor):
        # Trace shape/device/dtype of an intermediate tensor.
        logger.debug("  {3}: {0} - {1} - {2}".format(
            tensor.shape, tensor.get_device(), tensor.dtype, name))
def _downsample_shape(shape, nb_iterations=1, scale_factor=2):
shape = np.asarray(shape)
all_shapes = [shape.astype(int).tolist()]
for idx in range(nb_iterations):
shape = np.floor(shape / scale_factor)
all_shapes.append(shape.astype(int).tolist())
return all_shapes
| 13,174 | 4,711 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.core.freezable_batch_norm."""
import numpy as np
import tensorflow as tf
from object_detection.core import freezable_batch_norm
class FreezableBatchNormTest(tf.test.TestCase):
  """Tests for FreezableBatchNorm operations."""

  def _build_model(self, training=None):
    # One-layer model holding a single FreezableBatchNorm whose
    # `training` behaviour is frozen to the given value (None = normal).
    model = tf.keras.models.Sequential()
    norm = freezable_batch_norm.FreezableBatchNorm(training=training,
                                                   input_shape=(10,),
                                                   momentum=0.8)
    model.add(norm)
    return model, norm

  def _train_freezable_batch_norm(self, training_mean, training_var):
    # Fit the batch norm on identity targets so its moving statistics
    # converge towards the training distribution; return the weights.
    # NOTE(review): `training_var` is passed to np.random.normal's
    # `scale` argument, i.e. it is used as a standard deviation despite
    # the name.
    model, _ = self._build_model()
    model.compile(loss='mse', optimizer='sgd')
    # centered on training_mean, variance training_var
    train_data = np.random.normal(
        loc=training_mean,
        scale=training_var,
        size=(1000, 10))
    model.fit(train_data, train_data, epochs=4, verbose=0)
    return model.weights

  def _test_batchnorm_layer(
      self, norm, should_be_training, test_data,
      testing_mean, testing_var, training_arg, training_mean, training_var):
    # Run test_data through the layer and check the output is (close to)
    # standard normal. When the layer is expected to be in inference
    # mode, first undo the training moving statistics so the testing
    # distribution is recovered.
    out_tensor = norm(tf.convert_to_tensor(test_data, dtype=tf.float32),
                      training=training_arg)
    out = tf.keras.backend.eval(out_tensor)
    # Undo the learned affine transform (beta/gamma) first.
    out -= tf.keras.backend.eval(norm.beta)
    out /= tf.keras.backend.eval(norm.gamma)
    if not should_be_training:
      # Map the moving-stats-normalized output back onto the testing
      # distribution before asserting standard-normality.
      out *= training_var
      out += (training_mean - testing_mean)
      out /= testing_var
    np.testing.assert_allclose(out.mean(), 0.0, atol=1.5e-1)
    np.testing.assert_allclose(out.std(), 1.0, atol=1.5e-1)

  def test_batchnorm_freezing_training_none(self):
    with self.test_session():
      training_mean = 5.0
      training_var = 10.0
      testing_mean = -10.0
      testing_var = 5.0
      # Initially train the batch norm, and save the weights
      trained_weights = self._train_freezable_batch_norm(training_mean,
                                                         training_var)
      # Load the batch norm weights, freezing training to True.
      # Apply the batch norm layer to testing data and ensure it is normalized
      # according to the batch statistics.
      model, norm = self._build_model(training=True)
      for trained_weight, blank_weight in zip(trained_weights, model.weights):
        weight_copy = blank_weight.assign(tf.keras.backend.eval(trained_weight))
        tf.keras.backend.eval(weight_copy)
      # centered on testing_mean, variance testing_var
      test_data = np.random.normal(
          loc=testing_mean,
          scale=testing_var,
          size=(1000, 10))
      # Test with training=True passed to the call method:
      training_arg = True
      should_be_training = True
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      # Test with training=False passed to the call method:
      training_arg = False
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      # Test the layer in various Keras learning phase scopes:
      training_arg = None
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      # With training=None the layer follows the global learning phase.
      tf.keras.backend.set_learning_phase(True)
      should_be_training = True
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      tf.keras.backend.set_learning_phase(False)
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)

  def test_batchnorm_freezing_training_false(self):
    with self.test_session():
      training_mean = 5.0
      training_var = 10.0
      testing_mean = -10.0
      testing_var = 5.0
      # Initially train the batch norm, and save the weights
      trained_weights = self._train_freezable_batch_norm(training_mean,
                                                         training_var)
      # Load the batch norm back up, freezing training to False.
      # Apply the batch norm layer to testing data and ensure it is normalized
      # according to the training data's statistics.
      model, norm = self._build_model(training=False)
      for trained_weight, blank_weight in zip(trained_weights, model.weights):
        weight_copy = blank_weight.assign(tf.keras.backend.eval(trained_weight))
        tf.keras.backend.eval(weight_copy)
      # centered on testing_mean, variance testing_var
      test_data = np.random.normal(
          loc=testing_mean,
          scale=testing_var,
          size=(1000, 10))
      # Make sure that the layer is never training
      # Test with training=True passed to the call method:
      training_arg = True
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      # Test with training=False passed to the call method:
      training_arg = False
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      # Test the layer in various Keras learning phase scopes:
      training_arg = None
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      # Frozen training=False must override the global learning phase.
      tf.keras.backend.set_learning_phase(True)
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
      tf.keras.backend.set_learning_phase(False)
      should_be_training = False
      self._test_batchnorm_layer(norm, should_be_training, test_data,
                                 testing_mean, testing_var, training_arg,
                                 training_mean, training_var)
# Allow running this test module directly.
if __name__ == '__main__':
  tf.test.main()
| 7,587 | 2,153 |
# from PyQt5.QtWidgets import QMessageBox
# def raise_error(message: str = "DEFAULT:Error Description:More Information"):
# box = QMessageBox()
# kind, msg, info = message.split(":")
# box.setIcon(QMessageBox.Critical)
# box.setWindowTitle(kind + " Error")
# box.setText(msg)
# box.setInformativeText(info)
# box.exec_()
| 351 | 122 |
"""
759. Employee Free Time
We are given a list schedule of employees, which represents the working time for each employee.
Each employee has a list of non-overlapping Intervals, and these intervals are in sorted order.
Return the list of finite intervals representing common, positive-length free time for all employees, also in sorted order.
(Even though we are representing Intervals in the form [x, y], the objects inside are Intervals, not lists or arrays. For example, schedule[0][0].start = 1, schedule[0][0].end = 2, and schedule[0][0][0] is not defined). Also, we wouldn't include intervals like [5, 5] in our answer, as they have zero length.
"""
# Line Swap method
# if we met a start, cnt += 1
# if we met an end, cnt -= 1
# time complexity -- O(NlogN), need sort all intervals
# Runtime: 96 ms, faster than 87.95% of Python3 online submissions for Employee Free Time.
# Memory Usage: 14.7 MB, less than 25.00% of Python3 online submissions for Employee Free Time.
"""
# Definition for an Interval.
class Interval:
def __init__(self, start: int = None, end: int = None):
self.start = start
self.end = end
"""
class Solution:
    def employeeFreeTime(self, schedule: '[[Interval]]') -> '[Interval]':
        """Sweep-line over interval endpoints: +1 at a start, -1 at an
        end; a gap between two distinct times while no interval is
        active is common free time."""
        START, END = 0, 1
        events = []
        for person in schedule:
            for interval in person:
                events.append((interval.start, START))
                events.append((interval.end, END))
        # Stable sort by time only, matching insertion order on ties.
        events.sort(key=lambda e: e[0])
        free = []
        active = 0
        last_time = None
        for t, kind in events:
            if active == 0 and last_time is not None and last_time != t:
                free.append(Interval(last_time, t))
            active += 1 if kind == START else -1
            last_time = t
        return free
# priority queue
# if the current end is less than the smallest start
# then means there is a free time
# use priority queue to maintain the smallest start
# also only store one job per person in the queue to save memory
# time complexity -- O(N log C), where C is the number of employees
"""
# Definition for an Interval.
class Interval:
def __init__(self, start: int = None, end: int = None):
self.start = start
self.end = end
"""
import heapq
class Solution:
    def employeeFreeTime(self, schedule: '[[Interval]]') -> '[Interval]':
        """Merge per-employee interval streams with a min-heap keyed on
        start time, keeping only one job per employee in the heap; a gap
        appears when the furthest end seen so far precedes the next
        start popped from the heap."""
        gaps = []
        heap = [(emp[0].start, emp_id, 0)
                for emp_id, emp in enumerate(schedule)]
        heapq.heapify(heap)
        # Initialise to the globally earliest start so the first pop
        # can never produce a spurious gap.
        furthest_end = min(iv.start for emp in schedule for iv in emp)
        while heap:
            start, emp_id, job_id = heapq.heappop(heap)
            if furthest_end < start:
                gaps.append(Interval(furthest_end, start))
            furthest_end = max(furthest_end, schedule[emp_id][job_id].end)
            nxt = job_id + 1
            if nxt < len(schedule[emp_id]):
                heapq.heappush(heap, (schedule[emp_id][nxt].start, emp_id, nxt))
        return gaps
| 3,182 | 962 |
import os, yaml
# Default settings; load() overlays values from a YAML config file,
# merging one level deep for dict-valued keys.
config = {
    'debug': False,
    'port': 5000,
    'store_path': '/var/storitch',
    'pool_size': 5,
    'logging': {
        'level': 'warning',
        'path': None,
        'max_size': 100 * 1000 * 1000,# ~ 95 mb
        'num_backups': 10,
    },
    # File extensions treated as images.
    'image_exts': [
        '.jpg', '.jpeg', '.png', '.tiff', '.tif', '.gif',
        '.bmp', '.bmp2', '.bmp3', '.dcm', '.dicom', '.webp',
    ],
}
def load(path=None):
    """Load a YAML config file and merge it into the module-level
    ``config`` dict.

    Parameters
    ----------
    path : str, optional
        Explicit path to the config file. When omitted, the
        ``STORITCH_CONFIG`` environment variable is consulted first,
        then a list of default locations.

    Raises
    ------
    Exception
        If no config file can be located, or the given path is not a
        file.
    """
    default_paths = [
        '~/storitch.yaml',
        './storitch.yaml',
        '../storitch.yaml',
        '/etc/storitch/storitch.yaml',
        '/etc/storitch.yaml',
    ]
    if not path:
        path = os.environ.get('STORITCH_CONFIG', None)
        if not path:
            # Fall back to the first default location that exists.
            for p in default_paths:
                p = os.path.expanduser(p)
                if os.path.isfile(p):
                    path = p
                    break
    if not path:
        raise Exception('No config file specified.')
    if not os.path.isfile(path):
        raise Exception('Config: "{}" could not be found.'.format(path))
    with open(path) as f:
        data = yaml.load(f, Loader=yaml.FullLoader)
    if not data:
        # An empty YAML file parses to None; nothing to merge (the old
        # code crashed with TypeError when iterating None).
        return
    for key in data:
        if key in config:
            # Merge dict-valued settings one level deep, replace others.
            if isinstance(config[key], dict):
                config[key].update(data[key])
            else:
                config[key] = data[key]
import io
import os
import numpy as np
import pandas
import json
import logging #<== Optional. Log to console, file, kafka
from pipeline_monitor import prometheus_monitor as monitor #<== Optional. Monitor runtime metrics
from pipeline_logger import log
import tensorflow as tf
from tensorflow.contrib import predictor
from keras.models import Sequential, load_model
from keras.preprocessing import sequence
from keras.preprocessing.text import Tokenizer
from collections import OrderedDict
# Console logger used by this module; INFO level on both logger and
# stream handler.
_logger = logging.getLogger('pipeline-logger')
_logger.setLevel(logging.INFO)
_logger_stream_handler = logging.StreamHandler()
_logger_stream_handler.setLevel(logging.INFO)
_logger.addHandler(_logger_stream_handler)
__all__ = ['invoke'] #<== Optional. Being a good Python citizen.
_labels = { #<== Optional. Used for metrics/labels
    'name': 'injection',
    'tag': 'v1',
    'type': 'tensorflow',
    'runtime': 'python',
    'chip': 'cpu',
}
def _initialize_upon_import(): #<== Optional. Called once upon server startup
    ''' Initialize / Restore Model Object.
    Loads the LSTM architecture and weights from files relative to the
    working directory and compiles the model for binary classification.
    '''
    model = load_model('securitai-lstm-model.h5')
    model.load_weights('securitai-lstm-weights.h5')
    model.compile(loss = 'binary_crossentropy', optimizer = 'adam', metrics = ['accuracy'])
    return model
# This is called unconditionally at *module import time*...
# NOTE(review): importing this module therefore requires the model files
# to be present on disk; consider lazy initialization if undesirable.
_model = _initialize_upon_import()
#@log(labels=_labels, logger=_logger) #<== Optional. Sample and compare predictions
def invoke(request): #<== Required. Called on every prediction
    '''Where the magic happens...
    Transforms raw CSV request bytes into a padded sequence, runs the
    LSTM model, and returns the first row of the prediction; each stage
    is wrapped in a prometheus monitor for fine-grained timing.
    '''
    with monitor(labels=_labels, name="transform_request"): #<== Optional. Expose fine-grained metrics
        transformed_request = _transform_request(request) #<== Optional. Transform input (json) into TensorFlow (tensor)
    with monitor(labels=_labels, name="invoke"): #<== Optional. Calls _model.predict()
        response = _model.predict(transformed_request)
    with monitor(labels=_labels, name="transform_response"): #<== Optional. Transform TensorFlow (tensor) into output (json)
        transformed_response = _transform_response(response)
    return transformed_response #<== Required. Returns the predicted value(s)
def _transform_request(request):
    """Convert raw CSV request bytes into a padded char-level sequence.
    Each CSV row holds a JSON log entry; volatile http fields are
    stripped before fitting the tokenizer.
    """
    request_str = request.decode('utf-8')
    # tokenize the csv request and create json
    X = pandas.read_csv(io.StringIO(request_str), engine='python', quotechar='|', header=None).values[:,0]
    for index, item in enumerate(X):
        reqJson = json.loads(item, object_pairs_hook=OrderedDict)
        # Drop per-request fields that carry no signal for the model.
        del reqJson['http']['timestamp']
        del reqJson['http']['headers']
        del reqJson['http']['source']
        del reqJson['http']['route']
        del reqJson['http']['responsePayload']
        X[index] = json.dumps(reqJson, separators=(',', ':'))
    # Char-level tokenizer fitted on the cleaned entries.
    tokenizer = Tokenizer(filters='\t\n', char_level=True)
    tokenizer.fit_on_texts(X)
    # this used to be [log_entry]
    # NOTE(review): the sequence is built from the raw request_str, not
    # from the cleaned X entries the tokenizer was fitted on -- confirm
    # this is intentional.
    seq = tokenizer.texts_to_sequences([request_str])
    max_log_length = 1024
    log_entry_processed = sequence.pad_sequences(seq, maxlen=max_log_length)
    return log_entry_processed
def _transform_response(response):
    # predict() returns a batch; the request is a single sample, so
    # return the first (only) row.
    return response[0]
# Smoke test: run one canned request through the full invoke() path.
if __name__ == '__main__':
    with open('./pipeline_test_request.csv', 'rb') as fb:
        request_bytes = fb.read()
    response_bytes = invoke(request_bytes)
    print(response_bytes)
| 3,727 | 1,058 |
import argparse
import importlib
import os
import re
import signal
import subprocess
import sys
import time
import logging
from act.common import aCTLogger
from act.common.aCTConfig import aCTConfigAPP
from act.arc import aCTDBArc
class aCTReport:
'''Print summary info on jobs in DB. Use --web to print html that is
automatically refreshed. Add filenames to query more than one aCT DB'''
    def __init__(self, args):
        """Set up logging, the output buffer and the ARC DB connection.
        ``args.web`` (a filename) switches to auto-refreshing HTML
        output; ``args.conffiles`` allows querying several aCT DBs.
        """
        self.output = ""
        self.outfile = args.web
        self.actconfs = args.conffiles or [''] # empty string for default behaviour
        self.logger=aCTLogger.aCTLogger("aCTReport")
        self.actlog=self.logger()
        self.actlog.logger.setLevel(logging.INFO)
        # Critical logger kept for the (currently disabled) email path
        # in ProcessReport.
        self.criticallogger = aCTLogger.aCTLogger('aCTCritical', arclog=False)
        self.criticallog = self.criticallogger()
        if self.outfile:
            # HTML mode: auto-refresh the page every 60 seconds.
            self.log('<META HTTP-EQUIV="refresh" CONTENT="60"><pre>')
        self.log(time.asctime() + '\n')
        self.db=aCTDBArc.aCTDBArc(self.actlog)
def log(self, message=''):
self.output += message + '\n'
def AppReport(self):
appconf = aCTConfigAPP()
apps = appconf.getList(["modules", "app"])
for app in apps:
try:
ap = importlib.import_module(f'{app}.aCTReport').report
self.log(ap(self.actconfs))
except ModuleNotFoundError as e:
self.actlog.info(f'No report in module {app}')
except AttributeError:
self.actlog.info(f'aCTReport.report() not found in {app}')
except Exception as e:
self.actlog.error(f'Exception running {app}.aCTReport.report: {e}')
    def ProcessReport(self):
        """List running aCT processes grouped by cluster and kill any
        process that has been running for more than an hour.
        """
        if self.actconfs != ['']:
            return # don't print processes for combined report
        actprocscmd = 'ps ax -ww -o pid,etime,args'
        try:
            out = subprocess.run(actprocscmd.split(), check=True, encoding='utf-8', stdout=subprocess.PIPE).stdout
        except subprocess.CalledProcessError as e:
            self.log('Error: could not run ps command: %s' % e.stderr)
            return
        # Group processes by cluster
        cluster_procs = {}
        longprocesses = []
        for line in out.split('\n'):
            # Capture pid, elapsed time, aCT process name and the
            # optional trailing cluster argument.
            reg = re.match(r'\s*(\d*)\s*(.*) .*python.* .*(aCT\w*)\.py\s?(\S*)', line)
            if reg:
                pid, runningtime, process, cluster = reg.groups()
                # ignore Main and this process
                if process in ['aCTReport', 'aCTMain', 'aCTHeartbeatWatchdog']:
                    continue
                if cluster == '':
                    cluster = '(no cluster defined)'
                elif not re.match(r'\d\d:\d\d$', runningtime):
                    # Check for overrunning processes: ps etime is MM:SS
                    # only while under an hour, so any other format means
                    # the process has run for an hour or more.
                    # NOTE(review): processes with no cluster argument
                    # are never length-checked (elif) -- confirm intended.
                    longprocesses.append((process, pid, cluster, runningtime))
                if cluster in cluster_procs:
                    cluster_procs[cluster].append(process)
                else:
                    cluster_procs[cluster] = [process]
        for proc in longprocesses:
            self.log('WARNING: %s (pid %s) for %s running for more than one hour (%s), this process will be killed' % proc)
            # Kill process and log a critical message to send email
            # Too many emails, disable
            #self.criticallog.critical('Killing process %s (pid %s) for %s running for more than one hour (%s)' % proc)
            try:
                os.kill(int(proc[1]), signal.SIGKILL)
            except OSError:
                pass
        self.log()
        self.log('Active processes per cluster:')
        for cluster in sorted(cluster_procs):
            procs = cluster_procs[cluster]
            procs.sort()
            self.log(f'{cluster:>38.38}: {" ".join(procs)}')
        self.log()
def ArcJobReport(self):
rep={}
rtot={}
states = ["Undefined", "Accepted", "Preparing", "Submitting",
"Queuing", "Running", "Finishing", "Finished", "Hold", "Killed",
"Failed", "Deleted", "Other"]
for conf in self.actconfs:
if conf:
os.environ['ACTCONFIGARC'] = conf
db=aCTDBArc.aCTDBArc(self.actlog)
c=db.db.conn.cursor()
c.execute("select jobid,state from arcjobs")
rows=c.fetchall()
for r in rows:
reg=re.search('.+//([^:]+)',str(r[0]))
cl=""
try:
cl=reg.group(1)
except:
cl='WaitingSubmission'
jid=str(r[1])
if jid == 'None':
jid="Other"
try:
rep[cl][jid]+=1
except:
try:
rep[cl][jid]=1
except:
rep[cl]={}
rep[cl][jid]=1
try:
rtot[jid]+=1
except:
rtot[jid]=1
if sum(rtot.values()) == 0:
return
self.log(f"All ARC jobs: {sum(rtot.values())}")
self.log(f"{'':39} {' '.join([f'{s:>9}' for s in states])}")
for k in sorted(rep, key=lambda x: x.split('.')[-1]):
log=f"{k:>38.38}:"
for s in states:
try:
log += f'{rep[k][s]:>10}'
except KeyError:
log += f'{"-":>10}'
self.log(log)
log = f"{'Totals':>38}:"
for s in states:
try:
log += f'{rtot[s]:>10}'
except:
log += f'{"-":>10}'
self.log(log+'\n\n')
def CondorJobReport(self):
rep = {}
rtot = {}
condorjobstatemap = ['Undefined', # used before real state is known
'Idle',
'Running',
'Removed',
'Completed',
'Held',
'Transferring',
'Suspended']
for conf in self.actconfs:
if conf:
os.environ['ACTCONFIGARC'] = conf
db=aCTDBArc.aCTDBArc(self.actlog)
c = db.db.conn.cursor()
c.execute("select cluster, JobStatus from condorjobs")
rows = c.fetchall()
for r in rows:
cl = str(r[0])
if not cl:
cl = 'WaitingSubmission'
jid = r[1]
try:
rep[cl][jid]+=1
except:
try:
rep[cl][jid]=1
except:
rep[cl]={}
rep[cl][jid]=1
try:
rtot[jid]+=1
except:
rtot[jid]=1
if sum(rtot.values()) == 0:
return
self.log(f"All Condor jobs: {sum(rtot.values())}")
self.log(f"{'':39} {' '.join([f'{s:>9}' for s in condorjobstatemap])}")
for k in sorted(rep, key=lambda x: x.split('.')[-1]):
log=f"{k:>38.38}:"
for s in range(8):
try:
log += f'{rep[k][s]:>10}'
except KeyError:
log += f'{"-":>10}'
self.log(log)
log = f"{'Totals':>38}:"
for s in range(8):
try:
log += f'{rtot[s]:>10}'
except:
log += f'{"-":>10}'
self.log(log+'\n\n')
def StuckReport(self):
# Query for lost jobs older than lostlimit
lostlimit = 86400
select = "(arcstate='submitted' or arcstate='running') and " \
+ self.db.timeStampLessThan("tarcstate", lostlimit) + \
" order by tarcstate"
columns = ['cluster']
jobs = self.db.getArcJobsInfo(select, columns)
if jobs:
self.log('Found %d jobs not updated in over %d seconds:\n' % (len(jobs), lostlimit))
clustercount = {}
for job in jobs:
try:
host = re.search('.+//([^:]+)', job['cluster']).group(1)
except:
host = None
if host in clustercount:
clustercount[host] += 1
else:
clustercount[host] = 1
for cluster, count in clustercount.items():
self.log(f'{count} {cluster}')
self.log()
def end(self):
if self.outfile:
self.log('</pre>')
def main():
    """Parse command-line arguments, build the full report and emit it."""
    parser = argparse.ArgumentParser(description='Report table of aCT jobs.')
    parser.add_argument('conffiles', nargs='*', help='list of configuration files')
    parser.add_argument('--web', help='Output suitable for web page')
    parser.add_argument('--harvester', action='store_true', help='Dummy arg for backwards compatibility')
    args = parser.parse_args(sys.argv[1:])
    acts = aCTReport(args)
    acts.AppReport()
    acts.ArcJobReport()
    acts.CondorJobReport()
    acts.StuckReport()
    acts.ProcessReport()
    acts.end()
    if acts.outfile is None:
        sys.stdout.write(acts.output)
    else:
        # Context manager ensures the file is closed even if the write fails.
        with open(acts.outfile, "w") as f:
            f.write(acts.output)
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
| 9,465 | 2,827 |
from rest_framework.test import APITestCase, APIClient
from django.urls import reverse
from rest_framework.authtoken.models import Token
class UserTest(APITestCase):
    """
    Test the User APIv2 endpoint.
    """
    fixtures = ['dojo_testdata.json']

    def setUp(self):
        # Authenticate every request as the admin user via its API token.
        admin_token = Token.objects.get(user__username='admin')
        self.client = APIClient()
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + admin_token.key)

    def test_user_list(self):
        response = self.client.get(reverse('user-list'))
        self.assertEqual(response.status_code, 200, response.content[:1000])
        user_list = response.json()['results']
        self.assertTrue(len(user_list) >= 1, response.content[:1000])
        for user in user_list:
            # Public profile fields must be present in each serialized user...
            for field in ('username', 'first_name', 'last_name', 'email'):
                self.assertIn(field, user, response.content[:1000])
            # ...while the password must never be exposed.
            self.assertNotIn('password', user, response.content[:1000])

    def test_user_add(self):
        # simple user without password
        response = self.client.post(reverse('user-list'), {
            "username": "api-user-1"
        }, format='json')
        self.assertEqual(response.status_code, 201, response.content[:1000])
        # user with good password
        password = 'testTEST1234!@#$'
        response = self.client.post(reverse('user-list'), {
            "username": "api-user-2",
            "password": password
        }, format='json')
        self.assertEqual(response.status_code, 201, response.content[:1000])
        # test password by fetching API key
        response = self.client.post(reverse('api-token-auth'), {
            "username": "api-user-2",
            "password": password
        }, format='json')
        self.assertEqual(response.status_code, 200, response.content[:1000])
        # user with weak password
        response = self.client.post(reverse('user-list'), {
            "username": "api-user-3",
            "password": "weakPassword"
        }, format='json')
        self.assertEqual(response.status_code, 400, response.content[:1000])
        self.assertIn('The password must contain at least 1 digit, 0-9.', response.content.decode("utf-8"))

    def test_user_change_password(self):
        # create a user to modify
        response = self.client.post(reverse('user-list'), {
            "username": "api-user-4"
        }, format='json')
        self.assertEqual(response.status_code, 201, response.content[:1000])
        user_id = response.json()['id']
        user_url = "{}{}/".format(reverse('user-list'), user_id)
        # normal profile updates via PUT and PATCH are allowed
        response = self.client.put(user_url, {
            "username": "api-user-4",
            "first_name": "first"
        }, format='json')
        self.assertEqual(response.status_code, 200, response.content[:1000])
        response = self.client.patch(user_url, {
            "last_name": "last"
        }, format='json')
        self.assertEqual(response.status_code, 200, response.content[:1000])
        # password changes must be rejected for both PUT and PATCH
        response = self.client.put(user_url, {
            "username": "api-user-4",
            "password": "testTEST1234!@#$"
        }, format='json')
        self.assertEqual(response.status_code, 400, response.content[:1000])
        self.assertIn("Update of password though API is not allowed", response.content.decode("utf-8"))
        response = self.client.patch(user_url, {
            "password": "testTEST1234!@#$"
        }, format='json')
        self.assertEqual(response.status_code, 400, response.content[:1000])
        self.assertIn("Update of password though API is not allowed", response.content.decode("utf-8"))
| 3,477 | 1,156 |
from flask import Flask
from src.models import db
from . import config
def create_app():
    """Create and configure the Flask application with its database bound."""
    app = Flask(__name__)
    app.config.update(
        SQLALCHEMY_DATABASE_URI=config.DATABASE_CONNECTION_URI,
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )
    # NOTE(review): the app context is pushed and deliberately left active,
    # so db operations work right after this call — confirm callers rely on it.
    app.app_context().push()
    db.init_app(app)
    db.create_all()
    return app
| 371 | 134 |
from dependencies import Injector
from dependencies import this
from dependencies.contrib.celery import shared_task
from examples.order.commands import ProcessOrder
@shared_task
class ProcessOrderTask(Injector):
    # Celery shared task assembled via the `dependencies` Injector:
    # the class attributes below are injected dependencies, not plain fields.
    # Task name under which Celery registers this task.
    name = "process_order"
    # The command object acting as the task body.
    run = ProcessOrder
    # bind = True: presumably makes the task instance available as `task` — confirm
    # against the dependencies.contrib.celery docs.
    bind = True
    # Lazily resolves to the bound task's retry method.
    retry = this.task.retry
| 310 | 84 |
def eh_bissexto(ano):
    """Return True when *ano* is a leap year in the Gregorian calendar.

    A year is a leap year when divisible by 4, except century years,
    which must also be divisible by 400 (1900 is not leap, 2000 is).
    """
    return ano % 4 == 0 and (ano % 100 != 0 or ano % 400 == 0)


if __name__ == '__main__':
    ano = int(input('Digite o ano: '))
    # The original check (ano % 4 == 0) missed the century rule.
    if eh_bissexto(ano):
        print('Ele é bissexto')
    else:
        print('Ele não é bissexto')
# Package metadata.
__version__ = '0.0.1'
__license__ = 'BSD'
| 42 | 23 |
from datetime import timedelta
from typing import Union, List, Optional
import click
import pandas as pd
from flask import current_app as app
from flask.cli import with_appcontext
from flexmeasures import Sensor
from flexmeasures.data import db
from flexmeasures.data.schemas.generic_assets import GenericAssetIdField
from flexmeasures.data.schemas.sensors import SensorIdField
from flexmeasures.data.models.generic_assets import GenericAsset
from flexmeasures.data.models.time_series import TimedBelief
from flexmeasures.data.utils import save_to_db
@click.group("edit")
def fm_edit_data():
"""FlexMeasures: Edit data."""
@fm_edit_data.command("attribute")
@with_appcontext
@click.option(
"--asset-id",
"assets",
required=False,
multiple=True,
type=GenericAssetIdField(),
help="Add/edit attribute to this asset. Follow up with the asset's ID.",
)
@click.option(
"--sensor-id",
"sensors",
required=False,
multiple=True,
type=SensorIdField(),
help="Add/edit attribute to this sensor. Follow up with the sensor's ID.",
)
@click.option(
"--attribute",
"attribute_key",
required=True,
help="Add/edit this attribute. Follow up with the name of the attribute.",
)
@click.option(
"--float",
"attribute_float_value",
required=False,
type=float,
help="Set the attribute to this float value.",
)
@click.option(
"--bool",
"attribute_bool_value",
required=False,
type=bool,
help="Set the attribute to this bool value.",
)
@click.option(
"--str",
"attribute_str_value",
required=False,
type=str,
help="Set the attribute to this string value.",
)
@click.option(
"--int",
"attribute_int_value",
required=False,
type=int,
help="Set the attribute to this integer value.",
)
@click.option(
"--null",
"attribute_null_value",
required=False,
is_flag=True,
default=False,
help="Set the attribute to a null value.",
)
def edit_attribute(
attribute_key: str,
assets: List[GenericAsset],
sensors: List[Sensor],
attribute_null_value: bool,
attribute_float_value: Optional[float] = None,
attribute_bool_value: Optional[bool] = None,
attribute_str_value: Optional[str] = None,
attribute_int_value: Optional[int] = None,
):
"""Edit (or add) an asset attribute or sensor attribute."""
if not assets and not sensors:
raise ValueError("Missing flag: pass at least one --asset-id or --sensor-id.")
# Parse attribute value
attribute_value = parse_attribute_value(
attribute_float_value=attribute_float_value,
attribute_bool_value=attribute_bool_value,
attribute_str_value=attribute_str_value,
attribute_int_value=attribute_int_value,
attribute_null_value=attribute_null_value,
)
# Set attribute
for asset in assets:
asset.attributes[attribute_key] = attribute_value
db.session.add(asset)
for sensor in sensors:
sensor.attributes[attribute_key] = attribute_value
db.session.add(sensor)
db.session.commit()
print("Successfully edited/added attribute.")
@fm_edit_data.command("resample-data")
@with_appcontext
@click.option(
"--sensor-id",
"sensor_ids",
multiple=True,
required=True,
help="Resample data for this sensor. Follow up with the sensor's ID. This argument can be given multiple times.",
)
@click.option(
"--event-resolution",
"event_resolution_in_minutes",
type=int,
required=True,
help="New event resolution as an integer number of minutes.",
)
@click.option(
"--from",
"start_str",
required=False,
help="Resample only data from this datetime onwards. Follow up with a timezone-aware datetime in ISO 6801 format.",
)
@click.option(
"--until",
"end_str",
required=False,
help="Resample only data until this datetime. Follow up with a timezone-aware datetime in ISO 6801 format.",
)
@click.option(
"--skip-integrity-check",
is_flag=True,
help="Whether to skip checking the resampled time series data for each sensor."
" By default, an excerpt and the mean value of the original"
" and resampled data will be shown for manual approval.",
)
def resample_sensor_data(
sensor_ids: List[int],
event_resolution_in_minutes: int,
start_str: Optional[str] = None,
end_str: Optional[str] = None,
skip_integrity_check: bool = False,
):
"""Assign a new event resolution to an existing sensor and resample its data accordingly."""
event_resolution = timedelta(minutes=event_resolution_in_minutes)
event_starts_after = pd.Timestamp(start_str) # note that "" or None becomes NaT
event_ends_before = pd.Timestamp(end_str)
for sensor_id in sensor_ids:
sensor = Sensor.query.get(sensor_id)
if sensor.event_resolution == event_resolution:
print(f"{sensor} already has the desired event resolution.")
continue
df_original = sensor.search_beliefs(
most_recent_beliefs_only=False,
event_starts_after=event_starts_after,
event_ends_before=event_ends_before,
).sort_values("event_start")
df_resampled = df_original.resample_events(event_resolution).sort_values(
"event_start"
)
if not skip_integrity_check:
message = ""
if sensor.event_resolution < event_resolution:
message += f"Downsampling {sensor} to {event_resolution} will result in a loss of data. "
click.confirm(
message
+ f"Data before:\n{df_original}\nData after:\n{df_resampled}\nMean before: {df_original['event_value'].mean()}\nMean after: {df_resampled['event_value'].mean()}\nContinue?",
abort=True,
)
# Update sensor
sensor.event_resolution = event_resolution
db.session.add(sensor)
# Update sensor data
query = TimedBelief.query.filter(TimedBelief.sensor == sensor)
if not pd.isnull(event_starts_after):
query = query.filter(TimedBelief.event_start >= event_starts_after)
if not pd.isnull(event_ends_before):
query = query.filter(
TimedBelief.event_start + sensor.event_resolution <= event_ends_before
)
query.delete()
save_to_db(df_resampled, bulk_save_objects=True)
db.session.commit()
print("Successfully resampled sensor data.")
# Register the "edit" command group on the Flask application's CLI.
app.cli.add_command(fm_edit_data)
def parse_attribute_value(
    attribute_null_value: bool,
    attribute_float_value: Optional[float] = None,
    attribute_bool_value: Optional[bool] = None,
    attribute_str_value: Optional[str] = None,
    attribute_int_value: Optional[int] = None,
) -> Union[float, int, bool, str, None]:
    """Parse the attribute value from the mutually exclusive value flags.

    Exactly one of the flags must be used.

    :raises ValueError: if zero or more than one value flag was given
        (the old message only mentioned the "multiple values" case).
    """
    typed_values = [
        attribute_float_value,
        attribute_bool_value,
        attribute_str_value,
        attribute_int_value,
    ]
    if not single_true([attribute_null_value] + [v is not None for v in typed_values]):
        raise ValueError("Pass exactly one of --null, --float, --bool, --str or --int.")
    if attribute_null_value:
        return None
    elif attribute_float_value is not None:
        return float(attribute_float_value)
    elif attribute_bool_value is not None:
        return bool(attribute_bool_value)
    elif attribute_int_value is not None:
        return int(attribute_int_value)
    # single_true guarantees the string value is the one that was set.
    return attribute_str_value
def single_true(iterable) -> bool:
    """Return True when exactly one element of *iterable* is truthy."""
    return sum(1 for element in iterable if element) == 1
| 7,655 | 2,294 |
# Flask-Mail settings for sending through Gmail over SSL (port 465).
# SECURITY(review): the account credentials are hard-coded in source; move
# MAIL_USERNAME and MAIL_PASSWORD to environment variables or a secrets store.
mail_settings = {
    "MAIL_SERVER": 'smtp.gmail.com',
    "MAIL_PORT": 465,
    "MAIL_USE_TLS": False,
    "MAIL_USE_SSL": True,
    "MAIL_USERNAME": 'c003.teste.jp@gmail.com',
    "MAIL_PASSWORD": 'C003.teste'
}
#!/usr/bin/env python3.6
# -*- encoding=utf8 -*-
import pyquery
"""
้ๆฑๅญๆฎต๏ผ
ๆจ้กใ็ผ่กจๆฅๆใๅ้กใๆจ็ฑคใๅ
งๅฎนใๅ็
้่ฆ็ๅญๆฎตไฟกๆฏ
1. ็ฝ็ซๆ นURL
2. ่งฃๆๅจๅๅญ
3. ่งฃๆๅจ็ฑปๅ
1. PARSER_PASSAGE_URL ๆ็ซ URL
2. PARSER_PASSAGE_TITLE ๆ็ซ ๆ ้ข
3. PARSER_PASSAGE_DATE ๅ่กจๆฅๆ
4. PARSER_PASSAGE_CATEGORY ๆ็ซ ๅ้ก
5. PARSER_PASSAGE_TAG ๆ็ซ ๆจ็ฑค
6. PARSER_PASSAGE_CONTENT ๆ็ซ ๅ
ๅฎน
7. PARSER_PASSAGE_IMGURL ๆ็ซ ไธญ็ๅพ็ URL
"""
class Parser(object):
    """Base class for site-specific article parsers.

    Subclasses override the ``_parser_passage_*`` hooks; :meth:`parse`
    dispatches to the hook matching ``parse_type``, or applies a raw
    pyquery selector rule when no known type is given.
    """

    # Parse-type identifiers accepted by parse().
    PARSER_PASSAGE_URL = 1
    PARSER_PASSAGE_TITLE = 2
    PARSER_PASSAGE_DATE = 3
    PARSER_PASSAGE_CATEGORY = 4
    PARSER_PASSAGE_TAG = 5
    PARSER_PASSAGE_CONTENT = 6
    PARSER_PASSAGE_IMGURL = 7

    def __init__(self):
        self._webURL = ''
        self._parserName = 'base_parser'

    # The hooks below are stubs (returning None) to be overridden.
    def _parser_passage_url(self, doc: str) -> (bool, str):
        return

    def _parser_passage_title(self, doc: str) -> (bool, str):
        return

    def _parser_passage_date(self, doc: str) -> (bool, str):
        return

    def _parser_passage_category(self, doc: str) -> (bool, str):
        return

    def _parser_passage_tag(self, doc: str) -> (bool, str):
        return

    def _parser_passage_content(self, doc: str) -> (bool, str):
        return

    def _parser_passage_img_url(self, doc: str) -> (bool, str, bytes):
        return

    def get_parser_name(self):
        """Return the name identifying this parser."""
        return self._parserName

    @staticmethod
    def _parser(doc: str, rule: str):
        """Apply the pyquery selector *rule* to *doc*."""
        return pyquery.PyQuery(doc).find(rule)

    def parse(self, doc: str, rule='', parse_type=-1):
        """Parse *doc* according to *parse_type*.

        Empty or None input yields (False, ''). An unknown parse_type falls
        back to applying *rule* directly via pyquery.
        """
        # Hoisted empty check: the original repeated it in every branch.
        if doc is None or doc == '':
            return (False, '')
        handlers = {
            self.PARSER_PASSAGE_URL: self._parser_passage_url,
            self.PARSER_PASSAGE_TITLE: self._parser_passage_title,
            self.PARSER_PASSAGE_DATE: self._parser_passage_date,
            self.PARSER_PASSAGE_CATEGORY: self._parser_passage_category,
            self.PARSER_PASSAGE_TAG: self._parser_passage_tag,
            self.PARSER_PASSAGE_CONTENT: self._parser_passage_content,
            self.PARSER_PASSAGE_IMGURL: self._parser_passage_img_url,
        }
        handler = handlers.get(parse_type)
        if handler is not None:
            return handler(doc)
        return Parser._parser(doc, rule)
| 3,071 | 1,051 |
#!/usr/bin/env python3
# ----------------------------------------------------------------------------
# Copyright 2019 Drunella
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import os
import sys
import glob
import subprocess
import argparse
import hashlib
import traceback
import pprint
def readblockmap_info(filename):
    """Read a whitespace-separated mapping file into {first_token: rest_tokens}."""
    mapping = {}
    with open(filename) as handle:
        for line in handle:
            tokens = line.split()
            mapping[tokens[0]] = tokens[1:]
    return mapping
def readdisks_info(filename):
    """Read the disk configuration file as a list of whitespace-split rows.

    The unused ``disks`` accumulator and dead pprint call were removed.
    """
    with open(filename) as f:
        return [line.split() for line in f]
def readdisks_getdiskinfo(disks, diskname):
    """Return the row of *disks* whose first field equals *diskname*, or []."""
    return next((entry for entry in disks if entry[0] == diskname), [])
def map_initialize():
    """Reset the global 2 KiB block map to 0xff (meaning "empty").

    Only map_data is (re)assigned here; the misleading ``bank_data`` entry
    in the global statement was removed.
    """
    global map_data
    map_data = bytearray([0xff] * 0x800)
def crtmap_appendentry(filename, block, name, address):
    """Append one "<block> f <name> addr 0x<address>" line to the crt map file."""
    entry = "{0} f {1} addr 0x{2:04x}\n".format(block, name, address)
    with open(filename, "at") as handle:
        return handle.write(entry)
def load_file(filename):
    """Read and return the entire file contents as bytes."""
    with open(filename, "rb") as handle:
        contents = handle.read()
    return contents
def write_prg(dirname, lowhigh, data):
    """Write *data* to *dirname* as a raw bank image.

    *lowhigh* selects the intended load area (0 -> $8000, 1 -> $A000) and is
    validated here; the two-byte load-address header the original computed
    was never written (the f.write(a) call was commented out), so the dead
    header construction has been removed.
    """
    if lowhigh not in (0, 1):
        raise Exception("lowhigh can only be 0 or 1")
    with open(dirname, "wb") as f:
        f.write(data)
def blockmap_appendentry(diskid, line, bank, highaddress):
    """Record (bank, high address byte) for one disk line in the global map.

    Each disk occupies 256 bytes of map_data; each line takes two bytes.
    """
    global map_data
    offset = diskid * 256 + line * 2
    map_data[offset] = bank
    map_data[offset + 1] = highaddress
def calculate_address(lowhigh):
    """Return the load-address high byte: 0 -> 0x80 ($8000), 1 -> 0xA0 ($A000)."""
    if lowhigh not in (0, 1):
        raise Exception("lowhigh can only be 0 or 1")
    return 0x80 if lowhigh == 0 else 0xA0
def main(argv):
    """Build per-disk .aprg bank images plus the block map for the cartridge.

    For every known disk, the raw .data file is cut into 8 KiB banks (two
    256*16-byte lines per bank), each bank is written as an .aprg file and
    registered in the crt map, and the global block map records which
    bank/address holds every disk line.
    """
    global bank_data, map_data
    p = argparse.ArgumentParser()
    p.add_argument("-v", dest="verbose", action="store_true", help="Verbose output.")
    p.add_argument("-o", dest="disks", action="store", required=True, help="disk configuration file.")
    p.add_argument("-f", dest="files", action="store", required=True, help="files directory.")
    p.add_argument("-m", dest="crtfile", action="store", required=True, help="crt.map file")
    p.add_argument("-d", dest="destination", action="store", required=True, help="destination directory.")
    p.add_argument("-b", dest="blockmap", action="store", required=True, help="blockmap file.")
    #p.add_argument("-f", dest="fileoutput", action="store", required=True, help="output data content file.")
    args = p.parse_args()
    #temp_path = os.path.join(args.build, "temp")
    #os.makedirs(temp_path, exist_ok=True)
    files_path = args.files #os.path.join(args.build, "files")
    os.makedirs(files_path, exist_ok=True)
    destination_path = args.destination #os.path.join(args.build, "obj")
    os.makedirs(destination_path, exist_ok=True)
    disks = readdisks_info(args.disks)
    blockmap = readblockmap_info(args.blockmap)
    map_initialize()
    # Start the crt map from scratch; entries are appended per bank below.
    if os.path.exists(args.crtfile):
        os.remove(args.crtfile)
    # add blocks file
    for d in ("britannia", "towne", "dwelling", "castle", "keep", "dungeon", "underworld"):
        diskinfo = readdisks_getdiskinfo(disks, d)
        # Disk geometry: start track, track count, and disk id ('A' -> 0).
        starttrack = int(diskinfo[2], 0)
        height = int(diskinfo[4], 0) - int(diskinfo[2], 0) + 1
        diskid = int(diskinfo[1], 0) - 0x41
        startbank = int(blockmap[d][0], 0)
        lowhigh = int(blockmap[d][1], 0)
        block_data = load_file(os.path.join(files_path, d + ".data"))
        # build map and blocks
        # Byte 255 of each disk's map page stores the start track.
        map_data[diskid*256+255] = starttrack
        for b in range(0, height, 2):
            # double line or single line
            #factor = 2
            #if b+1 >= height:
            # factor = 1
            # make data
            bank_data = bytearray([0xff] * 0x2000)
            baseaddress = calculate_address(lowhigh)
            if b+1 >= height:
                # one line
                s = b * 256*16
                l = 0x1000
                bank_data[0:l] = block_data[s:s+l]
                blockmap_appendentry(diskid, b, startbank, baseaddress)
            else:
                # two lines
                s = b * 256*16
                l = 0x2000
                bank_data[0:l] = block_data[s:s+l]
                blockmap_appendentry(diskid, b, startbank, baseaddress)
                # Second line sits 0x1000 bytes in, i.e. 0x10 pages higher.
                blockmap_appendentry(diskid, b+1, startbank, baseaddress+0x10)
            # write data and map
            filename = "{0}_{1:02d}.aprg".format(d, b)
            write_prg(os.path.join(destination_path, filename), lowhigh, bank_data)
            crtmap_appendentry(args.crtfile, startbank, filename, baseaddress * 0x100)
            # increase values
            startbank += 1
    # write block map
    blockmap_bank = int(blockmap["blockmap"][0], 0)
    blockmap_lowhigh = int(blockmap["blockmap"][1], 0)
    blockmap_address = calculate_address(blockmap_lowhigh) * 256
    #blockmap_appendentry(0, b, startbank, baseaddress)
    blockmapname = os.path.join(destination_path, "blockmap.aprg")
    write_prg(blockmapname, blockmap_lowhigh, map_data)
    crtmap_appendentry(args.crtfile, blockmap_bank, "blockmap.aprg", blockmap_address)
    return 0
# Script entry point: exit with main()'s return code, or 1 on any exception.
if __name__ == '__main__':
    try:
        retval = main(sys.argv)
        sys.exit(retval)
    except Exception as e:
        print(e)
        traceback.print_exc()
        sys.exit(1)
| 6,341 | 2,140 |
# SPDX-License-Identifier: BSD-3-Clause
"""
Text decode functions.
These functions can be used to get Unicode strings from a series of bytes.
"""
from codecs import (
BOM_UTF8,
BOM_UTF16_BE,
BOM_UTF16_LE,
BOM_UTF32_BE,
BOM_UTF32_LE,
CodecInfo,
lookup as lookup_codec,
)
from collections import OrderedDict
from typing import Dict, Iterable, Optional, Tuple
from apetest.typing import LoggerT
def encoding_from_bom(data: bytes) -> Optional[str]:
    """
    Look for a byte-order-marker at the start of the given C{bytes}.
    If found, return the encoding matching that BOM, otherwise return C{None}.
    """
    # UTF-32 must be checked before UTF-16: BOM_UTF16_LE (FF FE) is a prefix
    # of BOM_UTF32_LE (FF FE 00 00), so checking UTF-16 first misdetected
    # UTF-32 LE documents.
    if data.startswith(BOM_UTF8):
        return "utf-8"
    elif data.startswith((BOM_UTF32_LE, BOM_UTF32_BE)):
        return "utf-32"
    elif data.startswith((BOM_UTF16_LE, BOM_UTF16_BE)):
        return "utf-16"
    else:
        return None
def standard_codec_name(name: str) -> str:
    """
    Map a codec name to the preferred standardized version.
    The preferred names were taken from this list published by IANA:
    U{http://www.iana.org/assignments/character-sets/character-sets.xhtml}
    @param name:
        Text encoding name, in lower case.
    """
    # All ISO-8859 variants share one rewrite rule.
    if name.startswith("iso8859"):
        return "iso-8859" + name[7:]
    preferred_names = {
        "ascii": "us-ascii",
        "euc_jp": "euc-jp",
        "euc_kr": "euc-kr",
        "iso2022_jp": "iso-2022-jp",
        "iso2022_jp_2": "iso-2022-jp-2",
        "iso2022_kr": "iso-2022-kr",
    }
    return preferred_names.get(name, name)
def try_decode(data: bytes, encodings: Iterable[str]) -> Tuple[str, str]:
    """
    Attempt to decode text using the given encodings in order.
    @param data:
        Encoded version of the text.
    @param encodings:
        Names of the encodings to try. Must all be lower case.
    @return: C{(text, encoding)}
        The decoded string and the preferred name of the encoding used to
        decode it, which can differ from the name given in C{encodings}.
    @raise ValueError:
        If the text could not be decoded.
    """
    # Collect known codecs, de-duplicated by standard name while keeping
    # the order in which they were suggested.
    codecs: Dict[str, CodecInfo] = OrderedDict()
    for suggestion in encodings:
        try:
            info = lookup_codec(suggestion)
        except LookupError:
            continue
        codecs[standard_codec_name(info.name)] = info
    # The first codec that cleanly decodes the entire document wins.
    for std_name, info in codecs.items():
        try:
            text, consumed = info.decode(data, "strict")
        except UnicodeDecodeError:
            continue
        if consumed == len(data):
            return text, std_name
    raise ValueError("Unable to determine document encoding")
def decode_and_report(
    data: bytes,
    encoding_options: Iterable[Tuple[Optional[str], str]],
    logger: LoggerT,
) -> Tuple[str, str]:
    """
    Attempt to decode text using several encoding options in order.
    @param data:
        Encoded version of the text.
    @param encoding_options: C{(encoding | None, source)*}
        Pairs of a suggested encoding name and a description of where the
        suggestion originated; pairs whose encoding is C{None} are skipped.
    @param logger:
        Non-fatal problems are logged here, such as unknown encodings or
        suggestions that differ from the encoding actually used.
    @return: C{(text, encoding)}
        The decoded string and the encoding used to decode it.
    @raise ValueError:
        If the text could not be decoded.
    """
    # Keep only usable suggestions, remembering their provenance for logging.
    options = [(enc, src) for enc, src in encoding_options if enc is not None]
    # UTF-8 goes last as a fallback: it is the most common encoding these
    # days and a superset of ASCII, so it also covers old/simple documents.
    candidates = [enc for enc, _ in options] + ["utf-8"]
    text, used_encoding = try_decode(data, candidates)
    # Report differences between suggested encodings and the one chosen.
    for suggested, source in options:
        try:
            codec = lookup_codec(suggested)
        except LookupError:
            logger.warning(
                '%s specifies encoding "%s", which is unknown to Python',
                source,
                suggested,
            )
            continue
        std_name = standard_codec_name(codec.name)
        if std_name != used_encoding:
            logger.warning(
                '%s specifies encoding "%s", while actual encoding seems to be "%s"',
                source,
                suggested,
                used_encoding,
            )
        elif std_name != suggested:
            logger.info(
                '%s specifies encoding "%s", which is not the standard name "%s"',
                source,
                suggested,
                used_encoding,
            )
    return text, used_encoding
| 5,079 | 1,514 |
import os
import json
import gzip
from copy import deepcopy, copy
import numpy as np
import csv
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader, RandomSampler
from transformers.tokenization_utils import trim_batch
class LabelSmoothingLoss(nn.Module):
    """KL-divergence loss against a label-smoothed target distribution.

    The smoothed target gives ``1 - label_smooth`` mass to the gold class
    and spreads ``label_smooth`` uniformly over the remaining classes
    (excluding the ignore index). Positions whose target equals
    ``ignore_index`` contribute nothing to the loss.
    """

    def __init__(self, label_smooth, tgt_vocab_size, ignore_index=-100):
        assert 0. < label_smooth <= 1.
        self.ignore_index = ignore_index
        super().__init__()
        # Probability mass given to each non-gold class; the denominator
        # excludes the gold class and the ignore index (hence -2).
        smooth_mass = label_smooth / (tgt_vocab_size - 2)
        template = torch.full((tgt_vocab_size,), smooth_mass)
        template[self.ignore_index] = 0
        # Shape [1, 1, vocab] so it broadcasts over batch and sequence dims.
        self.register_buffer('one_hot', template.unsqueeze(0).unsqueeze(0))
        self.confidence = 1.0 - label_smooth
        self.lossfct = torch.nn.KLDivLoss(reduction='none')

    def forward(self, pred, target):
        """
        Args:
            pred: [bsz, seq_len, vocab_size] raw (unnormalized) scores
            target: [bsz, seq_len] gold class indices
        Returns:
            Scalar loss, averaged over non-ignored positions.
        """
        # Expand the smoothed template and drop the confidence mass onto
        # each position's gold class.
        target_dist = self.one_hot.repeat(target.size(0), target.size(1), 1)
        target_dist.scatter_(2, target.unsqueeze(2), self.confidence)
        target_dist.masked_fill_((target == self.ignore_index).unsqueeze(2), 0)
        log_probs = F.log_softmax(pred, dim=2)
        pointwise = self.lossfct(log_probs, target_dist)
        # Sum over the vocab dim, zero out ignored positions, then average
        # over the positions that actually count.
        pointwise = torch.sum(pointwise, dim=2).masked_fill_((target == self.ignore_index), 0)
        return torch.sum(pointwise) / torch.sum((target != self.ignore_index).to(torch.float))
# Special symbols
SOS_token = "<SOS>" # start of sentence
EOS_token = "<EOS>" # end of sentence
PAD_token = SOS_token # padding symbol
# Full token inventories for the SCAN dataset: input command words and
# output action primitives.
INPUT_TOKENS_SCAN = ['jump', 'opposite', 'right', 'twice', 'and', 'turn', 'thrice', 'run', 'after', 'around', 'left', 'walk', 'look']
OUTPUT_TOKENS_SCAN = ['I_TURN_RIGHT', 'I_JUMP', 'I_TURN_LEFT', 'I_RUN', 'I_WALK', 'I_LOOK']
# ACTION_TO_TEXT = {'I_TURN_RIGHT': 'right', 'I_JUMP': 'jump', 'I_TURN_LEFT': 'left', 'I_RUN': 'run', 'I_WALK': 'walk', 'I_LOOK': 'look'}
class Lang:
    """Bidirectional mapping between symbols and integer indices.

    Use separate instances for the input language (e.g. commands) and the
    output language (e.g. actions). When <SOS> is not among the given
    symbols, <SOS> and <EOS> are appended after the regular symbols.

    NOTE(review): encoded tensors are moved to CUDA unconditionally, so a
    GPU is required for the encode_* methods.
    """

    def __init__(self, symbols, io_type):
        # symbols : list of all possible symbols (possibly '\n'-terminated)
        n = len(symbols)
        self.symbols = [raw.strip('\n') for raw in symbols]
        self.io_type = io_type
        if SOS_token not in self.symbols:
            assert EOS_token not in self.symbols
            self.index2symbol = {n: SOS_token, n + 1: EOS_token}
            self.symbol2index = {SOS_token: n, EOS_token: n + 1}
            self.sos_id, self.eos_id = n, n + 1
        else:
            self.index2symbol = {}
            self.symbol2index = {}
            self.sos_id, self.eos_id = 0, 1
        self.pad_token_id = self.sos_id
        for index, symbol in enumerate(self.symbols):
            self.index2symbol[index] = symbol
            self.symbol2index[symbol] = index
        self.n_symbols = len(self.index2symbol)

    def variableFromSymbols(self, mylist, add_eos=True):
        """Convert a list of symbols to a CUDA LongTensor of indices.

        Appends the EOS symbol first when add_eos is True. The input list
        is copied, so the caller's list is not modified.
        """
        symbols = copy(mylist)
        if add_eos:
            symbols.append(EOS_token)
        encoded = torch.LongTensor([self.symbol2index[s] for s in symbols])
        return encoded.cuda()

    def symbolsFromVector(self, v):
        """Convert indices back to symbols, stopping at the first EOS."""
        decoded = []
        for index in v:
            symbol = self.index2symbol[index]
            if symbol == EOS_token:
                break
            decoded.append(symbol)
        return decoded

    def encode_scan_file(self, data, max_length):
        """Encode one side of (input, output) SCAN pairs, chosen by io_type."""
        encoded_data = []
        for dp in data:
            if self.io_type == 'input':
                raw_text = dp[0]
            else:
                assert self.io_type == 'output'
                raw_text = dp[1]
            encoded_data.append(self.variableFromSymbols(raw_text.split(' ')))
        return encoded_data

    def encode_scan_file_2_seg(self, data, max_length, cutoffs):
        """Encode output sequences as two segments split at per-example cutoffs."""
        first_parts, second_parts = [], []
        for position, dp in enumerate(data):
            output, cutoff = dp[1], cutoffs[position]
            assert self.io_type == 'output'
            tokens = output.split(' ')
            first_parts.append(self.variableFromSymbols(tokens[:cutoff]))
            second_parts.append(self.variableFromSymbols(tokens[cutoff:]))
        return first_parts, second_parts

    def encode_cfq_file(self, data, max_length):
        """Encode CFQ examples (already id sequences); outputs get EOS appended."""
        encoded_data = []
        for dp in data:
            if self.io_type == 'input':
                ids = dp['query_ids']
            else:
                assert self.io_type == 'output'
                ids = dp['sparql_ids'] + [self.eos_id]
            encoded_data.append(torch.LongTensor(ids).cuda())
        return encoded_data

    def encode_cogs_file(self, data, max_length):
        """Encode COGS examples from their 'src'/'trg' text fields."""
        encoded_data = []
        for dp in data:
            if self.io_type == 'input':
                raw_text = dp['src']
            else:
                assert self.io_type == 'output'
                raw_text = dp['trg']
            encoded_data.append(self.variableFromSymbols(raw_text.split(' ')))
        return encoded_data

    def decode(self, ids):
        """Decode a tensor of ids into symbols, dropping a leading SOS marker."""
        symbols = self.symbolsFromVector(ids.cpu().numpy())
        if symbols and symbols[0] in ['<SOS>', '<SOS_2>']:
            return symbols[1:]
        return symbols
def calculate_accuracy(preds, gts):
    """Return the fraction of predictions that exactly match the ground truth."""
    assert len(preds) == len(gts)
    matches = sum(1 for pred, gt in zip(preds, gts) if pred == gt)
    return matches / len(preds)
def encode_file(tokenizer, data_path, max_length, pad_to_max_length=True, return_tensors="pt", max_examples=None):
    """Tokenize each line of a (possibly gzipped) text file.

    Each line gets ' </s>' appended before tokenization.  Stops after
    max_examples lines when given (a falsy max_examples means no limit,
    matching the original `if max_examples and ...` check).

    Args:
        tokenizer: object exposing batch_encode_plus (HuggingFace-style).
        data_path: path to a plain-text or .gz file.
        max_length / pad_to_max_length / return_tensors: forwarded to the
            tokenizer unchanged.
        max_examples: optional cap on the number of lines processed.

    Returns:
        list of tokenizer outputs, one per line.
    """
    examples = []
    if data_path[-3:] == '.gz':
        print('Data file is gzipped')
        f = gzip.open(data_path, "rt")
    else:
        print('Data file is plain text')
        print(data_path)
        f = open(data_path, "r", encoding='utf-8')
    # 'with' closes the file even if the tokenizer raises (the original
    # leaked the handle on errors).  Iterating f directly avoids loading
    # the whole file into memory via readlines().
    with f:
        for i, text in enumerate(f):
            # Check the limit *before* tokenizing: the original tokenized the
            # line past the limit and then discarded it, wasting work.
            if max_examples and i >= max_examples:
                break
            tokenized = tokenizer.batch_encode_plus(
                [text + ' </s>'], max_length=max_length,
                pad_to_max_length=pad_to_max_length, return_tensors=return_tensors)
            examples.append(tokenized)
    return examples
# def encode_file_iterator(tokenizer, data_path, max_length, pad_to_max_length=True, return_tensors="pt", max_examples=None):
# '''
# This provides a low-memory usage way of iterating thru all of the source/target lines for processing by JIT loader.
# '''
# if data_path[-3:] == '.gz':
# print('Data file is gzipped')
# f = gzip.open(data_path, "rt")
# else:
# print('Data file is plain text')
# f = open(data_path, "r", encoding='utf-8')
#
# for i, text in enumerate(f):
#
# tokenized = tokenizer.batch_encode_plus( [text + ' </s>'], max_length=max_length,
# pad_to_max_length=pad_to_max_length, return_tensors=return_tensors )
#
# yield tokenized
#
# if max_examples and i >= max_examples:
# break
#
# f.close()
# def convert_scan_actions_to_text(actions):
# return ' '.join([ACTION_TO_TEXT[_action] for _action in actions.split(' ')])
# def encode_scan_file(tokenizer, data, io_type, max_length, pad_to_max_length=True, return_tensors="pt", max_examples=None):
# examples = []
# # a = tokenizer.batch_encode_plus( ['right jump left run walk look' + ' <s> </s>'], max_length=max_length,
# # pad_to_max_length=pad_to_max_length, return_tensors=return_tensors )
# # print(a)
# # exit()
# for dp in data:
# input, output = dp[0], dp[1]
# if io_type == 'input':
# raw = input
# else:
# assert io_type == 'output'
# raw = convert_scan_actions_to_text(output)
#
# tokenized = tokenizer.batch_encode_plus( [raw + ' </s>'], max_length=max_length,
# pad_to_max_length=pad_to_max_length, return_tensors=return_tensors )
#
# if max_examples and i >= max_examples:
# break
# examples.append(tokenized)
#
# return examples
def load_scan_file(mytype, split):
    """Load a SCAN split from data/scan/tasks_<split>_<mytype>.txt.

    Args:
        mytype: SCAN experiment variant (must be one of the list below).
        split: 'train', 'test' or 'val'; 'val' falls back to 'test' for
            variants that have no validation split.

    Returns:
        list of [command, action-sequence] string pairs.
    """
    assert mytype in ['simple', 'addprim_jump', 'length', 'addprim_turn_left', 'all', 'template_around_right', 'viz',
                      'examine', 'template_jump_around_right', 'template_right', 'template_around_right',
                      'mcd1', 'mcd2', 'mcd3', 'mcd1.1', 'mcd1.2', 'debug', 'attn_vis']
    assert split in ['train', 'test', 'val']
    if split == 'val' and mytype not in ['mcd1', 'mcd2', 'mcd3', 'mcd1.1', 'mcd1.2']:
        split = 'test'
    fn = 'data/scan/tasks_' + split + '_' + mytype + '.txt'
    # 'with' guarantees the file is closed even if reading raises.
    with open(fn, 'r') as fid:
        lines = fid.readlines()
    lines = [l.strip() for l in lines]
    # The original used l.lstrip('IN: '), which strips any leading run of the
    # characters {'I', 'N', ':', ' '} rather than the literal prefix "IN: ".
    # It only worked because SCAN commands start with lowercase words.
    # Remove exactly the four-character prefix instead.
    lines = [l[4:] if l.startswith('IN: ') else l for l in lines]
    commands = [l.split(' OUT: ') for l in lines]
    return commands
class CompositionDataset(Dataset):
    """Base torch Dataset for compositional seq2seq tasks.

    Subclasses populate either self.dataset (pre-tokenized triples) or
    self.source / self.target (encoded id sequences); this class provides
    indexing, padding, masking and batch collation on top of them.
    All padding/masking helpers place tensors on the GPU via .cuda().
    """
    def __init__(
        self,
        src_lang,
        trg_lang,
        data_dir,
        type_path,
        sub_task,
        max_source_length=20,
        max_target_length=20,
        tokenized=False,
    ):
        # src_lang / trg_lang supply pad_token_id and encoding methods.
        # NOTE(review): data_dir, type_path and sub_task are accepted but not
        # stored here; subclasses are expected to consume them — confirm.
        super().__init__()
        self.max_source_length = max_source_length
        self.max_target_length = max_target_length
        self.tokenized = tokenized
        self.src_lang = src_lang
        self.trg_lang = trg_lang
    def __len__(self):
        # Length of the pre-tokenized dataset or of the encoded source list.
        if self.tokenized:
            return len(self.dataset)
        else:
            return len(self.source)
    def __getitem__(self, index):
        """Return {'source_ids', 'target_ids'} for one example; pre-tokenized
        data is truncated to the configured maximum lengths."""
        if self.tokenized:
            dp = self.dataset[index]
            source_ids, src_mask, target_ids = dp[0], dp[1], dp[2]
            source_ids = source_ids[:self.max_source_length]
            #src_mask = src_mask[:self.max_source_length]
            target_ids = target_ids[:self.max_target_length]
        else:
            source_ids = self.source[index]
            target_ids = self.target[index]
        return {"source_ids": source_ids, "target_ids": target_ids}
    @staticmethod
    def trim_seq2seq_batch(batch, src_pad_token_id, trg_pad_token_id, trim_y=True):
        """Strip trailing all-pad columns from a collated batch via trim_batch;
        returns (source_ids, source_mask, y)."""
        if trim_y:
            y = trim_batch(batch["target_ids"], trg_pad_token_id)
        else:
            y = batch["target_ids"]
        source_ids, source_mask = trim_batch(batch["source_ids"], src_pad_token_id, attention_mask=batch["source_mask"])
        return source_ids, source_mask, y
    def pad_to_max_len(self, ids, max_len, pad_token_id):
        """Right-pad a 1-D id tensor with pad_token_id up to max_len (CUDA)."""
        ids_length = ids.size(0)
        if ids_length == max_len:
            return ids
        pad_tokens = torch.tensor([pad_token_id] * (max_len - ids_length))
        # if ids.type() == 'torch.cuda.FloatTensor':
        #     print(ids)
        #     exit()
        padded_ids = torch.cat([ids, pad_tokens.cuda()])
        return padded_ids
    def create_mask(self, ids, max_len):
        """Attention mask of length max_len: 1 for real tokens, 0 for padding."""
        ids_length = ids.size(0)
        mask = torch.tensor([1] * ids_length + [0] * (max_len - ids_length)).cuda()
        return mask
    def collate_fn(self, batch):
        """Pad a list of examples to the batch max lengths, build the source
        mask, and trim trailing all-pad columns before returning the batch."""
        max_src_len = max(map(len, [x["source_ids"] for x in batch]))
        max_trg_len = max(map(len, [x["target_ids"] for x in batch]))
        src_mask = torch.stack([self.create_mask(x["source_ids"], max_src_len) for x in batch])
        src_ids = torch.stack([self.pad_to_max_len(x["source_ids"], max_src_len, self.src_lang.pad_token_id) for x in batch])
        #masks = torch.stack([x["source_mask"] for x in batch])
        trg_ids = torch.stack([self.pad_to_max_len(x["target_ids"], max_trg_len, self.trg_lang.pad_token_id) for x in batch])
        y = trim_batch(trg_ids, self.trg_lang.pad_token_id)
        src_ids, src_mask = trim_batch(src_ids, self.src_lang.pad_token_id, attention_mask=src_mask)
        return {"source_ids": src_ids, "source_mask": src_mask, "target_ids": y}
class ScanDataset(CompositionDataset):
    """SCAN dataset that additionally builds per-token auxiliary labels
    (action count / group / type) used as extra supervision signals."""
    def __init__(
        self,
        src_lang,
        trg_lang,
        data_dir="./data/scan/",
        type_path="train",
        sub_task="addprim_jump",
        max_source_length=20,
        max_target_length=20,
        tokenized=False,
    ):
        super().__init__(src_lang, trg_lang, data_dir, type_path, sub_task, max_source_length,
                         max_target_length, tokenized)
        scan_data = load_scan_file(sub_task, type_path)
        print(len(scan_data))
        # Full 'all'/'train' split as {command -> actions}; used to look up the
        # output of each half of a compound ('and'/'after') command.
        all_scan_dict = self.convert_to_dict(load_scan_file('all', 'train'))
        self.action_count_labels, self.action_group_labels, self.action_type_labels = self.construct_count_label(scan_data, all_scan_dict)
        if not tokenized:
            self.source = self.src_lang.encode_scan_file(scan_data, max_source_length)
            self.target = self.trg_lang.encode_scan_file(scan_data, max_target_length)
        else:
            self.dataset = torch.load(os.path.join(data_dir, type_path))
    def construct_count_label(self, raw_data, all_data_dict):
        """Build per-token auxiliary label sequences for every example.

        Each command is split at 'and'/'after' into the part executed first
        and the part executed second ('after' swaps the surface order), and
        three label sequences aligned with the output actions are produced.
        The hard-coded *_label_scheme strings below select among several
        labelling variants kept from earlier experiments.

        Returns three lists of CUDA LongTensors (count, group, type), one
        entry per example; each sequence ends with -1 at the EOS position.
        """
        all_count_labels = []
        count_label_scheme = "v1"
        group_label_scheme = "v2"
        type_label_scheme = "v2"
        all_action_group_labels, all_action_type_labels = [], []
        # Group 1: single prim (jump), Group 2: prim + direction (jump left), Group 3: prim opposite, Group 4: prim around
        #no_skip_id = np.random.randint(0, len(raw_data), int(len(raw_data)*0.05))
        #no_skip_id = np.random.choice(range(len(raw_data)), int(len(raw_data)*0.07), replace=False)
        # no_skip_id = np.random.choice(range(len(raw_data)), 10, replace=False)
        skip_cnt, sup_cnt = 0, 0
        for _id, dp in enumerate(raw_data):
            input_text, output_text = dp[0], dp[1]
            input_tok, output_tok = input_text.split(' '), output_text.split(' ')
            count_labels, group_labels, type_labels = [], [], []
            first_part_output_text, second_part_output_text = '', ''
            # Split the command into execution-ordered halves; 'after' reverses
            # the surface order of the two conjuncts.
            if 'and' in input_tok:
                first_part_input_tok = input_tok[:input_tok.index('and')]
                second_part_input_tok = input_tok[input_tok.index('and')+1:]
                first_part_output_text = all_data_dict[' '.join(first_part_input_tok)]
                second_part_output_text = all_data_dict[' '.join(second_part_input_tok)]
            elif 'after' in input_tok:
                second_part_input_tok = input_tok[:input_tok.index('after')]
                first_part_input_tok = input_tok[input_tok.index('after') + 1:]
                first_part_output_text = all_data_dict[' '.join(first_part_input_tok)]
                second_part_output_text = all_data_dict[' '.join(second_part_input_tok)]
            else:
                first_part_input_tok, second_part_input_tok = input_tok, []
                first_part_output_text = output_text
            first_part_output_tok, second_part_output_tok = first_part_output_text.split(' '), second_part_output_text.split(' ')
            if second_part_output_text == '':
                second_part_output_tok = []
            assert len(first_part_output_tok) + len(second_part_output_tok) == len(output_tok), \
                (len(first_part_output_tok), len(second_part_output_tok), len(output_tok), first_part_output_text, second_part_output_text, output_text)
            ### 1. Build the action count labels ###
            if count_label_scheme == 'v1':
                ### For the first part output
                if 'twice' in first_part_input_tok:
                    if 'after' in input_tok:
                        count_labels += ([4] * int(len(first_part_output_tok) / 2) + [3] * int(len(first_part_output_tok) / 2))
                    else:
                        count_labels += ([1] * int(len(first_part_output_tok) / 2) + [0] * int(len(first_part_output_tok) / 2))
                    # count_labels += ([1] + [0] * (int(len(first_part_output_tok) / 2) - 1)) * 2
                elif 'thrice' in first_part_input_tok:
                    if 'after' in input_tok:
                        count_labels += ([5] * int(len(first_part_output_tok) / 3) + [4] * int(len(first_part_output_tok) / 3) + \
                                         [3] * int(len(first_part_output_tok) / 3))
                    else:
                        count_labels += ([2] * int(len(first_part_output_tok) / 3) + [1] * int(len(first_part_output_tok) / 3) + \
                                         [0] * int(len(first_part_output_tok) / 3))
                    # count_labels += ([1] + [0] * (int(len(first_part_output_tok) / 3) - 1)) * 3
                else:
                    if 'after' in input_tok:
                        count_labels += ([3] * len(first_part_output_tok))
                    else:
                        count_labels += ([0] * len(first_part_output_tok))
                    # count_labels += ([1] + [0] * (int(len(first_part_output_tok)) - 1))
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'twice' in second_part_input_tok:
                        if 'after' in input_tok:
                            count_labels += ([1] * int(len(second_part_output_tok) / 2) + [0] * int(len(second_part_output_tok) / 2))
                        else:
                            count_labels += ([4] * int(len(second_part_output_tok) / 2) + [3] * int(len(second_part_output_tok) / 2))
                        # count_labels += ([1] + [0] * (int(len(second_part_output_tok) / 2) - 1)) * 2
                    elif 'thrice' in second_part_input_tok:
                        if 'after' in input_tok:
                            count_labels += ([2] * int(len(second_part_output_tok) / 3) + [1] * int(len(second_part_output_tok) / 3) + \
                                             [0] * int(len(second_part_output_tok) / 3))
                        else:
                            count_labels += ([5] * int(len(second_part_output_tok) / 3) + [4] * int(len(second_part_output_tok) / 3) + \
                                             [3] * int(len(second_part_output_tok) / 3))
                        # count_labels += ([1] + [0] * (int(len(second_part_output_tok) / 3) - 1)) * 3
                    else:
                        if 'after' in input_tok:
                            count_labels += ([0] * len(second_part_output_tok))
                        else:
                            count_labels += ([3] * len(second_part_output_tok))
                        # count_labels += ([1] + [0] * (int(len(second_part_output_tok)) - 1))
            elif count_label_scheme == 'v2':
                ### For the first part output
                if 'twice' in first_part_input_tok:
                    count_labels += ([1] * int(len(first_part_output_tok) / 2) + [0] * int(
                        len(first_part_output_tok) / 2))
                elif 'thrice' in first_part_input_tok:
                    count_labels += ([2] * int(len(first_part_output_tok) / 3) + [1] * int(
                        len(first_part_output_tok) / 3) + \
                                     [0] * int(len(first_part_output_tok) / 3))
                else:
                    count_labels += ([0] * len(first_part_output_tok))
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'twice' in second_part_input_tok:
                        count_labels += ([1] * int(len(second_part_output_tok) / 2) + [0] * int(
                            len(second_part_output_tok) / 2))
                    elif 'thrice' in second_part_input_tok:
                        count_labels += ([2] * int(len(second_part_output_tok) / 3) + [1] * int(
                            len(second_part_output_tok) / 3) + [0] * int(len(second_part_output_tok) / 3))
                    else:
                        count_labels += ([0] * len(second_part_output_tok))
            elif count_label_scheme == 'v3':
                ### For the first part output
                if 'thrice' in first_part_input_tok and 'thrice' in second_part_input_tok:
                    start_count = 5
                elif ('thrice' in first_part_input_tok and 'twice' in second_part_input_tok) or \
                        ('twice' in first_part_input_tok and 'thrice' in second_part_input_tok):
                    start_count = 4
                elif ('twice' in first_part_input_tok and 'twice' in second_part_input_tok) or \
                        ('thrice' in first_part_input_tok) or ('thrice' in second_part_input_tok):
                    start_count = 3
                elif 'twice' in first_part_input_tok or 'twice' in second_part_input_tok:
                    start_count = 2
                else:
                    start_count = 1
                if 'twice' in first_part_input_tok:
                    if 'after' in input_tok:
                        count_labels += ([start_count] * int(len(first_part_output_tok) / 2) + [start_count-1] * int(len(first_part_output_tok) / 2))
                    else:
                        count_labels += ([1] * int(len(first_part_output_tok) / 2) + [0] * int(len(first_part_output_tok) / 2))
                    # count_labels += ([1] + [0] * (int(len(first_part_output_tok) / 2) - 1)) * 2
                elif 'thrice' in first_part_input_tok:
                    if 'after' in input_tok:
                        count_labels += ([start_count] * int(len(first_part_output_tok) / 3) + [start_count-1] * int(len(first_part_output_tok) / 3) + \
                                         [start_count-2] * int(len(first_part_output_tok) / 3))
                    else:
                        count_labels += ([2] * int(len(first_part_output_tok) / 3) + [1] * int(len(first_part_output_tok) / 3) + \
                                         [0] * int(len(first_part_output_tok) / 3))
                    # count_labels += ([1] + [0] * (int(len(first_part_output_tok) / 3) - 1)) * 3
                else:
                    if 'after' in input_tok:
                        count_labels += ([start_count] * len(first_part_output_tok))
                    else:
                        count_labels += ([0] * len(first_part_output_tok))
                    # count_labels += ([1] + [0] * (int(len(first_part_output_tok)) - 1))
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'twice' in second_part_input_tok:
                        if 'after' in input_tok:
                            count_labels += ([1] * int(len(second_part_output_tok) / 2) + [0] * int(len(second_part_output_tok) / 2))
                        else:
                            count_labels += ([start_count] * int(len(second_part_output_tok) / 2) + [start_count-1] * int(len(second_part_output_tok) / 2))
                        # count_labels += ([1] + [0] * (int(len(second_part_output_tok) / 2) - 1)) * 2
                    elif 'thrice' in second_part_input_tok:
                        if 'after' in input_tok:
                            count_labels += ([2] * int(len(second_part_output_tok) / 3) + [1] * int(len(second_part_output_tok) / 3) + \
                                             [0] * int(len(second_part_output_tok) / 3))
                        else:
                            count_labels += ([start_count] * int(len(second_part_output_tok) / 3) + [start_count-1] * int(len(second_part_output_tok) / 3) + \
                                             [start_count-2] * int(len(second_part_output_tok) / 3))
                        # count_labels += ([1] + [0] * (int(len(second_part_output_tok) / 3) - 1)) * 3
                    else:
                        if 'after' in input_tok:
                            count_labels += ([0] * len(second_part_output_tok))
                        else:
                            count_labels += ([start_count] * len(second_part_output_tok))
                        # count_labels += ([1] + [0] * (int(len(second_part_output_tok)) - 1))
            elif count_label_scheme == 'v3.1':
                ### For the first part output
                if 'thrice' in first_part_input_tok and 'thrice' in second_part_input_tok:
                    start_count = 5
                elif ('thrice' in first_part_input_tok and 'twice' in second_part_input_tok) or \
                        ('twice' in first_part_input_tok and 'thrice' in second_part_input_tok):
                    start_count = 4
                elif ('twice' in first_part_input_tok and 'twice' in second_part_input_tok) or \
                        ('thrice' in first_part_input_tok) or ('thrice' in second_part_input_tok):
                    start_count = 3
                elif 'twice' in first_part_input_tok or 'twice' in second_part_input_tok:
                    start_count = 2
                else:
                    start_count = 1
                if 'twice' in first_part_input_tok:
                    count_labels += ([start_count] * int(len(first_part_output_tok) / 2) + [start_count - 1] * int(
                        len(first_part_output_tok) / 2))
                    # count_labels += ([1] + [0] * (int(len(first_part_output_tok) / 2) - 1)) * 2
                elif 'thrice' in first_part_input_tok:
                    count_labels += ([start_count] * int(len(first_part_output_tok) / 3) + [start_count - 1] * int(
                        len(first_part_output_tok) / 3) + \
                                     [start_count - 2] * int(len(first_part_output_tok) / 3))
                else:
                    count_labels += ([start_count] * len(first_part_output_tok))
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'twice' in second_part_input_tok:
                        count_labels += ([1] * int(len(second_part_output_tok) / 2) + [0] * int(
                            len(second_part_output_tok) / 2))
                        # count_labels += ([1] + [0] * (int(len(second_part_output_tok) / 2) - 1)) * 2
                    elif 'thrice' in second_part_input_tok:
                        count_labels += ([2] * int(len(second_part_output_tok) / 3) + [1] * int(
                            len(second_part_output_tok) / 3) + \
                                         [0] * int(len(second_part_output_tok) / 3))
                    else:
                        count_labels += ([0] * len(second_part_output_tok))
            else:
                ### For the first part output
                if 'twice' in first_part_input_tok:
                    if 'after' in input_tok:
                        new_count_labels = list(range(int(len(first_part_output_tok) / 2)))[::-1] * 2
                    else:
                        new_count_labels = list(range(int(len(first_part_output_tok) / 2)))[::-1] * 2
                elif 'thrice' in first_part_input_tok:
                    if 'after' in input_tok:
                        new_count_labels = list(range(int(len(first_part_output_tok) / 3)))[::-1] * 3
                    else:
                        new_count_labels = list(range(int(len(first_part_output_tok) / 3)))[::-1] * 3
                else:
                    if 'after' in input_tok:
                        new_count_labels = list(range(len(first_part_output_tok)))[::-1]
                    else:
                        new_count_labels = list(range(len(first_part_output_tok)))[::-1]
                count_labels += new_count_labels
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'twice' in second_part_input_tok:
                        if 'after' in input_tok:
                            new_count_labels = list(range(int(len(second_part_output_tok) / 2)))[::-1] * 2
                            new_count_labels = [_c + 8 for _c in new_count_labels]
                        else:
                            new_count_labels = list(range(int(len(second_part_output_tok) / 2)))[::-1] * 2
                            new_count_labels = [_c + 8 for _c in new_count_labels]
                    elif 'thrice' in second_part_input_tok:
                        if 'after' in input_tok:
                            new_count_labels = list(range(int(len(second_part_output_tok) / 3)))[::-1] * 3
                            new_count_labels = [_c + 8 for _c in new_count_labels]
                        else:
                            new_count_labels = list(range(int(len(second_part_output_tok) / 3)))[::-1] * 3
                            new_count_labels = [_c + 8 for _c in new_count_labels]
                    else:
                        if 'after' in input_tok:
                            new_count_labels = list(range(len(second_part_output_tok)))[::-1]
                            new_count_labels = [_c + 8 for _c in new_count_labels]
                        else:
                            new_count_labels = list(range(len(second_part_output_tok)))[::-1]
                            new_count_labels = [_c + 8 for _c in new_count_labels]
                    count_labels += new_count_labels
            # count_labels = []
            # count_labels += list(range(len(first_part_output_tok)))[::-1]
            # count_labels += list(range(len(second_part_output_tok)))[::-1]
            assert len(count_labels) == len(output_tok), (len(count_labels), len(output_tok), input_text, first_part_input_tok, count_labels, output_tok,
                                                          first_part_output_text, first_part_output_tok, second_part_output_text, second_part_output_tok)
            count_labels.append(-1) # For the EOS token
            # count_labels.append(7) # For the EOS token
            ### 2. Build the action group labels ###
            if group_label_scheme == 'v1': ## As used in exp 9.0-9.4
                if 'around' in first_part_input_tok:
                    if 'after' in input_tok:
                        group_labels += ([4] * len(first_part_output_tok))
                    else:
                        group_labels += ([0] * len(first_part_output_tok))
                elif 'opposite' in first_part_input_tok:
                    if 'after' in input_tok:
                        group_labels += ([5] * len(first_part_output_tok))
                    else:
                        group_labels += ([1] * len(first_part_output_tok))
                elif 'left' in first_part_input_tok or 'right' in first_part_input_tok:
                    if 'after' in input_tok:
                        group_labels += ([6] * len(first_part_output_tok))
                    else:
                        group_labels += ([2] * len(first_part_output_tok))
                else:
                    if 'after' in input_tok:
                        group_labels += ([7] * len(first_part_output_tok))
                    else:
                        group_labels += ([3] * len(first_part_output_tok))
                if 'around' in second_part_input_tok:
                    if 'after' in input_tok:
                        group_labels += ([0] * len(second_part_output_tok))
                    else:
                        group_labels += ([4] * len(second_part_output_tok))
                elif 'opposite' in second_part_input_tok:
                    if 'after' in input_tok:
                        group_labels += ([1] * len(second_part_output_tok))
                    else:
                        group_labels += ([5] * len(second_part_output_tok))
                elif 'left' in second_part_input_tok or 'right' in second_part_input_tok:
                    if 'after' in input_tok:
                        group_labels += ([2] * len(second_part_output_tok))
                    else:
                        group_labels += ([6] * len(second_part_output_tok))
                else:
                    if 'after' in input_tok:
                        group_labels += ([3] * len(second_part_output_tok))
                    else:
                        group_labels += ([7] * len(second_part_output_tok))
            else:
                ### For the first part output
                if 'twice' in first_part_input_tok:
                    if 'after' in input_tok:
                        new_group_labels = list(range(int(len(first_part_output_tok) / 2)))[::-1] * 2
                        new_group_labels = [_c + 8 for _c in new_group_labels]
                    else:
                        new_group_labels = list(range(int(len(first_part_output_tok) / 2)))[::-1] * 2
                elif 'thrice' in first_part_input_tok:
                    if 'after' in input_tok:
                        new_group_labels = list(range(int(len(first_part_output_tok) / 3)))[::-1] * 3
                        new_group_labels = [_c + 8 for _c in new_group_labels]
                    else:
                        new_group_labels = list(range(int(len(first_part_output_tok) / 3)))[::-1] * 3
                else:
                    if 'after' in input_tok:
                        new_group_labels = list(range(len(first_part_output_tok)))[::-1]
                        new_group_labels = [_c + 8 for _c in new_group_labels]
                    else:
                        new_group_labels = list(range(len(first_part_output_tok)))[::-1]
                group_labels += new_group_labels
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'twice' in second_part_input_tok:
                        if 'after' in input_tok:
                            new_group_labels = list(range(int(len(second_part_output_tok) / 2)))[::-1] * 2
                        else:
                            new_group_labels = list(range(int(len(second_part_output_tok) / 2)))[::-1] * 2
                            new_group_labels = [_c + 8 for _c in new_group_labels]
                    elif 'thrice' in second_part_input_tok:
                        if 'after' in input_tok:
                            new_group_labels = list(range(int(len(second_part_output_tok) / 3)))[::-1] * 3
                        else:
                            new_group_labels = list(range(int(len(second_part_output_tok) / 3)))[::-1] * 3
                            new_group_labels = [_c + 8 for _c in new_group_labels]
                    else:
                        if 'after' in input_tok:
                            new_group_labels = list(range(len(second_part_output_tok)))[::-1]
                        else:
                            new_group_labels = list(range(len(second_part_output_tok)))[::-1]
                            new_group_labels = [_c + 8 for _c in new_group_labels]
                    group_labels += new_group_labels
            assert len(group_labels) == len(output_tok)
            group_labels.append(-1) # For the EOS token
            # group_labels.append(17) # For the EOS token
            ### 3. Build the action type labels ###
            ### For the first part output
            if type_label_scheme == 'v1':
                if 'around' in first_part_input_tok:
                    new_type_labels = [3] * len(first_part_output_tok)
                elif 'opposite' in first_part_input_tok:
                    new_type_labels = [2] * len(first_part_output_tok)
                elif 'left' in first_part_input_tok or 'right' in first_part_input_tok:
                    new_type_labels = [1] * len(first_part_output_tok)
                else:
                    new_type_labels = [0] * len(first_part_output_tok)
                # if 'after' in input_tok:
                #     new_type_labels = [_c + 4 for _c in new_type_labels]
                type_labels += new_type_labels
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'around' in second_part_input_tok:
                        new_type_labels = [3] * len(second_part_output_tok)
                    elif 'opposite' in second_part_input_tok:
                        new_type_labels = [2] * len(second_part_output_tok)
                    elif 'left' in second_part_input_tok or 'right' in second_part_input_tok:
                        new_type_labels = [1] * len(second_part_output_tok)
                    else:
                        new_type_labels = [0] * len(second_part_output_tok)
                    # if 'after' not in input_tok:
                    #     new_type_labels = [_c + 4 for _c in new_type_labels]
                    type_labels += new_type_labels
            elif type_label_scheme == 'v2':
                if 'twice' in first_part_input_tok:
                    type_labels += ([1] * int(len(first_part_output_tok) / 2) + [0] * int(
                        len(first_part_output_tok) / 2))
                elif 'thrice' in first_part_input_tok:
                    type_labels += ([2] * int(len(first_part_output_tok) / 3) + [1] * int(
                        len(first_part_output_tok) / 3) + \
                                    [0] * int(len(first_part_output_tok) / 3))
                else:
                    type_labels += ([0] * len(first_part_output_tok))
                ### For the second part output
                if len(second_part_output_tok) > 0:
                    if 'twice' in second_part_input_tok:
                        type_labels += ([1] * int(len(second_part_output_tok) / 2) + [0] * int(
                            len(second_part_output_tok) / 2))
                    elif 'thrice' in second_part_input_tok:
                        type_labels += ([2] * int(len(second_part_output_tok) / 3) + [1] * int(
                            len(second_part_output_tok) / 3) + [0] * int(len(second_part_output_tok) / 3))
                    else:
                        type_labels += ([0] * len(second_part_output_tok))
            assert len(type_labels) == len(output_tok)
            type_labels.append(-1) # For the EOS token
            # group_labels.append(17) # For the EOS token
            # if _id not in no_skip_id:
            #     count_labels = [-1] * len(count_labels)
            #     group_labels = [-1] * len(group_labels)
            #     skip_cnt += 1
            # else:
            #     sup_cnt += 1
            all_action_type_labels.append(torch.tensor(type_labels).cuda())
            all_count_labels.append(torch.tensor(count_labels).cuda())
            all_action_group_labels.append(torch.tensor(group_labels).cuda())
        print(skip_cnt, sup_cnt)
        return all_count_labels, all_action_group_labels, all_action_type_labels
    def convert_to_dict(self, raw_data):
        """Turn [input, output] pairs into an {input: output} dict; inputs must be unique."""
        dict_data = {}
        for dp in raw_data:
            input, output = dp[0], dp[1]
            assert input not in dict_data
            dict_data[input] = output
        return dict_data
    def __getitem__(self, index):
        """Like the base __getitem__, additionally returning the three
        auxiliary label tensors for this example."""
        if self.tokenized:
            dp = self.dataset[index]
            source_ids, src_mask, target_ids = dp[0], dp[1], dp[2]
            source_ids = source_ids[:self.max_source_length]
            #src_mask = src_mask[:self.max_source_length]
            target_ids = target_ids[:self.max_target_length]
        else:
            source_ids = self.source[index]
            target_ids = self.target[index]
        count_labels = self.action_count_labels[index]
        group_labels = self.action_group_labels[index]
        type_labels = self.action_type_labels[index]
        return {"source_ids": source_ids, "target_ids": target_ids, "action_count_labels": count_labels,
                "action_group_labels": group_labels, "action_type_labels": type_labels}
    @staticmethod
    def trim_seq2seq_batch(batch, src_pad_token_id, trg_pad_token_id, trim_y=True):
        """Strip trailing all-pad columns; returns (source_ids, source_mask, y)."""
        if trim_y:
            y = trim_batch(batch["target_ids"], trg_pad_token_id)
        else:
            y = batch["target_ids"]
        source_ids, source_mask = trim_batch(batch["source_ids"], src_pad_token_id, attention_mask=batch["source_mask"])
        return source_ids, source_mask, y
    def collate_fn(self, batch):
        """Pad ids and auxiliary labels to the batch max lengths (labels are
        padded with -1, the ignore value) and build source/target masks."""
        max_src_len = max(map(len, [x["source_ids"] for x in batch]))
        max_trg_len = max(map(len, [x["target_ids"] for x in batch]))
        src_mask = torch.stack([self.create_mask(x["source_ids"], max_src_len) for x in batch])
        trg_mask = torch.stack([self.create_mask(x["target_ids"], max_trg_len) for x in batch])
        src_ids = torch.stack([self.pad_to_max_len(x["source_ids"], max_src_len, self.src_lang.pad_token_id) for x in batch])
        #masks = torch.stack([x["source_mask"] for x in batch])
        trg_ids = torch.stack([self.pad_to_max_len(x["target_ids"], max_trg_len, self.trg_lang.pad_token_id) for x in batch])
        action_count_labels = torch.stack([self.pad_to_max_len(x["action_count_labels"], max_trg_len, -1) for x in batch])
        action_group_labels = torch.stack([self.pad_to_max_len(x["action_group_labels"], max_trg_len, -1) for x in batch])
        action_type_labels = torch.stack(
            [self.pad_to_max_len(x["action_type_labels"], max_trg_len, -1) for x in batch])
        y = trim_batch(trg_ids, self.trg_lang.pad_token_id)
        #action_count_labels = trim_batch(action_count_labels, -1)
        # _src_ids, src_mask = trim_batch(src_ids, self.src_lang.pad_token_id, attention_mask=src_mask)
        # print(_src_ids.size(), src_ids.size())
        return {"source_ids": src_ids, "source_mask": src_mask, "target_ids": y, "target_mask": trg_mask,
                "action_count_labels": action_count_labels, "action_group_labels": action_group_labels,
                "action_type_labels": action_type_labels}
| 43,000 | 13,685 |
from typing import Dict
from rest_framework import serializers
from rest_framework.fields import empty
from rest_framework.relations import ManyRelatedField
from rest_framework.request import Request
from .mixins import BridgerSerializerFieldMixin
from .types import BridgerType, ReturnContentType
class BridgerManyRelatedField(ManyRelatedField):
def __init__(self, *args, **kwargs):
required = kwargs.get("required", True)
if not required:
kwargs["allow_null"] = True
super().__init__(*args, **kwargs)
def run_validation(self, data=empty):
# If the data is send through form data, we need to convert the data into a proper list of ids
if data not in [None, empty] and len(data) == 1 and isinstance(data[0], str) and "," in data[0]:
data = data[0].split(",")
# If the data is a list of an empty string we need to convert it (FORM DATA)
if data not in [None, empty] and len(data) == 1 and isinstance(data[0], str) and data[0] == "":
data = []
# If the data is a list and contains the string null, then we need to convert it (FORM DATA)
if data == ["null"]:
data = []
# If the data is None and null is an allowed value, data needs to be set to an empty list
if data is None and self.allow_null:
data = []
return super().run_validation(data)
def get_representation(self, request: Request, field_name: str) -> Dict:
representation = self.child_relation.get_representation(request, field_name)
representation["multiple"] = True
return representation
class PrimaryKeyRelatedField(BridgerSerializerFieldMixin, serializers.PrimaryKeyRelatedField):
MANY_RELATION_KWARGS = (
"read_only",
"write_only",
"required",
"default",
"initial",
"source",
"label",
"help_text",
"style",
"error_messages",
"allow_empty",
"html_cutoff",
"html_cutoff_text",
"allow_null",
)
def __init__(self, *args, **kwargs):
self.field_type = kwargs.pop("field_type", BridgerType.SELECT.value)
super().__init__(*args, **kwargs)
def __new__(cls, *args, **kwargs):
kwargs["style"] = {"base_template": "input.html"}
return super().__new__(cls, *args, **kwargs)
@classmethod
def many_init(cls, *args, **kwargs):
list_kwargs = {"child_relation": cls(*args, **kwargs)}
for key in kwargs:
if key in cls.MANY_RELATION_KWARGS:
list_kwargs[key] = kwargs[key]
return BridgerManyRelatedField(**list_kwargs)
def run_validation(self, data=empty):
if isinstance(data, str) and data == "null":
data = None
if data is empty:
parent_model_id = self.parent.context["view"].kwargs.get(f"{self.field_name}_id")
if parent_model_id:
data = parent_model_id
return super().run_validation(data)
class ListSerializer(serializers.ListSerializer):
"""
A Wrapper around the normal DRF ListSerializer which also return the child representation
"""
def get_representation(self, request: Request, field_name: str) -> Dict:
representation = self.child.get_representation(request, field_name)
representation["multiple"] = True
representation["related_key"] = self.source
return representation
| 3,484 | 1,003 |
import networkx.algorithms.operators.tests.test_product
import pytest
from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx
import_as_graphscope_nx(networkx.algorithms.operators.tests.test_product,
decorators=pytest.mark.usefixtures("graphscope_session"))
def test_tensor_product_combinations():
# basic smoke test, more realistic tests would be useful
P5 = nx.path_graph(5)
K3 = nx.complete_graph(3)
G = nx.tensor_product(P5, K3)
assert nx.number_of_nodes(G) == 5 * 3
G = nx.tensor_product(nx.DiGraph(P5), nx.DiGraph(K3))
assert nx.number_of_nodes(G) == 5 * 3
@pytest.mark.skip(reason="not support multigraph")
def test_cartesian_product_multigraph():
pass
def test_lexicographic_product_combinations():
P5 = nx.path_graph(5)
K3 = nx.complete_graph(3)
G = nx.lexicographic_product(P5, K3)
assert nx.number_of_nodes(G) == 5 * 3
def test_strong_product_combinations():
P5 = nx.path_graph(5)
K3 = nx.complete_graph(3)
G = nx.strong_product(P5, K3)
assert nx.number_of_nodes(G) == 5 * 3
@pytest.mark.skip(reason="not support multigraph")
def test_graph_power_raises():
    """Skipped: graphscope does not support multigraphs."""
| 1,198 | 463 |
# Chant templates. Placeholders substituted at send time:
# {team} -> Name of team
# {name} -> Name of person who supports team

# Templates used when the tracked team's match has started.
teamMatchStarted: list[str] = [
    "{team} are shit",
    "{team} cunts",
    "Dirty {team}",
    "Dirty {team}, dirty {name}",
]
# Templates used while the score is level.
drawing: list[str] = [
    "{team} level, this is a shit match",
    "Boring old {team}",
    "Happy with how it's going, {name}?",
    "Yawn...",
    "{team} wankers",
    "How can you support this rubbish, {name}?",
    "You get the feeling that {team} don't really want this",
    "No passion from {team}, {name}",
    "If a game of football is like making love to a beautiful woman, this {team} game is a ยฃ10 hand job from a swivel-eyed misfit",
    "This {team} match is like a game of chess. But with more players and only one piece",
]
# Templates used when the team goes one goal ahead.
teamLeadByOne: list[str] = [
    "{team} cheats, the ref's a cunt",
    "That was never a goal for {team}",
    "{team} don't deserve that",
    "Bollocks",
    "That should go to VAR",
    # Fixed typo: "fortunuate" -> "fortunate"
    "Bit fortunate for {team}",
    "Can't imagine {team} will keep this lead",
    "Lucky goal for {team}",
    "{team} got lucky there",
    "{team} aren't good enough to stay ahead",
    "Offside!",
]
# Templates used when the team extends an existing lead.
teamExtendingLead: list[str] = [
    "There's no way {team} deserve this lead",
    "Have {team} paid the ref?",
    "This is bullshit",
    "The ref's a cunt, {name}'s a cunt",
    "The ref's a cunt, {team} are cunts, {name}'s a cunt",
    "Something has gone seriously wrong with this country",
    "When I voted for Brexit, I didn't vote for this",
    "At least Boris remains in charge, we've still got that",
    "Richard Wanger would be turning in his grave",
    "Liberal elite bullshit",
    "That was so offside",
    "VAR!",
    "Is the linesman OK?",
    "If only {name}'s wife was as dirty as this game",
]
# Templates used when the team concedes while ahead (lead shrinking).
teamLosingLead: list[str] = [
    "Lazy old {team}, lazy old {name}",
    "{team} are throwing it away",
    "{team} are rubbish",
    "{team} fucking it up again",
    "We really are being treated to some world class flouncing from {team} today",
    "Brace yourself, {name}. This is going to hurt",
    "This is brown trouser time for {team}",
    "I hope {name} brought a spare pair of underpants",
    "I see {team} are playing their B Team. B for Bullshit",
]
# Templates used when the team goes one goal behind.
teamDeficitOfOne: list[str] = [
    "This is more like it from {team}",
    "Oh dear...",
    "{team} wankers",
    "How are you feeling, {name}?",
    "Bit disappointing, {name}?",
    "Not looking good for {team}, {name}",
    "You must be furious, {name}",
    "{team} have just got no heart",
    "This is what happens when you try to buy success",
    "All that money spent, {name}, and for what?",
]
# Templates used when the team falls further behind.
teamExtendingDeficit: list[str] = [
    "Starting to feel a bit sorry for {team}",
    "Never mind, {name}, there's always the next game",
    "Poor {team}",
    "Whoops...",
    "Oh dear, everything OK, {name}?",
    "Hey {name}, where'd you get your keeper?\nPOUNDSHOP !! POUNDSHOP !!",
    "{team} can't raise themselves for this game, typical",
    "A team like {team} have such a proud history, but what we see today is just embarrassing",
    "{team} clearly not up for it today",
    "{team} are letting you down, {name}",
    "Watching {team} is like watching a bunch of cavemen: Neanderthal",
]
# Templates used when the team pulls a goal back while still behind.
teamLosingDeficit: list[str] = [
    "Too little too late for {team}",
    "{team} won't come back from here",
    "The ref's a cunt",
    "This is awful",
    "What a mess",
    "Well this is an unmitigated shit show",
]
# Templates used at full time when the team has won.
teamWon: list[str] = [
    "That was a shit game",
    "There's no way {team} deserved that",
    "Fuck you, {name} !!",
    "This will go down in history...\nAs the most tedious game I have ever had the misfortune to witness",
]
# Templates used at full time when the team has lost.
teamLost: list[str] = [
    "Justice done, {team} lost",
    "Job done for {team}?",
    "Job done, {name}?",
    "{name} !!?",
    "Probably the best {team} could hope for",
    "Everything OK, {name}?",
    "{team} continue to disappoint",
    "Well if the football thing doesn't work out for {team}, they can always consider a career on the stage",
    "{team} set the bar low",
    "{team} fail to meet their already low expectations",
]
# Templates used at full time when the match ended in a draw.
# Annotation normalized from "list [str]" to "list[str]" to match the
# other chant lists in this module.
teamDrew: list[str] = [
    "Another uninspiring result for {team}",
    "Thanks for nothing, {team}",
    "Well that's 90 minutes we won't get back, thanks {team}",
    "Another draw for {team}",
    "Boring old {team}",
    "You should be happy with that result, {name}",
    "If I could pick one highlight from this {team} game it would be when it finally ended.",
    "I think {name} will be happy with {team}'s performance today.",
]
| 4,574 | 1,535 |
from bench import bench
print(bench(100, '''
def fib(n):
return n if n < 2 else fib(n-1) + fib(n-2)
''', '''
fib(20)
'''))
| 128 | 63 |
# coding=utf-8
from nlpir.native.nlpir_base import NLPIRBase
from ctypes import c_bool, c_char_p, c_int, POINTER, Structure, c_float
class StDoc(Structure):
    """ctypes mapping of the native document struct consumed by classifier_exec1."""

    # BUG FIX: ctypes.Structure requires the class attribute to be named
    # ``_fields_``. The original ``__fields__`` was silently ignored, so the
    # structure had no members at all and could not carry document data.
    _fields_ = [
        ("sTitle", c_char_p),
        ("sContent", c_char_p),
        ("sAuthor", c_char_p),
        ("sBoard", c_char_p),
        ("sDatatype", c_char_p),
    ]
class Classifier(NLPIRBase):
    """Python wrapper around the native LJClassifier (NLPIR) text-classification library.

    Each method resolves the corresponding native function via
    ``self.get_func(name, argtypes, restype)`` and invokes it; string
    arguments are converted by the ``NLPIRBase.byte_str_transform`` decorator.
    """

    @property
    def dll_name(self):
        # Base name of the native shared library loaded by NLPIRBase.
        return "LJClassifier"

    @NLPIRBase.byte_str_transform
    def init_lib(self, data_path: str, encode: int, license_code: str) -> int:
        """
        Call **classifier_init**.

        :param data_path: path of the data directory used by the native library
        :param encode: encoding identifier passed through to the native library
        :param license_code: license code for the library
        :return: 1 success 0 fail
        """
        return self.get_func("classifier_init", [c_char_p, c_char_p, c_int, c_char_p], c_bool)(
            "rulelist.xml", data_path, encode, license_code)

    @NLPIRBase.byte_str_transform
    def exit_lib(self) -> bool:
        """
        Call **classifier_exit**.

        :return: exit success or not
        """
        return self.get_func("classifier_exit", None, None)()

    @NLPIRBase.byte_str_transform
    def get_last_error_msg(self) -> str:
        # Return the last error message recorded by the native library.
        return self.get_func("classifier_GetLastErrorMsg", None, c_char_p)()

    @NLPIRBase.byte_str_transform
    def exec_1(self, data: StDoc, out_type: int = 0):
        """
        Call **classifier_exec1** — classify a pre-parsed document structure.

        :param data: document structure to classify
        :param out_type: whether the output includes confidence scores
            (0 = no confidence, 1 = with confidence)
        :return: topic category string; categories are tab-separated and,
            when ``out_type`` is 1, each category is followed by its
            confidence score, sorted from high to low.
        """
        return self.get_func("classifier_exec1", [POINTER(StDoc), c_int], c_char_p)(data, out_type)

    @NLPIRBase.byte_str_transform
    def exec(self, title: str, content: str, out_type: int):
        """
        Call **classifier_exec** — classify a document given as title and content.

        :param title: document title
        :param content: document body
        :param out_type: whether the output includes confidence scores,
            same as :func:`exec_1`
        :return: same format as :func:`exec_1`
        """
        return self.get_func("classifier_exec", [c_char_p, c_char_p, c_int], c_char_p)(title, content, out_type)

    @NLPIRBase.byte_str_transform
    def exec_file(self, filename: str, out_type: int) -> str:
        """
        Call **classifier_execFile** — classify the contents of a file.

        :param filename: path of the file to classify
        :param out_type: whether the output includes confidence scores
            (0 = no confidence, 1 = with confidence)
        :return: same format as :func:`exec_1`
        """
        return self.get_func("classifier_execFile", [c_char_p, c_int], c_char_p)(filename, out_type)

    @NLPIRBase.byte_str_transform
    def detail(self, class_name: str):
        """
        Call **classifier_detail** — for the current document, return the
        rule-matching details for the given category.

        :param class_name: category name to inspect
        :return: matching details, formatted as rule/sub-rule lines with
            keyword hit counts (the original example in the source was
            encoding-garbled; exact layout is defined by the native library).
        """
        return self.get_func("classifier_detail", [c_char_p], c_char_p)(class_name)

    @NLPIRBase.byte_str_transform
    def set_sim_thresh(self, sim: float):
        """
        Call **classifier_setsimthresh** — set the similarity threshold.

        :param sim: threshold value
        :return: whatever the native function returns (no restype declared)
        """
        return self.get_func("classifier_setsimthresh", [c_float])(sim)
| 3,271 | 1,426 |
# -*- coding: utf-8 -*-
import pytest
from mock import Mock
from bravado_core.exception import SwaggerMappingError
from bravado_core.operation import Operation
from bravado_core.param import get_param_type_spec
from bravado_core.param import Param
from bravado_core.spec import Spec
@pytest.fixture
def body_param_spec():
    """Swagger parameter spec with location 'body' and a string schema."""
    return {
        'name': 'body',
        'in': 'body',
        'description': 'pet id',
        'required': True,
        'schema': {
            'type': 'string',
        },
    }
def test_location_is_body(empty_swagger_spec, body_param_spec):
    """For body parameters the type spec is the nested 'schema' object."""
    operation = Mock(spec=Operation)
    param = Param(empty_swagger_spec, operation, body_param_spec)
    assert get_param_type_spec(param) == body_param_spec['schema']
def test_location_is_not_body(empty_swagger_spec):
    """For non-body locations the param spec itself is the type spec."""
    locations = ('path', 'query', 'header', 'formData')
    for location in locations:
        spec = {
            'name': 'petId',
            'in': location,
            'description': 'ID of pet that needs to be updated',
            'required': True,
            'type': 'string',
        }
        param = Param(empty_swagger_spec, Mock(spec=Operation), spec)
        assert get_param_type_spec(param) == spec
def test_location_invalid(empty_swagger_spec, body_param_spec):
    """An unknown 'in' location raises SwaggerMappingError naming the location."""
    body_param_spec['in'] = 'foo'
    invalid_param = Param(empty_swagger_spec, Mock(spec=Operation), body_param_spec)
    with pytest.raises(SwaggerMappingError) as excinfo:
        get_param_type_spec(invalid_param)
    assert 'location foo' in str(excinfo.value)
def test_ref(minimal_swagger_dict, body_param_spec):
    """$ref parameters are resolved before the type spec is extracted."""
    minimal_swagger_dict['parameters'] = {'PetIdParam': body_param_spec}
    swagger_spec = Spec(minimal_swagger_dict)
    ref_spec = {'$ref': '#/parameters/PetIdParam'}
    param = Param(swagger_spec, Mock(spec=Operation), ref_spec)
    assert get_param_type_spec(param) == {'type': 'string'}
| 1,875 | 611 |
# Copyright (c) 2019 Princeton University
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from markdown import markdown
import base64
import json
import base64
def main(params):
    """Serverless handler: decode a base64 markdown payload from the request
    body, render it to HTML, and echo back the test id from the query string.

    Returns an error dict when the expected keys are missing from the payload.
    """
    try:
        body = json.loads(base64.decodebytes(params["__ow_body"].encode("utf-8")))
        md_text = base64.decodebytes(body["markdown"].encode("utf-8")).decode("utf-8")
    except KeyError:
        return {'Error' : 'Possibly lacking markdown parameter in request.'}
    # First query-string token is the test id (e.g. "id123&other=...").
    test_id = params["__ow_query"].split("&")[0]
    return {"result": "ok", "html_response": markdown(md_text), "testid": test_id}
| 682 | 227 |