code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
// Package sql provides SQL implementations of the storage interface.
package sql
import (
"database/sql"
"regexp"
"time"
"github.com/Sirupsen/logrus"
"github.com/cockroachdb/cockroach-go/crdb"
// import third party drivers
_ "github.com/go-sql-driver/mysql"
_ "github.com/lib/pq"
_ "github.com/mattn/go-sqlite3"
)
// flavor represents a specific SQL implementation, and is used to translate query strings
// between different drivers. Flavors shouldn't aim to translate all possible SQL statements,
// only the specific queries used by the SQL storages.
type flavor struct {
	// queryReplacers are applied in order by translate() to rewrite the
	// canonical (Postgres-style) query text into this dialect.
	queryReplacers []replacer
	// Optional function to create and finish a transaction. This is mainly for
	// cockroachdb support which requires special retry logic provided by their
	// client package.
	//
	// This will be nil for most flavors.
	//
	// See: https://github.com/cockroachdb/docs/blob/63761c2e/_includes/app/txn-sample.go#L41-L44
	executeTx func(db *sql.DB, fn func(*sql.Tx) error) error
	// Does the flavor support timezones? If false, translateArgs converts
	// time.Time arguments to UTC before they are bound.
	supportsTimezones bool
}
// A regexp with a replacement string. Each match of re in a query is
// substituted with `with` during flavor translation.
type replacer struct {
	re   *regexp.Regexp
	with string
}
// bindRegexp matches postgres-style query bind parameters, e.g. "$1", "$12", etc.
var bindRegexp = regexp.MustCompile(`\$\d+`)
// matchLiteral compiles a regexp matching s only as a whole word (bounded
// by \b on both sides). Any regexp metacharacters in s are escaped, so s is
// treated literally.
func matchLiteral(s string) *regexp.Regexp {
	pattern := `\b` + regexp.QuoteMeta(s) + `\b`
	return regexp.MustCompile(pattern)
}
var (
	// The "github.com/lib/pq" driver is the default flavor. All others are
	// translations of this.
	flavorPostgres = flavor{
		// The default behavior for Postgres transactions is consistent reads, not consistent writes.
		// For each transaction opened, ensure it has the correct isolation level.
		//
		// See: https://www.postgresql.org/docs/9.3/static/sql-set-transaction.html
		//
		// NOTE(ericchiang): For some reason using `SET SESSION CHARACTERISTICS AS TRANSACTION` at a
		// session level didn't work for some edge cases. Might be something worth exploring.
		executeTx: func(db *sql.DB, fn func(sqlTx *sql.Tx) error) error {
			tx, err := db.Begin()
			if err != nil {
				return err
			}
			// Rollback after a successful Commit is a no-op, so the defer
			// safely cleans up every early-return path.
			defer tx.Rollback()
			if _, err := tx.Exec(`SET TRANSACTION ISOLATION LEVEL SERIALIZABLE;`); err != nil {
				return err
			}
			if err := fn(tx); err != nil {
				return err
			}
			return tx.Commit()
		},
		supportsTimezones: true,
	}

	flavorSQLite3 = flavor{
		queryReplacers: []replacer{
			// Postgres "$N" binds become SQLite "?" placeholders.
			{bindRegexp, "?"},
			// Translate for booleans to integers.
			{matchLiteral("true"), "1"},
			{matchLiteral("false"), "0"},
			{matchLiteral("boolean"), "integer"},
			// Translate other types.
			{matchLiteral("bytea"), "blob"},
			{matchLiteral("timestamptz"), "timestamp"},
			// SQLite doesn't have a "now()" method, replace with "date('now')"
			{regexp.MustCompile(`\bnow\(\)`), "date('now')"},
		},
	}

	// Incomplete.
	flavorMySQL = flavor{
		queryReplacers: []replacer{
			{bindRegexp, "?"},
		},
	}

	// Not tested.
	flavorCockroach = flavor{
		// crdb.ExecuteTx supplies cockroachdb's client-side retry loop.
		executeTx: crdb.ExecuteTx,
	}
)
// translate rewrites a canonical (Postgres-style) query into this flavor's
// dialect by applying each configured replacer in order.
func (f flavor) translate(query string) string {
	// TODO(ericchiang): heavy caching of translated queries.
	for i := range f.queryReplacers {
		rep := f.queryReplacers[i]
		query = rep.re.ReplaceAllString(query, rep.with)
	}
	return query
}
// translateArgs translates query parameters that may be unique to
// a specific SQL flavor. For example, standardizing "time.Time"
// types to UTC for clients that don't provide timezone support.
//
// NOTE: when the flavor lacks timezone support, the slice is rewritten in
// place and then returned; callers in this file always pass a freshly built
// variadic slice, so no shared data is mutated.
func (c *conn) translateArgs(args []interface{}) []interface{} {
	if c.flavor.supportsTimezones {
		return args
	}
	for i, arg := range args {
		if t, ok := arg.(time.Time); ok {
			args[i] = t.UTC()
		}
	}
	return args
}
// conn is the main database connection. It wraps *sql.DB, translating
// queries and arguments for the configured flavor before execution.
type conn struct {
	db     *sql.DB
	flavor flavor
	logger logrus.FieldLogger
}
// Close releases the underlying database handle and its connection pool.
func (c *conn) Close() error {
	return c.db.Close()
}
// conn implements the same method signatures as encoding/sql.DB.

// Exec translates the statement into the connection's SQL dialect,
// normalizes the arguments, and executes it.
func (c *conn) Exec(query string, args ...interface{}) (sql.Result, error) {
	translated := c.flavor.translate(query)
	return c.db.Exec(translated, c.translateArgs(args)...)
}
// Query translates the query into the connection's SQL dialect, normalizes
// the arguments, and returns the matching rows.
func (c *conn) Query(query string, args ...interface{}) (*sql.Rows, error) {
	translated := c.flavor.translate(query)
	return c.db.Query(translated, c.translateArgs(args)...)
}
// QueryRow translates the query into the connection's SQL dialect,
// normalizes the arguments, and returns at most one row.
func (c *conn) QueryRow(query string, args ...interface{}) *sql.Row {
	translated := c.flavor.translate(query)
	return c.db.QueryRow(translated, c.translateArgs(args)...)
}
// ExecTx runs a method which operates on a transaction.
//
// If the flavor supplies its own transaction wrapper (e.g. cockroachdb's
// client-side retry logic), that wrapper drives begin/commit/rollback.
// Otherwise a plain Begin/Commit is used, rolling back if fn returns an
// error.
func (c *conn) ExecTx(fn func(tx *trans) error) error {
	if c.flavor.executeTx != nil {
		return c.flavor.executeTx(c.db, func(sqlTx *sql.Tx) error {
			return fn(&trans{sqlTx, c})
		})
	}
	sqlTx, err := c.db.Begin()
	if err != nil {
		return err
	}
	if err := fn(&trans{sqlTx, c}); err != nil {
		// Best-effort rollback; fn's error takes precedence over any
		// rollback failure.
		sqlTx.Rollback()
		return err
	}
	return sqlTx.Commit()
}
// trans is an in-flight database transaction. It translates queries through
// the parent connection's flavor before delegating to the underlying *sql.Tx.
type trans struct {
	tx *sql.Tx
	c  *conn
}
// trans implements the same method signatures as encoding/sql.Tx.

// Exec translates the statement for the parent connection's flavor and
// executes it within the transaction.
func (t *trans) Exec(query string, args ...interface{}) (sql.Result, error) {
	translated := t.c.flavor.translate(query)
	return t.tx.Exec(translated, t.c.translateArgs(args)...)
}
// Query translates the query for the parent connection's flavor and runs it
// within the transaction, returning the matching rows.
func (t *trans) Query(query string, args ...interface{}) (*sql.Rows, error) {
	translated := t.c.flavor.translate(query)
	return t.tx.Query(translated, t.c.translateArgs(args)...)
}
// QueryRow translates the query for the parent connection's flavor and runs
// it within the transaction, returning at most one row.
func (t *trans) QueryRow(query string, args ...interface{}) *sql.Row {
	translated := t.c.flavor.translate(query)
	return t.tx.QueryRow(translated, t.c.translateArgs(args)...)
}
| xeonx/dex | storage/sql/sql.go | GO | apache-2.0 | 5,357 |
/*
Copyright 2007-2012 Selenium committers
Portions copyright 2011-2012 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import org.junit.Test;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.JavascriptEnabled;
import java.util.concurrent.Callable;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.openqa.selenium.TestWaiter.waitFor;
import static org.openqa.selenium.testing.Ignore.Driver.ANDROID;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.IPHONE;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA_MOBILE;
import static org.openqa.selenium.testing.Ignore.Driver.PHANTOMJS;
import static org.openqa.selenium.testing.Ignore.Driver.SAFARI;
/**
 * Tests for {@link WebElement#isDisplayed()}: how CSS display/visibility
 * properties, hidden inputs, zero-sized elements, overflow clipping and
 * window size affect whether WebDriver reports an element as displayed.
 */
public class VisibilityTest extends JUnit4TestBase {

  @JavascriptEnabled
  @Test
  public void testShouldAllowTheUserToTellIfAnElementIsDisplayedOrNot() {
    driver.get(pages.javascriptPage);
    assertThat(driver.findElement(By.id("displayed")).isDisplayed(),
               is(true));
    assertThat(driver.findElement(By.id("none")).isDisplayed(), is(false));
    assertThat(driver.findElement(By.id("suppressedParagraph")).isDisplayed(), is(false));
    assertThat(driver.findElement(By.id("hidden")).isDisplayed(), is(false));
  }

  // A child is hidden when any ancestor is hidden, regardless of its own style.
  @JavascriptEnabled
  @Test
  public void testVisibilityShouldTakeIntoAccountParentVisibility() {
    driver.get(pages.javascriptPage);
    WebElement childDiv = driver.findElement(By.id("hiddenchild"));
    WebElement hiddenLink = driver.findElement(By.id("hiddenlink"));
    assertFalse(childDiv.isDisplayed());
    assertFalse(hiddenLink.isDisplayed());
  }

  @JavascriptEnabled
  @Test
  public void testShouldCountElementsAsVisibleIfStylePropertyHasBeenSet() {
    driver.get(pages.javascriptPage);
    WebElement shown = driver.findElement(By.id("visibleSubElement"));
    assertTrue(shown.isDisplayed());
  }

  // Clicking "hideMe" hides it via JavaScript; visibility must update dynamically.
  @JavascriptEnabled
  @Test
  public void testShouldModifyTheVisibilityOfAnElementDynamically() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("hideMe"));
    assertTrue(element.isDisplayed());
    element.click();
    // The hide happens asynchronously, so poll until it takes effect.
    waitFor(elementNotToDisplayed(element));
    assertFalse(element.isDisplayed());
  }

  @JavascriptEnabled
  @Test
  public void testHiddenInputElementsAreNeverVisible() {
    driver.get(pages.javascriptPage);
    WebElement shown = driver.findElement(By.name("hidden"));
    assertFalse(shown.isDisplayed());
  }

  // Interaction with invisible elements must raise ElementNotVisibleException.
  @JavascriptEnabled
  @Test
  public void testShouldNotBeAbleToClickOnAnElementThatIsNotDisplayed() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("unclickable"));
    try {
      element.click();
      fail("You should not be able to click on an invisible element");
    } catch (ElementNotVisibleException e) {
      // This is expected
    }
  }

  @JavascriptEnabled
  @Test
  public void testShouldNotBeAbleToTypeAnElementThatIsNotDisplayed() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("unclickable"));
    try {
      element.sendKeys("You don't see me");
      fail("You should not be able to send keyboard input to an invisible element");
    } catch (ElementNotVisibleException e) {
      // This is expected
    }
    // The failed sendKeys must not have altered the element's value.
    assertThat(element.getAttribute("value"), is(not("You don't see me")));
  }

  // A 0x0 container still counts as displayed when a descendant has real size.
  @JavascriptEnabled
  @Ignore({IE})
  @Test
  public void testZeroSizedDivIsShownIfDescendantHasSize() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("zero"));
    Dimension size = element.getSize();
    assertEquals("Should have 0 width", 0, size.width);
    assertEquals("Should have 0 height", 0, size.height);
    assertTrue(element.isDisplayed());
  }

  // Polling condition used with waitFor: true once the element is hidden.
  private Callable<Boolean> elementNotToDisplayed(final WebElement element) {
    return new Callable<Boolean>() {
      public Boolean call() throws Exception {
        return !element.isDisplayed();
      }
    };
  }

  @Test
  public void parentNodeVisibleWhenAllChildrenAreAbsolutelyPositionedAndOverflowIsHidden() {
    String url = appServer.whereIs("visibility-css.html");
    driver.get(url);
    WebElement element = driver.findElement(By.id("suggest"));
    assertTrue(element.isDisplayed());
  }

  // Elements clipped out of view by overflow-x: hidden are not displayed.
  @Ignore({IE, CHROME, HTMLUNIT, OPERA, OPERA_MOBILE, PHANTOMJS, SAFARI})
  @Test
  public void testElementHiddenByOverflowXIsNotVisible() {
    String[] pages = new String[]{
        "overflow/x_hidden_y_hidden.html",
        "overflow/x_hidden_y_scroll.html",
        "overflow/x_hidden_y_auto.html",
    };
    for (String page: pages) {
      driver.get(appServer.whereIs(page));
      WebElement right = driver.findElement(By.id("right"));
      // The page name is passed as the assertion message to identify failures.
      assertFalse(page, right.isDisplayed());
      WebElement bottomRight = driver.findElement(By.id("bottom-right"));
      assertFalse(page, bottomRight.isDisplayed());
    }
  }

  // Elements clipped out of view by overflow-y: hidden are not displayed.
  @Ignore({CHROME, HTMLUNIT, OPERA, OPERA_MOBILE, PHANTOMJS})
  @Test
  public void testElementHiddenByOverflowYIsNotVisible() {
    String[] pages = new String[]{
        "overflow/x_hidden_y_hidden.html",
        "overflow/x_scroll_y_hidden.html",
        "overflow/x_auto_y_hidden.html",
    };
    for (String page: pages) {
      driver.get(appServer.whereIs(page));
      WebElement bottom = driver.findElement(By.id("bottom"));
      assertFalse(page, bottom.isDisplayed());
      WebElement bottomRight = driver.findElement(By.id("bottom-right"));
      assertFalse(page, bottomRight.isDisplayed());
    }
  }

  // Elements reachable by scrolling (overflow-x: scroll/auto) count as visible.
  @Ignore({IE})
  @Test
  public void testElementScrollableByOverflowXIsVisible() {
    String[] pages = new String[]{
        "overflow/x_scroll_y_hidden.html",
        "overflow/x_scroll_y_scroll.html",
        "overflow/x_scroll_y_auto.html",
        "overflow/x_auto_y_hidden.html",
        "overflow/x_auto_y_scroll.html",
        "overflow/x_auto_y_auto.html",
    };
    for (String page: pages) {
      driver.get(appServer.whereIs(page));
      WebElement right = driver.findElement(By.id("right"));
      assertTrue(page, right.isDisplayed());
    }
  }

  // Elements reachable by scrolling (overflow-y: scroll/auto) count as visible.
  @Ignore({IE, SAFARI})
  @Test
  public void testElementScrollableByOverflowYIsVisible() {
    String[] pages = new String[]{
        "overflow/x_hidden_y_scroll.html",
        "overflow/x_scroll_y_scroll.html",
        "overflow/x_auto_y_scroll.html",
        "overflow/x_hidden_y_auto.html",
        "overflow/x_scroll_y_auto.html",
        "overflow/x_auto_y_auto.html",
    };
    for (String page: pages) {
      driver.get(appServer.whereIs(page));
      WebElement bottom = driver.findElement(By.id("bottom"));
      assertTrue(page, bottom.isDisplayed());
    }
  }

  @Test
  public void testElementScrollableByOverflowXAndYIsVisible() {
    String[] pages = new String[]{
        "overflow/x_scroll_y_scroll.html",
        "overflow/x_scroll_y_auto.html",
        "overflow/x_auto_y_scroll.html",
        "overflow/x_auto_y_auto.html",
    };
    for (String page: pages) {
      driver.get(appServer.whereIs(page));
      WebElement bottomRight = driver.findElement(By.id("bottom-right"));
      assertTrue(page, bottomRight.isDisplayed());
    }
  }

  @Test
  @Ignore({ANDROID, IPHONE, OPERA, OPERA_MOBILE})
  public void tooSmallAWindowWithOverflowHiddenIsNotAProblem() {
    WebDriver.Window window = driver.manage().window();
    Dimension originalSize = window.getSize();
    try {
      // Short in the Y dimension
      window.setSize(new Dimension(1024, 500));
      String url = appServer.whereIs("overflow-body.html");
      driver.get(url);
      WebElement element = driver.findElement(By.name("resultsFrame"));
      assertTrue(element.isDisplayed());
    } finally {
      // Always restore the original window size for subsequent tests.
      window.setSize(originalSize);
    }
  }
}
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl.references;
import com.intellij.codeInsight.completion.CompletionUtil;
import com.intellij.codeInsight.lookup.AutoCompletionPolicy;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.ProjectScope;
import com.intellij.psi.stubs.StubUpdatingIndex;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.QualifiedName;
import com.intellij.util.ArrayUtil;
import com.intellij.util.PlatformIcons;
import com.intellij.util.ProcessingContext;
import com.intellij.util.indexing.FileBasedIndex;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.codeInsight.controlflow.ControlFlowCache;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.Scope;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyBuiltinCache;
import com.jetbrains.python.psi.impl.PyImportedModule;
import com.jetbrains.python.psi.impl.PyPsiUtils;
import com.jetbrains.python.psi.impl.ResolveResultList;
import com.jetbrains.python.psi.resolve.ImplicitResolveResult;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.QualifiedNameFinder;
import com.jetbrains.python.psi.resolve.RatedResolveResult;
import com.jetbrains.python.psi.search.PyProjectScopeBuilder;
import com.jetbrains.python.psi.stubs.PyClassAttributesIndex;
import com.jetbrains.python.psi.stubs.PyClassNameIndexInsensitive;
import com.jetbrains.python.psi.stubs.PyFunctionNameIndex;
import com.jetbrains.python.psi.types.*;
import com.jetbrains.python.pyi.PyiUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author yole
*/
public class PyQualifiedReference extends PyReferenceImpl {
private static final Logger LOG = Logger.getInstance(PyQualifiedReference.class);
public PyQualifiedReference(PyQualifiedExpression element, PyResolveContext context) {
super(element, context);
}
@NotNull
@Override
protected List<RatedResolveResult> resolveInner() {
PyPsiUtils.assertValid(myElement);
ResolveResultList ret = new ResolveResultList();
final String referencedName = myElement.getReferencedName();
if (referencedName == null) return ret;
final PyExpression qualifier = myElement.getQualifier();
PyPsiUtils.assertValid(qualifier);
if (qualifier == null) {
return ret;
}
// regular attributes
PyType qualifierType = myContext.getTypeEvalContext().getType(qualifier);
// is it a class-private name qualified by a different class?
if (PyUtil.isClassPrivateName(referencedName) && qualifierType instanceof PyClassType) {
if (isOtherClassQualifying(qualifier, (PyClassType)qualifierType)) return Collections.emptyList();
}
//
if (qualifierType != null) {
qualifierType.assertValid("qualifier: " + qualifier);
// resolve within the type proper
AccessDirection ctx = AccessDirection.of(myElement);
final List<? extends RatedResolveResult> membersOfQualifier = qualifierType.resolveMember(referencedName, qualifier, ctx, myContext);
if (membersOfQualifier == null) {
return ret; // qualifier is positive that such name cannot exist in it
}
ret.addAll(membersOfQualifier);
}
// look for assignment of this attribute in containing function
if (qualifier instanceof PyQualifiedExpression && ret.isEmpty()) {
if (addAssignedAttributes(ret, referencedName, (PyQualifiedExpression)qualifier)) {
return ret;
}
}
if ((PyTypeChecker.isUnknown(qualifierType, myContext.getTypeEvalContext()) ||
(qualifierType instanceof PyStructuralType && ((PyStructuralType)qualifierType).isInferredFromUsages())) &&
myContext.allowImplicits() && canQualifyAnImplicitName(qualifier)) {
addImplicitResolveResults(referencedName, ret);
}
// special case of __doc__
if ("__doc__".equals(referencedName)) {
addDocReference(ret, qualifier, qualifierType);
}
return ret;
}
private static boolean isOtherClassQualifying(@NotNull PyExpression qualifier, @NotNull PyClassType qualifierType) {
final List<? extends PsiElement> match = PyUtil.searchForWrappingMethod(qualifier, true);
if (match == null) {
return true;
}
if (match.size() > 1) {
final PyClass ourClass = PyiUtil.getOriginalElementOrLeaveAsIs(qualifierType.getPyClass(), PyClass.class);
final PsiElement theirClass = CompletionUtil.getOriginalOrSelf(match.get(match.size() - 1));
if (ourClass != theirClass) return true;
}
return false;
}
private void addImplicitResolveResults(String referencedName, ResolveResultList ret) {
final Project project = myElement.getProject();
final GlobalSearchScope scope = PyProjectScopeBuilder.excludeSdkTestsScope(project);
final Collection functions = PyFunctionNameIndex.find(referencedName, project, scope);
final PsiFile containingFile = myElement.getContainingFile();
final List<QualifiedName> imports;
if (containingFile instanceof PyFile) {
imports = collectImports((PyFile)containingFile);
}
else {
imports = Collections.emptyList();
}
for (Object function : functions) {
if (!(function instanceof PyFunction)) {
FileBasedIndex.getInstance().scheduleRebuild(StubUpdatingIndex.INDEX_ID,
new Throwable("found non-function object " + function + " in function list"));
break;
}
PyFunction pyFunction = (PyFunction)function;
if (pyFunction.getContainingClass() != null) {
ret.add(new ImplicitResolveResult(pyFunction, getImplicitResultRate(pyFunction, imports)));
}
}
PyClassAttributesIndex
.findClassAndInstanceAttributes(referencedName, project, scope)
.forEach(attribute -> ret.add(new ImplicitResolveResult(attribute, getImplicitResultRate(attribute, imports))));
}
private static List<QualifiedName> collectImports(PyFile containingFile) {
List<QualifiedName> imports = new ArrayList<>();
for (PyFromImportStatement anImport : containingFile.getFromImports()) {
final QualifiedName source = anImport.getImportSourceQName();
if (source != null) {
imports.add(source);
}
}
for (PyImportElement importElement : containingFile.getImportTargets()) {
final QualifiedName qName = importElement.getImportedQName();
if (qName != null) {
imports.add(qName.removeLastComponent());
}
}
return imports;
}
private int getImplicitResultRate(PyElement target, List<QualifiedName> imports) {
int rate = RatedResolveResult.RATE_LOW;
if (target.getContainingFile() == myElement.getContainingFile()) {
rate += 200;
}
else {
final VirtualFile vFile = target.getContainingFile().getVirtualFile();
if (vFile != null) {
if (ProjectScope.getProjectScope(myElement.getProject()).contains(vFile)) {
rate += 80;
}
final QualifiedName qName = QualifiedNameFinder.findShortestImportableQName(myElement, vFile);
if (qName != null && imports.contains(qName)) {
rate += 70;
}
}
}
if (myElement.getParent() instanceof PyCallExpression) {
if (target instanceof PyFunction) rate += 50;
}
else {
if (!(target instanceof PyFunction)) rate += 50;
}
return rate;
}
private static boolean canQualifyAnImplicitName(@NotNull PyExpression qualifier) {
if (qualifier instanceof PyCallExpression) {
final PyExpression callee = ((PyCallExpression)qualifier).getCallee();
if (callee instanceof PyReferenceExpression && PyNames.SUPER.equals(callee.getName())) {
final PsiElement target = ((PyReferenceExpression)callee).getReference().resolve();
if (target != null && PyBuiltinCache.getInstance(qualifier).isBuiltin(target)) return false; // super() of unresolved type
}
}
return true;
}
private static boolean addAssignedAttributes(ResolveResultList ret,
String referencedName,
@NotNull final PyQualifiedExpression qualifier) {
final QualifiedName qName = qualifier.asQualifiedName();
if (qName == null) {
return false;
}
for (PyExpression ex : collectAssignedAttributes(qName, qualifier)) {
if (referencedName.equals(ex.getName())) {
ret.poke(ex, RatedResolveResult.RATE_NORMAL);
return true;
}
}
return false;
}
private void addDocReference(ResolveResultList ret, PyExpression qualifier, PyType qualifierType) {
PsiElement docstring = null;
if (qualifierType instanceof PyClassType) {
PyClass qualClass = ((PyClassType)qualifierType).getPyClass();
docstring = qualClass.getDocStringExpression();
}
else if (qualifierType instanceof PyModuleType) {
PyFile qualModule = ((PyModuleType)qualifierType).getModule();
docstring = qualModule.getDocStringExpression();
}
else if (qualifier instanceof PyReferenceExpression) {
PsiElement qual_object = ((PyReferenceExpression)qualifier).getReference(myContext).resolve();
if (qual_object instanceof PyDocStringOwner) {
docstring = ((PyDocStringOwner)qual_object).getDocStringExpression();
}
}
ret.poke(docstring, RatedResolveResult.RATE_HIGH);
}
@NotNull
@Override
public Object[] getVariants() {
PyExpression qualifier = myElement.getQualifier();
if (qualifier != null) {
qualifier = CompletionUtil.getOriginalOrSelf(qualifier);
}
if (qualifier == null) {
return EMPTY_ARRAY;
}
final PyQualifiedExpression element = CompletionUtil.getOriginalOrSelf(myElement);
PyType qualifierType = TypeEvalContext.codeCompletion(element.getProject(), element.getContainingFile()).getType(qualifier);
ProcessingContext ctx = new ProcessingContext();
final Set<String> namesAlready = new HashSet<>();
ctx.put(PyType.CTX_NAMES, namesAlready);
final Collection<Object> variants = new ArrayList<>();
if (qualifierType != null) {
Collections.addAll(variants, getVariantFromHasAttr(qualifier));
if (qualifierType instanceof PyStructuralType && ((PyStructuralType)qualifierType).isInferredFromUsages()) {
final PyClassType guessedType = guessClassTypeByName();
if (guessedType != null) {
Collections.addAll(variants, getTypeCompletionVariants(myElement, guessedType));
}
}
if (qualifier instanceof PyQualifiedExpression) {
final PyQualifiedExpression qualifierExpression = (PyQualifiedExpression)qualifier;
final QualifiedName qualifiedName = qualifierExpression.asQualifiedName();
if (qualifiedName != null) {
final Collection<PyTargetExpression> attrs = collectAssignedAttributes(qualifiedName, qualifier);
for (PyTargetExpression expression : attrs) {
final String name = expression.getName();
if (name != null && name.endsWith(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED)) {
continue;
}
if (qualifierType instanceof PyClassType && name != null) {
variants.add(LookupElementBuilder.createWithSmartPointer(name, expression)
.withTypeText(qualifierType.getName())
.withIcon(PlatformIcons.FIELD_ICON));
namesAlready.add(name);
}
}
}
Collections.addAll(variants, qualifierType.getCompletionVariants(element.getName(), element, ctx));
return variants.toArray();
}
else {
return qualifierType.getCompletionVariants(element.getName(), element, ctx);
}
}
else {
final PyClassType guessedType = guessClassTypeByName();
if (guessedType != null) {
Collections.addAll(variants, getTypeCompletionVariants(myElement, guessedType));
}
if (qualifier instanceof PyReferenceExpression) {
Collections.addAll(variants, collectSeenMembers(qualifier.getText()));
}
return variants.toArray();
}
}
private Object[] getVariantFromHasAttr(PyExpression qualifier) {
Collection<Object> variants = new ArrayList<>();
PyIfStatement ifStatement = PsiTreeUtil.getParentOfType(myElement, PyIfStatement.class);
while (ifStatement != null) {
PyExpression condition = ifStatement.getIfPart().getCondition();
if (condition instanceof PyCallExpression && ((PyCallExpression)condition).isCalleeText(PyNames.HAS_ATTR)) {
PyCallExpression call = (PyCallExpression)condition;
if (call.getArguments().length > 1 && call.getArguments()[0].getText().equals(qualifier.getText())) {
PyStringLiteralExpression string = call.getArgument(1, PyStringLiteralExpression.class);
if (string != null && StringUtil.isJavaIdentifier(string.getStringValue())) variants.add(string.getStringValue());
}
}
ifStatement = PsiTreeUtil.getParentOfType(ifStatement, PyIfStatement.class);
}
return variants.toArray();
}
@Nullable
private PyClassType guessClassTypeByName() {
final PyExpression qualifierElement = myElement.getQualifier();
if (qualifierElement instanceof PyReferenceExpression) {
PyReferenceExpression qualifier = (PyReferenceExpression)qualifierElement;
final String className = qualifier.getReferencedName();
if (className != null) {
Collection<PyClass> classes = PyClassNameIndexInsensitive.find(className, getElement().getProject());
classes = filterByImports(classes, myElement.getContainingFile());
if (classes.size() == 1) {
return new PyClassTypeImpl(classes.iterator().next(), false);
}
}
}
return null;
}
private static Collection<PyClass> filterByImports(Collection<PyClass> classes, PsiFile containingFile) {
if (classes.size() <= 1) {
return classes;
}
List<PyClass> result = new ArrayList<>();
for (PyClass pyClass : classes) {
if (pyClass.getContainingFile() == containingFile) {
result.add(pyClass);
}
else {
final PsiElement exportedClass = ((PyFile)containingFile).getElementNamed(pyClass.getName());
if (exportedClass == pyClass) {
result.add(pyClass);
}
}
}
return result;
}
private Object[] collectSeenMembers(final String text) {
final Set<String> members = new HashSet<>();
myElement.getContainingFile().accept(new PyRecursiveElementVisitor() {
@Override
public void visitPyReferenceExpression(PyReferenceExpression node) {
super.visitPyReferenceExpression(node);
visitPyQualifiedExpression(node);
}
@Override
public void visitPyTargetExpression(PyTargetExpression node) {
super.visitPyTargetExpression(node);
visitPyQualifiedExpression(node);
}
private void visitPyQualifiedExpression(PyQualifiedExpression node) {
if (node != myElement) {
final PyExpression qualifier = node.getQualifier();
if (qualifier != null && qualifier.getText().equals(text)) {
final String refName = node.getReferencedName();
if (refName != null) {
members.add(refName);
}
}
}
}
});
List<LookupElement> results = new ArrayList<>(members.size());
for (String member : members) {
results.add(AutoCompletionPolicy.NEVER_AUTOCOMPLETE.applyPolicy(LookupElementBuilder.create(member)));
}
return ArrayUtil.toObjectArray(results);
}
/**
* Returns expressions accessible from scope of "anchor" with names that start with provided "qualifierQName".
* Can be used for completion.
*/
@NotNull
public static Collection<PyTargetExpression> collectAssignedAttributes(@NotNull final QualifiedName qualifierQName,
@NotNull final PsiElement anchor) {
final Set<String> names = new HashSet<>();
final List<PyTargetExpression> results = new ArrayList<>();
for (ScopeOwner owner = ScopeUtil.getScopeOwner(anchor); owner != null; owner = ScopeUtil.getScopeOwner(owner)) {
final Scope scope = ControlFlowCache.getScope(owner);
for (final PyTargetExpression target : scope.getTargetExpressions()) {
final QualifiedName targetQName = target.asQualifiedName();
if (targetQName != null) {
if (targetQName.getComponentCount() == qualifierQName.getComponentCount() + 1 && targetQName.matchesPrefix(qualifierQName)) {
final String name = target.getName();
if (!names.contains(name)) {
names.add(name);
results.add(target);
}
}
}
}
}
return results;
}
@Override
public boolean isReferenceTo(@NotNull PsiElement element) {
// performance: a qualified reference can never resolve to a local variable or parameter
if (isLocalScope(element)) {
return false;
}
final String referencedName = myElement.getReferencedName();
PyResolveContext resolveContext = myContext.withoutImplicits();
// Guess type eval context origin for switching to local dataflow and return type analysis
if (resolveContext.getTypeEvalContext().getOrigin() == null) {
final PsiFile containingFile = myElement.getContainingFile();
if (containingFile instanceof StubBasedPsiElement) {
assert ((StubBasedPsiElement)containingFile).getStub() == null : "Stub origin for type eval context in isReferenceTo()";
}
final TypeEvalContext context = TypeEvalContext.codeAnalysis(containingFile.getProject(), containingFile);
resolveContext = resolveContext.withTypeEvalContext(context);
}
if (element instanceof PyFunction && Comparing.equal(referencedName, ((PyFunction)element).getName()) &&
((PyFunction)element).getContainingClass() != null && !PyNames.INIT.equals(referencedName)) {
final PyExpression qualifier = myElement.getQualifier();
if (qualifier != null) {
final PyType qualifierType = resolveContext.getTypeEvalContext().getType(qualifier);
if (qualifierType == null ||
(qualifierType instanceof PyStructuralType && ((PyStructuralType)qualifierType).isInferredFromUsages())) {
return true;
}
}
}
for (ResolveResult result : copyWithResolveContext(resolveContext).multiResolve(false)) {
LOG.assertTrue(!(result instanceof ImplicitResolveResult));
PsiElement resolveResult = result.getElement();
if (isResolvedToResult(element, resolveResult)) {
return true;
}
}
return false;
}
@NotNull
protected PyQualifiedReference copyWithResolveContext(PyResolveContext context) {
return new PyQualifiedReference(myElement, context);
}
private boolean isResolvedToResult(PsiElement element, PsiElement resolveResult) {
if (resolveResult instanceof PyImportedModule) {
resolveResult = resolveResult.getNavigationElement();
}
if (element instanceof PsiDirectory && resolveResult instanceof PyFile &&
PyNames.INIT_DOT_PY.equals(((PyFile)resolveResult).getName()) && ((PyFile)resolveResult).getContainingDirectory() == element) {
return true;
}
if (resolveResult == element) {
return true;
}
if (resolveResult instanceof PyTargetExpression && PyUtil.isAttribute((PyTargetExpression)resolveResult) &&
element instanceof PyTargetExpression && PyUtil.isAttribute((PyTargetExpression)element) && Comparing.equal(
((PyTargetExpression)resolveResult).getReferencedName(),
((PyTargetExpression)element).getReferencedName())) {
PyClass aClass = PsiTreeUtil.getParentOfType(resolveResult, PyClass.class);
PyClass bClass = PsiTreeUtil.getParentOfType(element, PyClass.class);
if (aClass != null && bClass != null && bClass.isSubclass(aClass, myContext.getTypeEvalContext())) {
return true;
}
}
if (resolvesToWrapper(element, resolveResult)) {
return true;
}
return false;
}
/**
 * Returns true when the element lives in a local (function) scope: it is either
 * a parameter, or an unqualified target expression whose scope owner is a function.
 */
private static boolean isLocalScope(PsiElement element) {
  if (element instanceof PyTargetExpression) {
    final PyTargetExpression target = (PyTargetExpression)element;
    return !target.isQualified() && ScopeUtil.getScopeOwner(target) instanceof PyFunction;
  }
  // PyParameter and PyTargetExpression are distinct PSI types, so checking
  // parameters after targets preserves the original behavior.
  return element instanceof PyParameter;
}
@Override
public String toString() {
  // Same rendering as before: PyQualifiedReference(<element>,<context>);
  // String.format's %s uses String.valueOf, matching string concatenation.
  return String.format("PyQualifiedReference(%s,%s)", myElement, myContext);
}
}
| msebire/intellij-community | python/src/com/jetbrains/python/psi/impl/references/PyQualifiedReference.java | Java | apache-2.0 | 21,861 |
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.util;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
/**
 * Log4j appender used by tests: it records every logged message in a static list
 * and flags whether {@link #close()} has been called.
 *
 * @author Alef Arendsen
 */
public class MockLog4jAppender extends AppenderSkeleton {

	/** Messages passed to {@link #append(LoggingEvent)}, in logging order. */
	public static final List<Object> loggingStrings = new ArrayList<Object>();

	/** Set to {@code true} once {@link #close()} has been invoked. */
	public static boolean closeCalled = false;

	/**
	 * Records the raw message object of the event so tests can inspect it.
	 */
	@Override
	protected void append(LoggingEvent evt) {
		loggingStrings.add(evt.getMessage());
	}

	/**
	 * Marks this appender as closed; tests assert on {@link #closeCalled}.
	 */
	@Override
	public void close() {
		closeCalled = true;
	}

	/**
	 * This appender does no formatting, so no layout is required.
	 */
	@Override
	public boolean requiresLayout() {
		return false;
	}
}
| qobel/esoguproject | spring-framework/spring-core/src/test/java/org/springframework/util/MockLog4jAppender.java | Java | apache-2.0 | 1,524 |
using System.Collections.ObjectModel;
namespace api.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Help-page model description for a complex (composite) type, documented
/// through the collection of its property descriptions.
/// </summary>
public class ComplexTypeModelDescription : ModelDescription
{
    public ComplexTypeModelDescription()
    {
        // Start with an empty, mutable list that describers populate.
        Properties = new Collection<ParameterDescription>();
    }

    /// <summary>Descriptions of the type's public properties.</summary>
    public Collection<ParameterDescription> Properties { get; private set; }
}
} | akeresztesgh/AngularAndWebAPI | api/api/Areas/HelpPage/ModelDescriptions/ComplexTypeModelDescription.cs | C# | apache-2.0 | 377 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rest;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.spi.Registry;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Verifies that a global intercept() also applies to exchanges flowing through
 * routes created with the REST DSL, not only plain routes.
 */
public class FromRestGetInterceptTest extends ContextTestSupport {

    @Override
    protected Registry createRegistry() throws Exception {
        Registry jndi = super.createRegistry();
        // Bind the dummy REST consumer factory under the name the REST DSL looks up.
        jndi.bind("dummy-rest", new DummyRestConsumerFactory());
        return jndi;
    }

    @Test
    public void testFromRestModel() throws Exception {
        getMockEndpoint("mock:hello").expectedMessageCount(1);
        getMockEndpoint("mock:bar").expectedMessageCount(1);
        // Four intercepted messages are expected for the single request
        // (presumably one per endpoint the exchange visits — confirm against the route).
        getMockEndpoint("mock:intercept").expectedMessageCount(4);

        String out = template.requestBody("seda:get-say-hello", "I was here", String.class);
        assertEquals("Bye World", out);

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                restConfiguration().host("localhost");

                // Global interceptor — must fire for REST DSL routes as well.
                intercept().to("mock:intercept");

                rest("/say/hello").get().to("direct:hello");

                from("direct:hello")
                        .to("mock:hello").to("mock:bar").transform().constant("Bye World");
            }
        };
    }
}
| christophd/camel | core/camel-core/src/test/java/org/apache/camel/component/rest/FromRestGetInterceptTest.java | Java | apache-2.0 | 2,315 |
package openid
import (
"net/http"
"github.com/ory-am/fosite"
"github.com/pkg/errors"
"golang.org/x/net/context"
)
// OpenIDConnectExplicitHandler implements the authorize-endpoint part of the
// OpenID Connect explicit (authorization code) flow: it persists the authorize
// request so an id_token can be issued later at the token endpoint.
type OpenIDConnectExplicitHandler struct {
	// OpenIDConnectRequestStorage is the storage for open id connect sessions.
	OpenIDConnectRequestStorage OpenIDConnectRequestStorage

	// IDTokenHandleHelper provides shared id_token issuance helpers.
	*IDTokenHandleHelper
}
// HandleAuthorizeEndpointRequest stores the authorize request for the OpenID
// Connect explicit flow so the token endpoint can later issue the id_token.
// It is a no-op unless the "openid" scope was granted and the response type is
// exactly "code".
func (c *OpenIDConnectExplicitHandler) HandleAuthorizeEndpointRequest(ctx context.Context, req *http.Request, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error {
	// Not an OpenID Connect authorization-code request: nothing to do.
	if !ar.GetGrantedScopes().Has("openid") || !ar.GetResponseTypes().Exact("code") {
		return nil
	}

	if !ar.GetClient().GetResponseTypes().Has("id_token", "code") {
		return errors.Wrap(fosite.ErrInvalidRequest, "The client is not allowed to use response type id_token and code")
	}

	code := resp.GetCode()
	if len(code) == 0 {
		return errors.Wrap(fosite.ErrMisconfiguration, "Authorization code has not been issued yet")
	}

	if err := c.OpenIDConnectRequestStorage.CreateOpenIDConnectSession(ctx, code, ar); err != nil {
		return errors.Wrap(fosite.ErrServerError, err.Error())
	}

	// HTTPS enforcement has already been performed by the core explicit handler.
	return nil
}
| tangfeixiong/go-to-exercise | vendor/github.com/ory-am/fosite/handler/openid/flow_explicit_auth.go | GO | apache-2.0 | 1,197 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for reduction ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import tensor_shape
class SumReductionTest(tf.test.TestCase):
  """Tests tf.reduce_sum values and gradients against np.sum."""

  def _compare(self, x, reduction_axes, keep_dims, use_gpu=False):
    # Build the NumPy reference result.
    np_ans = x
    if reduction_axes is None:
      np_ans = np.sum(np_ans, keepdims=keep_dims)
    else:
      reduction_axes = np.array(reduction_axes).astype(np.int32)
      # Reduce innermost axes first so earlier axis indices remain valid.
      for ra in reduction_axes.ravel()[::-1]:
        np_ans = np.sum(np_ans, axis=ra, keepdims=keep_dims)
    with self.test_session(use_gpu=use_gpu):
      tf_ans = tf.reduce_sum(x, reduction_axes, keep_dims)
      out = tf_ans.eval()
    self.assertAllClose(np_ans, out)
    self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, x, reduction_axes):
    if reduction_axes is not None and np.shape(reduction_axes) == (1,):
      # Test scalar reduction_axes argument
      self._compareAll(x, reduction_axes[0])
    # Exercise all four combinations of keep_dims x CPU/GPU.
    self._compare(x, reduction_axes, False, use_gpu=True)
    self._compare(x, reduction_axes, False, use_gpu=False)
    self._compare(x, reduction_axes, True, use_gpu=True)
    self._compare(x, reduction_axes, True, use_gpu=False)

  def testFloatReduce1D(self):
    # Create a 1D array of floats
    np_arr = np.arange(1, 6).reshape([5]).astype(np.float32)
    self._compareAll(np_arr, [0])

  def testFloatReduce2D(self):
    # Create a 2D array of floats and reduce across all possible
    # dimensions
    np_arr = np.arange(0, 10).reshape([2, 5]).astype(np.float32)
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [0, 1])

  def testFloatReduce3D(self):
    # Create a 3D array of floats and reduce across all possible
    # dimensions
    np_arr = np.arange(0, 30).reshape([2, 3, 5]).astype(np.float32)
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])

  def testFloatReduce4D(self):
    # Create a 4D array of floats and reduce across some
    # dimensions
    np_arr = np.arange(0, 210).reshape([2, 3, 5, 7]).astype(np.float32)
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    # Need specialization for reduce(4D, [0, 2])
    # self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])
    self._compareAll(np_arr, [1, 2, 3])
    self._compareAll(np_arr, [0, 1, 2, 3])

  def testFloatReduce5D(self):
    # Create a 5D array of floats and reduce across some dimensions
    np_arr = np.arange(0, 840).reshape([2, 3, 5, 7, 4]).astype(np.float32)
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    # Need specialization for reduce(4D, [0, 2])
    # self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])
    self._compareAll(np_arr, [1, 2, 3])
    self._compareAll(np_arr, [0, 1, 2, 3])
    self._compareAll(np_arr, [1, 2, 3, 4])
    self._compareAll(np_arr, [0, 1, 2, 3, 4])

  # Simple tests for various types.
  def testDoubleReduce1D(self):
    np_arr = np.arange(1, 6).reshape([5]).astype(np.float64)
    self._compare(np_arr, [], False)
    self._compare(np_arr, [0], False)

  def testInt32Reduce1D(self):
    np_arr = np.arange(1, 6).reshape([5]).astype(np.int32)
    self._compare(np_arr, [], False)
    self._compare(np_arr, [0], False)

  def testComplex64Reduce1D(self):
    np_arr = np.arange(1, 6).reshape([5]).astype(np.complex64)
    self._compare(np_arr, [], False)
    self._compare(np_arr, [0], False)

  def testInvalidIndex(self):
    # Out-of-range or negative axes should be rejected at graph-build time.
    np_arr = np.arange(0, 10).reshape([2, 5]).astype(np.float32)
    input_tensor = tf.convert_to_tensor(np_arr)
    with self.assertRaisesWithPredicateMatch(
        ValueError, lambda e: "Invalid reduction dimension" in str(e)):
      tf.reduce_sum(input_tensor, [-1])
    with self.assertRaisesWithPredicateMatch(
        ValueError, lambda e: "Invalid reduction dimension" in str(e)):
      tf.reduce_sum(input_tensor, [2])
    with self.assertRaisesWithPredicateMatch(
        ValueError, lambda e: "Invalid reduction dimension" in str(e)):
      tf.reduce_sum(input_tensor, [0, 2])

  # Int64??

  def _compareGradient(self, shape, sum_shape, reduction_axes):
    # Compares the symbolic Jacobian of reduce_sum against a numeric estimate.
    if reduction_axes is not None and np.shape(reduction_axes) == (1,):
      # Test scalar reduction_axes argument
      self._compareGradient(shape, sum_shape, reduction_axes[0])
    x = np.arange(1.0, 49.0).reshape(shape).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_sum(t, reduction_axes)
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  shape,
                                                  su,
                                                  sum_shape,
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient(self):
    self._compareGradient([2, 3, 4, 2], [2, 2], [1, 2])

  def testGradient2(self):
    self._compareGradient([2, 3, 4, 2], [2, 4, 2], [1])

  def testGradient3(self):
    self._compareGradient([2, 3, 4, 2], [2, 3, 2], [2])

  def testGradient4(self):
    self._compareGradient([2, 3, 4, 2], [], None)
class MeanReductionTest(tf.test.TestCase):
  """Tests tf.reduce_mean values and gradients against a NumPy reference."""

  def _compare(self, x, reduction_axes, keep_dims):
    # Reference mean: sum over the axes, then divide by the number of
    # reduced elements.
    np_sum = x
    count = 1
    for ra in reduction_axes[::-1]:
      np_sum = np.sum(np_sum, axis=ra, keepdims=keep_dims)
      count *= x.shape[ra]
    np_ans = np_sum / count
    with self.test_session():
      reduction_axes = np.array(reduction_axes).astype(np.int32)
      tf_ans = tf.reduce_mean(x, reduction_axes, keep_dims)
      out = tf_ans.eval()
    self.assertAllClose(np_ans, out)
    self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, x, reduction_axes):
    # Check both keep_dims variants.
    self._compare(x, reduction_axes, False)
    self._compare(x, reduction_axes, True)

  def testFloatReduce3D(self):
    # Create a 3D array of floats and reduce across all possible
    # dimensions
    np_arr = np.arange(0, 30).reshape([2, 3, 5]).astype(np.float32)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])

  def testGradient(self):
    # Compares symbolic vs. numeric Jacobians for several axis choices.
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float32)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_mean(t, [1, 2])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
      su = tf.reduce_mean(t, [0, 1, 2, 3])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [1],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
      su = tf.reduce_mean(t, [])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 3, 4, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
class ProdReductionTest(tf.test.TestCase):
  """Tests tf.reduce_prod values and gradients against np.prod."""

  def _compare(self, x, reduction_axes, keep_dims):
    np_ans = x
    if reduction_axes is None:
      np_ans = np.prod(np_ans, keepdims=keep_dims)
    else:
      # Reduce innermost axes first so earlier axis indices remain valid.
      for ra in reduction_axes[::-1]:
        np_ans = np.prod(np_ans, axis=ra, keepdims=keep_dims)
    with self.test_session():
      if reduction_axes is not None:
        reduction_axes = np.array(reduction_axes).astype(np.int32)
      tf_ans = tf.reduce_prod(x, reduction_axes, keep_dims)
      out = tf_ans.eval()
    self.assertAllClose(np_ans, out)
    self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, x, reduction_axes):
    # Check both keep_dims variants.
    self._compare(x, reduction_axes, False)
    self._compare(x, reduction_axes, True)

  def testFloatReduce3D(self):
    # Create a 3D array of floats and reduce across all possible
    # dimensions
    np_arr = np.arange(0, 30).reshape([2, 3, 5]).astype(np.float32)
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])

  def testGradient(self):
    s = [2, 3, 4, 2]
    # NOTE(kearnes): divide by 20 so product is a reasonable size
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float32) / 20.
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_prod(t, [])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 3, 4, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
      su = tf.reduce_prod(t, [1, 2])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
      su = tf.reduce_prod(t, [0, 1, 2, 3])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [1],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
    # NOTE(kearnes): the current gradient calculation gives NaNs for 0 inputs
    x = np.arange(0.0, 48.0).reshape(s).astype(np.float32) / 20.
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_prod(t, [])
      jacob_t, _ = tf.test.compute_gradient(t,
                                            s,
                                            su,
                                            [2, 3, 4, 2],
                                            x_init_value=x,
                                            delta=1)
      with self.assertRaisesOpError("Tensor had NaN values"):
        tf.check_numerics(jacob_t, message="_ProdGrad NaN test").op.run()
class MinReductionTest(tf.test.TestCase):
  """Tests tf.reduce_min values and gradients against np.amin."""

  def _compare(self, x, reduction_axes, keep_dims, use_gpu=False):
    np_ans = x
    if reduction_axes is None:
      np_ans = np.amin(np_ans, keepdims=keep_dims)
    else:
      # Reduce innermost axes first so earlier axis indices remain valid.
      for ra in reduction_axes[::-1]:
        np_ans = np.amin(np_ans, axis=ra, keepdims=keep_dims)
    with self.test_session(use_gpu=use_gpu):
      if reduction_axes is not None:
        reduction_axes = np.array(reduction_axes).astype(np.int32)
      tf_ans = tf.reduce_min(x, reduction_axes, keep_dims)
      out = tf_ans.eval()
    self.assertAllClose(np_ans, out)
    self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, x, reduction_axes):
    # Exercise all four combinations of keep_dims x CPU/GPU.
    self._compare(x, reduction_axes, False, use_gpu=True)
    self._compare(x, reduction_axes, False, use_gpu=False)
    self._compare(x, reduction_axes, True, use_gpu=True)
    self._compare(x, reduction_axes, True, use_gpu=False)

  def testFloatReduce3D(self):
    # Create a 3D array of floats and reduce across all possible
    # dimensions
    np_arr = np.arange(0, 30).reshape([2, 3, 5]).astype(np.float32)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])

  def testGradient(self):
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_min(t, [1, 2])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient2(self):
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_min(t, [1])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 4, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient3(self):
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_min(t, [2])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 3, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient4(self):
    # Reduce over all dimensions (no axes argument).
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_min(t)
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [1],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)
class MaxReductionTest(tf.test.TestCase):
  """Tests tf.reduce_max values and gradients against np.amax."""

  def _compare(self, x, reduction_axes, keep_dims, use_gpu=False):
    np_ans = x
    if reduction_axes is None:
      np_ans = np.amax(np_ans, keepdims=keep_dims)
    else:
      # Reduce innermost axes first so earlier axis indices remain valid.
      for ra in reduction_axes[::-1]:
        np_ans = np.amax(np_ans, axis=ra, keepdims=keep_dims)
    with self.test_session(use_gpu=use_gpu):
      if reduction_axes is not None:
        reduction_axes = np.array(reduction_axes).astype(np.int32)
      tf_ans = tf.reduce_max(x, reduction_axes, keep_dims)
      out = tf_ans.eval()
    self.assertAllClose(np_ans, out)
    self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, x, reduction_axes):
    # Exercise all four combinations of keep_dims x CPU/GPU.
    self._compare(x, reduction_axes, False, use_gpu=True)
    self._compare(x, reduction_axes, False, use_gpu=False)
    self._compare(x, reduction_axes, True, use_gpu=True)
    self._compare(x, reduction_axes, True, use_gpu=False)

  def testFloatReduce3D(self):
    # Create a 3D array of floats and reduce across all possible
    # dimensions
    np_arr = np.arange(0, 30).reshape([2, 3, 5]).astype(np.float32)
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])

  def testGradient(self):
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_max(t, [1, 2])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient2(self):
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_max(t, [1])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 4, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient3(self):
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_max(t, [2])
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [2, 3, 2],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient4(self):
    # Reduce over all dimensions (no axes argument).
    s = [2, 3, 4, 2]
    x = np.arange(1.0, 49.0).reshape(s).astype(np.float64)
    with self.test_session():
      t = tf.convert_to_tensor(x)
      su = tf.reduce_max(t)
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  s,
                                                  su,
                                                  [1],
                                                  x_init_value=x,
                                                  delta=1)
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)
class AllReductionTest(tf.test.TestCase):
  """Tests tf.reduce_all against np.all on boolean inputs."""

  def _compare(self, x, reduction_axes, keep_dims, use_gpu=False):
    np_ans = x
    if reduction_axes is None:
      np_ans = np.all(np_ans, keepdims=keep_dims)
    else:
      # Reduce innermost axes first so earlier axis indices remain valid.
      for ra in reduction_axes[::-1]:
        np_ans = np.all(np_ans, axis=ra, keepdims=keep_dims)
    with self.test_session(use_gpu=use_gpu):
      if reduction_axes is not None:
        reduction_axes = np.array(reduction_axes).astype(np.int32)
      tf_ans = tf.reduce_all(x, reduction_axes, keep_dims)
      out = tf_ans.eval()
    self.assertAllEqual(np_ans, out)
    self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, x, reduction_axes):
    # Exercise all four combinations of keep_dims x CPU/GPU.
    self._compare(x, reduction_axes, False, use_gpu=True)
    self._compare(x, reduction_axes, False, use_gpu=False)
    self._compare(x, reduction_axes, True, use_gpu=True)
    self._compare(x, reduction_axes, True, use_gpu=False)

  def testAll3D(self):
    # Create a 3D array of bools and reduce across all possible
    # dimensions
    np_arr = (np.random.uniform(0, 1, 30) > 0.1).reshape([2, 3, 5])
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])
class AnyReductionTest(tf.test.TestCase):
  """Tests tf.reduce_any against np.any, plus partial static-shape inference."""

  def _compare(self, x, reduction_axes, keep_dims, use_gpu=False):
    np_ans = x
    if reduction_axes is None:
      np_ans = np.any(np_ans, keepdims=keep_dims)
    else:
      # Reduce innermost axes first so earlier axis indices remain valid.
      for ra in reduction_axes[::-1]:
        np_ans = np.any(np_ans, axis=ra, keepdims=keep_dims)
    with self.test_session(use_gpu=use_gpu):
      if reduction_axes is not None:
        reduction_axes = np.array(reduction_axes).astype(np.int32)
      tf_ans = tf.reduce_any(x, reduction_axes, keep_dims)
      out = tf_ans.eval()
    self.assertAllEqual(np_ans, out)
    self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, x, reduction_axes):
    # Exercise all four combinations of keep_dims x CPU/GPU.
    self._compare(x, reduction_axes, False, use_gpu=True)
    self._compare(x, reduction_axes, False, use_gpu=False)
    self._compare(x, reduction_axes, True, use_gpu=True)
    self._compare(x, reduction_axes, True, use_gpu=False)

  def testAll3D(self):
    # Create a 3D array of bools and reduce across all possible
    # dimensions
    np_arr = (np.random.uniform(0, 1, 30) > 0.9).reshape([2, 3, 5])
    self._compareAll(np_arr, None)
    self._compareAll(np_arr, [])
    self._compareAll(np_arr, [0])
    self._compareAll(np_arr, [1])
    self._compareAll(np_arr, [2])
    self._compareAll(np_arr, [0, 1])
    self._compareAll(np_arr, [1, 2])
    self._compareAll(np_arr, [0, 2])
    self._compareAll(np_arr, [0, 1, 2])

  def testPartialShapes(self):
    # Static shape inference for reductions with partially known shapes.
    # Input shape is unknown.
    c_unknown = tf.placeholder(tf.float32)
    s_unknown = tf.reduce_sum(c_unknown, [1, 2])
    self.assertEqual(tensor_shape.unknown_shape(), s_unknown.get_shape())

    # Input shape only has known rank.
    c_known_rank = tf.placeholder(tf.float32)
    c_known_rank.set_shape(tensor_shape.unknown_shape(ndims=3))
    s_known_rank = tf.reduce_sum(c_known_rank, [1, 2], keep_dims=True)
    self.assertEqual(3, s_known_rank.get_shape().ndims)

    # Reduction indices are unknown.
    unknown_indices = tf.placeholder(tf.int32)
    c_unknown_indices = tf.constant([[10.0], [20.0]])
    s_unknown_indices = tf.reduce_sum(c_unknown_indices, unknown_indices,
                                      keep_dims=False)
    self.assertEqual(tensor_shape.unknown_shape(),
                     s_unknown_indices.get_shape())
    s_unknown_indices_keep = tf.reduce_sum(c_unknown_indices, unknown_indices,
                                           keep_dims=True)
    self.assertEqual(2, s_unknown_indices_keep.get_shape().ndims)
# Run all test classes in this module when executed directly.
if __name__ == "__main__":
  tf.test.main()
| YanTangZhai/tf | tensorflow/python/kernel_tests/reduction_ops_test.py | Python | apache-2.0 | 24,486 |
package org.apereo.cas.support.saml.mdui.web.flow;
import org.apereo.cas.config.CasCoreAuthenticationConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationHandlersConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationMetadataConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationPolicyConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationPrincipalConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationServiceSelectionStrategyConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationSupportConfiguration;
import org.apereo.cas.config.CasCoreConfiguration;
import org.apereo.cas.config.CasCoreHttpConfiguration;
import org.apereo.cas.config.CasCoreServicesConfiguration;
import org.apereo.cas.config.CasCoreTicketCatalogConfiguration;
import org.apereo.cas.config.CasCoreTicketIdGeneratorsConfiguration;
import org.apereo.cas.config.CasCoreTicketsConfiguration;
import org.apereo.cas.config.CasCoreUtilConfiguration;
import org.apereo.cas.config.CasCoreWebConfiguration;
import org.apereo.cas.config.CasDefaultServiceTicketIdGeneratorsConfiguration;
import org.apereo.cas.config.CasPersonDirectoryConfiguration;
import org.apereo.cas.config.CoreSamlConfiguration;
import org.apereo.cas.config.SamlConfiguration;
import org.apereo.cas.config.support.CasWebApplicationServiceFactoryConfiguration;
import org.apereo.cas.config.support.EnvironmentConversionServiceInitializer;
import org.apereo.cas.logout.config.CasCoreLogoutConfiguration;
import org.apereo.cas.support.saml.AbstractOpenSamlTests;
import org.apereo.cas.support.saml.SamlProtocolConstants;
import org.apereo.cas.support.saml.mdui.SamlMetadataUIInfo;
import org.apereo.cas.support.saml.mdui.config.SamlMetadataUIConfiguration;
import org.apereo.cas.util.SchedulingUtils;
import org.apereo.cas.validation.config.CasCoreValidationConfiguration;
import org.apereo.cas.web.config.CasCookieConfiguration;
import org.apereo.cas.web.config.CasProtocolViewsConfiguration;
import org.apereo.cas.web.config.CasValidationConfiguration;
import org.apereo.cas.web.flow.config.CasCoreWebflowConfiguration;
import org.apereo.cas.web.support.WebUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.aop.AopAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;
import org.springframework.context.ApplicationContext;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.execution.Action;
import org.springframework.webflow.test.MockRequestContext;
import javax.annotation.PostConstruct;
import static org.junit.Assert.*;
/**
* This is {@link SamlMetadataUIParserActionTests}.
*
* @author Misagh Moayyed
* @since 4.1.0
*/
// Boots a trimmed-down CAS Spring context (configured via the classes below) so
// the samlMetadataUIParserAction webflow action can resolve SAML metadata UI
// info from the test resources declared in the @TestPropertySource.
@RunWith(SpringRunner.class)
@SpringBootTest(
    classes = {
        SamlMetadataUIParserActionTests.CasTestConfiguration.class,
        SamlMetadataUIConfiguration.class,
        CasDefaultServiceTicketIdGeneratorsConfiguration.class,
        CasCoreTicketIdGeneratorsConfiguration.class,
        CasWebApplicationServiceFactoryConfiguration.class,
        CasCoreAuthenticationConfiguration.class,
        CasCoreAuthenticationPolicyConfiguration.class,
        CasCoreAuthenticationPrincipalConfiguration.class,
        CasCoreAuthenticationMetadataConfiguration.class,
        CasCoreAuthenticationSupportConfiguration.class,
        CasCoreAuthenticationHandlersConfiguration.class,
        CasCoreHttpConfiguration.class,
        CasCoreServicesConfiguration.class,
        CoreSamlConfiguration.class,
        CasCoreWebConfiguration.class,
        CasCoreWebflowConfiguration.class,
        RefreshAutoConfiguration.class,
        AopAutoConfiguration.class,
        CasCookieConfiguration.class,
        CasCoreAuthenticationConfiguration.class,
        CasCoreTicketsConfiguration.class,
        CasCoreTicketCatalogConfiguration.class,
        CasCoreLogoutConfiguration.class,
        CasValidationConfiguration.class,
        CasProtocolViewsConfiguration.class,
        CasCoreValidationConfiguration.class,
        CasCoreConfiguration.class,
        CasCoreAuthenticationServiceSelectionStrategyConfiguration.class,
        SamlConfiguration.class,
        CasPersonDirectoryConfiguration.class,
        CasCoreUtilConfiguration.class})
@TestPropertySource(properties = {"cas.samlMetadataUi.resources=classpath:sample-metadata.xml::classpath:inc-md-pub.pem"})
@ContextConfiguration(initializers = EnvironmentConversionServiceInitializer.class)
public class SamlMetadataUIParserActionTests extends AbstractOpenSamlTests {

    // The webflow action under test, provided by SamlMetadataUIConfiguration.
    @Autowired
    @Qualifier("samlMetadataUIParserAction")
    private Action samlMetadataUIParserAction;

    @TestConfiguration
    public static class CasTestConfiguration {
        @Autowired
        protected ApplicationContext applicationContext;

        @PostConstruct
        public void init() {
            // Prepares the scheduled-annotation post-processor for this test context.
            SchedulingUtils.prepScheduledAnnotationBeanPostProcessor(applicationContext);
        }
    }

    @Test
    public void verifyEntityIdUIInfoExists() throws Exception {
        // A request carrying the SAML entityId parameter should produce metadata UI info.
        final MockRequestContext ctx = new MockRequestContext();
        final MockHttpServletRequest request = new MockHttpServletRequest();
        request.addParameter(SamlProtocolConstants.PARAMETER_ENTITY_ID, "https://carmenwiki.osu.edu/shibboleth");
        final MockHttpServletResponse response = new MockHttpServletResponse();
        final MockServletContext sCtx = new MockServletContext();
        ctx.setExternalContext(new ServletExternalContext(sCtx, request, response));
        samlMetadataUIParserAction.execute(ctx);
        assertNotNull(WebUtils.getServiceUserInterfaceMetadata(ctx, SamlMetadataUIInfo.class));
    }

    @Test
    public void verifyEntityIdUIInfoNoParam() throws Exception {
        // Without the entityId parameter, no metadata UI info should be produced.
        final MockRequestContext ctx = new MockRequestContext();
        final MockHttpServletRequest request = new MockHttpServletRequest();
        request.addParameter("somethingelse", "https://carmenwiki.osu.edu/shibboleth");
        final MockHttpServletResponse response = new MockHttpServletResponse();
        final MockServletContext sCtx = new MockServletContext();
        ctx.setExternalContext(new ServletExternalContext(sCtx, request, response));
        samlMetadataUIParserAction.execute(ctx);
        assertNull(WebUtils.getServiceUserInterfaceMetadata(ctx, SamlMetadataUIInfo.class));
    }
}
| mrluo735/cas-5.1.0 | support/cas-server-support-saml-mdui/src/test/java/org/apereo/cas/support/saml/mdui/web/flow/SamlMetadataUIParserActionTests.java | Java | apache-2.0 | 7,305 |
/**
*
* @author greg (at) myrobotlab.org
*
* This file is part of MyRobotLab (http://myrobotlab.org).
*
* MyRobotLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version (subject to the "Classpath" exception
* as provided in the LICENSE.txt file that accompanied this code).
*
* MyRobotLab is distributed in the hope that it will be useful or fun,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* All libraries in thirdParty bundle are subject to their own license
* requirements - please refer to http://myrobotlab.org/libraries for
* details.
*
* Enjoy !
*
* */
package org.myrobotlab.control.opencv;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JComboBox;
import javax.swing.SwingUtilities;
import org.myrobotlab.opencv.FilterWrapper;
import org.myrobotlab.opencv.OpenCVFilterErode;
import org.myrobotlab.service.GUIService;
/**
 * Swing panel for the OpenCV "erode" filter: exposes a single drop-down that
 * controls how many erosion passes the bound filter performs.
 */
public class OpenCVFilterErodeGUI extends OpenCVFilterGUI implements ActionListener {

    // Selector for the number of erosion passes (1-9).
    JComboBox iterations = new JComboBox(new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 });

    public OpenCVFilterErodeGUI(String boundFilterName, String boundServiceName, GUIService myService) {
        super(boundFilterName, boundServiceName, myService);
        iterations.addActionListener(this);
        display.add(iterations);
    }

    /**
     * Copies the selected iteration count into the bound filter and publishes
     * the updated filter state.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        OpenCVFilterErode erodeFilter = (OpenCVFilterErode) boundFilter.filter;
        if (e.getSource() == iterations) {
            erodeFilter.numberOfIterations = (Integer) iterations.getSelectedItem();
        }
        setFilterState(erodeFilter);
    }

    // @Override
    public void attachGUI() {
        log.debug("attachGUI");
    }

    // @Override
    public void detachGUI() {
        log.debug("detachGUI");
    }

    /**
     * Refreshes the drop-down from the supplied filter state, on the Swing
     * event-dispatch thread.
     */
    @Override
    public void getFilterState(final FilterWrapper filterWrapper) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                OpenCVFilterErode erodeFilter = (OpenCVFilterErode) filterWrapper.filter;
                iterations.setSelectedItem(erodeFilter.numberOfIterations);
            }
        });
    }
}
| robojukie/myrobotlab | src/org/myrobotlab/control/opencv/OpenCVFilterErodeGUI.java | Java | apache-2.0 | 2,355 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.streaming.javaapi;
import akka.actor.ActorSystem;
import org.apache.gearpump.cluster.Application;
import org.apache.gearpump.cluster.ApplicationMaster;
import org.apache.gearpump.cluster.UserConfig;
/**
* Java version of StreamApplication.
*
* Also see {@link org.apache.gearpump.streaming.StreamApplication}
*/
public class StreamApplication implements Application {
private org.apache.gearpump.streaming.StreamApplication app;
/**
* Creates a streaming application
*
* @param name Name of the application
* @param conf User configuration
* @param graph The DAG
*/
public StreamApplication(String name, UserConfig conf, Graph graph) {
//by pass the tricky type check in scala 2.10
org.apache.gearpump.util.Graph untypedGraph = graph;
this.app = org.apache.gearpump.streaming.StreamApplication.apply(
name, untypedGraph, conf);
}
@Override
public String name() {
return app.name();
}
@Override
public UserConfig userConfig(ActorSystem system) {
return app.userConfig(system);
}
@Override
public Class<? extends ApplicationMaster> appMaster() {
return app.appMaster();
}
} | manuzhang/incubator-gearpump | streaming/src/main/java/org/apache/gearpump/streaming/javaapi/StreamApplication.java | Java | apache-2.0 | 1,992 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.SolutionCrawler
{
internal sealed partial class SolutionCrawlerRegistrationService
{
/// <summary>
/// this will be used in the unit test to indicate certain action has happened or not.
/// </summary>
public const string EnqueueItem = nameof(EnqueueItem);
internal sealed partial class WorkCoordinator
{
            /// <summary>
            /// Turns semantic-change hints (a changed member in a document) into re-analysis
            /// work: a narrow enqueue when the changed symbol's accessibility limits its reach,
            /// otherwise a full project-dependency cascade via <see cref="ProjectProcessor"/>.
            /// </summary>
            private sealed class SemanticChangeProcessor : IdleProcessor
            {
                private static readonly Func<int, DocumentId, bool, string> s_enqueueLogger = (tick, documentId, hint) => $"Tick:{tick}, {documentId}, {documentId.ProjectId}, hint:{hint}";

                // Released once per queued item; ExecuteAsync waits on it before dequeuing.
                private readonly SemaphoreSlim _gate;

                private readonly Registration _registration;
                private readonly ProjectProcessor _processor;

                // _workGate guards _pendingWork: at most one pending Data per document.
                private readonly NonReentrantLock _workGate;
                private readonly Dictionary<DocumentId, Data> _pendingWork;

                public SemanticChangeProcessor(
                    IAsynchronousOperationListener listener,
                    Registration registration,
                    IncrementalAnalyzerProcessor documentWorkerProcessor,
                    TimeSpan backOffTimeSpan,
                    TimeSpan projectBackOffTimeSpan,
                    CancellationToken cancellationToken)
                    : base(listener, backOffTimeSpan, cancellationToken)
                {
                    _gate = new SemaphoreSlim(initialCount: 0);

                    _registration = registration;

                    _processor = new ProjectProcessor(listener, registration, documentWorkerProcessor, projectBackOffTimeSpan, cancellationToken);

                    _workGate = new NonReentrantLock();
                    _pendingWork = new Dictionary<DocumentId, Data>();

                    Start();

                    // Register a clean-up task to ensure pending work items are flushed from the queue if they will
                    // never be processed.
                    AsyncProcessorTask.ContinueWith(
                        _ => ClearQueueWorker(_workGate, _pendingWork, data => data.AsyncToken),
                        CancellationToken.None,
                        TaskContinuationOptions.ExecuteSynchronously,
                        TaskScheduler.Default);
                }

                public override Task AsyncProcessorTask
                {
                    get
                    {
                        // Completes only when both this processor and the downstream
                        // project processor have finished.
                        return Task.WhenAll(base.AsyncProcessorTask, _processor.AsyncProcessorTask);
                    }
                }

                protected override Task WaitAsync(CancellationToken cancellationToken)
                    => _gate.WaitAsync(cancellationToken);

                protected override async Task ExecuteAsync()
                {
                    var data = Dequeue();

                    using (data.AsyncToken)
                    {
                        // we have a hint. check whether we can take advantage of it
                        if (await TryEnqueueFromHintAsync(data).ConfigureAwait(continueOnCapturedContext: false))
                            return;

                        EnqueueFullProjectDependency(data.Project);
                    }
                }

                private Data Dequeue()
                    => DequeueWorker(_workGate, _pendingWork, CancellationToken);

                /// <summary>
                /// Attempts the cheap path: resolve the changed member to a declared symbol and
                /// enqueue only what that symbol's accessibility can affect. Returns false when
                /// the hint is missing or cannot be resolved, in which case the caller falls
                /// back to a full project-dependency enqueue.
                /// </summary>
                private async Task<bool> TryEnqueueFromHintAsync(Data data)
                {
                    var changedMember = data.ChangedMember;
                    if (changedMember == null)
                        return false;

                    var document = data.GetRequiredDocument();

                    // see whether we already have semantic model. otherwise, use the expansive full project dependency one
                    // TODO: if there is a reliable way to track changed member, we could use GetSemanticModel here which could
                    //       rebuild compilation from scratch
                    if (!document.TryGetSemanticModel(out var model) ||
                        !changedMember.TryResolve(await document.GetSyntaxRootAsync(CancellationToken).ConfigureAwait(false), out SyntaxNode? declarationNode))
                    {
                        return false;
                    }

                    var symbol = model.GetDeclaredSymbol(declarationNode, CancellationToken);
                    if (symbol == null)
                    {
                        return false;
                    }

                    return await TryEnqueueFromMemberAsync(document, symbol).ConfigureAwait(false) ||
                        await TryEnqueueFromTypeAsync(document, symbol).ConfigureAwait(false);
                }

                /// <summary>
                /// Narrow enqueue for a changed type: private types only affect their own
                /// declaring documents; internal types affect friend assemblies.
                /// </summary>
                private async Task<bool> TryEnqueueFromTypeAsync(Document document, ISymbol symbol)
                {
                    if (!IsType(symbol))
                    {
                        return false;
                    }

                    if (symbol.DeclaredAccessibility == Accessibility.Private)
                    {
                        await EnqueueWorkItemAsync(document, symbol).ConfigureAwait(false);

                        Logger.Log(FunctionId.WorkCoordinator_SemanticChange_EnqueueFromType, symbol.Name);
                        return true;
                    }

                    if (IsInternal(symbol))
                    {
                        var assembly = symbol.ContainingAssembly;
                        EnqueueFullProjectDependency(document.Project, assembly);
                        return true;
                    }

                    return false;
                }

                /// <summary>
                /// Narrow enqueue for a changed member: a private member affects only its
                /// declaring documents; otherwise defer to the containing type's reach.
                /// </summary>
                private async Task<bool> TryEnqueueFromMemberAsync(Document document, ISymbol symbol)
                {
                    if (!IsMember(symbol))
                    {
                        return false;
                    }

                    var typeSymbol = symbol.ContainingType;

                    if (symbol.DeclaredAccessibility == Accessibility.Private)
                    {
                        await EnqueueWorkItemAsync(document, symbol).ConfigureAwait(false);

                        Logger.Log(FunctionId.WorkCoordinator_SemanticChange_EnqueueFromMember, symbol.Name);
                        return true;
                    }

                    if (typeSymbol == null)
                    {
                        return false;
                    }

                    return await TryEnqueueFromTypeAsync(document, typeSymbol).ConfigureAwait(false);
                }

                // Enqueue all declaring locations of the symbol's containing type (or the
                // symbol itself if it has no containing type).
                private Task EnqueueWorkItemAsync(Document document, ISymbol symbol)
                    => EnqueueWorkItemAsync(document, symbol.ContainingType != null ? symbol.ContainingType.Locations : symbol.Locations);

                private async Task EnqueueWorkItemAsync(Document thisDocument, ImmutableArray<Location> locations)
                {
                    var solution = thisDocument.Project.Solution;
                    var projectId = thisDocument.Id.ProjectId;

                    foreach (var location in locations)
                    {
                        Debug.Assert(location.IsInSource);

                        var documentId = solution.GetDocumentId(location.SourceTree, projectId);
                        // Skip the originating document; it is already being processed.
                        if (documentId == null || thisDocument.Id == documentId)
                            continue;

                        await _processor.EnqueueWorkItemAsync(solution.GetRequiredProject(documentId.ProjectId), documentId, document: null).ConfigureAwait(false);
                    }
                }

                private static bool IsInternal(ISymbol symbol)
                {
                    return symbol.DeclaredAccessibility is Accessibility.Internal or
                           Accessibility.ProtectedAndInternal or
                           Accessibility.ProtectedOrInternal;
                }

                private static bool IsType(ISymbol symbol)
                    => symbol.Kind == SymbolKind.NamedType;

                private static bool IsMember(ISymbol symbol)
                {
                    return symbol.Kind is SymbolKind.Event or
                           SymbolKind.Field or
                           SymbolKind.Method or
                           SymbolKind.Property;
                }

                /// <summary>
                /// Enqueues the project and (some of) its dependents. When an assembly is given,
                /// only dependents that can actually see its internals are enqueued immediately;
                /// otherwise the project is enqueued with dependency tracking deferred.
                /// </summary>
                private void EnqueueFullProjectDependency(Project project, IAssemblySymbol? internalVisibleToAssembly = null)
                {
                    var self = project.Id;

                    // if there is no hint (this can happen for cases such as solution/project load and etc),
                    // we can postpone it even further
                    if (internalVisibleToAssembly == null)
                    {
                        _processor.Enqueue(self, needDependencyTracking: true);
                        return;
                    }

                    // most likely we got here since we are called due to typing.
                    // calculate dependency here and register each affected project to the next pipe line
                    var solution = project.Solution;
                    foreach (var projectId in GetProjectsToAnalyze(solution, self))
                    {
                        var otherProject = solution.GetProject(projectId);
                        if (otherProject == null)
                            continue;

                        if (otherProject.TryGetCompilation(out var compilation))
                        {
                            var assembly = compilation.Assembly;
                            // Skip dependents that cannot see the changed internals anyway.
                            if (assembly != null && !assembly.IsSameAssemblyOrHasFriendAccessTo(internalVisibleToAssembly))
                                continue;
                        }

                        _processor.Enqueue(projectId);
                    }

                    Logger.Log(FunctionId.WorkCoordinator_SemanticChange_FullProjects, internalVisibleToAssembly == null ? "full" : "internals");
                }

                /// <summary>
                /// Records a semantic-change hint for a document, coalescing with any pending
                /// hint for the same document (the hint is kept only if both agree).
                /// </summary>
                public void Enqueue(Project project, DocumentId documentId, Document? document, SyntaxPath? changedMember)
                {
                    UpdateLastAccessTime();

                    using (_workGate.DisposableWait(CancellationToken))
                    {
                        if (_pendingWork.TryGetValue(documentId, out var data))
                        {
                            // create new async token and dispose old one.
                            var newAsyncToken = Listener.BeginAsyncOperation(nameof(Enqueue), tag: _registration.Workspace);
                            data.AsyncToken.Dispose();

                            _pendingWork[documentId] = new Data(project, documentId, document, data.ChangedMember == changedMember ? changedMember : null, newAsyncToken);
                            return;
                        }

                        _pendingWork.Add(documentId, new Data(project, documentId, document, changedMember, Listener.BeginAsyncOperation(nameof(Enqueue), tag: _registration.Workspace)));
                        _gate.Release();
                    }

                    Logger.Log(FunctionId.WorkCoordinator_SemanticChange_Enqueue, s_enqueueLogger, Environment.TickCount, documentId, changedMember != null);
                }

                // Removes and returns an arbitrary entry from the map under the gate.
                private static TValue DequeueWorker<TKey, TValue>(NonReentrantLock gate, Dictionary<TKey, TValue> map, CancellationToken cancellationToken)
                    where TKey : notnull
                {
                    using (gate.DisposableWait(cancellationToken))
                    {
                        var first = default(KeyValuePair<TKey, TValue>);
                        foreach (var kv in map)
                        {
                            first = kv;
                            break;
                        }

                        // this is only one that removes data from the queue. so, it should always succeed
                        var result = map.Remove(first.Key);
                        Debug.Assert(result);

                        return first.Value;
                    }
                }

                // Disposes every pending item's token and empties the map; used on shutdown.
                private static void ClearQueueWorker<TKey, TValue>(NonReentrantLock gate, Dictionary<TKey, TValue> map, Func<TValue, IDisposable> disposerSelector)
                    where TKey : notnull
                {
                    using (gate.DisposableWait(CancellationToken.None))
                    {
                        foreach (var (_, data) in map)
                        {
                            disposerSelector?.Invoke(data)?.Dispose();
                        }

                        map.Clear();
                    }
                }

                private static IEnumerable<ProjectId> GetProjectsToAnalyze(Solution solution, ProjectId projectId)
                {
                    var graph = solution.GetProjectDependencyGraph();

                    if (solution.Workspace.Options.GetOption(InternalSolutionCrawlerOptions.DirectDependencyPropagationOnly))
                    {
                        return graph.GetProjectsThatDirectlyDependOnThisProject(projectId).Concat(projectId);
                    }

                    // re-analyzing all transitive dependencies is very expensive. by default we will only
                    // re-analyze direct dependency for now. and consider flipping the default only if we must.
                    return graph.GetProjectsThatTransitivelyDependOnThisProject(projectId).Concat(projectId);
                }

                // One pending semantic-change hint for a document.
                private readonly struct Data
                {
                    private readonly DocumentId _documentId;
                    private readonly Document? _document;

                    public readonly Project Project;
                    // Path to the changed member, or null when no reliable hint exists.
                    public readonly SyntaxPath? ChangedMember;
                    public readonly IAsyncToken AsyncToken;

                    public Data(Project project, DocumentId documentId, Document? document, SyntaxPath? changedMember, IAsyncToken asyncToken)
                    {
                        _documentId = documentId;
                        _document = document;
                        Project = project;
                        ChangedMember = changedMember;
                        AsyncToken = asyncToken;
                    }

                    public Document GetRequiredDocument()
                        => WorkCoordinator.GetRequiredDocument(Project, _documentId, _document);
                }

                /// <summary>
                /// Second pipeline stage: expands a queued project (optionally with dependency
                /// tracking) into per-document work items for the incremental analyzer.
                /// </summary>
                private class ProjectProcessor : IdleProcessor
                {
                    private static readonly Func<int, ProjectId, string> s_enqueueLogger = (t, i) => string.Format("[{0}] {1}", t, i.ToString());

                    // Released once per queued project; ExecuteAsync waits on it.
                    private readonly SemaphoreSlim _gate;

                    private readonly Registration _registration;
                    private readonly IncrementalAnalyzerProcessor _processor;

                    // _workGate guards _pendingWork: at most one pending Data per project.
                    private readonly NonReentrantLock _workGate;
                    private readonly Dictionary<ProjectId, Data> _pendingWork;

                    public ProjectProcessor(
                        IAsynchronousOperationListener listener,
                        Registration registration,
                        IncrementalAnalyzerProcessor processor,
                        TimeSpan backOffTimeSpan,
                        CancellationToken cancellationToken)
                        : base(listener, backOffTimeSpan, cancellationToken)
                    {
                        _registration = registration;
                        _processor = processor;

                        _gate = new SemaphoreSlim(initialCount: 0);

                        _workGate = new NonReentrantLock();
                        _pendingWork = new Dictionary<ProjectId, Data>();

                        Start();

                        // Register a clean-up task to ensure pending work items are flushed from the queue if they will
                        // never be processed.
                        AsyncProcessorTask.ContinueWith(
                            _ => ClearQueueWorker(_workGate, _pendingWork, data => data.AsyncToken),
                            CancellationToken.None,
                            TaskContinuationOptions.ExecuteSynchronously,
                            TaskScheduler.Default);
                    }

                    public void Enqueue(ProjectId projectId, bool needDependencyTracking = false)
                    {
                        UpdateLastAccessTime();

                        using (_workGate.DisposableWait(CancellationToken))
                        {
                            // the project is already in the queue. nothing needs to be done
                            if (_pendingWork.ContainsKey(projectId))
                            {
                                return;
                            }

                            var data = new Data(projectId, needDependencyTracking, Listener.BeginAsyncOperation(nameof(Enqueue), tag: _registration.Workspace));

                            _pendingWork.Add(projectId, data);
                            _gate.Release();
                        }

                        Logger.Log(FunctionId.WorkCoordinator_Project_Enqueue, s_enqueueLogger, Environment.TickCount, projectId);
                    }

                    /// <summary>
                    /// Hands one document work item to the incremental analyzer processor.
                    /// </summary>
                    public async Task EnqueueWorkItemAsync(Project project, DocumentId documentId, Document? document)
                    {
                        // we are shutting down
                        CancellationToken.ThrowIfCancellationRequested();

                        // call to this method is serialized. and only this method does the writing.
                        var priorityService = project.GetLanguageService<IWorkCoordinatorPriorityService>();
                        var isLowPriority = priorityService != null && await priorityService.IsLowPriorityAsync(GetRequiredDocument(project, documentId, document), CancellationToken).ConfigureAwait(false);

                        _processor.Enqueue(
                            new WorkItem(documentId, project.Language, InvocationReasons.SemanticChanged,
                                isLowPriority, activeMember: null, Listener.BeginAsyncOperation(nameof(EnqueueWorkItemAsync), tag: EnqueueItem)));
                    }

                    protected override Task WaitAsync(CancellationToken cancellationToken)
                        => _gate.WaitAsync(cancellationToken);

                    protected override async Task ExecuteAsync()
                    {
                        var data = Dequeue();

                        using (data.AsyncToken)
                        {
                            var project = _registration.GetSolutionToAnalyze().GetProject(data.ProjectId);
                            if (project == null)
                            {
                                return;
                            }

                            if (!data.NeedDependencyTracking)
                            {
                                await EnqueueWorkItemAsync(project).ConfigureAwait(false);
                                return;
                            }

                            // do dependency tracking here with current solution
                            var solution = _registration.GetSolutionToAnalyze();
                            foreach (var projectId in GetProjectsToAnalyze(solution, data.ProjectId))
                            {
                                project = solution.GetProject(projectId);
                                await EnqueueWorkItemAsync(project).ConfigureAwait(false);
                            }
                        }
                    }

                    private Data Dequeue()
                        => DequeueWorker(_workGate, _pendingWork, CancellationToken);

                    // Enqueues every document of the project; no-op when the project is gone.
                    private async Task EnqueueWorkItemAsync(Project? project)
                    {
                        if (project == null)
                            return;

                        foreach (var documentId in project.DocumentIds)
                            await EnqueueWorkItemAsync(project, documentId, document: null).ConfigureAwait(false);
                    }

                    // One pending project to expand into document work items.
                    private readonly struct Data
                    {
                        public readonly IAsyncToken AsyncToken;
                        public readonly ProjectId ProjectId;
                        public readonly bool NeedDependencyTracking;

                        public Data(ProjectId projectId, bool needDependencyTracking, IAsyncToken asyncToken)
                        {
                            AsyncToken = asyncToken;
                            ProjectId = projectId;
                            NeedDependencyTracking = needDependencyTracking;
                        }
                    }
                }
            }
}
}
}
| physhi/roslyn | src/Features/Core/Portable/SolutionCrawler/WorkCoordinator.SemanticChangeProcessor.cs | C# | apache-2.0 | 21,695 |
/*
* Copyright 2013 Wicresoft, Inc. All rights reserved.
*/
package com.cardpay.pccredit.intopieces.model;
import java.util.Date;
import com.wicresoft.jrad.base.database.model.ModelParam;
import com.wicresoft.jrad.base.database.model.BusinessModel;
/**
 * Persistent model mapped to table {@code qz_appln_process_result}: records
 * the result produced at a single node of an application's process flow
 * (which node, which operator, what operation, plus a free-form remark).
 *
 * @author 贺珈
 *
 */
@ModelParam(table = "qz_appln_process_result")
public class QzApplnProcessResult extends BusinessModel {

    private static final long serialVersionUID = 1L;

    // Primary key.
    private String id;
    // Id of the application this process result belongs to.
    private String applicationId;
    // Id and display name of the process node that produced this result.
    private String nodeId;
    private String nodeName;
    // Kind of operation performed at the node.
    private String operateType;
    // Id and display name of the user who performed the operation.
    private String userId;
    private String userName;
    // Free-form remark entered by the operator.
    private String remark;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getApplicationId() {
        return applicationId;
    }

    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    public String getNodeId() {
        return nodeId;
    }

    public void setNodeId(String nodeId) {
        this.nodeId = nodeId;
    }

    public String getNodeName() {
        return nodeName;
    }

    public void setNodeName(String nodeName) {
        this.nodeName = nodeName;
    }

    public String getOperateType() {
        return operateType;
    }

    public void setOperateType(String operateType) {
        this.operateType = operateType;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }
}
| qkhj/PCCREDIT_QZ | src/java/com/cardpay/pccredit/intopieces/model/QzApplnProcessResult.java | Java | apache-2.0 | 1,730 |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/monitoring/v3/uptime.proto
namespace Google\Cloud\Monitoring\V3;
// The dead `if (false)` block below only exists so IDEs and static analysers
// can see the legacy class name; it is never executed.
if (false) {
    /**
     * This class is deprecated. Use Google\Cloud\Monitoring\V3\UptimeCheckConfig\ContentMatcher\ContentMatcherOption instead.
     * @deprecated
     */
    class UptimeCheckConfig_ContentMatcher_ContentMatcherOption {}
}
// Force-load the replacement class so code that still references the old
// name keeps resolving, then emit a (silenced) deprecation notice.
class_exists(UptimeCheckConfig\ContentMatcher\ContentMatcherOption::class);
@trigger_error('Google\Cloud\Monitoring\V3\UptimeCheckConfig_ContentMatcher_ContentMatcherOption is deprecated and will be removed in the next major release. Use Google\Cloud\Monitoring\V3\UptimeCheckConfig\ContentMatcher\ContentMatcherOption instead', E_USER_DEPRECATED);
| googleapis/google-cloud-php-monitoring | src/V3/UptimeCheckConfig_ContentMatcher_ContentMatcherOption.php | PHP | apache-2.0 | 743 |
import sys, json
import random, os, subprocess
from twisted.internet import reactor
from twisted.web import server, resource
from twisted.web.static import File
from twisted.python import log
from datetime import datetime
import urllib, urllib2
import logging
import re
from sensei_client import *
PARSER_AGENT_PORT = 18888
client = SenseiClient("localhost",8080,'sensei')
#
# Main server resource
#
class Root(resource.Resource):
def render_GET(self, request):
"""
get response method for the root resource
localhost:/18888
"""
return 'Welcome to the REST API'
def getChild(self, name, request):
"""
We overrite the get child function so that we can handle invalid
requests
"""
print "root getchild"
request.setHeader("Access-Control-Allow-Origin", "*")
request.setHeader("Access-Control-Allow-Methods", "GET, POST")
request.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Accept")
if name == '':
return self
else:
if name in VIEWS.keys():
return VIEWS.get(name)#resource.Resource.getChild(self, name, request)
else:
return PageNotFoundError()
class PageNotFoundError(resource.Resource):
    """Fallback resource rendered for unknown URL paths."""

    def render_GET(self, request):
        return 'Page Not Found!'
class ParseBQL(resource.Resource):
def getChild(self, name, request):
"""
We overrite the get child function so that we can handle invalid
requests
"""
print "root getchild"
request.setHeader("Access-Control-Allow-Origin", "*")
request.setHeader("Access-Control-Allow-Methods", "GET, POST")
request.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Accept")
def render_OPTIONS(self,request):
# request.setHeader("Access-Control-Allow-Origin", "*")
# request.setHeader("Access-Control-Allow-Methods", "GET, POST")
# request.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Accept")
print "parse render options"
return "ok"
def render_GET(self, request):
"""Start a Sensei store."""
try:
info = request.args["info"][0]
info = json.loads(info.encode('utf-8'))
print ">>> info = ", info
variables = re.findall(r"\$[a-zA-Z0-9]+", info["bql"])
variables = list(set(variables))
info["auxParams"] = [ {"name": var[1:]} for var in variables ]
stmt = info["bql"]
req = SenseiRequest(stmt)
res = client.doQuery(req)
print "numhits: %d" % res.numHits
result = json.dumps(res.jsonMap)
print result
return json.dumps(
{
"ok": True,
"result": res.jsonMap
})
except ParseException as err:
print err
return json.dumps(
{
"ok": False,
"error": "Parsing error at location %s: %s" % (err.loc, err.msg)
})
except Exception as err:
print err
return "Error"
def render_POST(self, request):
return self.render_GET(request)
#to make the process of adding new views less static
# Registry mapping URL path segment -> resource instance; Root.getChild
# serves lookups out of this dict, so adding an entry exposes a new view.
VIEWS = {
    "parse": ParseBQL()
}
if __name__ == '__main__':
    # Smoke-print an example of the url-encoded payload this service expects.
    params = {}
    # params["info"] = """{"name": "nus_member", "description": "xxx xxxx", "urn": "urn:feed:nus:member:exp:a:$memberId", 'bql': 'select * from cars where memberId in ("$memberId")'}"""
    params["info"] = """{"name": "nus_member", "description": "xxx xxxx"}"""
    print urllib.urlencode(params)

    root = Root()
    #for viewName, className in VIEWS.items():
    #add the view to the web service
    # root.putChild(viewName, className)
    log.startLogging(sys.stdout)
    log.msg('Starting parser agent: %s' %str(datetime.now()))
    # NOTE(review): this rebinds the imported module name `server` to the
    # Site instance -- works, but shadows twisted.web.server from here on.
    server = server.Site(root)
    reactor.listenTCP(PARSER_AGENT_PORT, server)
    reactor.run()
| DataDog/sensei | clients/python/sensei/sensei_ql_proxy.py | Python | apache-2.0 | 3,753 |
/**
* @license
* Copyright 2018 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
// Declarative FOAM model: abstract base for time-based one-time password
// (TOTP) verification services.
foam.CLASS({
  package: 'foam.nanos.auth.twofactor',
  name: 'AbstractTOTPAuthService',
  extends: 'foam.nanos.auth.twofactor.AbstractOTPAuthService',
  abstract: true,

  documentation: 'Abstract time-based one-time password auth service',

  javaImports: [
    'java.util.Date'
  ],

  methods: [
    {
      // Verifies a client-supplied code against the shared key, accepting
      // codes from up to `window` time steps before/after the current one
      // to tolerate clock skew.  Any exception is treated as "invalid".
      name: 'checkCode',
      type: 'Boolean',
      args: [
        {
          name: 'key',
          type: 'Byte[]'
        },
        {
          name: 'code',
          type: 'Long'
        },
        {
          // Length of one time step, used to derive the counter from
          // wall-clock time.
          name: 'stepsize',
          type: 'Long'
        },
        {
          name: 'window',
          type: 'Integer'
        }
      ],
      javaCode:
      `try {
        long t = new Date().getTime() / stepsize;
        for (int i = -window; i <= window; ++i) {
          long hash = calculateCode(key, t + i);
          if (hash == code) {
            return true;
          }
        }
        return false;
      } catch (Throwable t) {
        return false;
      }`
    }
  ]
});
| jacksonic/vjlofvhjfgm | src/foam/nanos/auth/twofactor/AbstractTOTPAuthService.js | JavaScript | apache-2.0 | 1,063 |
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logging
import threading

from eventlet import corolocal
# Thread local storage.
_th_loc_storage = threading.local()
def _get_greenlet_local_storage():
    """Return the storage dict of the current greenlet, or None if unset.

    The per-thread attribute ``greenlet_locals`` maps greenlet id -> dict;
    it is created lazily on first access from each OS thread.
    """
    ident = corolocal.get_ident()
    locals_by_greenlet = getattr(_th_loc_storage, "greenlet_locals", None)
    if not locals_by_greenlet:
        locals_by_greenlet = {}
        _th_loc_storage.greenlet_locals = locals_by_greenlet
    return locals_by_greenlet.get(ident)
def has_thread_local(var_name):
    """Return truthily iff var_name is set for the current greenlet."""
    storage = _get_greenlet_local_storage()
    return storage and var_name in storage
def get_thread_local(var_name):
    """Return the value bound to var_name for this greenlet, or None."""
    if has_thread_local(var_name):
        return _get_greenlet_local_storage()[var_name]
    return None
def set_thread_local(var_name, val):
    """Bind val under var_name for the current greenlet.

    A falsy ``val`` removes the variable instead of storing it (callers pass
    None to clear); once a greenlet's storage dict becomes empty it is
    removed from the per-thread map so finished greenlets do not accumulate
    empty dicts.
    """
    if not val and has_thread_local(var_name):
        gl_storage = _get_greenlet_local_storage()

        # has_thread_local() guarantees the storage exists and holds var_name.
        del gl_storage[var_name]

        # BUG FIX: the original guard was `if gl_storage and len(gl_storage) == 0`,
        # which can never be true (a non-empty dict fails the len test and an
        # empty dict is falsy), so empty per-greenlet dicts were never
        # reclaimed from thread local storage.
        if len(gl_storage) == 0:
            del _th_loc_storage.greenlet_locals[corolocal.get_ident()]

    if val:
        gl_storage = _get_greenlet_local_storage()
        if not gl_storage:
            gl_storage =\
                _th_loc_storage.greenlet_locals[corolocal.get_ident()] = {}

        gl_storage[var_name] = val
def log_exec(logger, level=logging.INFO):
    """Decorator for logging function execution.

    By default, target function execution is logged with INFO level.

    :param logger: logger object whose ``log`` method receives the message.
    :param level: logging level to emit at (default: logging.INFO).
    """
    def _decorator(func):
        # functools.wraps preserves __name__, __module__ and __doc__ of the
        # wrapped function (the original only copied __doc__, which broke
        # introspection and produced misleading wrapper names in tracebacks).
        @functools.wraps(func)
        def _logged(*args, **kw):
            if len(args) > 0 or len(kw) > 0:
                params_repr = "[args=%s, kw=%s]" % (str(args), str(kw))
            else:
                params_repr = ""
            func_repr = ("Called method [name=%s, doc='%s', params=%s]" %
                         (func.__name__, func.__doc__, params_repr))
            logger.log(level, func_repr)
            return func(*args, **kw)
        return _logged

    return _decorator
| dmitryilyin/mistral | mistral/utils/__init__.py | Python | apache-2.0 | 2,765 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.server.websocket;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import org.infinispan.Cache;
import org.infinispan.notifications.Listener;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryCreated;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryModified;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryRemoved;
import org.infinispan.notifications.cachelistener.event.CacheEntryCreatedEvent;
import org.infinispan.notifications.cachelistener.event.CacheEntryEvent;
import org.infinispan.notifications.cachelistener.event.CacheEntryModifiedEvent;
import org.infinispan.notifications.cachelistener.event.CacheEntryRemovedEvent;
import org.infinispan.notifications.cachelistener.event.Event;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Cache listener.
* <p/>
* Used to notify websocket clients of cache entry updates.
*
* @author <a href="mailto:tom.fennelly@gmail.com">tom.fennelly@gmail.com</a>
*/
@Listener
public class CacheListener {
private List<ChannelNotifyParams> channels = new CopyOnWriteArrayList<ChannelNotifyParams>();
@CacheEntryCreated
public void cacheEntryCreated(CacheEntryCreatedEvent<Object, Object> event) {
notifyChannels(event, event.getType());
}
@CacheEntryModified
public void cacheEntryModified(CacheEntryModifiedEvent<Object, Object> event) {
notifyChannels(event, event.getType());
}
@CacheEntryRemoved
public void cacheEntryRemoved(CacheEntryRemovedEvent<Object, Object> event) {
notifyChannels(event, event.getType());
}
private void notifyChannels(CacheEntryEvent<Object, Object> event, Event.Type eventType) {
if(event.isPre()) {
return;
}
JSONObject jsonObject;
try {
Cache<Object, Object> cache = event.getCache();
Object key = event.getKey();
Object value;
switch(eventType) {
case CACHE_ENTRY_CREATED:
// TODO: Add optimization ... don't get from cache if non of the channels are interested in creates...
value = cache.get(key);
jsonObject = ChannelUtils.toJSON(key.toString(), value, cache.getName());
break;
case CACHE_ENTRY_MODIFIED:
value = ((CacheEntryModifiedEvent<Object, Object>)event).getValue();
jsonObject = ChannelUtils.toJSON(key.toString(), value, cache.getName());
break;
case CACHE_ENTRY_REMOVED:
jsonObject = ChannelUtils.toJSON(key.toString(), null, cache.getName());
break;
default:
return;
}
jsonObject.put("eventType", eventType.toString());
} catch (JSONException e) {
return;
}
String jsonString = jsonObject.toString();
for(ChannelNotifyParams channel : channels) {
if(channel.channel.isOpen() && channel.onEvents.contains(eventType)) {
if(channel.key != null) {
if(event.getKey().equals(channel.key) || channel.key.equals("*")) {
channel.channel.write(new TextWebSocketFrame(jsonString));
}
} else {
channel.channel.write(new TextWebSocketFrame(jsonString));
}
}
}
}
public void addChannel(ChannelNotifyParams channel) {
if(!channels.contains(channel)) {
channels.add(channel);
channel.channel.getCloseFuture().addListener(new ChannelCloseFutureListener());
}
}
public void removeChannel(ChannelNotifyParams channel) {
channels.remove(channel);
}
/**
 * Per-channel notification parameters: which websocket channel to notify,
 * for which cache key (or "*" for all keys), and on which event types.
 */
public static class ChannelNotifyParams {

    // Events subscribed to when the caller supplies no explicit event list.
    private static final String[] DEFAULT_EVENTS = {Event.Type.CACHE_ENTRY_MODIFIED.toString(), Event.Type.CACHE_ENTRY_REMOVED.toString()};

    private Channel channel;
    private String key;
    private List<Event.Type> onEvents = new ArrayList<Event.Type>();

    /**
     * @param channel  websocket channel to notify; must not be null.
     * @param key      cache key filter, "*" for all keys, or null.
     * @param onEvents event type names, or null to use the defaults.
     */
    public ChannelNotifyParams(Channel channel, String key, String[] onEvents) {
        if (channel == null) {
            throw new IllegalArgumentException("null 'channel' arg in constructor call.");
        }
        String[] onEventsSpec = onEvents;
        this.channel = channel;
        this.key = key;
        if (onEventsSpec == null) {
            onEventsSpec = DEFAULT_EVENTS;
        }
        for (String eventType : onEventsSpec) {
            try {
                this.onEvents.add(Event.Type.valueOf(eventType));
            } catch (RuntimeException e) {
                // Unknown event-type names are skipped (best effort). Ignore for now.
            }
        }
        // BUGFIX: use a null-safe comparison. Previously key.equals("*") threw
        // NullPointerException when onEvents was null and key was null (a null
        // key is otherwise legal - see equals()/hashCode()).
        if (onEvents == null && "*".equals(key)) {
            this.onEvents.add(Event.Type.CACHE_ENTRY_CREATED);
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof ChannelNotifyParams) {
            ChannelNotifyParams channelNotifyParams = (ChannelNotifyParams) obj;
            // Channels are compared by identity, keys by value (both-null is equal).
            if (channelNotifyParams.channel == channel) {
                if (key == null) {
                    return (channelNotifyParams.key == null);
                } else {
                    return key.equals(channelNotifyParams.key);
                }
            }
        }
        return false;
    }

    @Override
    public int hashCode() {
        // BUGFIX: must be consistent with equals(). The previous implementation
        // mixed in super.hashCode() (Object identity hash), so two distinct but
        // equal instances (same channel instance, equal keys) could report
        // different hash codes, breaking the equals/hashCode contract.
        // System.identityHashCode(channel) mirrors the identity comparison
        // used by equals().
        int result = System.identityHashCode(channel);
        if (key != null) {
            result = 31 * result + key.hashCode();
        }
        return result;
    }
}
/**
 * Removes all registrations for a websocket channel once that channel's
 * close future completes.
 */
private class ChannelCloseFutureListener implements ChannelFutureListener {
    @Override
    public void operationComplete(ChannelFuture channelCloseFuture) throws Exception {
        // BUGFIX: collect matches first, then remove. The previous code called
        // removeChannel() (which mutates 'channels') while iterating 'channels',
        // which throws ConcurrentModificationException for non-concurrent
        // collections. Two-phase removal is safe regardless of the collection
        // type backing 'channels'.
        List<ChannelNotifyParams> closed = new ArrayList<ChannelNotifyParams>();
        for (ChannelNotifyParams channel : channels) {
            if (channelCloseFuture.getChannel() == channel.channel) {
                closed.add(channel);
            }
        }
        for (ChannelNotifyParams channel : closed) {
            removeChannel(channel);
        }
    }
}
}
| nmldiegues/stibt | infinispan/server/websocket/src/main/java/org/infinispan/server/websocket/CacheListener.java | Java | apache-2.0 | 6,553 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/socket/socks_client_socket.h"
#include "base/basictypes.h"
#include "base/bind.h"
#include "base/compiler_specific.h"
#include "base/sys_byteorder.h"
#include "net/base/io_buffer.h"
#include "net/base/net_log.h"
#include "net/base/net_util.h"
#include "net/socket/client_socket_handle.h"
namespace net {
// Every SOCKS server requests a user-id from the client. It is optional
// and we send an empty string.
static const char kEmptyUserId[] = "";

// For SOCKS4, the client sends 8 bytes plus the size of the user-id.
static const unsigned int kWriteHeaderSize = 8;

// For SOCKS4 the server sends 8 bytes for acknowledgement.
static const unsigned int kReadHeaderSize = 8;

// Server Response codes for SOCKS (second byte of the reply).
static const uint8 kServerResponseOk = 0x5A;
static const uint8 kServerResponseRejected = 0x5B;
static const uint8 kServerResponseNotReachable = 0x5C;
static const uint8 kServerResponseMismatchedUserId = 0x5D;

// Request header constants: protocol version and the CONNECT command.
static const uint8 kSOCKSVersion4 = 0x04;
static const uint8 kSOCKSStreamRequest = 0x01;

// A struct holding the essential details of the SOCKS4 Server Request.
// The port in the header is stored in network byte order.
// The field layout mirrors the on-the-wire format byte for byte; the
// COMPILE_ASSERT below guards against padding changing the size.
struct SOCKS4ServerRequest {
  uint8 version;
  uint8 command;
  uint16 nw_port;
  uint8 ip[4];
};
COMPILE_ASSERT(sizeof(SOCKS4ServerRequest) == kWriteHeaderSize,
               socks4_server_request_struct_wrong_size);

// A struct holding details of the SOCKS4 Server Response.
// Also mirrors the wire format; the first byte is required to be zero.
struct SOCKS4ServerResponse {
  uint8 reserved_null;
  uint8 code;
  uint16 port;
  uint8 ip[4];
};
COMPILE_ASSERT(sizeof(SOCKS4ServerResponse) == kReadHeaderSize,
               socks4_server_response_struct_wrong_size);
// Constructs a SOCKS client socket on top of an already-connected transport
// held by |transport_socket|. |req_info| describes the destination host that
// will be resolved and tunneled; the handshake itself starts in Connect().
SOCKSClientSocket::SOCKSClientSocket(ClientSocketHandle* transport_socket,
                                     const HostResolver::RequestInfo& req_info,
                                     HostResolver* host_resolver)
    : transport_(transport_socket),
      next_state_(STATE_NONE),
      completed_handshake_(false),
      bytes_sent_(0),
      bytes_received_(0),
      host_resolver_(host_resolver),
      host_request_info_(req_info),
      net_log_(transport_socket->socket()->NetLog()) {
}

// Convenience overload that takes a raw StreamSocket and wraps it in a
// freshly allocated ClientSocketHandle owned by this object.
SOCKSClientSocket::SOCKSClientSocket(StreamSocket* transport_socket,
                                     const HostResolver::RequestInfo& req_info,
                                     HostResolver* host_resolver)
    : transport_(new ClientSocketHandle()),
      next_state_(STATE_NONE),
      completed_handshake_(false),
      bytes_sent_(0),
      bytes_received_(0),
      host_resolver_(host_resolver),
      host_request_info_(req_info),
      net_log_(transport_socket->NetLog()) {
  transport_->set_socket(transport_socket);
}

// Disconnects the transport (and cancels any in-flight resolution) before
// destruction.
SOCKSClientSocket::~SOCKSClientSocket() {
  Disconnect();
}
// Starts the SOCKS4 handshake: resolve the destination host, then write the
// request and read the 8-byte acknowledgement via the DoLoop() state machine.
// Returns OK if the handshake already completed, ERR_IO_PENDING if it will
// finish asynchronously (in which case |callback| is invoked later), or a
// net error code on synchronous failure.
int SOCKSClientSocket::Connect(const CompletionCallback& callback) {
  DCHECK(transport_.get());
  DCHECK(transport_->socket());
  DCHECK_EQ(STATE_NONE, next_state_);
  DCHECK(user_callback_.is_null());

  // If already connected, then just return OK.
  if (completed_handshake_)
    return OK;

  next_state_ = STATE_RESOLVE_HOST;
  net_log_.BeginEvent(NetLog::TYPE_SOCKS_CONNECT);

  int rv = DoLoop(OK);
  if (rv == ERR_IO_PENDING) {
    // Asynchronous completion: stash the callback for OnIOComplete().
    user_callback_ = callback;
  } else {
    net_log_.EndEventWithNetErrorCode(NetLog::TYPE_SOCKS_CONNECT, rv);
  }
  return rv;
}
// Tears down the connection: aborts any pending host resolution, disconnects
// the transport, and resets the handshake state machine so the object could
// be connected again.
void SOCKSClientSocket::Disconnect() {
  completed_handshake_ = false;
  host_resolver_.Cancel();
  transport_->socket()->Disconnect();

  // Reset other states to make sure they aren't mistakenly used later.
  // These are the states initialized by Connect().
  next_state_ = STATE_NONE;
  user_callback_.Reset();
}
// Connected means both: the SOCKS handshake finished AND the underlying
// transport is still connected.
bool SOCKSClientSocket::IsConnected() const {
  return completed_handshake_ && transport_->socket()->IsConnected();
}

// As IsConnected(), but additionally requires that no data is buffered on
// the transport.
bool SOCKSClientSocket::IsConnectedAndIdle() const {
  return completed_handshake_ && transport_->socket()->IsConnectedAndIdle();
}

// Returns the NetLog inherited from the transport socket at construction.
const BoundNetLog& SOCKSClientSocket::NetLog() const {
  return net_log_;
}
// The following methods simply delegate to the wrapped transport socket.
// A missing transport is a programming error (NOTREACHED); getters then fall
// back to a conservative default value.

void SOCKSClientSocket::SetSubresourceSpeculation() {
  if (!transport_.get() || !transport_->socket()) {
    NOTREACHED();
    return;
  }
  transport_->socket()->SetSubresourceSpeculation();
}

void SOCKSClientSocket::SetOmniboxSpeculation() {
  if (!transport_.get() || !transport_->socket()) {
    NOTREACHED();
    return;
  }
  transport_->socket()->SetOmniboxSpeculation();
}

bool SOCKSClientSocket::WasEverUsed() const {
  if (!transport_.get() || !transport_->socket()) {
    NOTREACHED();
    return false;
  }
  return transport_->socket()->WasEverUsed();
}

bool SOCKSClientSocket::UsingTCPFastOpen() const {
  if (!transport_.get() || !transport_->socket()) {
    NOTREACHED();
    return false;
  }
  return transport_->socket()->UsingTCPFastOpen();
}

bool SOCKSClientSocket::WasNpnNegotiated() const {
  if (!transport_.get() || !transport_->socket()) {
    NOTREACHED();
    return false;
  }
  return transport_->socket()->WasNpnNegotiated();
}

NextProto SOCKSClientSocket::GetNegotiatedProtocol() const {
  if (!transport_.get() || !transport_->socket()) {
    NOTREACHED();
    return kProtoUnknown;
  }
  return transport_->socket()->GetNegotiatedProtocol();
}

bool SOCKSClientSocket::GetSSLInfo(SSLInfo* ssl_info) {
  if (!transport_.get() || !transport_->socket()) {
    NOTREACHED();
    return false;
  }
  return transport_->socket()->GetSSLInfo(ssl_info);
}
// Read is called by the transport layer above to read. This can only be done
// if the SOCKS handshake is complete. After the handshake, reads pass
// straight through to the transport with no SOCKS framing.
int SOCKSClientSocket::Read(IOBuffer* buf, int buf_len,
                            const CompletionCallback& callback) {
  DCHECK(completed_handshake_);
  DCHECK_EQ(STATE_NONE, next_state_);
  DCHECK(user_callback_.is_null());

  return transport_->socket()->Read(buf, buf_len, callback);
}

// Write is called by the transport layer. This can only be done if the
// SOCKS handshake is complete. Like Read(), a plain pass-through.
int SOCKSClientSocket::Write(IOBuffer* buf, int buf_len,
                             const CompletionCallback& callback) {
  DCHECK(completed_handshake_);
  DCHECK_EQ(STATE_NONE, next_state_);
  DCHECK(user_callback_.is_null());

  return transport_->socket()->Write(buf, buf_len, callback);
}

// Buffer-size knobs are forwarded directly to the transport socket.
bool SOCKSClientSocket::SetReceiveBufferSize(int32 size) {
  return transport_->socket()->SetReceiveBufferSize(size);
}

bool SOCKSClientSocket::SetSendBufferSize(int32 size) {
  return transport_->socket()->SetSendBufferSize(size);
}
// Invokes (and clears) the user's Connect() completion callback with the
// final handshake result. |result| must be a definitive result, never
// ERR_IO_PENDING.
void SOCKSClientSocket::DoCallback(int result) {
  DCHECK_NE(ERR_IO_PENDING, result);
  DCHECK(!user_callback_.is_null());

  // Since Run() may result in Read being called,
  // clear user_callback_ up front.
  CompletionCallback c = user_callback_;
  user_callback_.Reset();
  DVLOG(1) << "Finished setting up SOCKS handshake";
  c.Run(result);
}
// Completion callback for every async sub-operation (resolve/read/write).
// Re-enters the state machine; if the handshake finished (success or error),
// closes out the NetLog event and notifies the caller.
void SOCKSClientSocket::OnIOComplete(int result) {
  DCHECK_NE(STATE_NONE, next_state_);
  int rv = DoLoop(result);
  if (rv != ERR_IO_PENDING) {
    net_log_.EndEventWithNetErrorCode(NetLog::TYPE_SOCKS_CONNECT, rv);
    DoCallback(rv);
  }
}
// Drives the handshake state machine. Each state handler sets next_state_
// before performing (possibly async) work; the loop runs until an operation
// returns ERR_IO_PENDING or the machine reaches STATE_NONE.
// |last_io_result| is the result of the operation that resumed the loop.
int SOCKSClientSocket::DoLoop(int last_io_result) {
  DCHECK_NE(next_state_, STATE_NONE);
  int rv = last_io_result;
  do {
    // Consume the pending state; the handler is responsible for setting the
    // next one (leaving STATE_NONE terminates the loop).
    State state = next_state_;
    next_state_ = STATE_NONE;
    switch (state) {
      case STATE_RESOLVE_HOST:
        DCHECK_EQ(OK, rv);
        rv = DoResolveHost();
        break;
      case STATE_RESOLVE_HOST_COMPLETE:
        rv = DoResolveHostComplete(rv);
        break;
      case STATE_HANDSHAKE_WRITE:
        DCHECK_EQ(OK, rv);
        rv = DoHandshakeWrite();
        break;
      case STATE_HANDSHAKE_WRITE_COMPLETE:
        rv = DoHandshakeWriteComplete(rv);
        break;
      case STATE_HANDSHAKE_READ:
        DCHECK_EQ(OK, rv);
        rv = DoHandshakeRead();
        break;
      case STATE_HANDSHAKE_READ_COMPLETE:
        rv = DoHandshakeReadComplete(rv);
        break;
      default:
        NOTREACHED() << "bad state";
        rv = ERR_UNEXPECTED;
        break;
    }
  } while (rv != ERR_IO_PENDING && next_state_ != STATE_NONE);
  return rv;
}
// Starts resolving the destination host. Returns OK/ERR_IO_PENDING/error
// from the resolver; async completion re-enters via OnIOComplete().
int SOCKSClientSocket::DoResolveHost() {
  next_state_ = STATE_RESOLVE_HOST_COMPLETE;
  // SOCKS4 only supports IPv4 addresses, so only try getting the IPv4
  // addresses for the target host.
  host_request_info_.set_address_family(ADDRESS_FAMILY_IPV4);
  return host_resolver_.Resolve(
      host_request_info_, &addresses_,
      base::Bind(&SOCKSClientSocket::OnIOComplete, base::Unretained(this)),
      net_log_);
}
// Handles completion of host resolution: on success, advance to the
// handshake-write state; on failure, surface the resolver error unchanged.
int SOCKSClientSocket::DoResolveHostComplete(int result) {
  // On failure we deliberately fail the request rather than automatically
  // falling back to SOCKS4a (since it can be confusing to see invalid IP
  // addresses being sent to the SOCKS4 server when it doesn't support 4A.)
  if (result == OK)
    next_state_ = STATE_HANDSHAKE_WRITE;
  return result;
}
// Builds the buffer that is to be sent to the server: the fixed 8-byte
// SOCKS4 request header followed by the (empty) user-id. Note that
// arraysize(kEmptyUserId) == 1, so the required NUL terminator of the
// user-id field is included in the payload.
const std::string SOCKSClientSocket::BuildHandshakeWriteBuffer() const {
  SOCKS4ServerRequest request;
  request.version = kSOCKSVersion4;
  request.command = kSOCKSStreamRequest;
  request.nw_port = base::HostToNet16(host_request_info_.port());

  DCHECK(!addresses_.empty());
  const IPEndPoint& endpoint = addresses_.front();

  // We disabled IPv6 results when resolving the hostname, so none of the
  // results in the list will be IPv6.
  // TODO(eroman): we only ever use the first address in the list. It would be
  //               more robust to try all the IP addresses we have before
  //               failing the connect attempt.
  CHECK_EQ(ADDRESS_FAMILY_IPV4, endpoint.GetFamily());
  CHECK_LE(endpoint.address().size(), sizeof(request.ip));
  memcpy(&request.ip, &endpoint.address()[0], endpoint.address().size());

  DVLOG(1) << "Resolved Host is : " << endpoint.ToStringWithoutPort();

  std::string handshake_data(reinterpret_cast<char*>(&request),
                             sizeof(request));
  handshake_data.append(kEmptyUserId, arraysize(kEmptyUserId));

  return handshake_data;
}
// Writes the SOCKS handshake data to the underlying socket connection.
// Builds the request on first entry, then writes the still-unsent suffix;
// partial writes loop back here via DoHandshakeWriteComplete().
int SOCKSClientSocket::DoHandshakeWrite() {
  next_state_ = STATE_HANDSHAKE_WRITE_COMPLETE;

  if (buffer_.empty()) {
    buffer_ = BuildHandshakeWriteBuffer();
    bytes_sent_ = 0;
  }

  int handshake_buf_len = buffer_.size() - bytes_sent_;
  DCHECK_GT(handshake_buf_len, 0);
  handshake_buf_ = new IOBuffer(handshake_buf_len);
  memcpy(handshake_buf_->data(), &buffer_[bytes_sent_],
         handshake_buf_len);
  return transport_->socket()->Write(
      handshake_buf_, handshake_buf_len,
      base::Bind(&SOCKSClientSocket::OnIOComplete, base::Unretained(this)));
}
// Accounts for bytes written so far; loops back to STATE_HANDSHAKE_WRITE on
// a partial write, or advances to the read phase once the whole request has
// been sent. |result| is the byte count (or net error) from the last Write.
int SOCKSClientSocket::DoHandshakeWriteComplete(int result) {
  if (result < 0)
    return result;

  // We ignore the case when result is 0, since the underlying Write
  // may return spurious writes while waiting on the socket.

  bytes_sent_ += result;
  if (bytes_sent_ == buffer_.size()) {
    next_state_ = STATE_HANDSHAKE_READ;
    // Reuse buffer_ to accumulate the server's response next.
    buffer_.clear();
  } else if (bytes_sent_ < buffer_.size()) {
    next_state_ = STATE_HANDSHAKE_WRITE;
  } else {
    // bytes_sent_ overshooting the buffer should be impossible.
    return ERR_UNEXPECTED;
  }

  return OK;
}
// Issues a read for the remainder of the fixed-size (8-byte) server
// acknowledgement; partial reads loop via DoHandshakeReadComplete().
int SOCKSClientSocket::DoHandshakeRead() {
  next_state_ = STATE_HANDSHAKE_READ_COMPLETE;

  if (buffer_.empty()) {
    // First read of the response (buffer_ was cleared after the write phase).
    bytes_received_ = 0;
  }

  int handshake_buf_len = kReadHeaderSize - bytes_received_;
  handshake_buf_ = new IOBuffer(handshake_buf_len);
  return transport_->socket()->Read(handshake_buf_, handshake_buf_len,
                                    base::Bind(&SOCKSClientSocket::OnIOComplete,
                                               base::Unretained(this)));
}
// Accumulates the server's 8-byte SOCKS4 reply, then validates it and maps
// the server response code to a net error (or marks the handshake complete).
// |result| is the byte count (or net error) from the last Read.
int SOCKSClientSocket::DoHandshakeReadComplete(int result) {
  if (result < 0)
    return result;

  // The underlying socket closed unexpectedly.
  if (result == 0)
    return ERR_CONNECTION_CLOSED;

  // More data than the fixed-size reply would be a protocol violation.
  if (bytes_received_ + result > kReadHeaderSize) {
    // TODO(eroman): Describe failure in NetLog.
    return ERR_SOCKS_CONNECTION_FAILED;
  }

  buffer_.append(handshake_buf_->data(), result);
  bytes_received_ += result;
  if (bytes_received_ < kReadHeaderSize) {
    // Keep reading until the full 8-byte reply has arrived.
    next_state_ = STATE_HANDSHAKE_READ;
    return OK;
  }

  const SOCKS4ServerResponse* response =
      reinterpret_cast<const SOCKS4ServerResponse*>(buffer_.data());

  // The first reply byte must be zero per the SOCKS4 protocol.
  if (response->reserved_null != 0x00) {
    LOG(ERROR) << "Unknown response from SOCKS server.";
    return ERR_SOCKS_CONNECTION_FAILED;
  }

  switch (response->code) {
    case kServerResponseOk:
      completed_handshake_ = true;
      return OK;
    case kServerResponseRejected:
      LOG(ERROR) << "SOCKS request rejected or failed";
      return ERR_SOCKS_CONNECTION_FAILED;
    case kServerResponseNotReachable:
      LOG(ERROR) << "SOCKS request failed because client is not running "
                 << "identd (or not reachable from the server)";
      return ERR_SOCKS_CONNECTION_HOST_UNREACHABLE;
    case kServerResponseMismatchedUserId:
      LOG(ERROR) << "SOCKS request failed because client's identd could "
                 << "not confirm the user ID string in the request";
      return ERR_SOCKS_CONNECTION_FAILED;
    default:
      LOG(ERROR) << "SOCKS server sent unknown response";
      return ERR_SOCKS_CONNECTION_FAILED;
  }

  // Note: we ignore the last 6 bytes as specified by the SOCKS protocol
}
// Address accessors delegate to the transport; note the peer address is that
// of the SOCKS proxy, not the tunneled destination.
int SOCKSClientSocket::GetPeerAddress(IPEndPoint* address) const {
  return transport_->socket()->GetPeerAddress(address);
}

int SOCKSClientSocket::GetLocalAddress(IPEndPoint* address) const {
  return transport_->socket()->GetLocalAddress(address);
}
} // namespace net
| plxaye/chromium | src/net/socket/socks_client_socket.cc | C++ | apache-2.0 | 13,728 |
package lock
import (
"github.com/containers/podman/v3/libpod/lock/file"
)
// FileLockManager manages file-based locks.
//
// NOTE(review): the original comment said "shared memory locks", which
// appears to be a copy/paste from the SHM lock manager; this type wraps
// file.FileLocks, so the locks here are file-backed.
type FileLockManager struct {
	locks *file.FileLocks
}
// NewFileLockManager makes a new FileLockManager at the specified directory.
func NewFileLockManager(lockPath string) (Manager, error) {
	locks, err := file.CreateFileLock(lockPath)
	if err != nil {
		return nil, err
	}
	return &FileLockManager{locks: locks}, nil
}
// OpenFileLockManager opens an existing FileLockManager at the specified directory.
func OpenFileLockManager(path string) (Manager, error) {
	locks, err := file.OpenFileLock(path)
	if err != nil {
		return nil, err
	}
	return &FileLockManager{locks: locks}, nil
}
// AllocateLock allocates a new lock from the manager.
func (m *FileLockManager) AllocateLock() (Locker, error) {
	id, err := m.locks.AllocateLock()
	if err != nil {
		return nil, err
	}
	return &FileLock{lockID: id, manager: m}, nil
}
// AllocateAndRetrieveLock allocates the lock with the given ID and returns it.
// If the lock is already allocated, error.
func (m *FileLockManager) AllocateAndRetrieveLock(id uint32) (Locker, error) {
	if err := m.locks.AllocateGivenLock(id); err != nil {
		return nil, err
	}
	return &FileLock{lockID: id, manager: m}, nil
}
// RetrieveLock retrieves a lock from the manager given its ID.
// No allocation check is performed; the caller is trusted to pass a valid ID.
func (m *FileLockManager) RetrieveLock(id uint32) (Locker, error) {
	return &FileLock{lockID: id, manager: m}, nil
}
// FreeAllLocks frees all locks in the manager.
// This function is DANGEROUS. Please read the full comment in locks.go before
// trying to use it.
func (m *FileLockManager) FreeAllLocks() error {
	return m.locks.DeallocateAllLocks()
}

// FileLock is an individual file-based lock.
// (NOTE(review): the original comment said "shared memory lock"; this lock is
// backed by the file package, not SHM.)
type FileLock struct {
	lockID  uint32
	manager *FileLockManager
}

// ID returns the ID of the lock.
func (l *FileLock) ID() uint32 {
	return l.lockID
}

// Lock acquires the lock.
// Panics on failure, mirroring sync.Mutex semantics: a lock that cannot be
// acquired indicates an unrecoverable runtime error.
func (l *FileLock) Lock() {
	if err := l.manager.locks.LockFileLock(l.lockID); err != nil {
		panic(err.Error())
	}
}

// Unlock releases the lock.
// Panics on failure, for the same reason as Lock.
func (l *FileLock) Unlock() {
	if err := l.manager.locks.UnlockFileLock(l.lockID); err != nil {
		panic(err.Error())
	}
}

// Free releases the lock, allowing it to be reused.
func (l *FileLock) Free() error {
	return l.manager.locks.DeallocateLock(l.lockID)
}
| kubernetes-incubator/ocid | vendor/github.com/containers/podman/v3/libpod/lock/file_lock_manager.go | GO | apache-2.0 | 2,518 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the distributed values library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import itertools
import os
from absl.testing import parameterized
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.distribute import strategy_combinations
from tensorflow.python.distribute import tpu_strategy
from tensorflow.python.distribute import values
from tensorflow.python.distribute.cluster_resolver import tpu_cluster_resolver
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.saved_model.model_utils import mode_keys
from tensorflow.python.tpu import tpu_strategy_util
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training.tracking import util as trackable_utils
from tensorflow.python.util import nest
class DistributedValuesTest(test.TestCase):
  """Tests construction and per-device access of values.DistributedValues."""

  def testGetEager(self):
    with ops.device("/device:CPU:0"):
      one = constant_op.constant(1)
      two = constant_op.constant(2)
      device_map = values.ReplicaDeviceMap(("/device:CPU:0", "/device:GPU:0"))
      v = values.DistributedValues(device_map, (one, two))
      self.assertEqual(two, v.get("/device:GPU:0"))
      # get() with no argument returns the value for the current device.
      self.assertEqual(one, v.get())
      # Asking for a device not in the map raises.
      with self.assertRaises(ValueError):
        self.assertIsNone(v.get("/device:GPU:2"))

  def testGetGraph(self):
    # Same as testGetEager, but under a graph-mode default graph.
    with context.graph_mode(), \
        ops.Graph().as_default(), \
        ops.device("/device:CPU:0"):
      one = constant_op.constant(1)
      two = constant_op.constant(2)
      device_map = values.ReplicaDeviceMap(("/device:CPU:0", "/device:GPU:0"))
      v = values.DistributedValues(device_map, (one, two))
      self.assertEqual(two, v.get("/device:GPU:0"))
      self.assertEqual(one, v.get())
      with self.assertRaises(ValueError):
        self.assertIsNone(v.get("/device:GPU:2"))

  def testCanonicalization(self):
    # All spellings of the CPU device should canonicalize to the full form.
    canonical_cpu = ("/job:localhost/replica:0/task:0/device:CPU:0",)
    v = values.DistributedValues(values.SingleDeviceMap(""), (42,))
    self.assertEqual(canonical_cpu, v.devices)
    v = values.DistributedValues(values.SingleDeviceMap("/device:CPU:0"), (42,))
    self.assertEqual(canonical_cpu, v.devices)
    v = values.DistributedValues(values.SingleDeviceMap("/cpu:0"), (42,))
    self.assertEqual(canonical_cpu, v.devices)
    v = values.DistributedValues(values.SingleDeviceMap("/CPU:0"), (42,))
    self.assertEqual(canonical_cpu, v.devices)

  def testIsTensorLike(self):
    # All components are tensors, so the container is tensor-like.
    with context.graph_mode(), \
        ops.Graph().as_default(), \
        ops.device("/device:CPU:0"):
      one = constant_op.constant(1)
      two = constant_op.constant(2)
      device_map = values.ReplicaDeviceMap(("/device:CPU:0", "/device:GPU:0"))
      v = values.DistributedValues(device_map, (one, two))
      self.assertEqual(two, v.get("/device:GPU:0"))
      self.assertEqual(one, v.get())
      self.assertTrue(v.is_tensor_like)
      self.assertTrue(tensor_util.is_tensor(v))

  def testIsTensorLikeWithAConstant(self):
    # One component is a plain Python float, so the container is not
    # tensor-like.
    with context.graph_mode(), \
        ops.Graph().as_default(), \
        ops.device("/device:CPU:0"):
      one = constant_op.constant(1)
      two = 2.0
      device_map = values.ReplicaDeviceMap(("/device:CPU:0", "/device:GPU:0"))
      v = values.DistributedValues(device_map, (one, two))
      self.assertEqual(two, v.get("/device:GPU:0"))
      self.assertEqual(one, v.get())
      self.assertFalse(v.is_tensor_like)
      self.assertFalse(tensor_util.is_tensor(v))
class DistributedDelegateTest(test.TestCase):
  """Tests that DistributedDelegate forwards attributes and operators to the
  current device's underlying value."""

  @test_util.run_in_graph_and_eager_modes
  def testGetAttr(self):
    with ops.device("/device:CPU:0"):

      class Foo(object):

        def __init__(self, x):
          self.x = x

      device_map = values.ReplicaDeviceMap(("/device:CPU:0", "/device:GPU:0"))
      v = values.DistributedDelegate(device_map, (Foo(7), Foo(8)))
      # Attribute access delegates to the current-device value (Foo(7)).
      self.assertEqual(7, v.x)
      with self.assertRaises(AttributeError):
        _ = v.y

  @test_util.run_in_graph_and_eager_modes
  def testOperatorOverride(self):
    with ops.device("/device:CPU:0"):
      device_map = values.ReplicaDeviceMap(("/device:CPU:0", "/device:GPU:0"))
      v = values.DistributedDelegate(device_map, (7, 8))
      # v should act like int(7).
      self.assertEqual(8, v + 1)
      self.assertEqual(10, 3 + v)
      self.assertEqual(14, v + v)
      self.assertEqual(5, v - 2)
      self.assertEqual(6, 13 - v)
      self.assertEqual(0, v - v)
      self.assertEqual(14, v * 2)
      self.assertEqual(21, 3 * v)
      self.assertEqual(49, v * v)
      self.assertEqual(3.5, v / 2)
      self.assertEqual(1.5, 10.5 / v)
      self.assertEqual(3, v // 2)
      self.assertEqual(2, 15 // v)
      self.assertEqual(1, v % 2)
      self.assertEqual(2, 16 % v)
      self.assertTrue(v < 12)
      self.assertTrue(v <= 12)
      self.assertFalse(v > 12)
      self.assertFalse(v >= 12)
      self.assertFalse(12 < v)
      self.assertFalse(12 <= v)
      self.assertTrue(12 > v)
      self.assertTrue(12 >= v)
      self.assertEqual(3, v & 3)
      self.assertEqual(3, 11 & v)
      self.assertEqual(15, v | 8)
      self.assertEqual(23, 16 | v)
      self.assertEqual(4, v ^ 3)
      self.assertEqual(12, 11 ^ v)
      self.assertEqual(343, pow(v, 3))
      self.assertEqual(3, pow(v, 3, 10))
      self.assertEqual(128, pow(2, v))
      self.assertEqual(-7, -v)
      self.assertEqual(~7, ~v)
      self.assertEqual(7, abs(v))
      # Indexing is intentionally not delegated.
      with self.assertRaises(TypeError):
        _ = v[2]
def _device_str(d):
return "/device:GPU:" + str(d)
def _nested_value(d):
return ("a" + d, ["b" + d, {"c": "d" + d, "e": "f" + d}, "g" + d], "h" + d)
def _make_mirrored_val(init_val=5.0):
  """Builds a values.Mirrored holding `init_val` as a constant on GPU:0 and
  CPU:0."""
  v = []
  devices = ["/device:GPU:0", "/device:CPU:0"]
  for d, _ in zip(devices, ["v", "v/replica"]):
    with ops.device(d):
      v.append(constant_op.constant(init_val))
  device_map = values.ReplicaDeviceMap(devices)
  mirrored = values.Mirrored(device_map, v)
  return mirrored


def _make_mirrored():
  """Builds a MirroredVariable over GPU:0/CPU:0 with SUM aggregation.

  Returns a (component_variables, device_map, mirrored_variable) triple. The
  components deliberately get different initial values (1. and 2.) so tests
  can tell them apart.
  """
  v = []
  devices = ["/device:GPU:0", "/device:CPU:0"]
  for d, n, init in zip(devices, ["v", "v/replica"], [1., 2.]):
    with ops.device(d):
      v.append(variable_scope.get_variable(
          name=n, initializer=init, use_resource=True))
  device_map = values.ReplicaDeviceMap(devices)
  mirrored = values.MirroredVariable(None, device_map, v,
                                     variable_scope.VariableAggregation.SUM)
  return v, device_map, mirrored
class RegroupAndSelectDeviceTest(test.TestCase):
  """Tests values.regroup / select_replica / select_device_mirrored round
  trips over nested structures."""

  def _is_per_replica(self, result, expected, klass=values.PerReplica):
    # Asserts `result` is a `klass` whose per-device components equal
    # `expected` (indexed by GPU device number).
    self.assertIsInstance(result, klass)
    # We canonicalize the devices to match the device strings returned
    # by PerReplica, which also does device string canonicalization.
    devices = [device_util.canonicalize(_device_str(i))
               for i in range(len(expected))]
    self.assertEqual(set(devices), set(result.devices))
    for i, d in enumerate(devices):
      self.assertEqual(expected[i], result.get(d))
      self.assertEqual(expected[i], result.get(_device_str(i)))

  def testNested(self):
    device_map = values.ReplicaDeviceMap((_device_str(0), _device_str(1)))
    result = values.regroup(device_map,
                            (_nested_value("1"), _nested_value("2")))
    self.assertIsInstance(result, tuple)
    self.assertEqual(3, len(result))
    self._is_per_replica(result[0], ["a1", "a2"])
    self._is_per_replica(result[2], ["h1", "h2"])

    self.assertIsInstance(result[1], list)
    self.assertEqual(3, len(result[1]))
    self._is_per_replica(result[1][0], ["b1", "b2"])
    self._is_per_replica(result[1][2], ["g1", "g2"])

    self.assertIsInstance(result[1][1], dict)
    self.assertEqual(set(["c", "e"]), set(result[1][1].keys()))
    self._is_per_replica(result[1][1]["c"], ["d1", "d2"])
    self._is_per_replica(result[1][1]["e"], ["f1", "f2"])

    # Also test that we can undo the merge using select_replica()
    self.assertEqual(_nested_value("1"),
                     values.select_replica(0, result))
    self.assertEqual(_nested_value("2"),
                     values.select_replica(1, result))
    # select_device_mirrored() should fail due to non-mirrored values
    with self.assertRaises(TypeError):
      values.select_device_mirrored(_device_str(0), result)
    with self.assertRaises(TypeError):
      values.select_device_mirrored(_device_str(1), result)

  def testWrapClass(self):
    # Normally a mirrored value would be the same across devices, but
    # for a test it is convenient to be able to tell the values apart.
    device_map = values.ReplicaDeviceMap((_device_str(0), _device_str(1)))
    result = values.regroup(device_map,
                            (_nested_value("1"), _nested_value("2")),
                            values.Mirrored)
    self.assertIsInstance(result, tuple)
    self.assertEqual(3, len(result))
    self._is_per_replica(result[0], ["a1", "a2"], values.Mirrored)
    self._is_per_replica(result[2], ["h1", "h2"], values.Mirrored)

    self.assertIsInstance(result[1], list)
    self.assertEqual(3, len(result[1]))
    self._is_per_replica(result[1][0], ["b1", "b2"], values.Mirrored)
    self._is_per_replica(result[1][2], ["g1", "g2"], values.Mirrored)

    self.assertIsInstance(result[1][1], dict)
    self.assertEqual(set(["c", "e"]), set(result[1][1].keys()))
    self._is_per_replica(result[1][1]["c"], ["d1", "d2"], values.Mirrored)
    self._is_per_replica(result[1][1]["e"], ["f1", "f2"], values.Mirrored)

    # Also test that we can undo the merge using select_replica()
    self.assertEqual(_nested_value("1"),
                     values.select_replica(0, result))
    self.assertEqual(_nested_value("2"),
                     values.select_replica(1, result))
    # Values are marked as mirrored, so select_device_mirrored() is allowed.
    self.assertEqual(_nested_value("1"),
                     values.select_device_mirrored(_device_str(0), result))
    self.assertEqual(_nested_value("2"),
                     values.select_device_mirrored(_device_str(1), result))

  def testWrapAListOfTwoTuples(self):
    device_map = values.ReplicaDeviceMap((_device_str(0), _device_str(1)))
    result = values.regroup(device_map, [("1", "2"), ("3", "4")])
    self.assertIsInstance(result, tuple)
    self.assertEqual(2, len(result))
    self._is_per_replica(result[0], ("1", "3"), values.PerReplica)
    self._is_per_replica(result[1], ("2", "4"), values.PerReplica)

  def testMirroredContainer(self):
    if context.num_gpus() < 1 and context.executing_eagerly():
      self.skipTest("A GPU is not available for this test in eager mode.")
    v, device_map, mirrored = _make_mirrored()
    # Regrouping the components of an existing MirroredVariable should return
    # the very same MirroredVariable object.
    result = values.regroup(device_map, v)
    self.assertIs(mirrored, result)

  def testSameId(self):
    # The identical object in every replica should pass through regroup()
    # unwrapped (by identity), not be turned into a PerReplica.
    foo = object()
    device_map = values.ReplicaDeviceMap((_device_str(0), _device_str(1)))
    result = values.regroup(device_map, (("a", foo), ("b", foo)))
    self.assertIsInstance(result, tuple)
    self.assertEqual(2, len(result))
    self._is_per_replica(result[0], ["a", "b"])
    self.assertIs(foo, result[1])

    # Test select_replica(), should undo the merge done by regroup().
    result_0 = values.select_replica(0, result)
    self.assertIsInstance(result_0, tuple)
    self.assertEqual(2, len(result_0))
    self.assertEqual("a", result_0[0])
    self.assertIs(foo, result_0[1])
    result_1 = values.select_replica(1, result)
    self.assertIsInstance(result_1, tuple)
    self.assertEqual(2, len(result_1))
    self.assertEqual("b", result_1[0])
    self.assertIs(foo, result_1[1])

  def testOneDevice(self):
    device_map = values.ReplicaDeviceMap((_device_str(0),))
    result = values.regroup(device_map, (_nested_value("1"),))
    # On one device regroup() and select_replica() are basically identity.
    self.assertEqual(_nested_value("1"), result)
    self.assertEqual(_nested_value("1"),
                     values.select_replica(0, result))

    # The one exception has to do with MirroredVariables.
    d = "/device:CPU:0"
    with ops.device(d):
      v = variable_scope.get_variable(
          name="v", initializer=1., use_resource=True)
      device_map = values.ReplicaDeviceMap((d,))
    mirrored = values.MirroredVariable(None, device_map, (v,),
                                       variable_scope.VariableAggregation.SUM)
    result = values.regroup(device_map, (v,))
    self.assertIs(mirrored, result)

  def testNamedTuple(self):

    # We include toy implementations of Scaffold and EstimatorSpec to
    # avoid a dependency on Estimator here.

    class Scaffold(object):
      pass

    class EstimatorSpec(collections.namedtuple(
        "EstimatorSpec", ["mode", "loss", "train_op", "scaffold"])):

      def __new__(cls, mode, loss, train_op, scaffold=None):
        return super(EstimatorSpec, cls).__new__(
            cls, mode=mode, loss=loss, train_op=train_op,
            scaffold=scaffold or Scaffold())

    with context.graph_mode(), ops.Graph().as_default():
      devices = []
      created_estimator_specs = []

      for device_id in range(3):
        spec = EstimatorSpec(
            mode=mode_keys.EstimatorModeKeys.TRAIN,
            loss=constant_op.constant(device_id / 2),
            train_op=array_ops.identity(constant_op.constant(device_id)))
        devices.append(_device_str(device_id))
        created_estimator_specs.append(spec)

      device_map = values.ReplicaDeviceMap(devices)
      merged_estimator_spec = values.regroup(
          device_map, created_estimator_specs)

      # regroup() should reconstruct the namedtuple type with per-replica
      # fields inside.
      self.assertIsInstance(merged_estimator_spec, EstimatorSpec)
      self.assertEqual(mode_keys.EstimatorModeKeys.TRAIN,
                       merged_estimator_spec.mode)
      for device_id in range(3):
        d = _device_str(device_id)
        self.assertEqual(created_estimator_specs[device_id].loss,
                         merged_estimator_spec.loss.get(d))
        self.assertEqual(created_estimator_specs[device_id].train_op,
                         merged_estimator_spec.train_op.get(d))
        # Scaffold is populated by `EstimatorSpec.__new__`.
        self.assertEqual(created_estimator_specs[device_id].scaffold,
                         merged_estimator_spec.scaffold.get(d))
        self.assertIsInstance(created_estimator_specs[device_id].scaffold,
                              Scaffold)
        # Also test that we can undo the merge using select_replica()
        self.assertEqual(created_estimator_specs[device_id],
                         values.select_replica(device_id,
                                               merged_estimator_spec))
class MirroredVariableTest(test.TestCase, parameterized.TestCase):
  """Tests for MirroredVariable properties and save/restore behavior."""

  # Soft placement lets GPU-targeted variables fall back to CPU in graph mode
  # when no GPU is present (see comments in the save/restore tests below).
  config = config_pb2.ConfigProto()
  config.allow_soft_placement = True
  @test_util.run_in_graph_and_eager_modes(config=config)
  def testProperties(self):
    if context.num_gpus() < 1 and context.executing_eagerly():
      self.skipTest("A GPU is not available for this test in eager mode.")
    v, _, mirrored = _make_mirrored()

    # The mirrored wrapper reports the primary component's metadata.
    self.assertEqual(v[0].name, mirrored.name)
    self.assertEqual(v[0].dtype, mirrored.dtype)
    self.assertEqual(v[0].shape, mirrored.shape)

  @test_util.run_in_graph_and_eager_modes(config=config)
  def testVariableOnAnotherDevice(self):
    v = variable_scope.get_variable(
        name="v", initializer=[1.], use_resource=True)
    device_map = values.ReplicaDeviceMap(("/job:foo/device:CPU:0",))
    mirrored = values.MirroredVariable(None, device_map, (v,),
                                      variable_scope.VariableAggregation.MEAN)

    self.assertEqual(v.name, mirrored.name)
    self.assertEqual(v.dtype, mirrored.dtype)
    self.assertEqual(v.shape, mirrored.shape)
  def _assign_mirrored(self, devices, v, new):
    # Assigns new[i] to component v[i] on devices[i].
    for d, var, n in zip(devices, v, new):
      with ops.device(d):
        self.evaluate(var.assign(n))

  def _save_return_saver(self, sess, var):
    # Saves `var` to a checkpoint in a temp dir; returns (save_path, saver).
    saver = saver_lib.Saver(var_list=[var])
    test_dir = self.get_temp_dir()
    prefix = os.path.join(test_dir, "ckpt")
    return saver.save(sess, prefix), saver

  def _save(self, sess, var):
    # Like _save_return_saver, but discards the saver.
    save_path, _ = self._save_return_saver(sess, var)
    return save_path
  @test_util.run_in_graph_and_eager_modes(config=config)
  def testSaveAndRestoreMirroredOneGraph(self):
    if context.num_gpus() < 1 and context.executing_eagerly():
      # Graph mode can work without GPU because the Placer "moves" the
      # variable to a CPU. In other words, if there is no GPU available, but
      # user requested to create a variable on GPU, Placer will ignore the
      # user request and assign the VarHandleOp to CPU. This requires
      # soft_placement, which is on by default.
      self.skipTest("A GPU is not available for this test in eager mode.")

    with self.cached_session(config=self.config) as sess:
      v, device_map, mirrored = _make_mirrored()
      devices = device_map.all_devices

      # Overwrite the initial values.
      self._assign_mirrored(devices, v, [3., 4.])

      # Saves the current value of v[0], 3.
      save_path, saver = self._save_return_saver(sess, mirrored)

      # Change the values between save and restore.
      self._assign_mirrored(devices, v, [5., 6.])

      # Restores the saved value of 3. to both variables.
      saver.restore(sess, save_path)
      self.assertEqual([3., 3.], self.evaluate([v[0], v[1]]))
def _save_mirrored(self):
"""Save variables with mirroring, returns save_path."""
with self.session(graph=ops.Graph()) as sess:
v, device_map, mirrored = _make_mirrored()
devices = device_map.all_devices
# Overwrite the initial values.
self._assign_mirrored(devices, v, [3., 4.])
# Saves the current value of v[0], 3.
save_path = self._save(sess, mirrored)
# Change the values between save and restore.
self._assign_mirrored(devices, v, [5., 6.])
return save_path
def _save_normal(self):
"""Save variables without mirroring, returns save_path."""
with self.session(graph=ops.Graph()) as sess:
var = variable_scope.get_variable(
name="v", initializer=1., use_resource=True)
# Overwrite the initial value.
self.evaluate(var.assign(3.))
# Saves the current value of var, 3.
save_path = self._save(sess, var)
# Change the values between save and restore.
self.evaluate(var.assign(5.))
return save_path
def _restore_normal(self, save_path):
"""Restore to variables without mirroring in a fresh graph."""
with self.session(graph=ops.Graph()) as sess:
var = variable_scope.get_variable(
name="v", initializer=7., use_resource=True)
# Overwrite the initial value.
self.evaluate(var.assign(8.))
# Restores the saved value of 3. to `var`.
saver = saver_lib.Saver(var_list=[var])
saver.restore(sess, save_path)
self.assertEqual(3., self.evaluate(var))
def _restore_mirrored(self, save_path):
"""Restore to variables with mirroring in a fresh graph."""
with self.session(graph=ops.Graph()) as sess:
v, device_map, mirrored = _make_mirrored()
devices = device_map.all_devices
# Overwrite the initial values.
self._assign_mirrored(devices, v, [7., 8.])
# Restores the saved value of 3. to both variables.
saver = saver_lib.Saver(var_list=[mirrored])
saver.restore(sess, save_path)
self.assertEqual([3., 3.], self.evaluate([v[0], v[1]]))
@test_util.run_in_graph_and_eager_modes(config=config)
def testSaveMirroredRestoreMirrored(self):
if context.num_gpus() < 1 and context.executing_eagerly():
# Graph mode can work without GPU because the Placer "moves" the
# variable to a CPU. In other words, if there is no GPU available, but
# user requested to create a variable on GPU, Placer will ignore the
# user request and assign the VarHandleOp to CPU. This requires
# soft_placement, which is on by default.
self.skipTest("A GPU is not available for this test in eager mode.")
save_path = self._save_mirrored()
self._restore_mirrored(save_path)
@test_util.run_in_graph_and_eager_modes(config=config)
def testSaveMirroredRestoreNormal(self):
if context.num_gpus() < 1 and context.executing_eagerly():
# Graph mode can work without GPU because the Placer "moves" the
# variable to a CPU. In other words, if there is no GPU available, but
# user requested to create a variable on GPU, Placer will ignore the
# user request and assign the VarHandleOp to CPU. This requires
# soft_placement, which is on by default.
self.skipTest("A GPU is not available for this test in eager mode.")
save_path = self._save_mirrored()
self._restore_normal(save_path)
@test_util.run_in_graph_and_eager_modes(config=config)
def testSaveNormalRestoreMirrored(self):
if context.num_gpus() < 1 and context.executing_eagerly():
# Graph mode can work without GPU because the Placer "moves" the
# variable to a CPU. In other words, if there is no GPU available, but
# user requested to create a variable on GPU, Placer will ignore the
# user request and assign the VarHandleOp to CPU. This requires
# soft_placement, which is on by default.
self.skipTest("A GPU is not available for this test in eager mode.")
save_path = self._save_normal()
self._restore_mirrored(save_path)
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_one_gpu,
],
mode=["graph"]))
def testFetchAMirroredVariable(self, distribution):
with self.session(graph=ops.Graph()) as sess, distribution.scope():
with ops.device("/device:GPU:0"):
v = variable_scope.get_variable(
name="v", initializer=1., use_resource=True)
mirrored = values.MirroredVariable(
distribution, values.ReplicaDeviceMap(("/device:GPU:0",)), (v,),
variable_scope.VariableAggregation.MEAN)
sess.run(variables_lib.global_variables_initializer())
sess.run({"complicated": mirrored})
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_one_cpu,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.tpu_strategy,
],
mode=["graph", "eager"]))
def testAssignOutOfScope_mirrored(self, distribution):
with distribution.scope():
mirrored = variables_lib.Variable(1.)
if not isinstance(mirrored, values.MirroredVariable):
self.assertIsInstance(mirrored, values.TPUMirroredVariable)
self.evaluate(mirrored.assign(3.))
self.assertEqual(self.evaluate(mirrored.read_value()), 3.)
for component in mirrored.values:
self.assertEqual(self.evaluate(component.read_value()), 3.)
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.central_storage_strategy_with_two_gpus
],
mode=["graph", "eager"]))
def testAssignOutOfScope_aggregating(self, distribution):
with distribution.scope():
aggregating = variables_lib.Variable(1.)
self.assertIsInstance(aggregating, values.AggregatingVariable)
self.evaluate(aggregating.assign(3.))
self.assertEqual(self.evaluate(aggregating.read_value()), 3.)
self.assertEqual(self.evaluate(aggregating._v.read_value()), 3.)
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_one_cpu,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.tpu_strategy,
strategy_combinations.central_storage_strategy_with_two_gpus,
],
mode=["graph", "eager"]))
def testExtendsVariable(self, distribution):
with distribution.scope():
v = variables_lib.Variable(1.)
self.assertIsInstance(v, variables_lib.Variable)
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_one_cpu,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.tpu_strategy,
strategy_combinations.central_storage_strategy_with_two_gpus,
],
mode=["graph", "eager"]))
def testCheckpointing(self, distribution):
with distribution.scope():
v = variables_lib.Variable(constant_op.constant([1., 2., 3., 4]))
self.evaluate(v.initializer)
before_save = self.evaluate(v.read_value())
# Save random weights into checkpoint.
checkpoint = trackable_utils.Checkpoint(v=v)
prefix = os.path.join(self.get_temp_dir(), "ckpt")
with self.test_session():
save_path = checkpoint.save(prefix)
# Assign inverted value.
self.evaluate(v.assign(constant_op.constant([4., 3., 2., 1.])))
after_assign = self.evaluate(v.read_value())
self.assertNotAllClose(before_save, after_assign)
# Restore from the checkpoint.
with self.test_session():
checkpoint.restore(save_path).assert_consumed().run_restore_ops()
after_restore = self.evaluate(v)
self.assertAllClose(before_save, after_restore)
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_one_cpu,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.tpu_strategy,
strategy_combinations.central_storage_strategy_with_two_gpus,
],
mode=["graph"]))
def testTraceback(self, distribution):
with distribution.scope():
variable_scope.get_variable(
name="testVar", initializer=1., use_resource=True)
with self.assertRaisesRegex(
ValueError, "Variable testVar already exists"):
variable_scope.get_variable(
name="testVar", initializer=1., use_resource=True)
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.tpu_strategy,
strategy_combinations.central_storage_strategy_with_two_gpus,
],
mode=["eager"]))
def testInitializedToSameValueInsideEagerRun(self, distribution):
v = [None]
@def_function.function
def step():
def f():
if v[0] is None:
v[0] = variables_lib.Variable(random_ops.random_normal([]))
distribution.experimental_run_v2(f)
context.set_global_seed(None)
step()
vals = self.evaluate(v[0].values)
self.assertAllEqual(vals[0], vals[1])
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_one_cpu,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.tpu_strategy,
strategy_combinations.central_storage_strategy_with_two_gpus,
],
mode=["graph", "eager"]))
def testSelectReplica(self, distribution):
with distribution.scope():
v = variables_lib.Variable(1.)
self.assertIs(v, values.select_replica(0, v))
@combinations.generate(
combinations.combine(
distribution=[
strategy_combinations.mirrored_strategy_with_one_cpu,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.tpu_strategy,
strategy_combinations.central_storage_strategy_with_two_gpus,
],
mode=["graph", "eager"]))
def testModAfterAssign(self, distribution):
with distribution.scope():
v = variables_lib.Variable(0)
def replica_fn():
def merge_fn(_):
return math_ops.mod(v.assign_add(1), 2)
return distribution_strategy_context.get_replica_context().merge_call(
merge_fn)
@def_function.function
def foo():
distribution.experimental_run_v2(replica_fn)
foo()
# Strategy classes whose SyncOnRead variables need the TPU-specific wrapper.
_TPU_STRATEGIES = (tpu_strategy.TPUStrategy, tpu_strategy.TPUStrategyV1)
def _make_replica_local(method, strategy=None):
  """Builds one variable per device plus a sync-on-read wrapper over them.

  Args:
    method: `VariableAggregation` mode to give the wrapper.
    strategy: optional distribution strategy supplying the worker devices;
      when None a fixed ("/device:GPU:0", "/device:CPU:0") pair is used.

  Returns:
    A `(component_variables, sync_on_read_variable)` tuple, where the
    components are named "v" and "v/replica" with initial values 1. and 2.
  """
  if strategy is None:
    devices = ("/device:GPU:0", "/device:CPU:0")
  else:
    devices = strategy.extended.worker_devices
  device_map = values.ReplicaDeviceMap(devices)

  components = []
  for device, var_name, init_value in zip(
      devices, ["v", "v/replica"], [1., 2.]):
    with ops.device(device):
      components.append(
          variable_scope.get_variable(
              name=var_name, initializer=init_value, use_resource=True))

  # TPU strategies require the TPU-specific variable wrapper.
  is_tpu = (strategy is not None) and isinstance(strategy, _TPU_STRATEGIES)
  var_cls = values.TPUSyncOnReadVariable if is_tpu else values.SyncOnReadVariable
  return components, var_cls(strategy, device_map, components, method)
class SyncOnReadVariablePropertiesTest(test.TestCase):
  """Tests for SyncOnReadVariable metadata and tensor conversion."""

  # Soft placement lets the Placer fall back to CPU when a requested GPU is
  # unavailable (see the graph-mode tests below).
  config = config_pb2.ConfigProto()
  config.allow_soft_placement = True

  @test_util.run_in_graph_and_eager_modes(config=config)
  def testProperties(self):
    """The wrapper exposes the name/dtype/shape of its first component."""
    if context.num_gpus() < 1 and context.executing_eagerly():
      self.skipTest("A GPU is not available for this test in eager mode.")
    v, replica_local = _make_replica_local(
        variable_scope.VariableAggregation.SUM)

    self.assertEqual(v[0].name, replica_local.name)
    self.assertEqual(v[0].dtype, replica_local.dtype)
    self.assertEqual(v[0].shape, replica_local.shape)
    self.assertEqual(variable_scope.VariableAggregation.SUM,
                     replica_local.aggregation)

  @test_util.run_in_graph_and_eager_modes(config=config)
  def testVariableOnAnotherDevice(self):
    """A SyncOnReadVariable can wrap a component on a job-qualified device."""
    v = variable_scope.get_variable(
        name="v", initializer=[1.], use_resource=True)
    device_map = values.ReplicaDeviceMap(("/job:foo/device:CPU:0",))
    replica_local = values.SyncOnReadVariable(
        None, device_map, (v,), variable_scope.VariableAggregation.MEAN)

    self.assertEqual(v.name, replica_local.name)
    self.assertEqual(v.dtype, replica_local.dtype)
    self.assertEqual(v.shape, replica_local.shape)
    self.assertEqual(variable_scope.VariableAggregation.MEAN,
                     replica_local.aggregation)

  def testTensorConversion(self):
    """convert_to_tensor works with as_ref both False and True."""
    with context.graph_mode():
      _, replica_local = _make_replica_local(
          variable_scope.VariableAggregation.SUM)
      converted = ops.convert_to_tensor(replica_local, as_ref=False)
      self.assertIsInstance(converted, ops.Tensor)
      self.assertEqual(converted.dtype, replica_local.dtype)

      converted = ops.convert_to_tensor(replica_local, as_ref=True)
      # Resources variable are converted to tensors as well when as_ref is True.
      self.assertIsInstance(converted, ops.Tensor)
      self.assertEqual(converted.dtype, replica_local.dtype)

  @test_util.run_v2_only
  def testCanPassToDefFun(self):
    """A SyncOnReadVariable is accepted as a tf.function argument."""

    @def_function.function
    def add1(x):
      return x + 1

    v = variable_scope.get_variable(
        name="v", initializer=[1.], use_resource=True)
    device_map = values.ReplicaDeviceMap(("/job:foo/device:CPU:0",))
    replica_local = values.SyncOnReadVariable(
        None, device_map, (v,), variable_scope.VariableAggregation.MEAN)
    self.assertEqual(2., self.evaluate(add1(replica_local)))
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
            strategy_combinations.tpu_strategy,
        ],
        mode=["graph", "eager"]))
class SyncOnReadVariableTest(test.TestCase, parameterized.TestCase):
  """Save/restore, assignment and read semantics of SyncOnReadVariable."""

  def _assign_replica_local(self, v, new):
    """Assigns new[i] to component v[i] on its own device."""
    for var, n in zip(v, new):
      with ops.device(var.device):
        self.evaluate(var.assign(n))

  def _save_return_saver(self, sess, var):
    """Saves `var` to a temp-dir checkpoint; returns (save_path, saver)."""
    saver = saver_lib.Saver(var_list=[var])
    test_dir = self.get_temp_dir()
    prefix = os.path.join(test_dir, "ckpt")
    return saver.save(sess, prefix), saver

  def _save(self, sess, var):
    """Saves `var` to a temp-dir checkpoint; returns only the save path."""
    save_path, _ = self._save_return_saver(sess, var)
    return save_path

  def testSaveAndRestoreReplicaLocalSumOneGraph(self, distribution):
    """SUM aggregation: saved total is split evenly across components."""
    with self.cached_session() as sess:
      v, replica_local = _make_replica_local(
          variable_scope.VariableAggregation.SUM, distribution)

      # Overwrite the initial values.
      self._assign_replica_local(v, [3., 4.])

      with distribution.scope():
        # Saves the current value of v[0] + v[1], 7.
        save_path, saver = self._save_return_saver(sess, replica_local)

        # Change the values between save and restore.
        self._assign_replica_local(v, [5., 6.])

        # Restores the saved value of 7. which gets divided equally
        # between the variables.
        saver.restore(sess, save_path)
        self.assertEqual([3.5, 3.5], self.evaluate([v[0], v[1]]))

  def testSaveAndRestoreReplicaLocalMeanOneGraph(self, distribution):
    """MEAN aggregation: saved mean is restored to every component."""
    if context.num_gpus() < 1 and context.executing_eagerly():
      self.skipTest("A GPU is not available for this test in eager mode.")

    with self.cached_session() as sess:
      v, replica_local = _make_replica_local(
          variable_scope.VariableAggregation.MEAN, distribution)

      # Overwrite the initial values.
      self._assign_replica_local(v, [3., 4.])

      with distribution.scope():
        # Saves the current value of (v[0] + v[1])/2, 3.5.
        save_path, saver = self._save_return_saver(sess, replica_local)

        # Change the values between save and restore.
        self._assign_replica_local(v, [5., 6.])

        # Restores the saved value of 3.5 to both variables.
        saver.restore(sess, save_path)
        self.assertEqual([3.5, 3.5], self.evaluate([v[0], v[1]]))

  def _save_replica_local_mean(self, distribution):
    """Save variables with mirroring, returns save_path."""
    with self.session(graph=ops.Graph()) as sess:
      v, replica_local = _make_replica_local(
          variable_scope.VariableAggregation.MEAN, distribution)

      # Overwrite the initial values.
      self._assign_replica_local(v, [3., 4.])

      with distribution.scope():
        # Saves the current value of (v[0] + v[1])/2, 3.5
        save_path = self._save(sess, replica_local)

        # Change the values between save and restore.
        self._assign_replica_local(v, [5., 6.])
    return save_path

  def _save_replica_local_sum(self, distribution):
    """Save variables with mirroring, returns save_path."""
    with self.session(graph=ops.Graph()) as sess:
      v, replica_local = _make_replica_local(
          variable_scope.VariableAggregation.SUM, distribution)

      # Overwrite the initial values.
      self._assign_replica_local(v, [1.5, 2.])

      with distribution.scope():
        # Saves the current value of v[0] + v[1], 3.5
        save_path = self._save(sess, replica_local)

        # Change the values between save and restore.
        self._assign_replica_local(v, [5., 6.])
    return save_path

  def _save_normal(self):
    """Save variables without mirroring, returns save_path."""
    with self.session(graph=ops.Graph()) as sess:
      var = variable_scope.get_variable(
          name="v", initializer=1., use_resource=True)

      # Overwrite the initial value.
      self.evaluate(var.assign(3.5))

      # Saves the current value of var, 3.5.
      save_path = self._save(sess, var)

      # Change the values between save and restore.
      self.evaluate(var.assign(5.))
    return save_path

  def _restore_normal(self, save_path):
    """Restore to variables without mirroring in a fresh graph."""
    with self.session(graph=ops.Graph()) as sess:
      var = variable_scope.get_variable(
          name="v", initializer=7., use_resource=True)

      # Overwrite the initial value.
      self.evaluate(var.assign(8.))

      # Restores the saved value of 3.5 to `var`.
      saver = saver_lib.Saver(var_list=[var])
      saver.restore(sess, save_path)
      self.assertEqual(3.5, self.evaluate(var))

  def _restore_replica_local_mean(self, save_path, distribution):
    """Restore to variables with mirroring in a fresh graph."""
    with self.session(graph=ops.Graph()) as sess:
      v, replica_local = _make_replica_local(
          variable_scope.VariableAggregation.MEAN, distribution)

      # Overwrite the initial values.
      self._assign_replica_local(v, [7., 8.])

      with distribution.scope():
        # Restores the saved value of 3.5 to both variables.
        saver = saver_lib.Saver(var_list=[replica_local])
        saver.restore(sess, save_path)
        self.assertEqual([3.5, 3.5], self.evaluate([v[0], v[1]]))

  def _restore_replica_local_sum(self, save_path, distribution):
    """Restore to variables with mirroring in a fresh graph."""
    with self.session(graph=ops.Graph()) as sess:
      v, replica_local = _make_replica_local(
          variable_scope.VariableAggregation.SUM, distribution)

      # Overwrite the initial values.
      self._assign_replica_local(v, [7., 8.])

      with distribution.scope():
        # Restores the saved value of 3.5 to both variables.
        saver = saver_lib.Saver(var_list=[replica_local])
        saver.restore(sess, save_path)
        self.assertEqual([1.75, 1.75], self.evaluate([v[0], v[1]]))

  def testSaveReplicaLocalRestoreReplicaLocalMean(self, distribution):
    save_path = self._save_replica_local_mean(distribution)
    self._restore_replica_local_mean(save_path, distribution)

  def testSaveReplicaLocalRestoreReplicaLocalSum(self, distribution):
    save_path = self._save_replica_local_sum(distribution)
    self._restore_replica_local_sum(save_path, distribution)

  def testSaveReplicaLocalMeanRestoreNormal(self, distribution):
    save_path = self._save_replica_local_mean(distribution)
    self._restore_normal(save_path)

  def testSaveReplicaLocalSumRestoreNormal(self, distribution):
    save_path = self._save_replica_local_sum(distribution)
    self._restore_normal(save_path)

  def testSaveNormalRestoreReplicaLocalMean(self, distribution):
    save_path = self._save_normal()
    self._restore_replica_local_mean(save_path, distribution)

  def testSaveNormalRestoreReplicaLocalSum(self, distribution):
    save_path = self._save_normal()
    self._restore_replica_local_sum(save_path, distribution)

  def testAssign(self, distribution):
    """assign/assign_add/assign_sub in replica and cross-replica contexts."""

    def assign(fn, v, update_value, cross_replica):
      # Run the named update method either directly (cross-replica) or
      # under experimental_run_v2 (replica context).
      update_fn = lambda: getattr(v, fn)(update_value)
      if cross_replica:
        return update_fn()
      else:
        return distribution.experimental_local_results(
            distribution.experimental_run_v2(update_fn))

    updates = [("assign", 1.), ("assign_add", 1.), ("assign_sub", -1.)]
    aggregations = [
        variables_lib.VariableAggregation.NONE,
        variables_lib.VariableAggregation.SUM,
        variables_lib.VariableAggregation.MEAN,
        variables_lib.VariableAggregation.ONLY_FIRST_REPLICA,
    ]
    options = (  # VariableAggregation.SUM in cross-replica mode is tested below
        [x for x in itertools.product(updates, aggregations, [True, False])
         if not(x[1] == variables_lib.VariableAggregation.SUM and x[2])])
    for update, aggregation, cross_replica in options:
      with distribution.scope():
        v = variable_scope.variable(
            0.,
            synchronization=variables_lib.VariableSynchronization.ON_READ,
            aggregation=aggregation)
      self.evaluate(variables_lib.global_variables_initializer())
      fn, update_value = update
      self.evaluate(assign(fn, v, update_value, cross_replica))
      for component in v._values:
        self.assertAllEqual(self.evaluate(component.read_value()),
                            self.evaluate(array_ops.ones_like(component)))

  def testAssignDtypeConversion(self, distribution):
    """Integer update values are converted to the variable's float dtype."""

    def assign(fn, v, update_value, cross_replica):
      update_fn = lambda: getattr(v, fn)(update_value)
      if cross_replica:
        return update_fn()
      else:
        return distribution.experimental_local_results(
            distribution.experimental_run_v2(update_fn))

    # Same cases as testAssign, but with int update values (1 vs 1.).
    updates = [("assign", 1), ("assign_add", 1), ("assign_sub", -1)]
    aggregations = [
        variables_lib.VariableAggregation.NONE,
        variables_lib.VariableAggregation.SUM,
        variables_lib.VariableAggregation.MEAN,
        variables_lib.VariableAggregation.ONLY_FIRST_REPLICA,
    ]
    options = (  # VariableAggregation.SUM in cross-replica mode is tested below
        [x for x in itertools.product(updates, aggregations, [True, False])
         if not(x[1] == variables_lib.VariableAggregation.SUM and x[2])])
    for update, aggregation, cross_replica in options:
      with distribution.scope():
        v = variable_scope.variable(
            0.,
            synchronization=variables_lib.VariableSynchronization.ON_READ,
            aggregation=aggregation)
      self.evaluate(variables_lib.global_variables_initializer())
      fn, update_value = update
      self.evaluate(assign(fn, v, update_value, cross_replica))
      for component in v._values:
        self.assertAllEqual(self.evaluate(component.read_value()),
                            self.evaluate(array_ops.ones_like(component)))

  def testAssignWithAggregationSum(self, distribution):
    """Cross-replica assign with SUM divides the value across components."""
    with distribution.scope():
      v = variable_scope.variable(
          0.,
          synchronization=variables_lib.VariableSynchronization.ON_READ,
          aggregation=variables_lib.VariableAggregation.SUM)
    self.evaluate(variables_lib.global_variables_initializer())
    self.evaluate(v.assign(1. * distribution.num_replicas_in_sync))
    for component in v._values:
      self.assertAllEqual(self.evaluate(component.read_value()),
                          self.evaluate(array_ops.ones_like(component)))

  def testAssignAddSubWithAggregationSum(self, distribution):
    """Cross-replica assign_add/assign_sub are rejected for SUM aggregation."""
    with distribution.scope():
      v = variable_scope.variable(
          0.,
          synchronization=variables_lib.VariableSynchronization.ON_READ,
          aggregation=variables_lib.VariableAggregation.SUM)
    self.evaluate(variables_lib.global_variables_initializer())
    with self.assertRaisesRegex(
        ValueError, "SyncOnReadVariable does not support "):
      self.evaluate(v.assign_add(1.))
    with self.assertRaisesRegex(
        ValueError, "SyncOnReadVariable does not support "):
      self.evaluate(v.assign_sub(1.))

  def testReadValueInReplicaContext(self, distribution):
    """In replica context, read_value returns the local component's value."""
    aggregations = [
        variables_lib.VariableAggregation.NONE,
        variables_lib.VariableAggregation.SUM,
        variables_lib.VariableAggregation.MEAN,
        variables_lib.VariableAggregation.ONLY_FIRST_REPLICA,
    ]
    for aggregation in aggregations:
      with distribution.scope():
        v = variable_scope.variable(
            0.,
            synchronization=variables_lib.VariableSynchronization.ON_READ,
            aggregation=aggregation)
      self.evaluate(variables_lib.global_variables_initializer())
      results = self.evaluate(distribution.experimental_local_results(
          distribution.experimental_run_v2(v.read_value)))
      for component, value in zip(v._values, results):
        self.assertAllEqual(self.evaluate(component.read_value()), value)

  def testReadValueInCrossReplicaContext(self, distribution):
    """In cross-replica context, read_value aggregates per the policy."""
    aggregations = [
        variables_lib.VariableAggregation.SUM,
        variables_lib.VariableAggregation.MEAN,
        variables_lib.VariableAggregation.ONLY_FIRST_REPLICA,
    ]
    for aggregation in aggregations:
      if isinstance(distribution, _TPU_STRATEGIES):
        resolver = tpu_cluster_resolver.TPUClusterResolver('')
        tpu_strategy_util.initialize_tpu_system(resolver)
      with distribution.scope():
        v = variable_scope.variable(
            0.,
            synchronization=variables_lib.VariableSynchronization.ON_READ,
            aggregation=aggregation)
      self.evaluate(variables_lib.global_variables_initializer())

      def assign(v=v):
        # Each replica writes its own replica id into its component.
        ctx = distribution_strategy_context.get_replica_context()
        replica_id = ctx.replica_id_in_sync_group
        return v.assign(math_ops.cast(replica_id, dtypes.float32))

      self.evaluate(distribution.experimental_local_results(
          distribution.experimental_run_v2(assign)))
      result = self.evaluate(v.read_value())
      num_replicas = distribution.num_replicas_in_sync
      # Components hold 0, 1, ..., num_replicas - 1.
      sum_of_replica_values = num_replicas * (num_replicas - 1) / 2.
      if aggregation == variables_lib.VariableAggregation.SUM:
        expected = sum_of_replica_values
      elif aggregation == variables_lib.VariableAggregation.MEAN:
        expected = sum_of_replica_values / num_replicas
      else:
        expected = 0
      self.assertEqual(expected, result, aggregation)

  def testReadValueWithAggregationNoneInCrossReplicaContext(self, distribution):
    """Cross-replica read with aggregation NONE is an error."""
    with distribution.scope():
      v = variable_scope.variable(
          0.,
          synchronization=variables_lib.VariableSynchronization.ON_READ,
          aggregation=variables_lib.VariableAggregation.NONE)
    self.evaluate(variables_lib.global_variables_initializer())
    with self.assertRaisesRegex(
        ValueError, "Could not convert from .* VariableAggregation\\.NONE"):
      self.evaluate(v.read_value())

  def testInitializedToSameValueInsideEagerRun(self, distribution):
    """ON_READ components created inside a tf.function share one initial value."""
    if not context.executing_eagerly(): self.skipTest("eager only")

    v = [None]

    @def_function.function
    def step():

      def f():
        if v[0] is None:
          v[0] = variables_lib.Variable(
              random_ops.random_normal([]),
              synchronization=variables_lib.VariableSynchronization.ON_READ)

      distribution.experimental_run_v2(f)

    context.set_global_seed(None)
    step()
    vals = self.evaluate(v[0].values)
    self.assertAllEqual(vals[0], vals[1])
class MirroredTest(test.TestCase):
  """Tests arithmetic operator behavior on Mirrored values."""

  def testAddOp(self):
    """`+` on a Mirrored value matches math_ops.add in value and type."""
    if context.num_gpus() < 1:
      self.skipTest("A GPU is not available for this test.")
    mirrored_val = _make_mirrored_val(init_val=3.)

    self.assertEqual(self.evaluate(constant_op.constant(6.)),
                     self.evaluate(mirrored_val + mirrored_val))
    self.assertEqual(self.evaluate(constant_op.constant(4.)),
                     self.evaluate(mirrored_val + 1))
    self.assertEqual(self.evaluate(mirrored_val + 1),
                     self.evaluate(math_ops.add(mirrored_val, 1)))
    self.assertEqual(type(mirrored_val + 1),
                     type(math_ops.add(mirrored_val, 1)))
class PerReplicaTest(test.TestCase, parameterized.TestCase):
  """Tests PerReplica's composite-tensor TypeSpec integration."""

  def testTypeSpec(self):
    device_map = values.SingleDeviceMap("CPU")
    vals = (constant_op.constant(1.),)
    per_replica = values.PerReplica(device_map, vals)

    spec = per_replica._type_spec
    self.assertEqual(spec._value_specs,
                     (tensor_spec.TensorSpec([], dtypes.float32),))
    self.assertEqual(spec._device_map, per_replica.device_map)
    self.assertEqual(spec._logical_device, per_replica.logical_device)

  def testTypeSpecRoundTrip(self):
    """Decompose to components and rebuild; the result is equivalent."""
    device_map = values.SingleDeviceMap("CPU")
    vals = (constant_op.constant(1.),)
    per_replica = values.PerReplica(device_map, vals)

    spec = per_replica._type_spec
    tensor_list = spec._to_components(per_replica)
    reconstructed = spec._from_components(tensor_list)

    self.assertEqual(per_replica.device_map, reconstructed.device_map)
    self.assertEqual(per_replica.logical_device, reconstructed.logical_device)
    self.assertAllEqual(per_replica.values, reconstructed.values)

  def testTypeSpecNest(self):
    device_map = values.ReplicaDeviceMap(["CPU:0", "CPU:1"])
    vals = (constant_op.constant(1.), constant_op.constant([5., 6.0]),)
    per_replica = values.PerReplica(device_map, vals)

    # Note: nest.map_structure exercises nest.flatten and
    # nest.pack_sequence_as.
    result = nest.map_structure(lambda t: t + 10, per_replica,
                                expand_composites=True)

    self.assertEqual(per_replica.device_map, result.device_map)
    self.assertEqual(per_replica.logical_device, result.logical_device)
    self.assertLen(result.values, 2)
    self.assertAllEqual(result.values[0], 11.)
    self.assertAllEqual(result.values[1], [15., 16.0])

  @test_util.run_in_graph_and_eager_modes
  def testIsGraphTensor(self):
    """Flattened components are graph tensors exactly when not eager."""
    per_replica = values.PerReplica(values.SingleDeviceMap("CPU"),
                                    (constant_op.constant(1.),))
    for t in nest.flatten(per_replica, expand_composites=True):
      self.assertEqual(hasattr(t, "graph"), not context.executing_eagerly())

  def testDoesNotTriggerFunctionTracing(self):
    """Structurally-equal PerReplica inputs reuse one tf.function trace."""
    traces = []

    @def_function.function
    def f(x):
      traces.append(None)  # Only happens on trace.
      return x

    per_replica = values.PerReplica(
        values.SingleDeviceMap("CPU"), (constant_op.constant(1.),))

    # Trace once.
    f(per_replica)
    self.assertNotEmpty(traces)
    del traces[:]

    per_replica_spec = per_replica._type_spec
    for _ in range(5):
      vals = per_replica_spec._to_components(per_replica)
      vals = [v * 2 for v in vals]
      per_replica = per_replica_spec._from_components(vals)

      output = f(per_replica)
      self.assertIsInstance(output, values.PerReplica)
      self.assertAllEqual(output._values, per_replica._values)
      self.assertAllEqual(output._device_map, per_replica._device_map)
      self.assertAllEqual(output._logical_device, per_replica._logical_device)
      self.assertEmpty(traces)  # Make sure we're not re-tracing `f`.

  def testFunctionCanReturnPerReplica(self):
    f = def_function.function(lambda x: x)
    x = values.PerReplica(
        values.SingleDeviceMap("CPU"), (constant_op.constant(1.),))
    y = f(x)
    self.assertIsNot(x, y)
    nest.map_structure(self.assertAllEqual, x, y, expand_composites=True)
    self.assertEqual(x._type_spec, y._type_spec)

  @test_util.run_in_graph_and_eager_modes
  def testCondWithTensorValues(self):
    """tf.cond can select between PerReplica values holding tensors."""
    device_map = values.SingleDeviceMap("CPU")
    per_replica_1 = values.PerReplica(device_map, (constant_op.constant("a"),))
    per_replica_2 = values.PerReplica(device_map,
                                      (constant_op.constant(["b", "c"]),))
    condition = array_ops.placeholder_with_default(True, [])

    result = control_flow_ops.cond(
        condition, lambda: per_replica_1, lambda: per_replica_2)

    self.assertEqual(per_replica_1.device_map, result.device_map)
    self.assertEqual(per_replica_1.logical_device, result.logical_device)
    self.assertLen(result.values, 1)
    self.assertAllEqual(result.values[0], "a")

  @test_util.run_in_graph_and_eager_modes
  def testCondWithValuesConvertibleToTensor(self):
    """tf.cond also works when the held values are tensor-convertible."""
    device_map = values.SingleDeviceMap("CPU")
    per_replica_1 = values.PerReplica(device_map, ("a",))
    per_replica_2 = values.PerReplica(device_map, ("b",))
    condition = array_ops.placeholder_with_default(True, [])

    result = control_flow_ops.cond(
        condition, lambda: per_replica_1, lambda: per_replica_2)

    self.assertEqual(per_replica_1.device_map, result.device_map)
    self.assertEqual(per_replica_1.logical_device, result.logical_device)
    self.assertLen(result.values, 1)
    self.assertAllEqual(result.values[0], "a")

  @test_util.build_as_function_and_v1_graph
  def testCondWithValuesNotConvertibleToTensor(self):
    """Values without a TypeSpec (e.g. sets) are rejected by tf.cond."""
    device_map = values.SingleDeviceMap("CPU")
    per_replica_1 = values.PerReplica(device_map, (set(["a"]),))
    per_replica_2 = values.PerReplica(device_map, (set(["b", "c"]),))
    condition = array_ops.placeholder(dtypes.bool, [])

    with self.assertRaisesRegex(TypeError, "Could not build a TypeSpec for"):
      control_flow_ops.cond(
          condition, lambda: per_replica_1, lambda: per_replica_2)
class WorkerDeviceMapTest(test.TestCase, parameterized.TestCase):
  """Tests for WorkerDeviceMap and PerReplica local-results ordering."""

  class ReplicaContext(object):
    """Minimal stand-in exposing only replica_id_in_sync_group."""

    def __init__(self, replica_id_in_sync_group):
      self.replica_id_in_sync_group = replica_id_in_sync_group

  def testBasic(self):
    devices = [
        "/job:worker/replica:0/task:0/device:CPU:0",
        "/job:worker/replica:0/task:2/device:CPU:0"
    ]
    device_map = values.WorkerDeviceMap(devices, 1)
    self.assertAllEqual(devices, device_map.all_devices)

    # pylint:disable=pointless-statement
    with self.assertRaisesWithPredicateMatch(
        ValueError, "`WorkerDeviceMap` is not indexed by replicas"):
      device_map.devices_by_replica

    self.assertEqual(1, device_map.num_logical_devices)

    self.assertEqual(2, device_map.num_replicas_in_graph)

    self.assertEqual(0, device_map.logical_device_from_values(["a", "b"]))

    self.assertAllEqual(devices, device_map.logical_to_actual_devices(0))

    replica_context = WorkerDeviceMapTest.ReplicaContext(1)
    self.assertEqual(
        "b", device_map.select_for_current_replica(["a", "b"], replica_context))

    with self.assertRaisesWithPredicateMatch(
        ValueError, "`WorkerDeviceMap` not indexed by replicas"):
      device_map.replica_for_device(devices[1])

    self.assertEqual("b", device_map.select_for_device(["a", "b"], devices[1]))

    with self.assertRaisesWithPredicateMatch(
        ValueError, "WorkerDeviceMap not indexed by replicas"):
      device_map.is_device_in_replica(devices[1], 1)

    self.assertEqual(
        "WorkerDeviceMap(('/job:worker/replica:0/task:0/device:CPU:0', "
        "'/job:worker/replica:0/task:2/device:CPU:0'), "
        "num_replicas_per_worker=1)", repr(device_map))

  def testMultipleReplicasPerWorker(self):
    """With 2 replicas per worker, replica 3 maps to the second value."""
    devices = [
        "/job:worker/replica:0/task:0/device:CPU:0",
        "/job:worker/replica:0/task:2/device:CPU:0"
    ]
    device_map = values.WorkerDeviceMap(devices, 2)
    replica_context = WorkerDeviceMapTest.ReplicaContext(3)

    self.assertEqual(
        "b", device_map.select_for_current_replica(["a", "b"], replica_context))

  @combinations.generate(
      combinations.combine(
          distribution=[
              strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
              strategy_combinations.tpu_strategy,
          ],
          mode=["graph", "eager"]))
  def testExperimentalLocalResultsOrder(self, distribution):
    # Create 2 devices in the device map, where the alphabetical order and the
    # actual order of devices are different.
    device_map = values.ReplicaDeviceMap(["CPU:2", "CPU:10"])
    vals = (
        constant_op.constant(1.),
        constant_op.constant([5., 6.0]),
    )
    per_replica = values.PerReplica(device_map, vals)
    results = self.evaluate(
        distribution.experimental_local_results(per_replica))

    # We expect the outputs order the same as the inputs order.
    self.assertLen(results, 2)
    self.assertAllEqual(1.0, results[0])
    self.assertAllEqual([5., 6.], results[1])
if __name__ == "__main__":
test.main()
| ppwwyyxx/tensorflow | tensorflow/python/distribute/values_test.py | Python | apache-2.0 | 56,467 |
# -------------------------------------------------------------------------- #
# Copyright 2002-2017, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'CloudServer'
class EC2Application
############################################################################
# Configuration constants
############################################################################
EC2_AUTH = VAR_LOCATION + "/.one/ec2_auth"
EC2_LOG = LOG_LOCATION + "/econe-server.log"
CONFIGURATION_FILE = ETC_LOCATION + "/econe.conf"
TEMPLATE_LOCATION = ETC_LOCATION + "/ec2query_templates"
VIEWS_LOCATION = RUBY_LIB_LOCATION + "/cloud/econe/views"
############################################################################
# Attribute accesors
############################################################################
attr_reader :conf
attr_reader :logger
attr_reader :econe_host, :econe_port, :econe_path
############################################################################
# Initialization of the EC2 application server
############################################################################
def initialize
# ----------- Parse configuration -----------
begin
@conf = YAML.load_file(CONFIGURATION_FILE)
rescue Exception => e
raise "Error parsing file #{CONFIGURATION_FILE}: #{e.message}"
end
@conf[:template_location] = TEMPLATE_LOCATION
@conf[:views] = VIEWS_LOCATION
@conf[:debug_level] ||= 3
CloudServer.print_configuration(@conf)
# ----------- Init logging system -----------
@logger = CloudLogger::CloudLogger.new(EC2_LOG)
@logger.level = CloudLogger::DEBUG_LEVEL[@conf[:debug_level].to_i]
@logger.formatter = proc do |severity, datetime, progname, msg|
CloudLogger::MSG_FORMAT % [
datetime.strftime( CloudLogger::DATE_FORMAT),
severity[0..0],
msg ]
end
# ----------- Init Authentication System -----------
begin
ENV["ONE_CIPHER_AUTH"] = EC2_AUTH
@cloud_auth = CloudAuth.new(@conf, @logger)
rescue => e
raise "Error initializing authentication system: #{e.message}"
end
# ----------- Check port -----------
if CloudServer.is_port_open?(@conf[:host], @conf[:port])
raise "Port #{@conf[:port]} busy."
end
# ----------- Init EC2 attributes -----------
if @conf[:ssl_server]
uri = URI.parse(@conf[:ssl_server])
@econe_host = uri.host
@econe_port = uri.port
@econe_path = uri.path
else
@econe_host = @conf[:host]
@econe_port = @conf[:port]
@econe_path = '/'
end
end
############################################################################
# Authentication & route methods
############################################################################
def authenticate(renv, rparams)
@cloud_auth.auth(renv, rparams)
end
def do_http_request(params)
econe_server = EC2QueryServer.new(
@cloud_auth.client(params['econe_username']), @cloud_auth.client,
@conf, @logger)
case params['Action']
when 'UploadImage'
result,rc = econe_server.upload_image(params)
when 'RegisterImage'
result,rc = econe_server.register_image(params)
when 'DescribeImages'
result,rc = econe_server.describe_images(params)
when 'RunInstances'
result,rc = econe_server.run_instances(params)
when 'DescribeInstances'
result,rc = econe_server.describe_instances(params)
when 'TerminateInstances'
result,rc = econe_server.terminate_instances(params)
when 'StartInstances'
result,rc = econe_server.start_instances(params)
when 'StopInstances'
result,rc = econe_server.stop_instances(params)
when 'RebootInstances'
result,rc = econe_server.reboot_instances(params)
when 'AllocateAddress'
result,rc = econe_server.allocate_address(params)
when 'AssociateAddress'
result,rc = econe_server.associate_address(params)
when 'DisassociateAddress'
result,rc = econe_server.disassociate_address(params)
when 'ReleaseAddress'
result,rc = econe_server.release_address(params)
when 'DescribeAddresses'
result,rc = econe_server.describe_addresses(params)
when 'DescribeRegions'
result,rc = econe_server.describe_regions(params)
when 'DescribeAvailabilityZones'
result,rc = econe_server.describe_availability_zones(params)
when 'CreateSnapshot'
result,rc = econe_server.create_snapshot(params)
when 'DeleteSnapshot'
result,rc = econe_server.delete_snapshot(params)
when 'DescribeSnapshots'
result,rc = econe_server.describe_snapshots(params)
when 'CreateTags'
result,rc = econe_server.create_tags(params)
when 'DeleteTags'
result,rc = econe_server.delete_tags(params)
when 'DescribeTags'
result,rc = econe_server.describe_tags(params)
#when 'CreateImage'
# result,rc = econe_server.create_image(params)
when 'CreateVolume'
result,rc = econe_server.create_volume(params)
when 'DescribeVolumes'
result,rc = econe_server.describe_volumes(params)
when 'AttachVolume'
result,rc = econe_server.attach_volume(params)
when 'DetachVolume'
result,rc = econe_server.detach_volume(params)
when 'DeleteVolume'
result,rc = econe_server.delete_volume(params)
when 'DescribeKeyPairs'
result,rc = econe_server.describe_keypairs(params)
when 'CreateKeyPair'
result,rc = econe_server.create_keypair(params)
when 'DeleteKeyPair'
result,rc = econe_server.delete_keypair(params)
else
result = OpenNebula::Error.new(
"#{params['Action']} feature is not supported",
OpenNebula::Error::ENO_EXISTS)
end
return result, rc
end
end
| unistra/one | src/cloud/ec2/lib/econe_application.rb | Ruby | apache-2.0 | 7,480 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hdfs2;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.impl.DefaultScheduledPollConsumerScheduler;
import org.apache.camel.impl.JndiRegistry;
import org.apache.camel.impl.PropertyPlaceholderDelegateRegistry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayFile;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.Progressable;
import org.junit.Before;
import org.junit.Test;
import static org.apache.hadoop.io.SequenceFile.CompressionType;
import static org.hamcrest.CoreMatchers.equalTo;
public class HdfsConsumerTest extends HdfsTestSupport {
private static final int ITERATIONS = 200;
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Before
public void setUp() throws Exception {
if (!canTest()) {
return;
}
// must be able to get security configuration
try {
javax.security.auth.login.Configuration.getConfiguration();
} catch (Exception e) {
return;
}
deleteDirectory("target/test");
super.setUp();
}
@Test
public void testSimpleConsumer() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-normal-file").getAbsolutePath());
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(file.toUri(), conf);
FSDataOutputStream out = fs.create(file);
for (int i = 0; i < 1024; ++i) {
out.write(("PIPPO" + i).getBytes("UTF-8"));
out.flush();
}
out.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(2);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&chunkSize=4096&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testConcurrentConsumers() throws Exception {
if (!canTest()) {
return;
}
final File rootdir = new File(".");
final File dir = new File("target/test/multiple-consumers");
dir.mkdirs();
for (int i = 1; i <= ITERATIONS; i++) {
FileOutputStream fos = new FileOutputStream(new File(dir, String.format("file-%04d.txt", i)));
fos.write(String.format("hello (%04d)\n", i).getBytes());
fos.close();
}
final Set<String> fileNames = new HashSet<>();
final CountDownLatch latch = new CountDownLatch(ITERATIONS);
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
fileNames.add(exchange.getIn().getHeader(Exchange.FILE_NAME, String.class));
latch.countDown();
}
});
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2://" + rootdir.toURI() + "/target/test/multiple-consumers?pattern=*.txt&fileSystemType=LOCAL&chunkSize=100&initialDelay=0").to("mock:result");
from("hdfs2://" + rootdir.toURI() + "/target/test/multiple-consumers?pattern=*.txt&fileSystemType=LOCAL&chunkSize=200&initialDelay=0").to("mock:result");
from("hdfs2://" + rootdir.toURI() + "/target/test/multiple-consumers?pattern=*.txt&fileSystemType=LOCAL&chunkSize=300&initialDelay=0").to("mock:result");
from("hdfs2://" + rootdir.toURI() + "/target/test/multiple-consumers?pattern=*.txt&fileSystemType=LOCAL&chunkSize=400&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.expectedMessageCount(ITERATIONS);
latch.await(30, TimeUnit.SECONDS);
resultEndpoint.assertIsSatisfied();
assertThat(fileNames.size(), equalTo(ITERATIONS));
}
@Test
public void testSimpleConsumerWithEmptyFile() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-normal-file").getAbsolutePath());
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(file.toUri(), conf);
FSDataOutputStream out = fs.create(file);
out.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
// TODO: See comment from Claus at ticket: https://issues.apache.org/jira/browse/CAMEL-8434
resultEndpoint.expectedMinimumMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&chunkSize=4096&initialDelay=0").to("mock:result");
}
});
context.start();
Thread.sleep(2000);
resultEndpoint.assertIsSatisfied();
assertThat(resultEndpoint.getReceivedExchanges().get(0).getIn().getBody(ByteArrayOutputStream.class).toByteArray().length, equalTo(0));
}
@Test
public void testSimpleConsumerFileWithSizeEqualToNChunks() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-normal-file").getAbsolutePath());
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(file.toUri(), conf);
FSDataOutputStream out = fs.create(file);
// size = 5 times chunk size = 210 bytes
for (int i = 0; i < 42; ++i) {
out.write(new byte[] {0x61, 0x62, 0x63, 0x64, 0x65});
out.flush();
}
out.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(5);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&chunkSize=42&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
assertThat(resultEndpoint.getReceivedExchanges().get(0).getIn().getBody(ByteArrayOutputStream.class).toByteArray().length, equalTo(42));
}
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(0);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadWithReadSuffix() throws Exception {
if (!canTest()) {
return;
}
String[] beforeFiles = new File("target/test").list();
int before = beforeFiles != null ? beforeFiles.length : 0;
final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
NullWritable keyWritable = NullWritable.get();
BooleanWritable valueWritable = new BooleanWritable();
valueWritable.set(true);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.getParent().toUri() + "?scheduler=#myScheduler&pattern=*&fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0&readSuffix=handled")
.to("mock:result");
}
});
ScheduledExecutorService pool = context.getExecutorServiceManager().newScheduledThreadPool(null, "unitTestPool", 1);
DefaultScheduledPollConsumerScheduler scheduler = new DefaultScheduledPollConsumerScheduler(pool);
((JndiRegistry) ((PropertyPlaceholderDelegateRegistry) context.getRegistry()).getRegistry()).bind("myScheduler", scheduler);
context.start();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
resultEndpoint.assertIsSatisfied();
// synchronize on pool that was used to run hdfs consumer thread
scheduler.getScheduledExecutorService().shutdown();
scheduler.getScheduledExecutorService().awaitTermination(5000, TimeUnit.MILLISECONDS);
Set<String> files = new HashSet<>(Arrays.asList(new File("target/test").list()));
// there may be some leftover files before, so test that we only added 2 new files
assertThat(files.size() - before, equalTo(2));
assertTrue(files.remove("test-camel-boolean.handled"));
assertTrue(files.remove(".test-camel-boolean.handled.crc"));
}
@Test
public void testReadBoolean() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
NullWritable keyWritable = NullWritable.get();
BooleanWritable valueWritable = new BooleanWritable();
valueWritable.set(true);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadByte() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, ByteWritable.class);
NullWritable keyWritable = NullWritable.get();
ByteWritable valueWritable = new ByteWritable();
byte value = 3;
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
resultEndpoint.message(0).body(byte.class).isEqualTo(3);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadFloat() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, FloatWritable.class);
NullWritable keyWritable = NullWritable.get();
FloatWritable valueWritable = new FloatWritable();
float value = 3.1415926535f;
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadDouble() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, DoubleWritable.class);
NullWritable keyWritable = NullWritable.get();
DoubleWritable valueWritable = new DoubleWritable();
double value = 3.1415926535;
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadInt() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, IntWritable.class);
NullWritable keyWritable = NullWritable.get();
IntWritable valueWritable = new IntWritable();
int value = 314159265;
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadLong() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-long").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, LongWritable.class);
NullWritable keyWritable = NullWritable.get();
LongWritable valueWritable = new LongWritable();
long value = 31415926535L;
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadBytes() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-bytes").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BytesWritable.class);
NullWritable keyWritable = NullWritable.get();
BytesWritable valueWritable = new BytesWritable();
String value = "CIAO!";
valueWritable.set(value.getBytes(), 0, value.getBytes().length);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadString() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-string").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, Text.class);
NullWritable keyWritable = NullWritable.get();
Text valueWritable = new Text();
String value = "CIAO!";
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
writer.sync();
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Test
public void testReadStringArrayFile() throws Exception {
if (!canTest()) {
return;
}
final Path file = new Path(new File("target/test/test-camel-string").getAbsolutePath());
Configuration conf = new Configuration();
FileSystem fs1 = FileSystem.get(file.toUri(), conf);
ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs1, "target/test/test-camel-string1", Text.class, CompressionType.NONE, new Progressable() {
@Override
public void progress() {
}
});
Text valueWritable = new Text();
String value = "CIAO!";
valueWritable.set(value);
writer.append(valueWritable);
writer.close();
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.getParent().toUri() + "?fileSystemType=LOCAL&fileType=ARRAY_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
@Override
public void tearDown() throws Exception {
if (!canTest()) {
return;
}
super.tearDown();
Thread.sleep(100);
Configuration conf = new Configuration();
Path dir = new Path("target/test");
FileSystem fs = FileSystem.get(dir.toUri(), conf);
fs.delete(dir, true);
}
private Writer createWriter(Configuration conf, Path file, Class<?> keyClass,
Class<?> valueClass) throws IOException {
return SequenceFile.createWriter(conf, SequenceFile.Writer.file(file),
SequenceFile.Writer.keyClass(keyClass), SequenceFile.Writer.valueClass(valueClass));
}
}
| jonmcewen/camel | components/camel-hdfs2/src/test/java/org/apache/camel/component/hdfs2/HdfsConsumerTest.java | Java | apache-2.0 | 22,556 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.uid;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.test.ESTestCase;
/**
* test per-segment lookup of version-related data structures
*/
public class VersionLookupTests extends ESTestCase {
/**
* test version lookup actually works
*/
public void testSimple() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
// to have deleted docs
.setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();
doc.add(new Field(IdFieldMapper.NAME, "6", IdFieldMapper.Defaults.FIELD_TYPE));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
writer.addDocument(doc);
writer.addDocument(new Document());
DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME);
// found doc
DocIdAndVersion result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment);
assertNotNull(result);
assertEquals(1, result.version); // Elassandra always returns version 1L
assertEquals(0, result.docId);
// not found doc
assertNull(lookup.lookupVersion(new BytesRef("7"), randomBoolean(), segment));
// deleted doc
writer.deleteDocuments(new Term(IdFieldMapper.NAME, "6"));
reader.close();
reader = DirectoryReader.open(writer);
segment = reader.leaves().get(0);
lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME);
assertNull(lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment));
reader.close();
writer.close();
dir.close();
}
/**
* test version lookup with two documents matching the ID
*/
public void testTwoDocuments() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
.setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();
doc.add(new Field(IdFieldMapper.NAME, "6", IdFieldMapper.Defaults.FIELD_TYPE));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
writer.addDocument(doc);
writer.addDocument(doc);
writer.addDocument(new Document());
DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME);
// return the last doc when there are duplicates
DocIdAndVersion result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment);
assertNotNull(result);
assertEquals(1, result.version);
assertEquals(1, result.docId);
// delete the first doc only
assertTrue(writer.tryDeleteDocument(reader, 0) >= 0);
reader.close();
reader = DirectoryReader.open(writer);
segment = reader.leaves().get(0);
lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME);
result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment);
assertNotNull(result);
assertEquals(1, result.version);
assertEquals(1, result.docId);
// delete both docs
assertTrue(writer.tryDeleteDocument(reader, 1) >= 0);
reader.close();
reader = DirectoryReader.open(writer);
segment = reader.leaves().get(0);
lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME);
assertNull(lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment));
reader.close();
writer.close();
dir.close();
}
}
| strapdata/elassandra | server/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java | Java | apache-2.0 | 5,574 |
/**
* Copyright 2013 Netherlands Forensic Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.minvenj.nfi.storm.kafka;
import static nl.minvenj.nfi.storm.kafka.util.ConfigUtils.CONFIG_FAIL_HANDLER;
import static nl.minvenj.nfi.storm.kafka.util.ConfigUtils.DEFAULT_FAIL_HANDLER;
import static nl.minvenj.nfi.storm.kafka.util.ConfigUtils.createFailHandlerFromString;
import static nl.minvenj.nfi.storm.kafka.util.ConfigUtils.createKafkaConfig;
import static nl.minvenj.nfi.storm.kafka.util.ConfigUtils.getMaxBufSize;
import static nl.minvenj.nfi.storm.kafka.util.ConfigUtils.getTopic;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Queue;
import java.util.SortedMap;
import java.util.TreeMap;
import kafka.message.InvalidMessageException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.spout.RawScheme;
import backtype.storm.spout.Scheme;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichSpout;
import backtype.storm.topology.OutputFieldsDeclarer;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.ConsumerTimeoutException;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import nl.minvenj.nfi.storm.kafka.fail.FailHandler;
import nl.minvenj.nfi.storm.kafka.util.ConfigUtils;
import nl.minvenj.nfi.storm.kafka.util.KafkaMessageId;
/**
* Storm spout reading messages from kafka, emitting them as single field tuples.
*
* Implementation tracks a queue of message ids (partition and offset) and a set of those ids that are pending to be
* acknowledged by the topology. The buffer will only be populated with new message when *all* messages from the buffer
* have been acknowledged because the {@link ConsumerConnector} allows committing of the currently processed offset only
* through {@link kafka.javaapi.consumer.ConsumerConnector#commitOffsets()}, which commits *all* offsets that have been
* read, which does not necessarily correspond to the offsets that were successfully processed by the storm topology.
* Optimizing this behaviour is work for the (near) future.
*
* Aside from the properties used to configure the kafka consumer, the kafka spout reads the following configuration
* parameters in storm configuration:
* <ul>
* <li>{@code kafka.spout.topic}: the kafka topic to read messages from (default {@code storm});</li>
* <li>{@code kafka.spout.fail.handler}: the policy to be used when messages fail, whether to replay them, default
* {@code "reliable"} (either {@code "reliable"}, {@code "unreliable"} or a fully qualified class name of an
* implementation of {@link FailHandler});</li>
* <li>{@code kafka.spout.consumer.group}: The kafka consumer group id.</li>
* <li>{@code kafka.spout.buffer.size.max}: The maximum number of kafka messages to buffer.</li>
* </ul>
*
* @author Netherlands Forensics Institute
*/
public class KafkaSpout implements IRichSpout {
// Spouts are serialized when the topology is submitted; all runtime state is
// rebuilt in open(), so the serial version is fixed.
private static final long serialVersionUID = -1L;
private static final Logger LOG = LoggerFactory.getLogger(KafkaSpout.class);
// Scheme used to turn a raw kafka payload (byte[]) into storm tuple values.
protected final Scheme _serializationScheme;
/**
 * Collection of messages being processed by the topology (either waiting to be emitted or waiting to be
 * acknowledged). Processed message offset is committed when this becomes empty.
 *
 * @see #fillBuffer()
 */
protected final SortedMap<KafkaMessageId, byte[]> _inProgress = new TreeMap<KafkaMessageId, byte[]>();
/**
 * Queue of messages waiting to be emitted by this spout.
 *
 * @see #fillBuffer()
 */
protected final Queue<KafkaMessageId> _queue = new LinkedList<KafkaMessageId>();
// Kafka topic to read from; set by a constructor or read from storm config in open().
protected String _topic;
// Maximum number of messages read into the buffer per fillBuffer() call.
protected int _bufSize;
// Policy deciding whether failed tuples get replayed; created in open().
protected FailHandler _failHandler;
// Iterator over the single kafka stream; lazily created in fillBuffer().
protected ConsumerIterator<byte[], byte[]> _iterator;
// Transient: recreated by storm on open(), never serialized.
protected transient SpoutOutputCollector _collector;
protected transient ConsumerConnector _consumer;
/**
 * Creates a new kafka spout to be submitted in a storm topology. Configuration is read from storm config when the
 * spout is opened. Uses a {@link RawScheme} to serialize messages from kafka as a single {@code byte[]}.
 */
public KafkaSpout() {
_serializationScheme = new RawScheme();
}
/**
 * Creates a new kafka spout to be submitted in a storm topology with the provided {@link Scheme}. This impacts
 * output fields, see {@link #declareOutputFields(OutputFieldsDeclarer)}). Configuration is read from storm config
 * when the spout is opened.
 *
 * @param serializationScheme The serialization to apply to messages read from kafka.
 */
public KafkaSpout(final Scheme serializationScheme) {
_serializationScheme = serializationScheme;
}
/**
 * Creates a new kafka spout to be submitted in a storm topology. Configuration is read from storm config when the
 * spout is opened.
 *
 * @param topic The kafka topic to read messages from.
 */
public KafkaSpout(final String topic) {
this();
_topic = topic;
}
/**
 * Creates a new kafka spout to be submitted in a storm topology with the provided {@link Scheme}. This impacts
 * output fields, see {@link #declareOutputFields(OutputFieldsDeclarer)}). Configuration is read from storm config
 * when the spout is opened.
 *
 * @param topic The kafka topic to read messages from.
 * @param serializationScheme The serialization to apply to messages read from kafka.
 */
public KafkaSpout(final String topic, final Scheme serializationScheme) {
this(serializationScheme);
_topic = topic;
}
/**
 * Convenience method assigning a {@link FailHandler} instance to this kafka spout. If the configured value is
 * {@code null}, {@link ConfigUtils#DEFAULT_FAIL_HANDLER} will be used, otherwise the creation is delegated to
 * {@link ConfigUtils#createFailHandlerFromString(String)}.
 *
 * @param failHandler The configuration value for the failure policy.
 */
protected void createFailHandler(final String failHandler) {
if (failHandler == null) {
_failHandler = DEFAULT_FAIL_HANDLER;
}
else {
_failHandler = createFailHandlerFromString(failHandler);
}
}
/**
 * Ensures an initialized kafka {@link ConsumerConnector} is present.
 *
 * @param config The storm configuration passed to {@link #open(Map, TopologyContext, SpoutOutputCollector)}.
 * @throws IllegalArgumentException When a required configuration parameter is missing or a sanity check fails.
 */
protected void createConsumer(final Map<String, Object> config) {
final Properties consumerConfig = createKafkaConfig(config);
LOG.info("connecting kafka client to zookeeper at {} as client group {}",
consumerConfig.getProperty("zookeeper.connect"),
consumerConfig.getProperty("group.id"));
_consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerConfig));
}
/**
 * Refills the buffer with messages from the configured kafka topic if available.
 *
 * @return Whether the buffer contains messages to be emitted after this call.
 * @throws IllegalStateException When current buffer is not empty or messages not acknowledged by topology.
 */
protected boolean fillBuffer() {
if (!_inProgress.isEmpty() || !_queue.isEmpty()) {
throw new IllegalStateException("cannot fill buffer when buffer or pending messages are non-empty");
}
if (_iterator == null) {
// create a stream of messages from _consumer using the streams as defined on construction
final Map<String, List<KafkaStream<byte[], byte[]>>> streams = _consumer.createMessageStreams(Collections.singletonMap(_topic, 1));
_iterator = streams.get(_topic).get(0).iterator();
}
// We'll iterate the stream in a try-clause; kafka stream will poll its client channel for the next message,
// throwing a ConsumerTimeoutException when the configured timeout is exceeded.
try {
int size = 0;
while (size < _bufSize && _iterator.hasNext()) {
final MessageAndMetadata<byte[], byte[]> message = _iterator.next();
final KafkaMessageId id = new KafkaMessageId(message.partition(), message.offset());
_inProgress.put(id, message.message());
size++;
}
}
catch (final InvalidMessageException e) {
// a corrupt kafka message is logged and skipped; buffering continues with what was read
LOG.warn(e.getMessage(), e);
}
catch (final ConsumerTimeoutException e) {
// ignore, storm will call nextTuple again at some point in the near future
// timeout does *not* mean that no messages were read (state is checked below)
}
if (_inProgress.size() > 0) {
// set _queue to all currently pending kafka message ids
_queue.addAll(_inProgress.keySet());
LOG.debug("buffer now has {} messages to be emitted", _queue.size());
// message(s) appended to buffer
return true;
}
else {
// no messages appended to buffer
return false;
}
}
/**
 * Declares this spout's output fields as defined by the serialization scheme.
 */
@Override
public void declareOutputFields(final OutputFieldsDeclarer declarer) {
// delegate fields mapping to specified scheme (single field "bytes" by default)
declarer.declare(_serializationScheme.getOutputFields());
}
/**
 * @return No component-specific configuration ({@code null}).
 */
@Override
public Map<String, Object> getComponentConfiguration() {
return null;
}
/**
 * Reads topic, buffer size and failure policy from storm config (unless provided
 * at construction), connects the kafka consumer and opens the failure policy.
 */
@Override
public void open(final Map config, final TopologyContext topology, final SpoutOutputCollector collector) {
_collector = collector;
if (_topic == null) {
_topic = getTopic((Map<String, Object>) config);
}
_bufSize = getMaxBufSize((Map<String, Object>) config);
createFailHandler((String) config.get(CONFIG_FAIL_HANDLER));
// ensure availability of kafka consumer
createConsumer((Map<String, Object>) config);
// inform the failure policy of spout being opened
_failHandler.open(config, topology, collector);
LOG.info("kafka spout opened, reading from topic {}, using failure policy {}", _topic, _failHandler.getIdentifier());
}
/**
 * Shuts down the kafka consumer (if any), resets transient state and closes
 * the failure policy.
 */
@Override
public void close() {
// reset state by setting members to null
_collector = null;
_iterator = null;
if (_consumer != null) {
try {
_consumer.shutdown();
}
finally {
_consumer = null;
}
}
_failHandler.close();
}
@Override
public void activate() {
// delegate lifecycle notification to the failure policy
_failHandler.activate();
}
@Override
public void deactivate() {
// delegate lifecycle notification to the failure policy
_failHandler.deactivate();
}
/**
 * Emits at most one buffered kafka message per call. The buffer is refilled
 * only when *all* previously buffered messages have been acknowledged (see
 * class documentation for the rationale).
 */
@Override
public void nextTuple() {
// next tuple available when _queue contains ids or fillBuffer() is allowed and indicates more messages were available
// see class documentation for implementation note on the rationale behind this condition
if (!_queue.isEmpty() || (_inProgress.isEmpty() && fillBuffer())) {
final KafkaMessageId nextId = _queue.poll();
if (nextId != null) {
final byte[] message = _inProgress.get(nextId);
// the next id from buffer should correspond to a message in the pending map
if (message == null) {
throw new IllegalStateException("no pending message for next id " + nextId);
}
// use specified scheme to deserialize messages (single-field Values by default)
_collector.emit(_serializationScheme.deserialize(message), nextId);
LOG.debug("emitted kafka message id {} ({} bytes payload)", nextId, message.length);
}
}
}
/**
 * Removes the acknowledged message from the pending map; when the map drains,
 * commits consumer offsets to zookeeper and notifies the failure policy.
 */
@Override
public void ack(final Object o) {
if (o instanceof KafkaMessageId) {
final KafkaMessageId id = (KafkaMessageId) o;
// message corresponding to o is no longer pending
_inProgress.remove(id);
LOG.debug("kafka message {} acknowledged", id);
if (_inProgress.isEmpty()) {
// commit offsets to zookeeper when pending is now empty
// (buffer will be filled on next call to nextTuple())
LOG.debug("all pending messages acknowledged, committing client offsets");
_consumer.commitOffsets();
}
// notify fail handler of tuple success
_failHandler.ack(id);
}
}
/**
 * Requeues the failed message for re-emission when the failure policy asks
 * for replay; otherwise removes it from pending and hands it to the policy.
 */
@Override
public void fail(final Object o) {
if (o instanceof KafkaMessageId) {
final KafkaMessageId id = (KafkaMessageId) o;
// delegate decision of replaying the message to failure policy
if (_failHandler.shouldReplay(id)) {
LOG.debug("kafka message id {} failed in topology, adding to buffer again", id);
_queue.add(id);
}
else {
LOG.debug("kafka message id {} failed in topology, delegating failure to policy", id);
// remove message from pending; _failHandler will take action if needed
_failHandler.fail(id, _inProgress.remove(id));
}
}
}
}
| chengxj/kafka-spout | src/main/java/nl/minvenj/nfi/storm/kafka/KafkaSpout.java | Java | apache-2.0 | 14,268 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
namespace Microsoft.WindowsAzure.Management.Websites.Services.WebEntities
{
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Utilities;
/// <summary>
/// Class that represents usage of the quota resource.
/// </summary>
[DataContract(Namespace = UriElements.ServiceNamespace)]
public class Usage
{
/// <summary>
/// Name of the quota
/// </summary>
[DataMember]
public string Name { get; set; }
/// <summary>
/// Name of the quota resource
/// </summary>
[DataMember]
public string ResourceName { get; set; }
/// <summary>
/// Units of measurement for the quota resource
/// </summary>
[DataMember]
public string Unit { get; set; }
/// <summary>
/// The current value of the resource counter
/// </summary>
[DataMember]
public long CurrentValue { get; set; }
/// <summary>
/// The resource limit
/// </summary>
[DataMember]
public long Limit { get; set; }
/// <summary>
/// Next reset time for the resource counter
/// </summary>
[DataMember]
public DateTime NextResetTime { get; set; }
/// <summary>
/// ComputeMode used for this usage
/// </summary>
[DataMember]
public ComputeModeOptions? ComputeMode { get; set; }
/// <summary>
/// SiteMode used for this usage
/// </summary>
[DataMember]
public string SiteMode { get; set; }
}
/// <summary>
/// Collection of <see cref="Usage"/> entries.
/// </summary>
[CollectionDataContract(Namespace = UriElements.ServiceNamespace)]
public class Usages : List<Usage>
{
/// <summary>
/// Creates an empty collection.
/// </summary>
public Usages() { }
/// <summary>
/// Initializes the collection from an existing list of usages.
/// </summary>
/// <param name="usages">The usage entries to copy into this collection.</param>
public Usages(List<Usage> usages) : base(usages) { }
}
}
| saratrallapalli/azure-sdk-tools | WindowsAzurePowershell/src/Management.WebSites/Services/WebEntities/Usage.cs | C# | apache-2.0 | 2,879 |
package com.planet_ink.coffee_mud.Behaviors;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.core.exceptions.ScriptParseException;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2006-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Room/area behavior that lazily attaches a non-savable {@link Scriptable}
 * behavior (configured with this behavior's own parameters) to every NPC
 * whose start room lies inside the host's metro area, and then delegates the
 * {@link ScriptingEngine} interface to one sample of those attached behaviors.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class ScriptableEverymob extends StdBehavior implements ScriptingEngine
{
@Override public String ID(){return "ScriptableEverymob";}
@Override protected int canImproveCode(){return Behavior.CAN_ROOMS|Behavior.CAN_AREAS;}
// True once the script has been distributed after the mud finished starting.
private boolean started=false;
// The most recently attached Scriptable; all ScriptingEngine calls delegate to it.
private Scriptable sampleB=null;
@Override
public String accountForYourself()
{
return "complex triggered behaving";
}
/**
 * Attaches a non-savable Scriptable (carrying this behavior's parms) to the
 * given mob. Skipped for players, mobs without a start room, mobs outside
 * the metro area, and mobs that already carry a "Scriptable" behavior. The
 * newly attached behavior becomes the delegation sample.
 */
private void giveUpTheScript(Area metroA, MOB M)
{
if((M==null)
||(!M.isMonster())
||(M.getStartRoom()==null)
||(metroA==null)
||(!metroA.inMyMetroArea(M.getStartRoom().getArea()))
||(M.fetchBehavior("Scriptable")!=null))
return;
final Scriptable S=new Scriptable();
S.setParms(getParms());
S.setSavable(false);
M.addBehavior(S);
S.setSavable(false);
sampleB=S;
}
// Resolves the host's area: a Room's area, or the Area itself.
private Area determineArea(Environmental forMe)
{
if(forMe instanceof Room)
return ((Room)forMe).getArea();
else
if(forMe instanceof Area)
return (Area)forMe;
return null;
}
// Enumerates the rooms covered by the host: the single room, or an area's metro map.
private Enumeration determineRooms(Environmental forMe)
{
if(forMe instanceof Room)
return new XVector(forMe).elements();
else
if(forMe instanceof Area)
return ((Area)forMe).getMetroMap();
return null;
}
/**
 * Once the mud has fully started, walks every covered room and scripts each
 * inhabitant; runs at most once per behavior instance (guarded by started).
 */
private void giveEveryoneTheScript(Environmental forMe)
{
if((CMProps.getBoolVar(CMProps.Bool.MUDSTARTED))
&&(!started))
{
started = true;
final Enumeration rooms=determineRooms(forMe);
final Area A=determineArea(forMe);
if((A!=null)&&(rooms!=null))
{
Room R=null;
for(;rooms.hasMoreElements();)
{
R=(Room)rooms.nextElement();
for(int m=0;m<R.numInhabitants();m++)
giveUpTheScript(A,R.fetchInhabitant(m));
}
}
}
}
@Override
public boolean tick(Tickable ticking, int tickID)
{
// distribution is deferred to the first tick after the mud has started
if((!started)&&(ticking instanceof Environmental))
giveEveryoneTheScript((Environmental)ticking);
return super.tick(ticking, tickID);
}
@Override
public void startBehavior(PhysicalAgent forMe)
{
giveEveryoneTheScript(forMe);
}
/**
 * Also scripts any mob that looks at the host room, catching mobs that
 * appeared after the initial distribution.
 */
@Override
public void executeMsg(Environmental host, CMMsg msg)
{
if((msg.target() instanceof Room)
&&(msg.targetMinor()==CMMsg.TYP_LOOK))
giveUpTheScript(determineArea(host),msg.source());
super.executeMsg(host,msg);
}
// ------------------------------------------------------------------------
// ScriptingEngine delegation: every method below forwards to the sample
// Scriptable, returning an empty/neutral value while no sample exists yet.
// ------------------------------------------------------------------------
@Override
public String defaultQuestName()
{
return (sampleB==null)?"":sampleB.defaultQuestName();
}
@Override
public void dequeResponses()
{
if(sampleB!=null)
sampleB.dequeResponses();
}
@Override public List<String> externalFiles(){return (sampleB==null)?null:sampleB.externalFiles();}
@Override
public boolean endQuest(PhysicalAgent hostObj, MOB mob, String quest)
{
return (sampleB==null)?false:sampleB.endQuest(hostObj, mob, quest);
}
@Override
public boolean eval(PhysicalAgent scripted, MOB source,
Environmental target, MOB monster, Item primaryItem,
Item secondaryItem, String msg, Object[] tmp, String[][] eval,
int startEval)
{
return (sampleB==null)?false:sampleB.eval(scripted, source, target, monster, primaryItem, secondaryItem, msg, tmp, eval, startEval);
}
@Override
public String execute(PhysicalAgent scripted, MOB source,
Environmental target, MOB monster, Item primaryItem,
Item secondaryItem, DVector script, String msg, Object[] tmp)
{
return (sampleB==null)?"":sampleB.execute(scripted, source, target, monster, primaryItem, secondaryItem, script, msg, tmp);
}
@Override
public String getLocalVarXML()
{
return (sampleB==null)?"":sampleB.getLocalVarXML();
}
@Override
public MOB getMakeMOB(Tickable ticking)
{
return (sampleB==null)?null:sampleB.getMakeMOB(ticking);
}
@Override
public String getScript()
{
return (sampleB==null)?"":sampleB.getScript();
}
@Override
public String getScriptResourceKey()
{
return (sampleB==null)?"":sampleB.getScriptResourceKey();
}
@Override
public String getVar(String context, String variable)
{
return (sampleB==null)?"":sampleB.getVar(context, variable);
}
@Override
public String getVarScope()
{
return (sampleB==null)?"":sampleB.getVarScope();
}
@Override
public boolean isVar(String context, String variable)
{
return (sampleB==null)?false:sampleB.isVar(context, variable);
}
@Override
public String[] parseEval(String evaluable) throws ScriptParseException {
return (sampleB==null)?new String[0]:sampleB.parseEval(evaluable);
}
@Override
public void setLocalVarXML(String xml)
{
if(sampleB!=null)
sampleB.setLocalVarXML(xml);
}
@Override
public void setScript(String newParms)
{
if(sampleB!=null)
sampleB.setScript(newParms);
}
@Override
public void setVar(String context, String variable, String value)
{
if(sampleB!=null)
sampleB.setVar(context, variable, value);
}
@Override
public void setVarScope(String scope)
{
if(sampleB!=null)
sampleB.setVarScope(scope);
}
@Override
public String varify(MOB source, Environmental target,
PhysicalAgent scripted, MOB monster, Item primaryItem,
Item secondaryItem, String msg, Object[] tmp, String varifyable)
{
return (sampleB==null)?"":sampleB.varify(source, target, scripted, monster, primaryItem, secondaryItem, msg, tmp, varifyable);
}
@Override
public String functify(PhysicalAgent scripted, MOB source, Environmental target, MOB monster, Item primaryItem,
Item secondaryItem, String msg, Object[] tmp, String evaluable)
{
return (sampleB==null)?"":sampleB.functify(scripted, source, target, monster, primaryItem, secondaryItem, msg, tmp, evaluable);
}
}
| Tycheo/coffeemud | com/planet_ink/coffee_mud/Behaviors/ScriptableEverymob.java | Java | apache-2.0 | 7,236 |
//===--- SILGenDecl.cpp - Implements Lowering of ASTs -> SIL for Decls ----===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
#include "Initialization.h"
#include "LValue.h"
#include "RValue.h"
#include "SILGen.h"
#include "SILGenDynamicCast.h"
#include "Scope.h"
#include "SwitchEnumBuilder.h"
#include "swift/AST/ASTMangler.h"
#include "swift/AST/GenericEnvironment.h"
#include "swift/AST/Module.h"
#include "swift/AST/NameLookup.h"
#include "swift/AST/ProtocolConformance.h"
#include "swift/Basic/ProfileCounter.h"
#include "swift/SIL/FormalLinkage.h"
#include "swift/SIL/PrettyStackTrace.h"
#include "swift/SIL/SILArgument.h"
#include "swift/SIL/SILDebuggerClient.h"
#include "swift/SIL/SILType.h"
#include "swift/SIL/TypeLowering.h"
#include "llvm/ADT/SmallString.h"
#include <iterator>
using namespace swift;
using namespace Lowering;
// Out-of-line "anchor" methods; presumably these give the classes a home
// translation unit for their vtables (standard LLVM/Swift practice) — the
// empty bodies carry no behavior.
void Initialization::_anchor() {}
void SILDebuggerClient::anchor() {}
namespace {
/// A "null" initialization that indicates that any value being initialized
/// into this initialization should be discarded. This represents AnyPatterns
/// (that is, 'var (_)') that bind to values without storing them.
class BlackHoleInitialization : public Initialization {
public:
BlackHoleInitialization() {}
// An ignored binding can always be destructured element-wise.
bool canSplitIntoTupleElements() const override {
return true;
}
MutableArrayRef<InitializationPtr>
splitIntoTupleElements(SILGenFunction &SGF, SILLocation loc,
CanType type,
SmallVectorImpl<InitializationPtr> &buf) override {
// "Destructure" an ignored binding into multiple ignored bindings.
for (auto fieldType : cast<TupleType>(type)->getElementTypes()) {
(void) fieldType;
buf.push_back(InitializationPtr(new BlackHoleInitialization()));
}
return buf;
}
void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
ManagedValue value, bool isInit) override {
/// This just ignores the provided value.
}
void finishUninitialized(SILGenFunction &SGF) override {
// do nothing
}
};
} // end anonymous namespace
/// Shared helper for initializing a tuple's sub-initializations from a
/// tuple-typed aggregate: for each element index, `func` projects element i
/// of `value` (given the lowered element type), and the projected element is
/// copied/initialized into the matching sub-initialization, which is then
/// finished immediately.
static void copyOrInitValueIntoHelper(
SILGenFunction &SGF, SILLocation loc, ManagedValue value, bool isInit,
ArrayRef<InitializationPtr> subInitializations,
llvm::function_ref<ManagedValue(ManagedValue, unsigned, SILType)> func) {
auto sourceType = value.getType().castTo<TupleType>();
auto sourceSILType = value.getType();
for (unsigned i = 0, e = sourceType->getNumElements(); i != e; ++i) {
SILType fieldTy = sourceSILType.getTupleElementType(i);
ManagedValue elt = func(value, i, fieldTy);
subInitializations[i]->copyOrInitValueInto(SGF, loc, elt, isInit);
subInitializations[i]->finishInitialization(SGF);
}
}
/// Distribute a tuple value into this initialization's element
/// sub-initializations, handling both object and address representations.
void TupleInitialization::copyOrInitValueInto(SILGenFunction &SGF,
SILLocation loc,
ManagedValue value, bool isInit) {
// In the object case, emit a destructure operation and return.
if (value.getType().isObject()) {
return SGF.B.emitDestructureValueOperation(
loc, value, [&](unsigned i, ManagedValue subValue) {
auto &subInit = SubInitializations[i];
subInit->copyOrInitValueInto(SGF, loc, subValue, isInit);
subInit->finishInitialization(SGF);
});
}
// In the address case, we forward the underlying value and store it
// into memory and then create a +1 cleanup. since we assume here
// that we have a +1 value since we are forwarding into memory.
assert(value.isPlusOne(SGF) && "Can not store a +0 value into memory?!");
value = ManagedValue::forUnmanaged(value.forward(SGF));
return copyOrInitValueIntoHelper(
SGF, loc, value, isInit, SubInitializations,
[&](ManagedValue aggregate, unsigned i,
SILType fieldType) -> ManagedValue {
ManagedValue elt =
SGF.B.createTupleElementAddr(loc, value, i, fieldType);
// Loadable elements are loaded with "take" so ownership moves out
// of the aggregate's memory; address-only elements stay in place.
if (!fieldType.isAddressOnly(SGF.F)) {
return SGF.B.createLoadTake(loc, elt);
}
return SGF.emitManagedRValueWithCleanup(elt.getValue());
});
}
/// Abandoning a tuple initialization abandons each of its element
/// sub-initializations in turn.
void TupleInitialization::finishUninitialized(SILGenFunction &SGF) {
  for (size_t i = 0, n = SubInitializations.size(); i != n; ++i)
    SubInitializations[i]->finishUninitialized(SGF);
}
namespace {
/// Cleanup that emits a destroy_value for a retained closure value when the
/// cleanup is popped.
class CleanupClosureConstant : public Cleanup {
SILValue closure;
public:
CleanupClosureConstant(SILValue closure) : closure(closure) {}
void emit(SILGenFunction &SGF, CleanupLocation l,
ForUnwind_t forUnwind) override {
SGF.B.emitDestroyValueOperation(l, closure);
}
void dump(SILGenFunction &) const override {
#ifndef NDEBUG
llvm::errs() << "CleanupClosureConstant\n"
<< "State:" << getState() << "\n"
<< "closure:" << closure << "\n";
#endif
}
};
} // end anonymous namespace
// Delegates to the current SILFunction's forwarding substitution map.
SubstitutionMap SILGenFunction::getForwardingSubstitutionMap() {
return F.getForwardingSubstitutionMap();
}
/// Emit SIL for a local function declaration encountered while lowering the
/// enclosing function's body.
void SILGenFunction::visitFuncDecl(FuncDecl *fd) {
// Generate the local function body.
SGM.emitFunction(fd);
}
/// Split this single-buffer initialization into per-element tuple
/// initializations by projecting element addresses out of the in-place
/// buffer; the element cleanups are recorded in SplitCleanups so that
/// finishInitialization() can forward them later.
MutableArrayRef<InitializationPtr>
SingleBufferInitialization::
splitIntoTupleElements(SILGenFunction &SGF, SILLocation loc, CanType type,
SmallVectorImpl<InitializationPtr> &buf) {
assert(SplitCleanups.empty() && "getting sub-initializations twice?");
auto address = getAddressForInPlaceInitialization(SGF, loc);
return splitSingleBufferIntoTupleElements(SGF, loc, type, address,
buf, SplitCleanups);
}
/// Destructure the tuple buffer at `baseAddr` into one temporary
/// initialization per element; any valid element cleanups are appended to
/// `splitCleanups` and the new initializations to `buf`.
MutableArrayRef<InitializationPtr>
SingleBufferInitialization::
splitSingleBufferIntoTupleElements(SILGenFunction &SGF, SILLocation loc,
CanType type, SILValue baseAddr,
SmallVectorImpl<InitializationPtr> &buf,
TinyPtrVector<CleanupHandle::AsPointer> &splitCleanups) {
// Destructure the buffer into per-element buffers.
for (auto i : indices(cast<TupleType>(type)->getElementTypes())) {
// Project the element.
SILValue eltAddr = SGF.B.createTupleElementAddr(loc, baseAddr, i);
// Create an initialization to initialize the element.
auto &eltTL = SGF.getTypeLowering(eltAddr->getType());
auto eltInit = SGF.useBufferAsTemporary(eltAddr, eltTL);
// Remember the element cleanup.
auto eltCleanup = eltInit->getInitializedCleanup();
if (eltCleanup.isValid())
splitCleanups.push_back(eltCleanup);
buf.emplace_back(eltInit.release());
}
return buf;
}
/// Store `value` into `destAddr`, either by copying (isInit == false, source
/// keeps ownership) or by forwarding/initializing (isInit == true).
void SingleBufferInitialization::
copyOrInitValueIntoSingleBuffer(SILGenFunction &SGF, SILLocation loc,
ManagedValue value, bool isInit,
SILValue destAddr) {
// Emit an unchecked access around initialization of the local buffer to
// silence access marker verification.
//
// FIXME: This is not a good place for FormalEvaluationScope +
// UnenforcedFormalAccess. However, there's no way to identify the buffer
// initialization sequence after SILGen, and no easy way to wrap the
// Initialization in an access during top-level expression evaluation.
FormalEvaluationScope scope(SGF);
if (!isInit) {
// Copy-initialization: the source retains ownership of its value.
assert(value.getValue() != destAddr && "copying in place?!");
SILValue accessAddr =
UnenforcedFormalAccess::enter(SGF, loc, destAddr, SILAccessKind::Modify);
value.copyInto(SGF, loc, accessAddr);
return;
}
// If we didn't evaluate into the initialization buffer, do so now.
if (value.getValue() != destAddr) {
SILValue accessAddr =
UnenforcedFormalAccess::enter(SGF, loc, destAddr, SILAccessKind::Modify);
value.forwardInto(SGF, loc, accessAddr);
} else {
// If we did evaluate into the initialization buffer, disable the
// cleanup.
value.forwardCleanup(SGF);
}
}
/// Completing the whole-buffer initialization forwards (deactivates) any
/// per-element cleanups created by splitIntoTupleElements().
void SingleBufferInitialization::finishInitialization(SILGenFunction &SGF) {
// Forward all of the split element cleanups, assuming we made any.
for (CleanupHandle eltCleanup : SplitCleanups)
SGF.Cleanups.forwardCleanup(eltCleanup);
}
// A known address initializes a global iff it comes from a global_addr inst.
bool KnownAddressInitialization::isInPlaceInitializationOfGlobal() const {
return isa<GlobalAddrInst>(address);
}
// A temporary initializes a global iff its buffer is a global_addr inst.
bool TemporaryInitialization::isInPlaceInitializationOfGlobal() const {
return isa<GlobalAddrInst>(Addr);
}
/// After forwarding any split element cleanups, activate this temporary's
/// own destroy cleanup now that the buffer holds an initialized value.
void TemporaryInitialization::finishInitialization(SILGenFunction &SGF) {
SingleBufferInitialization::finishInitialization(SGF);
if (Cleanup.isValid())
SGF.Cleanups.setCleanupState(Cleanup, CleanupState::Active);
}
namespace {
/// Cleanup that destroys a SIL value: destroy_addr when the value is an
/// address, destroy_value otherwise.
class ReleaseValueCleanup : public Cleanup {
SILValue v;
public:
ReleaseValueCleanup(SILValue v) : v(v) {}
void emit(SILGenFunction &SGF, CleanupLocation l,
ForUnwind_t forUnwind) override {
if (v->getType().isAddress())
SGF.B.createDestroyAddr(l, v);
else
SGF.B.emitDestroyValueOperation(l, v);
}
void dump(SILGenFunction &) const override {
#ifndef NDEBUG
llvm::errs() << "ReleaseValueCleanup\n"
<< "State:" << getState() << "\n"
<< "Value:" << v << "\n";
#endif
}
};
} // end anonymous namespace
namespace {
/// Cleanup that deallocates a stack allocation (dealloc_stack). Note: it
/// only releases the storage; it does not destroy any value stored there.
class DeallocStackCleanup : public Cleanup {
SILValue Addr;
public:
DeallocStackCleanup(SILValue addr) : Addr(addr) {}
void emit(SILGenFunction &SGF, CleanupLocation l,
ForUnwind_t forUnwind) override {
SGF.B.createDeallocStack(l, Addr);
}
void dump(SILGenFunction &) const override {
#ifndef NDEBUG
llvm::errs() << "DeallocStackCleanup\n"
<< "State:" << getState() << "\n"
<< "Addr:" << Addr << "\n";
#endif
}
};
} // end anonymous namespace
namespace {
/// Cleanup to destroy an initialized 'var' variable; delegates to
/// SILGenFunction::destroyLocalVariable, which consults VarLocs.
class DestroyLocalVariable : public Cleanup {
VarDecl *Var;
public:
DestroyLocalVariable(VarDecl *var) : Var(var) {}
void emit(SILGenFunction &SGF, CleanupLocation l,
ForUnwind_t forUnwind) override {
SGF.destroyLocalVariable(l, Var);
}
void dump(SILGenFunction &SGF) const override {
#ifndef NDEBUG
llvm::errs() << "DestroyLocalVariable\n"
<< "State:" << getState() << "\n"
<< "Decl: ";
Var->print(llvm::errs());
llvm::errs() << "\n";
if (isActive()) {
auto loc = SGF.VarLocs[Var];
assert((loc.box || loc.value) && "One of box or value should be set");
if (loc.box) {
llvm::errs() << "Box: " << loc.box << "\n";
} else {
llvm::errs() << "Value: " << loc.value << "\n";
}
}
llvm::errs() << "\n";
#endif
}
};
} // end anonymous namespace
namespace {
/// Cleanup to deallocate the storage of a still-uninitialized local variable
/// (no destroy is emitted); delegates to
/// SILGenFunction::deallocateUninitializedLocalVariable.
class DeallocateUninitializedLocalVariable : public Cleanup {
VarDecl *Var;
public:
DeallocateUninitializedLocalVariable(VarDecl *var) : Var(var) {}
void emit(SILGenFunction &SGF, CleanupLocation l,
ForUnwind_t forUnwind) override {
SGF.deallocateUninitializedLocalVariable(l, Var);
}
void dump(SILGenFunction &) const override {
#ifndef NDEBUG
llvm::errs() << "DeallocateUninitializedLocalVariable\n"
<< "State:" << getState() << "\n";
// TODO: Make sure we dump var.
llvm::errs() << "\n";
#endif
}
};
} // end anonymous namespace
namespace {
/// An initialization of a local 'var'.
///
/// Allocates a heap box for the variable up front (the variable may be
/// captured by a closure), records its projected address in VarLocs, and
/// manages the pair of cleanups that cover the uninitialized and
/// initialized states of the storage.
class LocalVariableInitialization : public SingleBufferInitialization {
  /// The local variable decl being initialized.
  VarDecl *decl;
  SILGenFunction &SGF;

  /// The cleanup we pushed to deallocate the local variable before it
  /// gets initialized.
  CleanupHandle DeallocCleanup;

  /// The cleanup we pushed to destroy and deallocate the local variable.
  CleanupHandle ReleaseCleanup;

  /// Guards against double-finishing; verified in the destructor.
  bool DidFinish = false;
public:
  /// Sets up an initialization for the allocated box. This pushes a
  /// CleanupUninitializedBox cleanup that will be replaced when
  /// initialization is completed.
  LocalVariableInitialization(VarDecl *decl,
                              Optional<MarkUninitializedInst::Kind> kind,
                              uint16_t ArgNo, SILGenFunction &SGF)
      : decl(decl), SGF(SGF) {
    assert(decl->getDeclContext()->isLocalContext() &&
           "can't emit a local var for a non-local var decl");
    assert(decl->hasStorage() && "can't emit storage for a computed variable");
    assert(!SGF.VarLocs.count(decl) && "Already have an entry for this decl?");

    auto boxType = SGF.SGM.Types
      .getContextBoxTypeForCapture(decl,
                     SGF.SGM.Types.getLoweredRValueType(decl->getType()),
                     SGF.F.getGenericEnvironment(),
                     /*mutable*/ true);

    // The variable may have its lifetime extended by a closure, heap-allocate
    // it using a box.
    SILDebugVariable DbgVar(decl->isLet(), ArgNo);
    SILValue allocBox = SGF.B.createAllocBox(decl, boxType, DbgVar);

    // Mark the memory as uninitialized, so DI will track it for us.
    if (kind)
      allocBox = SGF.B.createMarkUninitialized(decl, allocBox, kind.getValue());

    SILValue addr = SGF.B.createProjectBox(decl, allocBox, 0);

    /// Remember that this is the memory location that we're emitting the
    /// decl to.
    SGF.VarLocs[decl] = SILGenFunction::VarLoc::get(addr, allocBox);

    // Push a cleanup to destroy the local variable.  This has to be
    // inactive until the variable is initialized.
    SGF.Cleanups.pushCleanupInState<DestroyLocalVariable>(CleanupState::Dormant,
                                                          decl);
    ReleaseCleanup = SGF.Cleanups.getTopCleanup();

    // Push a cleanup to deallocate the local variable.  Note the order: this
    // sits above the destroy cleanup so the two can be flipped atomically in
    // finishInitialization.
    SGF.Cleanups.pushCleanup<DeallocateUninitializedLocalVariable>(decl);
    DeallocCleanup = SGF.Cleanups.getTopCleanup();
  }

  ~LocalVariableInitialization() override {
    assert(DidFinish && "did not call VarInit::finishInitialization!");
  }

  /// The address of the variable's storage, as recorded in VarLocs.
  SILValue getAddress() const {
    assert(SGF.VarLocs.count(decl) && "did not emit var?!");
    return SGF.VarLocs[decl].value;
  }

  SILValue getAddressForInPlaceInitialization(SILGenFunction &SGF,
                                              SILLocation loc) override {
    return getAddress();
  }

  bool isInPlaceInitializationOfGlobal() const override {
    return isa<GlobalAddrInst>(getAddress());
  }

  void finishUninitialized(SILGenFunction &SGF) override {
    LocalVariableInitialization::finishInitialization(SGF);
  }

  /// The variable is now initialized: kill the uninitialized-deallocation
  /// cleanup and activate the destroy cleanup.
  void finishInitialization(SILGenFunction &SGF) override {
    SingleBufferInitialization::finishInitialization(SGF);
    assert(!DidFinish &&
           "called LocalVariableInitialization::finishInitialization twice!");
    SGF.Cleanups.setCleanupState(DeallocCleanup, CleanupState::Dead);
    SGF.Cleanups.setCleanupState(ReleaseCleanup, CleanupState::Active);
    DidFinish = true;
  }
};
} // end anonymous namespace
namespace {
/// Initialize a writeback buffer that receives the value of a 'let'
/// declaration.
class LetValueInitialization : public Initialization {
  /// The VarDecl for the let decl.
  VarDecl *vd;

  /// The address of the buffer used for the binding, if this is an
  /// address-only let.
  SILValue address;

  /// The cleanup we pushed to destroy the local variable.
  CleanupHandle DestroyCleanup;

  /// Cleanups we introduced when splitting into tuple elements; forwarded
  /// in finishInitialization.
  TinyPtrVector<CleanupHandle::AsPointer> SplitCleanups;

  /// Guards against double-finishing; verified in the destructor.
  bool DidFinish = false;

public:
  LetValueInitialization(VarDecl *vd, SILGenFunction &SGF) : vd(vd) {
    auto &lowering = SGF.getTypeLowering(vd->getType());

    // Decide whether we need a temporary stack buffer to evaluate this 'let'.
    // There are three cases we need to handle here: parameters, initialized (or
    // bound) decls, and uninitialized ones.
    bool needsTemporaryBuffer;
    bool isUninitialized = false;

    assert(!isa<ParamDecl>(vd)
           && "should not bind function params on this path");
    if (vd->getParentPatternBinding() && !vd->getParentInitializer()) {
      // This value is uninitialized (and unbound) if it has a pattern binding
      // decl, with no initializer value.
      assert(!vd->hasNonPatternBindingInit() && "Bound values aren't uninit!");

      // If this is a let-value without an initializer, then we need a temporary
      // buffer.  DI will make sure it is only assigned to once.
      needsTemporaryBuffer = true;
      isUninitialized = true;
    } else {
      // If this is a let with an initializer or bound value, we only need a
      // buffer if the type is address only.
      needsTemporaryBuffer =
          lowering.isAddressOnly() && SGF.silConv.useLoweredAddresses();
    }

    if (needsTemporaryBuffer) {
      address = SGF.emitTemporaryAllocation(vd, lowering.getLoweredType());
      if (isUninitialized)
        address = SGF.B.createMarkUninitializedVar(vd, address);
      DestroyCleanup = SGF.enterDormantTemporaryCleanup(address, lowering);
      SGF.VarLocs[vd] = SILGenFunction::VarLoc::get(address);
    } else if (!lowering.isTrivial()) {
      // Push a cleanup to destroy the let declaration.  This has to be
      // inactive until the variable is initialized: if control flow exits the
      // scope before the value is bound, we don't want to destroy the value.
      SGF.Cleanups.pushCleanupInState<DestroyLocalVariable>(
          CleanupState::Dormant, vd);
      DestroyCleanup = SGF.Cleanups.getTopCleanup();
    } else {
      // Trivial values require no destruction; record an invalid handle.
      DestroyCleanup = CleanupHandle::invalid();
    }
  }

  ~LetValueInitialization() override {
    assert(DidFinish && "did not call LetValueInit::finishInitialization!");
  }

  bool hasAddress() const { return (bool)address; }

  bool canPerformInPlaceInitialization() const override {
    return hasAddress();
  }
  bool isInPlaceInitializationOfGlobal() const override {
    return isa<GlobalAddrInst>(address);
  }
  SILValue getAddressForInPlaceInitialization(SILGenFunction &SGF,
                                              SILLocation loc) override {
    // Emit into the buffer that 'let's produce for address-only values if
    // we have it.
    assert(hasAddress());
    return address;
  }

  /// Return true if we can get the addresses of elements with the
  /// 'getSubInitializationsForTuple' method.
  ///
  /// Let-value initializations cannot be broken into constituent pieces if a
  /// scalar value needs to be bound.  If there is an address in play, then we
  /// can initialize the address elements of the tuple though.
  bool canSplitIntoTupleElements() const override {
    return hasAddress();
  }

  MutableArrayRef<InitializationPtr>
  splitIntoTupleElements(SILGenFunction &SGF, SILLocation loc, CanType type,
                         SmallVectorImpl<InitializationPtr> &buf) override {
    // Delegate to the single-buffer implementation, collecting the cleanups
    // it creates into SplitCleanups so finishInitialization can forward them.
    assert(SplitCleanups.empty());
    auto address = getAddressForInPlaceInitialization(SGF, loc);
    return SingleBufferInitialization
       ::splitSingleBufferIntoTupleElements(SGF, loc, type, address, buf,
                                            SplitCleanups);
  }

  /// Record \p value in VarLocs as the binding of this 'let' and emit the
  /// debug info marking the start of its lifetime.
  void bindValue(SILValue value, SILGenFunction &SGF) {
    assert(!SGF.VarLocs.count(vd) && "Already emitted this vardecl?");
    // If we're binding an address to this let value, then we can use it as an
    // address later.  This happens when binding an address only parameter to
    // an argument, for example.
    if (value->getType().isAddress())
      address = value;
    SGF.VarLocs[vd] = SILGenFunction::VarLoc::get(value);

    // Emit a debug_value[_addr] instruction to record the start of this value's
    // lifetime.
    SILLocation PrologueLoc(vd);
    PrologueLoc.markAsPrologue();
    SILDebugVariable DbgVar(vd->isLet(), /*ArgNo=*/0);
    if (address)
      SGF.B.createDebugValueAddr(PrologueLoc, value, DbgVar);
    else
      SGF.B.createDebugValue(PrologueLoc, value, DbgVar);
  }

  void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                           ManagedValue value, bool isInit) override {
    // If this let value has an address, we can handle it just like a single
    // buffer value.
    if (hasAddress())
      return SingleBufferInitialization::
        copyOrInitValueIntoSingleBuffer(SGF, loc, value, isInit, address);

    // Otherwise, we bind the value.
    if (isInit) {
      // Disable the rvalue expression cleanup, since the let value
      // initialization has a cleanup that lives for the entire scope of the
      // let declaration.
      bindValue(value.forward(SGF), SGF);
    } else {
      // Disable the expression cleanup of the copy, since the let value
      // initialization has a cleanup that lives for the entire scope of the
      // let declaration.
      bindValue(value.copyUnmanaged(SGF, loc).forward(SGF), SGF);
    }
  }

  void finishUninitialized(SILGenFunction &SGF) override {
    LetValueInitialization::finishInitialization(SGF);
  }

  void finishInitialization(SILGenFunction &SGF) override {
    assert(!DidFinish &&
           "called LetValueInit::finishInitialization twice!");
    assert(SGF.VarLocs.count(vd) && "Didn't bind a value to this let!");

    // Deactivate any cleanups we made when splitting the tuple.
    for (auto cleanup : SplitCleanups)
      SGF.Cleanups.forwardCleanup(cleanup);

    // Activate the destroy cleanup.
    if (DestroyCleanup != CleanupHandle::invalid())
      SGF.Cleanups.setCleanupState(DestroyCleanup, CleanupState::Active);

    DidFinish = true;
  }
};
} // end anonymous namespace
namespace {
/// Initialize a variable of reference-storage type.
class ReferenceStorageInitialization : public Initialization {
  /// The underlying initialization for the variable's storage.  Must support
  /// in-place initialization, since we always write through its address.
  InitializationPtr VarInit;
public:
  ReferenceStorageInitialization(InitializationPtr &&subInit)
    : VarInit(std::move(subInit)) {
    assert(VarInit->canPerformInPlaceInitialization());
  }

  void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                           ManagedValue value, bool isInit) override {
    auto address = VarInit->getAddressForInPlaceInitialization(SGF, loc);
    // If this is not an initialization, copy the value before we translate it;
    // translation expects a +1 value.
    if (isInit)
      value.forwardInto(SGF, loc, address);
    else
      value.copyInto(SGF, loc, address);
  }

  void finishUninitialized(SILGenFunction &SGF) override {
    ReferenceStorageInitialization::finishInitialization(SGF);
  }

  void finishInitialization(SILGenFunction &SGF) override {
    VarInit->finishInitialization(SGF);
  }
};
} // end anonymous namespace
namespace {
/// Abstract base class for refutable pattern initializations.
class RefutablePatternInitialization : public Initialization {
  /// This is the label to jump to if the pattern fails to match.
  JumpDest failureDest;
public:
  RefutablePatternInitialization(JumpDest failureDest)
    : failureDest(failureDest) {
    assert(failureDest.isValid() &&
           "Refutable patterns can only exist in failable conditions");
  }

  JumpDest getFailureDest() const { return failureDest; }

  void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                           ManagedValue value, bool isInit) override = 0;

  /// Helper for subclasses: allocate storage for \p var and forward \p value
  /// into it as an rvalue of \p formalValueType.
  void bindVariable(SILLocation loc, VarDecl *var, ManagedValue value,
                    CanType formalValueType, SILGenFunction &SGF) {
    // Initialize the variable value.
    InitializationPtr init = SGF.emitInitializationForVarDecl(var, var->isLet());
    RValue(SGF, loc, formalValueType, value).forwardInto(SGF, loc, init.get());
  }
};
} // end anonymous namespace
namespace {
/// Initialization for an ExprPattern: binds the pattern's match variable,
/// evaluates the match expression, and branches to the failure destination
/// when the match fails.
class ExprPatternInitialization : public RefutablePatternInitialization {
  ExprPattern *P;
public:
  ExprPatternInitialization(ExprPattern *P, JumpDest patternFailDest)
    : RefutablePatternInitialization(patternFailDest), P(P) {}

  void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                           ManagedValue value, bool isInit) override;
};
} // end anonymous namespace
void ExprPatternInitialization::
copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                    ManagedValue value, bool isInit) {
  assert(isInit && "Only initialization is supported for refutable patterns");

  // Bind the match variable to the incoming value for the duration of the
  // match-expression evaluation.
  FullExpr scope(SGF.Cleanups, CleanupLocation(P));
  bindVariable(P, P->getMatchVar(), value,
               P->getType()->getCanonicalType(), SGF);

  // Emit the match test.
  SILValue testBool;
  {
    FullExpr scope(SGF.Cleanups, CleanupLocation(P->getMatchExpr()));
    testBool = SGF.emitRValueAsSingleValue(P->getMatchExpr()).
       getUnmanagedValue();
  }

  // Unwrap the Bool struct to its raw i1 and branch: fall through on a
  // match, jump to the failure destination (running cleanups) otherwise.
  assert(testBool->getType().getASTType()->isBool());
  auto i1Value = SGF.emitUnwrapIntegerResult(loc, testBool);

  SILBasicBlock *contBB = SGF.B.splitBlockForFallthrough();
  auto falseBB = SGF.Cleanups.emitBlockForCleanups(getFailureDest(), loc);
  SGF.B.createCondBranch(loc, i1Value, contBB, falseBB);
  SGF.B.setInsertionPoint(contBB);
}
namespace {
/// Initialization for matching an enum-element pattern: switches on the
/// value's case, jumps to the failure destination on mismatch, and feeds the
/// associated value (if any) into a sub-initialization.
class EnumElementPatternInitialization : public RefutablePatternInitialization {
  EnumElementDecl *ElementDecl;
  InitializationPtr subInitialization;
public:
  EnumElementPatternInitialization(EnumElementDecl *ElementDecl,
                                   InitializationPtr &&subInitialization,
                                   JumpDest patternFailDest)
      : RefutablePatternInitialization(patternFailDest), ElementDecl(ElementDecl),
        subInitialization(std::move(subInitialization)) {}

  void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                           ManagedValue value, bool isInit) override {
    assert(isInit && "Only initialization is supported for refutable patterns");
    emitEnumMatch(value, ElementDecl, subInitialization.get(), getFailureDest(),
                  loc, SGF);
  }

  /// Shared implementation; also reused by IsPatternInitialization for
  /// matching the '.some' case of a checked-cast result.
  static void emitEnumMatch(ManagedValue value, EnumElementDecl *ElementDecl,
                            Initialization *subInit, JumpDest FailureDest,
                            SILLocation loc, SILGenFunction &SGF);

  void finishInitialization(SILGenFunction &SGF) override {
    if (subInitialization.get())
      subInitialization->finishInitialization(SGF);
  }
};
} // end anonymous namespace
/// If \p elt belongs to an enum that has exactly two cases and that can be
/// exhaustively switched, return the other case. Otherwise, return nullptr.
static EnumElementDecl *getOppositeBinaryDecl(const SILGenFunction &SGF,
                                              const EnumElementDecl *elt) {
  const EnumDecl *enumDecl = elt->getParentEnum();

  // A non-exhaustive enum can always grow more cases, so it never counts as
  // binary.
  if (!enumDecl->isEffectivelyExhaustive(SGF.SGM.SwiftModule,
                                         SGF.F.getResilienceExpansion()))
    return nullptr;

  // Walk the case list, remembering whether we encountered \p elt and which
  // single other case exists. Seeing a third case means the enum is not
  // binary, so we can bail out immediately.
  bool sawElt = false;
  EnumElementDecl *otherDecl = nullptr;
  unsigned numCases = 0;
  for (auto *curCase : enumDecl->getAllElements()) {
    if (++numCases > 2)
      return nullptr;
    if (curCase == elt)
      sawElt = true;
    else
      otherDecl = curCase;
  }

  // "Binary" means exactly two cases, one of which must be \p elt.
  if (numCases != 2 || !sawElt)
    return nullptr;
  return otherDecl;
}
void EnumElementPatternInitialization::emitEnumMatch(
    ManagedValue value, EnumElementDecl *eltDecl, Initialization *subInit,
    JumpDest failureDest, SILLocation loc, SILGenFunction &SGF) {

  // Create all of the blocks early so we can maintain a consistent ordering
  // (and update fewer tests).
  //
  // *NOTE* This needs to be in reverse order to preserve the textual SIL.
  auto *contBlock = SGF.createBasicBlock();
  auto *someBlock = SGF.createBasicBlock();
  auto *defaultBlock = SGF.createBasicBlock();
  auto *originalBlock = SGF.B.getInsertionBB();

  SwitchEnumBuilder switchBuilder(SGF.B, loc, value);

  // Handle the none case.
  //
  // *NOTE*: Since we are performing an initialization here, it is *VERY*
  // important that we emit the negative case first. The reason why is that
  // currently the initialization has a dormant cleanup in a scope that may be
  // after the failureDest depth. Once we run the positive case, this
  // initialization will be enabled. Thus if we run the negative case /after/
  // the positive case, a cleanup will be emitted for the initialization on the
  // negative path... but the actual initialization happened on the positive
  // path, causing a use (the destroy on the negative path) to be created that
  // does not dominate its definition (in the positive path).
  auto handler = [&SGF, &loc, &failureDest](ManagedValue mv,
                                            SwitchCaseFullExpr &&expr) {
    expr.exit();
    SGF.Cleanups.emitBranchAndCleanups(failureDest, loc);
  };

  // If we have a binary enum, do not emit a true default case. This ensures
  // that we do not emit a destroy_value on a .None.
  bool inferredBinaryEnum = false;
  if (auto *otherDecl = getOppositeBinaryDecl(SGF, eltDecl)) {
    inferredBinaryEnum = true;
    switchBuilder.addCase(otherDecl, defaultBlock, nullptr, handler);
  } else {
    switchBuilder.addDefaultCase(
        defaultBlock, nullptr, handler,
        SwitchEnumBuilder::DefaultDispatchTime::BeforeNormalCases);
  }

  // Always insert the some case at the front of the list. In the default case,
  // this will not matter, but in the case where we have a binary enum, we want
  // to preserve the old ordering of .some/.none. to make it easier to update
  // tests.
  switchBuilder.addCase(
      eltDecl, someBlock, contBlock,
      [&SGF, &loc, &eltDecl, &subInit, &value](ManagedValue mv,
                                               SwitchCaseFullExpr &&expr) {
        // If the enum case has no bound value, we're done.
        if (!eltDecl->hasAssociatedValues()) {
          assert(
              subInit == nullptr &&
              "Cannot have a subinit when there is no value to match against");
          expr.exitAndBranch(loc);
          return;
        }

        if (subInit == nullptr) {
          // If there is no subinitialization, then we are done matching.  Don't
          // bother projecting out the any elements value only to ignore it.
          expr.exitAndBranch(loc);
          return;
        }

        // Otherwise, the bound value for the enum case is available.
        SILType eltTy = value.getType().getEnumElementType(eltDecl, SGF.SGM.M);
        auto &eltTL = SGF.getTypeLowering(eltTy);

        if (mv.getType().isAddress()) {
          // If the enum is address-only, take from the enum we have and load
          // it if the element value is loadable.
          assert((eltTL.isTrivial() || mv.hasCleanup()) &&
                 "must be able to consume value");
          mv = SGF.B.createUncheckedTakeEnumDataAddr(loc, mv, eltDecl, eltTy);
          // Load a loadable data value.
          if (eltTL.isLoadable())
            mv = SGF.B.createLoadTake(loc, mv);
        }

        // If the payload is indirect, project it out of the box.
        if (eltDecl->isIndirect() || eltDecl->getParentEnum()->isIndirect()) {
          ManagedValue boxedValue = SGF.B.createProjectBox(loc, mv, 0);
          auto &boxedTL = SGF.getTypeLowering(boxedValue.getType());
          // We must treat the boxed value as +0 since it may be shared. Copy it
          // if nontrivial.
          //
          // NOTE: The APIs that we are using here will ensure that if we have
          // a trivial value, the load_borrow will become a load [trivial] and
          // the copies will be "automagically" elided.
          if (boxedTL.isLoadable() || !SGF.silConv.useLoweredAddresses()) {
            UnenforcedAccess access;
            SILValue accessAddress = access.beginAccess(
                SGF, loc, boxedValue.getValue(), SILAccessKind::Read);
            auto mvAccessAddress = ManagedValue::forUnmanaged(accessAddress);
            {
              Scope loadScope(SGF, loc);
              ManagedValue borrowedVal =
                  SGF.B.createLoadBorrow(loc, mvAccessAddress);
              mv = loadScope.popPreservingValue(
                  borrowedVal.copyUnmanaged(SGF, loc));
            }
            access.endAccess(SGF);
          } else {
            // If we do not have a loadable value, just do a copy of the
            // boxedValue.
            mv = boxedValue.copyUnmanaged(SGF, loc);
          }
        }

        // Reabstract to the substituted type, if needed.
        CanType substEltTy =
            value.getType()
                .getASTType()
                ->getTypeOfMember(SGF.SGM.M.getSwiftModule(), eltDecl,
                                  eltDecl->getArgumentInterfaceType())
                ->getCanonicalType();

        AbstractionPattern origEltTy =
            (eltDecl == SGF.getASTContext().getOptionalSomeDecl()
                 ? AbstractionPattern(substEltTy)
                 : SGF.SGM.M.Types.getAbstractionPattern(eltDecl));

        mv = SGF.emitOrigToSubstValue(loc, mv, origEltTy, substEltTy);

        // Pass the +1 value down into the sub initialization.
        subInit->copyOrInitValueInto(SGF, loc, mv, /*is an init*/ true);
        expr.exitAndBranch(loc);
      });

  std::move(switchBuilder).emit();

  // If we inferred a binary enum, put the asked for case first so we preserve
  // the current code structure. This just ensures that fewer test updates are
  // needed.
  if (inferredBinaryEnum) {
    if (auto *switchEnum =
            dyn_cast<SwitchEnumInst>(originalBlock->getTerminator())) {
      switchEnum->swapCase(0, 1);
    } else {
      auto *switchEnumAddr =
          cast<SwitchEnumAddrInst>(originalBlock->getTerminator());
      switchEnumAddr->swapCase(0, 1);
    }
  }

  // Reset the insertion point to the end of contBlock.
  SGF.B.setInsertionPoint(contBlock);
}
namespace {
/// Initialization for an 'is' (checked-cast) pattern: performs a conditional
/// cast and, on success, feeds the cast result to a sub-initialization.
class IsPatternInitialization : public RefutablePatternInitialization {
  IsPattern *pattern;
  InitializationPtr subInitialization;
public:
  IsPatternInitialization(IsPattern *pattern,
                          InitializationPtr &&subInitialization,
                          JumpDest patternFailDest)
    : RefutablePatternInitialization(patternFailDest), pattern(pattern),
      subInitialization(std::move(subInitialization)) {}

  void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                           ManagedValue value, bool isInit) override;

  void finishInitialization(SILGenFunction &SGF) override {
    if (subInitialization.get())
      subInitialization->finishInitialization(SGF);
  }
};
} // end anonymous namespace
void IsPatternInitialization::
copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                    ManagedValue value, bool isInit) {
  assert(isInit && "Only initialization is supported for refutable patterns");

  // Try to perform the cast to the destination type, producing an optional
  // that indicates whether we succeeded.
  auto destType = OptionalType::get(pattern->getCastTypeLoc().getType());

  value =
      emitConditionalCheckedCast(SGF, loc, value, pattern->getType(), destType,
                                 pattern->getCastKind(), SGFContext(),
                                 ProfileCounter(), ProfileCounter())
          .getAsSingleValue(SGF, loc);

  // Now that we have our result as an optional, we can use an enum projection
  // to do all the work: match '.some' and hand its payload to the sub-init.
  EnumElementPatternInitialization::
  emitEnumMatch(value, SGF.getASTContext().getOptionalSomeDecl(),
                subInitialization.get(), getFailureDest(), loc, SGF);
}
namespace {
/// Initialization for a 'true'/'false' boolean literal pattern.
class BoolPatternInitialization : public RefutablePatternInitialization {
  BoolPattern *pattern;
public:
  BoolPatternInitialization(BoolPattern *pattern,
                            JumpDest patternFailDest)
    : RefutablePatternInitialization(patternFailDest), pattern(pattern) {}

  void copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                           ManagedValue value, bool isInit) override;
};
} // end anonymous namespace
void BoolPatternInitialization::
copyOrInitValueInto(SILGenFunction &SGF, SILLocation loc,
                    ManagedValue value, bool isInit) {
  assert(isInit && "Only initialization is supported for refutable patterns");

  // Extract the i1 from the Bool struct.
  auto i1Value = SGF.emitUnwrapIntegerResult(loc, value.forward(SGF));

  // Branch on the boolean based on whether we're testing for true or false.
  // The fallthrough block is always the "pattern matched" continuation;
  // swapping the successors handles a 'false' pattern.
  SILBasicBlock *trueBB = SGF.B.splitBlockForFallthrough();
  auto contBB = trueBB;
  auto falseBB = SGF.Cleanups.emitBlockForCleanups(getFailureDest(), loc);

  if (!pattern->getValue())
    std::swap(trueBB, falseBB);
  SGF.B.createCondBranch(loc, i1Value, trueBB, falseBB);
  SGF.B.setInsertionPoint(contBB);
}
namespace {
/// InitializationForPattern - A visitor for traversing a pattern, generating
/// SIL code to allocate the declared variables, and generating an
/// Initialization representing the needed initializations.
///
/// It is important that any Initialization created for a pattern that might
/// not have an immediate initializer implement finishUninitialized.  Note
/// that this only applies to irrefutable patterns.
struct InitializationForPattern
  : public PatternVisitor<InitializationForPattern, InitializationPtr>
{
  SILGenFunction &SGF;

  /// This is the place that should be jumped to if the pattern fails to match.
  /// This is invalid for irrefutable pattern initializations.
  JumpDest patternFailDest;

  InitializationForPattern(SILGenFunction &SGF, JumpDest patternFailDest)
    : SGF(SGF), patternFailDest(patternFailDest) {}

  // Paren, Typed, and Var patterns are noops, just look through them.
  InitializationPtr visitParenPattern(ParenPattern *P) {
    return visit(P->getSubPattern());
  }
  InitializationPtr visitTypedPattern(TypedPattern *P) {
    return visit(P->getSubPattern());
  }
  InitializationPtr visitVarPattern(VarPattern *P) {
    return visit(P->getSubPattern());
  }

  // AnyPatterns (i.e, _) don't require any storage. Any value bound here will
  // just be dropped.
  InitializationPtr visitAnyPattern(AnyPattern *P) {
    return InitializationPtr(new BlackHoleInitialization());
  }

  // Bind to a named pattern by creating a memory location and initializing it
  // with the initial value.
  InitializationPtr visitNamedPattern(NamedPattern *P) {
    if (!P->getDecl()->hasName()) {
      // Unnamed parameters don't require any storage. Any value bound here will
      // just be dropped.
      return InitializationPtr(new BlackHoleInitialization());
    }

    return SGF.emitInitializationForVarDecl(P->getDecl(), P->getDecl()->isLet());
  }

  // Bind a tuple pattern by aggregating the component variables into a
  // TupleInitialization.
  InitializationPtr visitTuplePattern(TuplePattern *P) {
    TupleInitialization *init = new TupleInitialization();
    for (auto &elt : P->getElements())
      init->SubInitializations.push_back(visit(elt.getPattern()));
    return InitializationPtr(init);
  }

  // The refutable patterns below all jump to patternFailDest on mismatch.
  InitializationPtr visitEnumElementPattern(EnumElementPattern *P) {
    InitializationPtr subInit;
    if (auto *subP = P->getSubPattern())
      subInit = visit(subP);
    auto *res = new EnumElementPatternInitialization(P->getElementDecl(),
                                                     std::move(subInit),
                                                     patternFailDest);
    return InitializationPtr(res);
  }
  InitializationPtr visitOptionalSomePattern(OptionalSomePattern *P) {
    InitializationPtr subInit = visit(P->getSubPattern());
    auto *res = new EnumElementPatternInitialization(P->getElementDecl(),
                                                     std::move(subInit),
                                                     patternFailDest);
    return InitializationPtr(res);
  }
  InitializationPtr visitIsPattern(IsPattern *P) {
    InitializationPtr subInit;
    if (auto *subP = P->getSubPattern())
      subInit = visit(subP);
    return InitializationPtr(new IsPatternInitialization(P, std::move(subInit),
                                                         patternFailDest));
  }
  InitializationPtr visitBoolPattern(BoolPattern *P) {
    return InitializationPtr(new BoolPatternInitialization(P, patternFailDest));
  }
  InitializationPtr visitExprPattern(ExprPattern *P) {
    return InitializationPtr(new ExprPatternInitialization(P, patternFailDest));
  }
};
} // end anonymous namespace
/// Create an Initialization for \p vd, allocating storage as appropriate for
/// its kind (debugger variable, local 'let', global, or local 'var').
InitializationPtr
SILGenFunction::emitInitializationForVarDecl(VarDecl *vd, bool forceImmutable) {
  // If this is a computed variable, we don't need to do anything here.
  // We'll generate the getter and setter when we see their FuncDecls.
  if (!vd->hasStorage())
    return InitializationPtr(new BlackHoleInitialization());

  if (vd->isDebuggerVar()) {
    // Debugger-introduced variables get their storage from the debugger
    // client instead of a locally emitted allocation.
    DebuggerClient *DebugClient = SGM.SwiftModule->getDebugClient();
    assert(DebugClient && "Debugger variables with no debugger client");
    SILDebuggerClient *SILDebugClient = DebugClient->getAsSILDebuggerClient();
    assert(SILDebugClient && "Debugger client doesn't support SIL");
    SILValue SV = SILDebugClient->emitLValueForVariable(vd, B);

    VarLocs[vd] = SILGenFunction::VarLoc::get(SV);
    return InitializationPtr(new KnownAddressInitialization(SV));
  }

  CanType varType = vd->getType()->getCanonicalType();

  assert(!isa<InOutType>(varType) && "local variables should never be inout");

  // If this is a 'let' initialization for a non-global, set up a
  // let binding, which stores the initialization value into VarLocs directly.
  if (forceImmutable && vd->getDeclContext()->isLocalContext() &&
      !isa<ReferenceStorageType>(varType))
    return InitializationPtr(new LetValueInitialization(vd, *this));

  // If the variable has no initial value, emit a mark_uninitialized instruction
  // so that DI tracks and enforces validity of it.
  bool isUninitialized =
    vd->getParentPatternBinding() && !vd->getParentInitializer();

  // If this is a global variable, initialize it without allocations or
  // cleanups.
  InitializationPtr Result;
  if (!vd->getDeclContext()->isLocalContext()) {
    auto *silG = SGM.getSILGlobalVariable(vd, NotForDefinition);
    B.createAllocGlobal(vd, silG);
    SILValue addr = B.createGlobalAddr(vd, silG);
    if (isUninitialized)
      addr = B.createMarkUninitializedVar(vd, addr);

    VarLocs[vd] = SILGenFunction::VarLoc::get(addr);
    Result = InitializationPtr(new KnownAddressInitialization(addr));
  } else {
    // Local 'var': box allocation plus the cleanup pair.
    Optional<MarkUninitializedInst::Kind> uninitKind;
    if (isUninitialized) {
      uninitKind = MarkUninitializedInst::Kind::Var;
    }
    Result = emitLocalVariableWithCleanup(vd, uninitKind);
  }

  // If we're initializing a weak or unowned variable, this requires a change in
  // type.
  if (isa<ReferenceStorageType>(varType))
    Result = InitializationPtr(new
                           ReferenceStorageInitialization(std::move(Result)));
  return Result;
}
/// Emit storage and (if present) the initial value for one entry of a
/// pattern binding declaration.
void SILGenFunction::emitPatternBinding(PatternBindingDecl *PBD,
                                        unsigned pbdEntry) {
  auto &entry = PBD->getPatternList()[pbdEntry];
  // Irrefutable context: no failure destination is needed.
  auto initialization = emitPatternBindingInitialization(entry.getPattern(),
                                                         JumpDest::invalid());

  // If an initial value expression was specified by the decl, emit it into
  // the initialization. Otherwise, mark it uninitialized for DI to resolve.
  if (auto *Init = entry.getExecutableInit()) {
    FullExpr Scope(Cleanups, CleanupLocation(Init));
    emitExprInto(Init, initialization.get(), SILLocation(PBD));
  } else {
    initialization->finishUninitialized(*this);
  }
}
/// Emit a pattern binding declaration by allocating the variables and
/// building up an Initialization over their allocated storage, one
/// pattern-binding entry at a time.
void SILGenFunction::visitPatternBindingDecl(PatternBindingDecl *PBD) {
  for (unsigned entry = 0, numEntries = PBD->getPatternList().size();
       entry != numEntries; ++entry)
    emitPatternBinding(PBD, entry);
}
void SILGenFunction::visitVarDecl(VarDecl *D) {
  // We handle emitting the variable storage when we see the pattern binding.
  // Emit the variable's accessors.
  D->visitEmittedAccessors([&](AccessorDecl *accessor) {
    SGM.emitFunction(accessor);
  });
}
/// Emit literals for the major, minor, and subminor components of the version
/// and return a tuple of SILValues for them.
static std::tuple<SILValue, SILValue, SILValue>
emitVersionLiterals(SILLocation loc, SILGenBuilder &B, ASTContext &ctx,
                    llvm::VersionTuple Vers) {
  // Absent minor/subminor components are treated as zero.
  auto valueOrZero = [](Optional<unsigned> component) -> unsigned {
    return component.hasValue() ? component.getValue() : 0;
  };

  // Emit the three literals sequentially in major, minor, subminor order so
  // the instruction order in the emitted SIL is unchanged.
  SILType wordType = SILType::getBuiltinWordType(ctx);
  SILValue majorValue =
      B.createIntegerLiteral(loc, wordType, Vers.getMajor());
  SILValue minorValue =
      B.createIntegerLiteral(loc, wordType, valueOrZero(Vers.getMinor()));
  SILValue subminorValue =
      B.createIntegerLiteral(loc, wordType, valueOrZero(Vers.getSubminor()));
  return std::make_tuple(majorValue, minorValue, subminorValue);
}
/// Emit a check that returns 1 if the running OS version is in
/// the specified version range and 0 otherwise. The returned SILValue
/// (which has type Builtin.Int1) represents the result of this check.
SILValue SILGenFunction::emitOSVersionRangeCheck(SILLocation loc,
                                                 const VersionRange &range) {
  // Emit constants for the checked version range.
  SILValue majorValue;
  SILValue minorValue;
  SILValue subminorValue;

  std::tie(majorValue, minorValue, subminorValue) =
      emitVersionLiterals(loc, B, getASTContext(), range.getLowerEndpoint());

  // Emit call to _stdlib_isOSVersionAtLeast(major, minor, patch)
  FuncDecl *versionQueryDecl =
      getASTContext().getIsOSVersionAtLeastDecl();
  assert(versionQueryDecl);

  auto silDeclRef = SILDeclRef(versionQueryDecl);
  SILValue availabilityGTEFn = emitGlobalFunctionRef(
      loc, silDeclRef, getConstantInfo(silDeclRef));

  SILValue args[] = {majorValue, minorValue, subminorValue};
  return B.createApply(loc, availabilityGTEFn, SubstitutionMap(), args);
}
/// Emit the boolean test and/or pattern bindings indicated by the specified
/// stmt condition.  If the condition fails, control flow is transferred to the
/// specified JumpDest.  The insertion point is left in the block where the
/// condition has matched and any bound variables are in scope.
///
void SILGenFunction::emitStmtCondition(StmtCondition Cond, JumpDest FalseDest,
                                       SILLocation loc,
                                       ProfileCounter NumTrueTaken,
                                       ProfileCounter NumFalseTaken) {

  assert(B.hasValidInsertionPoint() &&
         "emitting condition at unreachable point");

  for (const auto &elt : Cond) {
    SILLocation booleanTestLoc = loc;
    SILValue booleanTestValue;

    switch (elt.getKind()) {
    case StmtConditionElement::CK_PatternBinding: {
      // Conditional pattern binding: the refutable pattern machinery emits
      // its own branches to FalseDest on mismatch.
      InitializationPtr initialization =
        InitializationForPattern(*this, FalseDest).visit(elt.getPattern());

      // Emit the initial value into the initialization.
      FullExpr Scope(Cleanups, CleanupLocation(elt.getInitializer()));
      emitExprInto(elt.getInitializer(), initialization.get());
      // Pattern bindings handle their own tests, we don't need a boolean test.
      continue;
    }

    case StmtConditionElement::CK_Boolean: { // Handle boolean conditions.
      auto *expr = elt.getBoolean();
      // Evaluate the condition as an i1 value (guaranteed by Sema).
      FullExpr Scope(Cleanups, CleanupLocation(expr));
      booleanTestValue = emitRValue(expr).forwardAsSingleValue(*this, expr);
      booleanTestValue = emitUnwrapIntegerResult(expr, booleanTestValue);
      booleanTestLoc = expr;
      break;
    }
    case StmtConditionElement::CK_Availability:
      // Check the running OS version to determine whether it is in the range
      // specified by elt.
      VersionRange OSVersion = elt.getAvailability()->getAvailableRange();
      assert(!OSVersion.isEmpty());

      if (OSVersion.isAll()) {
        // If there's no check for the current platform, this condition is
        // trivially true.
        SILType i1 = SILType::getBuiltinIntegerType(1, getASTContext());
        booleanTestValue = B.createIntegerLiteral(loc, i1, true);
      } else {
        booleanTestValue = emitOSVersionRangeCheck(loc, OSVersion);
      }
      break;
    }

    // Now that we have a boolean test as a Builtin.i1, emit the branch.
    assert(booleanTestValue->getType().
           castTo<BuiltinIntegerType>()->isFixedWidth(1) &&
           "Sema forces conditions to have Builtin.i1 type");

    // Just branch on the condition.  On failure, we unwind any active
    // cleanups; on success we fall through to a new block.
    auto FailBB = Cleanups.emitBlockForCleanups(FalseDest, loc);
    SILBasicBlock *ContBB = createBasicBlock();
    B.createCondBranch(booleanTestLoc, booleanTestValue, ContBB, FailBB,
                       NumTrueTaken, NumFalseTaken);

    // Finally, emit the continue block and keep emitting the rest of the
    // condition.
    B.emitBlock(ContBB);
  }
}
/// Build an Initialization for the given pattern; if a refutable pattern
/// fails to match at runtime, control transfers to `failureDest`.
InitializationPtr
SILGenFunction::emitPatternBindingInitialization(Pattern *P,
                                                 JumpDest failureDest) {
  return InitializationForPattern(*this, failureDest).visit(P);
}
/// Enter a cleanup to deallocate the given location.
CleanupHandle SILGenFunction::enterDeallocStackCleanup(SILValue temp) {
  assert(temp->getType().isAddress() && "dealloc must have an address type");
  // The caller keeps the returned handle to deactivate/forward the cleanup.
  Cleanups.pushCleanup<DeallocStackCleanup>(temp);
  return Cleanups.getTopCleanup();
}
/// Enter a cleanup that releases/destroys the given value or address
/// when it fires.
CleanupHandle SILGenFunction::enterDestroyCleanup(SILValue valueOrAddr) {
  Cleanups.pushCleanup<ReleaseValueCleanup>(valueOrAddr);
  return Cleanups.getTopCleanup();
}
namespace {
/// A cleanup that deinitializes an opaque existential container
/// before a value has been stored into it, or after its value was taken.
class DeinitExistentialCleanup: public Cleanup {
  SILValue existentialAddr;        // the existential buffer being cleaned up
  CanType concreteFormalType;      // formal type of the contained value
  ExistentialRepresentation repr;  // how the existential is represented
public:
  DeinitExistentialCleanup(SILValue existentialAddr,
                           CanType concreteFormalType,
                           ExistentialRepresentation repr)
    : existentialAddr(existentialAddr),
      concreteFormalType(concreteFormalType),
      repr(repr) {}

  void emit(SILGenFunction &SGF, CleanupLocation l,
            ForUnwind_t forUnwind) override {
    switch (repr) {
    case ExistentialRepresentation::None:
    case ExistentialRepresentation::Class:
    case ExistentialRepresentation::Metatype:
      // These representations never register this cleanup.
      llvm_unreachable("cannot cleanup existential");
    case ExistentialRepresentation::Opaque:
      if (SGF.silConv.useLoweredAddresses()) {
        SGF.B.createDeinitExistentialAddr(l, existentialAddr);
      } else {
        SGF.B.createDeinitExistentialValue(l, existentialAddr);
      }
      break;
    case ExistentialRepresentation::Boxed:
      // Take the box out of the buffer and deallocate it; the contained
      // value itself is not destroyed here.
      auto box = SGF.B.createLoad(l, existentialAddr,
                                  LoadOwnershipQualifier::Take);
      SGF.B.createDeallocExistentialBox(l, concreteFormalType, box);
      break;
    }
  }

  void dump(SILGenFunction &) const override {
#ifndef NDEBUG
    llvm::errs() << "DeinitExistentialCleanup\n"
                 << "State:" << getState() << "\n"
                 << "Value:" << existentialAddr << "\n";
#endif
  }
};
} // end anonymous namespace
/// Enter a cleanup to emit a DeinitExistentialAddr or DeinitExistentialBox
/// of the specified value.
CleanupHandle SILGenFunction::enterDeinitExistentialCleanup(
                                               CleanupState state,
                                               SILValue addr,
                                               CanType concreteFormalType,
                                               ExistentialRepresentation repr) {
  assert(addr->getType().isAddress());
  // Pushed in the caller-requested state so the cleanup can start dormant
  // and be activated once the container is partially formed.
  Cleanups.pushCleanupInState<DeinitExistentialCleanup>(state, addr,
                                                concreteFormalType, repr);
  return Cleanups.getTopCleanup();
}
/// Create a LocalVariableInitialization for the uninitialized var.
InitializationPtr SILGenFunction::emitLocalVariableWithCleanup(
    VarDecl *vd, Optional<MarkUninitializedInst::Kind> kind, unsigned ArgNo) {
  // `kind`, when present, requests a mark_uninitialized of that flavor.
  return InitializationPtr(
      new LocalVariableInitialization(vd, kind, ArgNo, *this));
}
/// Create an Initialization for an uninitialized temporary.
std::unique_ptr<TemporaryInitialization>
SILGenFunction::emitTemporary(SILLocation loc, const TypeLowering &tempTL) {
  // Allocate stack storage, then wrap it as temporary storage with a
  // dormant destroy cleanup.
  SILValue addr = emitTemporaryAllocation(loc, tempTL.getLoweredType());
  return useBufferAsTemporary(addr, tempTL);
}
/// Create an uninitialized temporary whose cleanup is managed by the
/// enclosing formal evaluation scope.
std::unique_ptr<TemporaryInitialization>
SILGenFunction::emitFormalAccessTemporary(SILLocation loc,
                                          const TypeLowering &tempTL) {
  SILValue addr = emitTemporaryAllocation(loc, tempTL.getLoweredType());
  CleanupHandle cleanup =
      enterDormantFormalAccessTemporaryCleanup(addr, loc, tempTL);
  return std::unique_ptr<TemporaryInitialization>(
      new TemporaryInitialization(addr, cleanup));
}
/// Create an Initialization for an uninitialized buffer.
std::unique_ptr<TemporaryInitialization>
SILGenFunction::useBufferAsTemporary(SILValue addr,
                                     const TypeLowering &tempTL) {
  // The cleanup starts dormant (or invalid for trivial types) until the
  // buffer is actually initialized.
  CleanupHandle cleanup = enterDormantTemporaryCleanup(addr, tempTL);
  return std::unique_ptr<TemporaryInitialization>(
      new TemporaryInitialization(addr, cleanup));
}
/// Push a dormant destroy cleanup for the given address. Trivial types
/// need no cleanup, so an invalid handle is returned for them.
CleanupHandle
SILGenFunction::enterDormantTemporaryCleanup(SILValue addr,
                                             const TypeLowering &tempTL) {
  if (tempTL.isTrivial())
    return CleanupHandle::invalid();
  Cleanups.pushCleanupInState<ReleaseValueCleanup>(CleanupState::Dormant, addr);
  // NOTE(review): this returns getCleanupsDepth() while the sibling helpers
  // return getTopCleanup() -- presumably equivalent here; confirm before
  // unifying.
  return Cleanups.getCleanupsDepth();
}
namespace {
/// A cleanup paired with an OwnedFormalAccess in the formal-evaluation
/// context: emitting it finishes the access, and killing it marks the
/// access as already finished.
struct FormalAccessReleaseValueCleanup : Cleanup {
  // Stable position of the paired OwnedFormalAccess; filled in by the
  // code that pushes this cleanup.
  FormalEvaluationContext::stable_iterator Depth;

  FormalAccessReleaseValueCleanup() : Depth() {}

  void setState(SILGenFunction &SGF, CleanupState newState) override {
    if (newState == CleanupState::Dead) {
      // The access must not be finished again later.
      getEvaluation(SGF).setFinished();
    }
    Cleanup::setState(SGF, newState);
  }

  void emit(SILGenFunction &SGF, CleanupLocation l,
            ForUnwind_t forUnwind) override {
    getEvaluation(SGF).finish(SGF);
  }

  void dump(SILGenFunction &SGF) const override {
#ifndef NDEBUG
    llvm::errs() << "FormalAccessReleaseValueCleanup "
                 << "State:" << getState() << "\n"
                 << "Value:" << getValue(SGF) << "\n";
#endif
  }

  // Look up the paired access; it must still be an Owned access.
  OwnedFormalAccess &getEvaluation(SILGenFunction &SGF) const {
    auto &evaluation = *SGF.FormalEvalContext.find(Depth);
    assert(evaluation.getKind() == FormalAccess::Owned);
    return static_cast<OwnedFormalAccess &>(evaluation);
  }

  SILValue getValue(SILGenFunction &SGF) const {
    return getEvaluation(SGF).getValue();
  }
};
} // end anonymous namespace
/// Wrap a buffer address in a ManagedValue whose destruction is deferred
/// to the enclosing formal evaluation scope. Trivial types get no cleanup.
ManagedValue
SILGenFunction::emitFormalAccessManagedBufferWithCleanup(SILLocation loc,
                                                         SILValue addr) {
  assert(isInFormalEvaluationScope() && "Must be in formal evaluation scope");
  auto &lowering = getTypeLowering(addr->getType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(addr);
  // Push the cleanup first, then the owned access, then record the access's
  // stable position in the cleanup so the two stay paired.
  auto &cleanup = Cleanups.pushCleanup<FormalAccessReleaseValueCleanup>();
  CleanupHandle handle = Cleanups.getTopCleanup();
  FormalEvalContext.push<OwnedFormalAccess>(loc, handle, addr);
  cleanup.Depth = FormalEvalContext.stable_begin();
  return ManagedValue(addr, handle);
}
/// Like emitFormalAccessManagedBufferWithCleanup, but for a loadable value
/// rather than a buffer address.
ManagedValue
SILGenFunction::emitFormalAccessManagedRValueWithCleanup(SILLocation loc,
                                                         SILValue value) {
  assert(isInFormalEvaluationScope() && "Must be in formal evaluation scope");
  auto &lowering = getTypeLowering(value->getType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(value);
  // Pair the cleanup with an owned formal access (see the buffer variant).
  auto &cleanup = Cleanups.pushCleanup<FormalAccessReleaseValueCleanup>();
  CleanupHandle handle = Cleanups.getTopCleanup();
  FormalEvalContext.push<OwnedFormalAccess>(loc, handle, value);
  cleanup.Depth = FormalEvalContext.stable_begin();
  return ManagedValue(value, handle);
}
/// Push a dormant formal-access cleanup for a temporary buffer. Returns an
/// invalid handle for trivial types, which need no cleanup.
CleanupHandle SILGenFunction::enterDormantFormalAccessTemporaryCleanup(
    SILValue addr, SILLocation loc, const TypeLowering &tempTL) {
  assert(isInFormalEvaluationScope() && "Must be in formal evaluation scope");
  if (tempTL.isTrivial())
    return CleanupHandle::invalid();
  auto &cleanup = Cleanups.pushCleanup<FormalAccessReleaseValueCleanup>();
  CleanupHandle handle = Cleanups.getTopCleanup();
  // Dormant until the buffer is actually initialized.
  Cleanups.setCleanupState(handle, CleanupState::Dormant);
  FormalEvalContext.push<OwnedFormalAccess>(loc, handle, addr);
  cleanup.Depth = FormalEvalContext.stable_begin();
  return handle;
}
/// Destroy the storage (box or direct value/address) of an emitted
/// local variable.
void SILGenFunction::destroyLocalVariable(SILLocation silLoc, VarDecl *vd) {
  assert(vd->getDeclContext()->isLocalContext() &&
         "can't emit a local var for a non-local var decl");
  assert(vd->hasStorage() && "can't emit storage for a computed variable");
  assert(VarLocs.count(vd) && "var decl wasn't emitted?!");
  auto loc = VarLocs[vd];
  // For a heap variable, the box is responsible for the value. We just need
  // to give up our retain count on it.
  if (loc.box) {
    B.emitDestroyValueOperation(silLoc, loc.box);
    return;
  }
  // For 'let' bindings, we emit a release_value or destroy_addr, depending on
  // whether we have an address or not.
  SILValue Val = loc.value;
  if (!Val->getType().isAddress())
    B.emitDestroyValueOperation(silLoc, Val);
  else
    B.createDestroyAddr(silLoc, Val);
}
/// Reclaim the storage of a local variable that was never initialized.
/// Only deallocation is emitted here -- there is no value to destroy.
void SILGenFunction::deallocateUninitializedLocalVariable(SILLocation silLoc,
                                                          VarDecl *vd) {
  assert(vd->getDeclContext()->isLocalContext() &&
         "can't emit a local var for a non-local var decl");
  assert(vd->hasStorage() && "can't emit storage for a computed variable");
  assert(VarLocs.count(vd) && "var decl wasn't emitted?!");
  auto loc = VarLocs[vd];
  // Ignore let values captured without a memory location.
  if (!loc.value->getType().isAddress()) return;
  assert(loc.box && "captured var should have been given a box");
  B.createDeallocBox(silLoc, loc.box);
}
| devincoughlin/swift | lib/SILGen/SILGenDecl.cpp | C++ | apache-2.0 | 59,534 |
from __future__ import absolute_import
from django.conf import settings
from django.core.mail import EmailMessage
from typing import Any, Mapping, Optional, Text
from zerver.lib.actions import internal_send_message
from zerver.lib.send_email import FromAddress
from zerver.lib.redis_utils import get_redis_client
from zerver.models import get_realm, get_system_bot, \
UserProfile, Realm
import time
client = get_redis_client()
def has_enough_time_expired_since_last_message(sender_email, min_delay):
    # type: (Text, float) -> bool
    """Return True when more than ``min_delay`` seconds have passed since
    this sender's previous message (or when no previous message exists).

    Side effect: every call records the current time in redis as the
    sender's latest-message timestamp.
    """
    key = 'zilencer:feedback:%s' % (sender_email,)
    now = int(time.time())
    previous = client.getset(key, now)
    if previous is None:
        # First message we have seen from this sender.
        return True
    return (now - int(previous)) > min_delay
def get_ticket_number():
    # type: () -> int
    """Return the next feedback ticket number.

    The counter is persisted in a file under /var/tmp so numbering
    survives restarts; a missing or corrupt file restarts numbering at 1.
    Note: the read-increment-write sequence is not atomic, so concurrent
    callers could, in principle, receive the same number.
    """
    num_file = '/var/tmp/.feedback-bot-ticket-number'
    try:
        # `with` ensures the descriptor is closed even on parse errors
        # (the original leaked the handle from open(...).read()).
        with open(num_file) as f:
            ticket_number = int(f.read()) + 1
    except Exception:
        ticket_number = 1
    with open(num_file, 'w') as f:
        f.write('%d' % (ticket_number,))
    return ticket_number
def deliver_feedback_by_zulip(message):
    # type: (Mapping[str, Any]) -> None
    """Post a feedback message to the configured feedback stream,
    prepending a ticket header when enough time has passed since the
    sender's previous message.
    """
    # Topic is the sender's email, truncated to stay within topic limits.
    subject = "%s" % (message["sender_email"],)
    if len(subject) > 60:
        subject = subject[:57].rstrip() + "..."
    content = u''
    sender_email = message['sender_email']
    # We generate ticket numbers if it's been more than a few minutes
    # since their last message. This avoids some noise when people use
    # enter-send.
    need_ticket = has_enough_time_expired_since_last_message(sender_email, 180)
    if need_ticket:
        ticket_number = get_ticket_number()
        content += '\n~~~'
        content += '\nticket Z%03d (@support please ack)' % (ticket_number,)
        content += '\nsender: %s' % (message['sender_full_name'],)
        content += '\nemail: %s' % (sender_email,)
        if 'sender_realm_str' in message:
            content += '\nrealm: %s' % (message['sender_realm_str'],)
        content += '\n~~~'
        content += '\n\n'
    content += message['content']
    # Send as the feedback bot into the feedback stream.
    user_profile = get_system_bot(settings.FEEDBACK_BOT)
    internal_send_message(user_profile.realm, settings.FEEDBACK_BOT,
                          "stream", settings.FEEDBACK_STREAM, subject, content)
def handle_feedback(event):
    # type: (Mapping[str, Any]) -> None
    """Entry point for a queued feedback event.

    Depending on server settings, the feedback is delivered by email,
    by Zulip message to the feedback stream, or both. No-op when
    feedback collection is disabled.
    """
    if not settings.ENABLE_FEEDBACK:
        return
    if settings.FEEDBACK_EMAIL is not None:
        to_email = settings.FEEDBACK_EMAIL
        subject = "Zulip feedback from %s" % (event["sender_email"],)
        content = event["content"]
        from_email = '"%s" <%s>' % (event["sender_full_name"], FromAddress.SUPPORT)
        # Reply-To points back at the sender so support can answer directly.
        headers = {'Reply-To': '"%s" <%s>' % (event["sender_full_name"], event["sender_email"])}
        msg = EmailMessage(subject, content, from_email, [to_email], headers=headers)
        msg.send()
    if settings.FEEDBACK_STREAM is not None:
        deliver_feedback_by_zulip(event)
| vaidap/zulip | zerver/lib/feedback.py | Python | apache-2.0 | 3,063 |
package com.blogspot.toomuchcoding.book.chapter6._3_VerifyingAtMostTimesOfMethodInvocation;
import com.blogspot.toomuchcoding.book.chapter6.common.TaxService;
import com.blogspot.toomuchcoding.book.chapter6.common.TaxUpdater;
import com.blogspot.toomuchcoding.common.testng.MockitoTestNGListener;
import com.blogspot.toomuchcoding.person.Person;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.testng.annotations.Listeners;
import org.testng.annotations.Test;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyDouble;
import static org.mockito.Mockito.atMost;
import static org.mockito.Mockito.verify;
/**
 * TestNG variant of the atMost(..) verification example. The
 * MockitoTestNGListener initializes the @Mock/@InjectMocks fields before
 * each test method.
 */
@Listeners(MockitoTestNGListener.class)
public class TaxUpdaterTestNgTest {

    @Mock TaxService taxService;
    @InjectMocks TaxUpdater systemUnderTest;

    @Test
    public void should_send_at_most_two_messages_through_the_web_service() {
        // when: update tax factors for two people
        systemUnderTest.updateTaxFactorFor(new Person(), new Person());

        // then: the service may have been invoked at most twice
        // (0, 1 or 2 calls pass; 3 or more fail the verification)
        verify(taxService, atMost(2)).updateMeanTaxFactor(any(Person.class), anyDouble());
    }
}
| davidhuizhou/mockito-cookbook | chapter06/src/test/java/com/blogspot/toomuchcoding/book/chapter6/_3_VerifyingAtMostTimesOfMethodInvocation/TaxUpdaterTestNgTest.java | Java | apache-2.0 | 1,104 |
package com.planet_ink.coffee_mud.Abilities.Spells;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2001-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("rawtypes")
public class Spell_Feeblemind extends Spell
{
    @Override public String ID() { return "Spell_Feeblemind"; }
    private final static String localizedName = CMLib.lang().L("Feeblemind");
    @Override public String name() { return localizedName; }
    private final static String localizedStaticDisplay = CMLib.lang().L("(Feeblemind spell)");
    @Override public String displayText() { return localizedStaticDisplay; }
    @Override public int abstractQuality(){return Ability.QUALITY_MALICIOUS;}
    @Override protected int canAffectCode(){return CAN_MOBS;}
    @Override public int classificationCode(){ return Ability.ACODE_SPELL|Ability.DOMAIN_ENCHANTMENT;}

    /**
     * While this effect is active, pin the target's intelligence stat at 1
     * and lower their save-vs-mind by 25.
     */
    @Override
    public void affectCharStats(MOB affected, CharStats affectableStats)
    {
        super.affectCharStats(affected,affectableStats);
        affectableStats.setStat(CharStats.STAT_INTELLIGENCE,1);
        affectableStats.setStat(CharStats.STAT_SAVE_MIND,affectableStats.getStat(CharStats.STAT_SAVE_MIND)-25);
    }

    @Override
    public void unInvoke()
    {
        // undo the affects of this spell
        if(!(affected instanceof MOB))
            return;
        final MOB mob=(MOB)affected;
        super.unInvoke();
        // Only notify the victim when the effect could legitimately expire.
        if(canBeUninvoked())
            mob.tell(L("You begin to remember some rather simple things--like your name. The feeblemind spell must be wearing off."));
    }

    /**
     * Cast the spell: resolve a target, run the proficiency check, announce
     * the cast to the room, and apply the malicious effect if allowed.
     */
    @Override
    public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
    {
        final MOB target=this.getTarget(mob,commands,givenTarget);
        if(target==null)
            return false;
        Room R=CMLib.map().roomLocation(target);
        if(R==null)
            R=mob.location();
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        boolean success=proficiencyCheck(mob,0,auto);
        if(success)
        {
            invoker=mob;
            final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> cast(s) at <T-NAMESELF>.^?"));
            final CMMsg msg2=CMClass.getMsg(mob,target,this,CMMsg.MSK_CAST_MALICIOUS_VERBAL|CMMsg.TYP_MIND|(auto?CMMsg.MASK_ALWAYS:0),null);
            // Both the visible cast message and the mind-attack message must
            // be approved by the room before anything is applied.
            if((R.okMessage(mob,msg))&&(R.okMessage(mob,msg2)))
            {
                R.send(mob,msg);
                R.send(mob,msg2);
                // NOTE(review): a positive value() here presumably means the
                // target resisted -- confirm against the CMMsg contract.
                if((msg.value()<=0)&&(msg2.value()<=0))
                {
                    R.show(target,null,CMMsg.MSG_OK_VISUAL,L("<S-NAME> begin(s) to feel a bit stupid."));
                    success=maliciousAffect(mob,target,asLevel,0,-1)!=null;
                }
            }
        }
        else
            return maliciousFizzle(mob,target,L("<S-NAME> cast(s) to <T-NAMESELF>, but the spell fizzles."));
        // return whether it worked
        return success;
    }
}
| Tycheo/coffeemud | com/planet_ink/coffee_mud/Abilities/Spells/Spell_Feeblemind.java | Java | apache-2.0 | 4,076 |
#!/bin/env python
"""Copy files listed in list.txt to CamelCase-named copies.

Reads one hyphenated file name per line from list.txt and copies each
file to a new name formed by capitalizing every '-'-separated token and
removing the hyphens (e.g. "foo-bar" -> "FooBar").
"""
import os
import sys


def camel_case(name):
    """Return `name` with each '-'-separated token capitalized and joined.

    Empty tokens (leading/trailing/double hyphens) are skipped; the
    original crashed with an IndexError on them.
    """
    return ''.join(tok[:1].upper() + tok[1:] for tok in name.split('-') if tok)


def main():
    with open("list.txt") as f:
        names = f.readlines()
    for name in names:
        # rstrip('\n') also copes with a final line that lacks a newline,
        # where the original name[0:-1] would chop off a real character.
        name = name.rstrip('\n')
        if not name:
            continue
        newname = camel_case(name)
        # Quote both names so spaces don't break the shell command.
        cmd = 'cp "%s" "%s"' % (name, newname)
        sys.stdout.write(cmd + '\n')
        os.system(cmd)


if __name__ == '__main__':
    main()
| wh81752/flaka | opt/rename.py | Python | apache-2.0 | 351 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.collection;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.nutch.net.URLFilter;
import org.apache.xerces.util.DOMUtil;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
/**
 * SubCollection represents a subset of the index. You can define url
 * patterns (one per line, matched as plain substrings) that indicate that a
 * particular page (url) is part of the SubCollection.
 */
public class Subcollection extends Configured implements URLFilter {

  public static final String TAG_COLLECTIONS = "subcollections";
  public static final String TAG_COLLECTION = "subcollection";
  public static final String TAG_WHITELIST = "whitelist";
  public static final String TAG_BLACKLIST = "blacklist";
  public static final String TAG_NAME = "name";
  public static final String TAG_ID = "id";

  // Parsed pattern lists; each entry is matched with String.indexOf.
  ArrayList<String> blackList = new ArrayList<String>();
  ArrayList<String> whiteList = new ArrayList<String>();

  /**
   * SubCollection identifier
   */
  String id;

  /**
   * SubCollection name
   */
  String name;

  /**
   * SubCollection whitelist as String (newline-separated patterns)
   */
  String wlString;

  /**
   * SubCollection blacklist as String (newline-separated patterns)
   */
  String blString;

  /**
   * public Constructor
   *
   * @param id
   *          id of SubCollection
   * @param name
   *          name of SubCollection
   */
  public Subcollection(String id, String name, Configuration conf) {
    this(conf);
    this.id = id;
    this.name = name;
  }

  public Subcollection(Configuration conf) {
    super(conf);
  }

  /**
   * @return Returns the name
   */
  public String getName() {
    return name;
  }

  /**
   * @return Returns the id
   */
  public String getId() {
    return id;
  }

  /**
   * Returns whitelist (the live internal list, not a copy)
   *
   * @return Whitelist entries
   */
  public ArrayList<String> getWhiteList() {
    return whiteList;
  }

  /**
   * Returns whitelist String
   *
   * @return Whitelist String
   */
  public String getWhiteListString() {
    return wlString;
  }

  /**
   * Returns blacklist String
   *
   * @return Blacklist String
   */
  public String getBlackListString() {
    return blString;
  }

  /**
   * @param whiteList
   *          The whiteList to set.
   */
  public void setWhiteList(ArrayList<String> whiteList) {
    this.whiteList = whiteList;
  }

  /**
   * Simple "indexOf" currentFilter for matching patterns.
   *
   * <pre>
   * rules for evaluation are as follows:
   * 1. if pattern matches in blacklist then url is rejected
   * 2. if pattern matches in whitelist then url is allowed
   * 3. url is rejected
   * </pre>
   *
   * @see org.apache.nutch.net.URLFilter#filter(java.lang.String)
   */
  public String filter(String urlString) {
    // Blacklist takes precedence: any substring match rejects the url.
    for (String pattern : blackList) {
      if (urlString.indexOf(pattern) != -1)
        return null;
    }
    // Then the whitelist: a substring match admits the url.
    for (String pattern : whiteList) {
      if (urlString.indexOf(pattern) != -1)
        return urlString;
    }
    // Default: reject.
    return null;
  }

  /**
   * Initialize Subcollection from dom element
   *
   * @param collection
   */
  public void initialize(Element collection) {
    this.id = DOMUtil.getChildText(
        collection.getElementsByTagName(TAG_ID).item(0)).trim();
    this.name = DOMUtil.getChildText(
        collection.getElementsByTagName(TAG_NAME).item(0)).trim();
    this.wlString = DOMUtil.getChildText(
        collection.getElementsByTagName(TAG_WHITELIST).item(0)).trim();
    parseList(this.whiteList, wlString);

    // Check if there's a blacklist we need to parse
    NodeList nodeList = collection.getElementsByTagName(TAG_BLACKLIST);
    if (nodeList.getLength() > 0) {
      this.blString = DOMUtil.getChildText(nodeList.item(0)).trim();
      parseList(this.blackList, blString);
    }
  }

  /**
   * Create a list of patterns from chunk of text, patterns are separated with
   * newline
   *
   * @param list
   * @param text
   */
  protected void parseList(ArrayList<String> list, String text) {
    list.clear();
    // StringTokenizer skips empty lines, so blank lines never become
    // empty (match-everything) patterns.
    StringTokenizer st = new StringTokenizer(text, "\n\r");
    while (st.hasMoreTokens()) {
      list.add(st.nextToken().trim());
    }
  }

  /**
   * Set contents of blacklist from String
   *
   * @param list
   *          the blacklist contents
   */
  public void setBlackList(String list) {
    this.blString = list;
    parseList(blackList, list);
  }

  /**
   * Set contents of whitelist from String
   *
   * @param list
   *          the whitelist contents
   */
  public void setWhiteList(String list) {
    this.wlString = list;
    parseList(whiteList, list);
  }
}
| supermy/nutch2 | src/plugin/subcollection/src/java/org/apache/nutch/collection/Subcollection.java | Java | apache-2.0 | 5,636 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.util.lang;
import org.apache.wicket.util.string.StringValueConversionException;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Locale;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Tests the <code>Bytes</code> class.
 */
public class BytesTest
{
	/** Backup of the locale used for number formatting. */
	private Locale originalFormatLocale = null;

	/** Backup of the JVM default locale. */
	private Locale originalDefaultLocale = null;

	/**
	 * Save the default locales and install a split-locale setup: German
	 * default locale with US formatting. The tests must still pass under
	 * this combination.
	 */
	@BeforeEach
	public void before()
	{
		originalFormatLocale = Locale.getDefault(Locale.Category.FORMAT);
		originalDefaultLocale = Locale.getDefault();
		Locale.setDefault(Locale.GERMANY);
		Locale.setDefault(Locale.Category.FORMAT, Locale.US);
	}

	/**
	 * Restore the default locales.
	 */
	@AfterEach
	public void after()
	{
		Locale.setDefault(originalDefaultLocale);
		Locale.setDefault(Locale.Category.FORMAT, originalFormatLocale);
	}

	/**
	 * Tests unit conversions across the whole bytes..terabytes range.
	 */
	@Test
	public void teraBytes()
	{
		assertEquals(Bytes.gigabytes(1024), Bytes.terabytes(1));
		assertEquals(Bytes.gigabytes(1024.0), Bytes.terabytes(1.0));
		assertEquals(Bytes.gigabytes(1024.0), Bytes.terabytes(1));

		assertEquals(1L, Bytes.bytes(1).bytes());
		assertEquals(1024L, Bytes.kilobytes(1).bytes());
		assertEquals(1024L * 1024, Bytes.megabytes(1).bytes());
		assertEquals(1024L * 1024 * 1024, Bytes.gigabytes(1).bytes());
		assertEquals(1024L * 1024 * 1024 * 1024, Bytes.terabytes(1).bytes());

		assertEquals(1.5, Bytes.bytes(1536).kilobytes());
		assertEquals(1.0, Bytes.kilobytes(1).kilobytes());
		assertEquals(0.5, Bytes.bytes(512).kilobytes());

		assertEquals(1.5, Bytes.kilobytes(1536).megabytes());
		assertEquals(1.0, Bytes.megabytes(1).megabytes());
		assertEquals(0.5, Bytes.kilobytes(512).megabytes());

		assertEquals(1.5, Bytes.megabytes(1536).gigabytes());
		assertEquals(1.0, Bytes.gigabytes(1).gigabytes());
		assertEquals(0.5, Bytes.megabytes(512).gigabytes());

		assertEquals(1.5, Bytes.gigabytes(1536).terabytes());
		assertEquals(1.0, Bytes.terabytes(1).terabytes());
		assertEquals(0.5, Bytes.gigabytes(512).terabytes());
	}

	/**
	 * Tests the valueOf method.
	 *
	 * @throws StringValueConversionException
	 */
	@Test
	public void valueOf() throws StringValueConversionException
	{
		assertEquals(Bytes.valueOf("1024GB"), Bytes.valueOf("1TB"));
		assertEquals(Bytes.valueOf("1024MB"), Bytes.valueOf("1GB"));
		assertEquals(Bytes.valueOf("1024KB"), Bytes.valueOf("1MB"));
		assertEquals(Bytes.valueOf("1024B"), Bytes.valueOf("1KB"));

		assertEquals(Bytes.valueOf("2048GB"), Bytes.valueOf("2TB"));
		assertEquals(Bytes.valueOf("2048MB"), Bytes.valueOf("2GB"));
		assertEquals(Bytes.valueOf("2048KB"), Bytes.valueOf("2MB"));
		assertEquals(Bytes.valueOf("2048B"), Bytes.valueOf("2KB"));

		assertEquals(Bytes.valueOf("1024GB", Locale.GERMAN), Bytes.valueOf("1TB"));
		assertEquals(Bytes.valueOf("1024MB", Locale.GERMAN), Bytes.valueOf("1GB"));
		assertEquals(Bytes.valueOf("1024KB", Locale.GERMAN), Bytes.valueOf("1MB"));
		assertEquals(Bytes.valueOf("1024B", Locale.GERMAN), Bytes.valueOf("1KB"));

		assertEquals(Bytes.valueOf("2048GB", Locale.GERMAN), Bytes.valueOf("2TB"));
		assertEquals(Bytes.valueOf("2048MB", Locale.GERMAN), Bytes.valueOf("2GB"));
		assertEquals(Bytes.valueOf("2048KB", Locale.GERMAN), Bytes.valueOf("2MB"));
		assertEquals(Bytes.valueOf("2048B", Locale.GERMAN), Bytes.valueOf("2KB"));

		// Unknown unit and garbage input must be rejected. assertThrows keeps
		// this consistent with negative() below and replaces the old
		// try/fail/catch + assertTrue(true) boilerplate.
		assertThrows(StringValueConversionException.class, () -> Bytes.valueOf("1PB"));
		assertThrows(StringValueConversionException.class, () -> Bytes.valueOf("baPB"));
	}

	/**
	 * Tests the toString() method.
	 */
	@Test
	public void testToString()
	{
		assertEquals("1 bytes", Bytes.bytes(1).toString());
		assertEquals("1KB", Bytes.bytes(1024).toString());
		assertEquals("1MB", Bytes.bytes(1024 * 1024L).toString());
		assertEquals("1GB", Bytes.bytes(1024 * 1024 * 1024L).toString());
		assertEquals("1TB", Bytes.bytes(1024 * 1024 * 1024 * 1024L).toString());
		assertEquals("1.5KB", Bytes.bytes(1024 * 1.5).toString());

		assertEquals("1 bytes", Bytes.bytes(1).toString(Locale.GERMAN));
	}

	/**
	 * Negative values are not supported
	 */
	@Test
	public void negative()
	{
		assertThrows(IllegalArgumentException.class, () -> Bytes.bytes(-1),
			"Bytes should not support negative values!");
	}
}
| mosoft521/wicket | wicket-util/src/test/java/org/apache/wicket/util/lang/BytesTest.java | Java | apache-2.0 | 5,445 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Tagging;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Editor.Tagging;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.InlineHints;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.Text.Tagging;
using Roslyn.Utilities;
using VSUtilities = Microsoft.VisualStudio.Utilities;
namespace Microsoft.CodeAnalysis.Editor.InlineHints
{
/// <summary>
/// The TaggerProvider that calls upon the service in order to locate the spans and names
/// </summary>
[Export(typeof(IViewTaggerProvider))]
[VSUtilities.ContentType(ContentTypeNames.RoslynContentType)]
[TagType(typeof(InlineHintDataTag))]
[VSUtilities.Name(nameof(InlineHintsDataTaggerProvider))]
internal class InlineHintsDataTaggerProvider : AsynchronousViewTaggerProvider<InlineHintDataTag>
{
private readonly IAsynchronousOperationListener _listener;
protected override SpanTrackingMode SpanTrackingMode => SpanTrackingMode.EdgeInclusive;
/// <summary>
/// We want to make sure that if the user edits the space that the tag exists in that it goes away and they
/// don't see stale tags sticking around in random locations until the next update. A good example of when this
/// is desirable is 'cut line'. If the tags aren't removed, then the line will be gone but the tags will remain
/// at whatever points the tracking spans moved them to.
/// </summary>
protected override TaggerTextChangeBehavior TextChangeBehavior => TaggerTextChangeBehavior.RemoveTagsThatIntersectEdits;
[Obsolete(MefConstruction.ImportingConstructorMessage, error: true)]
[ImportingConstructor]
public InlineHintsDataTaggerProvider(
IThreadingContext threadingContext,
IAsynchronousOperationListenerProvider listenerProvider)
: base(threadingContext, listenerProvider.GetListener(FeatureAttribute.InlineParameterNameHints))
{
_listener = listenerProvider.GetListener(FeatureAttribute.InlineParameterNameHints);
}
protected override TaggerDelay EventChangeDelay => TaggerDelay.Short;
protected override ITaggerEventSource CreateEventSource(ITextView textViewOpt, ITextBuffer subjectBuffer)
{
return TaggerEventSources.Compose(
TaggerEventSources.OnViewSpanChanged(ThreadingContext, textViewOpt),
TaggerEventSources.OnWorkspaceChanged(subjectBuffer, _listener),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.DisplayAllOverride),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.EnabledForParameters),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.ForLiteralParameters),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.ForObjectCreationParameters),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.ForOtherParameters),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.SuppressForParametersThatMatchMethodIntent),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.SuppressForParametersThatDifferOnlyBySuffix),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.EnabledForTypes),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.ForImplicitVariableTypes),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.ForLambdaParameterTypes),
TaggerEventSources.OnOptionChanged(subjectBuffer, InlineHintsOptions.ForImplicitObjectCreation));
}
        /// <summary>
        /// Restricts tagging to the lines currently visible in the view (plus a margin),
        /// so we do not compute hints for the whole document on every change.
        /// </summary>
        protected override IEnumerable<SnapshotSpan> GetSpansToTag(ITextView textView, ITextBuffer subjectBuffer)
        {
            this.AssertIsForeground();
            // Find the visible span some 100 lines +/- what's actually in view. This way
            // if the user scrolls up/down, we'll already have the results.
            var visibleSpanOpt = textView.GetVisibleLinesSpan(subjectBuffer, extraLines: 100);
            if (visibleSpanOpt == null)
            {
                // Couldn't find anything visible, just fall back to tagging all hint locations
                return base.GetSpansToTag(textView, subjectBuffer);
            }
            return SpecializedCollections.SingletonEnumerable(visibleSpanOpt.Value);
        }
        /// <summary>
        /// Asks the language's <see cref="IInlineHintsService"/> for hints in the given
        /// snapshot span and turns each non-empty hint into a data tag. Produces nothing
        /// when the buffer has no associated document or the language has no hint service.
        /// </summary>
        protected override async Task ProduceTagsAsync(TaggerContext<InlineHintDataTag> context, DocumentSnapshotSpan documentSnapshotSpan, int? caretPosition)
        {
            var cancellationToken = context.CancellationToken;
            var document = documentSnapshotSpan.Document;
            if (document == null)
                return;
            var service = document.GetLanguageService<IInlineHintsService>();
            if (service == null)
                return;
            var snapshotSpan = documentSnapshotSpan.SnapshotSpan;
            var hints = await service.GetInlineHintsAsync(document, snapshotSpan.Span.ToTextSpan(), cancellationToken).ConfigureAwait(false);
            foreach (var hint in hints)
            {
                // If we don't have any text to actually show the user, then don't make a tag.
                if (hint.DisplayParts.Sum(p => p.ToString().Length) == 0)
                    continue;
                context.AddTag(new TagSpan<InlineHintDataTag>(
                    hint.Span.ToSnapshotSpan(snapshotSpan.Snapshot),
                    new InlineHintDataTag(hint)));
            }
        }
}
}
| physhi/roslyn | src/EditorFeatures/Core/InlineHints/InlineHintsDataTaggerProvider.cs | C# | apache-2.0 | 6,332 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action.admin.indices;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.analysis.NameOrDefinition;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.Mockito.mock;
/**
 * Tests for parsing the REST {@code _analyze} request body into an
 * {@link AnalyzeAction.Request}: happy paths with named and inline-defined
 * filters, plus the error messages produced for malformed or legacy input.
 */
public class RestAnalyzeActionTests extends ESTestCase {
    // Happy path: text + named tokenizer + one named token filter.
    public void testParseXContentForAnalyzeRequest() throws Exception {
        try (XContentParser content = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("text", "THIS IS A TEST")
                .field("tokenizer", "keyword")
                .array("filter", "lowercase")
            .endObject())) {
            AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test");
            assertThat(analyzeRequest.text().length, equalTo(1));
            assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"}));
            assertThat(analyzeRequest.tokenizer().name, equalTo("keyword"));
            assertThat(analyzeRequest.tokenFilters().size(), equalTo(1));
            for (NameOrDefinition filter : analyzeRequest.tokenFilters()) {
                assertThat(filter.name, equalTo("lowercase"));
            }
        }
    }
    // Filters and char filters may also be given as inline anonymous definitions
    // (objects) rather than names; both forms must parse in the same array.
    public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Exception {
        try (XContentParser content = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("text", "THIS IS A TEST")
                .field("tokenizer", "keyword")
                .startArray("filter")
                    .value("lowercase")
                    .startObject()
                        .field("type", "stop")
                        .array("stopwords", "foo", "buzz")
                    .endObject()
                .endArray()
                .startArray("char_filter")
                    .startObject()
                        .field("type", "mapping")
                        .array("mappings", "ph => f", "qu => q")
                    .endObject()
                .endArray()
                .field("normalizer", "normalizer")
            .endObject())) {
            AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test");
            assertThat(analyzeRequest.text().length, equalTo(1));
            assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"}));
            assertThat(analyzeRequest.tokenizer().name, equalTo("keyword"));
            assertThat(analyzeRequest.tokenFilters().size(), equalTo(2));
            assertThat(analyzeRequest.tokenFilters().get(0).name, equalTo("lowercase"));
            // The inline "stop" filter has a definition instead of a name.
            assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue());
            assertThat(analyzeRequest.charFilters().size(), equalTo(1));
            assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue());
            assertThat(analyzeRequest.normalizer(), equalTo("normalizer"));
        }
    }
    // Malformed JSON is surfaced by the handler as an IOException from the parser.
    public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() {
        RestAnalyzeAction action = new RestAnalyzeAction(mock(RestController.class));
        RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
            .withContent(new BytesArray("{invalid_json}"), XContentType.JSON).build();
        IOException e = expectThrows(IOException.class, () -> action.handleRequest(request, null, null));
        assertThat(e.getMessage(), containsString("expecting double-quote"));
    }
    // Unknown top-level fields must be rejected, not silently ignored.
    public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception {
        try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("text", "THIS IS A TEST")
                .field("unknown", "keyword")
            .endObject())) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> AnalyzeAction.Request.fromXContent(invalidContent, "for test"));
            assertThat(e.getMessage(), containsString("unknown field [unknown]"));
        }
    }
    // "explain" must be a real boolean; a near-miss string like "fals" fails parsing.
    public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception {
        try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("explain", "fals")
            .endObject())) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test"));
            assertThat(e.getMessage(), containsString("failed to parse field [explain]"));
        }
    }
    // "normalizer" only accepts a string value.
    public void testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception {
        try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("normalizer", true)
            .endObject())) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test"));
            assertThat(e.getMessage(), containsString("normalizer doesn't support values of type: VALUE_BOOLEAN"));
        }
    }
    // Field names that were deprecated in 2.x ("filters", "token_filters",
    // "char_filters", "token_filter") are now simply unknown fields.
    public void testDeprecatedParamIn2xException() throws Exception {
        try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("text", "THIS IS A TEST")
                .field("tokenizer", "keyword")
                .array("filters", "lowercase")
            .endObject())) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> AnalyzeAction.Request.fromXContent(parser,"for test"));
            assertThat(e.getMessage(), containsString("unknown field [filters]"));
        }
        try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("text", "THIS IS A TEST")
                .field("tokenizer", "keyword")
                .array("token_filters", "lowercase")
            .endObject())) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> AnalyzeAction.Request.fromXContent(parser, "for test"));
            assertThat(e.getMessage(), containsString("unknown field [token_filters]"));
        }
        try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("text", "THIS IS A TEST")
                .field("tokenizer", "keyword")
                .array("char_filters", "lowercase")
            .endObject())) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> AnalyzeAction.Request.fromXContent(parser, "for test"));
            assertThat(e.getMessage(), containsString("unknown field [char_filters]"));
        }
        try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("text", "THIS IS A TEST")
                .field("tokenizer", "keyword")
                .array("token_filter", "lowercase")
            .endObject())) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> AnalyzeAction.Request.fromXContent(parser, "for test"));
            assertThat(e.getMessage(), containsString("unknown field [token_filter]"));
        }
    }
}
| coding0011/elasticsearch | server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java | Java | apache-2.0 | 8,872 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.form.engine.impl.persistence.deploy;
import java.io.Serializable;
import org.activiti.form.engine.impl.persistence.entity.FormEntity;
/**
 * Cache value pairing a deployed {@link FormEntity} with its raw JSON
 * definition, so the definition does not have to be re-read from the
 * repository on every form lookup.
 *
 * @author Tijs Rademakers
 */
public class FormCacheEntry implements Serializable {
  private static final long serialVersionUID = 1L;
  // The persisted form entity this cache entry describes.
  protected FormEntity formEntity;
  // The form's JSON model as stored at deployment time.
  protected String formJson;
  public FormCacheEntry(FormEntity formEntity, String formJson) {
    this.formEntity = formEntity;
    this.formJson = formJson;
  }
  public FormEntity getFormEntity() {
    return formEntity;
  }
  public void setFormEntity(FormEntity formEntity) {
    this.formEntity = formEntity;
  }
  public String getFormJson() {
    return formJson;
  }
  public void setFormJson(String formJson) {
    this.formJson = formJson;
  }
}
| roberthafner/flowable-engine | modules/flowable-form-engine/src/main/java/org/activiti/form/engine/impl/persistence/deploy/FormCacheEntry.java | Java | apache-2.0 | 1,359 |
/**
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IKAADATADEMULTIPLEXER_HPP_
#define IKAADATADEMULTIPLEXER_HPP_
#include <cstdint>
#include <vector>
namespace kaa {
// Outcome of processing a server response in a demultiplexer.
enum class DemultiplexerReturnCode {
    SUCCESS = 0,  // Response was deserialized and dispatched.
    FAILURE,      // Response could not be processed.
    REDIRECT      // Server instructed the client to redirect to another node.
};
/**
 * Demultiplexer is responsible for deserializing of response data and notifying
 * appropriate services.
 *
 * Required in user implementation of any kind of data channel.
 *
 */
class IKaaDataDemultiplexer {
public:
    /**
     * Processes the given response bytes.
     *
     * @param response buffer which to be processed.
     * @return outcome of processing; see @c DemultiplexerReturnCode.
     *
     */
    virtual DemultiplexerReturnCode processResponse(const std::vector<std::uint8_t> &response) = 0;

    virtual ~IKaaDataDemultiplexer() {}
};
} // namespace kaa
#endif /* IKAADATADEMULTIPLEXER_HPP_ */
| abohomol/kaa | client/client-multi/client-cpp/kaa/channel/IKaaDataDemultiplexer.hpp | C++ | apache-2.0 | 1,385 |
#region License
// Copyright (c) Jeremy Skinner (http://www.jeremyskinner.co.uk)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// The latest version of this file can be found at https://github.com/jeremyskinner/FluentValidation
#endregion
namespace FluentValidation.Tests {
using System;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using System.Threading;
using Internal;
using Xunit;
using Validators;
using System.Reflection;
	/// <summary>
	/// Tests for the LessThan validator: comparison against constants, against other
	/// properties (including nullables), default message text, and metadata extraction.
	/// </summary>
	public class LessThanValidatorTester {
		// Constant compared against in the constant-valued rule tests.
		int value = 1;
		public LessThanValidatorTester() {
			// Message-text assertions below depend on a known culture.
			CultureScope.SetDefaultCulture();
		}
		[Fact]
		public void Should_fail_when_greater_than_input() {
			var validator = new TestValidator(v => v.RuleFor(x => x.Id).LessThan(value));
			var result = validator.Validate(new Person{Id=2});
			result.IsValid.ShouldBeFalse();
		}
		[Fact]
		public void Should_succeed_when_less_than_input() {
			var validator = new TestValidator(v => v.RuleFor(x => x.Id).LessThan(value));
			var result = validator.Validate(new Person{Id=0});
			result.IsValid.ShouldBeTrue();
		}
		[Fact]
		public void Should_fail_when_equal_to_input() {
			// LessThan is strict: equality is a failure.
			var validator = new TestValidator(v => v.RuleFor(x => x.Id).LessThan(value));
			var result = validator.Validate(new Person{Id=value});
			result.IsValid.ShouldBeFalse();
		}
		[Fact]
		public void Should_set_default_validation_message_when_validation_fails() {
			var validator = new TestValidator(v => v.RuleFor(x => x.Id).LessThan(value));
			var result = validator.Validate(new Person{Id=2});
			result.Errors.Single().ErrorMessage.ShouldEqual("'Id' must be less than '1'.");
		}
		[Fact]
		public void Validates_against_property() {
			// Cross-property comparison: Id must be less than AnotherInt.
			var validator = new TestValidator(v => v.RuleFor(x => x.Id).LessThan(x => x.AnotherInt));
			var result = validator.Validate(new Person { Id = 2, AnotherInt = 1 });
			result.IsValid.ShouldBeFalse();
		}
		[Fact]
		public void Should_throw_when_value_to_compare_is_null() {
			Expression<Func<Person, int>> nullExpression = null;
			typeof(ArgumentNullException).ShouldBeThrownBy(() =>
				new TestValidator(v => v.RuleFor(x => x.Id).LessThan(nullExpression))
			);
		}
		[Fact]
		public void Should_not_throw_when_value_to_compare_is_of_different_type() {
			// decimal vs int comparison must not throw.
			new LessThanValidator(10M).IsValid(5M, 10).ShouldBeTrue();
		}
		[Fact]
		public void Extracts_property_from_expression() {
			var validator = new TestValidator(v => v.RuleFor(x => x.Id).LessThan(x => x.AnotherInt));
			var propertyValidator = validator.CreateDescriptor().GetValidatorsForMember("Id").OfType<LessThanValidator>().Single();
#if CoreCLR
			propertyValidator.MemberToCompare.ShouldEqual(typeof(Person).GetRuntimeProperty("AnotherInt"));
#else
			propertyValidator.MemberToCompare.ShouldEqual(typeof(Person).GetProperty("AnotherInt"));
#endif
		}
		[Fact]
		public void Validates_with_nullable_property() {
			// A null comparison target fails the rule, as does equal/greater.
			var validator = new TestValidator(v => v.RuleFor(x => x.Id).LessThan(x => x.NullableInt));
			var resultNull = validator.Validate(new Person { Id = 0, NullableInt = null });
			var resultLess = validator.Validate(new Person { Id = 0, NullableInt = -1 });
			var resultEqual = validator.Validate(new Person { Id = 0, NullableInt = 0 });
			var resultMore = validator.Validate(new Person { Id = 0, NullableInt = 1 });
			resultNull.IsValid.ShouldBeFalse();
			resultLess.IsValid.ShouldBeFalse();
			resultEqual.IsValid.ShouldBeFalse();
			resultMore.IsValid.ShouldBeTrue();
		}
		[Fact]
		public void Validates_nullable_with_nullable_property() {
			var validator = new TestValidator(v => v.RuleFor(x => x.NullableInt).LessThan(x => x.OtherNullableInt));
			var resultNull = validator.Validate(new Person { NullableInt = 0, OtherNullableInt = null });
			var resultLess = validator.Validate(new Person { NullableInt = 0, OtherNullableInt = -1 });
			var resultEqual = validator.Validate(new Person { NullableInt = 0, OtherNullableInt = 0 });
			var resultMore = validator.Validate(new Person { NullableInt = 0, OtherNullableInt = 1 });
			resultNull.IsValid.ShouldBeFalse();
			resultLess.IsValid.ShouldBeFalse();
			resultEqual.IsValid.ShouldBeFalse();
			resultMore.IsValid.ShouldBeTrue();
		}
		[Fact]
		public void Extracts_property_from_constant_using_expression() {
			IComparisonValidator validator = new LessThanValidator(2);
			validator.ValueToCompare.ShouldEqual(2);
		}
		[Fact]
		public void Comparison_type() {
			var validator = new LessThanValidator(1);
			validator.Comparison.ShouldEqual(Comparison.LessThan);
		}
		[Fact]
		public void Validates_with_nullable_when_property_is_null() {
			// A null property being validated passes (nothing to compare).
			var validator = new TestValidator(v => v.RuleFor(x => x.NullableInt).LessThan(5));
			var result = validator.Validate(new Person());
			result.IsValid.ShouldBeTrue();
		}
		[Fact]
		public void Validates_with_nullable_when_property_not_null() {
			var validator = new TestValidator(v => v.RuleFor(x => x.NullableInt).LessThan(5));
			var result = validator.Validate(new Person { NullableInt = 10 });
			result.IsValid.ShouldBeFalse();
		}
		[Fact]
		public void Validates_with_nullable_when_property_null_cross_property() {
			var validator = new TestValidator(v => v.RuleFor(x => x.NullableInt).LessThan(x => x.Id));
			var result = validator.Validate(new Person {Id = 5});
			result.IsValid.ShouldBeTrue();
		}
		[Fact]
		public void Validates_with_nullable_when_property_not_null_cross_property() {
			var validator = new TestValidator(v => v.RuleFor(x => x.NullableInt).LessThan(x => x.Id));
			var result = validator.Validate(new Person {NullableInt = 10, Id = 5});
			result.IsValid.ShouldBeFalse();
		}
	}
} | robv8r/FluentValidation | src/FluentValidation.Tests/LessThanValidatorTester.cs | C# | apache-2.0 | 6,363 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.eclipse.debug;
import java.util.ArrayList;
import java.util.List;
import org.drools.eclipse.DroolsEclipsePlugin;
import org.eclipse.debug.core.DebugException;
import org.eclipse.debug.core.model.IValue;
import org.eclipse.debug.core.model.IVariable;
import org.eclipse.jdt.debug.core.IJavaArray;
import org.eclipse.jdt.debug.core.IJavaObject;
import org.eclipse.jdt.debug.core.IJavaValue;
/**
 * The Application Data View content provider.
 *
 * Resolves the globals of a selected Drools working memory (via the JDT debug
 * model) into viewer elements, one {@link IVariable} per global entry.
 */
public class ApplicationDataViewContentProvider extends DroolsDebugViewContentProvider {
    private DroolsDebugEventHandlerView view;
    public ApplicationDataViewContentProvider(DroolsDebugEventHandlerView view) {
        this.view = view;
    }
    protected String getEmptyString() {
        return "The selected working memory has no globals defined.";
    }
    /**
     * Returns the children to display for the given element: the globals of a
     * recognized working-memory object, or the sub-variables of a variable
     * (optionally resolved through its logical structure).
     */
    public Object[] getChildren(Object obj) {
        try {
            IVariable[] variables = null;
            // Recognized working-memory implementation classes, including older
            // class names kept for backwards compatibility with old runtimes.
            if (obj != null && obj instanceof IJavaObject
                    && ("org.drools.core.impl.StatefulKnowledgeSessionImpl".equals(
                            ((IJavaObject) obj).getReferenceTypeName()) ||
                        "org.drools.core.common.AbstractWorkingMemory".equals(
                            ((IJavaObject) obj).getReferenceTypeName()) ||
                        "org.drools.core.reteoo.ReteooStatefulSession".equals(
                            ((IJavaObject) obj).getReferenceTypeName()) ||
                        // for backwards compatibility
                        "org.drools.reteoo.ReteooStatefulSession".equals(
                            ((IJavaObject) obj).getReferenceTypeName()))) {
                variables = getApplicationDataElements((IJavaObject) obj);
            } else if (obj instanceof IVariable) {
                if (view.isShowLogicalStructure()) {
                    // Prefer the logical structure when the view toggle is on.
                    IValue value = getLogicalValue(((IVariable) obj).getValue(), new ArrayList<String>());
                    variables = value.getVariables();
                }
                if (variables == null) {
                    variables = ((IVariable) obj).getValue().getVariables();
                }
            }
            if (variables == null) {
                return new Object[0];
            } else {
                cache(obj, variables);
                return variables;
            }
        } catch (DebugException e) {
            DroolsEclipsePlugin.log(e);
            return new Object[0];
        }
    }
    /**
     * Evaluates an expression in the debuggee to fetch the global map entries,
     * then wraps each (key, value) pair as a named variable.
     */
    private IVariable[] getApplicationDataElements(IJavaObject stackObj) throws DebugException {
        IValue objects = null;
        try {
            objects = DebugUtil.getValueByExpression("return ((org.drools.core.base.MapGlobalResolver) getGlobalResolver()).getGlobals();", stackObj);
        } catch (RuntimeException e) {
            // backwards compatibility: older runtimes use the org.drools.base package.
            objects = DebugUtil.getValueByExpression("return ((org.drools.base.MapGlobalResolver) getGlobalResolver()).getGlobals();", stackObj);
        }
        if (objects instanceof IJavaArray) {
            IJavaArray array = (IJavaArray) objects;
            List<VariableWrapper> result = new ArrayList<VariableWrapper>();
            IJavaValue[] javaVals = array.getValues();
            for ( int i = 0; i < javaVals.length; i++ ) {
                IJavaValue mapEntry = javaVals[i];
                String key = null;
                IJavaValue value = null;
                // Each element is a Map.Entry; pull out its "key" and "value" fields.
                IVariable[] vars = mapEntry.getVariables();
                for ( int j = 0; j < vars.length; j++ ) {
                    IVariable var = vars[j];
                    if ("key".equals(var.getName())) {
                        key = var.getValue().getValueString();
                    } else if ("value".equals(var.getName())) {
                        value = (IJavaValue) var.getValue();
                    }
                }
                result.add(new VariableWrapper(key, value));
            }
            return result.toArray(new IVariable[result.size()]);
        }
        return null;
    }
}
| psiroky/droolsjbpm-tools | drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/debug/ApplicationDataViewContentProvider.java | Java | apache-2.0 | 4,650 |
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/c/builtin_op_data.h"
#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/kernels/internal/common.h"
#include "tensorflow/lite/kernels/internal/quantization_util.h"
#include "tensorflow/lite/kernels/internal/reference/conv.h"
#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h"
#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
#include "tensorflow/lite/kernels/kernel_util.h"
#include "tensorflow/lite/kernels/padding.h"
#include "tensorflow/lite/micro/kernels/conv.h"
#include "tensorflow/lite/micro/kernels/kernel_util.h"
namespace tflite {
// Tensor slots of a Conv node: input activations, filter weights, optional
// bias, and the single output.
const int kConvInputTensor = 0;
const int kConvWeightsTensor = 1;
const int kConvBiasTensor = 2;
const int kConvOutputTensor = 0;
// Conv is quantized along dimension 0:
// https://www.tensorflow.org/lite/performance/quantization_spec
const int kConvQuantizedDimension = 0;
// Builds the ConvParams needed by the float reference convolution kernels,
// translating the node's builtin options plus the padding precomputed at
// Prepare time.
ConvParams ConvParamsFloat(const TfLiteConvParams& params,
                           const OpDataConv& data) {
  ConvParams result;
  // Padding was resolved during Prepare and stored in the op data.
  result.padding_type = tflite::micro::RuntimePaddingType(params.padding);
  result.padding_values.height = data.padding.height;
  result.padding_values.width = data.padding.width;
  // Geometry comes straight from the builtin options.
  result.stride_height = params.stride_height;
  result.stride_width = params.stride_width;
  result.dilation_height_factor = params.dilation_height_factor;
  result.dilation_width_factor = params.dilation_width_factor;
  // Clamp range implied by the fused activation.
  CalculateActivationRange(params.activation, &result.float_activation_min,
                           &result.float_activation_max);
  return result;
}
// Returns a ConvParams struct with all the parameters needed for a
// quantized computation: zero-point offsets, the requantization
// multiplier/shift, geometry, and the quantized activation clamp range.
ConvParams ConvParamsQuantized(const TfLiteConvParams& params,
                               const OpDataConv& data) {
  ConvParams op_params;
  // Kernels expect negated zero points as offsets for input and weights.
  op_params.input_offset = -data.input_zero_point;
  op_params.weights_offset = -data.filter_zero_point;
  op_params.output_offset = data.output_zero_point;
  op_params.output_multiplier = data.output_multiplier;
  // Sign convention for the shift differs between Prepare and the kernels.
  op_params.output_shift = -data.output_shift;
  op_params.padding_type = tflite::micro::RuntimePaddingType(params.padding);
  op_params.padding_values.height = data.padding.height;
  op_params.padding_values.width = data.padding.width;
  op_params.stride_height = params.stride_height;
  op_params.stride_width = params.stride_width;
  op_params.dilation_height_factor = params.dilation_height_factor;
  op_params.dilation_width_factor = params.dilation_width_factor;
  op_params.quantized_activation_min = data.output_activation_min;
  op_params.quantized_activation_max = data.output_activation_max;
  return op_params;
}
// Computes padding and (for non-float types) the per-channel quantization
// parameters for a Conv node, storing the results in `data`. The geometry
// arguments describe input/filter/output height and width.
TfLiteStatus CalculateOpDataConv(TfLiteContext* context, TfLiteNode* node,
                                 const TfLiteConvParams& params, int width,
                                 int height, int filter_width,
                                 int filter_height, int out_width,
                                 int out_height, const TfLiteType data_type,
                                 OpDataConv* data) {
  bool has_bias = node->inputs->size == 3;
  // Check number of inputs/outputs
  TF_LITE_ENSURE(context, has_bias || node->inputs->size == 2);
  TF_LITE_ENSURE_EQ(context, node->outputs->size, 1);
  // Matching GetWindowedOutputSize in TensorFlow.
  auto padding = params.padding;
  data->padding = ComputePaddingHeightWidth(
      params.stride_height, params.stride_width, params.dilation_height_factor,
      params.dilation_width_factor, height, width, filter_height, filter_width,
      padding, &out_height, &out_width);
  MicroContext* micro_context = GetMicroContext(context);
  TfLiteTensor* input =
      micro_context->AllocateTempInputTensor(node, kConvInputTensor);
  TF_LITE_ENSURE(context, input != nullptr);
  TfLiteTensor* filter =
      micro_context->AllocateTempInputTensor(node, kConvWeightsTensor);
  TF_LITE_ENSURE(context, filter != nullptr);
  // Bias is optional; no null check here since it may legitimately be absent.
  TfLiteTensor* bias =
      micro_context->AllocateTempInputTensor(node, kConvBiasTensor);
  TfLiteTensor* output =
      micro_context->AllocateTempOutputTensor(node, kConvOutputTensor);
  TF_LITE_ENSURE(context, output != nullptr);
  // Note that quantized inference requires that all tensors have their
  // parameters set. This is usually done during quantized training.
  if (data_type != kTfLiteFloat32) {
    int output_channels = filter->dims->data[kConvQuantizedDimension];
    TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams(
        context, input, filter, bias, output, params.activation,
        &data->output_multiplier, &data->output_shift,
        &data->output_activation_min, &data->output_activation_max,
        data->per_channel_output_multiplier, data->per_channel_output_shift,
        output_channels));
  }
  data->input_zero_point = input->params.zero_point;
  data->filter_zero_point = filter->params.zero_point;
  data->output_zero_point = output->params.zero_point;
  micro_context->DeallocateTempTfLiteTensor(input);
  micro_context->DeallocateTempTfLiteTensor(filter);
  micro_context->DeallocateTempTfLiteTensor(output);
  // NOTE(review): bias may be nullptr for bias-less nodes yet is deallocated
  // unconditionally — presumably DeallocateTempTfLiteTensor tolerates null;
  // confirm against the MicroContext contract.
  micro_context->DeallocateTempTfLiteTensor(bias);
  return kTfLiteOk;
}
// Shared Prepare for Conv kernels: reads node geometry, allocates persistent
// per-channel quantization buffers, validates per-channel quantization
// metadata, and delegates parameter calculation to CalculateOpDataConv.
TfLiteStatus ConvPrepare(TfLiteContext* context, TfLiteNode* node) {
  TFLITE_DCHECK(node->user_data != nullptr);
  TFLITE_DCHECK(node->builtin_data != nullptr);
  OpDataConv* data = static_cast<OpDataConv*>(node->user_data);
  const auto& params =
      *(static_cast<const TfLiteConvParams*>(node->builtin_data));
  MicroContext* micro_context = GetMicroContext(context);
  TfLiteTensor* output =
      micro_context->AllocateTempOutputTensor(node, kConvOutputTensor);
  TF_LITE_ENSURE(context, output != nullptr);
  TfLiteTensor* input =
      micro_context->AllocateTempInputTensor(node, kConvInputTensor);
  TF_LITE_ENSURE(context, input != nullptr);
  TfLiteTensor* filter =
      micro_context->AllocateTempInputTensor(node, kConvWeightsTensor);
  TF_LITE_ENSURE(context, filter != nullptr);
  // Tensor layout is NHWC: dims are {batch, height, width, channels}.
  const int input_width = input->dims->data[2];
  const int input_height = input->dims->data[1];
  const int filter_width = filter->dims->data[2];
  const int filter_height = filter->dims->data[1];
  const int output_width = output->dims->data[2];
  const int output_height = output->dims->data[1];
  // Dynamically allocate per-channel quantization parameters.
  // These live in the persistent arena for the lifetime of the interpreter.
  const int num_channels = filter->dims->data[kConvQuantizedDimension];
  data->per_channel_output_multiplier =
      static_cast<int32_t*>(context->AllocatePersistentBuffer(
          context, num_channels * sizeof(int32_t)));
  data->per_channel_output_shift =
      static_cast<int32_t*>(context->AllocatePersistentBuffer(
          context, num_channels * sizeof(int32_t)));
  // All per-channel quantized tensors need valid zero point and scale arrays.
  if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) {
    TF_LITE_ENSURE_EQ(context, filter->quantization.type,
                      kTfLiteAffineQuantization);
    const auto* affine_quantization =
        static_cast<TfLiteAffineQuantization*>(filter->quantization.params);
    TFLITE_DCHECK(affine_quantization != nullptr);
    TFLITE_DCHECK(affine_quantization->scale != nullptr);
    TFLITE_DCHECK(affine_quantization->zero_point != nullptr);
    // Either a single scale (per-tensor) or one scale per output channel.
    TF_LITE_ENSURE(context,
                   affine_quantization->scale->size == 1 ||
                       affine_quantization->scale->size ==
                           filter->dims->data[kConvQuantizedDimension]);
  }
  TF_LITE_ENSURE_STATUS(CalculateOpDataConv(
      context, node, params, input_width, input_height, filter_width,
      filter_height, output_width, output_height, input->type, data));
  micro_context->DeallocateTempTfLiteTensor(filter);
  micro_context->DeallocateTempTfLiteTensor(input);
  micro_context->DeallocateTempTfLiteTensor(output);
  return kTfLiteOk;
}
} // namespace tflite
| google/CFU-Playground | third_party/tflite-micro/tensorflow/lite/micro/kernels/conv_common.cc | C++ | apache-2.0 | 8,686 |
/*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
import java.math.BigInteger;
/**
 * Flat summary row for a single piece of evidence, serialized directly to
 * JSON for the evidence UI (field names match the JSON property names).
 *
 * @author fdefalco
 */
public class EvidenceSummary {
  // Name of the evidence group this row belongs to.
  @JsonProperty("evidence_group_name")
  public String evidence_group_name;
  // Identifier of the evidence record.
  @JsonProperty("evidence_id")
  public BigInteger evidence_id;
  // Kind of evidence (as reported by the evidence source).
  @JsonProperty("evidence_type")
  public String evidence_type;
  // Whether this evidence supports (true) or refutes (false) the association.
  @JsonProperty("supports")
  public boolean supports;
  // Count of evidence items aggregated into this row.
  @JsonProperty("evidence_count")
  public double evidence_count;
}
| rkboyce/WebAPI | src/main/java/org/ohdsi/webapi/evidence/EvidenceSummary.java | Java | apache-2.0 | 1,145 |
/*
* Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
*/
/**
 * Form model backing the "Search Flows" query UI. Extends QueryFormModel with
 * flow-record defaults and pre-populates dropdown value options for the
 * where-clause fields (vrouter, source/dest virtual network, protocol).
 * Relies on the global `cowc` constants object and global jQuery (`$`).
 */
define([
    'underscore',
    'knockout',
    "protocol",
    'query-form-model',
    'core-basedir/reports/qe/ui/js/common/qe.model.config'
], function (_, Knockout, protocolUtils, QueryFormModel, qeModelConfig) {
    var SearchFlowFormModel = QueryFormModel.extend({
        defaultSelectFields: ['flow_class_id', 'direction_ing'],
        constructor: function (modelData) {
            // Merge caller-supplied data over the flow-record defaults.
            var defaultConfig = qeModelConfig.getQueryModelConfig({
                table_name: cowc.FLOW_RECORD_TABLE,
                limit: 5000,
                time_range: 600,
                table_type: cowc.QE_FLOW_TABLE_TYPE,
                filters: '',
                filter_json: '',
                select: "other_vrouter_ip, vrouter, vrouter_ip," +
                    "sourcevn, sourceip, sport, destvn," +
                    "destip, dport, protocol, agg-bytes," +
                    "agg-packets, direction_ing",
                query_prefix: cowc.FR_QUERY_PREFIX});
            modelData = $.extend(true, {}, defaultConfig, modelData);
            QueryFormModel.prototype.constructor.call(this, modelData);
            this.setTableFieldValues();
            return this;
        },
        // Populates the where-clause value dropdowns. Protocol names come from
        // the static protocol list; VN and vrouter names are fetched
        // asynchronously and filled into value_option_list when they arrive.
        // NOTE(review): the AJAX calls have no error handlers — on failure the
        // lists simply stay empty; confirm that is the intended behavior.
        setTableFieldValues: function () {
            var searchFlowModel = this.model();
            var valueOptionList = {
                vrouter: [],
                sourcevn: [],
                destvn: [],
                protocol: []
            };
            searchFlowModel.attributes.where_data_object['value_option_list'] =
                valueOptionList;
            var protocolData = [];
            $.each(protocolUtils.protocolList, function(idx, obj) {
                protocolData.push(obj['name']);
            });
            valueOptionList['protocol'] = protocolData;
            $.ajax({
                url: '/api/admin/networks',
                dataType: 'json'
            }).done(function (response){
                var vnList =
                    getValueByJsonPath(response, 'virtual-networks', []),
                    results = [];
                for (var i = 0; i < vnList.length; i++) {
                    var vnObj = vnList[i];
                    if (vnObj['fq_name'] != null) {
                        var fqn = vnObj['fq_name'].join(':');
                        results.push(fqn);
                    }
                }
                // Source and destination VN dropdowns share the same list.
                valueOptionList['sourcevn'] = results;
                valueOptionList['destvn'] = results;
            });
            $.ajax({
                url: '/api/admin/monitor/infrastructure/vrouters/cached-summary',
                dataType: 'json'
            }).done(function (response) {
                var vRouterList = getValueByJsonPath(response,'data',[]),
                    results = [];
                for (var i = 0; i < vRouterList.length; i++) {
                    var vRouterName = vRouterList[i]['name'];
                    results.push(vRouterName);
                }
                valueOptionList['vrouter'] = results;
            });
        },
        // Computed list of time-granularity units appropriate for the selected
        // time range: secs always; mins/hrs/days only once the range exceeds
        // one minute/hour/day respectively. A time_range of -1 means a custom
        // from/to window, so the span is derived from those timestamps.
        getTimeGranularityUnits: function() {
            var self = this;
            return Knockout.computed(function () {
                var timeRange = self.time_range(),
                    fromTime = new Date(self.from_time()).getTime(),
                    toTime = new Date(self.to_time()).getTime(),
                    timeGranularityUnits = [];
                timeGranularityUnits.push({id: "secs", text: "secs"});
                if (timeRange == -1) {
                    timeRange = (toTime - fromTime) / 1000;
                }
                if (timeRange > 60) {
                    timeGranularityUnits.push({id: "mins", text: "mins"});
                }
                if (timeRange > 3600) {
                    timeGranularityUnits.push({id: "hrs", text: "hrs"});
                }
                if (timeRange > 86400) {
                    timeGranularityUnits.push({id: "days", text: "days"});
                }
                return timeGranularityUnits;
            }, this);
        },
        validations: {}
    });
    return SearchFlowFormModel;
});
| vishnuvv/contrail-web-controller | webroot/monitor/infrastructure/underlay/ui/js/models/SearchFlowFormModel.js | JavaScript | apache-2.0 | 4,508 |
package com.sandwich.util.io;
import java.io.File;
import java.io.IOException;
/**
 * A file-system action that recurses into directories: when invoked on a
 * directory it applies {@code operate(File)} to every entry contained in it.
 * Subclasses supply the per-file behavior.
 */
public abstract class ForEachFileAction extends ExistingFileAction {

    public ForEachFileAction(String... strings) {
        super(strings);
    }

    /**
     * Applies this action to each entry of the given directory.
     *
     * @param dir directory whose entries are processed
     * @throws IOException if the directory cannot be listed, or if
     *         {@code operate} fails for one of the entries
     */
    public void onDirectory(File dir) throws IOException {
        String[] entries = dir.list();
        if (entries == null) {
            // File.list() returns null on I/O error or when dir is not a
            // directory; fail with a descriptive IOException instead of
            // letting the for-each loop throw a NullPointerException.
            throw new IOException("Unable to list directory: " + dir);
        }
        for (String fileName : entries) {
            operate(new File(dir, fileName));
        }
    }
}
| matyb/java-koans | lib/src/main/java/com/sandwich/util/io/ForEachFileAction.java | Java | apache-2.0 | 362 |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.compiler.server;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.ProgressIndicatorUtils;
import com.intellij.openapi.progress.util.ReadTask;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.*;
import com.intellij.util.SmartList;
import com.intellij.util.TimeoutUtil;
import com.intellij.util.concurrency.SequentialTaskExecutor;
import io.netty.channel.Channel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.api.CmdlineProtoUtil;
import org.jetbrains.jps.api.CmdlineRemoteProto;
import org.jetbrains.org.objectweb.asm.Opcodes;
import java.util.*;
import java.util.concurrent.ExecutorService;
/**
* @author Eugene Zhuravlev
*/
/**
 * Base handler for messages received from the external JPS build process.
 * Dispatches build events and compile messages to subclass hooks, and
 * services "constant search" tasks from the builder: given a changed or
 * removed compile-time constant, it finds source files whose compiled
 * classes may have inlined that constant and reports their paths back
 * over the build channel.
 *
 * @author Eugene Zhuravlev
 */
public abstract class DefaultMessageHandler implements BuilderMessageHandler {
  private static final Logger LOG = Logger.getInstance("#com.intellij.compiler.server.DefaultMessageHandler");
  // Total time budget for constant searches within one build session; once
  // exceeded, searches are skipped and the build rebuilds dependents instead.
  public static final long CONSTANT_SEARCH_TIME_LIMIT = 60 * 1000L; // one minute

  private final Project myProject;
  // Constant-search tasks run one at a time on a background executor.
  private final ExecutorService myTaskExecutor = SequentialTaskExecutor.createSequentialApplicationPoolExecutor("DefaultMessageHandler pool");
  // Accumulated wall-clock time spent in constant searches (volatile: written
  // on the background task thread, compared against the limit on later tasks).
  private volatile long myConstantSearchTime = 0L;

  protected DefaultMessageHandler(Project project) {
    myProject = project;
  }

  @Override
  public void buildStarted(UUID sessionId) {
  }

  /**
   * Routes an incoming builder message by type: build events (republishing
   * custom builder messages on the project bus), compile messages, and
   * constant-search tasks.
   */
  @Override
  public final void handleBuildMessage(final Channel channel, final UUID sessionId, final CmdlineRemoteProto.Message.BuilderMessage msg) {
    //noinspection EnumSwitchStatementWhichMissesCases
    switch (msg.getType()) {
      case BUILD_EVENT:
        final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent event = msg.getBuildEvent();
        if (event.getEventType() == CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.Type.CUSTOM_BUILDER_MESSAGE && event.hasCustomBuilderMessage()) {
          final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.CustomBuilderMessage message = event.getCustomBuilderMessage();
          if (!myProject.isDisposed()) {
            // Re-publish custom builder messages on the project message bus
            // so interested listeners can react to them.
            myProject.getMessageBus().syncPublisher(CustomBuilderMessageHandler.TOPIC).messageReceived(
              message.getBuilderId(), message.getMessageType(), message.getMessageText()
            );
          }
        }
        handleBuildEvent(sessionId, event);
        break;
      case COMPILE_MESSAGE:
        handleCompileMessage(sessionId, msg.getCompileMessage());
        break;
      case CONSTANT_SEARCH_TASK:
        final CmdlineRemoteProto.Message.BuilderMessage.ConstantSearchTask task = msg.getConstantSearchTask();
        handleConstantSearchTask(channel, sessionId, task);
        break;
    }
  }

  /** Subclass hook: a compile progress/diagnostic message from the builder. */
  protected abstract void handleCompileMessage(UUID sessionId, CmdlineRemoteProto.Message.BuilderMessage.CompileMessage message);

  /** Subclass hook: a build lifecycle event from the builder. */
  protected abstract void handleBuildEvent(UUID sessionId, CmdlineRemoteProto.Message.BuilderMessage.BuildEvent event);

  /**
   * Schedules the constant search on the background executor under
   * write-action priority; if the read task is canceled (e.g. by a pending
   * write action or dumb mode), the whole task is re-scheduled once
   * indexing is finished.
   */
  private void handleConstantSearchTask(final Channel channel, final UUID sessionId, final CmdlineRemoteProto.Message.BuilderMessage.ConstantSearchTask task) {
    ProgressIndicatorUtils.scheduleWithWriteActionPriority(myTaskExecutor, new ReadTask() {
      @Override
      public Continuation runBackgroundProcess(@NotNull ProgressIndicator indicator) throws ProcessCanceledException {
        return DumbService.getInstance(myProject).runReadActionInSmartMode(() -> {
          doHandleConstantSearchTask(channel, sessionId, task);
          return null;
        });
      }

      @Override
      public void onCanceled(@NotNull ProgressIndicator indicator) {
        // Retry the same task after indexing completes.
        DumbService.getInstance(myProject).runWhenSmart(() -> handleConstantSearchTask(channel, sessionId, task));
      }
    });
  }

  /**
   * Performs the actual search for usages of a changed/removed constant and
   * sends the result (affected file paths, or a failure flag telling the
   * builder to fall back to rebuilding dependents) back over the channel.
   */
  private void doHandleConstantSearchTask(Channel channel, UUID sessionId, CmdlineRemoteProto.Message.BuilderMessage.ConstantSearchTask task) {
    final String ownerClassName = task.getOwnerClassName();
    final String fieldName = task.getFieldName();
    final int accessFlags = task.getAccessFlags();
    final boolean accessChanged = task.getIsAccessChanged();
    final boolean isRemoved = task.getIsFieldRemoved();
    boolean canceled = false;
    final Ref<Boolean> isSuccess = Ref.create(Boolean.TRUE);
    final Set<String> affectedPaths = Collections.synchronizedSet(new HashSet<String>()); // PsiSearchHelper runs multiple threads
    final long searchStart = System.currentTimeMillis();
    try {
      if (myConstantSearchTime > CONSTANT_SEARCH_TIME_LIMIT) {
        // skipping constant search and letting the build rebuild dependent modules
        isSuccess.set(Boolean.FALSE);
        LOG.debug("Total constant search time exceeded time limit for this build session");
      }
      else if(isDumbMode()) {
        // do not wait until dumb mode finishes
        isSuccess.set(Boolean.FALSE);
        LOG.debug("Constant search task: cannot search in dumb mode");
      }
      else {
        // Builder sends JVM-style names ('$'-separated nested classes);
        // convert to the source-level qualified name for PSI lookup.
        final String qualifiedName = ownerClassName.replace('$', '.');
        handleCompileMessage(sessionId, CmdlineProtoUtil.createCompileProgressMessageResponse(
          "Searching for usages of changed/removed constants for class " + qualifiedName
        ).getCompileMessage());
        final PsiClass[] classes = ReadAction
          .compute(() -> JavaPsiFacade.getInstance(myProject).findClasses(qualifiedName, GlobalSearchScope.allScope(myProject)));

        try {
          if (isRemoved) {
            // Field was deleted: scan for identifier occurrences, since no
            // PSI field exists anymore to resolve references against.
            ApplicationManager.getApplication().runReadAction(() -> {
              if (classes.length > 0) {
                for (PsiClass aClass : classes) {
                  final boolean success = aClass.isValid() && performRemovedConstantSearch(aClass, fieldName, accessFlags, affectedPaths);
                  if (!success) {
                    isSuccess.set(Boolean.FALSE);
                    break;
                  }
                }
              }
              else {
                // Owner class not found either: search project-wide.
                isSuccess.set(
                  performRemovedConstantSearch(null, fieldName, accessFlags, affectedPaths)
                );
              }
            });
          }
          else {
            if (classes.length > 0) {
              // Field still exists: collect the matching PsiField from each
              // class variant (any invalid class aborts with an empty list).
              final Collection<PsiField> changedFields = ReadAction.compute(() -> {
                final List<PsiField> fields = new SmartList<>();
                for (PsiClass aClass : classes) {
                  if (!aClass.isValid()) {
                    return Collections.emptyList();
                  }
                  final PsiField changedField = aClass.findFieldByName(fieldName, false);
                  if (changedField != null) {
                    fields.add(changedField);
                  }
                }
                return fields;
              });
              if (changedFields.isEmpty()) {
                isSuccess.set(Boolean.FALSE);
                LOG.debug("Constant search task: field " + fieldName + " not found in classes " + qualifiedName);
              }
              else {
                for (final PsiField changedField : changedFields) {
                  if (!accessChanged && isPrivate(accessFlags)) {
                    // optimization: don't need to search, cause may be used only in this class
                    continue;
                  }
                  if (!affectDirectUsages(changedField, accessChanged, affectedPaths)) {
                    isSuccess.set(Boolean.FALSE);
                    break;
                  }
                }
              }
            }
            else {
              isSuccess.set(Boolean.FALSE);
              LOG.debug("Constant search task: class " + qualifiedName + " not found");
            }
          }
        }
        catch (Throwable e) {
          isSuccess.set(Boolean.FALSE);
          LOG.debug("Constant search task: failed with message " + e.getMessage());
        }
      }
    }
    catch (ProcessCanceledException e) {
      // Canceled searches are retried by the caller; don't report a result.
      canceled = true;
      throw e;
    }
    finally {
      myConstantSearchTime += (System.currentTimeMillis() - searchStart);
      if (!canceled) {
        notifyConstantSearchFinished(channel, sessionId, ownerClassName, fieldName, isSuccess, affectedPaths);
      }
    }
  }

  /** Sends the constant-search result message back to the build process. */
  private static void notifyConstantSearchFinished(Channel channel,
                                                   UUID sessionId,
                                                   String ownerClassName,
                                                   String fieldName,
                                                   Ref<Boolean> isSuccess, Set<String> affectedPaths) {
    final CmdlineRemoteProto.Message.ControllerMessage.ConstantSearchResult.Builder builder =
      CmdlineRemoteProto.Message.ControllerMessage.ConstantSearchResult.newBuilder();
    builder.setOwnerClassName(ownerClassName);
    builder.setFieldName(fieldName);
    if (isSuccess.get()) {
      builder.setIsSuccess(true);
      builder.addAllPath(affectedPaths);
      LOG.debug("Constant search task: " + affectedPaths.size() + " affected files found");
    }
    else {
      builder.setIsSuccess(false);
      LOG.debug("Constant search task: unsuccessful");
    }
    channel.writeAndFlush(CmdlineProtoUtil.toMessage(sessionId, CmdlineRemoteProto.Message.ControllerMessage.newBuilder().setType(
      CmdlineRemoteProto.Message.ControllerMessage.Type.CONSTANT_SEARCH_RESULT).setConstantSearchResult(builder.build()).build()
    ));
  }

  /**
   * Returns whether the project is in dumb (indexing) mode, polling briefly
   * in case indexing is just about to finish.
   */
  private boolean isDumbMode() {
    final DumbService dumbService = DumbService.getInstance(myProject);
    boolean isDumb = dumbService.isDumb();
    if (isDumb) {
      // wait some time
      for (int idx = 0; idx < 5; idx++) {
        TimeoutUtil.sleep(10L);
        isDumb = dumbService.isDumb();
        if (!isDumb) {
          break;
        }
      }
    }
    return isDumb;
  }

  /**
   * Searches for usages of a removed constant by scanning code for its bare
   * identifier (references can no longer be resolved). Collects the paths of
   * top-level classes containing candidate usages into {@code affectedPaths}.
   * Returns false if the search could not be completed reliably.
   */
  private boolean performRemovedConstantSearch(@Nullable final PsiClass aClass, String fieldName, int fieldAccessFlags, final Set<String> affectedPaths) {
    final PsiSearchHelper psiSearchHelper = PsiSearchHelper.SERVICE.getInstance(myProject);

    final Ref<Boolean> result = new Ref<>(Boolean.TRUE);
    final PsiFile fieldContainingFile = aClass != null? aClass.getContainingFile() : null;

    SearchScope searchScope = getSearchScope(aClass, fieldAccessFlags);
    if (containsUnloadedModules(searchScope)) {
      // Cannot guarantee completeness when unloaded modules may use the field.
      LOG.debug("Constant search tasks: there may be usages of " + (aClass!= null ? aClass.getQualifiedName() + "::": "") + fieldName + " in unloaded modules");
      return false;
    }
    processIdentifiers(psiSearchHelper, new PsiElementProcessor<PsiIdentifier>() {
      @Override
      public boolean execute(@NotNull PsiIdentifier identifier) {
        try {
          final PsiElement parent = identifier.getParent();
          if (parent instanceof PsiReferenceExpression) {
            final PsiClass ownerClass = getOwnerClass(parent);
            if (ownerClass != null && ownerClass.getQualifiedName() != null) {
              final PsiFile usageFile = ownerClass.getContainingFile();
              // The field's own file is excluded: it is recompiled anyway.
              if (usageFile != null && !usageFile.equals(fieldContainingFile)) {
                final VirtualFile vFile = usageFile.getOriginalFile().getVirtualFile();
                if (vFile != null) {
                  affectedPaths.add(vFile.getPath());
                }
              }
            }
          }
          return true;
        }
        catch (PsiInvalidElementAccessException ignored) {
          result.set(Boolean.FALSE);
          LOG.debug("Constant search task: PIEAE thrown while searching of usages of removed constant");
          return false;
        }
      }
    }, fieldName, searchScope, UsageSearchContext.IN_CODE);

    return result.get();
  }

  /**
   * Chooses the narrowest safe scope: project-wide by default, restricted to
   * the declaring package (intersected with the class use scope) for
   * package-local fields.
   */
  private SearchScope getSearchScope(PsiClass aClass, int fieldAccessFlags) {
    SearchScope searchScope = GlobalSearchScope.projectScope(myProject);
    if (aClass != null && isPackageLocal(fieldAccessFlags)) {
      final PsiFile containingFile = aClass.getContainingFile();
      if (containingFile instanceof PsiJavaFile) {
        final String packageName = ((PsiJavaFile)containingFile).getPackageName();
        final PsiPackage aPackage = JavaPsiFacade.getInstance(myProject).findPackage(packageName);
        if (aPackage != null) {
          searchScope = PackageScope.packageScope(aPackage, false);
          searchScope = searchScope.intersectWith(aClass.getUseScope());
        }
      }
    }
    return searchScope;
  }

  /**
   * Runs a word search for {@code identifier} restricted (for global scopes)
   * to Java files, invoking the processor for each PsiIdentifier occurrence.
   */
  private static boolean processIdentifiers(PsiSearchHelper helper, @NotNull final PsiElementProcessor<PsiIdentifier> processor, @NotNull final String identifier, @NotNull SearchScope searchScope, short searchContext) {
    TextOccurenceProcessor processor1 =
      (element, offsetInElement) -> !(element instanceof PsiIdentifier) || processor.execute((PsiIdentifier)element);
    SearchScope javaScope = searchScope instanceof GlobalSearchScope
                            ? GlobalSearchScope.getScopeRestrictedByFileTypes((GlobalSearchScope)searchScope, JavaFileType.INSTANCE)
                            : searchScope;
    return helper.processElementsWithWord(processor1, javaScope, identifier, searchContext, true, false);
  }

  /**
   * Collects the files that directly reference the given (still existing)
   * field into {@code affectedPaths}. Returns false when references could
   * not be gathered reliably.
   */
  private boolean affectDirectUsages(final PsiField psiField,
                                     final boolean ignoreAccessScope,
                                     final Set<String> affectedPaths) throws ProcessCanceledException {
    return ReadAction.compute(() -> {
      if (psiField.isValid()) {
        final PsiFile fieldContainingFile = psiField.getContainingFile();
        final Set<PsiFile> processedFiles = new HashSet<>();
        if (fieldContainingFile != null) {
          processedFiles.add(fieldContainingFile);
        }
        // if field is invalid, the file might be changed, so next time it is compiled,
        // the constant value change, if any, will be processed
        final Collection<PsiReferenceExpression> references = doFindReferences(psiField, ignoreAccessScope);
        if (references == null) {
          return false;
        }
        for (final PsiReferenceExpression ref : references) {
          final PsiElement usage = ref.getElement();
          final PsiFile containingPsi = usage.getContainingFile();
          if (containingPsi != null && processedFiles.add(containingPsi)) {
            final VirtualFile vFile = containingPsi.getOriginalFile().getVirtualFile();
            if (vFile != null) {
              affectedPaths.add(vFile.getPath());
            }
          }
        }
      }
      return true;
    });
  }

  @Nullable("returns null if search failed")
  private Collection<PsiReferenceExpression> doFindReferences(final PsiField psiField, boolean ignoreAccessScope) {
    final SmartList<PsiReferenceExpression> result = new SmartList<>();
    // When the access modifier changed, the effective scope may have widened,
    // so search the whole containing file's use scope instead of the field's.
    final SearchScope searchScope = (ignoreAccessScope? psiField.getContainingFile() : psiField).getUseScope();
    if (containsUnloadedModules(searchScope)) {
      PsiClass aClass = psiField.getContainingClass();
      LOG.debug("Constant search tasks: there may be usages of " + (aClass != null ? aClass.getQualifiedName() + "::" : "") + psiField.getName() + " in unloaded modules");
      return null;
    }

    processIdentifiers(PsiSearchHelper.SERVICE.getInstance(myProject), new PsiElementProcessor<PsiIdentifier>() {
      @Override
      public boolean execute(@NotNull PsiIdentifier identifier) {
        final PsiElement parent = identifier.getParent();
        if (parent instanceof PsiReferenceExpression) {
          final PsiReferenceExpression refExpression = (PsiReferenceExpression)parent;
          if (refExpression.isReferenceTo(psiField)) {
            synchronized (result) {
              // processor's code may be invoked from multiple threads
              result.add(refExpression);
            }
          }
        }
        return true;
      }
    }, psiField.getName(), searchScope, UsageSearchContext.IN_CODE);

    return result;
  }

  /**
   * Walks up from {@code element} to its top-level class. Returns null for
   * JSP content, non-Java languages, or when no top-level class is found.
   */
  @Nullable
  private static PsiClass getOwnerClass(PsiElement element) {
    while (!(element instanceof PsiFile)) {
      if (element instanceof PsiClass && element.getParent() instanceof PsiJavaFile) { // top-level class
        final PsiClass psiClass = (PsiClass)element;
        if (JspPsiUtil.isInJspFile(psiClass)) {
          return null;
        }
        final PsiFile containingFile = psiClass.getContainingFile();
        if (containingFile == null) {
          return null;
        }
        return JavaLanguage.INSTANCE.equals(containingFile.getLanguage())? psiClass : null;
      }
      element = element.getParent();
    }
    return null;
  }

  /** Whether the scope may include modules that are currently unloaded. */
  private static boolean containsUnloadedModules(SearchScope scope) {
    if (scope instanceof LocalSearchScope) {
      return false;
    }
    else if (scope instanceof GlobalSearchScope) {
      return !((GlobalSearchScope)scope).getUnloadedModulesBelongingToScope().isEmpty();
    }
    else {
      //cannot happen now, every SearchScope's implementation extends either LocalSearchScope or GlobalSearchScope
      return true;
    }
  }

  /** True when the bytecode access flags denote package-private visibility. */
  private static boolean isPackageLocal(int flags) {
    return (Opcodes.ACC_PUBLIC & flags) == 0 && (Opcodes.ACC_PROTECTED & flags) == 0 && (Opcodes.ACC_PRIVATE & flags) == 0;
  }

  /** True when the bytecode access flags denote private visibility. */
  private static boolean isPrivate(int flags) {
    return (Opcodes.ACC_PRIVATE & flags) != 0;
  }
}
| ThiagoGarciaAlves/intellij-community | java/compiler/impl/src/com/intellij/compiler/server/DefaultMessageHandler.java | Java | apache-2.0 | 18,505 |
package org.xbib.elasticsearch.action.ingest;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.DocumentRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.xbib.elasticsearch.action.ingest.leader.IngestLeaderShardRequest;
import org.xbib.elasticsearch.action.ingest.leader.IngestLeaderShardResponse;
import org.xbib.elasticsearch.action.ingest.leader.TransportLeaderShardIngestAction;
import org.xbib.elasticsearch.action.ingest.replica.IngestReplicaShardRequest;
import org.xbib.elasticsearch.action.ingest.replica.TransportReplicaShardIngestAction;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Transport action for bulk-style "ingest" requests. Resolves concrete
 * indices, groups the contained index/delete requests by target shard, and
 * executes a leader-shard action per shard followed (when a write quorum
 * exists) by a replica-shard action, accumulating per-shard results into a
 * single {@link IngestResponse}.
 */
public class TransportIngestAction extends HandledTransportAction<IngestRequest, IngestResponse> {

    // Whether missing document IDs may be auto-generated for index requests
    // (configured via "action.allow_id_generation", default true).
    private final boolean allowIdGeneration;

    private final ClusterService clusterService;

    private final TransportLeaderShardIngestAction leaderShardIngestAction;

    private final TransportReplicaShardIngestAction replicaShardIngestAction;

    @Inject
    public TransportIngestAction(Settings settings, ThreadPool threadPool,
                                 TransportService transportService, ClusterService clusterService,
                                 TransportLeaderShardIngestAction leaderShardIngestAction,
                                 TransportReplicaShardIngestAction replicaShardIngestAction,
                                 ActionFilters actionFilters,
                                 IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, IngestAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, IngestRequest.class);
        this.clusterService = clusterService;
        this.leaderShardIngestAction = leaderShardIngestAction;
        this.replicaShardIngestAction = replicaShardIngestAction;
        this.allowIdGeneration = this.settings.getAsBoolean("action.allow_id_generation", true);
    }

    @Override
    protected void doExecute(final IngestRequest ingestRequest, final ActionListener<IngestResponse> listener) {
        final long startTime = System.currentTimeMillis();
        final IngestResponse ingestResponse = new IngestResponse();
        ingestResponse.setIngestId(ingestRequest.ingestId());
        ClusterState clusterState = clusterService.state();
        try {
            // Refuse early if writes are globally blocked.
            clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE);
        } catch (ClusterBlockException e) {
            listener.onFailure(e);
            return;
        }
        final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver);
        MetaData metaData = clusterState.metaData();
        // first, resolve indices/routing and preprocess each sub-request;
        // failed index requests are recorded as failures but do not abort.
        final List<ActionRequest<?>> requests = new LinkedList<>();
        for (ActionRequest<?> request : ingestRequest.requests()) {
            String concreteIndex = concreteIndices.resolveIfAbsent((DocumentRequest)request);
            if (request instanceof IndexRequest) {
                try {
                    IndexRequest indexRequest = (IndexRequest) request;
                    indexRequest.routing(metaData.resolveIndexRouting(indexRequest.routing(), concreteIndex));
                    indexRequest.index(concreteIndex);
                    MappingMetaData mappingMd = null;
                    if (metaData.hasIndex(concreteIndex)) {
                        mappingMd = metaData.index(concreteIndex).mappingOrDefault(indexRequest.type());
                    }
                    indexRequest.process(metaData, mappingMd, allowIdGeneration, concreteIndex);
                    requests.add(indexRequest);
                } catch (Throwable e) {
                    logger.error(e.getMessage(), e);
                    ingestResponse.addFailure(new IngestActionFailure(-1L, null, ExceptionsHelper.detailedMessage(e)));
                }
            } else if (request instanceof DeleteRequest) {
                DeleteRequest deleteRequest = (DeleteRequest) request;
                deleteRequest.routing(metaData.resolveIndexRouting(deleteRequest.routing(), concreteIndex));
                deleteRequest.index(concreteIndex);
                requests.add(deleteRequest);
            } else {
                throw new ElasticsearchException("action request not known: " + request.getClass().getName());
            }
        }
        // second, go over all the requests and create a shard request map
        Map<ShardId, List<ActionRequest<?>>> requestsByShard = new HashMap<>();
        for (ActionRequest<?> request : requests) {
            if (request instanceof IndexRequest) {
                IndexRequest indexRequest = (IndexRequest) request;
                String concreteIndex = concreteIndices.getConcreteIndex(indexRequest.index());
                ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, indexRequest.type(), indexRequest.id(), indexRequest.routing()).shardId();
                List<ActionRequest<?>> list = requestsByShard.get(shardId);
                if (list == null) {
                    list = new LinkedList<>();
                    requestsByShard.put(shardId, list);
                }
                list.add(request);
            } else if (request instanceof DeleteRequest) {
                DeleteRequest deleteRequest = (DeleteRequest) request;
                String concreteIndex = concreteIndices.getConcreteIndex(deleteRequest.index());
                ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId();
                List<ActionRequest<?>> list = requestsByShard.get(shardId);
                if (list == null) {
                    list = new LinkedList<>();
                    requestsByShard.put(shardId, list);
                }
                list.add(deleteRequest);
            }
        }
        if (requestsByShard.isEmpty()) {
            logger.error("no shards to execute ingest");
            ingestResponse.setSuccessSize(0)
                    .addFailure(new IngestActionFailure(-1L, null, "no shards to execute ingest"))
                    .setTookInMillis(System.currentTimeMillis() - startTime);
            listener.onResponse(ingestResponse);
            return;
        }
        // third, for each shard, execute leader/replica action
        // responseCounter tracks outstanding shard-level operations; the final
        // response is sent when it reaches zero. It is incremented again for
        // each replica round that gets scheduled.
        final AtomicInteger successCount = new AtomicInteger(0);
        final AtomicInteger responseCounter = new AtomicInteger(requestsByShard.size());
        for (Map.Entry<ShardId, List<ActionRequest<?>>> entry : requestsByShard.entrySet()) {
            final ShardId shardId = entry.getKey();
            final List<ActionRequest<?>> actionRequests = entry.getValue();
            final IngestLeaderShardRequest ingestLeaderShardRequest = new IngestLeaderShardRequest()
                    .setIngestId(ingestRequest.ingestId())
                    .setShardId(shardId)
                    .setActionRequests(actionRequests)
                    .timeout(ingestRequest.timeout())
                    .requiredConsistency(ingestRequest.requiredConsistency());
            leaderShardIngestAction.execute(ingestLeaderShardRequest, new ActionListener<IngestLeaderShardResponse>() {
                @Override
                public void onResponse(IngestLeaderShardResponse ingestLeaderShardResponse) {
                    long millis = System.currentTimeMillis() - startTime;
                    ingestResponse.setIngestId(ingestRequest.ingestId());
                    ingestResponse.setLeaderResponse(ingestLeaderShardResponse);
                    successCount.addAndGet(ingestLeaderShardResponse.getSuccessCount());
                    int quorumShards = ingestLeaderShardResponse.getQuorumShards();
                    if (quorumShards < 0) {
                        // Negative quorum means the required write consistency
                        // could not be met on this shard.
                        ingestResponse.addFailure(new IngestActionFailure(ingestRequest.ingestId(), shardId, "quorum not reached for shard " + shardId));
                    } else if (quorumShards > 0) {
                        // Replicas exist: schedule the replica round and keep
                        // the response open until it also completes.
                        responseCounter.incrementAndGet();
                        final IngestReplicaShardRequest ingestReplicaShardRequest =
                                new IngestReplicaShardRequest(ingestLeaderShardRequest.getIngestId(),
                                        ingestLeaderShardRequest.getShardId(),
                                        ingestLeaderShardRequest.getActionRequests());
                        ingestReplicaShardRequest.timeout(ingestRequest.timeout());
                        replicaShardIngestAction.execute(ingestReplicaShardRequest, new ActionListener<TransportReplicaShardIngestAction.ReplicaOperationResponse>() {
                            @Override
                            public void onResponse(TransportReplicaShardIngestAction.ReplicaOperationResponse response) {
                                long millis = Math.max(1, System.currentTimeMillis() - startTime);
                                ingestResponse.addReplicaResponses(response.responses());
                                if (responseCounter.decrementAndGet() == 0) {
                                    ingestResponse.setSuccessSize(successCount.get())
                                            .setTookInMillis(millis);
                                    listener.onResponse(ingestResponse);
                                }
                            }

                            @Override
                            public void onFailure(Throwable e) {
                                long millis = Math.max(1, System.currentTimeMillis() - startTime);
                                logger.error(e.getMessage(), e);
                                ingestResponse.addFailure(new IngestActionFailure(ingestRequest.ingestId(), shardId, ExceptionsHelper.detailedMessage(e)));
                                if (responseCounter.decrementAndGet() == 0) {
                                    ingestResponse.setSuccessSize(successCount.get())
                                            .setTookInMillis(millis);
                                    listener.onResponse(ingestResponse);
                                }
                            }
                        });
                    }
                    if (responseCounter.decrementAndGet() == 0) {
                        ingestResponse.setSuccessSize(successCount.get()).setTookInMillis(millis);
                        listener.onResponse(ingestResponse);
                    }
                }

                @Override
                public void onFailure(Throwable e) {
                    long millis = System.currentTimeMillis() - startTime;
                    logger.error(e.getMessage(), e);
                    ingestResponse.addFailure(new IngestActionFailure(-1L, shardId, ExceptionsHelper.detailedMessage(e)));
                    if (responseCounter.decrementAndGet() == 0) {
                        ingestResponse.setSuccessSize(successCount.get()).setTookInMillis(millis);
                        listener.onResponse(ingestResponse);
                    }
                }
            });
        }
    }

    /**
     * Small cache mapping index names/aliases to resolved concrete index
     * names for the duration of a single request.
     */
    private static class ConcreteIndices {
        private final ClusterState state;
        private final IndexNameExpressionResolver indexNameExpressionResolver;
        private final Map<String, String> indices = new HashMap<>();

        ConcreteIndices(ClusterState state, IndexNameExpressionResolver indexNameExpressionResolver) {
            this.state = state;
            this.indexNameExpressionResolver = indexNameExpressionResolver;
        }

        // Returns a previously resolved concrete index (null if never resolved).
        String getConcreteIndex(String indexOrAlias) {
            return indices.get(indexOrAlias);
        }

        // Resolves and caches the concrete index for the request's index name.
        String resolveIfAbsent(DocumentRequest<?> request) {
            String concreteIndex = indices.get(request.index());
            if (concreteIndex == null) {
                concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request);
                indices.put(request.index(), concreteIndex);
            }
            return concreteIndex;
        }
    }
}
| jprante/elasticsearch-helper | src/main/java/org/xbib/elasticsearch/action/ingest/TransportIngestAction.java | Java | apache-2.0 | 13,193 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.translate;
import com.google.devtools.j2objc.GenerationTest;
import com.google.devtools.j2objc.Options;
import java.io.IOException;
/**
 * Unit tests for {@link GwtConverter}.
 *
 * @author Keith Stanger
 */
public class GwtConverterTest extends GenerationTest {

  @Override
  protected void setUp() throws IOException {
    super.setUp();
    // Provide stub sources for the GWT runtime class and the Guava
    // GwtIncompatible annotation used by the tests below.
    addSourceFile(
        "package com.google.gwt.core.client;"
        + "public class GWT { public static <T> T create(Class<T> classLiteral) { return null; } "
        + " public static boolean isClient() { return false; }"
        + " public static boolean isScript() { return false; } }",
        "com/google/gwt/core/client/GWT.java");
    addSourceFile(
        "package com.google.common.annotations; "
        + "import java.lang.annotation.*; "
        + "@Retention(RetentionPolicy.CLASS) "
        + "@Target({ ElementType.METHOD }) "
        + "public @interface GwtIncompatible { "
        + " String value(); }",
        "com/google/common/annotations/GwtIncompatible.java");
  }

  @Override
  protected void tearDown() throws Exception {
    // Reset the global option so later tests are unaffected.
    Options.setStripGwtIncompatibleMethods(false);
    super.tearDown();
  }

  public void testGwtCreate() throws IOException {
    String translation = translateSourceFile(
        "import com.google.gwt.core.client.GWT;"
        + "class Test { "
        + "  Test INSTANCE = GWT.create(Test.class);"
        + "  String FOO = foo();" // Regression requires subsequent non-mapped method invocation.
        + "  static String foo() { return \"foo\"; } }", "Test", "Test.m");
    assertTranslation(translation, "Test_set_INSTANCE_(self, [Test_class_() newInstance]);");
  }

  public void testGwtIsScript() throws IOException {
    String translation = translateSourceFile(
        "import com.google.gwt.core.client.GWT;"
        + "class Test { boolean test() { "
        + "  if (GWT.isClient() || GWT.isScript()) { return true; } return false; }}",
        "Test", "Test.m");
    assertTranslatedLines(translation, "- (jboolean)test {", "return NO;", "}");
  }

  // Verify GwtIncompatible method is stripped when stripping is enabled.
  public void testGwtIncompatibleStrip() throws IOException {
    Options.setStripGwtIncompatibleMethods(true);
    String translation = translateSourceFile(
        "import com.google.common.annotations.GwtIncompatible;"
        + "class Test { "
        + "  @GwtIncompatible(\"don't use\") boolean test() { return false; }}",
        "Test", "Test.h");
    assertNotInTranslation(translation, "- (BOOL)test;");
  }

  // Verify GwtIncompatible method is not stripped by default (flag unset).
  public void testGwtIncompatibleNoStrip() throws IOException {
    String translation = translateSourceFile(
        "import com.google.common.annotations.GwtIncompatible;"
        + "class Test { "
        + "  @GwtIncompatible(\"don't use\") boolean test() { return false; }}",
        "Test", "Test.h");
    assertTranslation(translation, "- (jboolean)test;");
  }

  // Verify GwtIncompatible method is not stripped even with the flag set, if
  // its value is in GwtConverter's compatibleAPIs list.
  public void testGwtIncompatibleNoStripKnownValue() throws IOException {
    Options.setStripGwtIncompatibleMethods(true);
    String translation = translateSourceFile(
        "import com.google.common.annotations.GwtIncompatible;"
        + "class Test { "
        + "  @GwtIncompatible(\"reflection\") boolean test() { return false; }}",
        "Test", "Test.h");
    assertTranslation(translation, "- (jboolean)test;");
  }
}
| Sellegit/j2objc | translator/src/test/java/com/google/devtools/j2objc/translate/GwtConverterTest.java | Java | apache-2.0 | 4,120 |
# Copyright (c) 2007-2019 UShareSoft, All rights reserved
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from texttable import Texttable
from ussclicore.utils import generics_utils
def scan_status(scan):
    """Return a human-readable status string for a scan.

    Args:
        scan: an object whose ``status`` attribute carries the boolean
            flags ``complete``, ``error`` and ``cancelled`` plus an
            integer ``percentage``.

    Returns:
        "Done" for a successful completed scan, "<N>%" while the scan is
        still running cleanly, and "Error" for any errored or cancelled
        scan.
    """
    status = scan.status
    if status.error or status.cancelled:
        return "Error"
    if status.complete:
        return "Done"
    return str(status.percentage) + "%"
def scan_table(scanInstances, scan = None):
    """Build a Texttable listing scanned instances and their scans.

    Args:
        scanInstances: iterable of scanned-instance objects, each carrying
            ``dbId``, ``name``, ``overlayIncluded``, ``distribution`` and a
            ``scans.scan`` collection.
        scan: optional single scan; when given, a one-row table for that
            scan is returned and ``scanInstances`` is ignored.

    Returns:
        A populated ``Texttable`` instance.
    """
    table = Texttable(800)
    table.set_cols_dtype(["t"] * 5)
    table.set_cols_align(["c", "l", "c", "c", "c"])
    table.header(["Id", "Name", "Status", "Distribution", "With overlay"])

    if scan:
        # Single-scan mode: one row, no distribution/overlay info.
        table.add_row([scan.dbId, "\t" + scan.name, scan_status(scan), "", ""])
        return table

    for instance in scanInstances:
        overlay_flag = 'X' if instance.overlayIncluded else ''
        distro = instance.distribution
        distro_label = distro.name + " " + distro.version + " " + distro.arch
        # Parent row for the scanned instance itself (no status of its own).
        table.add_row([instance.dbId, instance.name, "", distro_label, overlay_flag])
        # Child rows: the instance's scans, ordered by name and indented.
        ordered = generics_utils.order_list_object_by(instance.scans.scan, "name")
        for child in ordered:
            table.add_row([child.dbId, "\t" + child.name, scan_status(child), "", ""])
    return table
| emuus/hammr | hammr/utils/scan_utils.py | Python | apache-2.0 | 1,969 |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aurora.scheduler.state;
import org.apache.aurora.scheduler.storage.entities.ILock;
import org.apache.aurora.scheduler.storage.entities.ILockKey;
/**
 * Defines all {@link ILock} primitives like: acquire, release, validate.
 * Implementations are expected to persist locks so they survive across the
 * calls shown here; see individual methods for per-operation contracts.
 */
public interface LockManager {
  /**
   * Creates, saves and returns a new {@link ILock} with the specified {@link ILockKey}.
   * This method is not re-entrant, i.e. attempting to acquire a lock with the
   * same key would throw a {@link LockException}.
   *
   * @param lockKey A key uniquely identify the lock to be created.
   * @param user Name of the user requesting a lock.
   * @return A new ILock instance.
   * @throws LockException In case the lock with specified key already exists.
   */
  ILock acquireLock(ILockKey lockKey, String user) throws LockException;

  /**
   * Releases (removes) the specified {@link ILock} from the system.
   *
   * @param lock {@link ILock} to remove from the system.
   */
  void releaseLock(ILock lock);

  /**
   * Asserts that an entity is not locked.
   *
   * @param context Operation context to validate with the provided lock.
   * @throws LockException If provided context is locked.
   */
  void assertNotLocked(ILockKey context) throws LockException;

  /**
   * Returns all available locks stored.
   * Iteration order is implementation-defined (unspecified by this interface).
   *
   * @return Set of {@link ILock} instances.
   */
  Iterable<ILock> getLocks();

  /**
   * Thrown when {@link ILock} related operation failed.
   * Checked so callers must explicitly handle lock conflicts.
   */
  class LockException extends Exception {
    public LockException(String msg) {
      super(msg);
    }
  }
}
| protochron/aurora | src/main/java/org/apache/aurora/scheduler/state/LockManager.java | Java | apache-2.0 | 2,134 |
(function registerServiceModule() {
  'use strict';

  // Declare the 'newplayer.service' Angular module with no dependencies so
  // other files can register services on it.
  angular.module('newplayer.service', []);
}());
| thenewgroup/elx-newplayer | app/scripts/service/service.module.js | JavaScript | apache-2.0 | 80 |
<?php
/**
* @file
* Contains \Drupal\Component\Utility\SortArray.
*/
namespace Drupal\Component\Utility;
/**
 * Provides generic array sorting helper methods.
 *
 * All methods are uasort()-compatible comparison callbacks.
 */
class SortArray {

  /**
   * Sorts a structured array by the 'weight' element.
   *
   * Note that the sorting is by the 'weight' array element, not by the render
   * element property '#weight'.
   *
   * Callback for uasort() used in various functions.
   *
   * @param array $a
   *   First item for comparison. The compared items should be associative
   *   arrays that optionally include a 'weight' element. For items without a
   *   'weight' element, a default value of 0 will be used.
   * @param array $b
   *   Second item for comparison.
   *
   * @return int
   *   The comparison result for uasort().
   */
  public static function sortByWeightElement(array $a, array $b) {
    return static::sortByKeyInt($a, $b, 'weight');
  }

  /**
   * Sorts a structured array by '#weight' property.
   *
   * Callback for uasort() within element_children().
   *
   * @param array $a
   *   First item for comparison. The compared items should be associative
   *   arrays that optionally include a '#weight' key.
   * @param array $b
   *   Second item for comparison.
   *
   * @return int
   *   The comparison result for uasort().
   */
  public static function sortByWeightProperty($a, $b) {
    return static::sortByKeyInt($a, $b, '#weight');
  }

  /**
   * Sorts a structured array by 'title' key (no # prefix).
   *
   * Callback for uasort() within system_admin_index().
   *
   * @param array $a
   *   First item for comparison. The compared items should be associative
   *   arrays that optionally include a 'title' key.
   * @param array $b
   *   Second item for comparison.
   *
   * @return int
   *   The comparison result for uasort().
   */
  public static function sortByTitleElement($a, $b) {
    return static::sortByKeyString($a, $b, 'title');
  }

  /**
   * Sorts a structured array by '#title' property.
   *
   * Callback for uasort() within:
   * - system_modules()
   * - theme_simpletest_test_table()
   *
   * @param array $a
   *   First item for comparison. The compared items should be associative
   *   arrays that optionally include a '#title' key.
   * @param array $b
   *   Second item for comparison.
   *
   * @return int
   *   The comparison result for uasort().
   */
  public static function sortByTitleProperty($a, $b) {
    return static::sortByKeyString($a, $b, '#title');
  }

  /**
   * Sorts a string array item by an arbitrary key.
   *
   * Missing keys (or non-array items) are treated as the empty string, and
   * the comparison uses case-insensitive "natural order".
   *
   * @param array $a
   *   First item for comparison.
   * @param array $b
   *   Second item for comparison.
   * @param string $key
   *   The key to use in the comparison.
   *
   * @return int
   *   The comparison result for uasort().
   */
  public static function sortByKeyString($a, $b, $key) {
    $first = (is_array($a) && isset($a[$key])) ? $a[$key] : '';
    $second = (is_array($b) && isset($b[$key])) ? $b[$key] : '';
    return strnatcasecmp($first, $second);
  }

  /**
   * Sorts an integer array item by an arbitrary key.
   *
   * Missing keys (or non-array items) are treated as 0.
   *
   * @param array $a
   *   First item for comparison.
   * @param array $b
   *   Second item for comparison.
   * @param string $key
   *   The key to use in the comparison.
   *
   * @return int
   *   The comparison result for uasort().
   */
  public static function sortByKeyInt($a, $b, $key) {
    $first = (is_array($a) && isset($a[$key])) ? $a[$key] : 0;
    $second = (is_array($b) && isset($b[$key])) ? $b[$key] : 0;
    if ($first == $second) {
      return 0;
    }
    return ($first < $second) ? -1 : 1;
  }

}
| nickopris/musicapp | www/core/lib/Drupal/Component/Utility/SortArray.php | PHP | apache-2.0 | 3,665 |
namespace Xilium.CefGlue
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Xilium.CefGlue.Interop;
    /// <summary>
    /// Callback interface for CefBrowserHost::GetNavigationEntries. The methods of
    /// this class will be called on the browser process UI thread.
    /// </summary>
    public abstract unsafe partial class CefNavigationEntryVisitor
    {
        // Native-to-managed trampoline: invoked by CEF for each navigation
        // entry; wraps the raw entry pointer and forwards to Visit().
        private int visit(cef_navigation_entry_visitor_t* self, cef_navigation_entry_t* entry, int current, int index, int total)
        {
            CheckSelf(self);

            var m_entry = CefNavigationEntry.FromNative(entry);
            var m_result = Visit(m_entry, current != 0, index, total);
            // Dispose the wrapper immediately: the native entry must not be
            // referenced outside this callback (see Visit's contract below).
            m_entry.Dispose();
            // Map the managed bool to the native convention:
            // 1 = continue visiting, 0 = stop.
            return m_result ? 1 : 0;
        }

        /// <summary>
        /// Method that will be executed. Do not keep a reference to |entry| outside of
        /// this callback. Return true to continue visiting entries or false to stop.
        /// |current| is true if this entry is the currently loaded navigation entry.
        /// |index| is the 0-based index of this entry and |total| is the total number
        /// of entries.
        /// </summary>
        protected abstract bool Visit(CefNavigationEntry entry, bool current, int index, int total);
    }
| mindthegab/SFE-Minuet-DesktopClient | minuet/CefGlue/Classes.Handlers/CefNavigationEntryVisitor.cs | C# | apache-2.0 | 1,363 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.el;
/**
 * An EL expression that can both read and, when not read-only, write a value
 * against an {@link ELContext}. Instances are created by an ExpressionFactory
 * (see the Expression Language specification for the full contract).
 */
public abstract class ValueExpression extends Expression {

    /** Returns the type the result of {@link #getValue} is coerced to, as supplied when the expression was created. */
    public abstract Class<?> getExpectedType();

    /** Returns the most general type acceptable to {@link #setValue} for this expression, evaluated against the given context. */
    public abstract Class<?> getType(ELContext context) throws NullPointerException, PropertyNotFoundException, ELException;

    /** Returns whether {@link #setValue} would always fail for this expression in the given context. */
    public abstract boolean isReadOnly(ELContext context) throws NullPointerException, PropertyNotFoundException, ELException;

    /** Sets the value referenced by this expression in the given context; throws PropertyNotWritableException if read-only. */
    public abstract void setValue(ELContext context, Object value) throws NullPointerException, PropertyNotFoundException, PropertyNotWritableException, ELException;

    /** Evaluates this expression against the given context and returns the resulting value, coerced to the expected type. */
    public abstract Object getValue(ELContext context) throws NullPointerException, PropertyNotFoundException, ELException;

    /**
     * Returns a {@link ValueReference} for the base/property pair this
     * expression resolves to, or {@code null}. The default implementation
     * always returns {@code null}; subclasses may override.
     */
    public ValueReference getValueReference(ELContext context){
        return null;
    }
}
| salyh/javamailspec | geronimo-el_2.2_spec/src/main/java/javax/el/ValueExpression.java | Java | apache-2.0 | 1,582 |
/*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.test.acceptance.framework.testhelpers;
import com.thoughtworks.selenium.Selenium;
public class CustomPropertiesHelper {

    private static final String UPDATE_PAGE = "customPropertiesUpdate.ftl";
    private final Selenium selenium;

    public CustomPropertiesHelper(Selenium selenium) {
        this.selenium = selenium;
    }

    /**
     * Opens the custom-properties update page with the given query string,
     * which applies the supplied configuration override(s). All public
     * setters below delegate here to avoid repeating the URL assembly.
     *
     * @param query The query string to append (without the leading '?').
     */
    private void update(String query) {
        selenium.open(UPDATE_PAGE + "?" + query);
    }

    /**
     * Sets the language and country code.
     * See application\src\main\resources\org\mifos\config\resources\applicationConfiguration.default.properties
     * for more details about these two settings.
     * @param languageCode The language code.
     * @param countryCode The country code.
     */
    public void setLocale(String languageCode, String countryCode) {
        update("Localization.LanguageCode=" + languageCode + "&Localization.CountryCode=" + countryCode);
    }

    /**
     * Sets the number of digits after the decimal sign to <tt>digits</tt>.
     * @param digits Number of digits after the decimal.
     */
    public void setDigitsAfterDecimal(int digits) {
        update("AccountingRules.DigitsAfterDecimal=" + digits);
    }

    /**
     * Sets the minimum age constraint for the clients.
     * See application\src\main\resources\org\mifos\config\resources\applicationConfiguration.default.properties
     * @param minimumAge The minimum age for clients.
     */
    public void setMinimumAgeForClients(int minimumAge) {
        update("ClientRules.MinimumAgeForNewClients=" + minimumAge);
    }

    /**
     * Sets the maximum age constraint for the clients.
     * See application\src\main\resources\org\mifos\config\resources\applicationConfiguration.default.properties
     * @param maximumAge The maximum age for clients.
     */
    public void setMaximumAgeForClients(int maximumAge) {
        update("ClientRules.MaximumAgeForNewClients=" + maximumAge);
    }

    /**
     * Decides whether the user is required to store the client family information.
     * See application\src\main\resources\org\mifos\config\resources\applicationConfiguration.default.properties
     * @param req True if it is required and false if it is not required.
     */
    public void setAreFamilyDetailsRequired(boolean req) {
        update("ClientInformation.AreFamilyDetailsRequired=" + req);
    }

    /**
     * Sets the maximum number of family members per client.
     * See application\src\main\resources\org\mifos\config\resources\applicationConfiguration.default.properties
     * @param maximumNumberOfFamilyMembers Specifies the maximum number of family members for each client.
     */
    // NOTE: the method name misspells "Members"; kept as-is because callers
    // elsewhere depend on this public signature.
    public void setMaximumNumberOfFamilyMemebers(int maximumNumberOfFamilyMembers) {
        update("ClientInformation.MaximumNumberOfFamilyMembers=" + maximumNumberOfFamilyMembers);
    }

    /**
     * Sets the number of digits after the decimal sign for interest to <tt>digits</tt>.
     * @param digits Number of digits after the decimal.
     */
    public void setDigitsAfterDecimalForInterest(int digits) {
        update("AccountingRules.DigitsAfterDecimalForInterest=" + digits);
    }

    /**
     * Sets the maximum allowed interest rate.
     * @param interest maximum interest rate.
     */
    public void setMaxInterest(int interest) {
        update("AccountingRules.MaxInterest=" + interest);
    }

    /**
     * Sets the minimum allowed interest rate.
     * @param interest minimum interest rate.
     */
    public void setMinInterest(int interest) {
        update("AccountingRules.MinInterest=" + interest);
    }

    /**
     * Sets working days of the week.
     * @param workingDays working days.
     */
    public void setWorkingDays(String workingDays) {
        update("FiscalCalendarRules.WorkingDays=" + workingDays);
    }

    /**
     * Sets the additional currencies.
     * See application\src\main\resources\org\mifos\config\resources\applicationConfiguration.default.properties
     * @param additionalCurrencies additional currencies.
     */
    public void setAdditionalCurrenciesCode(String additionalCurrencies) {
        update("AccountingRules.AdditionalCurrencyCodes=" + additionalCurrencies);
    }
}
| maduhu/mifos-head | acceptanceTests/src/test/java/org/mifos/test/acceptance/framework/testhelpers/CustomPropertiesHelper.java | Java | apache-2.0 | 5,175 |
/*
Derby - Class org.apache.derbyTesting.functionTests.tests.lang.closed
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.lang;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.sql.SQLWarning;
import com.pivotal.gemfirexd.internal.tools.ij;
import com.pivotal.gemfirexd.internal.tools.JDBCDisplayUtil;
import org.apache.derbyTesting.functionTests.util.TestUtil;
/**
Test execution of closed JDBC objects. Executing or accessing a closed
object should report that it is closed.
<p>
Note that alot of this behavior is not very specifically specified in
the JDBC guide, so this test is local to our own handler. Running JBMS
under other handlers (such as weblogic) may produce different results due
to how they cache data and reuse client-side objects.
*/
public class closed implements Runnable {

    // NOTE(review): never read or written after initialization; looks like a
    // leftover flag for JSR-169 (small-device JDBC) configuration -- confirm.
    private static boolean jsr169_test = false;

    // Entry point: runs each sub-test against one connection, then shuts down
    // the database and the system. Prints PASS only if every step passed.
    public static void main(String[] args) {
        System.out.println("Test closed starting");

        boolean passed = true;

        try {
            Connection conn;

            // use the ij utility to read the property file and
            // make the initial connection.
            ij.getPropertyArg(args);
            conn = ij.startJBMS();

            passed = testDerby62(conn) && passed;

            // want all tests to run regardless of intermediate errors
            passed = testStatement(conn) && passed;
            passed = testPreparedStatement(conn) && passed;
            passed = testResultSet(conn) && passed;

            // this test needs to be last, because the connection will
            // be closed by it.
            passed = testConnection(conn) && passed;

            if (!conn.isClosed()) {
                passed = false;
                System.out.println("FAIL -- connection not closed by test");
                conn.close();
            }

            // shutdown the database
            // NOTE(review): these two assignments overwrite the accumulated
            // result instead of AND-ing with it -- TODO confirm intended.
            System.out.println("Test database shutdown ...");
            passed = shutdownTest("wombat", "shutdown=true");

            // shutdown the system
            System.out.println("Test system shutdown ...");
            passed = shutdownTest("", "shutdown=true");

        } catch (Throwable e) {
            passed = false;
            System.out.println("FAIL -- unexpected exception:");
            JDBCDisplayUtil.ShowException(System.out, e);
        }

        if (passed)
            System.out.println("PASS");
        System.out.println("Test closed finished");
    }

    // Exercises shutdown while other connections are busy: starts two worker
    // threads (one sleeping in a server-side procedure, one blocked waiting
    // for an exclusive table lock), requests shutdown, then verifies all
    // connections ended up closed.
    static boolean shutdownTest(String databaseName, String shutdownString) throws SQLException {
    // static boolean shutdownTest(String databaseName, String shutdownString) throws SQLException, IllegalAccessException, ClassNotFoundException, InstantiationException {
        boolean passed = true;

        Connection c1 = TestUtil.getConnection("wombat",null);
        Connection c2 = TestUtil.getConnection("wombat",null);
        Connection c3a = TestUtil.getConnection("wombat",null);
        Connection c3b = TestUtil.getConnection("wombat",null);

        // Best-effort cleanup of leftovers from a previous run; errors ignored.
        try {
            c3a.createStatement().execute("DROP TABLE CLOSED.LOCKME");
        } catch (SQLException sqle) {
        }
        try {
            c3a.createStatement().execute("DROP PROCEDURE SLEEP");
        } catch (SQLException sqle) {
        }

        c3a.createStatement().execute("CREATE TABLE CLOSED.LOCKME(i int)");
        c3a.createStatement().execute("create procedure sleep(t INTEGER) dynamic result sets 0 language java external name 'java.lang.Thread.sleep' parameter style java");

        // Hold a share lock so the EXCLUSIVE lock below must wait.
        c3a.setAutoCommit(false);
        c3a.createStatement().execute("LOCK TABLE CLOSED.LOCKME IN SHARE MODE");

        closed r2 = new closed(c2, "CALL sleep(10000)");
        closed r3 = new closed(c3b, "LOCK TABLE CLOSED.LOCKME IN EXCLUSIVE MODE");

        Thread t2 = new Thread(r2);
        t2.start();
        Thread t3 = new Thread(r3);
        t3.start();

        // Give the workers time to get into their blocking calls.
        try {
            Thread.currentThread().sleep(2000);
        } catch (InterruptedException ie) {
            System.out.println(ie);
        }

        // Request shutdown; the expected SQLException is captured for display.
        SQLException s = null;
        try {
            TestUtil.getConnection(databaseName, shutdownString);
        } catch (SQLException sqle) {
            s = sqle;
        }

        try {
            t2.join();
        } catch (InterruptedException ie) {
            System.out.println(ie);
        }
        try {
            t3.join();
        } catch (InterruptedException ie) {
            System.out.println(ie);
        }

        System.out.println(r2.result);
        System.out.println(r3.result);

        if (s != null)
            JDBCDisplayUtil.ShowException(System.out, s);

        if (!c1.isClosed()) {
            passed = false;
            System.out.println("FAIL -- connection not shutdown " + databaseName + ";" + shutdownString);
            c1.close();
        }
        if (!c2.isClosed()) {
            passed = false;
            System.out.println("FAIL -- active connection not shutdown " + databaseName + ";" + shutdownString);
            c2.close();
        }

        System.out.println("Shutdown test completed.");
        return passed;
    }

    // for the shutdown test
    private Connection cc;   // connection the worker thread executes on
    private String sql;      // statement the worker thread runs
    String result;           // human-readable outcome, read after join()

    private closed(Connection cc, String sql) {
        this.cc = cc;
        this.sql = sql;
    }

    // Worker body for shutdownTest: runs this.sql on this.cc and records a
    // normalized outcome string (SQL states, with InterruptedException
    // messages collapsed to keep canons stable across JVMs).
    public void run() {
        try {
            cc.createStatement().execute(sql);
            result = "Sleep thread completed " + sql;
        } catch (SQLException sqle) {
            // this is to avoid different canons for different JVMs since
            // a java.lang.InterruptedException is thrown.
            StringBuilder sb = new StringBuilder();
            sb.append(sql);
            sb.append(" - ");
            sb.append(sqle.getSQLState());
            while (sqle != null)
            {
                // NOTE(review): the inner null check is always true here (the
                // loop condition guarantees it), so the else branch is dead.
                if (sqle != null) {
                    sb.append(", ");
                    sb.append(sqle.getSQLState());
                    sb.append(" -- ");
                    if (sqle.getMessage().indexOf("InterruptedException") != -1)
                        sb.append("InterruptedException");
                    else
                    {
                        sb.append(sqle.getMessage());
                        sqle.printStackTrace(System.out);
                    }
                } else {
                    sb.append(sqle.getMessage());
                }
                sqle = sqle.getNextException();
            }
            result = sb.toString();
        }
    }

    // Executes DDL through a Statement; in this implementation the Statement
    // object remains usable, so no exception is expected from the third
    // execute (see inline comment).
    static boolean testStatement(Connection conn) throws SQLException {

        Statement s;
        boolean passed = true;

        s = conn.createStatement();
        s.execute("create table t (i int)");
        s.execute("create table s (i int)");

        try {
            s.execute("create table u (i int)");
        } catch (SQLException se) {
            // out impl lets you execute from closed, as stmt object is reusable
            // after it is closed.
            // NOTE(review): s is never closed above -- the comment seems to
            // describe intent rather than what this code does; confirm.
            passed = false; // won't pass unless caught
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on execute of closed statement");

        return passed;
    }

    // Verifies that executing an explicitly closed PreparedStatement raises
    // an SQLException.
    static boolean testPreparedStatement(Connection conn) throws SQLException {

        PreparedStatement ps;
        boolean passed = true;

        ps = conn.prepareStatement("insert into t values (1)");
        ps.execute();
        ps.execute();
        ps.close();

        try {
            passed = false; // won't pass unless caught
            ps.execute();
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on execute of closed prepared statement");

        return passed;
    }

    // Verifies that a ResultSet reports closed both after rs.close() and
    // after closing its owning Statement.
    static boolean testResultSet(Connection conn) throws SQLException {

        PreparedStatement ps;
        Statement s;
        ResultSet rs;
        boolean passed = true;

        // first, get a few values into a table:
        ps = conn.prepareStatement("insert into s values (1)");
        ps.execute();
        ps.execute();
        ps.execute();
        ps.execute();
        ps.execute();
        ps.close();

        s = conn.createStatement();
        rs = s.executeQuery("select * from s");
        rs.next();
        rs.next();
        rs.close();

        try {
            passed = false; // won't pass unless caught
            rs.next();
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on next of closed result set");

        // now see that rs after statement closed is closed also
        rs = s.executeQuery("select * from s");
        rs.next();
        rs.next();
        s.close();

        try {
            passed = false; // won't pass unless caught
            rs.next();
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on next of result set with closed statement");

        return passed;
    }

    // Closes the Connection, then verifies every dependent operation fails:
    // existing Statement/PreparedStatement execution, object creation,
    // metadata object access, and reads from a metadata ResultSet.
    static boolean testConnection(Connection conn) throws SQLException {

        DatabaseMetaData dmd;
        ResultSet rs;
        Statement s;
        PreparedStatement ps;
        boolean passed = true;

        dmd = conn.getMetaData();
        s = conn.createStatement();
        ps = conn.prepareStatement("create table w (i int)");
        rs = dmd.getTables("%","%","%",null); // should work

        conn.close();

        // should not be able to execute an existing statement
        try {
            passed = false; // won't pass unless caught
            s.execute("create table x (i int)");
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on statement execute after connection close");

        // should not be able to execute an existing prepared statement
        try {
            passed = false; // won't pass unless caught
            ps.execute();
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on prepared statement execute after connection close");

        // should not be able to create a statement...
        try {
            passed = false; // won't pass unless caught
            s = conn.createStatement();
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on statement creation after connection close");

        // should not be able to prepare a statement...
        try {
            passed = false; // won't pass unless caught
            ps = conn.prepareStatement("create table z (i int)");
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on statement preparation after connection close");

        // should not be able to see metadata info...
        try {
            passed = false; // won't pass unless caught
            rs.next();
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on metadata reading after connection close");

        // should not be able to get any more metadata info...
        try {
            passed = false; // won't pass unless caught
            rs = dmd.getColumns("%","%","%","%");
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on metadata collecting after connection close");

        // should not be able to get metadata object...
        try {
            passed = false; // won't pass unless caught
            dmd = conn.getMetaData();
        } catch (SQLException se) {
            passed = true;
            // could verify exception #...
            JDBCDisplayUtil.ShowSQLException(System.out,se);
        }
        if (!passed)
            System.out.println("FAIL -- no error on getting metadata after connection close");

        return passed;
    }

    // Regression test for DERBY-62: the SQLException raised by dropping a
    // nonexistent table must be Java-serializable.
    static boolean testDerby62(Connection conn) throws SQLException {
        System.out.println("Test case for Derby-62 - serialization error with SQLException");
        try {
            conn.createStatement().execute("DROP TABLE APP.DERBY62_DAIN_SUNDSTROM");
            // The drop unexpectedly succeeded; the test cannot proceed.
            return false;
        } catch (SQLException sqle) {
            boolean passed = true;
            try {
                // ensure we can serialize this exception.
                java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(new java.io.ByteArrayOutputStream(1024));
                oos.writeObject(sqle);
                oos.close();
            } catch (java.io.IOException ioe)
            {
                System.out.println("IOException " + ioe.getMessage());
                passed = false;
            }

            System.out.println(sqle.getMessage());

            return passed;
        }
    }
}
| papicella/snappy-store | gemfirexd/tools/src/testing/java/org/apache/derbyTesting/functionTests/tests/lang/closed.java | Java | apache-2.0 | 12,672 |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultChannelPromise;
import io.netty.handler.ssl.ApplicationProtocolNames;
import io.netty.util.AsciiString;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.internal.UnstableApi;
import static io.netty.buffer.Unpooled.directBuffer;
import static io.netty.buffer.Unpooled.unreleasableBuffer;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.Http2Exception.headerListSizeError;
import static io.netty.util.CharsetUtil.UTF_8;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Constants and utility method used for encoding/decoding HTTP2 frames.
*/
@UnstableApi
public final class Http2CodecUtil {
public static final int CONNECTION_STREAM_ID = 0;
public static final int HTTP_UPGRADE_STREAM_ID = 1;
public static final CharSequence HTTP_UPGRADE_SETTINGS_HEADER = AsciiString.cached("HTTP2-Settings");
public static final CharSequence HTTP_UPGRADE_PROTOCOL_NAME = "h2c";
public static final CharSequence TLS_UPGRADE_PROTOCOL_NAME = ApplicationProtocolNames.HTTP_2;
public static final int PING_FRAME_PAYLOAD_LENGTH = 8;
public static final short MAX_UNSIGNED_BYTE = 0xff;
/**
* The maximum number of padding bytes. That is the 255 padding bytes appended to the end of a frame and the 1 byte
* pad length field.
*/
public static final int MAX_PADDING = 256;
public static final long MAX_UNSIGNED_INT = 0xffffffffL;
public static final int FRAME_HEADER_LENGTH = 9;
public static final int SETTING_ENTRY_LENGTH = 6;
public static final int PRIORITY_ENTRY_LENGTH = 5;
public static final int INT_FIELD_LENGTH = 4;
public static final short MAX_WEIGHT = 256;
public static final short MIN_WEIGHT = 1;
private static final ByteBuf CONNECTION_PREFACE =
unreleasableBuffer(directBuffer(24).writeBytes("PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n".getBytes(UTF_8)))
.asReadOnly();
private static final int MAX_PADDING_LENGTH_LENGTH = 1;
public static final int DATA_FRAME_HEADER_LENGTH = FRAME_HEADER_LENGTH + MAX_PADDING_LENGTH_LENGTH;
public static final int HEADERS_FRAME_HEADER_LENGTH =
FRAME_HEADER_LENGTH + MAX_PADDING_LENGTH_LENGTH + INT_FIELD_LENGTH + 1;
public static final int PRIORITY_FRAME_LENGTH = FRAME_HEADER_LENGTH + PRIORITY_ENTRY_LENGTH;
public static final int RST_STREAM_FRAME_LENGTH = FRAME_HEADER_LENGTH + INT_FIELD_LENGTH;
public static final int PUSH_PROMISE_FRAME_HEADER_LENGTH =
FRAME_HEADER_LENGTH + MAX_PADDING_LENGTH_LENGTH + INT_FIELD_LENGTH;
public static final int GO_AWAY_FRAME_HEADER_LENGTH = FRAME_HEADER_LENGTH + 2 * INT_FIELD_LENGTH;
public static final int WINDOW_UPDATE_FRAME_LENGTH = FRAME_HEADER_LENGTH + INT_FIELD_LENGTH;
public static final int CONTINUATION_FRAME_HEADER_LENGTH = FRAME_HEADER_LENGTH + MAX_PADDING_LENGTH_LENGTH;
public static final char SETTINGS_HEADER_TABLE_SIZE = 1;
public static final char SETTINGS_ENABLE_PUSH = 2;
public static final char SETTINGS_MAX_CONCURRENT_STREAMS = 3;
public static final char SETTINGS_INITIAL_WINDOW_SIZE = 4;
public static final char SETTINGS_MAX_FRAME_SIZE = 5;
public static final char SETTINGS_MAX_HEADER_LIST_SIZE = 6;
public static final int NUM_STANDARD_SETTINGS = 6;
public static final long MAX_HEADER_TABLE_SIZE = MAX_UNSIGNED_INT;
public static final long MAX_CONCURRENT_STREAMS = MAX_UNSIGNED_INT;
public static final int MAX_INITIAL_WINDOW_SIZE = Integer.MAX_VALUE;
public static final int MAX_FRAME_SIZE_LOWER_BOUND = 0x4000;
public static final int MAX_FRAME_SIZE_UPPER_BOUND = 0xffffff;
public static final long MAX_HEADER_LIST_SIZE = MAX_UNSIGNED_INT;
public static final long MIN_HEADER_TABLE_SIZE = 0;
public static final long MIN_CONCURRENT_STREAMS = 0;
public static final int MIN_INITIAL_WINDOW_SIZE = 0;
public static final long MIN_HEADER_LIST_SIZE = 0;
public static final int DEFAULT_WINDOW_SIZE = 65535;
public static final short DEFAULT_PRIORITY_WEIGHT = 16;
public static final int DEFAULT_HEADER_TABLE_SIZE = 4096;
/**
 * <a href="https://tools.ietf.org/html/rfc7540#section-6.5.2">The initial value of this setting is unlimited</a>.
 * However in practice we don't want to allow our peers to use unlimited memory by default. So we take advantage
 * of the <q>For any given request, a lower limit than what is advertised MAY be enforced.</q> loophole.
 */
public static final long DEFAULT_HEADER_LIST_SIZE = 8192;
public static final int DEFAULT_MAX_FRAME_SIZE = MAX_FRAME_SIZE_LOWER_BOUND;
/**
 * The assumed minimum value for {@code SETTINGS_MAX_CONCURRENT_STREAMS} as
 * recommended by the <a href="https://tools.ietf.org/html/rfc7540#section-6.5.2">HTTP/2 spec</a>.
 */
public static final int SMALLEST_MAX_CONCURRENT_STREAMS = 100;
// Default cap on locally reserved (push-promised) streams; tracks the assumed concurrency minimum.
static final int DEFAULT_MAX_RESERVED_STREAMS = SMALLEST_MAX_CONCURRENT_STREAMS;
// Smallest chunk of flow-control window handed out per allocation round.
static final int DEFAULT_MIN_ALLOCATION_CHUNK = 1024;
/**
 * Calculate the threshold in bytes which should trigger a {@code GO_AWAY} if a set of headers exceeds this amount.
 * @param maxHeaderListSize
 * <a href="https://tools.ietf.org/html/rfc7540#section-6.5.2">SETTINGS_MAX_HEADER_LIST_SIZE</a> for the local
 * endpoint.
 * @return the threshold in bytes which should trigger a {@code GO_AWAY} if a set of headers exceeds this amount.
 */
public static long calculateMaxHeaderListSizeGoAway(long maxHeaderListSize) {
    // Grant a 25% grace buffer before escalating to GO_AWAY. The unsigned
    // shift computes size/4 without any floating point arithmetic.
    final long quarter = maxHeaderListSize >>> 2;
    return maxHeaderListSize + quarter;
}
// Grace period granted to in-flight streams during a graceful shutdown.
public static final long DEFAULT_GRACEFUL_SHUTDOWN_TIMEOUT_MILLIS = MILLISECONDS.convert(30, SECONDS);
// Upper bound on queued outbound control frames.
// NOTE(review): presumably a defense against control-frame flood abuse — confirm against the encoder's usage.
public static final int DEFAULT_MAX_QUEUED_CONTROL_FRAMES = 10000;
/**
 * Returns {@code true} if the stream is an outbound stream.
 *
 * @param server {@code true} if the endpoint is a server, {@code false} otherwise.
 * @param streamId the stream identifier
 */
public static boolean isOutboundStream(boolean server, int streamId) {
    if (streamId <= 0) {
        // Stream 0 (the connection stream) and negative ids are never outbound streams.
        return false;
    }
    // Servers initiate even-numbered streams; clients initiate odd-numbered ones.
    return ((streamId & 1) == 0) == server;
}
/**
 * Returns true if the {@code streamId} is a valid HTTP/2 stream identifier.
 */
public static boolean isStreamIdValid(int streamId) {
    // Stream identifiers are 31-bit values; any non-negative int is in range.
    return !(streamId < 0);
}
// Checks both that the id is in range and that its parity matches the endpoint:
// even ids belong to servers, odd ids to clients.
static boolean isStreamIdValid(int streamId, boolean server) {
    return streamId >= 0 && ((streamId & 1) == 0) == server;
}
/**
 * Indicates whether or not the given value for max frame size falls within the valid range.
 */
public static boolean isMaxFrameSizeValid(int maxFrameSize) {
    // Valid range per RFC 7540 section 4.2: 2^14 (0x4000) through 2^24 - 1 (0xffffff).
    if (maxFrameSize < MAX_FRAME_SIZE_LOWER_BOUND) {
        return false;
    }
    return maxFrameSize <= MAX_FRAME_SIZE_UPPER_BOUND;
}
/**
 * Returns a buffer containing the {@link #CONNECTION_PREFACE}.
 *
 * @return a retained duplicate whose reader/writer indices may be advanced
 *         independently of the shared preface buffer.
 */
public static ByteBuf connectionPrefaceBuf() {
    // Return a duplicate so that modifications to the reader index will not affect the original buffer.
    return CONNECTION_PREFACE.retainedDuplicate();
}
/**
 * Iteratively looks through the causality chain for the given exception and returns the first
 * {@link Http2Exception} or {@code null} if none.
 */
public static Http2Exception getEmbeddedHttp2Exception(Throwable cause) {
    // Walk getCause() links until an Http2Exception turns up or the chain ends.
    for (Throwable t = cause; t != null; t = t.getCause()) {
        if (t instanceof Http2Exception) {
            return (Http2Exception) t;
        }
    }
    return null;
}
/**
 * Creates a buffer containing the error message from the given exception. If the cause is
 * {@code null} returns an empty buffer.
 */
public static ByteBuf toByteBuf(ChannelHandlerContext ctx, Throwable cause) {
    if (cause != null && cause.getMessage() != null) {
        return ByteBufUtil.writeUtf8(ctx.alloc(), cause.getMessage());
    }
    // Nothing to encode: no exception, or an exception without a message.
    return Unpooled.EMPTY_BUFFER;
}
/**
 * Reads a big-endian (31-bit) integer from the buffer.
 */
public static int readUnsignedInt(ByteBuf buf) {
    // Mask off the reserved high bit so the result is always non-negative.
    return buf.readInt() & Integer.MAX_VALUE;
}
/**
 * Writes an HTTP/2 frame header to the output buffer.
 *
 * @param out the destination buffer; enough capacity for the header plus payload is ensured first.
 * @param payloadLength length in bytes of the payload that will follow the header.
 * @param type the frame type code.
 * @param flags the frame flags.
 * @param streamId identifier of the stream the frame belongs to.
 */
public static void writeFrameHeader(ByteBuf out, int payloadLength, byte type,
    Http2Flags flags, int streamId) {
    // Reserve room up front so the subsequent header writes cannot fail midway.
    out.ensureWritable(FRAME_HEADER_LENGTH + payloadLength);
    writeFrameHeaderInternal(out, payloadLength, type, flags, streamId);
}
/**
 * Calculate the amount of bytes that can be sent by {@code state}. The lower bound is {@code 0}.
 */
public static int streamableBytes(StreamByteDistributor.StreamState state) {
    // Send no more than either the pending data or the flow-control window allows,
    // and clamp to zero in case the window has gone negative.
    return max(0, (int) min(state.pendingBytes(), state.windowSize()));
}
/**
 * Results in a RST_STREAM being sent for {@code streamId} due to violating
 * <a href="https://tools.ietf.org/html/rfc7540#section-6.5.2">SETTINGS_MAX_HEADER_LIST_SIZE</a>.
 * @param streamId The stream ID that was being processed when the exceptional condition occurred.
 * @param maxHeaderListSize The max allowed size for a list of headers in bytes which was exceeded.
 * @param onDecode {@code true} if the exception was encountered during decoder. {@code false} for encode.
 * @throws Http2Exception a stream error.
 */
public static void headerListSizeExceeded(int streamId, long maxHeaderListSize,
    boolean onDecode) throws Http2Exception {
    // Stream-level error: only the offending stream is reset, the connection survives.
    throw headerListSizeError(streamId, PROTOCOL_ERROR, onDecode, "Header size exceeded max " +
        "allowed size (%d)", maxHeaderListSize);
}
/**
 * Results in a GO_AWAY being sent due to violating
 * <a href="https://tools.ietf.org/html/rfc7540#section-6.5.2">SETTINGS_MAX_HEADER_LIST_SIZE</a> in an unrecoverable
 * manner.
 * @param maxHeaderListSize The max allowed size for a list of headers in bytes which was exceeded.
 * @throws Http2Exception a connection error.
 */
public static void headerListSizeExceeded(long maxHeaderListSize) throws Http2Exception {
    // Connection-level error: the whole connection is torn down via GO_AWAY.
    throw connectionError(PROTOCOL_ERROR, "Header size exceeded max " +
        "allowed size (%d)", maxHeaderListSize);
}
// Writes the 9-byte frame header without ensuring writability; callers must
// reserve capacity first. Layout per RFC 7540 section 4.1:
// 24-bit payload length, 8-bit type, 8-bit flags, 32-bit stream id (R bit + 31-bit id).
static void writeFrameHeaderInternal(ByteBuf out, int payloadLength, byte type,
    Http2Flags flags, int streamId) {
    out.writeMedium(payloadLength);
    out.writeByte(type);
    out.writeByte(flags.value());
    out.writeInt(streamId);
}
/**
 * Provides the ability to associate the outcome of multiple {@link ChannelPromise}
 * objects into a single {@link ChannelPromise} object.
 */
static final class SimpleChannelPromiseAggregator extends DefaultChannelPromise {
    // The single promise whose outcome reflects all aggregated promises.
    private final ChannelPromise promise;
    // Number of promises handed out via newPromise().
    private int expectedCount;
    // Number of handed-out promises that have completed (success or failure).
    private int doneCount;
    // Most recent failure seen; non-null means the aggregate fails.
    private Throwable lastFailure;
    // Set once doneAllocatingPromises() is called; no further newPromise() calls allowed.
    private boolean doneAllocating;
    SimpleChannelPromiseAggregator(ChannelPromise promise, Channel c, EventExecutor e) {
        super(c, e);
        assert promise != null && !promise.isDone();
        this.promise = promise;
    }
    /**
     * Allocate a new promise which will be used to aggregate the overall success of this promise aggregator.
     * @return A new promise which will be aggregated.
     * {@code null} if {@link #doneAllocatingPromises()} was previously called.
     */
    public ChannelPromise newPromise() {
        assert !doneAllocating : "Done allocating. No more promises can be allocated.";
        ++expectedCount;
        // This object itself serves as every allocated promise; completions are counted.
        return this;
    }
    /**
     * Signify that no more {@link #newPromise()} allocations will be made.
     * The aggregation can not be successful until this method is called.
     * @return The promise that is the aggregation of all promises allocated with {@link #newPromise()}.
     */
    public ChannelPromise doneAllocatingPromises() {
        if (!doneAllocating) {
            doneAllocating = true;
            // If everything already completed (or nothing was allocated), finish now.
            if (doneCount == expectedCount || expectedCount == 0) {
                return setPromise();
            }
        }
        return this;
    }
    @Override
    public boolean tryFailure(Throwable cause) {
        if (allowFailure()) {
            ++doneCount;
            lastFailure = cause;
            if (allPromisesDone()) {
                return tryPromise();
            }
            // TODO: We break the interface a bit here.
            // Multiple failure events can be processed without issue because this is an aggregation.
            return true;
        }
        return false;
    }
    /**
     * Fail this object if it has not already been failed.
     * <p>
     * This method will NOT throw an {@link IllegalStateException} if called multiple times
     * because that may be expected.
     */
    @Override
    public ChannelPromise setFailure(Throwable cause) {
        if (allowFailure()) {
            ++doneCount;
            lastFailure = cause;
            if (allPromisesDone()) {
                return setPromise();
            }
        }
        return this;
    }
    @Override
    public ChannelPromise setSuccess(Void result) {
        if (awaitingPromises()) {
            ++doneCount;
            if (allPromisesDone()) {
                setPromise();
            }
        }
        return this;
    }
    @Override
    public boolean trySuccess(Void result) {
        if (awaitingPromises()) {
            ++doneCount;
            if (allPromisesDone()) {
                return tryPromise();
            }
            // TODO: We break the interface a bit here.
            // Multiple success events can be processed without issue because this is an aggregation.
            return true;
        }
        return false;
    }
    // A failure is accepted while promises are outstanding, or before any were allocated.
    private boolean allowFailure() {
        return awaitingPromises() || expectedCount == 0;
    }
    // True while some allocated promises have not yet completed.
    private boolean awaitingPromises() {
        return doneCount < expectedCount;
    }
    // The aggregate may only complete once allocation is closed and every promise finished.
    private boolean allPromisesDone() {
        return doneCount == expectedCount && doneAllocating;
    }
    // Completes both the wrapped promise and this aggregator; any recorded failure wins.
    private ChannelPromise setPromise() {
        if (lastFailure == null) {
            promise.setSuccess();
            return super.setSuccess(null);
        } else {
            promise.setFailure(lastFailure);
            return super.setFailure(lastFailure);
        }
    }
    // Same as setPromise() but via the non-throwing try* completion methods.
    private boolean tryPromise() {
        if (lastFailure == null) {
            promise.trySuccess();
            return super.trySuccess(null);
        } else {
            promise.tryFailure(lastFailure);
            return super.tryFailure(lastFailure);
        }
    }
}
/**
 * Verifies that the given padding is within the allowed range, throwing
 * {@link IllegalArgumentException} otherwise.
 */
public static void verifyPadding(int padding) {
    if (padding >= 0 && padding <= MAX_PADDING) {
        return;
    }
    throw new IllegalArgumentException(String.format("Invalid padding '%d'. Padding must be between 0 and " +
        "%d (inclusive).", padding, MAX_PADDING));
}
// Static utility class; not instantiable.
private Http2CodecUtil() { }
}
| gerdriesselmann/netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2CodecUtil.java | Java | apache-2.0 | 16,850 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.cube.kv;
/**
 * Byte-level constants shared by the encoders and decoders of cube row keys
 * and row values.
 */
public class RowConstants {
    // Default max length in bytes reserved for a single row key column.
    public static final int ROWKEY_COL_DEFAULT_LENGTH = 256;
    // row key lower bound
    public static final byte ROWKEY_LOWER_BYTE = 0;
    // row key upper bound
    public static final byte ROWKEY_UPPER_BYTE = (byte) 0xff;
    // row key cuboid id length
    public static final int ROWKEY_CUBOIDID_LEN = 8;
    // row key shard length
    public static final int ROWKEY_SHARDID_LEN = 2;
    // Combined prefix length: shard id followed by cuboid id.
    public static final int ROWKEY_SHARD_AND_CUBOID_LEN = ROWKEY_CUBOIDID_LEN + ROWKEY_SHARDID_LEN;
    // Generic 0/1 byte markers.
    public static final byte BYTE_ZERO = 0;
    public static final byte BYTE_ONE = 1;
    // row value delimiter (ASCII 0x07, BEL), exposed in byte, string and byte[] forms
    public static final byte ROWVALUE_DELIMITER_BYTE = 7;
    public static final String ROWVALUE_DELIMITER_STRING = String.valueOf((char) 7);
    public static final byte[] ROWVALUE_DELIMITER_BYTES = { 7 };
    public static final int ROWKEY_BUFFER_SIZE = 65 * 256;// a little more than 64 dimensions * 256 bytes each
}
| apache/kylin | core-cube/src/main/java/org/apache/kylin/cube/kv/RowConstants.java | Java | apache-2.0 | 1,808 |
/*
* Copyright © 2012-2014 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package co.cask.coopr.http.guice;
import co.cask.coopr.common.conf.Constants;
import co.cask.coopr.http.handler.AdminHandler;
import co.cask.coopr.http.handler.ClusterHandler;
import co.cask.coopr.http.handler.MetricHandler;
import co.cask.coopr.http.handler.NodeHandler;
import co.cask.coopr.http.handler.PluginHandler;
import co.cask.coopr.http.handler.ProvisionerHandler;
import co.cask.coopr.http.handler.RPCHandler;
import co.cask.coopr.http.handler.StatusHandler;
import co.cask.coopr.http.handler.SuperadminHandler;
import co.cask.coopr.http.handler.TaskHandler;
import co.cask.coopr.http.handler.UserHandler;
import co.cask.http.HttpHandler;
import com.google.inject.AbstractModule;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.name.Names;
/**
* Guice bindings for http related classes.
*/
/**
 * Guice bindings for http related classes.
 */
public class HttpModule extends AbstractModule {

    @Override
    protected void configure() {
        // Handlers bound under the EXTERNAL name: the public-facing API surface
        // (admin, cluster, node, status, RPC, superadmin, plugin, user, metrics).
        Multibinder<HttpHandler> externalHandlerBinder =
            Multibinder.newSetBinder(binder(), HttpHandler.class, Names.named(Constants.HandlersNames.EXTERNAL));
        externalHandlerBinder.addBinding().to(AdminHandler.class);
        externalHandlerBinder.addBinding().to(ClusterHandler.class);
        externalHandlerBinder.addBinding().to(NodeHandler.class);
        externalHandlerBinder.addBinding().to(StatusHandler.class);
        externalHandlerBinder.addBinding().to(RPCHandler.class);
        externalHandlerBinder.addBinding().to(SuperadminHandler.class);
        externalHandlerBinder.addBinding().to(PluginHandler.class);
        externalHandlerBinder.addBinding().to(UserHandler.class);
        externalHandlerBinder.addBinding().to(MetricHandler.class);
        // Handlers bound under the INTERNAL name: task and provisioner endpoints.
        Multibinder<HttpHandler> internalHandlerBinder =
            Multibinder.newSetBinder(binder(), HttpHandler.class, Names.named(Constants.HandlersNames.INTERNAL));
        internalHandlerBinder.addBinding().to(TaskHandler.class);
        internalHandlerBinder.addBinding().to(ProvisionerHandler.class);
    }
}
| caskdata/coopr | coopr-server/src/main/java/co/cask/coopr/http/guice/HttpModule.java | Java | apache-2.0 | 2,568 |
/**
* Copyright 2005-2016 Red Hat, Inc.
* <p>
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.arquillian.cube.kubernetes.impl.event;
import org.arquillian.cube.kubernetes.impl.DefaultSession;
import org.arquillian.cube.kubernetes.impl.SessionCreatedEvent;
/**
 * Event carrying the session to act on.
 * NOTE(review): the stop semantics are inferred from the class name only —
 * confirm against the observers that handle this event.
 */
public class Stop extends SessionCreatedEvent {

    /**
     * @param session the session this event applies to.
     */
    public Stop(DefaultSession session) {
        super(session);
    }
}
| arquillian/arquillian-cube | kubernetes/kubernetes/src/main/java/org/arquillian/cube/kubernetes/impl/event/Stop.java | Java | apache-2.0 | 937 |
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: combos/unmarshaler/theproto3.proto
package theproto3
import (
bytes "bytes"
compress_gzip "compress/gzip"
encoding_binary "encoding/binary"
fmt "fmt"
_ "github.com/gogo/protobuf/gogoproto"
github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
proto "github.com/gogo/protobuf/proto"
github_com_gogo_protobuf_protoc_gen_gogo_descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys"
both "github.com/gogo/protobuf/test/combos/both"
github_com_gogo_protobuf_test_custom "github.com/gogo/protobuf/test/custom"
io "io"
io_ioutil "io/ioutil"
math "math"
math_bits "math/bits"
reflect "reflect"
strconv "strconv"
strings "strings"
)
// NOTE: this file is generated by protoc-gen-gogo from theproto3.proto; do not edit by hand.

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// MapEnum is a generated proto3 enum used as a map value type in the test messages.
type MapEnum int32

const (
	MA MapEnum = 0
	MB MapEnum = 1
	MC MapEnum = 2
)

// MapEnum_name maps numeric enum values to their proto names.
var MapEnum_name = map[int32]string{
	0: "MA",
	1: "MB",
	2: "MC",
}

// MapEnum_value maps proto names back to numeric enum values.
var MapEnum_value = map[string]int32{
	"MA": 0,
	"MB": 1,
	"MC": 2,
}

// EnumDescriptor returns the compressed file descriptor and the path of this enum within it.
func (MapEnum) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{0}
}
// Message_Humour is a generated proto3 enum nested inside Message.
type Message_Humour int32

const (
	UNKNOWN     Message_Humour = 0
	PUNS        Message_Humour = 1
	SLAPSTICK   Message_Humour = 2
	BILL_BAILEY Message_Humour = 3
)

// Message_Humour_name maps numeric enum values to their proto names.
var Message_Humour_name = map[int32]string{
	0: "UNKNOWN",
	1: "PUNS",
	2: "SLAPSTICK",
	3: "BILL_BAILEY",
}

// Message_Humour_value maps proto names back to numeric enum values.
var Message_Humour_value = map[string]int32{
	"UNKNOWN":     0,
	"PUNS":        1,
	"SLAPSTICK":   2,
	"BILL_BAILEY": 3,
}

// EnumDescriptor returns the compressed file descriptor and the path of this enum within it.
func (Message_Humour) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{0, 0}
}
// Message is the primary generated test message, exercising scalars, enums,
// bytes, packed repeated fields, a nested message, and maps (including maps
// whose values are proto2 messages imported from the "both" test package).
type Message struct {
	Name                 string                      `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	Hilarity             Message_Humour              `protobuf:"varint,2,opt,name=hilarity,proto3,enum=theproto3.Message_Humour" json:"hilarity,omitempty"`
	HeightInCm           uint32                      `protobuf:"varint,3,opt,name=height_in_cm,json=heightInCm,proto3" json:"height_in_cm,omitempty"`
	Data                 []byte                      `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"`
	ResultCount          int64                       `protobuf:"varint,7,opt,name=result_count,json=resultCount,proto3" json:"result_count,omitempty"`
	TrueScotsman         bool                        `protobuf:"varint,8,opt,name=true_scotsman,json=trueScotsman,proto3" json:"true_scotsman,omitempty"`
	Score                float32                     `protobuf:"fixed32,9,opt,name=score,proto3" json:"score,omitempty"`
	Key                  []uint64                    `protobuf:"varint,5,rep,packed,name=key,proto3" json:"key,omitempty"`
	Nested               *Nested                     `protobuf:"bytes,6,opt,name=nested,proto3" json:"nested,omitempty"`
	Terrain              map[int64]*Nested           `protobuf:"bytes,10,rep,name=terrain,proto3" json:"terrain,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	Proto2Field          *both.NinOptNative          `protobuf:"bytes,11,opt,name=proto2_field,json=proto2Field,proto3" json:"proto2_field,omitempty"`
	Proto2Value          map[int64]*both.NinOptEnum  `protobuf:"bytes,13,rep,name=proto2_value,json=proto2Value,proto3" json:"proto2_value,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{}                    `json:"-"`
	XXX_unrecognized     []byte                      `json:"-"`
	XXX_sizecache        int32                       `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *Message) Reset()      { *m = Message{} }
func (*Message) ProtoMessage() {}
func (*Message) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{0}
}
func (m *Message) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Message.Marshal(b, m, deterministic)
}
func (m *Message) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Message.Merge(m, src)
}
func (m *Message) XXX_Size() int {
	return xxx_messageInfo_Message.Size(m)
}
func (m *Message) XXX_DiscardUnknown() {
	xxx_messageInfo_Message.DiscardUnknown(m)
}

var xxx_messageInfo_Message proto.InternalMessageInfo
// Nested is a small generated message embedded in Message and used as a map value type.
type Nested struct {
	Bunny                string   `protobuf:"bytes,1,opt,name=bunny,proto3" json:"bunny,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *Nested) Reset()      { *m = Nested{} }
func (*Nested) ProtoMessage() {}
func (*Nested) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{1}
}
func (m *Nested) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *Nested) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Nested.Marshal(b, m, deterministic)
}
func (m *Nested) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Nested.Merge(m, src)
}
func (m *Nested) XXX_Size() int {
	return xxx_messageInfo_Nested.Size(m)
}
func (m *Nested) XXX_DiscardUnknown() {
	xxx_messageInfo_Nested.DiscardUnknown(m)
}

var xxx_messageInfo_Nested proto.InternalMessageInfo
// AllMaps exercises every scalar map key/value combination supported by proto3,
// plus maps with enum and message values.
type AllMaps struct {
	StringToDoubleMap    map[string]float64        `protobuf:"bytes,1,rep,name=StringToDoubleMap,proto3" json:"StringToDoubleMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"`
	StringToFloatMap     map[string]float32        `protobuf:"bytes,2,rep,name=StringToFloatMap,proto3" json:"StringToFloatMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed32,2,opt,name=value,proto3"`
	Int32Map             map[int32]int32           `protobuf:"bytes,3,rep,name=Int32Map,proto3" json:"Int32Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Int64Map             map[int64]int64           `protobuf:"bytes,4,rep,name=Int64Map,proto3" json:"Int64Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Uint32Map            map[uint32]uint32         `protobuf:"bytes,5,rep,name=Uint32Map,proto3" json:"Uint32Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Uint64Map            map[uint64]uint64         `protobuf:"bytes,6,rep,name=Uint64Map,proto3" json:"Uint64Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Sint32Map            map[int32]int32           `protobuf:"bytes,7,rep,name=Sint32Map,proto3" json:"Sint32Map,omitempty" protobuf_key:"zigzag32,1,opt,name=key,proto3" protobuf_val:"zigzag32,2,opt,name=value,proto3"`
	Sint64Map            map[int64]int64           `protobuf:"bytes,8,rep,name=Sint64Map,proto3" json:"Sint64Map,omitempty" protobuf_key:"zigzag64,1,opt,name=key,proto3" protobuf_val:"zigzag64,2,opt,name=value,proto3"`
	Fixed32Map           map[uint32]uint32         `protobuf:"bytes,9,rep,name=Fixed32Map,proto3" json:"Fixed32Map,omitempty" protobuf_key:"fixed32,1,opt,name=key,proto3" protobuf_val:"fixed32,2,opt,name=value,proto3"`
	Sfixed32Map          map[int32]int32           `protobuf:"bytes,10,rep,name=Sfixed32Map,proto3" json:"Sfixed32Map,omitempty" protobuf_key:"fixed32,1,opt,name=key,proto3" protobuf_val:"fixed32,2,opt,name=value,proto3"`
	Fixed64Map           map[uint64]uint64         `protobuf:"bytes,11,rep,name=Fixed64Map,proto3" json:"Fixed64Map,omitempty" protobuf_key:"fixed64,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"`
	Sfixed64Map          map[int64]int64           `protobuf:"bytes,12,rep,name=Sfixed64Map,proto3" json:"Sfixed64Map,omitempty" protobuf_key:"fixed64,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"`
	BoolMap              map[bool]bool             `protobuf:"bytes,13,rep,name=BoolMap,proto3" json:"BoolMap,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	StringMap            map[string]string         `protobuf:"bytes,14,rep,name=StringMap,proto3" json:"StringMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	StringToBytesMap     map[string][]byte         `protobuf:"bytes,15,rep,name=StringToBytesMap,proto3" json:"StringToBytesMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	StringToEnumMap      map[string]MapEnum        `protobuf:"bytes,16,rep,name=StringToEnumMap,proto3" json:"StringToEnumMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=theproto3.MapEnum"`
	StringToMsgMap       map[string]*FloatingPoint `protobuf:"bytes,17,rep,name=StringToMsgMap,proto3" json:"StringToMsgMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{}                  `json:"-"`
	XXX_unrecognized     []byte                    `json:"-"`
	XXX_sizecache        int32                     `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *AllMaps) Reset()      { *m = AllMaps{} }
func (*AllMaps) ProtoMessage() {}
func (*AllMaps) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{2}
}
func (m *AllMaps) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *AllMaps) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_AllMaps.Marshal(b, m, deterministic)
}
func (m *AllMaps) XXX_Merge(src proto.Message) {
	xxx_messageInfo_AllMaps.Merge(m, src)
}
func (m *AllMaps) XXX_Size() int {
	return xxx_messageInfo_AllMaps.Size(m)
}
func (m *AllMaps) XXX_DiscardUnknown() {
	xxx_messageInfo_AllMaps.DiscardUnknown(m)
}

var xxx_messageInfo_AllMaps proto.InternalMessageInfo
// AllMapsOrdered declares the same fields as AllMaps.
// NOTE(review): presumably generated with the stable/ordered marshaler option
// (map entries emitted in sorted key order) — confirm against the .proto options.
type AllMapsOrdered struct {
	StringToDoubleMap    map[string]float64        `protobuf:"bytes,1,rep,name=StringToDoubleMap,proto3" json:"StringToDoubleMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"`
	StringToFloatMap     map[string]float32        `protobuf:"bytes,2,rep,name=StringToFloatMap,proto3" json:"StringToFloatMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed32,2,opt,name=value,proto3"`
	Int32Map             map[int32]int32           `protobuf:"bytes,3,rep,name=Int32Map,proto3" json:"Int32Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Int64Map             map[int64]int64           `protobuf:"bytes,4,rep,name=Int64Map,proto3" json:"Int64Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Uint32Map            map[uint32]uint32         `protobuf:"bytes,5,rep,name=Uint32Map,proto3" json:"Uint32Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Uint64Map            map[uint64]uint64         `protobuf:"bytes,6,rep,name=Uint64Map,proto3" json:"Uint64Map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	Sint32Map            map[int32]int32           `protobuf:"bytes,7,rep,name=Sint32Map,proto3" json:"Sint32Map,omitempty" protobuf_key:"zigzag32,1,opt,name=key,proto3" protobuf_val:"zigzag32,2,opt,name=value,proto3"`
	Sint64Map            map[int64]int64           `protobuf:"bytes,8,rep,name=Sint64Map,proto3" json:"Sint64Map,omitempty" protobuf_key:"zigzag64,1,opt,name=key,proto3" protobuf_val:"zigzag64,2,opt,name=value,proto3"`
	Fixed32Map           map[uint32]uint32         `protobuf:"bytes,9,rep,name=Fixed32Map,proto3" json:"Fixed32Map,omitempty" protobuf_key:"fixed32,1,opt,name=key,proto3" protobuf_val:"fixed32,2,opt,name=value,proto3"`
	Sfixed32Map          map[int32]int32           `protobuf:"bytes,10,rep,name=Sfixed32Map,proto3" json:"Sfixed32Map,omitempty" protobuf_key:"fixed32,1,opt,name=key,proto3" protobuf_val:"fixed32,2,opt,name=value,proto3"`
	Fixed64Map           map[uint64]uint64         `protobuf:"bytes,11,rep,name=Fixed64Map,proto3" json:"Fixed64Map,omitempty" protobuf_key:"fixed64,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"`
	Sfixed64Map          map[int64]int64           `protobuf:"bytes,12,rep,name=Sfixed64Map,proto3" json:"Sfixed64Map,omitempty" protobuf_key:"fixed64,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"`
	BoolMap              map[bool]bool             `protobuf:"bytes,13,rep,name=BoolMap,proto3" json:"BoolMap,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
	StringMap            map[string]string         `protobuf:"bytes,14,rep,name=StringMap,proto3" json:"StringMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	StringToBytesMap     map[string][]byte         `protobuf:"bytes,15,rep,name=StringToBytesMap,proto3" json:"StringToBytesMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	StringToEnumMap      map[string]MapEnum        `protobuf:"bytes,16,rep,name=StringToEnumMap,proto3" json:"StringToEnumMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=theproto3.MapEnum"`
	StringToMsgMap       map[string]*FloatingPoint `protobuf:"bytes,17,rep,name=StringToMsgMap,proto3" json:"StringToMsgMap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{}                  `json:"-"`
	XXX_unrecognized     []byte                    `json:"-"`
	XXX_sizecache        int32                     `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *AllMapsOrdered) Reset()      { *m = AllMapsOrdered{} }
func (*AllMapsOrdered) ProtoMessage() {}
func (*AllMapsOrdered) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{3}
}
func (m *AllMapsOrdered) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *AllMapsOrdered) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_AllMapsOrdered.Marshal(b, m, deterministic)
}
func (m *AllMapsOrdered) XXX_Merge(src proto.Message) {
	xxx_messageInfo_AllMapsOrdered.Merge(m, src)
}
func (m *AllMapsOrdered) XXX_Size() int {
	return xxx_messageInfo_AllMapsOrdered.Size(m)
}
func (m *AllMapsOrdered) XXX_DiscardUnknown() {
	xxx_messageInfo_AllMapsOrdered.DiscardUnknown(m)
}

var xxx_messageInfo_AllMapsOrdered proto.InternalMessageInfo
// MessageWithMap exercises maps keyed by int32, sint64 and bool, with string,
// message and bytes values respectively.
type MessageWithMap struct {
	NameMapping          map[int32]string         `protobuf:"bytes,1,rep,name=name_mapping,json=nameMapping,proto3" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	MsgMapping           map[int64]*FloatingPoint `protobuf:"bytes,2,rep,name=msg_mapping,json=msgMapping,proto3" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	ByteMapping          map[bool][]byte          `protobuf:"bytes,3,rep,name=byte_mapping,json=byteMapping,proto3" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{}                 `json:"-"`
	XXX_unrecognized     []byte                   `json:"-"`
	XXX_sizecache        int32                    `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *MessageWithMap) Reset()      { *m = MessageWithMap{} }
func (*MessageWithMap) ProtoMessage() {}
func (*MessageWithMap) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{4}
}
func (m *MessageWithMap) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *MessageWithMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_MessageWithMap.Marshal(b, m, deterministic)
}
func (m *MessageWithMap) XXX_Merge(src proto.Message) {
	xxx_messageInfo_MessageWithMap.Merge(m, src)
}
func (m *MessageWithMap) XXX_Size() int {
	return xxx_messageInfo_MessageWithMap.Size(m)
}
func (m *MessageWithMap) XXX_DiscardUnknown() {
	xxx_messageInfo_MessageWithMap.DiscardUnknown(m)
}

var xxx_messageInfo_MessageWithMap proto.InternalMessageInfo
// FloatingPoint wraps a single float64 and is used as a map value type above.
type FloatingPoint struct {
	F                    float64  `protobuf:"fixed64,1,opt,name=f,proto3" json:"f,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *FloatingPoint) Reset()      { *m = FloatingPoint{} }
func (*FloatingPoint) ProtoMessage() {}
func (*FloatingPoint) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{5}
}
func (m *FloatingPoint) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *FloatingPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_FloatingPoint.Marshal(b, m, deterministic)
}
func (m *FloatingPoint) XXX_Merge(src proto.Message) {
	xxx_messageInfo_FloatingPoint.Merge(m, src)
}
func (m *FloatingPoint) XXX_Size() int {
	return xxx_messageInfo_FloatingPoint.Size(m)
}
func (m *FloatingPoint) XXX_DiscardUnknown() {
	xxx_messageInfo_FloatingPoint.DiscardUnknown(m)
}

var xxx_messageInfo_FloatingPoint proto.InternalMessageInfo
// Uint128Pair exercises the gogoproto customtype extension: Left is a by-value
// custom Uint128 (non-nullable), Right a nullable pointer to one.
type Uint128Pair struct {
	Left                 github_com_gogo_protobuf_test_custom.Uint128  `protobuf:"bytes,1,opt,name=left,proto3,customtype=github.com/gogo/protobuf/test/custom.Uint128" json:"left"`
	Right                *github_com_gogo_protobuf_test_custom.Uint128 `protobuf:"bytes,2,opt,name=right,proto3,customtype=github.com/gogo/protobuf/test/custom.Uint128" json:"right,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                                      `json:"-"`
	XXX_unrecognized     []byte                                        `json:"-"`
	XXX_sizecache        int32                                         `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *Uint128Pair) Reset()      { *m = Uint128Pair{} }
func (*Uint128Pair) ProtoMessage() {}
func (*Uint128Pair) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{6}
}
func (m *Uint128Pair) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *Uint128Pair) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Uint128Pair.Marshal(b, m, deterministic)
}
func (m *Uint128Pair) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Uint128Pair.Merge(m, src)
}
func (m *Uint128Pair) XXX_Size() int {
	return xxx_messageInfo_Uint128Pair.Size(m)
}
func (m *Uint128Pair) XXX_DiscardUnknown() {
	xxx_messageInfo_Uint128Pair.DiscardUnknown(m)
}

var xxx_messageInfo_Uint128Pair proto.InternalMessageInfo
// ContainsNestedMap is an empty outer message; its nested message type
// (ContainsNestedMap_NestedMap, below) declares the actual map field.
type ContainsNestedMap struct {
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Generated proto plumbing (Reset/Descriptor and XXX_* marshalling hooks).
func (m *ContainsNestedMap) Reset()      { *m = ContainsNestedMap{} }
func (*ContainsNestedMap) ProtoMessage() {}
func (*ContainsNestedMap) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{7}
}
func (m *ContainsNestedMap) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ContainsNestedMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ContainsNestedMap.Marshal(b, m, deterministic)
}
func (m *ContainsNestedMap) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ContainsNestedMap.Merge(m, src)
}
func (m *ContainsNestedMap) XXX_Size() int {
	return xxx_messageInfo_ContainsNestedMap.Size(m)
}
func (m *ContainsNestedMap) XXX_DiscardUnknown() {
	xxx_messageInfo_ContainsNestedMap.DiscardUnknown(m)
}

var xxx_messageInfo_ContainsNestedMap proto.InternalMessageInfo
// ContainsNestedMap_NestedMap is the generated Go type for the nested
// message ContainsNestedMap.NestedMap; its descriptor index path
// ([]int{7, 0}) reflects that nesting.
// Code generated by protoc-gen-gogo; do not edit by hand.
type ContainsNestedMap_NestedMap struct {
	// NestedMapField maps string keys to fixed64-encoded doubles on the wire.
	NestedMapField map[string]float64 `protobuf:"bytes,1,rep,name=NestedMapField,proto3" json:"NestedMapField,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset restores m to the zero value.
func (m *ContainsNestedMap_NestedMap) Reset()         { *m = ContainsNestedMap_NestedMap{} }
// ProtoMessage marks ContainsNestedMap_NestedMap as implementing proto.Message.
func (*ContainsNestedMap_NestedMap) ProtoMessage()    {}
// Descriptor returns the file descriptor bytes and the nested index path.
func (*ContainsNestedMap_NestedMap) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{7, 0}
}
// XXX_Unmarshal is an internal entry point used by the proto runtime.
func (m *ContainsNestedMap_NestedMap) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
// XXX_Marshal is an internal entry point used by the proto runtime.
func (m *ContainsNestedMap_NestedMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ContainsNestedMap_NestedMap.Marshal(b, m, deterministic)
}
// XXX_Merge merges the fields of src into m via the proto runtime.
func (m *ContainsNestedMap_NestedMap) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ContainsNestedMap_NestedMap.Merge(m, src)
}
// XXX_Size reports the encoded size of m in bytes.
func (m *ContainsNestedMap_NestedMap) XXX_Size() int {
	return xxx_messageInfo_ContainsNestedMap_NestedMap.Size(m)
}
// XXX_DiscardUnknown drops any unrecognized fields retained on m.
func (m *ContainsNestedMap_NestedMap) XXX_DiscardUnknown() {
	xxx_messageInfo_ContainsNestedMap_NestedMap.DiscardUnknown(m)
}
// xxx_messageInfo_ContainsNestedMap_NestedMap caches reflection/marshaling metadata.
var xxx_messageInfo_ContainsNestedMap_NestedMap proto.InternalMessageInfo
// NotPacked exercises a repeated varint field that is deliberately NOT
// packed (no ",packed" in the tag), i.e. each element carries its own
// key on the wire. Field number is 5.
// Code generated by protoc-gen-gogo; do not edit by hand.
type NotPacked struct {
	Key []uint64 `protobuf:"varint,5,rep,name=key,proto3" json:"key,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset restores m to the zero value.
func (m *NotPacked) Reset()         { *m = NotPacked{} }
// ProtoMessage marks NotPacked as implementing proto.Message.
func (*NotPacked) ProtoMessage()    {}
// Descriptor returns the file descriptor bytes and the index path ([]int{8})
// locating NotPacked within it.
func (*NotPacked) Descriptor() ([]byte, []int) {
	return fileDescriptor_e24bba79c1e35a1f, []int{8}
}
// XXX_Unmarshal is an internal entry point used by the proto runtime.
func (m *NotPacked) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
// XXX_Marshal is an internal entry point used by the proto runtime.
func (m *NotPacked) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_NotPacked.Marshal(b, m, deterministic)
}
// XXX_Merge merges the fields of src into m via the proto runtime.
func (m *NotPacked) XXX_Merge(src proto.Message) {
	xxx_messageInfo_NotPacked.Merge(m, src)
}
// XXX_Size reports the encoded size of m in bytes.
func (m *NotPacked) XXX_Size() int {
	return xxx_messageInfo_NotPacked.Size(m)
}
// XXX_DiscardUnknown drops any unrecognized fields retained on m.
func (m *NotPacked) XXX_DiscardUnknown() {
	xxx_messageInfo_NotPacked.DiscardUnknown(m)
}
// xxx_messageInfo_NotPacked caches reflection/marshaling metadata for NotPacked.
var xxx_messageInfo_NotPacked proto.InternalMessageInfo
// init registers every enum, message, and map-entry type from this file with
// the global proto registry under its fully-qualified "theproto3.*" name.
// Each parent message is registered before its map-entry types; the
// registration names (e.g. "theproto3.AllMaps.BoolMapEntry") mirror the
// nesting in the .proto source.
// Code generated by protoc-gen-gogo; do not edit by hand.
func init() {
	// Top-level and nested enums.
	proto.RegisterEnum("theproto3.MapEnum", MapEnum_name, MapEnum_value)
	proto.RegisterEnum("theproto3.Message_Humour", Message_Humour_name, Message_Humour_value)
	// Message with its two map fields.
	proto.RegisterType((*Message)(nil), "theproto3.Message")
	proto.RegisterMapType((map[int64]*both.NinOptEnum)(nil), "theproto3.Message.Proto2ValueEntry")
	proto.RegisterMapType((map[int64]*Nested)(nil), "theproto3.Message.TerrainEntry")
	proto.RegisterType((*Nested)(nil), "theproto3.Nested")
	// AllMaps covers one map field per scalar key/value combination.
	proto.RegisterType((*AllMaps)(nil), "theproto3.AllMaps")
	proto.RegisterMapType((map[bool]bool)(nil), "theproto3.AllMaps.BoolMapEntry")
	proto.RegisterMapType((map[uint32]uint32)(nil), "theproto3.AllMaps.Fixed32MapEntry")
	proto.RegisterMapType((map[uint64]uint64)(nil), "theproto3.AllMaps.Fixed64MapEntry")
	proto.RegisterMapType((map[int32]int32)(nil), "theproto3.AllMaps.Int32MapEntry")
	proto.RegisterMapType((map[int64]int64)(nil), "theproto3.AllMaps.Int64MapEntry")
	proto.RegisterMapType((map[int32]int32)(nil), "theproto3.AllMaps.Sfixed32MapEntry")
	proto.RegisterMapType((map[int64]int64)(nil), "theproto3.AllMaps.Sfixed64MapEntry")
	proto.RegisterMapType((map[int32]int32)(nil), "theproto3.AllMaps.Sint32MapEntry")
	proto.RegisterMapType((map[int64]int64)(nil), "theproto3.AllMaps.Sint64MapEntry")
	proto.RegisterMapType((map[string]string)(nil), "theproto3.AllMaps.StringMapEntry")
	proto.RegisterMapType((map[string][]byte)(nil), "theproto3.AllMaps.StringToBytesMapEntry")
	proto.RegisterMapType((map[string]float64)(nil), "theproto3.AllMaps.StringToDoubleMapEntry")
	proto.RegisterMapType((map[string]MapEnum)(nil), "theproto3.AllMaps.StringToEnumMapEntry")
	proto.RegisterMapType((map[string]float32)(nil), "theproto3.AllMaps.StringToFloatMapEntry")
	proto.RegisterMapType((map[string]*FloatingPoint)(nil), "theproto3.AllMaps.StringToMsgMapEntry")
	proto.RegisterMapType((map[uint32]uint32)(nil), "theproto3.AllMaps.Uint32MapEntry")
	proto.RegisterMapType((map[uint64]uint64)(nil), "theproto3.AllMaps.Uint64MapEntry")
	// AllMapsOrdered duplicates AllMaps with stable_marshaler semantics;
	// its map-entry Go types are identical, only the registered names differ.
	proto.RegisterType((*AllMapsOrdered)(nil), "theproto3.AllMapsOrdered")
	proto.RegisterMapType((map[bool]bool)(nil), "theproto3.AllMapsOrdered.BoolMapEntry")
	proto.RegisterMapType((map[uint32]uint32)(nil), "theproto3.AllMapsOrdered.Fixed32MapEntry")
	proto.RegisterMapType((map[uint64]uint64)(nil), "theproto3.AllMapsOrdered.Fixed64MapEntry")
	proto.RegisterMapType((map[int32]int32)(nil), "theproto3.AllMapsOrdered.Int32MapEntry")
	proto.RegisterMapType((map[int64]int64)(nil), "theproto3.AllMapsOrdered.Int64MapEntry")
	proto.RegisterMapType((map[int32]int32)(nil), "theproto3.AllMapsOrdered.Sfixed32MapEntry")
	proto.RegisterMapType((map[int64]int64)(nil), "theproto3.AllMapsOrdered.Sfixed64MapEntry")
	proto.RegisterMapType((map[int32]int32)(nil), "theproto3.AllMapsOrdered.Sint32MapEntry")
	proto.RegisterMapType((map[int64]int64)(nil), "theproto3.AllMapsOrdered.Sint64MapEntry")
	proto.RegisterMapType((map[string]string)(nil), "theproto3.AllMapsOrdered.StringMapEntry")
	proto.RegisterMapType((map[string][]byte)(nil), "theproto3.AllMapsOrdered.StringToBytesMapEntry")
	proto.RegisterMapType((map[string]float64)(nil), "theproto3.AllMapsOrdered.StringToDoubleMapEntry")
	proto.RegisterMapType((map[string]MapEnum)(nil), "theproto3.AllMapsOrdered.StringToEnumMapEntry")
	proto.RegisterMapType((map[string]float32)(nil), "theproto3.AllMapsOrdered.StringToFloatMapEntry")
	proto.RegisterMapType((map[string]*FloatingPoint)(nil), "theproto3.AllMapsOrdered.StringToMsgMapEntry")
	proto.RegisterMapType((map[uint32]uint32)(nil), "theproto3.AllMapsOrdered.Uint32MapEntry")
	proto.RegisterMapType((map[uint64]uint64)(nil), "theproto3.AllMapsOrdered.Uint64MapEntry")
	proto.RegisterType((*MessageWithMap)(nil), "theproto3.MessageWithMap")
	proto.RegisterMapType((map[bool][]byte)(nil), "theproto3.MessageWithMap.ByteMappingEntry")
	proto.RegisterMapType((map[int64]*FloatingPoint)(nil), "theproto3.MessageWithMap.MsgMappingEntry")
	proto.RegisterMapType((map[int32]string)(nil), "theproto3.MessageWithMap.NameMappingEntry")
	proto.RegisterType((*FloatingPoint)(nil), "theproto3.FloatingPoint")
	proto.RegisterType((*Uint128Pair)(nil), "theproto3.Uint128Pair")
	proto.RegisterType((*ContainsNestedMap)(nil), "theproto3.ContainsNestedMap")
	proto.RegisterType((*ContainsNestedMap_NestedMap)(nil), "theproto3.ContainsNestedMap.NestedMap")
	proto.RegisterMapType((map[string]float64)(nil), "theproto3.ContainsNestedMap.NestedMap.NestedMapFieldEntry")
	proto.RegisterType((*NotPacked)(nil), "theproto3.NotPacked")
}
func init() { proto.RegisterFile("combos/unmarshaler/theproto3.proto", fileDescriptor_e24bba79c1e35a1f) }
// fileDescriptor_e24bba79c1e35a1f is the gzip-compressed serialized
// FileDescriptorProto for theproto3.proto (the 0x1f 0x8b leading bytes are
// the gzip magic number). It is returned by every Descriptor() method above
// and registered with proto.RegisterFile in init. Generated data — never
// edit these bytes by hand.
var fileDescriptor_e24bba79c1e35a1f = []byte{
	// 1612 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x99, 0xcf, 0x6f, 0xdb, 0x46,
	0x16, 0xc7, 0x35, 0xfa, 0xad, 0xa7, 0x1f, 0xa6, 0x27, 0xd9, 0x85, 0xd6, 0xc0, 0xd2, 0xb2, 0x02,
	0x24, 0x4a, 0xb0, 0x91, 0xb3, 0x4e, 0xb2, 0x9b, 0xba, 0x69, 0x53, 0x4b, 0xb1, 0x10, 0x37, 0xb6,
	0xe2, 0x4a, 0x76, 0xdc, 0x22, 0x40, 0x0d, 0xca, 0xa6, 0x25, 0x22, 0x12, 0x69, 0x90, 0xa3, 0xa0,
	0xbe, 0xe5, 0xcf, 0xe8, 0xad, 0xe8, 0xad, 0xc7, 0x22, 0x87, 0xa2, 0xc7, 0xf6, 0xe6, 0x63, 0x80,
	0x5e, 0x8a, 0x1e, 0x82, 0x58, 0xbd, 0xe4, 0x98, 0x63, 0x8e, 0xc5, 0xcc, 0x50, 0xd2, 0x48, 0x1c,
	0x8a, 0x4d, 0x2f, 0xbd, 0xf8, 0x24, 0xce, 0xf3, 0xfb, 0x7e, 0xe6, 0x71, 0x38, 0xf3, 0xf8, 0x05,
	0x0d, 0xc5, 0x03, 0xab, 0xd7, 0xb2, 0x9c, 0xe5, 0xbe, 0xd9, 0xd3, 0x6c, 0xa7, 0xa3, 0x75, 0x75,
	0x7b, 0x99, 0x74, 0xf4, 0x63, 0xdb, 0x22, 0xd6, 0xcd, 0x32, 0xfb, 0xc1, 0xa9, 0x51, 0x60, 0xe1,
	0x7a, 0xdb, 0x20, 0x9d, 0x7e, 0xab, 0x7c, 0x60, 0xf5, 0x96, 0xdb, 0x56, 0xdb, 0x5a, 0x66, 0xf1,
	0x56, 0xff, 0x88, 0x8d, 0xd8, 0x80, 0x5d, 0x71, 0xe5, 0xc2, 0xff, 0x7d, 0xd3, 0x89, 0xee, 0x90,
	0x65, 0x77, 0xee, 0x96, 0x45, 0x3a, 0x74, 0x52, 0x1a, 0xe3, 0xc2, 0xe2, 0xcf, 0x31, 0x48, 0x6c,
	0xe9, 0x8e, 0xa3, 0xb5, 0x75, 0x8c, 0x21, 0x6a, 0x6a, 0x3d, 0x3d, 0x8f, 0x0a, 0xa8, 0x94, 0x6a,
	0xb0, 0x6b, 0x7c, 0x1b, 0x92, 0x1d, 0xa3, 0xab, 0xd9, 0x06, 0x39, 0xc9, 0x87, 0x0b, 0xa8, 0x94,
	0x5b, 0xf9, 0x57, 0x79, 0x5c, 0xb6, 0xab, 0x2c, 0x3f, 0xe8, 0xf7, 0xac, 0xbe, 0xdd, 0x18, 0xa5,
	0xe2, 0x02, 0x64, 0x3a, 0xba, 0xd1, 0xee, 0x90, 0x7d, 0xc3, 0xdc, 0x3f, 0xe8, 0xe5, 0x23, 0x05,
	0x54, 0xca, 0x36, 0x80, 0xc7, 0x36, 0xcc, 0x6a, 0x8f, 0x4e, 0x76, 0xa8, 0x11, 0x2d, 0x1f, 0x2d,
	0xa0, 0x52, 0xa6, 0xc1, 0xae, 0xf1, 0x12, 0x64, 0x6c, 0xdd, 0xe9, 0x77, 0xc9, 0xfe, 0x81, 0xd5,
	0x37, 0x49, 0x3e, 0x51, 0x40, 0xa5, 0x48, 0x23, 0xcd, 0x63, 0x55, 0x1a, 0xc2, 0x97, 0x20, 0x4b,
	0xec, 0xbe, 0xbe, 0xef, 0x1c, 0x58, 0xc4, 0xe9, 0x69, 0x66, 0x3e, 0x59, 0x40, 0xa5, 0x64, 0x23,
	0x43, 0x83, 0x4d, 0x37, 0x86, 0x2f, 0x42, 0xcc, 0x39, 0xb0, 0x6c, 0x3d, 0x9f, 0x2a, 0xa0, 0x52,
	0xb8, 0xc1, 0x07, 0x58, 0x81, 0xc8, 0x53, 0xfd, 0x24, 0x1f, 0x2b, 0x44, 0x4a, 0xd1, 0x06, 0xbd,
	0xc4, 0x57, 0x21, 0x6e, 0xea, 0x0e, 0xd1, 0x0f, 0xf3, 0xf1, 0x02, 0x2a, 0xa5, 0x57, 0xe6, 0x85,
	0x5b, 0xab, 0xb3, 0x3f, 0x34, 0xdc, 0x04, 0xfc, 0x01, 0x24, 0x88, 0x6e, 0xdb, 0x9a, 0x61, 0xe6,
	0xa1, 0x10, 0x29, 0xa5, 0x57, 0x16, 0x25, 0xcb, 0xb0, 0xc3, 0x33, 0xd6, 0x4d, 0x62, 0x9f, 0x34,
	0x86, 0xf9, 0xf8, 0x36, 0x64, 0x58, 0xde, 0xca, 0xfe, 0x91, 0xa1, 0x77, 0x0f, 0xf3, 0x69, 0x36,
	0x17, 0x2e, 0xb3, 0xa7, 0x50, 0x37, 0xcc, 0x47, 0xc7, 0xa4, 0xae, 0x11, 0xe3, 0x99, 0xde, 0x48,
	0xf3, 0xbc, 0x1a, 0x4d, 0xc3, 0xb5, 0x91, 0xec, 0x99, 0xd6, 0xed, 0xeb, 0xf9, 0x2c, 0x9b, 0xf6,
	0x92, 0x64, 0xda, 0x6d, 0x96, 0xf6, 0x98, 0x66, 0xf1, 0xa9, 0x5d, 0x0e, 0x8b, 0x2c, 0x6c, 0x41,
	0x46, 0xac, 0x6b, 0xb8, 0x0c, 0x88, 0xad, 0x2d, 0x5b, 0x86, 0x2b, 0x10, 0xe3, 0x53, 0x84, 0xfd,
	0x56, 0x81, 0xff, 0x7d, 0x35, 0x7c, 0x07, 0x2d, 0x6c, 0x83, 0x32, 0x3d, 0x9f, 0x04, 0x79, 0x79,
	0x12, 0xa9, 0x88, 0x37, 0xbb, 0x6e, 0xf6, 0x7b, 0x02, 0xb1, 0x78, 0x0f, 0xe2, 0x7c, 0xff, 0xe0,
	0x34, 0x24, 0x76, 0xeb, 0x0f, 0xeb, 0x8f, 0xf6, 0xea, 0x4a, 0x08, 0x27, 0x21, 0xba, 0xbd, 0x5b,
	0x6f, 0x2a, 0x08, 0x67, 0x21, 0xd5, 0xdc, 0x5c, 0xdb, 0x6e, 0xee, 0x6c, 0x54, 0x1f, 0x2a, 0x61,
	0x3c, 0x07, 0xe9, 0xca, 0xc6, 0xe6, 0xe6, 0x7e, 0x65, 0x6d, 0x63, 0x73, 0xfd, 0x0b, 0x25, 0x52,
	0x54, 0x21, 0xce, 0xeb, 0xa4, 0x0f, 0xbe, 0xd5, 0x37, 0xcd, 0x13, 0x77, 0x0b, 0xf3, 0x41, 0xf1,
	0x05, 0x86, 0xc4, 0x5a, 0xb7, 0xbb, 0xa5, 0x1d, 0x3b, 0x78, 0x0f, 0xe6, 0x9b, 0xc4, 0x36, 0xcc,
	0xf6, 0x8e, 0x75, 0xdf, 0xea, 0xb7, 0xba, 0xfa, 0x96, 0x76, 0x9c, 0x47, 0x6c, 0x69, 0xaf, 0x0a,
	0xf7, 0xed, 0xa6, 0x97, 0x3d, 0xb9, 0x7c, 0x81, 0xbd, 0x0c, 0xbc, 0x03, 0xca, 0x30, 0x58, 0xeb,
	0x5a, 0x1a, 0xa1, 0xdc, 0x30, 0xe3, 0x96, 0x66, 0x70, 0x87, 0xa9, 0x1c, 0xeb, 0x21, 0xe0, 0xbb,
	0x90, 0xdc, 0x30, 0xc9, 0xcd, 0x15, 0x4a, 0x8b, 0x30, 0x5a, 0x41, 0x42, 0x1b, 0xa6, 0x70, 0xca,
	0x48, 0xe1, 0xaa, 0xff, 0x77, 0x8b, 0xaa, 0xa3, 0xb3, 0xd4, 0x2c, 0x65, 0xac, 0x66, 0x43, 0x7c,
	0x0f, 0x52, 0xbb, 0xc6, 0x70, 0xf2, 0x18, 0x93, 0x2f, 0x49, 0xe4, 0xa3, 0x1c, 0xae, 0x1f, 0x6b,
	0x86, 0x00, 0x3e, 0x7f, 0x7c, 0x26, 0x40, 0x28, 0x60, 0xac, 0xa1, 0x80, 0xe6, 0xa8, 0x82, 0x84,
	0x2f, 0xa0, 0x39, 0x55, 0x41, 0x53, 0xac, 0xa0, 0x39, 0xaa, 0x20, 0x39, 0x13, 0x20, 0x56, 0x30,
	0x1a, 0xe3, 0x0a, 0x40, 0xcd, 0xf8, 0x4a, 0x3f, 0xe4, 0x25, 0xa4, 0x18, 0xa1, 0x28, 0x21, 0x8c,
	0x93, 0x38, 0x42, 0x50, 0xe1, 0x75, 0x48, 0x37, 0x8f, 0xc6, 0x10, 0xf0, 0x9c, 0xe3, 0x51, 0x19,
	0x47, 0x53, 0x14, 0x51, 0x37, 0x2a, 0x85, 0xdf, 0x4c, 0x7a, 0x76, 0x29, 0xc2, 0xdd, 0x08, 0xaa,
	0x71, 0x29, 0x1c, 0x92, 0x09, 0x28, 0x45, 0xa0, 0x88, 0x3a, 0xda, 0x0c, 0x2b, 0x96, 0x45, 0x33,
	0xdd, 0xae, 0xb4, 0x28, 0x41, 0xb8, 0x19, 0x6e, 0x33, 0x74, 0x47, 0xec, 0x89, 0xb0, 0x4d, 0x4e,
	0xc5, 0x39, 0xff, 0x27, 0x32, 0xcc, 0x19, 0x3e, 0x91, 0xe1, 0x58, 0x3c, 0x67, 0x95, 0x13, 0xa2,
	0x3b, 0x94, 0x33, 0x17, 0x78, 0xce, 0x86, 0xa9, 0x53, 0xe7, 0x6c, 0x18, 0xc6, 0x9f, 0xc1, 0xdc,
	0x30, 0x46, 0xdb, 0x13, 0x85, 0x2a, 0x0c, 0x7a, 0x65, 0x06, 0xd4, 0xcd, 0xe4, 0xcc, 0x69, 0x3d,
	0xae, 0x43, 0x6e, 0x18, 0xda, 0x72, 0xd8, 0xed, 0xce, 0x33, 0xe2, 0xe5, 0x19, 0x44, 0x9e, 0xc8,
	0x81, 0x53, 0xea, 0x85, 0xfb, 0xf0, 0x4f, 0x79, 0x37, 0x12, 0xdb, 0x6f, 0x8a, 0xb7, 0xdf, 0x8b,
	0x62, 0xfb, 0x45, 0x62, 0xfb, 0xae, 0xc2, 0x3f, 0xa4, 0xbd, 0x27, 0x08, 0x12, 0x16, 0x21, 0x1f,
	0x42, 0x76, 0xa2, 0xe5, 0x88, 0xe2, 0x98, 0x44, 0x1c, 0xf3, 0x8a, 0xc7, 0x5b, 0x4b, 0xf2, 0xf6,
	0x98, 0x10, 0x47, 0x44, 0xf1, 0x5d, 0xc8, 0x4d, 0xf6, 0x1b, 0x51, 0x9d, 0x95, 0xa8, 0xb3, 0x12,
	0xb5, 0x7c, 0xee, 0xa8, 0x44, 0x1d, 0x9d, 0x52, 0x37, 0x7d, 0xe7, 0x9e, 0x97, 0xa8, 0xe7, 0x25,
	0x6a, 0xf9, 0xdc, 0x58, 0xa2, 0xc6, 0xa2, 0xfa, 0x23, 0x98, 0x9b, 0x6a, 0x31, 0xa2, 0x3c, 0x21,
	0x91, 0x27, 0x44, 0xf9, 0xc7, 0xa0, 0x4c, 0x37, 0x17, 0x51, 0x3f, 0x27, 0xd1, 0xcf, 0xc9, 0xa6,
	0x97, 0x57, 0x1f, 0x97, 0xc8, 0xe3, 0xd2, 0xe9, 0xe5, 0x7a, 0x45, 0xa2, 0x57, 0x44, 0xfd, 0x2a,
	0x64, 0xc4, 0x6e, 0x22, 0x6a, 0x93, 0x12, 0x6d, 0x72, 0x7a, 0xdd, 0x27, 0x9a, 0x49, 0xd0, 0x4e,
	0x4f, 0xf9, 0x1c, 0x97, 0x89, 0x16, 0x12, 0x04, 0xc9, 0x88, 0x90, 0xc7, 0x70, 0x51, 0xd6, 0x32,
	0x24, 0x8c, 0x92, 0xc8, 0xc8, 0x51, 0x8f, 0x38, 0x36, 0x7b, 0x54, 0x35, 0x61, 0x9c, 0x16, 0x9e,
	0xc0, 0x05, 0x49, 0xe3, 0x90, 0x60, 0xcb, 0x93, 0x6e, 0x2c, 0x2f, 0x60, 0x59, 0x13, 0x30, 0xcc,
	0xf6, 0xb6, 0x65, 0x98, 0x44, 0x74, 0x65, 0x3f, 0x5c, 0x80, 0x9c, 0xdb, 0x9e, 0x1e, 0xd9, 0x87,
	0xba, 0xad, 0x1f, 0xe2, 0x2f, 0xfd, 0xbd, 0xd3, 0x0d, 0x6f, 0x53, 0x73, 0x55, 0xef, 0x61, 0xa1,
	0x9e, 0xf8, 0x5a, 0xa8, 0xe5, 0x60, 0x7c, 0x90, 0x93, 0xaa, 0x7a, 0x9c, 0xd4, 0x15, 0x7f, 0xa8,
	0x9f, 0xa1, 0xaa, 0x7a, 0x0c, 0xd5, 0x6c, 0x88, 0xd4, 0x57, 0xd5, 0xbc, 0xbe, 0xaa, 0xe4, 0x4f,
	0xf1, 0xb7, 0x57, 0x35, 0xaf, 0xbd, 0x0a, 0xe0, 0xc8, 0x5d, 0x56, 0xcd, 0xeb, 0xb2, 0x66, 0x70,
	0xfc, 0xcd, 0x56, 0xcd, 0x6b, 0xb6, 0x02, 0x38, 0x72, 0xcf, 0xb5, 0x21, 0xf1, 0x5c, 0x57, 0xfd,
	0x41, 0xb3, 0xac, 0xd7, 0xa6, 0xcc, 0x7a, 0x5d, 0x9b, 0x51, 0xd4, 0x4c, 0x07, 0xb6, 0x21, 0x71,
	0x60, 0x41, 0x85, 0xf9, 0x18, 0xb1, 0x4d, 0x99, 0x11, 0x0b, 0x2c, 0xcc, 0xcf, 0x8f, 0x7d, 0x32,
	0xed, 0xc7, 0x2e, 0xfb, 0x93, 0xe4, 0xb6, 0xac, 0xe6, 0xb5, 0x65, 0xa5, 0xa0, 0x33, 0x27, 0x73,
	0x67, 0x4f, 0x7c, 0xdd, 0xd9, 0x9f, 0x38, 0xc2, 0x41, 0x26, 0xed, 0x73, 0x3f, 0x93, 0x56, 0x0e,
	0x66, 0xcf, 0xf6, 0x6a, 0xbb, 0x3e, 0x5e, 0xed, 0x7a, 0x30, 0xf8, 0xdc, 0xb2, 0x9d, 0x5b, 0xb6,
	0x73, 0xcb, 0x76, 0x6e, 0xd9, 0xfe, 0x7e, 0xcb, 0xb6, 0x1a, 0xfd, 0xfa, 0xdb, 0x45, 0x54, 0xfc,
	0x25, 0x02, 0x39, 0xf7, 0xcb, 0xe0, 0x9e, 0x41, 0x3a, 0xb4, 0xbd, 0x6d, 0x41, 0xc6, 0xd4, 0x7a,
	0xfa, 0x7e, 0x4f, 0x3b, 0x3e, 0x36, 0xcc, 0xb6, 0xeb, 0xd9, 0xae, 0x79, 0x3f, 0x25, 0xba, 0x82,
	0x72, 0x5d, 0xeb, 0xd1, 0x5e, 0x45, 0x93, 0xdd, 0xd7, 0x8d, 0x39, 0x8e, 0xe0, 0x4f, 0x21, 0xdd,
	0x73, 0xda, 0x23, 0x5a, 0xd8, 0xf3, 0x22, 0x9c, 0xa2, 0xf1, 0x3b, 0x1d, 0xc3, 0xa0, 0x37, 0x0a,
	0xd0, 0xd2, 0x5a, 0x27, 0x64, 0x5c, 0x5a, 0x24, 0xa8, 0x34, 0xfa, 0x4c, 0x27, 0x4b, 0x6b, 0x8d,
	0x23, 0x74, 0xdb, 0x4e, 0xd7, 0x1e, 0xd4, 0xe9, 0x26, 0x36, 0xcf, 0x1e, 0xcc, 0x4d, 0x55, 0x2b,
	0x39, 0xf3, 0x7f, 0xe1, 0xd9, 0xd0, 0xc2, 0xa6, 0x2b, 0x0f, 0x3a, 0x13, 0xe2, 0x86, 0x2c, 0xfe,
	0x1b, 0xb2, 0x13, 0x6c, 0x9c, 0x01, 0x74, 0xc4, 0xa4, 0xa8, 0x81, 0x8e, 0x8a, 0xdf, 0x20, 0x48,
	0xd3, 0x3e, 0xf9, 0xdf, 0x95, 0x3b, 0xdb, 0x9a, 0x61, 0xe3, 0x07, 0x10, 0xed, 0xea, 0x47, 0x84,
	0x25, 0x64, 0x2a, 0xb7, 0x4e, 0x5f, 0x2d, 0x86, 0x7e, 0x7b, 0xb5, 0xf8, 0x9f, 0x80, 0xff, 0x12,
	0xf4, 0x1d, 0x62, 0xf5, 0xca, 0x2e, 0xa7, 0xc1, 0x08, 0xb8, 0x06, 0x31, 0xdb, 0x68, 0x77, 0x08,
	0x2f, 0xa9, 0x72, 0xe3, 0xbd, 0x31, 0x5c, 0x5e, 0x3c, 0x45, 0x30, 0x5f, 0xb5, 0x4c, 0xa2, 0x19,
	0xa6, 0xc3, 0xbf, 0xd6, 0xd2, 0x37, 0xe4, 0x0b, 0x04, 0xa9, 0xd1, 0x08, 0xb7, 0x20, 0x37, 0x1a,
	0xb0, 0x8f, 0xe0, 0xee, 0x4e, 0x5d, 0x15, 0x56, 0xd8, 0xc3, 0x28, 0x4b, 0xae, 0x98, 0xd8, 0x7d,
	0x27, 0x4f, 0x06, 0x17, 0xd6, 0xe0, 0x82, 0x24, 0xed, 0x7d, 0x5e, 0xc8, 0xc5, 0x25, 0x48, 0xd5,
	0x2d, 0xb2, 0xad, 0x1d, 0x3c, 0x65, 0x9f, 0x9c, 0xc7, 0xff, 0x55, 0xa8, 0x84, 0x95, 0x10, 0x13,
	0x5f, 0x5b, 0x82, 0x84, 0x7b, 0xfa, 0x71, 0x1c, 0xc2, 0x5b, 0x6b, 0x4a, 0x88, 0xfd, 0x56, 0x14,
	0xc4, 0x7e, 0xab, 0x4a, 0xb8, 0xb2, 0x79, 0x7a, 0xa6, 0x86, 0x5e, 0x9e, 0xa9, 0xa1, 0x5f, 0xcf,
	0xd4, 0xd0, 0xeb, 0x33, 0x15, 0xbd, 0x39, 0x53, 0xd1, 0xdb, 0x33, 0x15, 0xbd, 0x3b, 0x53, 0xd1,
	0xf3, 0x81, 0x8a, 0xbe, 0x1b, 0xa8, 0xe8, 0xfb, 0x81, 0x8a, 0x7e, 0x1c, 0xa8, 0xe8, 0xa7, 0x81,
	0x8a, 0x4e, 0x07, 0x6a, 0xe8, 0xe5, 0x40, 0x45, 0xaf, 0x07, 0x2a, 0x7a, 0x33, 0x50, 0x43, 0x6f,
	0x07, 0x2a, 0x7a, 0x37, 0x50, 0x43, 0xcf, 0x7f, 0x57, 0x43, 0xad, 0x38, 0x5f, 0x9e, 0x3f, 0x02,
	0x00, 0x00, 0xff, 0xff, 0xda, 0xba, 0x48, 0xa4, 0x67, 0x1a, 0x00, 0x00,
}
// The Description methods below are generated by the gogoproto "description"
// plugin: every message type in this file exposes the same full
// FileDescriptorSet by delegating to the package-level Theproto3Description.
func (this *Message) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *Nested) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *AllMaps) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *AllMapsOrdered) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *MessageWithMap) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *FloatingPoint) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *Uint128Pair) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *ContainsNestedMap) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *ContainsNestedMap_NestedMap) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func (this *NotPacked) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
	return Theproto3Description()
}
func Theproto3Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
d := &github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet{}
var gzipped = []byte{
// 8097 bytes of a gzipped FileDescriptorSet
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x7d, 0x5b, 0x70, 0x23, 0xd7,
0x99, 0x1e, 0x1b, 0x0d, 0x90, 0xc0, 0x0f, 0x90, 0x6c, 0x36, 0x67, 0x28, 0x88, 0x1a, 0x91, 0x33,
0xd0, 0x68, 0x44, 0xd1, 0x12, 0x67, 0x86, 0xc3, 0xb9, 0x61, 0x2c, 0x69, 0x01, 0x10, 0x1c, 0x71,
0x4c, 0x82, 0x74, 0x93, 0xb4, 0x34, 0x56, 0x12, 0x54, 0x13, 0x38, 0x24, 0x21, 0x01, 0xdd, 0x58,
0x74, 0x43, 0x12, 0x55, 0xa9, 0x94, 0xb2, 0x4e, 0x36, 0xde, 0xdc, 0x93, 0x4d, 0x2a, 0x5e, 0xc7,
0x17, 0x79, 0xb7, 0x76, 0xed, 0xdd, 0xdc, 0xbc, 0xce, 0xc6, 0xd9, 0x75, 0x52, 0x59, 0xe5, 0xc1,
0xc9, 0xe4, 0x25, 0xe5, 0x4d, 0x5e, 0x52, 0xae, 0x94, 0xca, 0x1a, 0x3b, 0xb5, 0x4e, 0xe2, 0x24,
0xce, 0x46, 0x55, 0x71, 0x95, 0xf7, 0x61, 0xeb, 0xdc, 0xba, 0x4f, 0x1f, 0x34, 0xd0, 0xe0, 0x48,
0xb2, 0xf7, 0xc1, 0x2f, 0x33, 0xe8, 0x73, 0xfe, 0xef, 0xeb, 0xbf, 0xff, 0xcb, 0x39, 0x7f, 0x9f,
0x73, 0x00, 0xc2, 0xbd, 0x3c, 0x9c, 0x3d, 0xb4, 0xed, 0xc3, 0x26, 0xba, 0xd8, 0xee, 0xd8, 0xae,
0xbd, 0xdf, 0x3d, 0xb8, 0x58, 0x47, 0x4e, 0xad, 0xd3, 0x68, 0xbb, 0x76, 0x67, 0x89, 0xb4, 0xe9,
0x93, 0x54, 0x62, 0x89, 0x4b, 0xe4, 0x36, 0x61, 0x6a, 0xad, 0xd1, 0x44, 0xab, 0x9e, 0xe0, 0x0e,
0x72, 0xf5, 0x1b, 0x10, 0x3f, 0x68, 0x34, 0x51, 0x56, 0x39, 0xab, 0x2e, 0xa4, 0x97, 0xcf, 0x2f,
0x49, 0xa0, 0xa5, 0x20, 0x62, 0x1b, 0x37, 0x1b, 0x04, 0x91, 0xfb, 0x5e, 0x1c, 0xa6, 0x43, 0x7a,
0x75, 0x1d, 0xe2, 0x96, 0xd9, 0xc2, 0x8c, 0xca, 0x42, 0xca, 0x20, 0x9f, 0xf5, 0x2c, 0x8c, 0xb5,
0xcd, 0xda, 0x2b, 0xe6, 0x21, 0xca, 0xc6, 0x48, 0x33, 0xbf, 0xd4, 0xe7, 0x00, 0xea, 0xa8, 0x8d,
0xac, 0x3a, 0xb2, 0x6a, 0xc7, 0x59, 0xf5, 0xac, 0xba, 0x90, 0x32, 0x84, 0x16, 0xfd, 0x23, 0x30,
0xd5, 0xee, 0xee, 0x37, 0x1b, 0xb5, 0xaa, 0x20, 0x06, 0x67, 0xd5, 0x85, 0x84, 0xa1, 0xd1, 0x8e,
0x55, 0x5f, 0xf8, 0x09, 0x98, 0x7c, 0x0d, 0x99, 0xaf, 0x88, 0xa2, 0x69, 0x22, 0x3a, 0x81, 0x9b,
0x05, 0xc1, 0x12, 0x64, 0x5a, 0xc8, 0x71, 0xcc, 0x43, 0x54, 0x75, 0x8f, 0xdb, 0x28, 0x1b, 0x27,
0x4f, 0x7f, 0xb6, 0xe7, 0xe9, 0xe5, 0x27, 0x4f, 0x33, 0xd4, 0xee, 0x71, 0x1b, 0xe9, 0x05, 0x48,
0x21, 0xab, 0xdb, 0xa2, 0x0c, 0x89, 0x3e, 0xf6, 0x2b, 0x5b, 0xdd, 0x96, 0xcc, 0x92, 0xc4, 0x30,
0x46, 0x31, 0xe6, 0xa0, 0xce, 0xab, 0x8d, 0x1a, 0xca, 0x8e, 0x12, 0x82, 0x27, 0x7a, 0x08, 0x76,
0x68, 0xbf, 0xcc, 0xc1, 0x71, 0x7a, 0x09, 0x52, 0xe8, 0x75, 0x17, 0x59, 0x4e, 0xc3, 0xb6, 0xb2,
0x63, 0x84, 0xe4, 0xf1, 0x10, 0x2f, 0xa2, 0x66, 0x5d, 0xa6, 0xf0, 0x71, 0xfa, 0x35, 0x18, 0xb3,
0xdb, 0x6e, 0xc3, 0xb6, 0x9c, 0x6c, 0xf2, 0xac, 0xb2, 0x90, 0x5e, 0x3e, 0x13, 0x1a, 0x08, 0x5b,
0x54, 0xc6, 0xe0, 0xc2, 0xfa, 0x3a, 0x68, 0x8e, 0xdd, 0xed, 0xd4, 0x50, 0xb5, 0x66, 0xd7, 0x51,
0xb5, 0x61, 0x1d, 0xd8, 0xd9, 0x14, 0x21, 0x98, 0xef, 0x7d, 0x10, 0x22, 0x58, 0xb2, 0xeb, 0x68,
0xdd, 0x3a, 0xb0, 0x8d, 0x09, 0x27, 0x70, 0xad, 0xcf, 0xc0, 0xa8, 0x73, 0x6c, 0xb9, 0xe6, 0xeb,
0xd9, 0x0c, 0x89, 0x10, 0x76, 0x95, 0xfb, 0xbd, 0x51, 0x98, 0x1c, 0x26, 0xc4, 0x6e, 0x41, 0xe2,
0x00, 0x3f, 0x65, 0x36, 0x76, 0x12, 0x1b, 0x50, 0x4c, 0xd0, 0x88, 0xa3, 0x0f, 0x68, 0xc4, 0x02,
0xa4, 0x2d, 0xe4, 0xb8, 0xa8, 0x4e, 0x23, 0x42, 0x1d, 0x32, 0xa6, 0x80, 0x82, 0x7a, 0x43, 0x2a,
0xfe, 0x40, 0x21, 0xf5, 0x22, 0x4c, 0x7a, 0x2a, 0x55, 0x3b, 0xa6, 0x75, 0xc8, 0x63, 0xf3, 0x62,
0x94, 0x26, 0x4b, 0x65, 0x8e, 0x33, 0x30, 0xcc, 0x98, 0x40, 0x81, 0x6b, 0x7d, 0x15, 0xc0, 0xb6,
0x90, 0x7d, 0x50, 0xad, 0xa3, 0x5a, 0x33, 0x9b, 0xec, 0x63, 0xa5, 0x2d, 0x2c, 0xd2, 0x63, 0x25,
0x9b, 0xb6, 0xd6, 0x9a, 0xfa, 0x4d, 0x3f, 0xd4, 0xc6, 0xfa, 0x44, 0xca, 0x26, 0x4d, 0xb2, 0x9e,
0x68, 0xdb, 0x83, 0x89, 0x0e, 0xc2, 0x71, 0x8f, 0xea, 0xec, 0xc9, 0x52, 0x44, 0x89, 0xa5, 0xc8,
0x27, 0x33, 0x18, 0x8c, 0x3e, 0xd8, 0x78, 0x47, 0xbc, 0xd4, 0x1f, 0x03, 0xaf, 0xa1, 0x4a, 0xc2,
0x0a, 0xc8, 0x28, 0x94, 0xe1, 0x8d, 0x15, 0xb3, 0x85, 0x66, 0xdf, 0x80, 0x89, 0xa0, 0x79, 0xf4,
0x53, 0x90, 0x70, 0x5c, 0xb3, 0xe3, 0x92, 0x28, 0x4c, 0x18, 0xf4, 0x42, 0xd7, 0x40, 0x45, 0x56,
0x9d, 0x8c, 0x72, 0x09, 0x03, 0x7f, 0xd4, 0x7f, 0xce, 0x7f, 0x60, 0x95, 0x3c, 0xf0, 0x85, 0x5e,
0x8f, 0x06, 0x98, 0xe5, 0xe7, 0x9e, 0xbd, 0x0e, 0xe3, 0x81, 0x07, 0x18, 0xf6, 0xd6, 0xb9, 0x3f,
0x0f, 0xa7, 0x43, 0xa9, 0xf5, 0x17, 0xe1, 0x54, 0xd7, 0x6a, 0x58, 0x2e, 0xea, 0xb4, 0x3b, 0x08,
0x47, 0x2c, 0xbd, 0x55, 0xf6, 0x0f, 0xc7, 0xfa, 0xc4, 0xdc, 0x9e, 0x28, 0x4d, 0x59, 0x8c, 0xe9,
0x6e, 0x6f, 0xe3, 0x62, 0x2a, 0xf9, 0xfd, 0x31, 0xed, 0xcd, 0x37, 0xdf, 0x7c, 0x33, 0x96, 0xfb,
0xcc, 0x28, 0x9c, 0x0a, 0xcb, 0x99, 0xd0, 0xf4, 0x9d, 0x81, 0x51, 0xab, 0xdb, 0xda, 0x47, 0x1d,
0x62, 0xa4, 0x84, 0xc1, 0xae, 0xf4, 0x02, 0x24, 0x9a, 0xe6, 0x3e, 0x6a, 0x66, 0xe3, 0x67, 0x95,
0x85, 0x89, 0xe5, 0x8f, 0x0c, 0x95, 0x95, 0x4b, 0x1b, 0x18, 0x62, 0x50, 0xa4, 0xfe, 0x2c, 0xc4,
0xd9, 0x10, 0x8d, 0x19, 0x16, 0x87, 0x63, 0xc0, 0xb9, 0x64, 0x10, 0x9c, 0xfe, 0x08, 0xa4, 0xf0,
0xff, 0x34, 0x36, 0x46, 0x89, 0xce, 0x49, 0xdc, 0x80, 0xe3, 0x42, 0x9f, 0x85, 0x24, 0x49, 0x93,
0x3a, 0xe2, 0x53, 0x9b, 0x77, 0x8d, 0x03, 0xab, 0x8e, 0x0e, 0xcc, 0x6e, 0xd3, 0xad, 0xbe, 0x6a,
0x36, 0xbb, 0x88, 0x04, 0x7c, 0xca, 0xc8, 0xb0, 0xc6, 0x4f, 0xe0, 0x36, 0x7d, 0x1e, 0xd2, 0x34,
0xab, 0x1a, 0x56, 0x1d, 0xbd, 0x4e, 0x46, 0xcf, 0x84, 0x41, 0x13, 0x6d, 0x1d, 0xb7, 0xe0, 0xdb,
0xbf, 0xec, 0xd8, 0x16, 0x0f, 0x4d, 0x72, 0x0b, 0xdc, 0x40, 0x6e, 0x7f, 0x5d, 0x1e, 0xb8, 0x1f,
0x0d, 0x7f, 0x3c, 0x39, 0xa6, 0x72, 0x5f, 0x8f, 0x41, 0x9c, 0x8c, 0x17, 0x93, 0x90, 0xde, 0xbd,
0xbb, 0x5d, 0xae, 0xae, 0x6e, 0xed, 0x15, 0x37, 0xca, 0x9a, 0xa2, 0x4f, 0x00, 0x90, 0x86, 0xb5,
0x8d, 0xad, 0xc2, 0xae, 0x16, 0xf3, 0xae, 0xd7, 0x2b, 0xbb, 0xd7, 0x56, 0x34, 0xd5, 0x03, 0xec,
0xd1, 0x86, 0xb8, 0x28, 0x70, 0x65, 0x59, 0x4b, 0xe8, 0x1a, 0x64, 0x28, 0xc1, 0xfa, 0x8b, 0xe5,
0xd5, 0x6b, 0x2b, 0xda, 0x68, 0xb0, 0xe5, 0xca, 0xb2, 0x36, 0xa6, 0x8f, 0x43, 0x8a, 0xb4, 0x14,
0xb7, 0xb6, 0x36, 0xb4, 0xa4, 0xc7, 0xb9, 0xb3, 0x6b, 0xac, 0x57, 0x6e, 0x6b, 0x29, 0x8f, 0xf3,
0xb6, 0xb1, 0xb5, 0xb7, 0xad, 0x81, 0xc7, 0xb0, 0x59, 0xde, 0xd9, 0x29, 0xdc, 0x2e, 0x6b, 0x69,
0x4f, 0xa2, 0x78, 0x77, 0xb7, 0xbc, 0xa3, 0x65, 0x02, 0x6a, 0x5d, 0x59, 0xd6, 0xc6, 0xbd, 0x5b,
0x94, 0x2b, 0x7b, 0x9b, 0xda, 0x84, 0x3e, 0x05, 0xe3, 0xf4, 0x16, 0x5c, 0x89, 0x49, 0xa9, 0xe9,
0xda, 0x8a, 0xa6, 0xf9, 0x8a, 0x50, 0x96, 0xa9, 0x40, 0xc3, 0xb5, 0x15, 0x4d, 0xcf, 0x95, 0x20,
0x41, 0xa2, 0x4b, 0xd7, 0x61, 0x62, 0xa3, 0x50, 0x2c, 0x6f, 0x54, 0xb7, 0xb6, 0x77, 0xd7, 0xb7,
0x2a, 0x85, 0x0d, 0x4d, 0xf1, 0xdb, 0x8c, 0xf2, 0xc7, 0xf7, 0xd6, 0x8d, 0xf2, 0xaa, 0x16, 0x13,
0xdb, 0xb6, 0xcb, 0x85, 0xdd, 0xf2, 0xaa, 0xa6, 0xe6, 0x6a, 0x70, 0x2a, 0x6c, 0x9c, 0x0c, 0xcd,
0x0c, 0xc1, 0xc5, 0xb1, 0x3e, 0x2e, 0x26, 0x5c, 0x3d, 0x2e, 0xfe, 0x6e, 0x0c, 0xa6, 0x43, 0xe6,
0x8a, 0xd0, 0x9b, 0x3c, 0x07, 0x09, 0x1a, 0xa2, 0x74, 0xf6, 0x7c, 0x32, 0x74, 0xd2, 0x21, 0x01,
0xdb, 0x33, 0x83, 0x12, 0x9c, 0x58, 0x41, 0xa8, 0x7d, 0x2a, 0x08, 0x4c, 0xd1, 0x33, 0xa6, 0xff,
0xd9, 0x9e, 0x31, 0x9d, 0x4e, 0x7b, 0xd7, 0x86, 0x99, 0xf6, 0x48, 0xdb, 0xc9, 0xc6, 0xf6, 0x44,
0xc8, 0xd8, 0x7e, 0x0b, 0xa6, 0x7a, 0x88, 0x86, 0x1e, 0x63, 0x3f, 0xa5, 0x40, 0xb6, 0x9f, 0x71,
0x22, 0x46, 0xba, 0x58, 0x60, 0xa4, 0xbb, 0x25, 0x5b, 0xf0, 0x5c, 0x7f, 0x27, 0xf4, 0xf8, 0xfa,
0xcb, 0x0a, 0xcc, 0x84, 0x57, 0x8a, 0xa1, 0x3a, 0x3c, 0x0b, 0xa3, 0x2d, 0xe4, 0x1e, 0xd9, 0xbc,
0x5a, 0xba, 0x10, 0x32, 0x07, 0xe3, 0x6e, 0xd9, 0xd9, 0x0c, 0x25, 0x4e, 0xe2, 0x6a, 0xbf, 0x72,
0x8f, 0x6a, 0xd3, 0xa3, 0xe9, 0x2f, 0xc5, 0xe0, 0x74, 0x28, 0x79, 0xa8, 0xa2, 0x8f, 0x02, 0x34,
0xac, 0x76, 0xd7, 0xa5, 0x15, 0x11, 0x1d, 0x60, 0x53, 0xa4, 0x85, 0x0c, 0x5e, 0x78, 0xf0, 0xec,
0xba, 0x5e, 0xbf, 0x4a, 0xfa, 0x81, 0x36, 0x11, 0x81, 0x1b, 0xbe, 0xa2, 0x71, 0xa2, 0xe8, 0x5c,
0x9f, 0x27, 0xed, 0x09, 0xcc, 0x4b, 0xa0, 0xd5, 0x9a, 0x0d, 0x64, 0xb9, 0x55, 0xc7, 0xed, 0x20,
0xb3, 0xd5, 0xb0, 0x0e, 0xc9, 0x0c, 0x92, 0xcc, 0x27, 0x0e, 0xcc, 0xa6, 0x83, 0x8c, 0x49, 0xda,
0xbd, 0xc3, 0x7b, 0x31, 0x82, 0x04, 0x50, 0x47, 0x40, 0x8c, 0x06, 0x10, 0xb4, 0xdb, 0x43, 0xe4,
0xfe, 0x7a, 0x0a, 0xd2, 0x42, 0x5d, 0xad, 0x9f, 0x83, 0xcc, 0xcb, 0xe6, 0xab, 0x66, 0x95, 0xbf,
0x2b, 0x51, 0x4b, 0xa4, 0x71, 0xdb, 0x36, 0x7b, 0x5f, 0xba, 0x04, 0xa7, 0x88, 0x88, 0xdd, 0x75,
0x51, 0xa7, 0x5a, 0x6b, 0x9a, 0x8e, 0x43, 0x8c, 0x96, 0x24, 0xa2, 0x3a, 0xee, 0xdb, 0xc2, 0x5d,
0x25, 0xde, 0xa3, 0x5f, 0x85, 0x69, 0x82, 0x68, 0x75, 0x9b, 0x6e, 0xa3, 0xdd, 0x44, 0x55, 0xfc,
0xf6, 0xe6, 0x90, 0x99, 0xc4, 0xd3, 0x6c, 0x0a, 0x4b, 0x6c, 0x32, 0x01, 0xac, 0x91, 0xa3, 0xaf,
0xc2, 0xa3, 0x04, 0x76, 0x88, 0x2c, 0xd4, 0x31, 0x5d, 0x54, 0x45, 0x3f, 0xdf, 0x35, 0x9b, 0x4e,
0xd5, 0xb4, 0xea, 0xd5, 0x23, 0xd3, 0x39, 0xca, 0x9e, 0xc2, 0x04, 0xc5, 0x58, 0x56, 0x31, 0x1e,
0xc6, 0x82, 0xb7, 0x99, 0x5c, 0x99, 0x88, 0x15, 0xac, 0xfa, 0xf3, 0xa6, 0x73, 0xa4, 0xe7, 0x61,
0x86, 0xb0, 0x38, 0x6e, 0xa7, 0x61, 0x1d, 0x56, 0x6b, 0x47, 0xa8, 0xf6, 0x4a, 0xb5, 0xeb, 0x1e,
0xdc, 0xc8, 0x3e, 0x22, 0xde, 0x9f, 0x68, 0xb8, 0x43, 0x64, 0x4a, 0x58, 0x64, 0xcf, 0x3d, 0xb8,
0xa1, 0xef, 0x40, 0x06, 0x3b, 0xa3, 0xd5, 0x78, 0x03, 0x55, 0x0f, 0xec, 0x0e, 0x99, 0x1a, 0x27,
0x42, 0x86, 0x26, 0xc1, 0x82, 0x4b, 0x5b, 0x0c, 0xb0, 0x69, 0xd7, 0x51, 0x3e, 0xb1, 0xb3, 0x5d,
0x2e, 0xaf, 0x1a, 0x69, 0xce, 0xb2, 0x66, 0x77, 0x70, 0x40, 0x1d, 0xda, 0x9e, 0x81, 0xd3, 0x34,
0xa0, 0x0e, 0x6d, 0x6e, 0xde, 0xab, 0x30, 0x5d, 0xab, 0xd1, 0x67, 0x6e, 0xd4, 0xaa, 0xec, 0x1d,
0xcb, 0xc9, 0x6a, 0x01, 0x63, 0xd5, 0x6a, 0xb7, 0xa9, 0x00, 0x8b, 0x71, 0x47, 0xbf, 0x09, 0xa7,
0x7d, 0x63, 0x89, 0xc0, 0xa9, 0x9e, 0xa7, 0x94, 0xa1, 0x57, 0x61, 0xba, 0x7d, 0xdc, 0x0b, 0xd4,
0x03, 0x77, 0x6c, 0x1f, 0xcb, 0xb0, 0xeb, 0x70, 0xaa, 0x7d, 0xd4, 0xee, 0xc5, 0x2d, 0x8a, 0x38,
0xbd, 0x7d, 0xd4, 0x96, 0x81, 0x8f, 0x93, 0x17, 0xee, 0x0e, 0xaa, 0x99, 0x2e, 0xaa, 0x67, 0x1f,
0x12, 0xc5, 0x85, 0x0e, 0xfd, 0x22, 0x68, 0xb5, 0x5a, 0x15, 0x59, 0xe6, 0x7e, 0x13, 0x55, 0xcd,
0x0e, 0xb2, 0x4c, 0x27, 0x3b, 0x2f, 0x0a, 0x4f, 0xd4, 0x6a, 0x65, 0xd2, 0x5b, 0x20, 0x9d, 0xfa,
0x22, 0x4c, 0xd9, 0xfb, 0x2f, 0xd7, 0x68, 0x48, 0x56, 0xdb, 0x1d, 0x74, 0xd0, 0x78, 0x3d, 0x7b,
0x9e, 0xd8, 0x77, 0x12, 0x77, 0x90, 0x80, 0xdc, 0x26, 0xcd, 0xfa, 0x93, 0xa0, 0xd5, 0x9c, 0x23,
0xb3, 0xd3, 0x26, 0x63, 0xb2, 0xd3, 0x36, 0x6b, 0x28, 0xfb, 0x38, 0x15, 0xa5, 0xed, 0x15, 0xde,
0x8c, 0x53, 0xc2, 0x79, 0xad, 0x71, 0xe0, 0x72, 0xc6, 0x27, 0x68, 0x4a, 0x90, 0x36, 0xc6, 0xb6,
0x00, 0x1a, 0x36, 0x45, 0xe0, 0xc6, 0x0b, 0x44, 0x6c, 0xa2, 0x7d, 0xd4, 0x16, 0xef, 0xfb, 0x18,
0x8c, 0x63, 0x49, 0xff, 0xa6, 0x4f, 0xd2, 0x82, 0xac, 0x7d, 0x24, 0xdc, 0x71, 0x05, 0x66, 0xb0,
0x50, 0x0b, 0xb9, 0x66, 0xdd, 0x74, 0x4d, 0x41, 0xfa, 0x29, 0x22, 0x8d, 0xed, 0xbe, 0xc9, 0x3a,
0x03, 0x7a, 0x76, 0xba, 0xfb, 0xc7, 0x5e, 0x64, 0x3d, 0x4d, 0xf5, 0xc4, 0x6d, 0x3c, 0xb6, 0x3e,
0xb4, 0xa2, 0x3b, 0x97, 0x87, 0x8c, 0x18, 0xf8, 0x7a, 0x0a, 0x68, 0xe8, 0x6b, 0x0a, 0xae, 0x82,
0x4a, 0x5b, 0xab, 0xb8, 0x7e, 0xf9, 0x64, 0x59, 0x8b, 0xe1, 0x3a, 0x6a, 0x63, 0x7d, 0xb7, 0x5c,
0x35, 0xf6, 0x2a, 0xbb, 0xeb, 0x9b, 0x65, 0x4d, 0x15, 0x0b, 0xf6, 0x6f, 0xc6, 0x60, 0x22, 0xf8,
0xee, 0xa5, 0x7f, 0x14, 0x1e, 0xe2, 0x0b, 0x25, 0x0e, 0x72, 0xab, 0xaf, 0x35, 0x3a, 0x24, 0x17,
0x5b, 0x26, 0x9d, 0x17, 0xbd, 0x68, 0x38, 0xc5, 0xa4, 0x76, 0x90, 0xfb, 0x42, 0xa3, 0x83, 0x33,
0xad, 0x65, 0xba, 0xfa, 0x06, 0xcc, 0x5b, 0x76, 0xd5, 0x71, 0x4d, 0xab, 0x6e, 0x76, 0xea, 0x55,
0x7f, 0x89, 0xaa, 0x6a, 0xd6, 0x6a, 0xc8, 0x71, 0x6c, 0x3a, 0x07, 0x7a, 0x2c, 0x67, 0x2c, 0x7b,
0x87, 0x09, 0xfb, 0x93, 0x43, 0x81, 0x89, 0x4a, 0x91, 0xab, 0xf6, 0x8b, 0xdc, 0x47, 0x20, 0xd5,
0x32, 0xdb, 0x55, 0x64, 0xb9, 0x9d, 0x63, 0x52, 0x71, 0x27, 0x8d, 0x64, 0xcb, 0x6c, 0x97, 0xf1,
0xf5, 0x4f, 0xe6, 0xc5, 0xe7, 0xbf, 0xaa, 0x90, 0x11, 0xab, 0x6e, 0xfc, 0x12, 0x53, 0x23, 0x13,
0x94, 0x42, 0x86, 0xb0, 0xc7, 0x06, 0xd6, 0xe8, 0x4b, 0x25, 0x3c, 0x73, 0xe5, 0x47, 0x69, 0x2d,
0x6c, 0x50, 0x24, 0xae, 0x1a, 0x70, 0x68, 0x21, 0x5a, 0x7b, 0x24, 0x0d, 0x76, 0xa5, 0xdf, 0x86,
0xd1, 0x97, 0x1d, 0xc2, 0x3d, 0x4a, 0xb8, 0xcf, 0x0f, 0xe6, 0xbe, 0xb3, 0x43, 0xc8, 0x53, 0x77,
0x76, 0xaa, 0x95, 0x2d, 0x63, 0xb3, 0xb0, 0x61, 0x30, 0xb8, 0xfe, 0x30, 0xc4, 0x9b, 0xe6, 0x1b,
0xc7, 0xc1, 0x39, 0x8e, 0x34, 0x0d, 0x6b, 0xf8, 0x87, 0x21, 0xfe, 0x1a, 0x32, 0x5f, 0x09, 0xce,
0x2c, 0xa4, 0xe9, 0x43, 0x0c, 0xfd, 0x8b, 0x90, 0x20, 0xf6, 0xd2, 0x01, 0x98, 0xc5, 0xb4, 0x11,
0x3d, 0x09, 0xf1, 0xd2, 0x96, 0x81, 0xc3, 0x5f, 0x83, 0x0c, 0x6d, 0xad, 0x6e, 0xaf, 0x97, 0x4b,
0x65, 0x2d, 0x96, 0xbb, 0x0a, 0xa3, 0xd4, 0x08, 0x38, 0x35, 0x3c, 0x33, 0x68, 0x23, 0xec, 0x92,
0x71, 0x28, 0xbc, 0x77, 0x6f, 0xb3, 0x58, 0x36, 0xb4, 0x98, 0xe8, 0x5e, 0x07, 0x32, 0x62, 0xc1,
0xfd, 0x93, 0x89, 0xa9, 0x6f, 0x28, 0x90, 0x16, 0x0a, 0x68, 0x5c, 0xf9, 0x98, 0xcd, 0xa6, 0xfd,
0x5a, 0xd5, 0x6c, 0x36, 0x4c, 0x87, 0x05, 0x05, 0x90, 0xa6, 0x02, 0x6e, 0x19, 0xd6, 0x69, 0x3f,
0x11, 0xe5, 0xbf, 0xa0, 0x80, 0x26, 0xd7, 0xae, 0x92, 0x82, 0xca, 0x4f, 0x55, 0xc1, 0xcf, 0x29,
0x30, 0x11, 0x2c, 0x58, 0x25, 0xf5, 0xce, 0xfd, 0x54, 0xd5, 0xfb, 0x4e, 0x0c, 0xc6, 0x03, 0x65,
0xea, 0xb0, 0xda, 0xfd, 0x3c, 0x4c, 0x35, 0xea, 0xa8, 0xd5, 0xb6, 0x5d, 0x64, 0xd5, 0x8e, 0xab,
0x4d, 0xf4, 0x2a, 0x6a, 0x66, 0x73, 0x64, 0xa0, 0xb8, 0x38, 0xb8, 0x10, 0x5e, 0x5a, 0xf7, 0x71,
0x1b, 0x18, 0x96, 0x9f, 0x5e, 0x5f, 0x2d, 0x6f, 0x6e, 0x6f, 0xed, 0x96, 0x2b, 0xa5, 0xbb, 0xd5,
0xbd, 0xca, 0xc7, 0x2a, 0x5b, 0x2f, 0x54, 0x0c, 0xad, 0x21, 0x89, 0x7d, 0x88, 0xa9, 0xbe, 0x0d,
0x9a, 0xac, 0x94, 0xfe, 0x10, 0x84, 0xa9, 0xa5, 0x8d, 0xe8, 0xd3, 0x30, 0x59, 0xd9, 0xaa, 0xee,
0xac, 0xaf, 0x96, 0xab, 0xe5, 0xb5, 0xb5, 0x72, 0x69, 0x77, 0x87, 0x2e, 0x6d, 0x78, 0xd2, 0xbb,
0xc1, 0xa4, 0xfe, 0xac, 0x0a, 0xd3, 0x21, 0x9a, 0xe8, 0x05, 0xf6, 0x52, 0x42, 0xdf, 0x93, 0x9e,
0x1e, 0x46, 0xfb, 0x25, 0x5c, 0x15, 0x6c, 0x9b, 0x1d, 0x97, 0xbd, 0xc3, 0x3c, 0x09, 0xd8, 0x4a,
0x96, 0xdb, 0x38, 0x68, 0xa0, 0x0e, 0x5b, 0x09, 0xa2, 0x6f, 0x2a, 0x93, 0x7e, 0x3b, 0x5d, 0x0c,
0x7a, 0x0a, 0xf4, 0xb6, 0xed, 0x34, 0xdc, 0xc6, 0xab, 0xa8, 0xda, 0xb0, 0xf8, 0xb2, 0x11, 0x7e,
0x73, 0x89, 0x1b, 0x1a, 0xef, 0x59, 0xb7, 0x5c, 0x4f, 0xda, 0x42, 0x87, 0xa6, 0x24, 0x8d, 0x07,
0x70, 0xd5, 0xd0, 0x78, 0x8f, 0x27, 0x7d, 0x0e, 0x32, 0x75, 0xbb, 0x8b, 0xcb, 0x39, 0x2a, 0x87,
0xe7, 0x0b, 0xc5, 0x48, 0xd3, 0x36, 0x4f, 0x84, 0x15, 0xea, 0xfe, 0x7a, 0x55, 0xc6, 0x48, 0xd3,
0x36, 0x2a, 0xf2, 0x04, 0x4c, 0x9a, 0x87, 0x87, 0x1d, 0x4c, 0xce, 0x89, 0xe8, 0xab, 0xc7, 0x84,
0xd7, 0x4c, 0x04, 0x67, 0xef, 0x40, 0x92, 0xdb, 0x01, 0x4f, 0xc9, 0xd8, 0x12, 0xd5, 0x36, 0x7d,
0x9f, 0x8e, 0x2d, 0xa4, 0x8c, 0xa4, 0xc5, 0x3b, 0xcf, 0x41, 0xa6, 0xe1, 0x54, 0xfd, 0xe5, 0xf7,
0xd8, 0xd9, 0xd8, 0x42, 0xd2, 0x48, 0x37, 0x1c, 0x6f, 0xe9, 0x32, 0xf7, 0xe5, 0x18, 0x4c, 0x04,
0xb7, 0x0f, 0xf4, 0x55, 0x48, 0x36, 0xed, 0x9a, 0x49, 0x42, 0x8b, 0xee, 0x5d, 0x2d, 0x44, 0xec,
0x38, 0x2c, 0x6d, 0x30, 0x79, 0xc3, 0x43, 0xce, 0xfe, 0x47, 0x05, 0x92, 0xbc, 0x59, 0x9f, 0x81,
0x78, 0xdb, 0x74, 0x8f, 0x08, 0x5d, 0xa2, 0x18, 0xd3, 0x14, 0x83, 0x5c, 0xe3, 0x76, 0xa7, 0x6d,
0x5a, 0x24, 0x04, 0x58, 0x3b, 0xbe, 0xc6, 0x7e, 0x6d, 0x22, 0xb3, 0x4e, 0xde, 0x6b, 0xec, 0x56,
0x0b, 0x59, 0xae, 0xc3, 0xfd, 0xca, 0xda, 0x4b, 0xac, 0x59, 0xff, 0x08, 0x4c, 0xb9, 0x1d, 0xb3,
0xd1, 0x0c, 0xc8, 0xc6, 0x89, 0xac, 0xc6, 0x3b, 0x3c, 0xe1, 0x3c, 0x3c, 0xcc, 0x79, 0xeb, 0xc8,
0x35, 0x6b, 0x47, 0xa8, 0xee, 0x83, 0x46, 0xc9, 0xfa, 0xc5, 0x43, 0x4c, 0x60, 0x95, 0xf5, 0x73,
0x6c, 0xee, 0x0f, 0x14, 0x98, 0xe2, 0x6f, 0x62, 0x75, 0xcf, 0x58, 0x9b, 0x00, 0xa6, 0x65, 0xd9,
0xae, 0x68, 0xae, 0xde, 0x50, 0xee, 0xc1, 0x2d, 0x15, 0x3c, 0x90, 0x21, 0x10, 0xcc, 0xb6, 0x00,
0xfc, 0x9e, 0xbe, 0x66, 0x9b, 0x87, 0x34, 0xdb, 0x1b, 0x22, 0x1b, 0x8c, 0xf4, 0xdd, 0x1d, 0x68,
0x13, 0x7e, 0x65, 0xd3, 0x4f, 0x41, 0x62, 0x1f, 0x1d, 0x36, 0x2c, 0xb6, 0xe2, 0x4b, 0x2f, 0xf8,
0x0a, 0x4b, 0xdc, 0x5b, 0x61, 0x29, 0xbe, 0x04, 0xd3, 0x35, 0xbb, 0x25, 0xab, 0x5b, 0xd4, 0xa4,
0xf5, 0x03, 0xe7, 0x79, 0xe5, 0x93, 0xe0, 0x97, 0x98, 0x3f, 0x52, 0x94, 0x5f, 0x8d, 0xa9, 0xb7,
0xb7, 0x8b, 0xbf, 0x15, 0x9b, 0xbd, 0x4d, 0xa1, 0xdb, 0xfc, 0x49, 0x0d, 0x74, 0xd0, 0x44, 0x35,
0xac, 0x3d, 0xfc, 0xc6, 0x47, 0xe0, 0xe9, 0xc3, 0x86, 0x7b, 0xd4, 0xdd, 0x5f, 0xaa, 0xd9, 0xad,
0x8b, 0x87, 0xf6, 0xa1, 0xed, 0xef, 0xa9, 0xe2, 0x2b, 0x72, 0x41, 0x3e, 0xb1, 0x7d, 0xd5, 0x94,
0xd7, 0x3a, 0x1b, 0xb9, 0x09, 0x9b, 0xaf, 0xc0, 0x34, 0x13, 0xae, 0x92, 0x8d, 0x1d, 0xfa, 0x7a,
0xa2, 0x0f, 0x5c, 0x1c, 0xcb, 0xfe, 0xf6, 0xf7, 0xc8, 0x74, 0x6d, 0x4c, 0x31, 0x28, 0xee, 0xa3,
0x6f, 0x30, 0x79, 0x03, 0x4e, 0x07, 0xf8, 0x68, 0x6a, 0xa2, 0x4e, 0x04, 0xe3, 0x37, 0x19, 0xe3,
0xb4, 0xc0, 0xb8, 0xc3, 0xa0, 0xf9, 0x12, 0x8c, 0x9f, 0x84, 0xeb, 0xdf, 0x31, 0xae, 0x0c, 0x12,
0x49, 0x6e, 0xc3, 0x24, 0x21, 0xa9, 0x75, 0x1d, 0xd7, 0x6e, 0x91, 0x71, 0x6f, 0x30, 0xcd, 0xbf,
0xff, 0x1e, 0xcd, 0x95, 0x09, 0x0c, 0x2b, 0x79, 0xa8, 0x7c, 0x1e, 0xc8, 0x5e, 0x56, 0x1d, 0xd5,
0x9a, 0x11, 0x0c, 0xf7, 0x98, 0x22, 0x9e, 0x7c, 0xfe, 0x13, 0x70, 0x0a, 0x7f, 0x26, 0xc3, 0x92,
0xa8, 0x49, 0xf4, 0x4a, 0x5a, 0xf6, 0x0f, 0x3e, 0x45, 0xd3, 0x71, 0xda, 0x23, 0x10, 0x74, 0x12,
0xbc, 0x78, 0x88, 0x5c, 0x17, 0x75, 0x9c, 0xaa, 0xd9, 0x0c, 0x53, 0x4f, 0x58, 0x8a, 0xc8, 0xfe,
0xca, 0x0f, 0x82, 0x5e, 0xbc, 0x4d, 0x91, 0x85, 0x66, 0x33, 0xbf, 0x07, 0x0f, 0x85, 0x44, 0xc5,
0x10, 0x9c, 0x9f, 0x65, 0x9c, 0xa7, 0x7a, 0x22, 0x03, 0xd3, 0x6e, 0x03, 0x6f, 0xf7, 0x7c, 0x39,
0x04, 0xe7, 0x3f, 0x64, 0x9c, 0x3a, 0xc3, 0x72, 0x97, 0x62, 0xc6, 0x3b, 0x30, 0xf5, 0x2a, 0xea,
0xec, 0xdb, 0x0e, 0x5b, 0xfe, 0x19, 0x82, 0xee, 0x73, 0x8c, 0x6e, 0x92, 0x01, 0xc9, 0x7a, 0x10,
0xe6, 0xba, 0x09, 0xc9, 0x03, 0xb3, 0x86, 0x86, 0xa0, 0xf8, 0x3c, 0xa3, 0x18, 0xc3, 0xf2, 0x18,
0x5a, 0x80, 0xcc, 0xa1, 0xcd, 0x66, 0xa6, 0x68, 0xf8, 0x17, 0x18, 0x3c, 0xcd, 0x31, 0x8c, 0xa2,
0x6d, 0xb7, 0xbb, 0x4d, 0x3c, 0x6d, 0x45, 0x53, 0x7c, 0x91, 0x53, 0x70, 0x0c, 0xa3, 0x38, 0x81,
0x59, 0xdf, 0xe2, 0x14, 0x8e, 0x60, 0xcf, 0xe7, 0x20, 0x6d, 0x5b, 0xcd, 0x63, 0xdb, 0x1a, 0x46,
0x89, 0x2f, 0x31, 0x06, 0x60, 0x10, 0x4c, 0x70, 0x0b, 0x52, 0xc3, 0x3a, 0xe2, 0xd7, 0x7f, 0xc0,
0xd3, 0x83, 0x7b, 0xe0, 0x36, 0x4c, 0xf2, 0x01, 0xaa, 0x61, 0x5b, 0x43, 0x50, 0xfc, 0x06, 0xa3,
0x98, 0x10, 0x60, 0xec, 0x31, 0x5c, 0xe4, 0xb8, 0x87, 0x68, 0x18, 0x92, 0x2f, 0xf3, 0xc7, 0x60,
0x10, 0x66, 0xca, 0x7d, 0x64, 0xd5, 0x8e, 0x86, 0x63, 0xf8, 0x0a, 0x37, 0x25, 0xc7, 0x60, 0x8a,
0x12, 0x8c, 0xb7, 0xcc, 0x8e, 0x73, 0x64, 0x36, 0x87, 0x72, 0xc7, 0x6f, 0x32, 0x8e, 0x8c, 0x07,
0x62, 0x16, 0xe9, 0x5a, 0x27, 0xa1, 0xf9, 0x2d, 0x6e, 0x11, 0x01, 0xc6, 0x52, 0xcf, 0x71, 0xc9,
0x5a, 0xd9, 0x49, 0xd8, 0xfe, 0x11, 0x4f, 0x3d, 0x8a, 0xdd, 0x14, 0x19, 0x6f, 0x41, 0xca, 0x69,
0xbc, 0x31, 0x14, 0xcd, 0x3f, 0xe6, 0x9e, 0x26, 0x00, 0x0c, 0xbe, 0x0b, 0x0f, 0x87, 0x4e, 0x13,
0x43, 0x90, 0xfd, 0x13, 0x46, 0x36, 0x13, 0x32, 0x55, 0xb0, 0x21, 0xe1, 0xa4, 0x94, 0xff, 0x94,
0x0f, 0x09, 0x48, 0xe2, 0xda, 0xc6, 0xef, 0x0a, 0x8e, 0x79, 0x70, 0x32, 0xab, 0xfd, 0x33, 0x6e,
0x35, 0x8a, 0x0d, 0x58, 0x6d, 0x17, 0x66, 0x18, 0xe3, 0xc9, 0xfc, 0xfa, 0x55, 0x3e, 0xb0, 0x52,
0xf4, 0x5e, 0xd0, 0xbb, 0x2f, 0xc1, 0xac, 0x67, 0x4e, 0x5e, 0x94, 0x3a, 0xd5, 0x96, 0xd9, 0x1e,
0x82, 0xf9, 0xb7, 0x19, 0x33, 0x1f, 0xf1, 0xbd, 0xaa, 0xd6, 0xd9, 0x34, 0xdb, 0x98, 0xfc, 0x45,
0xc8, 0x72, 0xf2, 0xae, 0xd5, 0x41, 0x35, 0xfb, 0xd0, 0x6a, 0xbc, 0x81, 0xea, 0x43, 0x50, 0x7f,
0x4d, 0x72, 0xd5, 0x9e, 0x00, 0xc7, 0xcc, 0xeb, 0xa0, 0x79, 0xb5, 0x4a, 0xb5, 0xd1, 0x6a, 0xdb,
0x1d, 0x37, 0x82, 0xf1, 0x9f, 0x73, 0x4f, 0x79, 0xb8, 0x75, 0x02, 0xcb, 0x97, 0x61, 0x82, 0x5c,
0x0e, 0x1b, 0x92, 0xbf, 0xc3, 0x88, 0xc6, 0x7d, 0x14, 0x1b, 0x38, 0x6a, 0x76, 0xab, 0x6d, 0x76,
0x86, 0x19, 0xff, 0xfe, 0x05, 0x1f, 0x38, 0x18, 0x84, 0x0d, 0x1c, 0xee, 0x71, 0x1b, 0xe1, 0xd9,
0x7e, 0x08, 0x86, 0xaf, 0xf3, 0x81, 0x83, 0x63, 0x18, 0x05, 0x2f, 0x18, 0x86, 0xa0, 0xf8, 0x97,
0x9c, 0x82, 0x63, 0x30, 0xc5, 0xc7, 0xfd, 0x89, 0xb6, 0x83, 0x0e, 0x1b, 0x8e, 0xdb, 0xa1, 0xa5,
0xf0, 0x60, 0xaa, 0xdf, 0xfd, 0x41, 0xb0, 0x08, 0x33, 0x04, 0x28, 0x1e, 0x89, 0xd8, 0x12, 0x2a,
0x79, 0x53, 0x8a, 0x56, 0xec, 0xf7, 0xf8, 0x48, 0x24, 0xc0, 0xb0, 0x6e, 0x42, 0x85, 0x88, 0xcd,
0x5e, 0xc3, 0xef, 0x07, 0x43, 0xd0, 0x7d, 0x43, 0x52, 0x6e, 0x87, 0x63, 0x31, 0xa7, 0x50, 0xff,
0x74, 0xad, 0x57, 0xd0, 0xf1, 0x50, 0xd1, 0xf9, 0xaf, 0xa4, 0xfa, 0x67, 0x8f, 0x22, 0xe9, 0x18,
0x32, 0x29, 0xd5, 0x53, 0x7a, 0xd4, 0x29, 0xa0, 0xec, 0x5f, 0x7c, 0x8f, 0x3d, 0x6f, 0xb0, 0x9c,
0xca, 0x6f, 0xe0, 0x20, 0x0f, 0x16, 0x3d, 0xd1, 0x64, 0x9f, 0x7a, 0xcf, 0x8b, 0xf3, 0x40, 0xcd,
0x93, 0x5f, 0x83, 0xf1, 0x40, 0xc1, 0x13, 0x4d, 0xf5, 0x97, 0x18, 0x55, 0x46, 0xac, 0x77, 0xf2,
0x57, 0x21, 0x8e, 0x8b, 0x97, 0x68, 0xf8, 0x5f, 0x66, 0x70, 0x22, 0x9e, 0x7f, 0x06, 0x92, 0xbc,
0x68, 0x89, 0x86, 0xfe, 0x22, 0x83, 0x7a, 0x10, 0x0c, 0xe7, 0x05, 0x4b, 0x34, 0xfc, 0xaf, 0x70,
0x38, 0x87, 0x60, 0xf8, 0xf0, 0x26, 0x7c, 0xfb, 0xaf, 0xc5, 0xd9, 0xa4, 0xc3, 0x6d, 0x77, 0x0b,
0xc6, 0x58, 0xa5, 0x12, 0x8d, 0xfe, 0x25, 0x76, 0x73, 0x8e, 0xc8, 0x5f, 0x87, 0xc4, 0x90, 0x06,
0xff, 0x1b, 0x0c, 0x4a, 0xe5, 0xf3, 0x25, 0x48, 0x0b, 0xd5, 0x49, 0x34, 0xfc, 0x6f, 0x32, 0xb8,
0x88, 0xc2, 0xaa, 0xb3, 0xea, 0x24, 0x9a, 0xe0, 0x6f, 0x71, 0xd5, 0x19, 0x02, 0x9b, 0x8d, 0x17,
0x26, 0xd1, 0xe8, 0xbf, 0xcd, 0xad, 0xce, 0x21, 0xf9, 0xe7, 0x20, 0xe5, 0x4d, 0x36, 0xd1, 0xf8,
0xbf, 0xc3, 0xf0, 0x3e, 0x06, 0x5b, 0x40, 0x98, 0xec, 0xa2, 0x29, 0xfe, 0x2e, 0xb7, 0x80, 0x80,
0xc2, 0x69, 0x24, 0x17, 0x30, 0xd1, 0x4c, 0xbf, 0xcc, 0xd3, 0x48, 0xaa, 0x5f, 0xb0, 0x37, 0xc9,
0x98, 0x1f, 0x4d, 0xf1, 0xf7, 0xb8, 0x37, 0x89, 0x3c, 0x56, 0x43, 0xae, 0x08, 0xa2, 0x39, 0xfe,
0x01, 0x57, 0x43, 0x2a, 0x08, 0xf2, 0xdb, 0xa0, 0xf7, 0x56, 0x03, 0xd1, 0x7c, 0x9f, 0x61, 0x7c,
0x53, 0x3d, 0xc5, 0x40, 0xfe, 0x05, 0x98, 0x09, 0xaf, 0x04, 0xa2, 0x59, 0x7f, 0xe5, 0x3d, 0xe9,
0xdd, 0x4d, 0x2c, 0x04, 0xf2, 0xbb, 0xfe, 0x94, 0x22, 0x56, 0x01, 0xd1, 0xb4, 0x9f, 0x7d, 0x2f,
0x38, 0x70, 0x8b, 0x45, 0x40, 0xbe, 0x00, 0xe0, 0x4f, 0xc0, 0xd1, 0x5c, 0x9f, 0x63, 0x5c, 0x02,
0x08, 0xa7, 0x06, 0x9b, 0x7f, 0xa3, 0xf1, 0x9f, 0xe7, 0xa9, 0xc1, 0x10, 0x38, 0x35, 0xf8, 0xd4,
0x1b, 0x8d, 0xfe, 0x02, 0x4f, 0x0d, 0x0e, 0xc1, 0x91, 0x2d, 0xcc, 0x6e, 0xd1, 0x0c, 0x5f, 0xe2,
0x91, 0x2d, 0xa0, 0xf2, 0x15, 0x98, 0xea, 0x99, 0x10, 0xa3, 0xa9, 0x7e, 0x95, 0x51, 0x69, 0xf2,
0x7c, 0x28, 0x4e, 0x5e, 0x6c, 0x32, 0x8c, 0x66, 0xfb, 0x35, 0x69, 0xf2, 0x62, 0x73, 0x61, 0xfe,
0x16, 0x24, 0xad, 0x6e, 0xb3, 0x89, 0x93, 0x47, 0x1f, 0x7c, 0x72, 0x2f, 0xfb, 0xdf, 0x7f, 0xcc,
0xac, 0xc3, 0x01, 0xf9, 0xab, 0x90, 0x40, 0xad, 0x7d, 0x54, 0x8f, 0x42, 0xfe, 0x8f, 0x1f, 0xf3,
0x01, 0x13, 0x4b, 0xe7, 0x9f, 0x03, 0xa0, 0x4b, 0x23, 0x64, 0xdb, 0x2f, 0x02, 0xfb, 0x3f, 0x7f,
0xcc, 0xce, 0xd4, 0xf8, 0x10, 0x9f, 0x80, 0x9e, 0xd0, 0x19, 0x4c, 0xf0, 0x83, 0x20, 0x01, 0xf1,
0xc8, 0x4d, 0x18, 0x7b, 0xd9, 0xb1, 0x2d, 0xd7, 0x3c, 0x8c, 0x42, 0xff, 0x2f, 0x86, 0xe6, 0xf2,
0xd8, 0x60, 0x2d, 0xbb, 0x83, 0x5c, 0xf3, 0xd0, 0x89, 0xc2, 0xfe, 0x6f, 0x86, 0xf5, 0x00, 0x18,
0x5c, 0x33, 0x1d, 0x77, 0x98, 0xe7, 0xfe, 0x3f, 0x1c, 0xcc, 0x01, 0x58, 0x69, 0xfc, 0xf9, 0x15,
0x74, 0x1c, 0x85, 0xfd, 0x21, 0x57, 0x9a, 0xc9, 0xe7, 0x9f, 0x81, 0x14, 0xfe, 0x48, 0x0f, 0xca,
0x45, 0x80, 0xff, 0x2f, 0x03, 0xfb, 0x08, 0x7c, 0x67, 0xc7, 0xad, 0xbb, 0x8d, 0x68, 0x63, 0xff,
0x11, 0xf3, 0x34, 0x97, 0xcf, 0x17, 0x20, 0xed, 0xb8, 0xf5, 0x7a, 0x97, 0xd5, 0xa7, 0x11, 0xf0,
0xff, 0xf7, 0x63, 0x6f, 0xc9, 0xc2, 0xc3, 0x60, 0x6f, 0xbf, 0xf6, 0x8a, 0xdb, 0xb6, 0xc9, 0x36,
0x47, 0x14, 0xc3, 0x7b, 0x8c, 0x41, 0x80, 0x14, 0xcb, 0xe1, 0xcb, 0xb7, 0x70, 0xdb, 0xbe, 0x6d,
0xd3, 0x85, 0xdb, 0x4f, 0xe6, 0xa2, 0x57, 0x60, 0xe1, 0xbf, 0x35, 0xe1, 0x7a, 0x5f, 0x31, 0x3c,
0x15, 0x5f, 0xac, 0xd9, 0xad, 0x7d, 0xdb, 0xb9, 0xb8, 0x6f, 0xbb, 0x47, 0x17, 0xdd, 0x23, 0x84,
0xdb, 0xd8, 0x92, 0x6d, 0x1c, 0x7f, 0x9e, 0x3d, 0xd9, 0x3a, 0x2f, 0xd9, 0xc5, 0xaf, 0x34, 0xf0,
0xa3, 0x55, 0xc8, 0x46, 0x8a, 0x7e, 0x06, 0x46, 0xc9, 0xc3, 0x5e, 0x26, 0x9b, 0x95, 0x4a, 0x31,
0x7e, 0xef, 0x9d, 0xf9, 0x11, 0x83, 0xb5, 0x79, 0xbd, 0xcb, 0x64, 0xa5, 0x3b, 0x16, 0xe8, 0x5d,
0xf6, 0x7a, 0xaf, 0xd0, 0xc5, 0xee, 0x40, 0xef, 0x15, 0xaf, 0x77, 0x85, 0x2c, 0x7b, 0xab, 0x81,
0xde, 0x15, 0xaf, 0xf7, 0x2a, 0xd9, 0xda, 0x19, 0x0f, 0xf4, 0x5e, 0xf5, 0x7a, 0xaf, 0x91, 0x0d,
0x9d, 0x78, 0xa0, 0xf7, 0x9a, 0xd7, 0x7b, 0x9d, 0xec, 0xe5, 0x4c, 0x05, 0x7a, 0xaf, 0x7b, 0xbd,
0x37, 0xc8, 0x1e, 0x8e, 0x1e, 0xe8, 0xbd, 0xe1, 0xf5, 0xde, 0x24, 0x27, 0xaf, 0xc6, 0x02, 0xbd,
0x37, 0xf5, 0x39, 0x18, 0xa3, 0x4f, 0x7e, 0x89, 0x6c, 0xf8, 0x4f, 0xb2, 0x6e, 0xde, 0xe8, 0xf7,
0x5f, 0x26, 0xa7, 0xac, 0x46, 0x83, 0xfd, 0x97, 0xfd, 0xfe, 0x65, 0xf2, 0x85, 0x0f, 0x2d, 0xd8,
0xbf, 0xec, 0xf7, 0x5f, 0xc9, 0x8e, 0x93, 0x93, 0x66, 0x81, 0xfe, 0x2b, 0x7e, 0xff, 0x4a, 0x76,
0x02, 0x67, 0x4c, 0xb0, 0x7f, 0xc5, 0xef, 0xbf, 0x9a, 0x9d, 0x3c, 0xab, 0x2c, 0x64, 0x82, 0xfd,
0x57, 0x73, 0xbf, 0x40, 0xdc, 0x6b, 0xf9, 0xee, 0x9d, 0x09, 0xba, 0xd7, 0x73, 0xec, 0x4c, 0xd0,
0xb1, 0x9e, 0x4b, 0x67, 0x82, 0x2e, 0xf5, 0x9c, 0x39, 0x13, 0x74, 0xa6, 0xe7, 0xc6, 0x99, 0xa0,
0x1b, 0x3d, 0x07, 0xce, 0x04, 0x1d, 0xe8, 0xb9, 0x6e, 0x26, 0xe8, 0x3a, 0xcf, 0x69, 0x33, 0x41,
0xa7, 0x79, 0xee, 0x9a, 0x09, 0xba, 0xcb, 0x73, 0x54, 0x56, 0x72, 0x94, 0xef, 0xa2, 0xac, 0xe4,
0x22, 0xdf, 0x39, 0x59, 0xc9, 0x39, 0xbe, 0x5b, 0xb2, 0x92, 0x5b, 0x7c, 0x87, 0x64, 0x25, 0x87,
0xf8, 0xae, 0xc8, 0x4a, 0xae, 0xf0, 0x9d, 0xc0, 0x72, 0xcc, 0x40, 0xed, 0x90, 0x1c, 0x53, 0x07,
0xe6, 0x98, 0x3a, 0x30, 0xc7, 0xd4, 0x81, 0x39, 0xa6, 0x0e, 0xcc, 0x31, 0x75, 0x60, 0x8e, 0xa9,
0x03, 0x73, 0x4c, 0x1d, 0x98, 0x63, 0xea, 0xc0, 0x1c, 0x53, 0x07, 0xe7, 0x98, 0x1a, 0x91, 0x63,
0x6a, 0x44, 0x8e, 0xa9, 0x11, 0x39, 0xa6, 0x46, 0xe4, 0x98, 0x1a, 0x91, 0x63, 0x6a, 0xdf, 0x1c,
0xf3, 0xdd, 0x3b, 0x13, 0x74, 0x6f, 0x68, 0x8e, 0xa9, 0x7d, 0x72, 0x4c, 0xed, 0x93, 0x63, 0x6a,
0x9f, 0x1c, 0x53, 0xfb, 0xe4, 0x98, 0xda, 0x27, 0xc7, 0xd4, 0x3e, 0x39, 0xa6, 0xf6, 0xc9, 0x31,
0xb5, 0x5f, 0x8e, 0xa9, 0x7d, 0x73, 0x4c, 0xed, 0x9b, 0x63, 0x6a, 0xdf, 0x1c, 0x53, 0xfb, 0xe6,
0x98, 0xda, 0x37, 0xc7, 0x54, 0x31, 0xc7, 0xfe, 0xb5, 0x0a, 0x3a, 0xcd, 0xb1, 0x6d, 0x72, 0x64,
0x8c, 0xb9, 0x62, 0x4e, 0xca, 0xb4, 0x51, 0xec, 0x3a, 0xcd, 0x77, 0xc9, 0x9c, 0x94, 0x6b, 0xc1,
0xfe, 0x65, 0xaf, 0x9f, 0x67, 0x5b, 0xb0, 0xff, 0x8a, 0xd7, 0xcf, 0xf3, 0x2d, 0xd8, 0xbf, 0xe2,
0xf5, 0xf3, 0x8c, 0x0b, 0xf6, 0x5f, 0xf5, 0xfa, 0x79, 0xce, 0x05, 0xfb, 0xaf, 0x79, 0xfd, 0x3c,
0xeb, 0x82, 0xfd, 0xd7, 0xbd, 0x7e, 0x9e, 0x77, 0xc1, 0xfe, 0x1b, 0x5e, 0x3f, 0xcf, 0xbc, 0x60,
0xff, 0x4d, 0xfd, 0xac, 0x9c, 0x7b, 0x5c, 0xc0, 0x73, 0xed, 0x59, 0x39, 0xfb, 0x24, 0x89, 0xcb,
0xbe, 0x04, 0xcf, 0x3f, 0x49, 0x62, 0xd9, 0x97, 0xe0, 0x19, 0x28, 0x49, 0x5c, 0xc9, 0x7d, 0x9a,
0xb8, 0xcf, 0x92, 0xdd, 0x37, 0x2b, 0xb9, 0x2f, 0x26, 0xb8, 0x6e, 0x56, 0x72, 0x5d, 0x4c, 0x70,
0xdb, 0xac, 0xe4, 0xb6, 0x98, 0xe0, 0xb2, 0x59, 0xc9, 0x65, 0x31, 0xc1, 0x5d, 0xb3, 0x92, 0xbb,
0x62, 0x82, 0xab, 0x66, 0x25, 0x57, 0xc5, 0x04, 0x37, 0xcd, 0x4a, 0x6e, 0x8a, 0x09, 0x2e, 0x9a,
0x95, 0x5c, 0x14, 0x13, 0xdc, 0x33, 0x2b, 0xb9, 0x27, 0x26, 0xb8, 0xe6, 0x8c, 0xec, 0x9a, 0x98,
0xe8, 0x96, 0x33, 0xb2, 0x5b, 0x62, 0xa2, 0x4b, 0xce, 0xc8, 0x2e, 0x89, 0x89, 0xee, 0x38, 0x23,
0xbb, 0x23, 0x26, 0xba, 0xe2, 0x8f, 0x63, 0xbc, 0x22, 0xdc, 0x71, 0x3b, 0xdd, 0x9a, 0xfb, 0xbe,
0x2a, 0xc2, 0x4b, 0x81, 0xf2, 0x21, 0xbd, 0xac, 0x2f, 0x91, 0x82, 0x55, 0xac, 0x38, 0xa5, 0x19,
0xec, 0x52, 0xa0, 0xb0, 0x10, 0x10, 0x56, 0x38, 0x62, 0xe5, 0x7d, 0xd5, 0x86, 0x97, 0x02, 0x65,
0x46, 0xb4, 0x7e, 0x37, 0x3e, 0xf4, 0x8a, 0xed, 0xed, 0x18, 0xaf, 0xd8, 0x98, 0xf9, 0x4f, 0x5a,
0xb1, 0x2d, 0x46, 0x9b, 0xdc, 0x33, 0xf6, 0x62, 0xb4, 0xb1, 0x7b, 0x66, 0x9d, 0x61, 0x2b, 0xb8,
0xc5, 0x68, 0xd3, 0x7a, 0x46, 0xfd, 0x60, 0xeb, 0x2d, 0x16, 0xc1, 0x06, 0x6a, 0x87, 0x44, 0xf0,
0x49, 0xeb, 0xad, 0x4b, 0x81, 0xa1, 0xe4, 0xa4, 0x11, 0xac, 0x9e, 0x38, 0x82, 0x4f, 0x5a, 0x79,
0x5d, 0x0a, 0x0c, 0x2f, 0x27, 0x8e, 0xe0, 0x0f, 0xa1, 0x1e, 0x62, 0x11, 0xec, 0x9b, 0xff, 0xa4,
0xf5, 0xd0, 0x62, 0xb4, 0xc9, 0x43, 0x23, 0x58, 0x3d, 0x41, 0x04, 0x0f, 0x53, 0x1f, 0x2d, 0x46,
0x9b, 0x36, 0x3c, 0x82, 0xdf, 0x77, 0x35, 0xf3, 0x45, 0x05, 0xa6, 0x2a, 0x8d, 0x7a, 0xb9, 0xb5,
0x8f, 0xea, 0x75, 0x54, 0x67, 0x76, 0xbc, 0x14, 0x18, 0x09, 0xfa, 0xb8, 0xfa, 0x5b, 0xef, 0xcc,
0xfb, 0x16, 0xbe, 0x0a, 0x49, 0x6a, 0xd3, 0x4b, 0x97, 0xb2, 0xf7, 0x94, 0x88, 0x11, 0xce, 0x13,
0xd5, 0xcf, 0x71, 0xd8, 0xe5, 0x4b, 0xd9, 0xff, 0xa4, 0x08, 0xa3, 0x9c, 0xd7, 0x9c, 0xfb, 0x65,
0xa2, 0xa1, 0xf5, 0xbe, 0x35, 0xbc, 0x38, 0x94, 0x86, 0x82, 0x6e, 0x8f, 0xf4, 0xe8, 0x26, 0x68,
0xd5, 0x85, 0xc9, 0x4a, 0xa3, 0x5e, 0x21, 0x3f, 0x35, 0x30, 0x8c, 0x4a, 0x54, 0x46, 0x1a, 0x0f,
0x2e, 0x05, 0xc2, 0x52, 0x44, 0x78, 0x21, 0x1d, 0x1c, 0x23, 0x72, 0x0d, 0x7c, 0x5b, 0x2b, 0x70,
0xdb, 0xc5, 0x7e, 0xb7, 0xf5, 0x47, 0x76, 0xef, 0x86, 0x8b, 0xfd, 0x6e, 0xe8, 0xe7, 0x90, 0x77,
0xab, 0xd7, 0xf9, 0xe4, 0x4c, 0x0f, 0x6e, 0xe9, 0x67, 0x20, 0xb6, 0x4e, 0xcf, 0x95, 0x67, 0x8a,
0x19, 0xac, 0xd4, 0xb7, 0xdf, 0x99, 0x8f, 0xef, 0x75, 0x1b, 0x75, 0x23, 0xb6, 0x5e, 0xd7, 0xef,
0x40, 0xe2, 0x13, 0xec, 0x0b, 0xaf, 0x58, 0x60, 0x85, 0x09, 0x3c, 0x15, 0xb1, 0xc4, 0x44, 0xa8,
0x97, 0xf6, 0x1a, 0x96, 0x7b, 0x79, 0xf9, 0x86, 0x41, 0x29, 0x72, 0x7f, 0x06, 0x80, 0xde, 0x73,
0xd5, 0x74, 0x8e, 0xf4, 0x0a, 0x67, 0xa6, 0xb7, 0xbe, 0xf1, 0xed, 0x77, 0xe6, 0x57, 0x86, 0x61,
0x7d, 0xba, 0x6e, 0x3a, 0x47, 0x4f, 0xbb, 0xc7, 0x6d, 0xb4, 0x54, 0x3c, 0x76, 0x91, 0xc3, 0xd9,
0xdb, 0x7c, 0xd6, 0x63, 0xcf, 0x95, 0x15, 0x9e, 0x2b, 0x19, 0x78, 0xa6, 0xb5, 0xe0, 0x33, 0x5d,
0x7a, 0xd0, 0xe7, 0x79, 0x9d, 0x4f, 0x12, 0x92, 0x25, 0xd5, 0x28, 0x4b, 0xaa, 0xef, 0xd7, 0x92,
0x6d, 0x3e, 0x3e, 0x4a, 0xcf, 0xaa, 0x0e, 0x7a, 0x56, 0xf5, 0xfd, 0x3c, 0xeb, 0xff, 0xa7, 0xd9,
0xea, 0xe5, 0xd3, 0x9e, 0x45, 0xcf, 0xb4, 0xfe, 0xe9, 0x5a, 0x0b, 0xfa, 0x40, 0xab, 0x80, 0x7c,
0xfc, 0xde, 0x5b, 0xf3, 0x4a, 0xee, 0x8b, 0x31, 0xfe, 0xe4, 0x34, 0x91, 0x1e, 0xec, 0xc9, 0xff,
0xb4, 0xd4, 0x54, 0x1f, 0x86, 0x85, 0xbe, 0xa0, 0xc0, 0x4c, 0xcf, 0x48, 0x4e, 0xcd, 0xf4, 0xc1,
0x0e, 0xe7, 0xd6, 0x49, 0x87, 0x73, 0xa6, 0xe0, 0xef, 0x28, 0x70, 0x4a, 0x1a, 0x5e, 0xa9, 0x7a,
0x17, 0x25, 0xf5, 0x1e, 0xea, 0xbd, 0x13, 0x11, 0x14, 0xb4, 0x13, 0xdd, 0x2b, 0x01, 0x04, 0x66,
0xcf, 0xef, 0x2b, 0x92, 0xdf, 0xcf, 0x78, 0x80, 0x10, 0x73, 0xf1, 0x08, 0x60, 0x6a, 0xdb, 0x10,
0xdf, 0xed, 0x20, 0xa4, 0xcf, 0x41, 0x6c, 0xab, 0xc3, 0x34, 0x9c, 0xa0, 0xf8, 0xad, 0x4e, 0xb1,
0x63, 0x5a, 0xb5, 0x23, 0x23, 0xb6, 0xd5, 0xd1, 0xcf, 0x81, 0x5a, 0x60, 0x5f, 0xb6, 0x4f, 0x2f,
0x4f, 0x52, 0x81, 0x82, 0x55, 0x67, 0x12, 0xb8, 0x4f, 0x9f, 0x83, 0xf8, 0x06, 0x32, 0x0f, 0x98,
0x12, 0x40, 0x65, 0x70, 0x8b, 0x41, 0xda, 0xd9, 0x0d, 0x5f, 0x84, 0x24, 0x27, 0xd6, 0xcf, 0x63,
0xc4, 0x81, 0xcb, 0x6e, 0xcb, 0x10, 0x58, 0x1d, 0x36, 0x73, 0x91, 0x5e, 0xfd, 0x02, 0x24, 0x8c,
0xc6, 0xe1, 0x91, 0xcb, 0x6e, 0xde, 0x2b, 0x46, 0xbb, 0x73, 0x77, 0x21, 0xe5, 0x69, 0xf4, 0x01,
0x53, 0xaf, 0xd2, 0x47, 0xd3, 0x67, 0xc5, 0xf9, 0x84, 0xaf, 0x5b, 0xd2, 0x26, 0xfd, 0x2c, 0x24,
0x77, 0xdc, 0x8e, 0x3f, 0xe8, 0xf3, 0x8a, 0xd4, 0x6b, 0xcd, 0xfd, 0x82, 0x02, 0xc9, 0x55, 0x84,
0xda, 0xc4, 0xe0, 0x8f, 0x43, 0x7c, 0xd5, 0x7e, 0xcd, 0x62, 0x0a, 0x4e, 0x31, 0x8b, 0xe2, 0x6e,
0x66, 0x53, 0xd2, 0xad, 0x3f, 0x2e, 0xda, 0x7d, 0xda, 0xb3, 0xbb, 0x20, 0x47, 0x6c, 0x9f, 0x0b,
0xd8, 0x9e, 0x39, 0x10, 0x0b, 0xf5, 0xd8, 0xff, 0x3a, 0xa4, 0x85, 0xbb, 0xe8, 0x0b, 0x4c, 0x8d,
0x98, 0x0c, 0x14, 0x6d, 0x85, 0x25, 0x72, 0x08, 0xc6, 0x03, 0x37, 0xc6, 0x50, 0xc1, 0xc4, 0x7d,
0xa0, 0xc4, 0xcc, 0x8b, 0x41, 0x33, 0x87, 0x8b, 0x32, 0x53, 0x5f, 0xa2, 0x36, 0x22, 0xe6, 0x3e,
0x4f, 0x83, 0xb3, 0xbf, 0x13, 0xf1, 0xe7, 0x5c, 0x02, 0xd4, 0x4a, 0xa3, 0x99, 0x7b, 0x06, 0x80,
0xa6, 0x7c, 0xd9, 0xea, 0xb6, 0xa4, 0xac, 0x9b, 0xe0, 0x06, 0xde, 0x3d, 0x42, 0xbb, 0xc8, 0x21,
0x22, 0xc1, 0x7a, 0x0a, 0x0f, 0x30, 0x40, 0x53, 0x8c, 0xe0, 0x9f, 0x8c, 0xc4, 0x87, 0x56, 0x62,
0x58, 0x34, 0x4b, 0x45, 0xef, 0x22, 0xb7, 0x60, 0xd9, 0xee, 0x11, 0xea, 0x48, 0x88, 0x65, 0xfd,
0x4a, 0x20, 0x61, 0x27, 0x96, 0x1f, 0xf1, 0x10, 0x7d, 0x41, 0x57, 0x72, 0x5f, 0x25, 0x0a, 0xe2,
0x52, 0xa0, 0xe7, 0x01, 0xd5, 0x21, 0x1e, 0x50, 0xbf, 0x16, 0xa8, 0xdf, 0x06, 0xa8, 0x29, 0xbd,
0x5a, 0xde, 0x0c, 0xbc, 0xe7, 0x0c, 0x56, 0x36, 0xf8, 0x8e, 0xc9, 0x6d, 0xca, 0x55, 0x7e, 0x32,
0x52, 0xe5, 0x3e, 0xd5, 0xed, 0x49, 0x6d, 0xaa, 0x0e, 0x6b, 0xd3, 0x6f, 0x78, 0x15, 0x07, 0xfd,
0x45, 0x13, 0xf2, 0x5b, 0x40, 0xfa, 0x53, 0x91, 0xbe, 0xcf, 0x2b, 0x25, 0x4f, 0xd5, 0x95, 0x61,
0xdd, 0x9f, 0x8f, 0x15, 0x8b, 0x9e, 0xba, 0xd7, 0x4f, 0x10, 0x02, 0xf9, 0x58, 0xa9, 0xe4, 0x0d,
0xdb, 0xc9, 0x4f, 0xbf, 0x35, 0xaf, 0x7c, 0xe5, 0xad, 0xf9, 0x91, 0xdc, 0x6f, 0x2a, 0x30, 0xc5,
0x24, 0x85, 0xc0, 0x7d, 0x5a, 0x52, 0xfe, 0x34, 0x1f, 0x33, 0xc2, 0x2c, 0xf0, 0x13, 0x0b, 0xde,
0x6f, 0x2a, 0x90, 0xed, 0xd1, 0x95, 0xdb, 0xfb, 0xd2, 0x50, 0x2a, 0xe7, 0x95, 0xf2, 0x4f, 0xdf,
0xe6, 0x77, 0x21, 0xb1, 0xdb, 0x68, 0xa1, 0x0e, 0x9e, 0x09, 0xf0, 0x07, 0xaa, 0x32, 0xdf, 0xcc,
0xa1, 0x4d, 0xbc, 0x8f, 0x2a, 0x17, 0xe8, 0x5b, 0xd6, 0xb3, 0x10, 0x5f, 0x35, 0x5d, 0x93, 0x68,
0x90, 0xf1, 0xc6, 0x57, 0xd3, 0x35, 0x73, 0x57, 0x20, 0xb3, 0x79, 0x4c, 0x0e, 0x22, 0xd5, 0xc9,
0x19, 0x94, 0x60, 0xf5, 0xc7, 0xeb, 0xd5, 0xcb, 0x8b, 0x89, 0x64, 0x5d, 0xbb, 0xa7, 0xe4, 0xe3,
0x44, 0x9f, 0x57, 0x61, 0x62, 0x0b, 0xab, 0x4d, 0x70, 0x04, 0x76, 0x16, 0x94, 0xcd, 0x60, 0x21,
0x24, 0xb2, 0x1a, 0xca, 0xa6, 0x54, 0x3e, 0xaa, 0x9e, 0x79, 0xa4, 0xb2, 0x4d, 0xf5, 0xca, 0xb6,
0xc5, 0x78, 0x72, 0x42, 0x9b, 0x5a, 0x8c, 0x27, 0x41, 0x1b, 0x67, 0xf7, 0xfd, 0x0f, 0x2a, 0x68,
0xb4, 0xd4, 0x59, 0x45, 0x07, 0x0d, 0xab, 0xe1, 0xf6, 0xd6, 0xab, 0x9e, 0xc6, 0xfa, 0x73, 0x90,
0xc2, 0x26, 0x5d, 0x63, 0x3f, 0x09, 0x88, 0x4d, 0x7f, 0x8e, 0x95, 0x28, 0x12, 0x05, 0x6b, 0x20,
0xa1, 0xe3, 0x63, 0xf4, 0x35, 0x50, 0x2b, 0x95, 0x4d, 0x36, 0xb9, 0xad, 0x0c, 0x84, 0xb2, 0xc3,
0x3e, 0xec, 0x8a, 0xb5, 0x39, 0x87, 0x06, 0x26, 0xd0, 0x57, 0x20, 0x56, 0xd9, 0x64, 0x05, 0xef,
0xf9, 0x61, 0x68, 0x8c, 0x58, 0x65, 0x73, 0xf6, 0xdf, 0x28, 0x30, 0x1e, 0x68, 0xd5, 0x73, 0x90,
0xa1, 0x0d, 0xc2, 0xe3, 0x8e, 0x1a, 0x81, 0x36, 0xae, 0x73, 0xec, 0x7d, 0xea, 0x3c, 0x5b, 0x80,
0x49, 0xa9, 0x5d, 0x5f, 0x02, 0x5d, 0x6c, 0x62, 0x4a, 0xd0, 0x9f, 0x23, 0x0b, 0xe9, 0xc9, 0x3d,
0x0a, 0xe0, 0xdb, 0xd5, 0xfb, 0x15, 0xad, 0x4a, 0x79, 0x67, 0xb7, 0xbc, 0xaa, 0x29, 0xb9, 0xaf,
0x2b, 0x90, 0x66, 0x65, 0x6b, 0xcd, 0x6e, 0x23, 0xbd, 0x08, 0x4a, 0x81, 0x45, 0xd0, 0x83, 0xe9,
0xad, 0x14, 0xf4, 0x8b, 0xa0, 0x14, 0x87, 0x77, 0xb5, 0x52, 0xd4, 0x97, 0x41, 0x29, 0x31, 0x07,
0x0f, 0xe7, 0x19, 0xa5, 0x94, 0xfb, 0x23, 0x15, 0xa6, 0xc5, 0x32, 0x9a, 0x8f, 0x27, 0xe7, 0x82,
0xef, 0x4d, 0xf9, 0xd4, 0xe5, 0xe5, 0x2b, 0x2b, 0x4b, 0xf8, 0x1f, 0x2f, 0x24, 0x73, 0xc1, 0x57,
0xa8, 0x3c, 0x78, 0x22, 0x97, 0xfb, 0x9d, 0x13, 0xc9, 0xc7, 0x05, 0x86, 0x9e, 0x73, 0x22, 0x81,
0xde, 0x9e, 0x73, 0x22, 0x81, 0xde, 0x9e, 0x73, 0x22, 0x81, 0xde, 0x9e, 0xbd, 0x80, 0x40, 0x6f,
0xcf, 0x39, 0x91, 0x40, 0x6f, 0xcf, 0x39, 0x91, 0x40, 0x6f, 0xef, 0x39, 0x11, 0xd6, 0xdd, 0xf7,
0x9c, 0x48, 0xb0, 0xbf, 0xf7, 0x9c, 0x48, 0xb0, 0xbf, 0xf7, 0x9c, 0x48, 0x3e, 0xee, 0x76, 0xba,
0xa8, 0xff, 0xae, 0x43, 0x10, 0x3f, 0xe8, 0x25, 0xd0, 0x1f, 0x81, 0xb7, 0x60, 0x92, 0x2e, 0x48,
0x94, 0x6c, 0xcb, 0x35, 0x1b, 0x16, 0xea, 0xe8, 0x1f, 0x85, 0x0c, 0x6d, 0xa2, 0xaf, 0x39, 0x61,
0xaf, 0x81, 0xb4, 0x9f, 0x8d, 0xb7, 0x01, 0xe9, 0xdc, 0x1f, 0xc7, 0x61, 0x86, 0x36, 0x54, 0xcc,
0x16, 0x0a, 0x9c, 0x32, 0xba, 0x20, 0xed, 0x29, 0x4d, 0x60, 0xf8, 0xfd, 0x77, 0xe6, 0x69, 0x6b,
0xc1, 0x8b, 0xa6, 0x0b, 0xd2, 0xee, 0x52, 0x50, 0xce, 0x9f, 0x80, 0x2e, 0x48, 0x27, 0x8f, 0x82,
0x72, 0xde, 0x7c, 0xe3, 0xc9, 0xf1, 0x33, 0x48, 0x41, 0xb9, 0x55, 0x2f, 0xca, 0x2e, 0x48, 0xa7,
0x91, 0x82, 0x72, 0x65, 0x2f, 0xde, 0x2e, 0x48, 0x7b, 0x4f, 0x41, 0xb9, 0x35, 0x2f, 0xf2, 0x2e,
0x48, 0xbb, 0x50, 0x41, 0xb9, 0xdb, 0x5e, 0x0c, 0x5e, 0x90, 0xce, 0x2a, 0x05, 0xe5, 0x9e, 0xf7,
0xa2, 0xf1, 0x82, 0x74, 0x6a, 0x29, 0x28, 0xb7, 0xee, 0xc5, 0xe5, 0x82, 0x7c, 0x7e, 0x29, 0x28,
0x78, 0xc7, 0x8f, 0xd0, 0x05, 0xf9, 0x24, 0x53, 0x50, 0xf2, 0x63, 0x7e, 0xac, 0x2e, 0xc8, 0x67,
0x9a, 0x82, 0x92, 0x1b, 0x7e, 0xd4, 0x2e, 0xc8, 0x7b, 0x65, 0x41, 0xc9, 0x4d, 0x3f, 0x7e, 0x17,
0xe4, 0x5d, 0xb3, 0xa0, 0x64, 0xc5, 0x8f, 0xe4, 0x05, 0x79, 0xff, 0x2c, 0x28, 0xb9, 0xe5, 0x2f,
0xa2, 0xff, 0xbe, 0x14, 0x7e, 0xc2, 0x29, 0xa8, 0x9c, 0x14, 0x7e, 0x10, 0x12, 0x7a, 0xd2, 0x40,
0x26, 0xc8, 0xf8, 0x61, 0x97, 0x93, 0xc2, 0x0e, 0x42, 0x42, 0x2e, 0x27, 0x85, 0x1c, 0x84, 0x84,
0x5b, 0x4e, 0x0a, 0x37, 0x08, 0x09, 0xb5, 0x9c, 0x14, 0x6a, 0x10, 0x12, 0x66, 0x39, 0x29, 0xcc,
0x20, 0x24, 0xc4, 0x72, 0x52, 0x88, 0x41, 0x48, 0x78, 0xe5, 0xa4, 0xf0, 0x82, 0x90, 0xd0, 0x3a,
0x2f, 0x87, 0x16, 0x84, 0x85, 0xd5, 0x79, 0x39, 0xac, 0x20, 0x2c, 0xa4, 0x1e, 0x93, 0x43, 0x2a,
0x75, 0xff, 0x9d, 0xf9, 0x04, 0x6e, 0x12, 0xa2, 0xe9, 0xbc, 0x1c, 0x4d, 0x10, 0x16, 0x49, 0xe7,
0xe5, 0x48, 0x82, 0xb0, 0x28, 0x3a, 0x2f, 0x47, 0x11, 0x84, 0x45, 0xd0, 0xdb, 0x72, 0x04, 0xf9,
0x67, 0x7c, 0x72, 0xd2, 0x96, 0x62, 0x54, 0x04, 0xa9, 0x43, 0x44, 0x90, 0x3a, 0x44, 0x04, 0xa9,
0x43, 0x44, 0x90, 0x3a, 0x44, 0x04, 0xa9, 0x43, 0x44, 0x90, 0x3a, 0x44, 0x04, 0xa9, 0x43, 0x44,
0x90, 0x3a, 0x4c, 0x04, 0xa9, 0x43, 0x45, 0x90, 0xda, 0x2f, 0x82, 0xce, 0xcb, 0x27, 0x1e, 0x20,
0x6c, 0x40, 0x3a, 0x2f, 0x6f, 0x7d, 0x46, 0x87, 0x90, 0x3a, 0x54, 0x08, 0xa9, 0xfd, 0x42, 0xe8,
0xf7, 0x55, 0x98, 0x0e, 0x84, 0x10, 0xdb, 0x1f, 0xfa, 0xa0, 0x46, 0xa0, 0x6b, 0x43, 0x1c, 0xb0,
0x08, 0x8b, 0xa9, 0x6b, 0x43, 0x6c, 0x52, 0x0f, 0x8a, 0xb3, 0xde, 0x51, 0xa8, 0x3c, 0xc4, 0x28,
0xb4, 0xe6, 0xc5, 0xd0, 0xb5, 0x21, 0x0e, 0x5e, 0xf4, 0xc6, 0xde, 0x8d, 0x41, 0x83, 0xc0, 0xf3,
0x43, 0x0d, 0x02, 0xeb, 0x43, 0x0d, 0x02, 0x77, 0x7c, 0x0f, 0xfe, 0x62, 0x0c, 0x4e, 0xf9, 0x1e,
0xa4, 0x9f, 0xc8, 0x2f, 0x6b, 0xe5, 0x84, 0x2d, 0x2a, 0x9d, 0x6f, 0xdb, 0x08, 0x6e, 0x8c, 0xad,
0xd7, 0xf5, 0xed, 0xe0, 0x66, 0x55, 0xfe, 0xa4, 0x1b, 0x38, 0x82, 0xc7, 0xd9, 0x62, 0xe8, 0x79,
0x50, 0xd7, 0xeb, 0x0e, 0x19, 0x2d, 0xc2, 0x6e, 0x5b, 0x32, 0x70, 0xb7, 0x6e, 0xc0, 0x28, 0x11,
0x77, 0x88, 0x7b, 0xdf, 0xcf, 0x8d, 0x57, 0x0d, 0xc6, 0x94, 0x7b, 0x5b, 0x81, 0xb3, 0x81, 0x50,
0xfe, 0x60, 0xb6, 0x0c, 0x6e, 0x0d, 0xb5, 0x65, 0x10, 0x48, 0x10, 0x7f, 0xfb, 0xe0, 0x89, 0xde,
0x9d, 0x6a, 0x31, 0x4b, 0xe4, 0xad, 0x84, 0xbf, 0x00, 0x13, 0xfe, 0x13, 0x90, 0x77, 0xb6, 0xab,
0xd1, 0xab, 0x99, 0x61, 0xa9, 0x79, 0x55, 0x5a, 0x45, 0x1b, 0x08, 0xf3, 0xb2, 0x35, 0x97, 0x87,
0xc9, 0x4a, 0xf0, 0x2b, 0x51, 0x51, 0x8b, 0x11, 0x49, 0x5c, 0x9a, 0xdf, 0xfb, 0xd2, 0xfc, 0x48,
0xee, 0x29, 0xc8, 0x88, 0xdf, 0x7a, 0x92, 0x80, 0x29, 0x0e, 0xcc, 0xc7, 0xbf, 0x85, 0xa5, 0xff,
0xbe, 0x02, 0xa7, 0x45, 0xf1, 0x17, 0x1a, 0xee, 0xd1, 0xba, 0x85, 0x6b, 0xfa, 0x67, 0x20, 0x89,
0x98, 0xe3, 0xd8, 0x8f, 0xe4, 0xb0, 0xf7, 0xc8, 0x50, 0xf1, 0x25, 0xf2, 0xaf, 0xe1, 0x41, 0xa4,
0x35, 0x0e, 0x7e, 0xdb, 0xe5, 0xd9, 0xc7, 0x21, 0x41, 0xf9, 0x83, 0x7a, 0x8d, 0x4b, 0x7a, 0xfd,
0x7a, 0x88, 0x5e, 0x24, 0x8e, 0xf4, 0x3b, 0x01, 0xbd, 0x84, 0xd7, 0xd5, 0x50, 0xf1, 0x25, 0x1e,
0x7c, 0xc5, 0x24, 0xae, 0xff, 0x48, 0x44, 0x45, 0x2b, 0xb9, 0x00, 0xc9, 0xb2, 0x2c, 0x13, 0xae,
0xe7, 0x2a, 0xc4, 0x2b, 0x76, 0x9d, 0xfc, 0x7c, 0x0f, 0xf9, 0x21, 0x6c, 0x66, 0x64, 0xf6, 0xab,
0xd8, 0x17, 0x20, 0x59, 0x3a, 0x6a, 0x34, 0xeb, 0x1d, 0x64, 0xb1, 0x3d, 0x7b, 0xb6, 0x84, 0x8e,
0x31, 0x86, 0xd7, 0x97, 0x2b, 0xc1, 0x54, 0xc5, 0xb6, 0x8a, 0xc7, 0xae, 0x38, 0x6e, 0x2c, 0x49,
0x29, 0xc2, 0xf6, 0x7c, 0xc8, 0xb7, 0x44, 0xb0, 0x40, 0x31, 0xf1, 0xed, 0x77, 0xe6, 0x95, 0x5d,
0x6f, 0xfd, 0x7c, 0x13, 0x1e, 0x62, 0xe9, 0xd3, 0x43, 0xb5, 0x1c, 0x45, 0x95, 0x62, 0xfb, 0xd4,
0x02, 0xdd, 0x3a, 0xa6, 0xb3, 0x42, 0xe9, 0x1e, 0x4c, 0x33, 0x5c, 0x14, 0x0d, 0xd4, 0x4c, 0x3d,
0x91, 0x66, 0xa1, 0x74, 0x4b, 0x51, 0x74, 0x92, 0x66, 0x8f, 0x41, 0xca, 0xeb, 0x13, 0xa2, 0x41,
0xcc, 0x94, 0xe5, 0xc5, 0x1c, 0xa4, 0x85, 0x84, 0xd5, 0x13, 0xa0, 0x14, 0xb4, 0x11, 0xfc, 0x5f,
0x51, 0x53, 0xf0, 0x7f, 0x25, 0x2d, 0xb6, 0xf8, 0x38, 0x4c, 0x4a, 0xeb, 0x97, 0xb8, 0x67, 0x55,
0x03, 0xfc, 0x5f, 0x59, 0x4b, 0xcf, 0xc6, 0x3f, 0xfd, 0x6b, 0x73, 0x23, 0x8b, 0xb7, 0x40, 0xef,
0x5d, 0xe9, 0xd4, 0x47, 0x21, 0x56, 0xc0, 0x94, 0x0f, 0x41, 0xac, 0x58, 0xd4, 0x94, 0xd9, 0xc9,
0xbf, 0xfa, 0xf9, 0xb3, 0xe9, 0x22, 0xf9, 0x4a, 0xf7, 0x5d, 0xe4, 0x16, 0x8b, 0x0c, 0xfc, 0x2c,
0x9c, 0x0e, 0x5d, 0x29, 0xc5, 0xf8, 0x52, 0x89, 0xe2, 0x57, 0x57, 0x7b, 0xf0, 0xab, 0xab, 0x04,
0xaf, 0xe4, 0xf9, 0x8e, 0x73, 0x41, 0x0f, 0x59, 0x97, 0xcc, 0xd6, 0x85, 0x1d, 0xee, 0x42, 0xfe,
0x59, 0x26, 0x5b, 0x0c, 0x95, 0x45, 0x11, 0x3b, 0xd6, 0xc5, 0x7c, 0x89, 0xe1, 0x4b, 0xa1, 0xf8,
0x03, 0x69, 0x5b, 0x35, 0x38, 0x43, 0x30, 0x92, 0x92, 0xa7, 0xf0, 0x6a, 0x28, 0xc9, 0x91, 0x70,
0xd8, 0x7d, 0xd5, 0x53, 0xb8, 0x1c, 0x2a, 0xdb, 0x88, 0x38, 0xf4, 0x55, 0xce, 0x5f, 0x64, 0x93,
0x7c, 0xe1, 0xb2, 0x7e, 0x9a, 0xe7, 0x68, 0x60, 0x04, 0x66, 0x06, 0xe2, 0x52, 0xf9, 0x12, 0x03,
0x14, 0xfb, 0x02, 0xfa, 0x5b, 0x89, 0x23, 0xf3, 0xcf, 0x33, 0x92, 0x52, 0x5f, 0x92, 0x08, 0x53,
0x71, 0x78, 0x71, 0xf7, 0xde, 0xbb, 0x73, 0x23, 0xdf, 0x7a, 0x77, 0x6e, 0xe4, 0xbf, 0xbc, 0x3b,
0x37, 0xf2, 0x9d, 0x77, 0xe7, 0x94, 0xef, 0xbf, 0x3b, 0xa7, 0xfc, 0xf0, 0xdd, 0x39, 0xe5, 0x47,
0xef, 0xce, 0x29, 0x6f, 0xde, 0x9f, 0x53, 0xbe, 0x72, 0x7f, 0x4e, 0xf9, 0xea, 0xfd, 0x39, 0xe5,
0x77, 0xef, 0xcf, 0x29, 0x6f, 0xdf, 0x9f, 0x53, 0xee, 0xdd, 0x9f, 0x53, 0xbe, 0x75, 0x7f, 0x4e,
0xf9, 0xce, 0xfd, 0x39, 0xe5, 0xfb, 0xf7, 0xe7, 0x46, 0x7e, 0x78, 0x7f, 0x4e, 0xf9, 0xd1, 0xfd,
0xb9, 0x91, 0x37, 0xbf, 0x3b, 0x37, 0xf2, 0xd6, 0x77, 0xe7, 0x46, 0xbe, 0xf2, 0xdd, 0x39, 0x05,
0xfe, 0x70, 0x05, 0x72, 0xec, 0x9b, 0x64, 0xc2, 0x97, 0x86, 0x2f, 0xba, 0x47, 0x88, 0x14, 0x05,
0x57, 0xf8, 0xaf, 0x80, 0x79, 0x0d, 0x27, 0xfc, 0x5e, 0xd9, 0xec, 0x83, 0x7e, 0x8b, 0x2d, 0xf7,
0x6f, 0x13, 0x30, 0xc6, 0x57, 0x83, 0xc3, 0x7e, 0x2b, 0xfd, 0x2a, 0x24, 0x8f, 0x1a, 0x4d, 0xb3,
0xd3, 0x70, 0x8f, 0xd9, 0x32, 0xe8, 0xc3, 0x4b, 0xbe, 0xda, 0x7c, 0xe1, 0xf4, 0xf9, 0x6e, 0xcb,
0xee, 0x76, 0x0c, 0x4f, 0x54, 0x3f, 0x0b, 0x99, 0x23, 0xd4, 0x38, 0x3c, 0x72, 0xab, 0x0d, 0xab,
0x5a, 0x6b, 0x91, 0x6a, 0x79, 0xdc, 0x00, 0xda, 0xb6, 0x6e, 0x95, 0x5a, 0xf8, 0x66, 0x75, 0xd3,
0x35, 0xc9, 0x5b, 0x7a, 0xc6, 0x20, 0x9f, 0xc9, 0xef, 0x1d, 0x23, 0xa7, 0xdb, 0x74, 0xab, 0x35,
0xbb, 0x6b, 0xb9, 0xa4, 0x9e, 0x55, 0x8d, 0x34, 0x6d, 0x2b, 0xe1, 0x26, 0xfd, 0x31, 0x18, 0x77,
0x3b, 0x5d, 0x54, 0x75, 0x6a, 0xb6, 0xeb, 0xb4, 0x4c, 0x8b, 0xd4, 0xb3, 0x49, 0x23, 0x83, 0x1b,
0x77, 0x58, 0x1b, 0xf9, 0x99, 0xfd, 0x9a, 0xdd, 0x41, 0xe4, 0x75, 0x3a, 0x66, 0xd0, 0x0b, 0x5d,
0x03, 0xf5, 0x15, 0x74, 0x4c, 0x5e, 0xd8, 0xe2, 0x06, 0xfe, 0xa8, 0x3f, 0x09, 0xa3, 0xf4, 0xef,
0xe4, 0x90, 0xea, 0x9a, 0x6c, 0x5e, 0x7b, 0x8f, 0x46, 0x17, 0x69, 0x0d, 0x26, 0xa0, 0xdf, 0x84,
0x31, 0x17, 0x75, 0x3a, 0x66, 0xc3, 0x22, 0x2f, 0x4f, 0xe9, 0xe5, 0xf9, 0x10, 0x33, 0xec, 0x52,
0x09, 0xf2, 0xab, 0xc0, 0x06, 0x97, 0xd7, 0xaf, 0x42, 0x86, 0xc8, 0x2d, 0x57, 0xe9, 0xdf, 0x12,
0x4a, 0xf7, 0x8d, 0xe7, 0x34, 0x95, 0xe3, 0x7b, 0x05, 0x1c, 0x46, 0x7f, 0x11, 0x71, 0x9c, 0xdc,
0xf6, 0xb1, 0x90, 0xdb, 0x92, 0xa1, 0x77, 0x99, 0x94, 0x8d, 0xf4, 0xd6, 0x8c, 0x87, 0xfe, 0x66,
0xe2, 0x26, 0x64, 0x44, 0xbd, 0xb8, 0x19, 0x68, 0xf9, 0x43, 0xcc, 0xf0, 0x84, 0xff, 0x77, 0x1a,
0xfa, 0x58, 0x81, 0xf6, 0xe7, 0x63, 0x37, 0x94, 0xd9, 0x6d, 0xd0, 0xe4, 0xfb, 0x85, 0x50, 0x5e,
0x08, 0x52, 0x6a, 0xe2, 0xc3, 0x92, 0x95, 0x72, 0x9f, 0x31, 0xf7, 0x1c, 0x8c, 0xd2, 0xf8, 0xd1,
0xd3, 0x30, 0xe6, 0xff, 0xd8, 0x66, 0x12, 0xe2, 0xdb, 0x7b, 0x95, 0x1d, 0xfa, 0xab, 0xb9, 0x3b,
0x1b, 0x85, 0xed, 0x9d, 0xdd, 0xf5, 0xd2, 0xc7, 0xb4, 0x98, 0x3e, 0x09, 0xe9, 0xe2, 0xfa, 0xc6,
0x46, 0xb5, 0x58, 0x58, 0xdf, 0x28, 0xdf, 0xd5, 0xd4, 0xdc, 0x1c, 0x8c, 0x52, 0x3d, 0xc9, 0xaf,
0xff, 0x75, 0x2d, 0xeb, 0x98, 0x97, 0x0f, 0xe4, 0x22, 0xf7, 0x35, 0x1d, 0xc6, 0x0a, 0xcd, 0xe6,
0xa6, 0xd9, 0x76, 0xf4, 0x17, 0x60, 0x8a, 0xfe, 0x2e, 0xc7, 0xae, 0xbd, 0x4a, 0x7e, 0xa4, 0x12,
0x0f, 0x0e, 0x0a, 0xfb, 0xfb, 0x14, 0xfe, 0x73, 0x33, 0xf1, 0xa5, 0x1e, 0x59, 0x6a, 0xe0, 0x5e,
0x0e, 0x7d, 0x17, 0x34, 0xde, 0xb8, 0xd6, 0xb4, 0x4d, 0x17, 0xf3, 0xc6, 0xd8, 0x6f, 0x48, 0xf6,
0xe7, 0xe5, 0xa2, 0x94, 0xb6, 0x87, 0x41, 0xff, 0x28, 0x24, 0xd7, 0x2d, 0xf7, 0xca, 0x32, 0x66,
0xe3, 0x7f, 0xfb, 0xa9, 0x97, 0x8d, 0x8b, 0x50, 0x16, 0x0f, 0xc1, 0xd0, 0xd7, 0x56, 0x30, 0x3a,
0x3e, 0x08, 0x4d, 0x44, 0x7c, 0x34, 0xb9, 0xd4, 0x9f, 0x83, 0x14, 0x7e, 0x3b, 0xa1, 0x37, 0x4f,
0xf0, 0xd2, 0xb5, 0x07, 0xee, 0xc9, 0x50, 0xbc, 0x8f, 0xe1, 0x04, 0xf4, 0xfe, 0xa3, 0x03, 0x09,
0x04, 0x05, 0x7c, 0x0c, 0x26, 0xd8, 0xf1, 0x34, 0x18, 0xeb, 0x4b, 0xb0, 0x23, 0x69, 0xb0, 0x23,
0x6a, 0xb0, 0xe3, 0x69, 0x90, 0x1c, 0x48, 0x20, 0x6a, 0xe0, 0x5d, 0xeb, 0x45, 0x80, 0xb5, 0xc6,
0xeb, 0xa8, 0x4e, 0x55, 0xa0, 0x7f, 0x19, 0x2a, 0x17, 0xc2, 0xe0, 0x0b, 0x51, 0x0a, 0x01, 0xa5,
0x97, 0x21, 0xbd, 0x73, 0xe0, 0x93, 0x40, 0x4f, 0x1e, 0x7b, 0x6a, 0x1c, 0x48, 0x2c, 0x22, 0xce,
0x53, 0x85, 0x3e, 0x4c, 0x7a, 0xb0, 0x2a, 0xc2, 0xd3, 0x08, 0x28, 0x5f, 0x15, 0x4a, 0x92, 0x89,
0x50, 0x45, 0x60, 0x11, 0x71, 0x78, 0x30, 0x2c, 0xda, 0x36, 0x96, 0x64, 0xa3, 0xd2, 0x7c, 0x08,
0x05, 0x93, 0x60, 0x83, 0x21, 0xbb, 0x22, 0x1e, 0x21, 0x41, 0x8e, 0xc1, 0x13, 0xfd, 0x3d, 0xc2,
0x65, 0xb8, 0x47, 0xf8, 0xb5, 0x98, 0x67, 0xe4, 0x44, 0x2b, 0xe6, 0x99, 0x8c, 0xcc, 0x33, 0x2e,
0x2a, 0xe5, 0x19, 0x6f, 0xd6, 0x3f, 0x0e, 0x93, 0xbc, 0x0d, 0x0f, 0x4f, 0x98, 0x54, 0x63, 0x7f,
0x3b, 0xaf, 0x3f, 0x29, 0x93, 0xa4, 0x9c, 0x32, 0x5e, 0xaf, 0xc0, 0x04, 0x6f, 0xda, 0x74, 0xc8,
0xe3, 0x4e, 0xb1, 0x3f, 0x8b, 0xd2, 0x9f, 0x91, 0x0a, 0x52, 0x42, 0x09, 0x3d, 0xbb, 0x0a, 0x33,
0xe1, 0xa3, 0x91, 0x38, 0xfc, 0xa6, 0xe8, 0xf0, 0x7b, 0x4a, 0x1c, 0x7e, 0x15, 0x71, 0xf8, 0x2e,
0xc1, 0xe9, 0xd0, 0xb1, 0x27, 0x8a, 0x24, 0x26, 0x92, 0xdc, 0x82, 0xf1, 0xc0, 0x90, 0x23, 0x82,
0x13, 0x21, 0xe0, 0x44, 0x2f, 0xd8, 0x0f, 0xad, 0x90, 0xd9, 0x23, 0x00, 0x56, 0x45, 0xf0, 0x47,
0x61, 0x22, 0x38, 0xde, 0x88, 0xe8, 0xf1, 0x10, 0xf4, 0x78, 0x08, 0x3a, 0xfc, 0xde, 0xf1, 0x10,
0x74, 0x5c, 0x42, 0xef, 0xf4, 0xbd, 0xf7, 0x54, 0x08, 0x7a, 0x2a, 0x04, 0x1d, 0x7e, 0x6f, 0x3d,
0x04, 0xad, 0x8b, 0xe8, 0x67, 0x60, 0x52, 0x1a, 0x62, 0x44, 0xf8, 0x58, 0x08, 0x7c, 0x4c, 0x84,
0x3f, 0x0b, 0x9a, 0x3c, 0xb8, 0x88, 0xf8, 0xc9, 0x10, 0xfc, 0x64, 0xd8, 0xed, 0xc3, 0xb5, 0x1f,
0x0d, 0x81, 0x8f, 0x86, 0xde, 0x3e, 0x1c, 0xaf, 0x85, 0xe0, 0x35, 0x11, 0x9f, 0x87, 0x8c, 0x38,
0x9a, 0x88, 0xd8, 0x64, 0x08, 0x36, 0x29, 0xdb, 0x3d, 0x30, 0x98, 0x44, 0x45, 0x7a, 0xaa, 0x4f,
0xba, 0x04, 0x86, 0x90, 0x28, 0x92, 0x8c, 0x48, 0xf2, 0x09, 0x38, 0x15, 0x36, 0x64, 0x84, 0x70,
0x2c, 0x88, 0x1c, 0x13, 0xb8, 0x46, 0xf4, 0x8b, 0x3d, 0xb3, 0x2d, 0x15, 0x4e, 0xb3, 0x2f, 0xc1,
0x74, 0xc8, 0xc0, 0x11, 0x42, 0xbb, 0x14, 0xac, 0xc6, 0xb2, 0x02, 0x2d, 0x19, 0x04, 0x1a, 0xd6,
0xe1, 0xb6, 0xdd, 0xb0, 0x5c, 0xb1, 0x2a, 0xfb, 0xfa, 0x34, 0x4c, 0xb0, 0xe1, 0x69, 0xab, 0x53,
0x47, 0x1d, 0x54, 0xd7, 0xff, 0x5c, 0xff, 0xda, 0xe9, 0x52, 0xef, 0xa0, 0xc6, 0x50, 0x27, 0x28,
0xa1, 0x5e, 0xea, 0x5b, 0x42, 0x5d, 0x8c, 0xa6, 0x8f, 0xaa, 0xa4, 0x4a, 0x3d, 0x95, 0xd4, 0x13,
0xfd, 0x49, 0xfb, 0x15, 0x54, 0xa5, 0x9e, 0x82, 0x6a, 0x30, 0x49, 0x68, 0x5d, 0xb5, 0xd6, 0x5b,
0x57, 0x2d, 0xf4, 0x67, 0xe9, 0x5f, 0x5e, 0xad, 0xf5, 0x96, 0x57, 0x11, 0x3c, 0xe1, 0x55, 0xd6,
0x5a, 0x6f, 0x95, 0x35, 0x80, 0xa7, 0x7f, 0xb1, 0xb5, 0xd6, 0x5b, 0x6c, 0x45, 0xf0, 0x84, 0xd7,
0x5c, 0xeb, 0x21, 0x35, 0xd7, 0x93, 0xfd, 0x89, 0x06, 0x95, 0x5e, 0x1b, 0x61, 0xa5, 0xd7, 0xe2,
0x00, 0xa5, 0x06, 0x56, 0x60, 0xeb, 0x21, 0x15, 0x58, 0x94, 0x62, 0x7d, 0x0a, 0xb1, 0x8d, 0xb0,
0x42, 0x2c, 0x52, 0xb1, 0x7e, 0xf5, 0xd8, 0xcf, 0xc9, 0xf5, 0xd8, 0x85, 0xfe, 0x4c, 0xe1, 0x65,
0xd9, 0x5a, 0x6f, 0x59, 0xb6, 0x10, 0x95, 0x73, 0x61, 0xd5, 0xd9, 0x4b, 0x7d, 0xab, 0xb3, 0x21,
0x52, 0x38, 0xaa, 0x48, 0x7b, 0xb1, 0x5f, 0x91, 0xb6, 0x14, 0xcd, 0x3d, 0xb8, 0x56, 0xdb, 0xeb,
0x53, 0xab, 0x3d, 0x1d, 0x4d, 0xfc, 0xb3, 0x92, 0xed, 0x67, 0x25, 0xdb, 0xcf, 0x4a, 0xb6, 0x9f,
0x95, 0x6c, 0x3f, 0xfd, 0x92, 0x2d, 0x1f, 0xff, 0xcc, 0x97, 0xe6, 0x95, 0xdc, 0x7f, 0x56, 0xbd,
0x3f, 0xb8, 0xf6, 0x42, 0xc3, 0x3d, 0xc2, 0xc3, 0xdb, 0x26, 0x64, 0xc8, 0x0f, 0x00, 0xb7, 0xcc,
0x76, 0xbb, 0x61, 0x1d, 0xb2, 0x9a, 0x6d, 0xb1, 0x77, 0x29, 0x91, 0x01, 0xc8, 0x1f, 0x9b, 0xd9,
0xa4, 0xc2, 0x6c, 0xba, 0xb1, 0xfc, 0x16, 0xfd, 0x0e, 0xa4, 0x5b, 0xce, 0xa1, 0xc7, 0x16, 0xeb,
0x99, 0x08, 0x25, 0x36, 0xfa, 0xa4, 0x3e, 0x19, 0xb4, 0xbc, 0x06, 0xac, 0xda, 0xfe, 0xb1, 0xeb,
0xab, 0xa6, 0x46, 0xa9, 0x86, 0x7d, 0x1a, 0x54, 0x6d, 0xdf, 0x6f, 0xc1, 0x61, 0x2b, 0xeb, 0x1e,
0x35, 0xd2, 0x05, 0x82, 0xe7, 0x05, 0x98, 0x94, 0xb4, 0x0d, 0xc9, 0xf9, 0x07, 0xf0, 0x0d, 0x56,
0x4c, 0xd6, 0x3c, 0x2a, 0x27, 0xc4, 0x80, 0xcc, 0x3d, 0x0a, 0xe3, 0x01, 0x6e, 0x3d, 0x03, 0xca,
0x01, 0xfb, 0x3a, 0xa5, 0x72, 0x90, 0xfb, 0xa2, 0x02, 0x69, 0x76, 0x94, 0x60, 0xdb, 0x6c, 0x74,
0xf4, 0xe7, 0x21, 0xde, 0xe4, 0x5f, 0x69, 0x7a, 0xd0, 0xaf, 0xcf, 0x12, 0x06, 0x7d, 0x0d, 0x12,
0x1d, 0xef, 0x2b, 0x4f, 0x0f, 0xf4, 0x9d, 0x58, 0x02, 0xcf, 0xdd, 0x53, 0x60, 0x8a, 0x9d, 0x74,
0x75, 0xd8, 0x01, 0x68, 0xb3, 0x3d, 0xfb, 0x35, 0x05, 0x52, 0xde, 0x95, 0xbe, 0x0f, 0x13, 0xde,
0x05, 0x3d, 0x64, 0x4f, 0x23, 0x35, 0x2f, 0x58, 0xb8, 0x87, 0x63, 0x29, 0xe4, 0x13, 0xdd, 0x8c,
0xa2, 0x73, 0x72, 0xb0, 0x71, 0xb6, 0x00, 0xd3, 0x21, 0x62, 0x27, 0x99, 0x90, 0x73, 0xe7, 0x20,
0x55, 0xb1, 0x5d, 0xfa, 0xcb, 0x39, 0xfa, 0x29, 0x61, 0x57, 0xa1, 0x18, 0xd3, 0x46, 0x08, 0x78,
0xf1, 0x1c, 0x8c, 0xb1, 0xec, 0xd7, 0x47, 0x21, 0xb6, 0x59, 0xd0, 0x46, 0xc8, 0xff, 0x45, 0x4d,
0x21, 0xff, 0x97, 0xb4, 0x58, 0x71, 0xe3, 0x01, 0x76, 0x9a, 0x46, 0xfa, 0xed, 0x34, 0xed, 0x8f,
0x52, 0xf3, 0xfc, 0x49, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3d, 0x79, 0x7c, 0xa8, 0xc6, 0x83, 0x00,
0x00,
}
r := bytes.NewReader(gzipped)
gzipr, err := compress_gzip.NewReader(r)
if err != nil {
panic(err)
}
ungzipped, err := io_ioutil.ReadAll(gzipr)
if err != nil {
panic(err)
}
if err := github_com_gogo_protobuf_proto.Unmarshal(ungzipped, d); err != nil {
panic(err)
}
return d
}
// String returns the registered protobuf name for x, falling back to
// the decimal form of its numeric value when x is not a known MapEnum
// constant.
func (x MapEnum) String() string {
	if name, known := MapEnum_name[int32(x)]; known {
		return name
	}
	return strconv.Itoa(int(x))
}
// String returns the registered protobuf name for x, falling back to
// the decimal form of its numeric value when x is not a known
// Message_Humour constant.
func (x Message_Humour) String() string {
	if name, known := Message_Humour_name[int32(x)]; known {
		return name
	}
	return strconv.Itoa(int(x))
}
// VerboseEqual reports whether this and that are equal and, when they
// are not, returns an error naming the first difference found (a
// type/nil mismatch or the first unequal field). that may be a
// *Message or a Message value; any other dynamic type is an error.
// A nil receiver equals only a nil argument. Returns nil on equality.
func (this *Message) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Normalize the argument: accept both the pointer and value forms.
	that1, ok := that.(*Message)
	if !ok {
		that2, ok := that.(Message)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *Message")
		}
	}
	// Both sides must agree on nil-ness before fields are compared.
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *Message but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *Message but is not nil && this == nil")
	}
	// Scalar fields compare directly with !=.
	if this.Name != that1.Name {
		return fmt.Errorf("Name this(%v) Not Equal that(%v)", this.Name, that1.Name)
	}
	if this.Hilarity != that1.Hilarity {
		return fmt.Errorf("Hilarity this(%v) Not Equal that(%v)", this.Hilarity, that1.Hilarity)
	}
	if this.HeightInCm != that1.HeightInCm {
		return fmt.Errorf("HeightInCm this(%v) Not Equal that(%v)", this.HeightInCm, that1.HeightInCm)
	}
	// Byte fields use bytes.Equal (nil and empty compare equal there).
	if !bytes.Equal(this.Data, that1.Data) {
		return fmt.Errorf("Data this(%v) Not Equal that(%v)", this.Data, that1.Data)
	}
	if this.ResultCount != that1.ResultCount {
		return fmt.Errorf("ResultCount this(%v) Not Equal that(%v)", this.ResultCount, that1.ResultCount)
	}
	if this.TrueScotsman != that1.TrueScotsman {
		return fmt.Errorf("TrueScotsman this(%v) Not Equal that(%v)", this.TrueScotsman, that1.TrueScotsman)
	}
	if this.Score != that1.Score {
		return fmt.Errorf("Score this(%v) Not Equal that(%v)", this.Score, that1.Score)
	}
	// Repeated/collection fields: length first, then element-by-element.
	if len(this.Key) != len(that1.Key) {
		return fmt.Errorf("Key this(%v) Not Equal that(%v)", len(this.Key), len(that1.Key))
	}
	for i := range this.Key {
		if this.Key[i] != that1.Key[i] {
			return fmt.Errorf("Key this[%v](%v) Not Equal that[%v](%v)", i, this.Key[i], i, that1.Key[i])
		}
	}
	// Message-typed fields delegate to their generated Equal methods.
	if !this.Nested.Equal(that1.Nested) {
		return fmt.Errorf("Nested this(%v) Not Equal that(%v)", this.Nested, that1.Nested)
	}
	if len(this.Terrain) != len(that1.Terrain) {
		return fmt.Errorf("Terrain this(%v) Not Equal that(%v)", len(this.Terrain), len(that1.Terrain))
	}
	for i := range this.Terrain {
		if !this.Terrain[i].Equal(that1.Terrain[i]) {
			return fmt.Errorf("Terrain this[%v](%v) Not Equal that[%v](%v)", i, this.Terrain[i], i, that1.Terrain[i])
		}
	}
	if !this.Proto2Field.Equal(that1.Proto2Field) {
		return fmt.Errorf("Proto2Field this(%v) Not Equal that(%v)", this.Proto2Field, that1.Proto2Field)
	}
	if len(this.Proto2Value) != len(that1.Proto2Value) {
		return fmt.Errorf("Proto2Value this(%v) Not Equal that(%v)", len(this.Proto2Value), len(that1.Proto2Value))
	}
	for i := range this.Proto2Value {
		if !this.Proto2Value[i].Equal(that1.Proto2Value[i]) {
			return fmt.Errorf("Proto2Value this[%v](%v) Not Equal that[%v](%v)", i, this.Proto2Value[i], i, that1.Proto2Value[i])
		}
	}
	// Unknown fields captured during unmarshal must match too.
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that represent the same Message.
// that may be a *Message or a Message value; any other dynamic type
// compares unequal. Two nil messages are considered equal.
func (this *Message) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize the argument: accept both the pointer and value forms.
	other, isPtr := that.(*Message)
	if !isPtr {
		val, isVal := that.(Message)
		if !isVal {
			return false
		}
		other = &val
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	// Scalar fields: a single combined comparison.
	if this.Name != other.Name ||
		this.Hilarity != other.Hilarity ||
		this.HeightInCm != other.HeightInCm ||
		this.ResultCount != other.ResultCount ||
		this.TrueScotsman != other.TrueScotsman ||
		this.Score != other.Score {
		return false
	}
	if !bytes.Equal(this.Data, other.Data) {
		return false
	}
	// Repeated scalar field: lengths must agree, then each element.
	if len(this.Key) != len(other.Key) {
		return false
	}
	for i := range this.Key {
		if this.Key[i] != other.Key[i] {
			return false
		}
	}
	// Message-typed fields delegate to their generated Equal methods.
	if !this.Nested.Equal(other.Nested) {
		return false
	}
	if len(this.Terrain) != len(other.Terrain) {
		return false
	}
	for i := range this.Terrain {
		if !this.Terrain[i].Equal(other.Terrain[i]) {
			return false
		}
	}
	if !this.Proto2Field.Equal(other.Proto2Field) {
		return false
	}
	if len(this.Proto2Value) != len(other.Proto2Value) {
		return false
	}
	for i := range this.Proto2Value {
		if !this.Proto2Value[i].Equal(other.Proto2Value[i]) {
			return false
		}
	}
	// Unknown fields captured during unmarshal must match as well.
	return bytes.Equal(this.XXX_unrecognized, other.XXX_unrecognized)
}
// VerboseEqual reports whether this and that are equal, returning nil
// on equality and an error naming the first difference otherwise.
// that may be a *Nested or a Nested value; any other dynamic type is
// an error. A nil receiver equals only a nil argument.
func (this *Nested) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Normalize the argument: accept both the pointer and value forms.
	other, isPtr := that.(*Nested)
	if !isPtr {
		val, isVal := that.(Nested)
		if !isVal {
			return fmt.Errorf("that is not of type *Nested")
		}
		other = &val
	}
	// Both sides must agree on nil-ness before fields are compared.
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *Nested but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *Nested but is not nil && this == nil")
	}
	if this.Bunny != other.Bunny {
		return fmt.Errorf("Bunny this(%v) Not Equal that(%v)", this.Bunny, other.Bunny)
	}
	// Unknown fields captured during unmarshal must match as well.
	if !bytes.Equal(this.XXX_unrecognized, other.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, other.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that represent the same Nested value.
// that may be a *Nested or a Nested; any other dynamic type compares
// unequal. Two nil values are considered equal.
func (this *Nested) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize the argument: accept both the pointer and value forms.
	other, isPtr := that.(*Nested)
	if !isPtr {
		val, isVal := that.(Nested)
		if !isVal {
			return false
		}
		other = &val
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	// Compare the single field plus any unknown bytes kept at unmarshal.
	return this.Bunny == other.Bunny &&
		bytes.Equal(this.XXX_unrecognized, other.XXX_unrecognized)
}
// VerboseEqual reports whether this and that are equal and, when they
// are not, returns an error naming the first difference found (a
// type/nil mismatch, a map whose lengths differ, or the first unequal
// entry). that may be an *AllMaps or an AllMaps value; any other
// dynamic type is an error. Each map is checked length-first, then
// entry-by-entry; absent keys compare as the value type's zero value.
// Returns nil on equality.
func (this *AllMaps) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Normalize the argument: accept both the pointer and value forms.
	that1, ok := that.(*AllMaps)
	if !ok {
		that2, ok := that.(AllMaps)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *AllMaps")
		}
	}
	// Both sides must agree on nil-ness before fields are compared.
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *AllMaps but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *AllMaps but is not nil && this == nil")
	}
	// Scalar-valued maps: != on each entry.
	if len(this.StringToDoubleMap) != len(that1.StringToDoubleMap) {
		return fmt.Errorf("StringToDoubleMap this(%v) Not Equal that(%v)", len(this.StringToDoubleMap), len(that1.StringToDoubleMap))
	}
	for i := range this.StringToDoubleMap {
		if this.StringToDoubleMap[i] != that1.StringToDoubleMap[i] {
			return fmt.Errorf("StringToDoubleMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToDoubleMap[i], i, that1.StringToDoubleMap[i])
		}
	}
	if len(this.StringToFloatMap) != len(that1.StringToFloatMap) {
		return fmt.Errorf("StringToFloatMap this(%v) Not Equal that(%v)", len(this.StringToFloatMap), len(that1.StringToFloatMap))
	}
	for i := range this.StringToFloatMap {
		if this.StringToFloatMap[i] != that1.StringToFloatMap[i] {
			return fmt.Errorf("StringToFloatMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToFloatMap[i], i, that1.StringToFloatMap[i])
		}
	}
	if len(this.Int32Map) != len(that1.Int32Map) {
		return fmt.Errorf("Int32Map this(%v) Not Equal that(%v)", len(this.Int32Map), len(that1.Int32Map))
	}
	for i := range this.Int32Map {
		if this.Int32Map[i] != that1.Int32Map[i] {
			return fmt.Errorf("Int32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Int32Map[i], i, that1.Int32Map[i])
		}
	}
	if len(this.Int64Map) != len(that1.Int64Map) {
		return fmt.Errorf("Int64Map this(%v) Not Equal that(%v)", len(this.Int64Map), len(that1.Int64Map))
	}
	for i := range this.Int64Map {
		if this.Int64Map[i] != that1.Int64Map[i] {
			return fmt.Errorf("Int64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Int64Map[i], i, that1.Int64Map[i])
		}
	}
	if len(this.Uint32Map) != len(that1.Uint32Map) {
		return fmt.Errorf("Uint32Map this(%v) Not Equal that(%v)", len(this.Uint32Map), len(that1.Uint32Map))
	}
	for i := range this.Uint32Map {
		if this.Uint32Map[i] != that1.Uint32Map[i] {
			return fmt.Errorf("Uint32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Uint32Map[i], i, that1.Uint32Map[i])
		}
	}
	if len(this.Uint64Map) != len(that1.Uint64Map) {
		return fmt.Errorf("Uint64Map this(%v) Not Equal that(%v)", len(this.Uint64Map), len(that1.Uint64Map))
	}
	for i := range this.Uint64Map {
		if this.Uint64Map[i] != that1.Uint64Map[i] {
			return fmt.Errorf("Uint64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Uint64Map[i], i, that1.Uint64Map[i])
		}
	}
	if len(this.Sint32Map) != len(that1.Sint32Map) {
		return fmt.Errorf("Sint32Map this(%v) Not Equal that(%v)", len(this.Sint32Map), len(that1.Sint32Map))
	}
	for i := range this.Sint32Map {
		if this.Sint32Map[i] != that1.Sint32Map[i] {
			return fmt.Errorf("Sint32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sint32Map[i], i, that1.Sint32Map[i])
		}
	}
	if len(this.Sint64Map) != len(that1.Sint64Map) {
		return fmt.Errorf("Sint64Map this(%v) Not Equal that(%v)", len(this.Sint64Map), len(that1.Sint64Map))
	}
	for i := range this.Sint64Map {
		if this.Sint64Map[i] != that1.Sint64Map[i] {
			return fmt.Errorf("Sint64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sint64Map[i], i, that1.Sint64Map[i])
		}
	}
	if len(this.Fixed32Map) != len(that1.Fixed32Map) {
		return fmt.Errorf("Fixed32Map this(%v) Not Equal that(%v)", len(this.Fixed32Map), len(that1.Fixed32Map))
	}
	for i := range this.Fixed32Map {
		if this.Fixed32Map[i] != that1.Fixed32Map[i] {
			return fmt.Errorf("Fixed32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Fixed32Map[i], i, that1.Fixed32Map[i])
		}
	}
	if len(this.Sfixed32Map) != len(that1.Sfixed32Map) {
		return fmt.Errorf("Sfixed32Map this(%v) Not Equal that(%v)", len(this.Sfixed32Map), len(that1.Sfixed32Map))
	}
	for i := range this.Sfixed32Map {
		if this.Sfixed32Map[i] != that1.Sfixed32Map[i] {
			return fmt.Errorf("Sfixed32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sfixed32Map[i], i, that1.Sfixed32Map[i])
		}
	}
	if len(this.Fixed64Map) != len(that1.Fixed64Map) {
		return fmt.Errorf("Fixed64Map this(%v) Not Equal that(%v)", len(this.Fixed64Map), len(that1.Fixed64Map))
	}
	for i := range this.Fixed64Map {
		if this.Fixed64Map[i] != that1.Fixed64Map[i] {
			return fmt.Errorf("Fixed64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Fixed64Map[i], i, that1.Fixed64Map[i])
		}
	}
	if len(this.Sfixed64Map) != len(that1.Sfixed64Map) {
		return fmt.Errorf("Sfixed64Map this(%v) Not Equal that(%v)", len(this.Sfixed64Map), len(that1.Sfixed64Map))
	}
	for i := range this.Sfixed64Map {
		if this.Sfixed64Map[i] != that1.Sfixed64Map[i] {
			return fmt.Errorf("Sfixed64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sfixed64Map[i], i, that1.Sfixed64Map[i])
		}
	}
	if len(this.BoolMap) != len(that1.BoolMap) {
		return fmt.Errorf("BoolMap this(%v) Not Equal that(%v)", len(this.BoolMap), len(that1.BoolMap))
	}
	for i := range this.BoolMap {
		if this.BoolMap[i] != that1.BoolMap[i] {
			return fmt.Errorf("BoolMap this[%v](%v) Not Equal that[%v](%v)", i, this.BoolMap[i], i, that1.BoolMap[i])
		}
	}
	if len(this.StringMap) != len(that1.StringMap) {
		return fmt.Errorf("StringMap this(%v) Not Equal that(%v)", len(this.StringMap), len(that1.StringMap))
	}
	for i := range this.StringMap {
		if this.StringMap[i] != that1.StringMap[i] {
			return fmt.Errorf("StringMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringMap[i], i, that1.StringMap[i])
		}
	}
	// Bytes-valued map: entries compared with bytes.Equal.
	if len(this.StringToBytesMap) != len(that1.StringToBytesMap) {
		return fmt.Errorf("StringToBytesMap this(%v) Not Equal that(%v)", len(this.StringToBytesMap), len(that1.StringToBytesMap))
	}
	for i := range this.StringToBytesMap {
		if !bytes.Equal(this.StringToBytesMap[i], that1.StringToBytesMap[i]) {
			return fmt.Errorf("StringToBytesMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToBytesMap[i], i, that1.StringToBytesMap[i])
		}
	}
	if len(this.StringToEnumMap) != len(that1.StringToEnumMap) {
		return fmt.Errorf("StringToEnumMap this(%v) Not Equal that(%v)", len(this.StringToEnumMap), len(that1.StringToEnumMap))
	}
	for i := range this.StringToEnumMap {
		if this.StringToEnumMap[i] != that1.StringToEnumMap[i] {
			return fmt.Errorf("StringToEnumMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToEnumMap[i], i, that1.StringToEnumMap[i])
		}
	}
	// Message-valued map: entries delegate to the generated Equal method.
	if len(this.StringToMsgMap) != len(that1.StringToMsgMap) {
		return fmt.Errorf("StringToMsgMap this(%v) Not Equal that(%v)", len(this.StringToMsgMap), len(that1.StringToMsgMap))
	}
	for i := range this.StringToMsgMap {
		if !this.StringToMsgMap[i].Equal(that1.StringToMsgMap[i]) {
			return fmt.Errorf("StringToMsgMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToMsgMap[i], i, that1.StringToMsgMap[i])
		}
	}
	// Unknown fields captured during unmarshal must match too.
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that represent the same AllMaps.
// that may be an *AllMaps or an AllMaps value; any other dynamic type
// compares unequal. Two nil values are considered equal. Each map
// field is checked length-first, then entry-by-entry; absent keys
// compare as the value type's zero value.
func (this *AllMaps) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize the argument: accept both the pointer and value forms.
	that1, ok := that.(*AllMaps)
	if !ok {
		that2, ok := that.(AllMaps)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	// Both sides must agree on nil-ness before fields are compared.
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	// Scalar-valued maps: != on each entry.
	if len(this.StringToDoubleMap) != len(that1.StringToDoubleMap) {
		return false
	}
	for i := range this.StringToDoubleMap {
		if this.StringToDoubleMap[i] != that1.StringToDoubleMap[i] {
			return false
		}
	}
	if len(this.StringToFloatMap) != len(that1.StringToFloatMap) {
		return false
	}
	for i := range this.StringToFloatMap {
		if this.StringToFloatMap[i] != that1.StringToFloatMap[i] {
			return false
		}
	}
	if len(this.Int32Map) != len(that1.Int32Map) {
		return false
	}
	for i := range this.Int32Map {
		if this.Int32Map[i] != that1.Int32Map[i] {
			return false
		}
	}
	if len(this.Int64Map) != len(that1.Int64Map) {
		return false
	}
	for i := range this.Int64Map {
		if this.Int64Map[i] != that1.Int64Map[i] {
			return false
		}
	}
	if len(this.Uint32Map) != len(that1.Uint32Map) {
		return false
	}
	for i := range this.Uint32Map {
		if this.Uint32Map[i] != that1.Uint32Map[i] {
			return false
		}
	}
	if len(this.Uint64Map) != len(that1.Uint64Map) {
		return false
	}
	for i := range this.Uint64Map {
		if this.Uint64Map[i] != that1.Uint64Map[i] {
			return false
		}
	}
	if len(this.Sint32Map) != len(that1.Sint32Map) {
		return false
	}
	for i := range this.Sint32Map {
		if this.Sint32Map[i] != that1.Sint32Map[i] {
			return false
		}
	}
	if len(this.Sint64Map) != len(that1.Sint64Map) {
		return false
	}
	for i := range this.Sint64Map {
		if this.Sint64Map[i] != that1.Sint64Map[i] {
			return false
		}
	}
	if len(this.Fixed32Map) != len(that1.Fixed32Map) {
		return false
	}
	for i := range this.Fixed32Map {
		if this.Fixed32Map[i] != that1.Fixed32Map[i] {
			return false
		}
	}
	if len(this.Sfixed32Map) != len(that1.Sfixed32Map) {
		return false
	}
	for i := range this.Sfixed32Map {
		if this.Sfixed32Map[i] != that1.Sfixed32Map[i] {
			return false
		}
	}
	if len(this.Fixed64Map) != len(that1.Fixed64Map) {
		return false
	}
	for i := range this.Fixed64Map {
		if this.Fixed64Map[i] != that1.Fixed64Map[i] {
			return false
		}
	}
	if len(this.Sfixed64Map) != len(that1.Sfixed64Map) {
		return false
	}
	for i := range this.Sfixed64Map {
		if this.Sfixed64Map[i] != that1.Sfixed64Map[i] {
			return false
		}
	}
	if len(this.BoolMap) != len(that1.BoolMap) {
		return false
	}
	for i := range this.BoolMap {
		if this.BoolMap[i] != that1.BoolMap[i] {
			return false
		}
	}
	if len(this.StringMap) != len(that1.StringMap) {
		return false
	}
	for i := range this.StringMap {
		if this.StringMap[i] != that1.StringMap[i] {
			return false
		}
	}
	// Bytes-valued map: entries compared with bytes.Equal.
	if len(this.StringToBytesMap) != len(that1.StringToBytesMap) {
		return false
	}
	for i := range this.StringToBytesMap {
		if !bytes.Equal(this.StringToBytesMap[i], that1.StringToBytesMap[i]) {
			return false
		}
	}
	if len(this.StringToEnumMap) != len(that1.StringToEnumMap) {
		return false
	}
	for i := range this.StringToEnumMap {
		if this.StringToEnumMap[i] != that1.StringToEnumMap[i] {
			return false
		}
	}
	// Message-valued map: entries delegate to the generated Equal method.
	if len(this.StringToMsgMap) != len(that1.StringToMsgMap) {
		return false
	}
	for i := range this.StringToMsgMap {
		if !this.StringToMsgMap[i].Equal(that1.StringToMsgMap[i]) {
			return false
		}
	}
	// Unknown fields captured during unmarshal must match too.
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return false
	}
	return true
}
// VerboseEqual reports whether this and that are deeply equal, returning
// nil on equality and a descriptive error naming the first differing
// field otherwise. A nil receiver equals a nil (or typed-nil pointer)
// argument; that may be either *AllMapsOrdered or AllMapsOrdered.
//
// Map fields are compared by length first, then by ranging over this's
// keys. If both maps have the same length but different key sets, a
// missing key on the other side yields Go's zero value for comparison.
//
// NOTE(review): this block looks machine-generated (gogo/protobuf
// "verboseequal" plugin); prefer regenerating over hand-editing.
func (this *AllMapsOrdered) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*AllMapsOrdered)
	if !ok {
		that2, ok := that.(AllMapsOrdered)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *AllMapsOrdered")
		}
	}
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *AllMapsOrdered but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *AllMapsOrdered but is not nil && this == nil")
	}
	if len(this.StringToDoubleMap) != len(that1.StringToDoubleMap) {
		return fmt.Errorf("StringToDoubleMap this(%v) Not Equal that(%v)", len(this.StringToDoubleMap), len(that1.StringToDoubleMap))
	}
	for i := range this.StringToDoubleMap {
		if this.StringToDoubleMap[i] != that1.StringToDoubleMap[i] {
			return fmt.Errorf("StringToDoubleMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToDoubleMap[i], i, that1.StringToDoubleMap[i])
		}
	}
	if len(this.StringToFloatMap) != len(that1.StringToFloatMap) {
		return fmt.Errorf("StringToFloatMap this(%v) Not Equal that(%v)", len(this.StringToFloatMap), len(that1.StringToFloatMap))
	}
	for i := range this.StringToFloatMap {
		if this.StringToFloatMap[i] != that1.StringToFloatMap[i] {
			return fmt.Errorf("StringToFloatMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToFloatMap[i], i, that1.StringToFloatMap[i])
		}
	}
	if len(this.Int32Map) != len(that1.Int32Map) {
		return fmt.Errorf("Int32Map this(%v) Not Equal that(%v)", len(this.Int32Map), len(that1.Int32Map))
	}
	for i := range this.Int32Map {
		if this.Int32Map[i] != that1.Int32Map[i] {
			return fmt.Errorf("Int32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Int32Map[i], i, that1.Int32Map[i])
		}
	}
	if len(this.Int64Map) != len(that1.Int64Map) {
		return fmt.Errorf("Int64Map this(%v) Not Equal that(%v)", len(this.Int64Map), len(that1.Int64Map))
	}
	for i := range this.Int64Map {
		if this.Int64Map[i] != that1.Int64Map[i] {
			return fmt.Errorf("Int64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Int64Map[i], i, that1.Int64Map[i])
		}
	}
	if len(this.Uint32Map) != len(that1.Uint32Map) {
		return fmt.Errorf("Uint32Map this(%v) Not Equal that(%v)", len(this.Uint32Map), len(that1.Uint32Map))
	}
	for i := range this.Uint32Map {
		if this.Uint32Map[i] != that1.Uint32Map[i] {
			return fmt.Errorf("Uint32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Uint32Map[i], i, that1.Uint32Map[i])
		}
	}
	if len(this.Uint64Map) != len(that1.Uint64Map) {
		return fmt.Errorf("Uint64Map this(%v) Not Equal that(%v)", len(this.Uint64Map), len(that1.Uint64Map))
	}
	for i := range this.Uint64Map {
		if this.Uint64Map[i] != that1.Uint64Map[i] {
			return fmt.Errorf("Uint64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Uint64Map[i], i, that1.Uint64Map[i])
		}
	}
	if len(this.Sint32Map) != len(that1.Sint32Map) {
		return fmt.Errorf("Sint32Map this(%v) Not Equal that(%v)", len(this.Sint32Map), len(that1.Sint32Map))
	}
	for i := range this.Sint32Map {
		if this.Sint32Map[i] != that1.Sint32Map[i] {
			return fmt.Errorf("Sint32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sint32Map[i], i, that1.Sint32Map[i])
		}
	}
	if len(this.Sint64Map) != len(that1.Sint64Map) {
		return fmt.Errorf("Sint64Map this(%v) Not Equal that(%v)", len(this.Sint64Map), len(that1.Sint64Map))
	}
	for i := range this.Sint64Map {
		if this.Sint64Map[i] != that1.Sint64Map[i] {
			return fmt.Errorf("Sint64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sint64Map[i], i, that1.Sint64Map[i])
		}
	}
	if len(this.Fixed32Map) != len(that1.Fixed32Map) {
		return fmt.Errorf("Fixed32Map this(%v) Not Equal that(%v)", len(this.Fixed32Map), len(that1.Fixed32Map))
	}
	for i := range this.Fixed32Map {
		if this.Fixed32Map[i] != that1.Fixed32Map[i] {
			return fmt.Errorf("Fixed32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Fixed32Map[i], i, that1.Fixed32Map[i])
		}
	}
	if len(this.Sfixed32Map) != len(that1.Sfixed32Map) {
		return fmt.Errorf("Sfixed32Map this(%v) Not Equal that(%v)", len(this.Sfixed32Map), len(that1.Sfixed32Map))
	}
	for i := range this.Sfixed32Map {
		if this.Sfixed32Map[i] != that1.Sfixed32Map[i] {
			return fmt.Errorf("Sfixed32Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sfixed32Map[i], i, that1.Sfixed32Map[i])
		}
	}
	if len(this.Fixed64Map) != len(that1.Fixed64Map) {
		return fmt.Errorf("Fixed64Map this(%v) Not Equal that(%v)", len(this.Fixed64Map), len(that1.Fixed64Map))
	}
	for i := range this.Fixed64Map {
		if this.Fixed64Map[i] != that1.Fixed64Map[i] {
			return fmt.Errorf("Fixed64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Fixed64Map[i], i, that1.Fixed64Map[i])
		}
	}
	if len(this.Sfixed64Map) != len(that1.Sfixed64Map) {
		return fmt.Errorf("Sfixed64Map this(%v) Not Equal that(%v)", len(this.Sfixed64Map), len(that1.Sfixed64Map))
	}
	for i := range this.Sfixed64Map {
		if this.Sfixed64Map[i] != that1.Sfixed64Map[i] {
			return fmt.Errorf("Sfixed64Map this[%v](%v) Not Equal that[%v](%v)", i, this.Sfixed64Map[i], i, that1.Sfixed64Map[i])
		}
	}
	if len(this.BoolMap) != len(that1.BoolMap) {
		return fmt.Errorf("BoolMap this(%v) Not Equal that(%v)", len(this.BoolMap), len(that1.BoolMap))
	}
	for i := range this.BoolMap {
		if this.BoolMap[i] != that1.BoolMap[i] {
			return fmt.Errorf("BoolMap this[%v](%v) Not Equal that[%v](%v)", i, this.BoolMap[i], i, that1.BoolMap[i])
		}
	}
	if len(this.StringMap) != len(that1.StringMap) {
		return fmt.Errorf("StringMap this(%v) Not Equal that(%v)", len(this.StringMap), len(that1.StringMap))
	}
	for i := range this.StringMap {
		if this.StringMap[i] != that1.StringMap[i] {
			return fmt.Errorf("StringMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringMap[i], i, that1.StringMap[i])
		}
	}
	if len(this.StringToBytesMap) != len(that1.StringToBytesMap) {
		return fmt.Errorf("StringToBytesMap this(%v) Not Equal that(%v)", len(this.StringToBytesMap), len(that1.StringToBytesMap))
	}
	// Byte-slice values need bytes.Equal; == is not defined for slices.
	for i := range this.StringToBytesMap {
		if !bytes.Equal(this.StringToBytesMap[i], that1.StringToBytesMap[i]) {
			return fmt.Errorf("StringToBytesMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToBytesMap[i], i, that1.StringToBytesMap[i])
		}
	}
	if len(this.StringToEnumMap) != len(that1.StringToEnumMap) {
		return fmt.Errorf("StringToEnumMap this(%v) Not Equal that(%v)", len(this.StringToEnumMap), len(that1.StringToEnumMap))
	}
	for i := range this.StringToEnumMap {
		if this.StringToEnumMap[i] != that1.StringToEnumMap[i] {
			return fmt.Errorf("StringToEnumMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToEnumMap[i], i, that1.StringToEnumMap[i])
		}
	}
	if len(this.StringToMsgMap) != len(that1.StringToMsgMap) {
		return fmt.Errorf("StringToMsgMap this(%v) Not Equal that(%v)", len(this.StringToMsgMap), len(that1.StringToMsgMap))
	}
	// Message values delegate to their own Equal method.
	for i := range this.StringToMsgMap {
		if !this.StringToMsgMap[i].Equal(that1.StringToMsgMap[i]) {
			return fmt.Errorf("StringToMsgMap this[%v](%v) Not Equal that[%v](%v)", i, this.StringToMsgMap[i], i, that1.StringToMsgMap[i])
		}
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that are deeply equal. It is the
// boolean counterpart of VerboseEqual: same field-by-field comparison,
// but returns false instead of a descriptive error. A nil receiver
// equals a nil (or typed-nil pointer) argument; that may be either
// *AllMapsOrdered or AllMapsOrdered.
//
// NOTE(review): machine-generated (gogo/protobuf "equal" plugin);
// prefer regenerating over hand-editing.
func (this *AllMapsOrdered) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*AllMapsOrdered)
	if !ok {
		that2, ok := that.(AllMapsOrdered)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if len(this.StringToDoubleMap) != len(that1.StringToDoubleMap) {
		return false
	}
	for i := range this.StringToDoubleMap {
		if this.StringToDoubleMap[i] != that1.StringToDoubleMap[i] {
			return false
		}
	}
	if len(this.StringToFloatMap) != len(that1.StringToFloatMap) {
		return false
	}
	for i := range this.StringToFloatMap {
		if this.StringToFloatMap[i] != that1.StringToFloatMap[i] {
			return false
		}
	}
	if len(this.Int32Map) != len(that1.Int32Map) {
		return false
	}
	for i := range this.Int32Map {
		if this.Int32Map[i] != that1.Int32Map[i] {
			return false
		}
	}
	if len(this.Int64Map) != len(that1.Int64Map) {
		return false
	}
	for i := range this.Int64Map {
		if this.Int64Map[i] != that1.Int64Map[i] {
			return false
		}
	}
	if len(this.Uint32Map) != len(that1.Uint32Map) {
		return false
	}
	for i := range this.Uint32Map {
		if this.Uint32Map[i] != that1.Uint32Map[i] {
			return false
		}
	}
	if len(this.Uint64Map) != len(that1.Uint64Map) {
		return false
	}
	for i := range this.Uint64Map {
		if this.Uint64Map[i] != that1.Uint64Map[i] {
			return false
		}
	}
	if len(this.Sint32Map) != len(that1.Sint32Map) {
		return false
	}
	for i := range this.Sint32Map {
		if this.Sint32Map[i] != that1.Sint32Map[i] {
			return false
		}
	}
	if len(this.Sint64Map) != len(that1.Sint64Map) {
		return false
	}
	for i := range this.Sint64Map {
		if this.Sint64Map[i] != that1.Sint64Map[i] {
			return false
		}
	}
	if len(this.Fixed32Map) != len(that1.Fixed32Map) {
		return false
	}
	for i := range this.Fixed32Map {
		if this.Fixed32Map[i] != that1.Fixed32Map[i] {
			return false
		}
	}
	if len(this.Sfixed32Map) != len(that1.Sfixed32Map) {
		return false
	}
	for i := range this.Sfixed32Map {
		if this.Sfixed32Map[i] != that1.Sfixed32Map[i] {
			return false
		}
	}
	if len(this.Fixed64Map) != len(that1.Fixed64Map) {
		return false
	}
	for i := range this.Fixed64Map {
		if this.Fixed64Map[i] != that1.Fixed64Map[i] {
			return false
		}
	}
	if len(this.Sfixed64Map) != len(that1.Sfixed64Map) {
		return false
	}
	for i := range this.Sfixed64Map {
		if this.Sfixed64Map[i] != that1.Sfixed64Map[i] {
			return false
		}
	}
	if len(this.BoolMap) != len(that1.BoolMap) {
		return false
	}
	for i := range this.BoolMap {
		if this.BoolMap[i] != that1.BoolMap[i] {
			return false
		}
	}
	if len(this.StringMap) != len(that1.StringMap) {
		return false
	}
	for i := range this.StringMap {
		if this.StringMap[i] != that1.StringMap[i] {
			return false
		}
	}
	if len(this.StringToBytesMap) != len(that1.StringToBytesMap) {
		return false
	}
	// Byte-slice values need bytes.Equal; == is not defined for slices.
	for i := range this.StringToBytesMap {
		if !bytes.Equal(this.StringToBytesMap[i], that1.StringToBytesMap[i]) {
			return false
		}
	}
	if len(this.StringToEnumMap) != len(that1.StringToEnumMap) {
		return false
	}
	for i := range this.StringToEnumMap {
		if this.StringToEnumMap[i] != that1.StringToEnumMap[i] {
			return false
		}
	}
	if len(this.StringToMsgMap) != len(that1.StringToMsgMap) {
		return false
	}
	// Message values delegate to their own Equal method.
	for i := range this.StringToMsgMap {
		if !this.StringToMsgMap[i].Equal(that1.StringToMsgMap[i]) {
			return false
		}
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return false
	}
	return true
}
// VerboseEqual reports whether this and that are deeply equal, returning
// nil on equality and an error naming the first differing field
// otherwise. A nil receiver equals a nil (or typed-nil pointer)
// argument; that may be either *MessageWithMap or MessageWithMap.
func (this *MessageWithMap) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*MessageWithMap)
	if !ok {
		that2, ok := that.(MessageWithMap)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *MessageWithMap")
		}
	}
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *MessageWithMap but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *MessageWithMap but is not nil && this == nil")
	}
	if len(this.NameMapping) != len(that1.NameMapping) {
		return fmt.Errorf("NameMapping this(%v) Not Equal that(%v)", len(this.NameMapping), len(that1.NameMapping))
	}
	for i := range this.NameMapping {
		if this.NameMapping[i] != that1.NameMapping[i] {
			return fmt.Errorf("NameMapping this[%v](%v) Not Equal that[%v](%v)", i, this.NameMapping[i], i, that1.NameMapping[i])
		}
	}
	if len(this.MsgMapping) != len(that1.MsgMapping) {
		return fmt.Errorf("MsgMapping this(%v) Not Equal that(%v)", len(this.MsgMapping), len(that1.MsgMapping))
	}
	// Message values delegate to their own Equal method.
	for i := range this.MsgMapping {
		if !this.MsgMapping[i].Equal(that1.MsgMapping[i]) {
			return fmt.Errorf("MsgMapping this[%v](%v) Not Equal that[%v](%v)", i, this.MsgMapping[i], i, that1.MsgMapping[i])
		}
	}
	if len(this.ByteMapping) != len(that1.ByteMapping) {
		return fmt.Errorf("ByteMapping this(%v) Not Equal that(%v)", len(this.ByteMapping), len(that1.ByteMapping))
	}
	// Byte-slice values need bytes.Equal; == is not defined for slices.
	for i := range this.ByteMapping {
		if !bytes.Equal(this.ByteMapping[i], that1.ByteMapping[i]) {
			return fmt.Errorf("ByteMapping this[%v](%v) Not Equal that[%v](%v)", i, this.ByteMapping[i], i, that1.ByteMapping[i])
		}
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that are deeply equal; the boolean
// counterpart of VerboseEqual. A nil receiver equals a nil (or
// typed-nil pointer) argument; that may be either *MessageWithMap or
// MessageWithMap.
func (this *MessageWithMap) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*MessageWithMap)
	if !ok {
		that2, ok := that.(MessageWithMap)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if len(this.NameMapping) != len(that1.NameMapping) {
		return false
	}
	for i := range this.NameMapping {
		if this.NameMapping[i] != that1.NameMapping[i] {
			return false
		}
	}
	if len(this.MsgMapping) != len(that1.MsgMapping) {
		return false
	}
	// Message values delegate to their own Equal method.
	for i := range this.MsgMapping {
		if !this.MsgMapping[i].Equal(that1.MsgMapping[i]) {
			return false
		}
	}
	if len(this.ByteMapping) != len(that1.ByteMapping) {
		return false
	}
	// Byte-slice values need bytes.Equal; == is not defined for slices.
	for i := range this.ByteMapping {
		if !bytes.Equal(this.ByteMapping[i], that1.ByteMapping[i]) {
			return false
		}
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return false
	}
	return true
}
// VerboseEqual reports whether this and that are equal, returning nil on
// equality and an error naming the first differing field otherwise.
// A nil receiver equals a nil (or typed-nil pointer) argument; that may
// be either *FloatingPoint or FloatingPoint.
//
// NOTE(review): F is compared with !=, so NaN values never compare
// equal — this is the generated plugin's float semantics.
func (this *FloatingPoint) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*FloatingPoint)
	if !ok {
		that2, ok := that.(FloatingPoint)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *FloatingPoint")
		}
	}
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *FloatingPoint but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *FloatingPoint but is not nil && this == nil")
	}
	if this.F != that1.F {
		return fmt.Errorf("F this(%v) Not Equal that(%v)", this.F, that1.F)
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that are equal; the boolean
// counterpart of VerboseEqual. A nil receiver equals a nil (or
// typed-nil pointer) argument; that may be either *FloatingPoint or
// FloatingPoint.
func (this *FloatingPoint) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize the argument to a pointer, accepting value or pointer form.
	target, ok := that.(*FloatingPoint)
	if !ok {
		byValue, isValue := that.(FloatingPoint)
		if !isValue {
			return false
		}
		target = &byValue
	}
	switch {
	case target == nil:
		return this == nil
	case this == nil:
		return false
	}
	if this.F != target.F {
		return false
	}
	// Unknown wire-format bytes must match too.
	return bytes.Equal(this.XXX_unrecognized, target.XXX_unrecognized)
}
// VerboseEqual reports whether this and that are equal, returning nil on
// equality and an error naming the first differing field otherwise.
// A nil receiver equals a nil (or typed-nil pointer) argument; that may
// be either *Uint128Pair or Uint128Pair.
//
// Left is a value-type custom field compared via its Equal method;
// Right is a pointer, so nil-ness is checked before dereferencing for
// the comparison.
func (this *Uint128Pair) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*Uint128Pair)
	if !ok {
		that2, ok := that.(Uint128Pair)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *Uint128Pair")
		}
	}
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *Uint128Pair but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *Uint128Pair but is not nil && this == nil")
	}
	if !this.Left.Equal(that1.Left) {
		return fmt.Errorf("Left this(%v) Not Equal that(%v)", this.Left, that1.Left)
	}
	// NOTE(review): when that1.Right != nil and this.Right == nil, this
	// calls Equal on a nil *Uint128 receiver — presumably the custom
	// type tolerates that; verify against the custom type's Equal.
	if that1.Right == nil {
		if this.Right != nil {
			return fmt.Errorf("this.Right != nil && that1.Right == nil")
		}
	} else if !this.Right.Equal(*that1.Right) {
		return fmt.Errorf("Right this(%v) Not Equal that(%v)", this.Right, that1.Right)
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that are equal; the boolean
// counterpart of VerboseEqual. A nil receiver equals a nil (or
// typed-nil pointer) argument; that may be either *Uint128Pair or
// Uint128Pair.
func (this *Uint128Pair) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*Uint128Pair)
	if !ok {
		that2, ok := that.(Uint128Pair)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	// Left is a value-type custom field; compare via its Equal method.
	if !this.Left.Equal(that1.Left) {
		return false
	}
	// Right is a pointer: both-nil is equal; mixed nil-ness is handled by
	// the custom type's Equal when that1.Right is non-nil.
	if that1.Right == nil {
		if this.Right != nil {
			return false
		}
	} else if !this.Right.Equal(*that1.Right) {
		return false
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return false
	}
	return true
}
// VerboseEqual reports whether this and that are equal, returning nil on
// equality and a descriptive error otherwise. ContainsNestedMap has no
// declared fields here, so only the unknown-bytes blob is compared.
// A nil receiver equals a nil (or typed-nil pointer) argument.
func (this *ContainsNestedMap) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*ContainsNestedMap)
	if !ok {
		that2, ok := that.(ContainsNestedMap)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *ContainsNestedMap")
		}
	}
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *ContainsNestedMap but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *ContainsNestedMap but is not nil && this == nil")
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that are equal; the boolean
// counterpart of VerboseEqual. Only the unknown-bytes blob is compared,
// since no fields are declared on this message here. A nil receiver
// equals a nil (or typed-nil pointer) argument.
func (this *ContainsNestedMap) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize the argument to a pointer, accepting value or pointer form.
	other, ok := that.(*ContainsNestedMap)
	if !ok {
		byValue, isValue := that.(ContainsNestedMap)
		if !isValue {
			return false
		}
		other = &byValue
	}
	switch {
	case other == nil:
		return this == nil
	case this == nil:
		return false
	}
	return bytes.Equal(this.XXX_unrecognized, other.XXX_unrecognized)
}
// VerboseEqual reports whether this and that are equal, returning nil on
// equality and an error naming the first differing field otherwise.
// A nil receiver equals a nil (or typed-nil pointer) argument; that may
// be either *ContainsNestedMap_NestedMap or ContainsNestedMap_NestedMap.
func (this *ContainsNestedMap_NestedMap) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*ContainsNestedMap_NestedMap)
	if !ok {
		that2, ok := that.(ContainsNestedMap_NestedMap)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *ContainsNestedMap_NestedMap")
		}
	}
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *ContainsNestedMap_NestedMap but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *ContainsNestedMap_NestedMap but is not nil && this == nil")
	}
	// Map compared by length, then per-key; a key missing on the other
	// side reads as the zero value.
	if len(this.NestedMapField) != len(that1.NestedMapField) {
		return fmt.Errorf("NestedMapField this(%v) Not Equal that(%v)", len(this.NestedMapField), len(that1.NestedMapField))
	}
	for i := range this.NestedMapField {
		if this.NestedMapField[i] != that1.NestedMapField[i] {
			return fmt.Errorf("NestedMapField this[%v](%v) Not Equal that[%v](%v)", i, this.NestedMapField[i], i, that1.NestedMapField[i])
		}
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that are equal; the boolean
// counterpart of VerboseEqual. A nil receiver equals a nil (or
// typed-nil pointer) argument; that may be either
// *ContainsNestedMap_NestedMap or ContainsNestedMap_NestedMap.
func (this *ContainsNestedMap_NestedMap) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*ContainsNestedMap_NestedMap)
	if !ok {
		that2, ok := that.(ContainsNestedMap_NestedMap)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	// Map compared by length, then per-key; a key missing on the other
	// side reads as the zero value.
	if len(this.NestedMapField) != len(that1.NestedMapField) {
		return false
	}
	for i := range this.NestedMapField {
		if this.NestedMapField[i] != that1.NestedMapField[i] {
			return false
		}
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return false
	}
	return true
}
// VerboseEqual reports whether this and that are equal, returning nil on
// equality and an error naming the first differing element otherwise.
// Key is a repeated field compared element-by-element, so order matters.
// A nil receiver equals a nil (or typed-nil pointer) argument.
func (this *NotPacked) VerboseEqual(that interface{}) error {
	if that == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	}
	// Accept either a pointer or a value of the concrete type.
	that1, ok := that.(*NotPacked)
	if !ok {
		that2, ok := that.(NotPacked)
		if ok {
			that1 = &that2
		} else {
			return fmt.Errorf("that is not of type *NotPacked")
		}
	}
	if that1 == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *NotPacked but is nil && this != nil")
	} else if this == nil {
		return fmt.Errorf("that is type *NotPacked but is not nil && this == nil")
	}
	if len(this.Key) != len(that1.Key) {
		return fmt.Errorf("Key this(%v) Not Equal that(%v)", len(this.Key), len(that1.Key))
	}
	for i := range this.Key {
		if this.Key[i] != that1.Key[i] {
			return fmt.Errorf("Key this[%v](%v) Not Equal that[%v](%v)", i, this.Key[i], i, that1.Key[i])
		}
	}
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
	}
	return nil
}
// Equal reports whether this and that are equal; the boolean
// counterpart of VerboseEqual. The repeated Key field is compared
// element-by-element, so order matters. A nil receiver equals a nil
// (or typed-nil pointer) argument.
func (this *NotPacked) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize the argument to a pointer, accepting value or pointer form.
	other, ok := that.(*NotPacked)
	if !ok {
		byValue, isValue := that.(NotPacked)
		if !isValue {
			return false
		}
		other = &byValue
	}
	switch {
	case other == nil:
		return this == nil
	case this == nil:
		return false
	}
	if len(this.Key) != len(other.Key) {
		return false
	}
	for i, k := range this.Key {
		if k != other.Key[i] {
			return false
		}
	}
	return bytes.Equal(this.XXX_unrecognized, other.XXX_unrecognized)
}
// MessageFace is a getter-based view of Message, used by
// NewMessageFromFace to copy a Message from any implementation.
// NOTE(review): this block looks machine-generated (gogo/protobuf
// "face" plugin); prefer regenerating over hand-editing.
type MessageFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetName() string
	GetHilarity() Message_Humour
	GetHeightInCm() uint32
	GetData() []byte
	GetResultCount() int64
	GetTrueScotsman() bool
	GetScore() float32
	GetKey() []uint64
	GetNested() *Nested
	GetTerrain() map[int64]*Nested
	GetProto2Field() *both.NinOptNative
	GetProto2Value() map[int64]*both.NinOptEnum
}

// Proto returns the message itself as a proto.Message.
func (this *Message) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *Message) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewMessageFromFace(this)
}

func (this *Message) GetName() string {
	return this.Name
}

func (this *Message) GetHilarity() Message_Humour {
	return this.Hilarity
}

func (this *Message) GetHeightInCm() uint32 {
	return this.HeightInCm
}

func (this *Message) GetData() []byte {
	return this.Data
}

func (this *Message) GetResultCount() int64 {
	return this.ResultCount
}

func (this *Message) GetTrueScotsman() bool {
	return this.TrueScotsman
}

func (this *Message) GetScore() float32 {
	return this.Score
}

func (this *Message) GetKey() []uint64 {
	return this.Key
}

func (this *Message) GetNested() *Nested {
	return this.Nested
}

func (this *Message) GetTerrain() map[int64]*Nested {
	return this.Terrain
}

func (this *Message) GetProto2Field() *both.NinOptNative {
	return this.Proto2Field
}

func (this *Message) GetProto2Value() map[int64]*both.NinOptEnum {
	return this.Proto2Value
}

// NewMessageFromFace builds a new Message by reading every field
// through the MessageFace getters. This is a shallow copy: slice, map,
// and pointer fields are shared with the source, not cloned.
func NewMessageFromFace(that MessageFace) *Message {
	this := &Message{}
	this.Name = that.GetName()
	this.Hilarity = that.GetHilarity()
	this.HeightInCm = that.GetHeightInCm()
	this.Data = that.GetData()
	this.ResultCount = that.GetResultCount()
	this.TrueScotsman = that.GetTrueScotsman()
	this.Score = that.GetScore()
	this.Key = that.GetKey()
	this.Nested = that.GetNested()
	this.Terrain = that.GetTerrain()
	this.Proto2Field = that.GetProto2Field()
	this.Proto2Value = that.GetProto2Value()
	return this
}
// NestedFace is a getter-based view of Nested, used by
// NewNestedFromFace to copy a Nested from any implementation.
type NestedFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetBunny() string
}

// Proto returns the message itself as a proto.Message.
func (this *Nested) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *Nested) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewNestedFromFace(this)
}

func (this *Nested) GetBunny() string {
	return this.Bunny
}

// NewNestedFromFace builds a new Nested by reading its fields through
// the NestedFace getters.
func NewNestedFromFace(that NestedFace) *Nested {
	this := &Nested{}
	this.Bunny = that.GetBunny()
	return this
}
// AllMapsFace is a getter-based view of AllMaps, used by
// NewAllMapsFromFace to copy an AllMaps from any implementation.
type AllMapsFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetStringToDoubleMap() map[string]float64
	GetStringToFloatMap() map[string]float32
	GetInt32Map() map[int32]int32
	GetInt64Map() map[int64]int64
	GetUint32Map() map[uint32]uint32
	GetUint64Map() map[uint64]uint64
	GetSint32Map() map[int32]int32
	GetSint64Map() map[int64]int64
	GetFixed32Map() map[uint32]uint32
	GetSfixed32Map() map[int32]int32
	GetFixed64Map() map[uint64]uint64
	GetSfixed64Map() map[int64]int64
	GetBoolMap() map[bool]bool
	GetStringMap() map[string]string
	GetStringToBytesMap() map[string][]byte
	GetStringToEnumMap() map[string]MapEnum
	GetStringToMsgMap() map[string]*FloatingPoint
}

// Proto returns the message itself as a proto.Message.
func (this *AllMaps) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *AllMaps) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewAllMapsFromFace(this)
}

func (this *AllMaps) GetStringToDoubleMap() map[string]float64 {
	return this.StringToDoubleMap
}

func (this *AllMaps) GetStringToFloatMap() map[string]float32 {
	return this.StringToFloatMap
}

func (this *AllMaps) GetInt32Map() map[int32]int32 {
	return this.Int32Map
}

func (this *AllMaps) GetInt64Map() map[int64]int64 {
	return this.Int64Map
}

func (this *AllMaps) GetUint32Map() map[uint32]uint32 {
	return this.Uint32Map
}

func (this *AllMaps) GetUint64Map() map[uint64]uint64 {
	return this.Uint64Map
}

func (this *AllMaps) GetSint32Map() map[int32]int32 {
	return this.Sint32Map
}

func (this *AllMaps) GetSint64Map() map[int64]int64 {
	return this.Sint64Map
}

func (this *AllMaps) GetFixed32Map() map[uint32]uint32 {
	return this.Fixed32Map
}

func (this *AllMaps) GetSfixed32Map() map[int32]int32 {
	return this.Sfixed32Map
}

func (this *AllMaps) GetFixed64Map() map[uint64]uint64 {
	return this.Fixed64Map
}

func (this *AllMaps) GetSfixed64Map() map[int64]int64 {
	return this.Sfixed64Map
}

func (this *AllMaps) GetBoolMap() map[bool]bool {
	return this.BoolMap
}

func (this *AllMaps) GetStringMap() map[string]string {
	return this.StringMap
}

func (this *AllMaps) GetStringToBytesMap() map[string][]byte {
	return this.StringToBytesMap
}

func (this *AllMaps) GetStringToEnumMap() map[string]MapEnum {
	return this.StringToEnumMap
}

func (this *AllMaps) GetStringToMsgMap() map[string]*FloatingPoint {
	return this.StringToMsgMap
}

// NewAllMapsFromFace builds a new AllMaps by reading every field
// through the AllMapsFace getters. This is a shallow copy: the map
// fields are shared with the source, not cloned.
func NewAllMapsFromFace(that AllMapsFace) *AllMaps {
	this := &AllMaps{}
	this.StringToDoubleMap = that.GetStringToDoubleMap()
	this.StringToFloatMap = that.GetStringToFloatMap()
	this.Int32Map = that.GetInt32Map()
	this.Int64Map = that.GetInt64Map()
	this.Uint32Map = that.GetUint32Map()
	this.Uint64Map = that.GetUint64Map()
	this.Sint32Map = that.GetSint32Map()
	this.Sint64Map = that.GetSint64Map()
	this.Fixed32Map = that.GetFixed32Map()
	this.Sfixed32Map = that.GetSfixed32Map()
	this.Fixed64Map = that.GetFixed64Map()
	this.Sfixed64Map = that.GetSfixed64Map()
	this.BoolMap = that.GetBoolMap()
	this.StringMap = that.GetStringMap()
	this.StringToBytesMap = that.GetStringToBytesMap()
	this.StringToEnumMap = that.GetStringToEnumMap()
	this.StringToMsgMap = that.GetStringToMsgMap()
	return this
}
// AllMapsOrderedFace is a getter-based view of AllMapsOrdered, used by
// NewAllMapsOrderedFromFace to copy an AllMapsOrdered from any
// implementation.
type AllMapsOrderedFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetStringToDoubleMap() map[string]float64
	GetStringToFloatMap() map[string]float32
	GetInt32Map() map[int32]int32
	GetInt64Map() map[int64]int64
	GetUint32Map() map[uint32]uint32
	GetUint64Map() map[uint64]uint64
	GetSint32Map() map[int32]int32
	GetSint64Map() map[int64]int64
	GetFixed32Map() map[uint32]uint32
	GetSfixed32Map() map[int32]int32
	GetFixed64Map() map[uint64]uint64
	GetSfixed64Map() map[int64]int64
	GetBoolMap() map[bool]bool
	GetStringMap() map[string]string
	GetStringToBytesMap() map[string][]byte
	GetStringToEnumMap() map[string]MapEnum
	GetStringToMsgMap() map[string]*FloatingPoint
}

// Proto returns the message itself as a proto.Message.
func (this *AllMapsOrdered) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *AllMapsOrdered) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewAllMapsOrderedFromFace(this)
}

func (this *AllMapsOrdered) GetStringToDoubleMap() map[string]float64 {
	return this.StringToDoubleMap
}

func (this *AllMapsOrdered) GetStringToFloatMap() map[string]float32 {
	return this.StringToFloatMap
}

func (this *AllMapsOrdered) GetInt32Map() map[int32]int32 {
	return this.Int32Map
}

func (this *AllMapsOrdered) GetInt64Map() map[int64]int64 {
	return this.Int64Map
}

func (this *AllMapsOrdered) GetUint32Map() map[uint32]uint32 {
	return this.Uint32Map
}

func (this *AllMapsOrdered) GetUint64Map() map[uint64]uint64 {
	return this.Uint64Map
}

func (this *AllMapsOrdered) GetSint32Map() map[int32]int32 {
	return this.Sint32Map
}

func (this *AllMapsOrdered) GetSint64Map() map[int64]int64 {
	return this.Sint64Map
}

func (this *AllMapsOrdered) GetFixed32Map() map[uint32]uint32 {
	return this.Fixed32Map
}

func (this *AllMapsOrdered) GetSfixed32Map() map[int32]int32 {
	return this.Sfixed32Map
}

func (this *AllMapsOrdered) GetFixed64Map() map[uint64]uint64 {
	return this.Fixed64Map
}

func (this *AllMapsOrdered) GetSfixed64Map() map[int64]int64 {
	return this.Sfixed64Map
}

func (this *AllMapsOrdered) GetBoolMap() map[bool]bool {
	return this.BoolMap
}

func (this *AllMapsOrdered) GetStringMap() map[string]string {
	return this.StringMap
}

func (this *AllMapsOrdered) GetStringToBytesMap() map[string][]byte {
	return this.StringToBytesMap
}

func (this *AllMapsOrdered) GetStringToEnumMap() map[string]MapEnum {
	return this.StringToEnumMap
}

func (this *AllMapsOrdered) GetStringToMsgMap() map[string]*FloatingPoint {
	return this.StringToMsgMap
}

// NewAllMapsOrderedFromFace builds a new AllMapsOrdered by reading
// every field through the AllMapsOrderedFace getters. This is a shallow
// copy: the map fields are shared with the source, not cloned.
func NewAllMapsOrderedFromFace(that AllMapsOrderedFace) *AllMapsOrdered {
	this := &AllMapsOrdered{}
	this.StringToDoubleMap = that.GetStringToDoubleMap()
	this.StringToFloatMap = that.GetStringToFloatMap()
	this.Int32Map = that.GetInt32Map()
	this.Int64Map = that.GetInt64Map()
	this.Uint32Map = that.GetUint32Map()
	this.Uint64Map = that.GetUint64Map()
	this.Sint32Map = that.GetSint32Map()
	this.Sint64Map = that.GetSint64Map()
	this.Fixed32Map = that.GetFixed32Map()
	this.Sfixed32Map = that.GetSfixed32Map()
	this.Fixed64Map = that.GetFixed64Map()
	this.Sfixed64Map = that.GetSfixed64Map()
	this.BoolMap = that.GetBoolMap()
	this.StringMap = that.GetStringMap()
	this.StringToBytesMap = that.GetStringToBytesMap()
	this.StringToEnumMap = that.GetStringToEnumMap()
	this.StringToMsgMap = that.GetStringToMsgMap()
	return this
}
// MessageWithMapFace is a getter-based view of MessageWithMap, used by
// NewMessageWithMapFromFace to copy a MessageWithMap from any
// implementation.
type MessageWithMapFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetNameMapping() map[int32]string
	GetMsgMapping() map[int64]*FloatingPoint
	GetByteMapping() map[bool][]byte
}

// Proto returns the message itself as a proto.Message.
func (this *MessageWithMap) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *MessageWithMap) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewMessageWithMapFromFace(this)
}

func (this *MessageWithMap) GetNameMapping() map[int32]string {
	return this.NameMapping
}

func (this *MessageWithMap) GetMsgMapping() map[int64]*FloatingPoint {
	return this.MsgMapping
}

func (this *MessageWithMap) GetByteMapping() map[bool][]byte {
	return this.ByteMapping
}

// NewMessageWithMapFromFace builds a new MessageWithMap by reading
// every field through the MessageWithMapFace getters. This is a shallow
// copy: the map fields are shared with the source, not cloned.
func NewMessageWithMapFromFace(that MessageWithMapFace) *MessageWithMap {
	this := &MessageWithMap{}
	this.NameMapping = that.GetNameMapping()
	this.MsgMapping = that.GetMsgMapping()
	this.ByteMapping = that.GetByteMapping()
	return this
}
// FloatingPointFace is a getter-based view of FloatingPoint, used by
// NewFloatingPointFromFace to copy a FloatingPoint from any
// implementation.
type FloatingPointFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetF() float64
}

// Proto returns the message itself as a proto.Message.
func (this *FloatingPoint) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *FloatingPoint) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewFloatingPointFromFace(this)
}

func (this *FloatingPoint) GetF() float64 {
	return this.F
}

// NewFloatingPointFromFace builds a new FloatingPoint by reading its
// field through the FloatingPointFace getter.
func NewFloatingPointFromFace(that FloatingPointFace) *FloatingPoint {
	this := &FloatingPoint{}
	this.F = that.GetF()
	return this
}
// Uint128PairFace is a getter-based view of Uint128Pair, used by
// NewUint128PairFromFace to copy a Uint128Pair from any implementation.
type Uint128PairFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetLeft() github_com_gogo_protobuf_test_custom.Uint128
	GetRight() *github_com_gogo_protobuf_test_custom.Uint128
}

// Proto returns the message itself as a proto.Message.
func (this *Uint128Pair) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *Uint128Pair) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewUint128PairFromFace(this)
}

func (this *Uint128Pair) GetLeft() github_com_gogo_protobuf_test_custom.Uint128 {
	return this.Left
}

func (this *Uint128Pair) GetRight() *github_com_gogo_protobuf_test_custom.Uint128 {
	return this.Right
}

// NewUint128PairFromFace builds a new Uint128Pair by reading its fields
// through the Uint128PairFace getters. The Right pointer is shared with
// the source, not cloned.
func NewUint128PairFromFace(that Uint128PairFace) *Uint128Pair {
	this := &Uint128Pair{}
	this.Left = that.GetLeft()
	this.Right = that.GetRight()
	return this
}
// ContainsNestedMapFace is a getter-based view of ContainsNestedMap;
// the message declares no fields here, so the interface carries only
// the Proto accessor.
type ContainsNestedMapFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
}

// Proto returns the message itself as a proto.Message.
func (this *ContainsNestedMap) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto round-trips the message through its Face interface.
func (this *ContainsNestedMap) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewContainsNestedMapFromFace(this)
}

// NewContainsNestedMapFromFace returns a fresh, empty ContainsNestedMap
// (there are no getters to copy from).
func NewContainsNestedMapFromFace(that ContainsNestedMapFace) *ContainsNestedMap {
	this := &ContainsNestedMap{}
	return this
}
// ContainsNestedMap_NestedMapFace is the generated "Face" view of
// ContainsNestedMap_NestedMap.
type ContainsNestedMap_NestedMapFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetNestedMapField() map[string]float64
}

// Proto returns this message as a proto.Message.
func (this *ContainsNestedMap_NestedMap) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto rebuilds the message from its Face view; generated tests
// compare the result with the original.
func (this *ContainsNestedMap_NestedMap) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewContainsNestedMap_NestedMapFromFace(this)
}

// GetNestedMapField returns the NestedMapField map (not a copy).
func (this *ContainsNestedMap_NestedMap) GetNestedMapField() map[string]float64 {
	return this.NestedMapField
}
// NewContainsNestedMap_NestedMapFromFace builds a
// ContainsNestedMap_NestedMap from its Face view.
func NewContainsNestedMap_NestedMapFromFace(that ContainsNestedMap_NestedMapFace) *ContainsNestedMap_NestedMap {
	return &ContainsNestedMap_NestedMap{
		NestedMapField: that.GetNestedMapField(),
	}
}
// NotPackedFace is the generated "Face" view of NotPacked.
type NotPackedFace interface {
	Proto() github_com_gogo_protobuf_proto.Message
	GetKey() []uint64
}

// Proto returns this message as a proto.Message.
func (this *NotPacked) Proto() github_com_gogo_protobuf_proto.Message {
	return this
}

// TestProto rebuilds the message from its Face view; generated tests
// compare the result with the original.
func (this *NotPacked) TestProto() github_com_gogo_protobuf_proto.Message {
	return NewNotPackedFromFace(this)
}

// GetKey returns the Key slice (not a copy).
func (this *NotPacked) GetKey() []uint64 {
	return this.Key
}
// NewNotPackedFromFace builds a NotPacked from its Face view.
func NewNotPackedFromFace(that NotPackedFace) *NotPacked {
	return &NotPacked{
		Key: that.GetKey(),
	}
}
// GoString returns a Go-syntax representation of the message. Map entries
// are emitted in sorted key order so the output is deterministic.
func (this *Message) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.Message{"
	out += "Name: " + fmt.Sprintf("%#v", this.Name) + ",\n"
	out += "Hilarity: " + fmt.Sprintf("%#v", this.Hilarity) + ",\n"
	out += "HeightInCm: " + fmt.Sprintf("%#v", this.HeightInCm) + ",\n"
	out += "Data: " + fmt.Sprintf("%#v", this.Data) + ",\n"
	out += "ResultCount: " + fmt.Sprintf("%#v", this.ResultCount) + ",\n"
	out += "TrueScotsman: " + fmt.Sprintf("%#v", this.TrueScotsman) + ",\n"
	out += "Score: " + fmt.Sprintf("%#v", this.Score) + ",\n"
	out += "Key: " + fmt.Sprintf("%#v", this.Key) + ",\n"
	if this.Nested != nil {
		out += "Nested: " + fmt.Sprintf("%#v", this.Nested) + ",\n"
	}
	if this.Terrain != nil {
		keys := make([]int64, 0, len(this.Terrain))
		for k := range this.Terrain {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]*Nested{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Terrain[k])
		}
		out += "Terrain: " + body + "},\n"
	}
	if this.Proto2Field != nil {
		out += "Proto2Field: " + fmt.Sprintf("%#v", this.Proto2Field) + ",\n"
	}
	if this.Proto2Value != nil {
		keys := make([]int64, 0, len(this.Proto2Value))
		for k := range this.Proto2Value {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]*both.NinOptEnum{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Proto2Value[k])
		}
		out += "Proto2Value: " + body + "},\n"
	}
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message.
func (this *Nested) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.Nested{"
	out += "Bunny: " + fmt.Sprintf("%#v", this.Bunny) + ",\n"
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message. Every map
// field is rendered with its entries in sorted key order, so the output is
// deterministic; nil maps are omitted entirely.
func (this *AllMaps) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.AllMaps{"
	if this.StringToDoubleMap != nil {
		keys := make([]string, 0, len(this.StringToDoubleMap))
		for k := range this.StringToDoubleMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]float64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToDoubleMap[k])
		}
		out += "StringToDoubleMap: " + body + "},\n"
	}
	if this.StringToFloatMap != nil {
		keys := make([]string, 0, len(this.StringToFloatMap))
		for k := range this.StringToFloatMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]float32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToFloatMap[k])
		}
		out += "StringToFloatMap: " + body + "},\n"
	}
	if this.Int32Map != nil {
		keys := make([]int32, 0, len(this.Int32Map))
		for k := range this.Int32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int32s(keys)
		body := "map[int32]int32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Int32Map[k])
		}
		out += "Int32Map: " + body + "},\n"
	}
	if this.Int64Map != nil {
		keys := make([]int64, 0, len(this.Int64Map))
		for k := range this.Int64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]int64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Int64Map[k])
		}
		out += "Int64Map: " + body + "},\n"
	}
	if this.Uint32Map != nil {
		keys := make([]uint32, 0, len(this.Uint32Map))
		for k := range this.Uint32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint32s(keys)
		body := "map[uint32]uint32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Uint32Map[k])
		}
		out += "Uint32Map: " + body + "},\n"
	}
	if this.Uint64Map != nil {
		keys := make([]uint64, 0, len(this.Uint64Map))
		for k := range this.Uint64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint64s(keys)
		body := "map[uint64]uint64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Uint64Map[k])
		}
		out += "Uint64Map: " + body + "},\n"
	}
	if this.Sint32Map != nil {
		keys := make([]int32, 0, len(this.Sint32Map))
		for k := range this.Sint32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int32s(keys)
		body := "map[int32]int32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sint32Map[k])
		}
		out += "Sint32Map: " + body + "},\n"
	}
	if this.Sint64Map != nil {
		keys := make([]int64, 0, len(this.Sint64Map))
		for k := range this.Sint64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]int64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sint64Map[k])
		}
		out += "Sint64Map: " + body + "},\n"
	}
	if this.Fixed32Map != nil {
		keys := make([]uint32, 0, len(this.Fixed32Map))
		for k := range this.Fixed32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint32s(keys)
		body := "map[uint32]uint32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Fixed32Map[k])
		}
		out += "Fixed32Map: " + body + "},\n"
	}
	if this.Sfixed32Map != nil {
		keys := make([]int32, 0, len(this.Sfixed32Map))
		for k := range this.Sfixed32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int32s(keys)
		body := "map[int32]int32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sfixed32Map[k])
		}
		out += "Sfixed32Map: " + body + "},\n"
	}
	if this.Fixed64Map != nil {
		keys := make([]uint64, 0, len(this.Fixed64Map))
		for k := range this.Fixed64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint64s(keys)
		body := "map[uint64]uint64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Fixed64Map[k])
		}
		out += "Fixed64Map: " + body + "},\n"
	}
	if this.Sfixed64Map != nil {
		keys := make([]int64, 0, len(this.Sfixed64Map))
		for k := range this.Sfixed64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]int64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sfixed64Map[k])
		}
		out += "Sfixed64Map: " + body + "},\n"
	}
	if this.BoolMap != nil {
		keys := make([]bool, 0, len(this.BoolMap))
		for k := range this.BoolMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Bools(keys)
		body := "map[bool]bool{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.BoolMap[k])
		}
		out += "BoolMap: " + body + "},\n"
	}
	if this.StringMap != nil {
		keys := make([]string, 0, len(this.StringMap))
		for k := range this.StringMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]string{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringMap[k])
		}
		out += "StringMap: " + body + "},\n"
	}
	if this.StringToBytesMap != nil {
		keys := make([]string, 0, len(this.StringToBytesMap))
		for k := range this.StringToBytesMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string][]byte{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToBytesMap[k])
		}
		out += "StringToBytesMap: " + body + "},\n"
	}
	if this.StringToEnumMap != nil {
		keys := make([]string, 0, len(this.StringToEnumMap))
		for k := range this.StringToEnumMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]MapEnum{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToEnumMap[k])
		}
		out += "StringToEnumMap: " + body + "},\n"
	}
	if this.StringToMsgMap != nil {
		keys := make([]string, 0, len(this.StringToMsgMap))
		for k := range this.StringToMsgMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]*FloatingPoint{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToMsgMap[k])
		}
		out += "StringToMsgMap: " + body + "},\n"
	}
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message. Every map
// field is rendered with its entries in sorted key order, so the output is
// deterministic; nil maps are omitted entirely.
func (this *AllMapsOrdered) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.AllMapsOrdered{"
	if this.StringToDoubleMap != nil {
		keys := make([]string, 0, len(this.StringToDoubleMap))
		for k := range this.StringToDoubleMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]float64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToDoubleMap[k])
		}
		out += "StringToDoubleMap: " + body + "},\n"
	}
	if this.StringToFloatMap != nil {
		keys := make([]string, 0, len(this.StringToFloatMap))
		for k := range this.StringToFloatMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]float32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToFloatMap[k])
		}
		out += "StringToFloatMap: " + body + "},\n"
	}
	if this.Int32Map != nil {
		keys := make([]int32, 0, len(this.Int32Map))
		for k := range this.Int32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int32s(keys)
		body := "map[int32]int32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Int32Map[k])
		}
		out += "Int32Map: " + body + "},\n"
	}
	if this.Int64Map != nil {
		keys := make([]int64, 0, len(this.Int64Map))
		for k := range this.Int64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]int64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Int64Map[k])
		}
		out += "Int64Map: " + body + "},\n"
	}
	if this.Uint32Map != nil {
		keys := make([]uint32, 0, len(this.Uint32Map))
		for k := range this.Uint32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint32s(keys)
		body := "map[uint32]uint32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Uint32Map[k])
		}
		out += "Uint32Map: " + body + "},\n"
	}
	if this.Uint64Map != nil {
		keys := make([]uint64, 0, len(this.Uint64Map))
		for k := range this.Uint64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint64s(keys)
		body := "map[uint64]uint64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Uint64Map[k])
		}
		out += "Uint64Map: " + body + "},\n"
	}
	if this.Sint32Map != nil {
		keys := make([]int32, 0, len(this.Sint32Map))
		for k := range this.Sint32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int32s(keys)
		body := "map[int32]int32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sint32Map[k])
		}
		out += "Sint32Map: " + body + "},\n"
	}
	if this.Sint64Map != nil {
		keys := make([]int64, 0, len(this.Sint64Map))
		for k := range this.Sint64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]int64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sint64Map[k])
		}
		out += "Sint64Map: " + body + "},\n"
	}
	if this.Fixed32Map != nil {
		keys := make([]uint32, 0, len(this.Fixed32Map))
		for k := range this.Fixed32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint32s(keys)
		body := "map[uint32]uint32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Fixed32Map[k])
		}
		out += "Fixed32Map: " + body + "},\n"
	}
	if this.Sfixed32Map != nil {
		keys := make([]int32, 0, len(this.Sfixed32Map))
		for k := range this.Sfixed32Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int32s(keys)
		body := "map[int32]int32{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sfixed32Map[k])
		}
		out += "Sfixed32Map: " + body + "},\n"
	}
	if this.Fixed64Map != nil {
		keys := make([]uint64, 0, len(this.Fixed64Map))
		for k := range this.Fixed64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Uint64s(keys)
		body := "map[uint64]uint64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Fixed64Map[k])
		}
		out += "Fixed64Map: " + body + "},\n"
	}
	if this.Sfixed64Map != nil {
		keys := make([]int64, 0, len(this.Sfixed64Map))
		for k := range this.Sfixed64Map {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]int64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.Sfixed64Map[k])
		}
		out += "Sfixed64Map: " + body + "},\n"
	}
	if this.BoolMap != nil {
		keys := make([]bool, 0, len(this.BoolMap))
		for k := range this.BoolMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Bools(keys)
		body := "map[bool]bool{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.BoolMap[k])
		}
		out += "BoolMap: " + body + "},\n"
	}
	if this.StringMap != nil {
		keys := make([]string, 0, len(this.StringMap))
		for k := range this.StringMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]string{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringMap[k])
		}
		out += "StringMap: " + body + "},\n"
	}
	if this.StringToBytesMap != nil {
		keys := make([]string, 0, len(this.StringToBytesMap))
		for k := range this.StringToBytesMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string][]byte{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToBytesMap[k])
		}
		out += "StringToBytesMap: " + body + "},\n"
	}
	if this.StringToEnumMap != nil {
		keys := make([]string, 0, len(this.StringToEnumMap))
		for k := range this.StringToEnumMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]MapEnum{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToEnumMap[k])
		}
		out += "StringToEnumMap: " + body + "},\n"
	}
	if this.StringToMsgMap != nil {
		keys := make([]string, 0, len(this.StringToMsgMap))
		for k := range this.StringToMsgMap {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]*FloatingPoint{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.StringToMsgMap[k])
		}
		out += "StringToMsgMap: " + body + "},\n"
	}
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message; map entries
// are emitted in sorted key order for deterministic output.
func (this *MessageWithMap) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.MessageWithMap{"
	if this.NameMapping != nil {
		keys := make([]int32, 0, len(this.NameMapping))
		for k := range this.NameMapping {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int32s(keys)
		body := "map[int32]string{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.NameMapping[k])
		}
		out += "NameMapping: " + body + "},\n"
	}
	if this.MsgMapping != nil {
		keys := make([]int64, 0, len(this.MsgMapping))
		for k := range this.MsgMapping {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Int64s(keys)
		body := "map[int64]*FloatingPoint{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.MsgMapping[k])
		}
		out += "MsgMapping: " + body + "},\n"
	}
	if this.ByteMapping != nil {
		keys := make([]bool, 0, len(this.ByteMapping))
		for k := range this.ByteMapping {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Bools(keys)
		body := "map[bool][]byte{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.ByteMapping[k])
		}
		out += "ByteMapping: " + body + "},\n"
	}
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message.
func (this *FloatingPoint) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.FloatingPoint{"
	out += "F: " + fmt.Sprintf("%#v", this.F) + ",\n"
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message.
func (this *Uint128Pair) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.Uint128Pair{"
	out += "Left: " + fmt.Sprintf("%#v", this.Left) + ",\n"
	out += "Right: " + fmt.Sprintf("%#v", this.Right) + ",\n"
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message.
func (this *ContainsNestedMap) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.ContainsNestedMap{"
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message; map entries
// are emitted in sorted key order for deterministic output.
func (this *ContainsNestedMap_NestedMap) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.ContainsNestedMap_NestedMap{"
	if this.NestedMapField != nil {
		keys := make([]string, 0, len(this.NestedMapField))
		for k := range this.NestedMapField {
			keys = append(keys, k)
		}
		github_com_gogo_protobuf_sortkeys.Strings(keys)
		body := "map[string]float64{"
		for _, k := range keys {
			body += fmt.Sprintf("%#v: %#v,", k, this.NestedMapField[k])
		}
		out += "NestedMapField: " + body + "},\n"
	}
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// GoString returns a Go-syntax representation of the message.
func (this *NotPacked) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&theproto3.NotPacked{"
	out += "Key: " + fmt.Sprintf("%#v", this.Key) + ",\n"
	if this.XXX_unrecognized != nil {
		out += "XXX_unrecognized:" + fmt.Sprintf("%#v", this.XXX_unrecognized) + ",\n"
	}
	return out + "}"
}
// valueToGoStringTheproto3 renders a pointer value as a Go expression that
// recreates it: a nil pointer becomes "nil", anything else becomes an
// immediately-applied closure returning a pointer to the dereferenced value.
// v must be a nilable kind (pointer, map, slice, ...) or IsNil panics.
func valueToGoStringTheproto3(v interface{}, typ string) string {
	if reflect.ValueOf(v).IsNil() {
		return "nil"
	}
	deref := reflect.Indirect(reflect.ValueOf(v)).Interface()
	return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, deref)
}
// NewPopulatedMessage returns a Message filled with pseudo-random data
// drawn from r. The exact sequence of calls on r is the contract: callers
// rely on a given seed producing identical messages, so the order of the
// statements below must not change. When easy is false, unrecognized bytes
// may also be attached.
func NewPopulatedMessage(r randyTheproto3, easy bool) *Message {
	this := &Message{}
	this.Name = string(randStringTheproto3(r))
	this.Hilarity = Message_Humour([]int32{0, 1, 2, 3}[r.Intn(4)])
	this.HeightInCm = uint32(r.Uint32())
	v1 := r.Intn(100)
	this.Data = make([]byte, v1)
	for i := 0; i < v1; i++ {
		this.Data[i] = byte(r.Intn(256))
	}
	this.ResultCount = int64(r.Int63())
	// Negate roughly half the time so negative values are exercised.
	if r.Intn(2) == 0 {
		this.ResultCount *= -1
	}
	this.TrueScotsman = bool(bool(r.Intn(2) == 0))
	this.Score = float32(r.Float32())
	if r.Intn(2) == 0 {
		this.Score *= -1
	}
	v2 := r.Intn(10)
	this.Key = make([]uint64, v2)
	for i := 0; i < v2; i++ {
		this.Key[i] = uint64(uint64(r.Uint32()))
	}
	// Sub-messages and maps are populated 4 times out of 5.
	if r.Intn(5) != 0 {
		this.Nested = NewPopulatedNested(r, easy)
	}
	if r.Intn(5) != 0 {
		v3 := r.Intn(10)
		this.Terrain = make(map[int64]*Nested)
		for i := 0; i < v3; i++ {
			this.Terrain[int64(r.Int63())] = NewPopulatedNested(r, easy)
		}
	}
	if r.Intn(5) != 0 {
		this.Proto2Field = both.NewPopulatedNinOptNative(r, easy)
	}
	if r.Intn(5) != 0 {
		v4 := r.Intn(10)
		this.Proto2Value = make(map[int64]*both.NinOptEnum)
		for i := 0; i < v4; i++ {
			this.Proto2Value[int64(r.Int63())] = both.NewPopulatedNinOptEnum(r, easy)
		}
	}
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 14)
	}
	return this
}
// NewPopulatedNested returns a Nested with a random Bunny string and,
// unless easy is set, possibly some unrecognized bytes. The order of calls
// on r must stay fixed so seeded runs are reproducible.
func NewPopulatedNested(r randyTheproto3, easy bool) *Nested {
	this := &Nested{}
	this.Bunny = string(randStringTheproto3(r))
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 2)
	}
	return this
}
// NewPopulatedAllMaps returns an AllMaps whose map fields are each
// populated (with up to 9 random entries) 4 times out of 5, and left nil
// otherwise. Signed values are negated roughly half the time. The exact
// sequence of calls on r is the contract — reordering any statement breaks
// seed-reproducibility — so the code below must stay byte-for-byte stable.
func NewPopulatedAllMaps(r randyTheproto3, easy bool) *AllMaps {
	this := &AllMaps{}
	if r.Intn(5) != 0 {
		v5 := r.Intn(10)
		this.StringToDoubleMap = make(map[string]float64)
		for i := 0; i < v5; i++ {
			v6 := randStringTheproto3(r)
			this.StringToDoubleMap[v6] = float64(r.Float64())
			if r.Intn(2) == 0 {
				this.StringToDoubleMap[v6] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v7 := r.Intn(10)
		this.StringToFloatMap = make(map[string]float32)
		for i := 0; i < v7; i++ {
			v8 := randStringTheproto3(r)
			this.StringToFloatMap[v8] = float32(r.Float32())
			if r.Intn(2) == 0 {
				this.StringToFloatMap[v8] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v9 := r.Intn(10)
		this.Int32Map = make(map[int32]int32)
		for i := 0; i < v9; i++ {
			v10 := int32(r.Int31())
			this.Int32Map[v10] = int32(r.Int31())
			if r.Intn(2) == 0 {
				this.Int32Map[v10] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v11 := r.Intn(10)
		this.Int64Map = make(map[int64]int64)
		for i := 0; i < v11; i++ {
			v12 := int64(r.Int63())
			this.Int64Map[v12] = int64(r.Int63())
			if r.Intn(2) == 0 {
				this.Int64Map[v12] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v13 := r.Intn(10)
		this.Uint32Map = make(map[uint32]uint32)
		for i := 0; i < v13; i++ {
			v14 := uint32(r.Uint32())
			this.Uint32Map[v14] = uint32(r.Uint32())
		}
	}
	if r.Intn(5) != 0 {
		v15 := r.Intn(10)
		this.Uint64Map = make(map[uint64]uint64)
		for i := 0; i < v15; i++ {
			v16 := uint64(uint64(r.Uint32()))
			this.Uint64Map[v16] = uint64(uint64(r.Uint32()))
		}
	}
	if r.Intn(5) != 0 {
		v17 := r.Intn(10)
		this.Sint32Map = make(map[int32]int32)
		for i := 0; i < v17; i++ {
			v18 := int32(r.Int31())
			this.Sint32Map[v18] = int32(r.Int31())
			if r.Intn(2) == 0 {
				this.Sint32Map[v18] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v19 := r.Intn(10)
		this.Sint64Map = make(map[int64]int64)
		for i := 0; i < v19; i++ {
			v20 := int64(r.Int63())
			this.Sint64Map[v20] = int64(r.Int63())
			if r.Intn(2) == 0 {
				this.Sint64Map[v20] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v21 := r.Intn(10)
		this.Fixed32Map = make(map[uint32]uint32)
		for i := 0; i < v21; i++ {
			v22 := uint32(r.Uint32())
			this.Fixed32Map[v22] = uint32(r.Uint32())
		}
	}
	if r.Intn(5) != 0 {
		v23 := r.Intn(10)
		this.Sfixed32Map = make(map[int32]int32)
		for i := 0; i < v23; i++ {
			v24 := int32(r.Int31())
			this.Sfixed32Map[v24] = int32(r.Int31())
			if r.Intn(2) == 0 {
				this.Sfixed32Map[v24] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v25 := r.Intn(10)
		this.Fixed64Map = make(map[uint64]uint64)
		for i := 0; i < v25; i++ {
			v26 := uint64(uint64(r.Uint32()))
			this.Fixed64Map[v26] = uint64(uint64(r.Uint32()))
		}
	}
	if r.Intn(5) != 0 {
		v27 := r.Intn(10)
		this.Sfixed64Map = make(map[int64]int64)
		for i := 0; i < v27; i++ {
			v28 := int64(r.Int63())
			this.Sfixed64Map[v28] = int64(r.Int63())
			if r.Intn(2) == 0 {
				this.Sfixed64Map[v28] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v29 := r.Intn(10)
		this.BoolMap = make(map[bool]bool)
		for i := 0; i < v29; i++ {
			v30 := bool(bool(r.Intn(2) == 0))
			this.BoolMap[v30] = bool(bool(r.Intn(2) == 0))
		}
	}
	if r.Intn(5) != 0 {
		v31 := r.Intn(10)
		this.StringMap = make(map[string]string)
		for i := 0; i < v31; i++ {
			this.StringMap[randStringTheproto3(r)] = randStringTheproto3(r)
		}
	}
	if r.Intn(5) != 0 {
		v32 := r.Intn(10)
		this.StringToBytesMap = make(map[string][]byte)
		for i := 0; i < v32; i++ {
			v33 := r.Intn(100)
			v34 := randStringTheproto3(r)
			this.StringToBytesMap[v34] = make([]byte, v33)
			for i := 0; i < v33; i++ {
				this.StringToBytesMap[v34][i] = byte(r.Intn(256))
			}
		}
	}
	if r.Intn(5) != 0 {
		v35 := r.Intn(10)
		this.StringToEnumMap = make(map[string]MapEnum)
		for i := 0; i < v35; i++ {
			this.StringToEnumMap[randStringTheproto3(r)] = MapEnum([]int32{0, 1, 2}[r.Intn(3)])
		}
	}
	if r.Intn(5) != 0 {
		v36 := r.Intn(10)
		this.StringToMsgMap = make(map[string]*FloatingPoint)
		for i := 0; i < v36; i++ {
			this.StringToMsgMap[randStringTheproto3(r)] = NewPopulatedFloatingPoint(r, easy)
		}
	}
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 18)
	}
	return this
}
// NewPopulatedAllMapsOrdered returns an AllMapsOrdered whose map fields are
// filled with random data drawn from r. Each map field is populated with
// probability 4/5 and receives between 0 and 9 entries; entries of signed or
// floating-point type are negated half the time. When easy is false, random
// unknown-field bytes may also be attached (probability 9/10). The exact
// sequence of calls on r is the contract: callers replay it for
// deterministic fixtures, so the draw order must not change.
func NewPopulatedAllMapsOrdered(r randyTheproto3, easy bool) *AllMapsOrdered {
	this := &AllMapsOrdered{}
	if r.Intn(5) != 0 {
		v37 := r.Intn(10)
		this.StringToDoubleMap = make(map[string]float64)
		for i := 0; i < v37; i++ {
			v38 := randStringTheproto3(r)
			this.StringToDoubleMap[v38] = float64(r.Float64())
			if r.Intn(2) == 0 {
				this.StringToDoubleMap[v38] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v39 := r.Intn(10)
		this.StringToFloatMap = make(map[string]float32)
		for i := 0; i < v39; i++ {
			v40 := randStringTheproto3(r)
			this.StringToFloatMap[v40] = float32(r.Float32())
			if r.Intn(2) == 0 {
				this.StringToFloatMap[v40] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v41 := r.Intn(10)
		this.Int32Map = make(map[int32]int32)
		for i := 0; i < v41; i++ {
			v42 := int32(r.Int31())
			this.Int32Map[v42] = int32(r.Int31())
			if r.Intn(2) == 0 {
				this.Int32Map[v42] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v43 := r.Intn(10)
		this.Int64Map = make(map[int64]int64)
		for i := 0; i < v43; i++ {
			v44 := int64(r.Int63())
			this.Int64Map[v44] = int64(r.Int63())
			if r.Intn(2) == 0 {
				this.Int64Map[v44] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v45 := r.Intn(10)
		this.Uint32Map = make(map[uint32]uint32)
		for i := 0; i < v45; i++ {
			v46 := uint32(r.Uint32())
			this.Uint32Map[v46] = uint32(r.Uint32())
		}
	}
	if r.Intn(5) != 0 {
		v47 := r.Intn(10)
		this.Uint64Map = make(map[uint64]uint64)
		for i := 0; i < v47; i++ {
			// 64-bit values are drawn from 32-bit randomness by design.
			v48 := uint64(uint64(r.Uint32()))
			this.Uint64Map[v48] = uint64(uint64(r.Uint32()))
		}
	}
	if r.Intn(5) != 0 {
		v49 := r.Intn(10)
		this.Sint32Map = make(map[int32]int32)
		for i := 0; i < v49; i++ {
			v50 := int32(r.Int31())
			this.Sint32Map[v50] = int32(r.Int31())
			if r.Intn(2) == 0 {
				this.Sint32Map[v50] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v51 := r.Intn(10)
		this.Sint64Map = make(map[int64]int64)
		for i := 0; i < v51; i++ {
			v52 := int64(r.Int63())
			this.Sint64Map[v52] = int64(r.Int63())
			if r.Intn(2) == 0 {
				this.Sint64Map[v52] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v53 := r.Intn(10)
		this.Fixed32Map = make(map[uint32]uint32)
		for i := 0; i < v53; i++ {
			v54 := uint32(r.Uint32())
			this.Fixed32Map[v54] = uint32(r.Uint32())
		}
	}
	if r.Intn(5) != 0 {
		v55 := r.Intn(10)
		this.Sfixed32Map = make(map[int32]int32)
		for i := 0; i < v55; i++ {
			v56 := int32(r.Int31())
			this.Sfixed32Map[v56] = int32(r.Int31())
			if r.Intn(2) == 0 {
				this.Sfixed32Map[v56] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v57 := r.Intn(10)
		this.Fixed64Map = make(map[uint64]uint64)
		for i := 0; i < v57; i++ {
			v58 := uint64(uint64(r.Uint32()))
			this.Fixed64Map[v58] = uint64(uint64(r.Uint32()))
		}
	}
	if r.Intn(5) != 0 {
		v59 := r.Intn(10)
		this.Sfixed64Map = make(map[int64]int64)
		for i := 0; i < v59; i++ {
			v60 := int64(r.Int63())
			this.Sfixed64Map[v60] = int64(r.Int63())
			if r.Intn(2) == 0 {
				this.Sfixed64Map[v60] *= -1
			}
		}
	}
	if r.Intn(5) != 0 {
		v61 := r.Intn(10)
		this.BoolMap = make(map[bool]bool)
		for i := 0; i < v61; i++ {
			v62 := bool(bool(r.Intn(2) == 0))
			this.BoolMap[v62] = bool(bool(r.Intn(2) == 0))
		}
	}
	if r.Intn(5) != 0 {
		v63 := r.Intn(10)
		this.StringMap = make(map[string]string)
		for i := 0; i < v63; i++ {
			this.StringMap[randStringTheproto3(r)] = randStringTheproto3(r)
		}
	}
	if r.Intn(5) != 0 {
		v64 := r.Intn(10)
		this.StringToBytesMap = make(map[string][]byte)
		for i := 0; i < v64; i++ {
			// Each value is a random byte slice of length 0..99.
			v65 := r.Intn(100)
			v66 := randStringTheproto3(r)
			this.StringToBytesMap[v66] = make([]byte, v65)
			for i := 0; i < v65; i++ {
				this.StringToBytesMap[v66][i] = byte(r.Intn(256))
			}
		}
	}
	if r.Intn(5) != 0 {
		v67 := r.Intn(10)
		this.StringToEnumMap = make(map[string]MapEnum)
		for i := 0; i < v67; i++ {
			this.StringToEnumMap[randStringTheproto3(r)] = MapEnum([]int32{0, 1, 2}[r.Intn(3)])
		}
	}
	if r.Intn(5) != 0 {
		v68 := r.Intn(10)
		this.StringToMsgMap = make(map[string]*FloatingPoint)
		for i := 0; i < v68; i++ {
			this.StringToMsgMap[randStringTheproto3(r)] = NewPopulatedFloatingPoint(r, easy)
		}
	}
	if !easy && r.Intn(10) != 0 {
		// Field numbers above 18 (the highest declared field) are unknown.
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 18)
	}
	return this
}
// NewPopulatedMessageWithMap returns a MessageWithMap with its three map
// fields independently populated at random: each field is set with
// probability 4/5 and receives between 0 and 9 entries. When easy is false,
// random unknown-field bytes may also be attached (probability 9/10).
func NewPopulatedMessageWithMap(r randyTheproto3, easy bool) *MessageWithMap {
	this := &MessageWithMap{}
	if r.Intn(5) != 0 {
		v69 := r.Intn(10)
		this.NameMapping = make(map[int32]string)
		for i := 0; i < v69; i++ {
			this.NameMapping[int32(r.Int31())] = randStringTheproto3(r)
		}
	}
	if r.Intn(5) != 0 {
		v70 := r.Intn(10)
		this.MsgMapping = make(map[int64]*FloatingPoint)
		for i := 0; i < v70; i++ {
			this.MsgMapping[int64(r.Int63())] = NewPopulatedFloatingPoint(r, easy)
		}
	}
	if r.Intn(5) != 0 {
		v71 := r.Intn(10)
		this.ByteMapping = make(map[bool][]byte)
		for i := 0; i < v71; i++ {
			// Each value is a random byte slice of length 0..99; with only
			// two possible bool keys, later entries may overwrite earlier ones.
			v72 := r.Intn(100)
			v73 := bool(bool(r.Intn(2) == 0))
			this.ByteMapping[v73] = make([]byte, v72)
			for i := 0; i < v72; i++ {
				this.ByteMapping[v73][i] = byte(r.Intn(256))
			}
		}
	}
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 4)
	}
	return this
}
// NewPopulatedFloatingPoint returns a FloatingPoint whose F field is a
// random float64, negated half the time. When easy is false, random
// unknown-field bytes may also be attached (probability 9/10).
func NewPopulatedFloatingPoint(r randyTheproto3, easy bool) *FloatingPoint {
	this := &FloatingPoint{}
	this.F = float64(r.Float64())
	if r.Intn(2) == 0 {
		this.F *= -1
	}
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 2)
	}
	return this
}
// NewPopulatedUint128Pair returns a Uint128Pair with both sides populated by
// the custom Uint128 generator: Left is held by value (dereferenced), Right
// by pointer. When easy is false, random unknown-field bytes may be attached.
func NewPopulatedUint128Pair(r randyTheproto3, easy bool) *Uint128Pair {
	this := &Uint128Pair{}
	v74 := github_com_gogo_protobuf_test_custom.NewPopulatedUint128(r)
	this.Left = *v74
	this.Right = github_com_gogo_protobuf_test_custom.NewPopulatedUint128(r)
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 3)
	}
	return this
}
// NewPopulatedContainsNestedMap returns a ContainsNestedMap. The message
// declares no regular fields, so only random unknown-field bytes may be
// attached (when easy is false, with probability 9/10).
func NewPopulatedContainsNestedMap(r randyTheproto3, easy bool) *ContainsNestedMap {
	this := &ContainsNestedMap{}
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 1)
	}
	return this
}
// NewPopulatedContainsNestedMap_NestedMap returns a nested-map message whose
// NestedMapField is populated with probability 4/5 and receives between 0
// and 9 random string->float64 entries, each value negated half the time.
// When easy is false, random unknown-field bytes may also be attached.
func NewPopulatedContainsNestedMap_NestedMap(r randyTheproto3, easy bool) *ContainsNestedMap_NestedMap {
	this := &ContainsNestedMap_NestedMap{}
	if r.Intn(5) != 0 {
		v75 := r.Intn(10)
		this.NestedMapField = make(map[string]float64)
		for i := 0; i < v75; i++ {
			v76 := randStringTheproto3(r)
			this.NestedMapField[v76] = float64(r.Float64())
			if r.Intn(2) == 0 {
				this.NestedMapField[v76] *= -1
			}
		}
	}
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 2)
	}
	return this
}
// NewPopulatedNotPacked returns a NotPacked whose Key slice always receives
// between 0 and 9 random uint64 values (drawn from 32-bit randomness). When
// easy is false, random unknown-field bytes may also be attached.
func NewPopulatedNotPacked(r randyTheproto3, easy bool) *NotPacked {
	this := &NotPacked{}
	v77 := r.Intn(10)
	this.Key = make([]uint64, v77)
	for i := 0; i < v77; i++ {
		this.Key[i] = uint64(uint64(r.Uint32()))
	}
	if !easy && r.Intn(10) != 0 {
		this.XXX_unrecognized = randUnrecognizedTheproto3(r, 6)
	}
	return this
}
// randyTheproto3 is the source of randomness consumed by the NewPopulated*
// and rand* helpers in this file. Its method set matches the corresponding
// methods of math/rand's *Rand, so a *rand.Rand satisfies it directly while
// tests may substitute any deterministic implementation.
type randyTheproto3 interface {
	// Float32 returns a pseudo-random float32.
	Float32() float32
	// Float64 returns a pseudo-random float64.
	Float64() float64
	// Int63 returns a pseudo-random non-negative 63-bit integer.
	Int63() int64
	// Int31 returns a pseudo-random non-negative 31-bit integer.
	Int31() int32
	// Uint32 returns a pseudo-random 32-bit unsigned integer.
	Uint32() uint32
	// Intn returns a pseudo-random int in [0, n).
	Intn(n int) int
}
// randUTF8RuneTheproto3 draws one random alphanumeric rune from the 62-glyph
// alphabet [0-9A-Za-z], consuming exactly one Intn call from r.
func randUTF8RuneTheproto3(r randyTheproto3) rune {
	idx := r.Intn(62)
	switch {
	case idx < 10:
		return rune('0' + idx) // digits '0'..'9'
	case idx < 36:
		return rune('A' + idx - 10) // uppercase 'A'..'Z'
	default:
		return rune('a' + idx - 36) // lowercase 'a'..'z'
	}
}
// randStringTheproto3 builds a random alphanumeric string whose length is
// drawn uniformly from [0, 100).
func randStringTheproto3(r randyTheproto3) string {
	length := r.Intn(100)
	runes := make([]rune, 0, length)
	for j := 0; j < length; j++ {
		runes = append(runes, randUTF8RuneTheproto3(r))
	}
	return string(runes)
}
// randUnrecognizedTheproto3 produces 0..4 random wire-format fields whose
// field numbers all exceed maxFieldNumber, simulating bytes a decoder would
// retain as unrecognized.
func randUnrecognizedTheproto3(r randyTheproto3, maxFieldNumber int) (dAtA []byte) {
	fieldCount := r.Intn(5)
	for f := 0; f < fieldCount; f++ {
		// Draw a wire type in {0, 1, 2, 5}; a draw of 3 (deprecated group
		// start) is remapped to 5 (32-bit).
		wireType := r.Intn(4)
		if wireType == 3 {
			wireType = 5
		}
		dAtA = randFieldTheproto3(dAtA, r, maxFieldNumber+r.Intn(100), wireType)
	}
	return dAtA
}
// randFieldTheproto3 appends one random protobuf field with the given field
// number and wire type to dAtA and returns the extended slice.
func randFieldTheproto3(dAtA []byte, r randyTheproto3, fieldNumber int, wire int) []byte {
	// A protobuf field key is the field number shifted left three bits,
	// combined with the 3-bit wire type.
	key := uint32(fieldNumber)<<3 | uint32(wire)
	switch wire {
	case 0:
		// Varint: key followed by a random (half the time negated) value.
		dAtA = encodeVarintPopulateTheproto3(dAtA, uint64(key))
		v79 := r.Int63()
		if r.Intn(2) == 0 {
			v79 *= -1
		}
		dAtA = encodeVarintPopulateTheproto3(dAtA, uint64(v79))
	case 1:
		// Fixed 64-bit: key followed by eight random bytes.
		dAtA = encodeVarintPopulateTheproto3(dAtA, uint64(key))
		dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
	case 2:
		// Length-delimited: key, a random length in [0, 100), then that many
		// random payload bytes.
		dAtA = encodeVarintPopulateTheproto3(dAtA, uint64(key))
		ll := r.Intn(100)
		dAtA = encodeVarintPopulateTheproto3(dAtA, uint64(ll))
		for j := 0; j < ll; j++ {
			dAtA = append(dAtA, byte(r.Intn(256)))
		}
	default:
		// Any other wire type (5 in practice here): key plus four random
		// bytes, i.e. a fixed 32-bit field.
		dAtA = encodeVarintPopulateTheproto3(dAtA, uint64(key))
		dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
	}
	return dAtA
}
// encodeVarintPopulateTheproto3 appends v to dAtA in protobuf base-128
// varint encoding (7 payload bits per byte, continuation bit set on all but
// the final byte) and returns the extended slice.
func encodeVarintPopulateTheproto3(dAtA []byte, v uint64) []byte {
	// Emit continuation bytes while more than seven bits remain.
	for ; v >= 0x80; v >>= 7 {
		dAtA = append(dAtA, byte(v)|0x80)
	}
	// The last byte carries the remaining bits with the high bit clear.
	return append(dAtA, byte(v))
}
// Size reports the number of bytes m occupies when protobuf-encoded, summing
// tag/length overhead plus payload for every non-zero field and any retained
// unknown bytes. A nil receiver encodes to zero bytes.
func (m *Message) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	// proto3 scalars and length-delimited fields are omitted at their zero
	// values: tag byte + length varint + payload for strings/bytes.
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovTheproto3(uint64(l))
	}
	if m.Hilarity != 0 {
		n += 1 + sovTheproto3(uint64(m.Hilarity))
	}
	if m.HeightInCm != 0 {
		n += 1 + sovTheproto3(uint64(m.HeightInCm))
	}
	l = len(m.Data)
	if l > 0 {
		n += 1 + l + sovTheproto3(uint64(l))
	}
	if m.ResultCount != 0 {
		n += 1 + sovTheproto3(uint64(m.ResultCount))
	}
	if m.TrueScotsman {
		// Bool: tag byte + one payload byte.
		n += 2
	}
	if m.Score != 0 {
		// Fixed 32-bit float: tag byte + four payload bytes.
		n += 5
	}
	// Key is packed repeated: tag + total-length varint + one varint per
	// element.
	if len(m.Key) > 0 {
		l = 0
		for _, e := range m.Key {
			l += sovTheproto3(uint64(e))
		}
		n += 1 + sovTheproto3(uint64(l)) + l
	}
	if m.Nested != nil {
		l = m.Nested.Size()
		n += 1 + l + sovTheproto3(uint64(l))
	}
	// Map fields encode as repeated key/value entry messages; a nil message
	// value contributes no value field to its entry.
	if len(m.Terrain) > 0 {
		for k, v := range m.Terrain {
			_ = k
			_ = v
			l = 0
			if v != nil {
				l = v.Size()
				l += 1 + sovTheproto3(uint64(l))
			}
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + l
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if m.Proto2Field != nil {
		l = m.Proto2Field.Size()
		n += 1 + l + sovTheproto3(uint64(l))
	}
	if len(m.Proto2Value) > 0 {
		for k, v := range m.Proto2Value {
			_ = k
			_ = v
			l = 0
			if v != nil {
				l = v.Size()
				l += 1 + sovTheproto3(uint64(l))
			}
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + l
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Unknown fields are stored verbatim and re-emitted as-is.
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded,
// including any retained unknown-field bytes. A nil receiver encodes to
// zero bytes.
func (m *Nested) Size() (n int) {
	if m == nil {
		return 0
	}
	// Field 1 (bunny, string): tag byte + length varint + payload, omitted
	// when empty.
	if l := len(m.Bunny); l > 0 {
		n += 1 + l + sovTheproto3(uint64(l))
	}
	// Unknown fields are carried verbatim (len of a nil slice is zero).
	n += len(m.XXX_unrecognized)
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded. Every
// map field encodes as repeated entry messages: each entry costs its own
// tag + length prefix (1 byte for fields 1-15, 2 bytes for fields 16+) plus
// the encoded key and value inside. A nil receiver encodes to zero bytes.
func (m *AllMaps) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	// string -> double: string key (tag + length + bytes), fixed 8-byte value.
	if len(m.StringToDoubleMap) > 0 {
		for k, v := range m.StringToDoubleMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + 8
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// string -> float: as above but with a fixed 4-byte value.
	if len(m.StringToFloatMap) > 0 {
		for k, v := range m.StringToFloatMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + 4
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Plain varint keys and values.
	if len(m.Int32Map) > 0 {
		for k, v := range m.Int32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Int64Map) > 0 {
		for k, v := range m.Int64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Uint32Map) > 0 {
		for k, v := range m.Uint32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Uint64Map) > 0 {
		for k, v := range m.Uint64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// sint32/sint64 use ZigZag varints (sozTheproto3).
	if len(m.Sint32Map) > 0 {
		for k, v := range m.Sint32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sozTheproto3(uint64(k)) + 1 + sozTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Sint64Map) > 0 {
		for k, v := range m.Sint64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sozTheproto3(uint64(k)) + 1 + sozTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Fixed-width entries have constant size: 4 bytes per 32-bit scalar,
	// 8 per 64-bit scalar, 1 per bool, plus one tag byte each.
	if len(m.Fixed32Map) > 0 {
		for k, v := range m.Fixed32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 4 + 1 + 4
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Sfixed32Map) > 0 {
		for k, v := range m.Sfixed32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 4 + 1 + 4
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Fixed64Map) > 0 {
		for k, v := range m.Fixed64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 8 + 1 + 8
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Sfixed64Map) > 0 {
		for k, v := range m.Sfixed64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 8 + 1 + 8
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.BoolMap) > 0 {
		for k, v := range m.BoolMap {
			_ = k
			_ = v
			mapEntrySize := 1 + 1 + 1 + 1
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.StringMap) > 0 {
		for k, v := range m.StringMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + len(v) + sovTheproto3(uint64(len(v)))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Empty byte-slice values are omitted from their entry entirely.
	if len(m.StringToBytesMap) > 0 {
		for k, v := range m.StringToBytesMap {
			_ = k
			_ = v
			l = 0
			if len(v) > 0 {
				l = 1 + len(v) + sovTheproto3(uint64(len(v)))
			}
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + l
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Fields 16 and 17: the field tag needs two bytes, hence "+ 2".
	if len(m.StringToEnumMap) > 0 {
		for k, v := range m.StringToEnumMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 2 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.StringToMsgMap) > 0 {
		for k, v := range m.StringToMsgMap {
			_ = k
			_ = v
			l = 0
			if v != nil {
				l = v.Size()
				l += 1 + sovTheproto3(uint64(l))
			}
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + l
			n += mapEntrySize + 2 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Unknown fields are stored verbatim.
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded. The
// computation mirrors AllMaps.Size: each map field encodes as repeated entry
// messages costing tag + length prefix (2-byte tags for fields 16+) plus the
// encoded key and value. A nil receiver encodes to zero bytes.
func (m *AllMapsOrdered) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	// string keys: tag + length varint + bytes; double/float values are
	// fixed 8- and 4-byte payloads respectively.
	if len(m.StringToDoubleMap) > 0 {
		for k, v := range m.StringToDoubleMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + 8
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.StringToFloatMap) > 0 {
		for k, v := range m.StringToFloatMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + 4
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Plain varint keys and values.
	if len(m.Int32Map) > 0 {
		for k, v := range m.Int32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Int64Map) > 0 {
		for k, v := range m.Int64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Uint32Map) > 0 {
		for k, v := range m.Uint32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Uint64Map) > 0 {
		for k, v := range m.Uint64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// sint32/sint64 use ZigZag varints (sozTheproto3).
	if len(m.Sint32Map) > 0 {
		for k, v := range m.Sint32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sozTheproto3(uint64(k)) + 1 + sozTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Sint64Map) > 0 {
		for k, v := range m.Sint64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + sozTheproto3(uint64(k)) + 1 + sozTheproto3(uint64(v))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Fixed-width entries have constant size per entry.
	if len(m.Fixed32Map) > 0 {
		for k, v := range m.Fixed32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 4 + 1 + 4
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Sfixed32Map) > 0 {
		for k, v := range m.Sfixed32Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 4 + 1 + 4
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Fixed64Map) > 0 {
		for k, v := range m.Fixed64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 8 + 1 + 8
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.Sfixed64Map) > 0 {
		for k, v := range m.Sfixed64Map {
			_ = k
			_ = v
			mapEntrySize := 1 + 8 + 1 + 8
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.BoolMap) > 0 {
		for k, v := range m.BoolMap {
			_ = k
			_ = v
			mapEntrySize := 1 + 1 + 1 + 1
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.StringMap) > 0 {
		for k, v := range m.StringMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + len(v) + sovTheproto3(uint64(len(v)))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Empty byte-slice values are omitted from their entry entirely.
	if len(m.StringToBytesMap) > 0 {
		for k, v := range m.StringToBytesMap {
			_ = k
			_ = v
			l = 0
			if len(v) > 0 {
				l = 1 + len(v) + sovTheproto3(uint64(len(v)))
			}
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + l
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Fields 16 and 17: the field tag needs two bytes, hence "+ 2".
	if len(m.StringToEnumMap) > 0 {
		for k, v := range m.StringToEnumMap {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + sovTheproto3(uint64(v))
			n += mapEntrySize + 2 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	if len(m.StringToMsgMap) > 0 {
		for k, v := range m.StringToMsgMap {
			_ = k
			_ = v
			l = 0
			if v != nil {
				l = v.Size()
				l += 1 + sovTheproto3(uint64(l))
			}
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + l
			n += mapEntrySize + 2 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Unknown fields are stored verbatim.
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded. Each of
// the three map fields encodes as repeated entry messages. A nil receiver
// encodes to zero bytes.
func (m *MessageWithMap) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	// int32 -> string: varint key, length-prefixed value.
	if len(m.NameMapping) > 0 {
		for k, v := range m.NameMapping {
			_ = k
			_ = v
			mapEntrySize := 1 + sovTheproto3(uint64(k)) + 1 + len(v) + sovTheproto3(uint64(len(v)))
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// sint64 -> message: ZigZag key; a nil message value contributes no
	// value field to its entry.
	if len(m.MsgMapping) > 0 {
		for k, v := range m.MsgMapping {
			_ = k
			_ = v
			l = 0
			if v != nil {
				l = v.Size()
				l += 1 + sovTheproto3(uint64(l))
			}
			mapEntrySize := 1 + sozTheproto3(uint64(k)) + l
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// bool -> bytes: one-byte key; empty byte slices are omitted entirely.
	if len(m.ByteMapping) > 0 {
		for k, v := range m.ByteMapping {
			_ = k
			_ = v
			l = 0
			if len(v) > 0 {
				l = 1 + len(v) + sovTheproto3(uint64(len(v)))
			}
			mapEntrySize := 1 + 1 + l
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Unknown fields are stored verbatim.
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded. A nil
// receiver encodes to zero bytes.
func (m *FloatingPoint) Size() (n int) {
	if m == nil {
		return 0
	}
	// Field 1 (f, double): tag byte + fixed 8-byte payload, omitted at zero.
	if m.F != 0 {
		n += 1 + 8
	}
	// Unknown fields are carried verbatim (len of a nil slice is zero).
	n += len(m.XXX_unrecognized)
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded. Left is
// a custom value type and is always emitted; Right is a pointer and is
// emitted only when non-nil. A nil receiver encodes to zero bytes.
func (m *Uint128Pair) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = m.Left.Size()
	n += 1 + l + sovTheproto3(uint64(l))
	if m.Right != nil {
		l = m.Right.Size()
		n += 1 + l + sovTheproto3(uint64(l))
	}
	// Unknown fields are stored verbatim.
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded. The
// message declares no regular fields, so only retained unknown-field bytes
// contribute. A nil receiver encodes to zero bytes.
func (m *ContainsNestedMap) Size() (n int) {
	if m == nil {
		return 0
	}
	return len(m.XXX_unrecognized)
}
// Size reports the number of bytes m occupies when protobuf-encoded. Each
// NestedMapField entry costs its tag + length prefix plus a length-prefixed
// string key and a fixed 8-byte double value. A nil receiver encodes to
// zero bytes.
func (m *ContainsNestedMap_NestedMap) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if len(m.NestedMapField) > 0 {
		for k, v := range m.NestedMapField {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovTheproto3(uint64(len(k))) + 1 + 8
			n += mapEntrySize + 1 + sovTheproto3(uint64(mapEntrySize))
		}
	}
	// Unknown fields are stored verbatim.
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size reports the number of bytes m occupies when protobuf-encoded. Key is
// an unpacked repeated field: every element carries its own tag byte plus a
// varint payload. A nil receiver encodes to zero bytes.
func (m *NotPacked) Size() (n int) {
	if m == nil {
		return 0
	}
	// Ranging over a nil/empty slice adds nothing, so no length guard needed.
	for _, e := range m.Key {
		n += 1 + sovTheproto3(uint64(e))
	}
	// Unknown fields are carried verbatim (len of a nil slice is zero).
	n += len(m.XXX_unrecognized)
	return n
}
// sovTheproto3 returns the number of bytes the base-128 varint encoding of x
// occupies: one byte per started group of 7 significant bits, with zero
// still costing a single byte.
func sovTheproto3(x uint64) (n int) {
	// x|1 forces a nonzero bit length so zero maps to one byte; ceil-divide
	// the bit length by 7.
	return (math_bits.Len64(x|1)-1)/7 + 1
}
// sozTheproto3 returns the varint-encoded size of x after ZigZag encoding,
// i.e. the wire size of a sint32/sint64 value.
func sozTheproto3(x uint64) (n int) {
	// ZigZag interleaves signed values into small unsigned ones:
	// 0, -1, 1, -2 map to 0, 1, 2, 3.
	zigzag := (x << 1) ^ uint64(int64(x)>>63)
	return sovTheproto3(zigzag)
}
// String renders the message as a Go-literal-style single-line string. Map
// fields are printed with their keys sorted so the output is deterministic
// despite Go's randomized map iteration order.
func (this *Message) String() string {
	if this == nil {
		return "nil"
	}
	// Collect and sort Terrain keys, then format the map in key order.
	keysForTerrain := make([]int64, 0, len(this.Terrain))
	for k := range this.Terrain {
		keysForTerrain = append(keysForTerrain, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForTerrain)
	mapStringForTerrain := "map[int64]*Nested{"
	for _, k := range keysForTerrain {
		mapStringForTerrain += fmt.Sprintf("%v: %v,", k, this.Terrain[k])
	}
	mapStringForTerrain += "}"
	// Same sorted-key treatment for Proto2Value.
	keysForProto2Value := make([]int64, 0, len(this.Proto2Value))
	for k := range this.Proto2Value {
		keysForProto2Value = append(keysForProto2Value, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForProto2Value)
	mapStringForProto2Value := "map[int64]*both.NinOptEnum{"
	for _, k := range keysForProto2Value {
		mapStringForProto2Value += fmt.Sprintf("%v: %v,", k, this.Proto2Value[k])
	}
	mapStringForProto2Value += "}"
	s := strings.Join([]string{`&Message{`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Hilarity:` + fmt.Sprintf("%v", this.Hilarity) + `,`,
		`HeightInCm:` + fmt.Sprintf("%v", this.HeightInCm) + `,`,
		`Data:` + fmt.Sprintf("%v", this.Data) + `,`,
		`ResultCount:` + fmt.Sprintf("%v", this.ResultCount) + `,`,
		`TrueScotsman:` + fmt.Sprintf("%v", this.TrueScotsman) + `,`,
		`Score:` + fmt.Sprintf("%v", this.Score) + `,`,
		`Key:` + fmt.Sprintf("%v", this.Key) + `,`,
		`Nested:` + strings.Replace(this.Nested.String(), "Nested", "Nested", 1) + `,`,
		`Terrain:` + mapStringForTerrain + `,`,
		`Proto2Field:` + strings.Replace(fmt.Sprintf("%v", this.Proto2Field), "NinOptNative", "both.NinOptNative", 1) + `,`,
		`Proto2Value:` + mapStringForProto2Value + `,`,
		`XXX_unrecognized:` + fmt.Sprintf("%v", this.XXX_unrecognized) + `,`,
		`}`,
	}, "")
	return s
}
// String renders the message as a Go-literal-style single-line string; a nil
// receiver prints as "nil".
func (this *Nested) String() string {
	if this == nil {
		return "nil"
	}
	parts := []string{
		`&Nested{`,
		`Bunny:` + fmt.Sprintf("%v", this.Bunny) + `,`,
		`XXX_unrecognized:` + fmt.Sprintf("%v", this.XXX_unrecognized) + `,`,
		`}`,
	}
	return strings.Join(parts, "")
}
// String renders the message as a Go-literal-style single-line string. Every
// map field follows the same pattern: collect the keys, sort them with the
// matching sortkeys helper, and format entries in key order so the output is
// deterministic despite Go's randomized map iteration.
func (this *AllMaps) String() string {
	if this == nil {
		return "nil"
	}
	keysForStringToDoubleMap := make([]string, 0, len(this.StringToDoubleMap))
	for k := range this.StringToDoubleMap {
		keysForStringToDoubleMap = append(keysForStringToDoubleMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToDoubleMap)
	mapStringForStringToDoubleMap := "map[string]float64{"
	for _, k := range keysForStringToDoubleMap {
		mapStringForStringToDoubleMap += fmt.Sprintf("%v: %v,", k, this.StringToDoubleMap[k])
	}
	mapStringForStringToDoubleMap += "}"
	keysForStringToFloatMap := make([]string, 0, len(this.StringToFloatMap))
	for k := range this.StringToFloatMap {
		keysForStringToFloatMap = append(keysForStringToFloatMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToFloatMap)
	mapStringForStringToFloatMap := "map[string]float32{"
	for _, k := range keysForStringToFloatMap {
		mapStringForStringToFloatMap += fmt.Sprintf("%v: %v,", k, this.StringToFloatMap[k])
	}
	mapStringForStringToFloatMap += "}"
	keysForInt32Map := make([]int32, 0, len(this.Int32Map))
	for k := range this.Int32Map {
		keysForInt32Map = append(keysForInt32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int32s(keysForInt32Map)
	mapStringForInt32Map := "map[int32]int32{"
	for _, k := range keysForInt32Map {
		mapStringForInt32Map += fmt.Sprintf("%v: %v,", k, this.Int32Map[k])
	}
	mapStringForInt32Map += "}"
	keysForInt64Map := make([]int64, 0, len(this.Int64Map))
	for k := range this.Int64Map {
		keysForInt64Map = append(keysForInt64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForInt64Map)
	mapStringForInt64Map := "map[int64]int64{"
	for _, k := range keysForInt64Map {
		mapStringForInt64Map += fmt.Sprintf("%v: %v,", k, this.Int64Map[k])
	}
	mapStringForInt64Map += "}"
	keysForUint32Map := make([]uint32, 0, len(this.Uint32Map))
	for k := range this.Uint32Map {
		keysForUint32Map = append(keysForUint32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint32s(keysForUint32Map)
	mapStringForUint32Map := "map[uint32]uint32{"
	for _, k := range keysForUint32Map {
		mapStringForUint32Map += fmt.Sprintf("%v: %v,", k, this.Uint32Map[k])
	}
	mapStringForUint32Map += "}"
	keysForUint64Map := make([]uint64, 0, len(this.Uint64Map))
	for k := range this.Uint64Map {
		keysForUint64Map = append(keysForUint64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint64s(keysForUint64Map)
	mapStringForUint64Map := "map[uint64]uint64{"
	for _, k := range keysForUint64Map {
		mapStringForUint64Map += fmt.Sprintf("%v: %v,", k, this.Uint64Map[k])
	}
	mapStringForUint64Map += "}"
	keysForSint32Map := make([]int32, 0, len(this.Sint32Map))
	for k := range this.Sint32Map {
		keysForSint32Map = append(keysForSint32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int32s(keysForSint32Map)
	mapStringForSint32Map := "map[int32]int32{"
	for _, k := range keysForSint32Map {
		mapStringForSint32Map += fmt.Sprintf("%v: %v,", k, this.Sint32Map[k])
	}
	mapStringForSint32Map += "}"
	keysForSint64Map := make([]int64, 0, len(this.Sint64Map))
	for k := range this.Sint64Map {
		keysForSint64Map = append(keysForSint64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForSint64Map)
	mapStringForSint64Map := "map[int64]int64{"
	for _, k := range keysForSint64Map {
		mapStringForSint64Map += fmt.Sprintf("%v: %v,", k, this.Sint64Map[k])
	}
	mapStringForSint64Map += "}"
	keysForFixed32Map := make([]uint32, 0, len(this.Fixed32Map))
	for k := range this.Fixed32Map {
		keysForFixed32Map = append(keysForFixed32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint32s(keysForFixed32Map)
	mapStringForFixed32Map := "map[uint32]uint32{"
	for _, k := range keysForFixed32Map {
		mapStringForFixed32Map += fmt.Sprintf("%v: %v,", k, this.Fixed32Map[k])
	}
	mapStringForFixed32Map += "}"
	keysForSfixed32Map := make([]int32, 0, len(this.Sfixed32Map))
	for k := range this.Sfixed32Map {
		keysForSfixed32Map = append(keysForSfixed32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int32s(keysForSfixed32Map)
	mapStringForSfixed32Map := "map[int32]int32{"
	for _, k := range keysForSfixed32Map {
		mapStringForSfixed32Map += fmt.Sprintf("%v: %v,", k, this.Sfixed32Map[k])
	}
	mapStringForSfixed32Map += "}"
	keysForFixed64Map := make([]uint64, 0, len(this.Fixed64Map))
	for k := range this.Fixed64Map {
		keysForFixed64Map = append(keysForFixed64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint64s(keysForFixed64Map)
	mapStringForFixed64Map := "map[uint64]uint64{"
	for _, k := range keysForFixed64Map {
		mapStringForFixed64Map += fmt.Sprintf("%v: %v,", k, this.Fixed64Map[k])
	}
	mapStringForFixed64Map += "}"
	keysForSfixed64Map := make([]int64, 0, len(this.Sfixed64Map))
	for k := range this.Sfixed64Map {
		keysForSfixed64Map = append(keysForSfixed64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForSfixed64Map)
	mapStringForSfixed64Map := "map[int64]int64{"
	for _, k := range keysForSfixed64Map {
		mapStringForSfixed64Map += fmt.Sprintf("%v: %v,", k, this.Sfixed64Map[k])
	}
	mapStringForSfixed64Map += "}"
	keysForBoolMap := make([]bool, 0, len(this.BoolMap))
	for k := range this.BoolMap {
		keysForBoolMap = append(keysForBoolMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Bools(keysForBoolMap)
	mapStringForBoolMap := "map[bool]bool{"
	for _, k := range keysForBoolMap {
		mapStringForBoolMap += fmt.Sprintf("%v: %v,", k, this.BoolMap[k])
	}
	mapStringForBoolMap += "}"
	keysForStringMap := make([]string, 0, len(this.StringMap))
	for k := range this.StringMap {
		keysForStringMap = append(keysForStringMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringMap)
	mapStringForStringMap := "map[string]string{"
	for _, k := range keysForStringMap {
		mapStringForStringMap += fmt.Sprintf("%v: %v,", k, this.StringMap[k])
	}
	mapStringForStringMap += "}"
	keysForStringToBytesMap := make([]string, 0, len(this.StringToBytesMap))
	for k := range this.StringToBytesMap {
		keysForStringToBytesMap = append(keysForStringToBytesMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToBytesMap)
	mapStringForStringToBytesMap := "map[string][]byte{"
	for _, k := range keysForStringToBytesMap {
		mapStringForStringToBytesMap += fmt.Sprintf("%v: %v,", k, this.StringToBytesMap[k])
	}
	mapStringForStringToBytesMap += "}"
	keysForStringToEnumMap := make([]string, 0, len(this.StringToEnumMap))
	for k := range this.StringToEnumMap {
		keysForStringToEnumMap = append(keysForStringToEnumMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToEnumMap)
	mapStringForStringToEnumMap := "map[string]MapEnum{"
	for _, k := range keysForStringToEnumMap {
		mapStringForStringToEnumMap += fmt.Sprintf("%v: %v,", k, this.StringToEnumMap[k])
	}
	mapStringForStringToEnumMap += "}"
	keysForStringToMsgMap := make([]string, 0, len(this.StringToMsgMap))
	for k := range this.StringToMsgMap {
		keysForStringToMsgMap = append(keysForStringToMsgMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToMsgMap)
	mapStringForStringToMsgMap := "map[string]*FloatingPoint{"
	for _, k := range keysForStringToMsgMap {
		mapStringForStringToMsgMap += fmt.Sprintf("%v: %v,", k, this.StringToMsgMap[k])
	}
	mapStringForStringToMsgMap += "}"
	// Assemble the final literal from the pre-rendered map strings.
	s := strings.Join([]string{`&AllMaps{`,
		`StringToDoubleMap:` + mapStringForStringToDoubleMap + `,`,
		`StringToFloatMap:` + mapStringForStringToFloatMap + `,`,
		`Int32Map:` + mapStringForInt32Map + `,`,
		`Int64Map:` + mapStringForInt64Map + `,`,
		`Uint32Map:` + mapStringForUint32Map + `,`,
		`Uint64Map:` + mapStringForUint64Map + `,`,
		`Sint32Map:` + mapStringForSint32Map + `,`,
		`Sint64Map:` + mapStringForSint64Map + `,`,
		`Fixed32Map:` + mapStringForFixed32Map + `,`,
		`Sfixed32Map:` + mapStringForSfixed32Map + `,`,
		`Fixed64Map:` + mapStringForFixed64Map + `,`,
		`Sfixed64Map:` + mapStringForSfixed64Map + `,`,
		`BoolMap:` + mapStringForBoolMap + `,`,
		`StringMap:` + mapStringForStringMap + `,`,
		`StringToBytesMap:` + mapStringForStringToBytesMap + `,`,
		`StringToEnumMap:` + mapStringForStringToEnumMap + `,`,
		`StringToMsgMap:` + mapStringForStringToMsgMap + `,`,
		`XXX_unrecognized:` + fmt.Sprintf("%v", this.XXX_unrecognized) + `,`,
		`}`,
	}, "")
	return s
}
// String implements fmt.Stringer for AllMapsOrdered in the gogoproto generated
// one-line form "&AllMapsOrdered{Field:map[K]V{k: v,...},...}". Because Go map
// iteration order is random, each map field's keys are collected and sorted
// first so the rendered output is deterministic (important for tests/golden
// comparisons). Unknown fields are appended via XXX_unrecognized.
func (this *AllMapsOrdered) String() string {
	if this == nil {
		return "nil"
	}
	// Each stanza below follows the same pattern: gather keys, sort them,
	// then render the map as "map[K]V{k: v,...}" in sorted-key order.
	keysForStringToDoubleMap := make([]string, 0, len(this.StringToDoubleMap))
	for k := range this.StringToDoubleMap {
		keysForStringToDoubleMap = append(keysForStringToDoubleMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToDoubleMap)
	mapStringForStringToDoubleMap := "map[string]float64{"
	for _, k := range keysForStringToDoubleMap {
		mapStringForStringToDoubleMap += fmt.Sprintf("%v: %v,", k, this.StringToDoubleMap[k])
	}
	mapStringForStringToDoubleMap += "}"
	keysForStringToFloatMap := make([]string, 0, len(this.StringToFloatMap))
	for k := range this.StringToFloatMap {
		keysForStringToFloatMap = append(keysForStringToFloatMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToFloatMap)
	mapStringForStringToFloatMap := "map[string]float32{"
	for _, k := range keysForStringToFloatMap {
		mapStringForStringToFloatMap += fmt.Sprintf("%v: %v,", k, this.StringToFloatMap[k])
	}
	mapStringForStringToFloatMap += "}"
	keysForInt32Map := make([]int32, 0, len(this.Int32Map))
	for k := range this.Int32Map {
		keysForInt32Map = append(keysForInt32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int32s(keysForInt32Map)
	mapStringForInt32Map := "map[int32]int32{"
	for _, k := range keysForInt32Map {
		mapStringForInt32Map += fmt.Sprintf("%v: %v,", k, this.Int32Map[k])
	}
	mapStringForInt32Map += "}"
	keysForInt64Map := make([]int64, 0, len(this.Int64Map))
	for k := range this.Int64Map {
		keysForInt64Map = append(keysForInt64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForInt64Map)
	mapStringForInt64Map := "map[int64]int64{"
	for _, k := range keysForInt64Map {
		mapStringForInt64Map += fmt.Sprintf("%v: %v,", k, this.Int64Map[k])
	}
	mapStringForInt64Map += "}"
	keysForUint32Map := make([]uint32, 0, len(this.Uint32Map))
	for k := range this.Uint32Map {
		keysForUint32Map = append(keysForUint32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint32s(keysForUint32Map)
	mapStringForUint32Map := "map[uint32]uint32{"
	for _, k := range keysForUint32Map {
		mapStringForUint32Map += fmt.Sprintf("%v: %v,", k, this.Uint32Map[k])
	}
	mapStringForUint32Map += "}"
	keysForUint64Map := make([]uint64, 0, len(this.Uint64Map))
	for k := range this.Uint64Map {
		keysForUint64Map = append(keysForUint64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint64s(keysForUint64Map)
	mapStringForUint64Map := "map[uint64]uint64{"
	for _, k := range keysForUint64Map {
		mapStringForUint64Map += fmt.Sprintf("%v: %v,", k, this.Uint64Map[k])
	}
	mapStringForUint64Map += "}"
	keysForSint32Map := make([]int32, 0, len(this.Sint32Map))
	for k := range this.Sint32Map {
		keysForSint32Map = append(keysForSint32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int32s(keysForSint32Map)
	mapStringForSint32Map := "map[int32]int32{"
	for _, k := range keysForSint32Map {
		mapStringForSint32Map += fmt.Sprintf("%v: %v,", k, this.Sint32Map[k])
	}
	mapStringForSint32Map += "}"
	keysForSint64Map := make([]int64, 0, len(this.Sint64Map))
	for k := range this.Sint64Map {
		keysForSint64Map = append(keysForSint64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForSint64Map)
	mapStringForSint64Map := "map[int64]int64{"
	for _, k := range keysForSint64Map {
		mapStringForSint64Map += fmt.Sprintf("%v: %v,", k, this.Sint64Map[k])
	}
	mapStringForSint64Map += "}"
	keysForFixed32Map := make([]uint32, 0, len(this.Fixed32Map))
	for k := range this.Fixed32Map {
		keysForFixed32Map = append(keysForFixed32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint32s(keysForFixed32Map)
	mapStringForFixed32Map := "map[uint32]uint32{"
	for _, k := range keysForFixed32Map {
		mapStringForFixed32Map += fmt.Sprintf("%v: %v,", k, this.Fixed32Map[k])
	}
	mapStringForFixed32Map += "}"
	keysForSfixed32Map := make([]int32, 0, len(this.Sfixed32Map))
	for k := range this.Sfixed32Map {
		keysForSfixed32Map = append(keysForSfixed32Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int32s(keysForSfixed32Map)
	mapStringForSfixed32Map := "map[int32]int32{"
	for _, k := range keysForSfixed32Map {
		mapStringForSfixed32Map += fmt.Sprintf("%v: %v,", k, this.Sfixed32Map[k])
	}
	mapStringForSfixed32Map += "}"
	keysForFixed64Map := make([]uint64, 0, len(this.Fixed64Map))
	for k := range this.Fixed64Map {
		keysForFixed64Map = append(keysForFixed64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Uint64s(keysForFixed64Map)
	mapStringForFixed64Map := "map[uint64]uint64{"
	for _, k := range keysForFixed64Map {
		mapStringForFixed64Map += fmt.Sprintf("%v: %v,", k, this.Fixed64Map[k])
	}
	mapStringForFixed64Map += "}"
	keysForSfixed64Map := make([]int64, 0, len(this.Sfixed64Map))
	for k := range this.Sfixed64Map {
		keysForSfixed64Map = append(keysForSfixed64Map, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(keysForSfixed64Map)
	mapStringForSfixed64Map := "map[int64]int64{"
	for _, k := range keysForSfixed64Map {
		mapStringForSfixed64Map += fmt.Sprintf("%v: %v,", k, this.Sfixed64Map[k])
	}
	mapStringForSfixed64Map += "}"
	keysForBoolMap := make([]bool, 0, len(this.BoolMap))
	for k := range this.BoolMap {
		keysForBoolMap = append(keysForBoolMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Bools(keysForBoolMap)
	mapStringForBoolMap := "map[bool]bool{"
	for _, k := range keysForBoolMap {
		mapStringForBoolMap += fmt.Sprintf("%v: %v,", k, this.BoolMap[k])
	}
	mapStringForBoolMap += "}"
	keysForStringMap := make([]string, 0, len(this.StringMap))
	for k := range this.StringMap {
		keysForStringMap = append(keysForStringMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringMap)
	mapStringForStringMap := "map[string]string{"
	for _, k := range keysForStringMap {
		mapStringForStringMap += fmt.Sprintf("%v: %v,", k, this.StringMap[k])
	}
	mapStringForStringMap += "}"
	keysForStringToBytesMap := make([]string, 0, len(this.StringToBytesMap))
	for k := range this.StringToBytesMap {
		keysForStringToBytesMap = append(keysForStringToBytesMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToBytesMap)
	mapStringForStringToBytesMap := "map[string][]byte{"
	for _, k := range keysForStringToBytesMap {
		mapStringForStringToBytesMap += fmt.Sprintf("%v: %v,", k, this.StringToBytesMap[k])
	}
	mapStringForStringToBytesMap += "}"
	keysForStringToEnumMap := make([]string, 0, len(this.StringToEnumMap))
	for k := range this.StringToEnumMap {
		keysForStringToEnumMap = append(keysForStringToEnumMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToEnumMap)
	mapStringForStringToEnumMap := "map[string]MapEnum{"
	for _, k := range keysForStringToEnumMap {
		mapStringForStringToEnumMap += fmt.Sprintf("%v: %v,", k, this.StringToEnumMap[k])
	}
	mapStringForStringToEnumMap += "}"
	keysForStringToMsgMap := make([]string, 0, len(this.StringToMsgMap))
	for k := range this.StringToMsgMap {
		keysForStringToMsgMap = append(keysForStringToMsgMap, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForStringToMsgMap)
	mapStringForStringToMsgMap := "map[string]*FloatingPoint{"
	for _, k := range keysForStringToMsgMap {
		mapStringForStringToMsgMap += fmt.Sprintf("%v: %v,", k, this.StringToMsgMap[k])
	}
	mapStringForStringToMsgMap += "}"
	// Join with "" simply concatenates the pre-rendered field strings.
	s := strings.Join([]string{`&AllMapsOrdered{`,
		`StringToDoubleMap:` + mapStringForStringToDoubleMap + `,`,
		`StringToFloatMap:` + mapStringForStringToFloatMap + `,`,
		`Int32Map:` + mapStringForInt32Map + `,`,
		`Int64Map:` + mapStringForInt64Map + `,`,
		`Uint32Map:` + mapStringForUint32Map + `,`,
		`Uint64Map:` + mapStringForUint64Map + `,`,
		`Sint32Map:` + mapStringForSint32Map + `,`,
		`Sint64Map:` + mapStringForSint64Map + `,`,
		`Fixed32Map:` + mapStringForFixed32Map + `,`,
		`Sfixed32Map:` + mapStringForSfixed32Map + `,`,
		`Fixed64Map:` + mapStringForFixed64Map + `,`,
		`Sfixed64Map:` + mapStringForSfixed64Map + `,`,
		`BoolMap:` + mapStringForBoolMap + `,`,
		`StringMap:` + mapStringForStringMap + `,`,
		`StringToBytesMap:` + mapStringForStringToBytesMap + `,`,
		`StringToEnumMap:` + mapStringForStringToEnumMap + `,`,
		`StringToMsgMap:` + mapStringForStringToMsgMap + `,`,
		`XXX_unrecognized:` + fmt.Sprintf("%v", this.XXX_unrecognized) + `,`,
		`}`,
	}, "")
	return s
}
// String implements fmt.Stringer for MessageWithMap in the generated one-line
// form. Map keys are sorted before rendering so the output is deterministic
// despite Go's random map iteration order.
func (this *MessageWithMap) String() string {
	if this == nil {
		return "nil"
	}
	nameKeys := make([]int32, 0, len(this.NameMapping))
	for k := range this.NameMapping {
		nameKeys = append(nameKeys, k)
	}
	github_com_gogo_protobuf_sortkeys.Int32s(nameKeys)
	nameStr := "map[int32]string{"
	for _, k := range nameKeys {
		nameStr += fmt.Sprintf("%v: %v,", k, this.NameMapping[k])
	}
	nameStr += "}"
	msgKeys := make([]int64, 0, len(this.MsgMapping))
	for k := range this.MsgMapping {
		msgKeys = append(msgKeys, k)
	}
	github_com_gogo_protobuf_sortkeys.Int64s(msgKeys)
	msgStr := "map[int64]*FloatingPoint{"
	for _, k := range msgKeys {
		msgStr += fmt.Sprintf("%v: %v,", k, this.MsgMapping[k])
	}
	msgStr += "}"
	byteKeys := make([]bool, 0, len(this.ByteMapping))
	for k := range this.ByteMapping {
		byteKeys = append(byteKeys, k)
	}
	github_com_gogo_protobuf_sortkeys.Bools(byteKeys)
	byteStr := "map[bool][]byte{"
	for _, k := range byteKeys {
		byteStr += fmt.Sprintf("%v: %v,", k, this.ByteMapping[k])
	}
	byteStr += "}"
	return "&MessageWithMap{" +
		"NameMapping:" + nameStr + "," +
		"MsgMapping:" + msgStr + "," +
		"ByteMapping:" + byteStr + "," +
		"XXX_unrecognized:" + fmt.Sprintf("%v", this.XXX_unrecognized) + "," +
		"}"
}
// String implements fmt.Stringer for FloatingPoint, rendering the message as
// a single-line struct-literal-style string ("nil" for a nil receiver).
func (this *FloatingPoint) String() string {
	if this == nil {
		return "nil"
	}
	return "&FloatingPoint{" +
		"F:" + fmt.Sprintf("%v", this.F) + "," +
		"XXX_unrecognized:" + fmt.Sprintf("%v", this.XXX_unrecognized) + "," +
		"}"
}
// String implements fmt.Stringer for Uint128Pair, rendering both halves of
// the pair as a single-line struct-literal-style string.
func (this *Uint128Pair) String() string {
	if this == nil {
		return "nil"
	}
	return "&Uint128Pair{" +
		"Left:" + fmt.Sprintf("%v", this.Left) + "," +
		"Right:" + fmt.Sprintf("%v", this.Right) + "," +
		"XXX_unrecognized:" + fmt.Sprintf("%v", this.XXX_unrecognized) + "," +
		"}"
}
// String implements fmt.Stringer for ContainsNestedMap. The message has no
// regular fields of its own, so only unrecognized bytes are rendered.
func (this *ContainsNestedMap) String() string {
	if this == nil {
		return "nil"
	}
	return "&ContainsNestedMap{" +
		"XXX_unrecognized:" + fmt.Sprintf("%v", this.XXX_unrecognized) + "," +
		"}"
}
// String implements fmt.Stringer for ContainsNestedMap_NestedMap. The map
// field is rendered with its keys in sorted order for deterministic output.
func (this *ContainsNestedMap_NestedMap) String() string {
	if this == nil {
		return "nil"
	}
	fieldKeys := make([]string, 0, len(this.NestedMapField))
	for k := range this.NestedMapField {
		fieldKeys = append(fieldKeys, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(fieldKeys)
	fieldStr := "map[string]float64{"
	for _, k := range fieldKeys {
		fieldStr += fmt.Sprintf("%v: %v,", k, this.NestedMapField[k])
	}
	fieldStr += "}"
	return "&ContainsNestedMap_NestedMap{" +
		"NestedMapField:" + fieldStr + "," +
		"XXX_unrecognized:" + fmt.Sprintf("%v", this.XXX_unrecognized) + "," +
		"}"
}
// String implements fmt.Stringer for NotPacked, rendering the repeated Key
// field and any unrecognized bytes as a single-line string.
func (this *NotPacked) String() string {
	if this == nil {
		return "nil"
	}
	return "&NotPacked{" +
		"Key:" + fmt.Sprintf("%v", this.Key) + "," +
		"XXX_unrecognized:" + fmt.Sprintf("%v", this.XXX_unrecognized) + "," +
		"}"
}
// valueToStringTheproto3 formats a pointer for the generated String methods:
// it returns "nil" for a nil pointer and "*<value>" otherwise, dereferencing
// the pointer via reflection.
func valueToStringTheproto3(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	return fmt.Sprintf("*%v", reflect.Indirect(rv).Interface())
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Repeated and map fields are appended/merged into any existing values, and
// fields with tags this decoder does not know are preserved verbatim in
// m.XXX_unrecognized. It returns io.ErrUnexpectedEOF for truncated input,
// ErrIntOverflowTheproto3 for varints longer than 64 bits, and
// ErrInvalidLengthTheproto3 for negative or overflowing lengths.
func (m *Message) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		// Wire type 4 (end-group) is never valid at the top level of a message.
		if wireType == 4 {
			return fmt.Errorf("proto: Message: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Message: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		// Field 1: Name (string, length-delimited).
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		// Field 2: Hilarity (enum, varint).
		case 2:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Hilarity", wireType)
			}
			m.Hilarity = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Hilarity |= Message_Humour(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		// Field 3: HeightInCm (uint32, varint).
		case 3:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field HeightInCm", wireType)
			}
			m.HeightInCm = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.HeightInCm |= uint32(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		// Field 4: Data (bytes, length-delimited).
		case 4:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Data", wireType)
			}
			var byteLen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				byteLen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if byteLen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + byteLen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Data = append(m.Data[:0], dAtA[iNdEx:postIndex]...)
			// Distinguish "present but empty" from "absent": keep a non-nil slice.
			if m.Data == nil {
				m.Data = []byte{}
			}
			iNdEx = postIndex
		// Field 7: ResultCount (int64, varint).
		case 7:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field ResultCount", wireType)
			}
			m.ResultCount = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.ResultCount |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		// Field 8: TrueScotsman (bool, varint; any non-zero value is true).
		case 8:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field TrueScotsman", wireType)
			}
			var v int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.TrueScotsman = bool(v != 0)
		// Field 9: Score (float, fixed 32-bit little-endian).
		case 9:
			if wireType != 5 {
				return fmt.Errorf("proto: wrong wireType = %d for field Score", wireType)
			}
			var v uint32
			if (iNdEx + 4) > l {
				return io.ErrUnexpectedEOF
			}
			v = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
			iNdEx += 4
			m.Score = float32(math.Float32frombits(v))
		// Field 5: Key (repeated uint64); accepts both unpacked (one varint
		// per element, wire type 0) and packed (length-delimited, wire type 2)
		// encodings, as required by the proto spec for repeated scalars.
		case 5:
			if wireType == 0 {
				var v uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					v |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				m.Key = append(m.Key, v)
			} else if wireType == 2 {
				var packedLen int
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					packedLen |= int(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				if packedLen < 0 {
					return ErrInvalidLengthTheproto3
				}
				postIndex := iNdEx + packedLen
				if postIndex < 0 {
					return ErrInvalidLengthTheproto3
				}
				if postIndex > l {
					return io.ErrUnexpectedEOF
				}
				// Pre-size the slice: each varint's final byte is < 128, so
				// counting those bytes counts the packed elements.
				var elementCount int
				var count int
				for _, integer := range dAtA[iNdEx:postIndex] {
					if integer < 128 {
						count++
					}
				}
				elementCount = count
				if elementCount != 0 && len(m.Key) == 0 {
					m.Key = make([]uint64, 0, elementCount)
				}
				for iNdEx < postIndex {
					var v uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						v |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					m.Key = append(m.Key, v)
				}
			} else {
				return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType)
			}
		// Field 6: Nested (embedded message, length-delimited).
		case 6:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Nested", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Nested == nil {
				m.Nested = &Nested{}
			}
			if err := m.Nested.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		// Field 10: Terrain (map<int64, Nested>); each entry is a nested
		// message with key = field 1 and value = field 2.
		case 10:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Terrain", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Terrain == nil {
				m.Terrain = make(map[int64]*Nested)
			}
			var mapkey int64
			var mapvalue *Nested
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapkey |= int64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
				} else if fieldNum == 2 {
					var mapmsglen int
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapmsglen |= int(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					if mapmsglen < 0 {
						return ErrInvalidLengthTheproto3
					}
					postmsgIndex := iNdEx + mapmsglen
					if postmsgIndex < 0 {
						return ErrInvalidLengthTheproto3
					}
					if postmsgIndex > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = &Nested{}
					if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil {
						return err
					}
					iNdEx = postmsgIndex
				} else {
					// Unknown field inside the map entry: skip it.
					iNdEx = entryPreIndex
					skippy, err := skipTheproto3(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthTheproto3
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.Terrain[mapkey] = mapvalue
			iNdEx = postIndex
		// Field 11: Proto2Field (embedded proto2 message, length-delimited).
		case 11:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Proto2Field", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Proto2Field == nil {
				m.Proto2Field = &both.NinOptNative{}
			}
			if err := m.Proto2Field.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		// Field 13: Proto2Value (map<int64, NinOptEnum>); same entry layout
		// as Terrain above.
		case 13:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Proto2Value", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Proto2Value == nil {
				m.Proto2Value = make(map[int64]*both.NinOptEnum)
			}
			var mapkey int64
			var mapvalue *both.NinOptEnum
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapkey |= int64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
				} else if fieldNum == 2 {
					var mapmsglen int
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapmsglen |= int(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					if mapmsglen < 0 {
						return ErrInvalidLengthTheproto3
					}
					postmsgIndex := iNdEx + mapmsglen
					if postmsgIndex < 0 {
						return ErrInvalidLengthTheproto3
					}
					if postmsgIndex > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = &both.NinOptEnum{}
					if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil {
						return err
					}
					iNdEx = postmsgIndex
				} else {
					// Unknown field inside the map entry: skip it.
					iNdEx = entryPreIndex
					skippy, err := skipTheproto3(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthTheproto3
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.Proto2Value[mapkey] = mapvalue
			iNdEx = postIndex
		// Unknown field: skip it and stash the raw bytes in XXX_unrecognized.
		default:
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m. Unknown
// fields are preserved in m.XXX_unrecognized. It returns io.ErrUnexpectedEOF
// for truncated input, ErrIntOverflowTheproto3 for varints longer than 64
// bits, and ErrInvalidLengthTheproto3 for negative or overflowing lengths.
func (m *Nested) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		// Wire type 4 (end-group) is never valid at the top level of a message.
		if wireType == 4 {
			return fmt.Errorf("proto: Nested: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Nested: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		// Field 1: Bunny (string, length-delimited).
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Bunny", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Bunny = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		// Unknown field: skip it and stash the raw bytes in XXX_unrecognized.
		default:
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
func (m *AllMaps) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: AllMaps: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: AllMaps: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToDoubleMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToDoubleMap == nil {
m.StringToDoubleMap = make(map[string]float64)
}
var mapkey string
var mapvalue float64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapvaluetemp uint64
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapvaluetemp = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
mapvalue = math.Float64frombits(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToDoubleMap[mapkey] = mapvalue
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToFloatMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToFloatMap == nil {
m.StringToFloatMap = make(map[string]float32)
}
var mapkey string
var mapvalue float32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapvaluetemp uint32
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapvaluetemp = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
mapvalue = math.Float32frombits(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToFloatMap[mapkey] = mapvalue
iNdEx = postIndex
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Int32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Int32Map == nil {
m.Int32Map = make(map[int32]int32)
}
var mapkey int32
var mapvalue int32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Int32Map[mapkey] = mapvalue
iNdEx = postIndex
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Int64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Int64Map == nil {
m.Int64Map = make(map[int64]int64)
}
var mapkey int64
var mapvalue int64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Int64Map[mapkey] = mapvalue
iNdEx = postIndex
case 5:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Uint32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Uint32Map == nil {
m.Uint32Map = make(map[uint32]uint32)
}
var mapkey uint32
var mapvalue uint32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= uint32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= uint32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Uint32Map[mapkey] = mapvalue
iNdEx = postIndex
case 6:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Uint64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Uint64Map == nil {
m.Uint64Map = make(map[uint64]uint64)
}
var mapkey uint64
var mapvalue uint64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Uint64Map[mapkey] = mapvalue
iNdEx = postIndex
case 7:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sint32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sint32Map == nil {
m.Sint32Map = make(map[int32]int32)
}
var mapkey int32
var mapvalue int32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var mapkeytemp int32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkeytemp |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapkeytemp = int32((uint32(mapkeytemp) >> 1) ^ uint32(((mapkeytemp&1)<<31)>>31))
mapkey = int32(mapkeytemp)
} else if fieldNum == 2 {
var mapvaluetemp int32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvaluetemp |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapvaluetemp = int32((uint32(mapvaluetemp) >> 1) ^ uint32(((mapvaluetemp&1)<<31)>>31))
mapvalue = int32(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sint32Map[mapkey] = mapvalue
iNdEx = postIndex
case 8:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sint64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sint64Map == nil {
m.Sint64Map = make(map[int64]int64)
}
var mapkey int64
var mapvalue int64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var mapkeytemp uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkeytemp |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapkeytemp = (mapkeytemp >> 1) ^ uint64((int64(mapkeytemp&1)<<63)>>63)
mapkey = int64(mapkeytemp)
} else if fieldNum == 2 {
var mapvaluetemp uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvaluetemp |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapvaluetemp = (mapvaluetemp >> 1) ^ uint64((int64(mapvaluetemp&1)<<63)>>63)
mapvalue = int64(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sint64Map[mapkey] = mapvalue
iNdEx = postIndex
case 9:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Fixed32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Fixed32Map == nil {
m.Fixed32Map = make(map[uint32]uint32)
}
var mapkey uint32
var mapvalue uint32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapkey = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else if fieldNum == 2 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapvalue = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Fixed32Map[mapkey] = mapvalue
iNdEx = postIndex
case 10:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sfixed32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sfixed32Map == nil {
m.Sfixed32Map = make(map[int32]int32)
}
var mapkey int32
var mapvalue int32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapkey = int32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else if fieldNum == 2 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapvalue = int32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sfixed32Map[mapkey] = mapvalue
iNdEx = postIndex
case 11:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Fixed64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Fixed64Map == nil {
m.Fixed64Map = make(map[uint64]uint64)
}
var mapkey uint64
var mapvalue uint64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapkey = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else if fieldNum == 2 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapvalue = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Fixed64Map[mapkey] = mapvalue
iNdEx = postIndex
case 12:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sfixed64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sfixed64Map == nil {
m.Sfixed64Map = make(map[int64]int64)
}
var mapkey int64
var mapvalue int64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapkey = int64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else if fieldNum == 2 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapvalue = int64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sfixed64Map[mapkey] = mapvalue
iNdEx = postIndex
case 13:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field BoolMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.BoolMap == nil {
m.BoolMap = make(map[bool]bool)
}
var mapkey bool
var mapvalue bool
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var mapkeytemp int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkeytemp |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapkey = bool(mapkeytemp != 0)
} else if fieldNum == 2 {
var mapvaluetemp int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvaluetemp |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapvalue = bool(mapvaluetemp != 0)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.BoolMap[mapkey] = mapvalue
iNdEx = postIndex
case 14:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringMap == nil {
m.StringMap = make(map[string]string)
}
var mapkey string
var mapvalue string
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var stringLenmapvalue uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapvalue |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapvalue := int(stringLenmapvalue)
if intStringLenmapvalue < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapvalue := iNdEx + intStringLenmapvalue
if postStringIndexmapvalue < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapvalue > l {
return io.ErrUnexpectedEOF
}
mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
iNdEx = postStringIndexmapvalue
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringMap[mapkey] = mapvalue
iNdEx = postIndex
case 15:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToBytesMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToBytesMap == nil {
m.StringToBytesMap = make(map[string][]byte)
}
var mapkey string
mapvalue := []byte{}
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapbyteLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapbyteLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intMapbyteLen := int(mapbyteLen)
if intMapbyteLen < 0 {
return ErrInvalidLengthTheproto3
}
postbytesIndex := iNdEx + intMapbyteLen
if postbytesIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postbytesIndex > l {
return io.ErrUnexpectedEOF
}
mapvalue = make([]byte, mapbyteLen)
copy(mapvalue, dAtA[iNdEx:postbytesIndex])
iNdEx = postbytesIndex
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToBytesMap[mapkey] = mapvalue
iNdEx = postIndex
case 16:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToEnumMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToEnumMap == nil {
m.StringToEnumMap = make(map[string]MapEnum)
}
var mapkey string
var mapvalue MapEnum
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= MapEnum(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToEnumMap[mapkey] = mapvalue
iNdEx = postIndex
case 17:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToMsgMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToMsgMap == nil {
m.StringToMsgMap = make(map[string]*FloatingPoint)
}
var mapkey string
var mapvalue *FloatingPoint
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapmsglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapmsglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if mapmsglen < 0 {
return ErrInvalidLengthTheproto3
}
postmsgIndex := iNdEx + mapmsglen
if postmsgIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postmsgIndex > l {
return io.ErrUnexpectedEOF
}
mapvalue = &FloatingPoint{}
if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil {
return err
}
iNdEx = postmsgIndex
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToMsgMap[mapkey] = mapvalue
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *AllMapsOrdered) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: AllMapsOrdered: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: AllMapsOrdered: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToDoubleMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToDoubleMap == nil {
m.StringToDoubleMap = make(map[string]float64)
}
var mapkey string
var mapvalue float64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapvaluetemp uint64
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapvaluetemp = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
mapvalue = math.Float64frombits(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToDoubleMap[mapkey] = mapvalue
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToFloatMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToFloatMap == nil {
m.StringToFloatMap = make(map[string]float32)
}
var mapkey string
var mapvalue float32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapvaluetemp uint32
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapvaluetemp = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
mapvalue = math.Float32frombits(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToFloatMap[mapkey] = mapvalue
iNdEx = postIndex
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Int32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Int32Map == nil {
m.Int32Map = make(map[int32]int32)
}
var mapkey int32
var mapvalue int32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Int32Map[mapkey] = mapvalue
iNdEx = postIndex
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Int64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Int64Map == nil {
m.Int64Map = make(map[int64]int64)
}
var mapkey int64
var mapvalue int64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Int64Map[mapkey] = mapvalue
iNdEx = postIndex
case 5:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Uint32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Uint32Map == nil {
m.Uint32Map = make(map[uint32]uint32)
}
var mapkey uint32
var mapvalue uint32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= uint32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= uint32(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Uint32Map[mapkey] = mapvalue
iNdEx = postIndex
case 6:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Uint64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Uint64Map == nil {
m.Uint64Map = make(map[uint64]uint64)
}
var mapkey uint64
var mapvalue uint64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Uint64Map[mapkey] = mapvalue
iNdEx = postIndex
case 7:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sint32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sint32Map == nil {
m.Sint32Map = make(map[int32]int32)
}
var mapkey int32
var mapvalue int32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var mapkeytemp int32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkeytemp |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapkeytemp = int32((uint32(mapkeytemp) >> 1) ^ uint32(((mapkeytemp&1)<<31)>>31))
mapkey = int32(mapkeytemp)
} else if fieldNum == 2 {
var mapvaluetemp int32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvaluetemp |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapvaluetemp = int32((uint32(mapvaluetemp) >> 1) ^ uint32(((mapvaluetemp&1)<<31)>>31))
mapvalue = int32(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sint32Map[mapkey] = mapvalue
iNdEx = postIndex
case 8:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sint64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sint64Map == nil {
m.Sint64Map = make(map[int64]int64)
}
var mapkey int64
var mapvalue int64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var mapkeytemp uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkeytemp |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapkeytemp = (mapkeytemp >> 1) ^ uint64((int64(mapkeytemp&1)<<63)>>63)
mapkey = int64(mapkeytemp)
} else if fieldNum == 2 {
var mapvaluetemp uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvaluetemp |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapvaluetemp = (mapvaluetemp >> 1) ^ uint64((int64(mapvaluetemp&1)<<63)>>63)
mapvalue = int64(mapvaluetemp)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sint64Map[mapkey] = mapvalue
iNdEx = postIndex
case 9:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Fixed32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Fixed32Map == nil {
m.Fixed32Map = make(map[uint32]uint32)
}
var mapkey uint32
var mapvalue uint32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapkey = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else if fieldNum == 2 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapvalue = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Fixed32Map[mapkey] = mapvalue
iNdEx = postIndex
case 10:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sfixed32Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sfixed32Map == nil {
m.Sfixed32Map = make(map[int32]int32)
}
var mapkey int32
var mapvalue int32
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapkey = int32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else if fieldNum == 2 {
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
mapvalue = int32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sfixed32Map[mapkey] = mapvalue
iNdEx = postIndex
case 11:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Fixed64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Fixed64Map == nil {
m.Fixed64Map = make(map[uint64]uint64)
}
var mapkey uint64
var mapvalue uint64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapkey = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else if fieldNum == 2 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapvalue = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Fixed64Map[mapkey] = mapvalue
iNdEx = postIndex
case 12:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sfixed64Map", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Sfixed64Map == nil {
m.Sfixed64Map = make(map[int64]int64)
}
var mapkey int64
var mapvalue int64
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapkey = int64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else if fieldNum == 2 {
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
mapvalue = int64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
iNdEx += 8
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Sfixed64Map[mapkey] = mapvalue
iNdEx = postIndex
case 13:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field BoolMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.BoolMap == nil {
m.BoolMap = make(map[bool]bool)
}
var mapkey bool
var mapvalue bool
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var mapkeytemp int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapkeytemp |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapkey = bool(mapkeytemp != 0)
} else if fieldNum == 2 {
var mapvaluetemp int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvaluetemp |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
mapvalue = bool(mapvaluetemp != 0)
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.BoolMap[mapkey] = mapvalue
iNdEx = postIndex
case 14:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringMap == nil {
m.StringMap = make(map[string]string)
}
var mapkey string
var mapvalue string
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var stringLenmapvalue uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapvalue |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapvalue := int(stringLenmapvalue)
if intStringLenmapvalue < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapvalue := iNdEx + intStringLenmapvalue
if postStringIndexmapvalue < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapvalue > l {
return io.ErrUnexpectedEOF
}
mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
iNdEx = postStringIndexmapvalue
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringMap[mapkey] = mapvalue
iNdEx = postIndex
case 15:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToBytesMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToBytesMap == nil {
m.StringToBytesMap = make(map[string][]byte)
}
var mapkey string
mapvalue := []byte{}
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapbyteLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapbyteLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intMapbyteLen := int(mapbyteLen)
if intMapbyteLen < 0 {
return ErrInvalidLengthTheproto3
}
postbytesIndex := iNdEx + intMapbyteLen
if postbytesIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postbytesIndex > l {
return io.ErrUnexpectedEOF
}
mapvalue = make([]byte, mapbyteLen)
copy(mapvalue, dAtA[iNdEx:postbytesIndex])
iNdEx = postbytesIndex
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToBytesMap[mapkey] = mapvalue
iNdEx = postIndex
case 16:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToEnumMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToEnumMap == nil {
m.StringToEnumMap = make(map[string]MapEnum)
}
var mapkey string
var mapvalue MapEnum
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapvalue |= MapEnum(b&0x7F) << shift
if b < 0x80 {
break
}
}
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToEnumMap[mapkey] = mapvalue
iNdEx = postIndex
case 17:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StringToMsgMap", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthTheproto3
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StringToMsgMap == nil {
m.StringToMsgMap = make(map[string]*FloatingPoint)
}
var mapkey string
var mapvalue *FloatingPoint
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthTheproto3
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthTheproto3
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var mapmsglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTheproto3
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
mapmsglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if mapmsglen < 0 {
return ErrInvalidLengthTheproto3
}
postmsgIndex := iNdEx + mapmsglen
if postmsgIndex < 0 {
return ErrInvalidLengthTheproto3
}
if postmsgIndex > l {
return io.ErrUnexpectedEOF
}
mapvalue = &FloatingPoint{}
if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil {
return err
}
iNdEx = postmsgIndex
} else {
iNdEx = entryPreIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StringToMsgMap[mapkey] = mapvalue
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipTheproto3(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTheproto3
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
//
// This is generated-style decoding: the outer loop reads one field tag at
// a time (a varint holding fieldNum<<3 | wireType), dispatches on the field
// number, and appends any unrecognized field's raw bytes to
// m.XXX_unrecognized. Map fields arrive on the wire as repeated
// length-delimited entry sub-messages whose key is field 1 and whose value
// is field 2; a missing key or value leaves the corresponding zero value.
// Returns io.ErrUnexpectedEOF on truncated input, ErrIntOverflowTheproto3
// on an over-long varint, and ErrInvalidLengthTheproto3 on a negative or
// overflowing length prefix.
func (m *MessageWithMap) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the next field tag as a base-128 varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			// Wire type 4 is "end group"; one with no matching start group is malformed.
			return fmt.Errorf("proto: MessageWithMap: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: MessageWithMap: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// NameMapping: map<int32, string>. Entries are length-delimited
			// sub-messages; parse each within [iNdEx, postIndex).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NameMapping", wireType)
			}
			// Length prefix of the map entry sub-message.
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				// Guard against integer overflow in iNdEx + msglen.
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.NameMapping == nil {
				m.NameMapping = make(map[int32]string)
			}
			// Key/value start as zero values; either may be absent on the wire.
			var mapkey int32
			var mapvalue string
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				// Tag of the next field inside the entry sub-message.
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					// Entry key: int32 varint.
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapkey |= int32(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
				} else if fieldNum == 2 {
					// Entry value: length-delimited string.
					var stringLenmapvalue uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapvalue |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapvalue := int(stringLenmapvalue)
					if intStringLenmapvalue < 0 {
						return ErrInvalidLengthTheproto3
					}
					postStringIndexmapvalue := iNdEx + intStringLenmapvalue
					if postStringIndexmapvalue < 0 {
						return ErrInvalidLengthTheproto3
					}
					if postStringIndexmapvalue > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
					iNdEx = postStringIndexmapvalue
				} else {
					// Unknown field inside the entry: skip it, bounded by postIndex.
					iNdEx = entryPreIndex
					skippy, err := skipTheproto3(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthTheproto3
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.NameMapping[mapkey] = mapvalue
			iNdEx = postIndex
		case 2:
			// MsgMapping: map<sint64, FloatingPoint>. The key is
			// zigzag-encoded; the value is a nested message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field MsgMapping", wireType)
			}
			// Length prefix of the map entry sub-message.
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				// Guard against integer overflow in iNdEx + msglen.
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.MsgMapping == nil {
				m.MsgMapping = make(map[int64]*FloatingPoint)
			}
			// NOTE: a key-only entry leaves mapvalue nil, so a nil pointer can
			// be stored in the map.
			var mapkey int64
			var mapvalue *FloatingPoint
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				// Tag of the next field inside the entry sub-message.
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					// Entry key: sint64 — read the raw varint, then undo zigzag
					// encoding ((n >> 1) ^ -(n & 1)).
					var mapkeytemp uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapkeytemp |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					mapkeytemp = (mapkeytemp >> 1) ^ uint64((int64(mapkeytemp&1)<<63)>>63)
					mapkey = int64(mapkeytemp)
				} else if fieldNum == 2 {
					// Entry value: length-delimited FloatingPoint message,
					// decoded recursively via its own Unmarshal.
					var mapmsglen int
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapmsglen |= int(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					if mapmsglen < 0 {
						return ErrInvalidLengthTheproto3
					}
					postmsgIndex := iNdEx + mapmsglen
					if postmsgIndex < 0 {
						return ErrInvalidLengthTheproto3
					}
					if postmsgIndex > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = &FloatingPoint{}
					if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil {
						return err
					}
					iNdEx = postmsgIndex
				} else {
					// Unknown field inside the entry: skip it, bounded by postIndex.
					iNdEx = entryPreIndex
					skippy, err := skipTheproto3(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthTheproto3
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.MsgMapping[mapkey] = mapvalue
			iNdEx = postIndex
		case 3:
			// ByteMapping: map<bool, bytes>. The key is a varint treated as a
			// boolean (non-zero == true); the value is a copied byte slice.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ByteMapping", wireType)
			}
			// Length prefix of the map entry sub-message.
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				// Guard against integer overflow in iNdEx + msglen.
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.ByteMapping == nil {
				m.ByteMapping = make(map[bool][]byte)
			}
			var mapkey bool
			// Starts as an empty (non-nil) slice so a value-less entry maps to [].
			mapvalue := []byte{}
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				// Tag of the next field inside the entry sub-message.
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					// Entry key: bool encoded as a varint; any non-zero is true.
					var mapkeytemp int
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapkeytemp |= int(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					mapkey = bool(mapkeytemp != 0)
				} else if fieldNum == 2 {
					// Entry value: length-delimited bytes, copied out of dAtA so
					// the map value does not alias the input buffer.
					var mapbyteLen uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapbyteLen |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intMapbyteLen := int(mapbyteLen)
					if intMapbyteLen < 0 {
						return ErrInvalidLengthTheproto3
					}
					postbytesIndex := iNdEx + intMapbyteLen
					if postbytesIndex < 0 {
						return ErrInvalidLengthTheproto3
					}
					if postbytesIndex > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = make([]byte, mapbyteLen)
					copy(mapvalue, dAtA[iNdEx:postbytesIndex])
					iNdEx = postbytesIndex
				} else {
					// Unknown field inside the entry: skip it, bounded by postIndex.
					iNdEx = entryPreIndex
					skippy, err := skipTheproto3(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthTheproto3
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.ByteMapping[mapkey] = mapvalue
			iNdEx = postIndex
		default:
			// Unknown top-level field: rewind to the tag and skip the whole
			// field, preserving its raw bytes for round-tripping.
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m.
// This is protoc-gen-gogo generated code; hand edits will be lost on regeneration.
func (m *FloatingPoint) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint-encoded tag: (field number << 3) | wire type.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: FloatingPoint: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: FloatingPoint: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1 (F): fixed64, reinterpreted as an IEEE-754 double.
			if wireType != 1 {
				return fmt.Errorf("proto: wrong wireType = %d for field F", wireType)
			}
			var v uint64
			if (iNdEx + 8) > l {
				return io.ErrUnexpectedEOF
			}
			v = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
			iNdEx += 8
			m.F = float64(math.Float64frombits(v))
		default:
			// Unknown field: skip it and preserve its raw bytes in XXX_unrecognized.
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m.
// This is protoc-gen-gogo generated code; hand edits will be lost on regeneration.
func (m *Uint128Pair) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint-encoded tag: (field number << 3) | wire type.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Uint128Pair: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Uint128Pair: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1 (Left): length-delimited, decoded by the custom Uint128 type
			// directly into the (non-pointer) field.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Left", wireType)
			}
			var byteLen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				byteLen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if byteLen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + byteLen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Left.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Field 2 (Right): same as Left, but the field is a pointer, so a fresh
			// value is allocated before decoding into it.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Right", wireType)
			}
			var byteLen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				byteLen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if byteLen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + byteLen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			var v github_com_gogo_protobuf_test_custom.Uint128
			m.Right = &v
			if err := m.Right.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: skip it and preserve its raw bytes in XXX_unrecognized.
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m.
// ContainsNestedMap declares no fields of its own, so every field found on the
// wire is treated as unknown and preserved in XXX_unrecognized.
// This is protoc-gen-gogo generated code; hand edits will be lost on regeneration.
func (m *ContainsNestedMap) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint-encoded tag: (field number << 3) | wire type.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ContainsNestedMap: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ContainsNestedMap: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		default:
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m.
// Map entries are encoded as nested messages with field 1 = key (string) and
// field 2 = value (double).
// This is protoc-gen-gogo generated code; hand edits will be lost on regeneration.
func (m *ContainsNestedMap_NestedMap) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint-encoded tag: (field number << 3) | wire type.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: NestedMap: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: NestedMap: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1 (NestedMapField): one length-delimited map entry per occurrence.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NestedMapField", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTheproto3
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTheproto3
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// Lazily allocate the map on the first entry seen.
			if m.NestedMapField == nil {
				m.NestedMapField = make(map[string]float64)
			}
			var mapkey string
			var mapvalue float64
			// Walk the sub-fields of this single map entry.
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					// Entry key: length-delimited string.
					var stringLenmapkey uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapkey |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapkey := int(stringLenmapkey)
					if intStringLenmapkey < 0 {
						return ErrInvalidLengthTheproto3
					}
					postStringIndexmapkey := iNdEx + intStringLenmapkey
					if postStringIndexmapkey < 0 {
						return ErrInvalidLengthTheproto3
					}
					if postStringIndexmapkey > l {
						return io.ErrUnexpectedEOF
					}
					mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
					iNdEx = postStringIndexmapkey
				} else if fieldNum == 2 {
					// Entry value: fixed64 bits of an IEEE-754 double.
					var mapvaluetemp uint64
					if (iNdEx + 8) > l {
						return io.ErrUnexpectedEOF
					}
					mapvaluetemp = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
					iNdEx += 8
					mapvalue = math.Float64frombits(mapvaluetemp)
				} else {
					// Unknown sub-field inside the map entry: skip it.
					iNdEx = entryPreIndex
					skippy, err := skipTheproto3(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthTheproto3
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.NestedMapField[mapkey] = mapvalue
			iNdEx = postIndex
		default:
			// Unknown field: skip it and preserve its raw bytes in XXX_unrecognized.
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m.
// Field 5 (Key, repeated uint64) accepts both the unpacked (one varint per
// occurrence, wire type 0) and the packed (length-delimited run of varints,
// wire type 2) encodings.
// This is protoc-gen-gogo generated code; hand edits will be lost on regeneration.
func (m *NotPacked) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint-encoded tag: (field number << 3) | wire type.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: NotPacked: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: NotPacked: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 5:
			if wireType == 0 {
				// Unpacked encoding: a single varint element.
				var v uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					v |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				m.Key = append(m.Key, v)
			} else if wireType == 2 {
				// Packed encoding: a length-delimited run of varints.
				var packedLen int
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowTheproto3
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					packedLen |= int(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				if packedLen < 0 {
					return ErrInvalidLengthTheproto3
				}
				postIndex := iNdEx + packedLen
				if postIndex < 0 {
					return ErrInvalidLengthTheproto3
				}
				if postIndex > l {
					return io.ErrUnexpectedEOF
				}
				// Pre-count the elements (each varint ends with a byte < 0x80) so the
				// slice can be allocated with the right capacity in one shot.
				var elementCount int
				var count int
				for _, integer := range dAtA[iNdEx:postIndex] {
					if integer < 128 {
						count++
					}
				}
				elementCount = count
				if elementCount != 0 && len(m.Key) == 0 {
					m.Key = make([]uint64, 0, elementCount)
				}
				for iNdEx < postIndex {
					var v uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowTheproto3
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						v |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					m.Key = append(m.Key, v)
				}
			} else {
				return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType)
			}
		default:
			// Unknown field: skip it and preserve its raw bytes in XXX_unrecognized.
			iNdEx = preIndex
			skippy, err := skipTheproto3(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTheproto3
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipTheproto3 returns the number of bytes occupied by the next complete
// field (including any nested groups) at the start of dAtA, so that unknown
// fields can be skipped or copied verbatim. depth tracks start-group /
// end-group nesting; the function only returns once depth is back to zero.
// This is protoc-gen-gogo generated code; hand edits will be lost on regeneration.
func skipTheproto3(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0
	for iNdEx < l {
		var wire uint64
		// Decode the varint-encoded tag of the next field.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowTheproto3
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint: advance past the terminating byte (< 0x80).
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// Fixed64.
			iNdEx += 8
		case 2:
			// Length-delimited: read the length, then skip that many bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowTheproto3
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthTheproto3
			}
			iNdEx += length
		case 3:
			// Start group: keep consuming until the matching end group.
			depth++
		case 4:
			// End group: must match a preceding start group.
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroupTheproto3
			}
			depth--
		case 5:
			// Fixed32.
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		// Guard against overflow from the additions above.
		if iNdEx < 0 {
			return 0, ErrInvalidLengthTheproto3
		}
		if depth == 0 {
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}
var (
	// ErrInvalidLengthTheproto3 reports a decoded length that is negative or
	// overflows the buffer index.
	ErrInvalidLengthTheproto3 = fmt.Errorf("proto: negative length found during unmarshaling")
	// ErrIntOverflowTheproto3 reports a varint that does not terminate within
	// 64 bits.
	ErrIntOverflowTheproto3 = fmt.Errorf("proto: integer overflow")
	// ErrUnexpectedEndOfGroupTheproto3 reports an end-group tag with no
	// matching start-group tag.
	ErrUnexpectedEndOfGroupTheproto3 = fmt.Errorf("proto: unexpected end of group")
)
| Miciah/origin | vendor/github.com/gogo/protobuf/test/theproto3/combos/unmarshaler/theproto3.pb.go | GO | apache-2.0 | 334,898 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.client.console.policies;
import java.util.List;
import org.apache.syncope.client.console.wicket.extensions.markup.html.repeater.data.table.BooleanPropertyColumn;
import org.apache.syncope.client.console.wicket.extensions.markup.html.repeater.data.table.CollectionPropertyColumn;
import org.apache.syncope.client.console.wicket.markup.html.form.ActionLink;
import org.apache.syncope.client.console.wicket.markup.html.form.ActionsPanel;
import org.apache.syncope.common.lib.policy.AccountPolicyTO;
import org.apache.syncope.common.lib.types.PolicyType;
import org.apache.syncope.common.lib.types.IdRepoEntitlement;
import org.apache.wicket.PageReference;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.authroles.authorization.strategies.role.metadata.MetaDataRoleAuthorizationStrategy;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.PropertyColumn;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.StringResourceModel;
/**
* Account policies page.
*/
/**
 * Directory (list) panel for account policies: provides the result table, the
 * creation wizard and the per-row "compose rules" action.
 */
public class AccountPolicyDirectoryPanel extends PolicyDirectoryPanel<AccountPolicyTO> {

    private static final long serialVersionUID = 4984337552918213290L;

    public AccountPolicyDirectoryPanel(final String id, final PageReference pageRef) {
        super(id, PolicyType.ACCOUNT, pageRef);

        // Register the modal wizard used to create new account policies; the
        // "add" link is only rendered for users holding POLICY_CREATE.
        this.addNewItemPanelBuilder(new PolicyModalPanelBuilder<>(
                PolicyType.ACCOUNT, new AccountPolicyTO(), modal, pageRef), true);
        MetaDataRoleAuthorizationStrategy.authorize(addAjaxLink, RENDER, IdRepoEntitlement.POLICY_CREATE);

        initResultTable();
    }

    @Override
    protected void addCustomColumnFields(final List<IColumn<AccountPolicyTO, String>> columns) {
        // Columns specific to account policies, appended to the common ones
        // contributed by the parent panel.
        columns.add(new CollectionPropertyColumn<>(new StringResourceModel(
                "passthroughResources", this), "passthroughResources"));

        columns.add(new PropertyColumn<>(new StringResourceModel(
                "maxAuthenticationAttempts", this), "maxAuthenticationAttempts", "maxAuthenticationAttempts"));

        columns.add(new BooleanPropertyColumn<>(new StringResourceModel(
                "propagateSuspension", this), "propagateSuspension", "propagateSuspension"));
    }

    @Override
    protected void addCustomActions(final ActionsPanel<AccountPolicyTO> panel, final IModel<AccountPolicyTO> model) {
        // COMPOSE action: opens a modal listing the rules of the selected policy.
        panel.add(new ActionLink<>() {

            private static final long serialVersionUID = -3722207913631435501L;

            @Override
            public void onClick(final AjaxRequestTarget target, final AccountPolicyTO ignore) {
                target.add(ruleCompositionModal.setContent(new PolicyRuleDirectoryPanel<>(
                        ruleCompositionModal, model.getObject().getKey(), PolicyType.ACCOUNT, pageRef)));

                ruleCompositionModal.header(new StringResourceModel(
                        "policy.rules", AccountPolicyDirectoryPanel.this, model));

                // Editing rules from the modal requires POLICY_UPDATE.
                MetaDataRoleAuthorizationStrategy.authorize(
                        ruleCompositionModal.getForm(), ENABLE, IdRepoEntitlement.POLICY_UPDATE);

                ruleCompositionModal.show(true);
            }
        }, ActionLink.ActionType.COMPOSE, IdRepoEntitlement.POLICY_UPDATE);
    }
}
| apache/syncope | client/idrepo/console/src/main/java/org/apache/syncope/client/console/policies/AccountPolicyDirectoryPanel.java | Java | apache-2.0 | 4,155 |
"""Test the Z-Wave over MQTT config flow."""
from homeassistant import config_entries, setup
from homeassistant.components.ozw.config_flow import TITLE
from homeassistant.components.ozw.const import DOMAIN
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_user_create_entry(hass):
    """Test the user step creates an entry."""
    # The ozw flow aborts unless the mqtt integration is already loaded,
    # so mark it as set up before starting the flow.
    hass.config.components.add("mqtt")
    await setup.async_setup_component(hass, "persistent_notification", {})

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # First step: an (empty) confirmation form with no errors.
    assert result["type"] == "form"
    assert result["errors"] is None

    # Patch setup so confirming the form does not actually start the integration.
    with patch(
        "homeassistant.components.ozw.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.ozw.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
        await hass.async_block_till_done()

    # Confirming creates the config entry and triggers setup exactly once.
    assert result2["type"] == "create_entry"
    assert result2["title"] == TITLE
    assert result2["data"] == {}
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_mqtt_not_setup(hass):
    """Test that mqtt is required."""
    # No mqtt component registered: starting the flow must abort immediately.
    flow_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    assert flow_result["type"] == "abort"
    assert flow_result["reason"] == "mqtt_required"
async def test_one_instance_allowed(hass):
    """Test that only one instance is allowed."""
    # Pre-register an existing ozw config entry.
    entry = MockConfigEntry(domain=DOMAIN, data={}, title=TITLE)
    entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # A second flow must abort because the integration is single-instance.
    assert result["type"] == "abort"
    assert result["reason"] == "single_instance_allowed"
| sdague/home-assistant | tests/components/ozw/test_config_flow.py | Python | apache-2.0 | 1,943 |
/*
* #%L
* ELK OWL Object Interfaces
*
* $Id$
* $HeadURL$
* %%
* Copyright (C) 2011 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.semanticweb.elk.owl.interfaces;
import org.semanticweb.elk.owl.visitors.ElkPropertyAxiomVisitor;
/**
* A generic interface for axioms with data properties or object properties.
*
* @author "Yevgeny Kazakov"
*
* @param <P>
* the type of the property of this axiom
*/
public interface ElkPropertyAxiom<P> extends ElkAxiom {

	/**
	 * Get the property of this axiom.
	 *
	 * @return the property of this axiom
	 */
	P getProperty();

	/**
	 * Accept an {@link ElkPropertyAxiomVisitor}.
	 *
	 * @param visitor
	 *            the visitor that can work with this axiom type
	 * @return the output of the visitor
	 */
	public <O> O accept(ElkPropertyAxiomVisitor<O> visitor);

	/**
	 * A factory for creating instances of all property axiom types; it simply
	 * combines the factories of the individual axiom kinds (functionality,
	 * symmetry, transitivity, domain/range, assertions, etc.) into one
	 * interface and declares no methods of its own.
	 *
	 * @author Yevgeny Kazakov
	 *
	 */
	interface Factory extends ElkAsymmetricObjectPropertyAxiom.Factory,
			ElkFunctionalDataPropertyAxiom.Factory,
			ElkFunctionalObjectPropertyAxiom.Factory,
			ElkInverseFunctionalObjectPropertyAxiom.Factory,
			ElkIrreflexiveObjectPropertyAxiom.Factory,
			ElkPropertyAssertionAxiom.Factory, ElkPropertyDomainAxiom.Factory,
			ElkPropertyRangeAxiom.Factory,
			ElkReflexiveObjectPropertyAxiom.Factory,
			ElkSymmetricObjectPropertyAxiom.Factory,
			ElkTransitiveObjectPropertyAxiom.Factory {

		// combined interface

	}

}
| live-ontologies/elk-reasoner | elk-owl-parent/elk-owl-model/src/main/java/org/semanticweb/elk/owl/interfaces/ElkPropertyAxiom.java | Java | apache-2.0 | 2,035 |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.api.byte_;
import org.assertj.core.api.ByteAssert;
import org.assertj.core.api.ByteAssertBaseTest;
import org.assertj.core.data.Percentage;
import static org.assertj.core.data.Percentage.withPercentage;
import static org.mockito.Mockito.verify;
/**
 * Tests for <code>{@link ByteAssert#isCloseTo(Byte, Percentage)}</code>: verifies that
 * the fluent API call delegates to the internal {@code Bytes} object with the
 * same actual value, expected value and percentage.
 */
public class ByteAssert_isCloseToPercentage_byte_Test extends ByteAssertBaseTest {

  // Fixed inputs shared between the invocation and the verification step.
  private final Percentage percentage = withPercentage((byte) 5);
  private final Byte value = 10;

  @Override
  protected ByteAssert invoke_api_method() {
    return assertions.isCloseTo(value, percentage);
  }

  @Override
  protected void verify_internal_effects() {
    verify(bytes).assertIsCloseToPercentage(getInfo(assertions), getActual(assertions), value, percentage);
  }
}
| ChrisA89/assertj-core | src/test/java/org/assertj/core/api/byte_/ByteAssert_isCloseToPercentage_byte_Test.java | Java | apache-2.0 | 1,374 |
package razor.android.lib.core.holders;
import java.util.ArrayList;
import java.util.List;
import razor.android.lib.core.R;
import razor.android.lib.core.adapters.CoreViewModelListAdapter;
import razor.android.lib.core.adapters.CoreViewModelListAdapter.OnCoreViewModelListAdapterListener;
import razor.android.lib.core.helpers.EndlessScrollListener;
import razor.android.lib.core.interfaces.ICoreViewModel;
import razor.android.lib.core.views.ListFooterView;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ListView;
import android.widget.TextView;
/**
 * View holder wrapping a {@link ListView} of {@link ICoreViewModel}s, with
 * endless-scroll support, a loading footer and a "no results" message.
 * The holder owns the backing model list and the adapter built over it.
 */
public class ViewModelListViewHolder {

	/** Optional hook letting callers supply a custom adapter implementation. */
	public interface ViewModelListViewAdapterBuilder{
		CoreViewModelListAdapter createAdapter(Context context, List<ICoreViewModel> models);
	}

	// Root view the list/message widgets are looked up in.
	private View parentView = null;
	private ListView modelList = null;
	private CoreViewModelListAdapter adapter = null;
	// Scroll listener that fires "load more" callbacks near the list end.
	private EndlessScrollListener scrollListener = null;
	// Backing store shared with the adapter; mutations must be followed by
	// notifyDataSetChanged().
	private List<ICoreViewModel> modelStore = null;
	private ViewModelListViewAdapterBuilder builder = null;
	// Footer view showing the loading spinner while more data is fetched.
	private ListFooterView vFooter = null;
	// Shown instead of the list when there are no results.
	private TextView noResultMessage = null;

	public ViewModelListViewHolder(View view){
		this.parentView = view;
		// Threshold of 2: request more data when within 2 items of the end.
		this.scrollListener = new EndlessScrollListener(2);
		this.modelList = (ListView) view.findViewById(R.id.viewmodel_list);
		this.modelList.setOnScrollListener(this.scrollListener);
		this.noResultMessage = (TextView)view.findViewById(R.id.no_results);
		this.vFooter = new ListFooterView(view.getContext());
		// Footer is not selectable (null data, not enabled).
		this.modelList.addFooterView(vFooter, null, false);
	}

	public ListView getModelList(){
		return this.modelList;
	}

	/** Number of models currently held; 0 before the first setAdapter() call. */
	public int getSize(){
		return this.modelStore != null ? this.modelStore.size() : 0;
	}

	/**
	 * (Re)creates the adapter over the given models, appending them to any
	 * models already held, and attaches it to the list.
	 */
	public void setAdapter(List<ICoreViewModel> models, OnCoreViewModelListAdapterListener listener){
		if(this.modelStore==null){
			this.modelStore = new ArrayList<ICoreViewModel>();
		}
		this.modelStore.addAll(models);

		// build the adapter: use the custom builder when one was supplied.
		if(this.builder == null){
			this.adapter = new CoreViewModelListAdapter(this.parentView.getContext(),this.modelStore);
		} else{
			this.adapter = this.builder.createAdapter(this.parentView.getContext(), this.modelStore);
		}

		this.adapter.setListener(listener);
		this.modelList.setAdapter(this.adapter);
		this.modelList.refreshDrawableState();
	}

	/** Appends more models to the existing adapter (endless-scroll pages). */
	public void updateAdapter(List<ICoreViewModel> moreModels){
		if(this.modelStore!=null && this.adapter!=null){
			this.modelStore.addAll(moreModels);
			this.adapter.notifyDataSetChanged();
		}
	}

	/** Empties the list and resets the scroll listener's paging state. */
	public void clearAdapter(){
		if(this.modelStore!=null){
			this.modelStore.clear();
		}
		if(this.adapter!=null){
			this.adapter.notifyDataSetChanged();
		}
		if(this.scrollListener!=null){
			this.scrollListener.reset();
		}
	}

	public void setOnMoreDataListener(EndlessScrollListener.OnLoadMoreDataListener moreDataListener){
		this.scrollListener.setMoreDataListener(moreDataListener);
	}

	public void setBuilder(ViewModelListViewAdapterBuilder builder) {
		this.builder = builder;
	}

	public ViewModelListViewAdapterBuilder getBuilder() {
		return builder;
	}

	/** Toggles the footer spinner; showing it also hides the "no results" view. */
	public void showLoading(boolean show){
		if(show){
			this.showNoResults(false);
		}
		if(this.vFooter!=null){
			this.vFooter.showLoading(show);
		}
	}

	/** Swaps visibility between the list and the "no results" message. */
	public void showNoResults(boolean show){
		if(this.modelList!=null&&this.noResultMessage!=null){
			if (!show){
				this.modelList.setVisibility(View.VISIBLE);
				this.noResultMessage.setVisibility(View.GONE);
			}
			else{
				this.modelList.setVisibility(View.GONE);
				this.noResultMessage.setVisibility(View.VISIBLE);
			}
		}
	}
}
| paulshemmings/pico | razor.lib.comms/target/unpack/apklibs/razor.android_razor.android.lib.core_apklib_1.0.0-SNAPSHOT/src/razor/android/lib/core/holders/ViewModelListViewHolder.java | Java | apache-2.0 | 3,778 |
package org.terracotta.sample.service;
import org.ehcache.Cache;
/**
* @author Aurelien Broszniowski
*/
/**
 * {@link DataService} implementation backed by an Ehcache {@link Cache}:
 * loading data is a plain cache lookup.
 *
 * @author Aurelien Broszniowski
 */
public class CachedDataService implements DataService<byte[]> {

  private final Cache<Long, byte[]> cache;

  /**
   * @param cache backing cache, must not be {@code null}; fail fast here
   *              rather than with a deferred NPE on the first lookup
   */
  public CachedDataService(final Cache<Long, byte[]> cache) {
    this.cache = Objects.requireNonNull(cache, "cache must not be null");
  }

  /**
   * Loads the payload for the given key from the cache.
   *
   * @param key cache key
   * @return the cached bytes, or {@code null} if the key is absent
   */
  @Override
  public byte[] loadData(final Long key) {
    return cache.get(key);
  }
}
| ehcache/ehcache3-samples | scale-continuum/src/main/java/org/terracotta/sample/service/CachedDataService.java | Java | apache-2.0 | 396 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.carrotsearch.randomizedtesting.annotations.Listeners;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
import com.google.common.base.Predicate;
import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.BootstrapForTesting;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.DjbHashFunction;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsAbortPolicy;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.test.junit.listeners.LoggingListener;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
import org.elasticsearch.test.search.MockSearchService;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.*;
import org.junit.rules.RuleChain;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.file.FileSystem;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static com.google.common.collect.Lists.newArrayList;
/**
 * Base testcase for randomized unit testing with Elasticsearch.
 *
 * Installs suite-wide fixtures (mock filesystem, uncaught-exception handler,
 * randomized content types and processor counts) and exposes randomization
 * helpers. All randomness MUST flow through {@link #random()} so that a failing
 * seed reproduces the exact same run.
 */
@Listeners({
        ReproduceInfoPrinter.class,
        LoggingListener.class
})
// remove this entire annotation on upgrade to 5.3!
@ThreadLeakFilters(defaultFilters = true, filters = {
        IBMJ9HackThreadFilters.class,
})
@ThreadLeakScope(Scope.SUITE)
@ThreadLeakLingering(linger = 5000) // 5 sec lingering
@TimeoutSuite(millis = 20 * TimeUnits.MINUTE)
@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
// we suppress pretty much all the lucene codecs for now, except asserting
// assertingcodec is the winner for a codec here: it finds bugs and gives clear exceptions.
@SuppressCodecs({
        "SimpleText", "Memory", "CheapBastard", "Direct", "Compressing", "FST50", "FSTOrd50",
        "TestBloomFilteredLucenePostings", "MockRandom", "BlockTreeOrds", "LuceneFixedGap",
        "LuceneVarGapFixedInterval", "LuceneVarGapDocFreqInterval", "Lucene50"
})
@LuceneTestCase.SuppressReproduceLine
public abstract class ElasticsearchTestCase extends LuceneTestCase {

    static {
        // One-time JVM-wide bootstrap (security manager, etc.) before any test runs.
        BootstrapForTesting.ensureInitialized();
    }

    protected final ESLogger logger = Loggers.getLogger(getClass());

    // -----------------------------------------------------------------
    // Suite and test case setup/cleanup.
    // -----------------------------------------------------------------

    @Rule
    public RuleChain failureAndSuccessEvents = RuleChain.outerRule(new TestRuleAdapter() {
        @Override
        protected void afterIfSuccessful() throws Throwable {
            ElasticsearchTestCase.this.afterIfSuccessful();
        }

        @Override
        protected void afterAlways(List<Throwable> errors) throws Throwable {
            if (errors != null && errors.isEmpty() == false) {
                ElasticsearchTestCase.this.afterIfFailed(errors);
            }
            super.afterAlways(errors);
        }
    });

    /** called when a test fails, supplying the errors it generated */
    protected void afterIfFailed(List<Throwable> errors) {
    }

    /** called after a test is finished, but only if successful */
    protected void afterIfSuccessful() throws Exception {
    }

    // setup mock filesystems for this test run. we change PathUtils
    // so that all accesses are plumbed thru any mock wrappers
    @BeforeClass
    public static void setFileSystem() throws Exception {
        Field field = PathUtils.class.getDeclaredField("DEFAULT");
        field.setAccessible(true);
        FileSystem mock = LuceneTestCase.getBaseTempDirForTestClass().getFileSystem();
        field.set(null, mock);
        assertEquals(mock, PathUtils.getDefaultFileSystem());
    }

    @AfterClass
    public static void restoreFileSystem() throws Exception {
        // Put the real default filesystem back so later suites are unaffected.
        Field field1 = PathUtils.class.getDeclaredField("ACTUAL_DEFAULT");
        field1.setAccessible(true);
        Field field2 = PathUtils.class.getDeclaredField("DEFAULT");
        field2.setAccessible(true);
        field2.set(null, field1.get(null));
    }

    // setup a default exception handler which knows when and how to print a stacktrace
    private static Thread.UncaughtExceptionHandler defaultHandler;

    @BeforeClass
    public static void setDefaultExceptionHandler() throws Exception {
        defaultHandler = Thread.getDefaultUncaughtExceptionHandler();
        Thread.setDefaultUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(defaultHandler));
    }

    @AfterClass
    public static void restoreDefaultExceptionHandler() throws Exception {
        Thread.setDefaultUncaughtExceptionHandler(defaultHandler);
    }

    // randomize content type for request builders
    @BeforeClass
    public static void setContentType() throws Exception {
        Requests.CONTENT_TYPE = randomFrom(XContentType.values());
        Requests.INDEX_CONTENT_TYPE = randomFrom(XContentType.values());
    }

    @AfterClass
    public static void restoreContentType() {
        // Restore the production defaults (SMILE for transport, JSON for indexing).
        Requests.CONTENT_TYPE = XContentType.SMILE;
        Requests.INDEX_CONTENT_TYPE = XContentType.JSON;
    }

    // randomize and override the number of cpus so tests reproduce regardless of real number of cpus
    @BeforeClass
    public static void setProcessors() {
        int numCpu = TestUtil.nextInt(random(), 1, 4);
        System.setProperty(EsExecutors.DEFAULT_SYSPROP, Integer.toString(numCpu));
        assertEquals(numCpu, EsExecutors.boundedNumberOfProcessors(Settings.EMPTY));
    }

    @AfterClass
    public static void restoreProcessors() {
        System.clearProperty(EsExecutors.DEFAULT_SYSPROP);
    }

    @After
    public final void ensureCleanedUp() throws Exception {
        MockPageCacheRecycler.ensureAllPagesAreReleased();
        MockBigArrays.ensureAllArraysAreReleased();
        // field cache should NEVER get loaded.
        String[] entries = UninvertingReader.getUninvertedStats();
        assertEquals("fieldcache must never be used, got=" + Arrays.toString(entries), 0, entries.length);
    }

    // this must be a separate method from other ensure checks above so suite scoped integ tests can call...TODO: fix that
    @After
    public final void ensureAllSearchContextsReleased() throws Exception {
        assertBusy(new Runnable() {
            @Override
            public void run() {
                MockSearchService.assertNoInFLightContext();
            }
        });
    }

    // mockdirectorywrappers currently set this boolean if checkindex fails
    // TODO: can we do this cleaner???

    /** MockFSDirectoryService sets this: */
    public static boolean checkIndexFailed;

    @Before
    public final void resetCheckIndexStatus() throws Exception {
        checkIndexFailed = false;
    }

    @After
    public final void ensureCheckIndexPassed() throws Exception {
        assertFalse("at least one shard failed CheckIndex", checkIndexFailed);
    }

    // -----------------------------------------------------------------
    // Test facilities and facades for subclasses.
    // -----------------------------------------------------------------

    // TODO: replaces uses of getRandom() with random()
    // TODO: decide on one set of naming for between/scaledBetween and remove others
    // TODO: replace frequently() with usually()

    /** Shortcut for {@link RandomizedContext#getRandom()}. Use {@link #random()} instead. */
    public static Random getRandom() {
        // TODO: replace uses of this function with random()
        return random();
    }

    /**
     * Returns a "scaled" random number between min and max (inclusive).
     *
     * @see RandomizedTest#scaledRandomIntBetween(int, int);
     */
    public static int scaledRandomIntBetween(int min, int max) {
        return RandomizedTest.scaledRandomIntBetween(min, max);
    }

    /**
     * A random integer from <code>min</code> to <code>max</code> (inclusive).
     *
     * @see #scaledRandomIntBetween(int, int)
     */
    public static int randomIntBetween(int min, int max) {
        return RandomInts.randomIntBetween(random(), min, max);
    }

    /**
     * Returns a "scaled" number of iterations for loops which can have a variable
     * iteration count. This method is effectively
     * an alias to {@link #scaledRandomIntBetween(int, int)}.
     */
    public static int iterations(int min, int max) {
        return scaledRandomIntBetween(min, max);
    }

    /**
     * An alias for {@link #randomIntBetween(int, int)}.
     *
     * @see #scaledRandomIntBetween(int, int)
     */
    public static int between(int min, int max) {
        return randomIntBetween(min, max);
    }

    /**
     * The exact opposite of {@link #rarely()}.
     */
    public static boolean frequently() {
        return !rarely();
    }

    /** A random boolean drawn from the test seed. */
    public static boolean randomBoolean() {
        return random().nextBoolean();
    }

    /** A random byte drawn from the test seed. */
    public static byte randomByte() {
        return (byte) random().nextInt();
    }

    /** A random short drawn from the test seed. */
    public static short randomShort() {
        return (short) random().nextInt();
    }

    /** A random int drawn from the test seed. */
    public static int randomInt() {
        return random().nextInt();
    }

    /** A random float in [0, 1) drawn from the test seed. */
    public static float randomFloat() {
        return random().nextFloat();
    }

    /** A random double in [0, 1) drawn from the test seed. */
    public static double randomDouble() {
        return random().nextDouble();
    }

    /** A random long drawn from the test seed. */
    public static long randomLong() {
        return random().nextLong();
    }

    /** A random integer from 0..max (inclusive). */
    public static int randomInt(int max) {
        return RandomizedTest.randomInt(max);
    }

    /** Pick a random object from the given array. The array must not be empty. */
    public static <T> T randomFrom(T... array) {
        return RandomPicks.randomFrom(random(), array);
    }

    /** Pick a random object from the given list. */
    public static <T> T randomFrom(List<T> list) {
        return RandomPicks.randomFrom(random(), list);
    }

    /** A random ASCII string whose length is between the given bounds (inclusive). */
    public static String randomAsciiOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
        return RandomizedTest.randomAsciiOfLengthBetween(minCodeUnits, maxCodeUnits);
    }

    /** A random ASCII string of exactly the given length. */
    public static String randomAsciiOfLength(int codeUnits) {
        return RandomizedTest.randomAsciiOfLength(codeUnits);
    }

    /** A random unicode string whose UTF-16 length is between the given bounds. */
    public static String randomUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
        return RandomizedTest.randomUnicodeOfLengthBetween(minCodeUnits, maxCodeUnits);
    }

    /** A random unicode string of exactly the given UTF-16 length. */
    public static String randomUnicodeOfLength(int codeUnits) {
        return RandomizedTest.randomUnicodeOfLength(codeUnits);
    }

    /** A random unicode string whose codepoint count is between the given bounds. */
    public static String randomUnicodeOfCodepointLengthBetween(int minCodePoints, int maxCodePoints) {
        return RandomizedTest.randomUnicodeOfCodepointLengthBetween(minCodePoints, maxCodePoints);
    }

    /** A random unicode string of exactly the given codepoint count. */
    public static String randomUnicodeOfCodepointLength(int codePoints) {
        return RandomizedTest.randomUnicodeOfCodepointLength(codePoints);
    }

    /** A random "realistic" unicode string whose UTF-16 length is between the given bounds. */
    public static String randomRealisticUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
        return RandomizedTest.randomRealisticUnicodeOfLengthBetween(minCodeUnits, maxCodeUnits);
    }

    /** A random "realistic" unicode string of exactly the given UTF-16 length. */
    public static String randomRealisticUnicodeOfLength(int codeUnits) {
        return RandomizedTest.randomRealisticUnicodeOfLength(codeUnits);
    }

    /** A random "realistic" unicode string whose codepoint count is between the given bounds. */
    public static String randomRealisticUnicodeOfCodepointLengthBetween(int minCodePoints, int maxCodePoints) {
        return RandomizedTest.randomRealisticUnicodeOfCodepointLengthBetween(minCodePoints, maxCodePoints);
    }

    /** A random "realistic" unicode string of exactly the given codepoint count. */
    public static String randomRealisticUnicodeOfCodepointLength(int codePoints) {
        return RandomizedTest.randomRealisticUnicodeOfCodepointLength(codePoints);
    }

    /**
     * A random array of ASCII strings, possibly {@code null} when {@code allowNull} is set.
     * Note: {@code maxArraySize} must be &gt; 0, since {@code Random#nextInt(0)} would throw.
     */
    public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull) {
        if (allowNull && random().nextBoolean()) {
            return null;
        }
        String[] array = new String[random().nextInt(maxArraySize)]; // allow empty arrays
        for (int i = 0; i < array.length; i++) {
            array[i] = RandomStrings.randomAsciiOfLength(random(), maxStringSize);
        }
        return array;
    }

    /**
     * Runs the code block for 10 seconds waiting for no assertion to trip.
     */
    public static void assertBusy(Runnable codeBlock) throws Exception {
        assertBusy(Executors.callable(codeBlock), 10, TimeUnit.SECONDS);
    }

    /** Runs the code block for the provided interval, waiting for no assertion to trip. */
    public static void assertBusy(Runnable codeBlock, long maxWaitTime, TimeUnit unit) throws Exception {
        assertBusy(Executors.callable(codeBlock), maxWaitTime, unit);
    }

    /**
     * Runs the code block for 10 seconds waiting for no assertion to trip.
     */
    public static <V> V assertBusy(Callable<V> codeBlock) throws Exception {
        return assertBusy(codeBlock, 10, TimeUnit.SECONDS);
    }

    /**
     * Runs the code block for the provided interval, waiting for no assertions to trip.
     */
    public static <V> V assertBusy(Callable<V> codeBlock, long maxWaitTime, TimeUnit unit) throws Exception {
        long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit);
        // Retry with exponential backoff (1ms, 2ms, 4ms, ...) until the budget is spent.
        long iterations = Math.max(Math.round(Math.log10(maxTimeInMillis) / Math.log10(2)), 1);
        long timeInMillis = 1;
        long sum = 0;
        List<AssertionError> failures = new ArrayList<>();
        for (int i = 0; i < iterations; i++) {
            try {
                return codeBlock.call();
            } catch (AssertionError e) {
                failures.add(e);
            }
            sum += timeInMillis;
            Thread.sleep(timeInMillis);
            timeInMillis *= 2;
        }
        // Spend whatever remains of the budget, then make one final attempt.
        timeInMillis = maxTimeInMillis - sum;
        Thread.sleep(Math.max(timeInMillis, 0));
        try {
            return codeBlock.call();
        } catch (AssertionError e) {
            // Surface all earlier failures as suppressed exceptions for easier debugging.
            for (AssertionError failure : failures) {
                e.addSuppressed(failure);
            }
            throw e;
        }
    }

    /** Polls the predicate for up to 10 seconds; returns true as soon as it matches. */
    public static boolean awaitBusy(Predicate<?> breakPredicate) throws InterruptedException {
        return awaitBusy(breakPredicate, 10, TimeUnit.SECONDS);
    }

    /** Polls the predicate with exponential backoff; returns true as soon as it matches. */
    public static boolean awaitBusy(Predicate<?> breakPredicate, long maxWaitTime, TimeUnit unit) throws InterruptedException {
        long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit);
        long iterations = Math.max(Math.round(Math.log10(maxTimeInMillis) / Math.log10(2)), 1);
        long timeInMillis = 1;
        long sum = 0;
        for (int i = 0; i < iterations; i++) {
            if (breakPredicate.apply(null)) {
                return true;
            }
            sum += timeInMillis;
            Thread.sleep(timeInMillis);
            timeInMillis *= 2;
        }
        timeInMillis = maxTimeInMillis - sum;
        Thread.sleep(Math.max(timeInMillis, 0));
        return breakPredicate.apply(null);
    }

    /** Shuts down the given executors, waiting up to 10s each; returns true if all terminated. */
    public static boolean terminate(ExecutorService... services) throws InterruptedException {
        boolean terminated = true;
        for (ExecutorService service : services) {
            if (service != null) {
                terminated &= ThreadPool.terminate(service, 10, TimeUnit.SECONDS);
            }
        }
        return terminated;
    }

    /** Shuts down the given thread pool, waiting up to 10s; returns true if it terminated. */
    public static boolean terminate(ThreadPool service) throws InterruptedException {
        return ThreadPool.terminate(service, 10, TimeUnit.SECONDS);
    }

    /**
     * Returns a {@link java.nio.file.Path} pointing to the class path relative resource given
     * as the first argument. In contrast to
     * <code>getClass().getResource(...).getFile()</code> this method will not
     * return URL encoded paths if the parent path contains spaces or other
     * non-standard characters.
     */
    @Override
    public Path getDataPath(String relativePath) {
        // we override LTC behavior here: wrap even resources with mockfilesystems,
        // because some code is buggy when it comes to multiple nio.2 filesystems
        // (e.g. FileSystemUtils, and likely some tests)
        try {
            return PathUtils.get(getClass().getResource(relativePath).toURI());
        } catch (Exception e) {
            throw new RuntimeException("resource not found: " + relativePath, e);
        }
    }

    /** Returns a random number of temporary paths. */
    public String[] tmpPaths() {
        final int numPaths = TestUtil.nextInt(random(), 1, 3);
        final String[] absPaths = new String[numPaths];
        for (int i = 0; i < numPaths; i++) {
            absPaths[i] = createTempDir().toAbsolutePath().toString();
        }
        return absPaths;
    }

    /** Creates a node environment backed by fresh temp directories and empty settings. */
    public NodeEnvironment newNodeEnvironment() throws IOException {
        return newNodeEnvironment(Settings.EMPTY);
    }

    /** Creates a node environment backed by fresh temp directories and the given settings. */
    public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException {
        Settings build = Settings.builder()
                .put(settings)
                .put("path.home", createTempDir().toAbsolutePath())
                .putArray("path.data", tmpPaths()).build();
        return new NodeEnvironment(build, new Environment(build));
    }

    /** Return consistent index settings for the provided index version. */
    public static Settings.Builder settings(Version version) {
        Settings.Builder builder = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version);
        if (version.before(Version.V_2_0_0)) {
            // pre-2.0 indices used the legacy djb routing hash
            builder.put(IndexMetaData.SETTING_LEGACY_ROUTING_HASH_FUNCTION, DjbHashFunction.class);
        }
        return builder;
    }

    // -----------------------------------------------------------------
    // Failure utilities
    // -----------------------------------------------------------------

    /**
     * Uncaught-exception handler that filters expected shutdown noise and dumps
     * all thread stacks when the JVM runs out of native threads, then delegates
     * to the previously installed handler.
     */
    static final class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {
        private final Thread.UncaughtExceptionHandler parent;
        private final ESLogger logger = Loggers.getLogger(getClass());

        private ElasticsearchUncaughtExceptionHandler(Thread.UncaughtExceptionHandler parent) {
            this.parent = parent;
        }

        @Override
        public void uncaughtException(Thread t, Throwable e) {
            if (e instanceof EsRejectedExecutionException) {
                if (e.getMessage() != null && e.getMessage().contains(EsAbortPolicy.SHUTTING_DOWN_KEY)) {
                    return; // ignore the EsRejectedExecutionException when a node shuts down
                }
            } else if (e instanceof OutOfMemoryError) {
                if (e.getMessage() != null && e.getMessage().contains("unable to create new native thread")) {
                    printStackDump(logger);
                }
            }
            parent.uncaughtException(t, e);
        }
    }

    protected static final void printStackDump(ESLogger logger) {
        // print stack traces if we can't create any native thread anymore
        Map<Thread, StackTraceElement[]> allStackTraces = Thread.getAllStackTraces();
        logger.error(formatThreadStacks(allStackTraces));
    }

    /** Dump threads and their current stack trace. */
    private static String formatThreadStacks(Map<Thread, StackTraceElement[]> threads) {
        StringBuilder message = new StringBuilder();
        int cnt = 1;
        final Formatter f = new Formatter(message, Locale.ENGLISH);
        for (Map.Entry<Thread, StackTraceElement[]> e : threads.entrySet()) {
            if (e.getKey().isAlive()) {
                f.format(Locale.ENGLISH, "\n  %2d) %s", cnt++, threadName(e.getKey())).flush();
            }
            if (e.getValue().length == 0) {
                message.append("\n        at (empty stack)");
            } else {
                for (StackTraceElement ste : e.getValue()) {
                    message.append("\n        at ").append(ste);
                }
            }
        }
        return message.toString();
    }

    /** Human-readable description of a thread: id, name, state and group. */
    private static String threadName(Thread t) {
        return "Thread[" +
                "id=" + t.getId() +
                ", name=" + t.getName() +
                ", state=" + t.getState() +
                ", group=" + groupName(t.getThreadGroup()) +
                "]";
    }

    /** Name of the thread group, or a placeholder when the thread has none. */
    private static String groupName(ThreadGroup threadGroup) {
        if (threadGroup == null) {
            return "{null group}";
        } else {
            return threadGroup.getName();
        }
    }

    /**
     * Returns size random values
     */
    public static <T> List<T> randomSubsetOf(int size, T... values) {
        if (size > values.length) {
            throw new IllegalArgumentException("Can\'t pick " + size + " random objects from a list of " + values.length + " objects");
        }
        List<T> list = newArrayList(values);
        // Shuffle with the test's seeded Random so failures reproduce with the same seed;
        // the no-arg Collections.shuffle would use a non-reproducible default source.
        Collections.shuffle(list, random());
        return list.subList(0, size);
    }

    /**
     * Returns true iff assertions for elasticsearch packages are enabled
     */
    public static boolean assertionsEnabled() {
        boolean enabled = false;
        // intentional side effect: the assignment only runs when -ea is active
        assert (enabled = true);
        return enabled;
    }
}
| wangyuxue/elasticsearch | core/src/test/java/org/elasticsearch/test/ElasticsearchTestCase.java | Java | apache-2.0 | 23,505 |
define([
'../Core/Cartesian3',
'../Core/defined',
'../Core/Intersect',
'../Core/ManagedArray',
'../Core/Math',
'./Cesium3DTileOptimizationHint',
'./Cesium3DTileRefine'
], function(
Cartesian3,
defined,
Intersect,
ManagedArray,
CesiumMath,
Cesium3DTileOptimizationHint,
Cesium3DTileRefine) {
'use strict';
/**
 * Traversal logic for selecting which tiles of a Cesium3DTileset to load and
 * render each frame. All functionality is exposed through static members;
 * this constructor is never meant to produce instances.
 *
 * @private
 */
function Cesium3DTilesetTraversal() {
}
function isVisible(tile) {
    // A tile is usable only when it passed the visibility test AND its
    // viewer-request volume condition is met.
    var passedCull = tile._visible;
    return passedCull && tile._inRequestVolume;
}
// Module-scoped scratch stacks reused across frames to avoid per-frame
// allocation. Each records the maximum depth it reached so trim() can
// shrink the backing array after traversal.

// Main depth-first traversal stack.
var traversal = {
    stack : new ManagedArray(),
    stackMaximumLength : 0
};

// Stack for descending through empty tiles to check if a subtree can refine.
var emptyTraversal = {
    stack : new ManagedArray(),
    stackMaximumLength : 0
};

// Stack for selecting nearby loaded descendants when a desired tile is missing.
var descendantTraversal = {
    stack : new ManagedArray(),
    stackMaximumLength : 0
};

// Stacks for the final skip-LOD selection pass (traverseAndSelect).
var selectionTraversal = {
    stack : new ManagedArray(),
    stackMaximumLength : 0,
    ancestorStack : new ManagedArray(),
    ancestorStackMaximumLength : 0
};

// How many levels below a missing tile selectDescendants will search.
var descendantSelectionDepth = 2;
/**
 * Entry point called once per frame: resets per-frame tileset state, runs the
 * appropriate traversal (base, skip, or combined), trims the scratch stacks,
 * and finally updates the priority of every tile request discovered.
 *
 * @param {Cesium3DTileset} tileset The tileset to traverse.
 * @param {FrameState} frameState The current frame state.
 */
Cesium3DTilesetTraversal.selectTiles = function(tileset, frameState) {
    tileset._requestedTiles.length = 0;
    // Debug option: keep last frame's selection untouched.
    if (tileset.debugFreezeFrame) {
        return;
    }
    // Reset per-frame selection results.
    tileset._selectedTiles.length = 0;
    tileset._selectedTilesToStyle.length = 0;
    tileset._emptyTiles.length = 0;
    tileset._hasMixedContent = false;
    var root = tileset.root;
    updateTile(tileset, root, frameState);
    // The root tile is not visible
    if (!isVisible(root)) {
        return;
    }
    // The tileset doesn't meet the SSE requirement, therefore the tree does not need to be rendered
    if (root.getScreenSpaceError(frameState, true) <= tileset._maximumScreenSpaceError) {
        return;
    }
    // Pick the traversal flavor based on the skip-LOD configuration.
    if (!skipLevelOfDetail(tileset)) {
        executeBaseTraversal(tileset, root, frameState);
    } else if (tileset.immediatelyLoadDesiredLevelOfDetail) {
        executeSkipTraversal(tileset, root, frameState);
    } else {
        executeBaseAndSkipTraversal(tileset, root, frameState);
    }
    // Release memory held by the scratch stacks beyond this frame's high-water mark.
    traversal.stack.trim(traversal.stackMaximumLength);
    emptyTraversal.stack.trim(emptyTraversal.stackMaximumLength);
    descendantTraversal.stack.trim(descendantTraversal.stackMaximumLength);
    selectionTraversal.stack.trim(selectionTraversal.stackMaximumLength);
    selectionTraversal.ancestorStack.trim(selectionTraversal.ancestorStackMaximumLength);
    // Update the priority for any requests found during traversal
    // Update after traversal so that min and max values can be used to normalize priority values
    var requestedTiles = tileset._requestedTiles;
    var length = requestedTiles.length;
    for (var i = 0; i < length; ++i) {
        requestedTiles[i].updatePriority();
    }
};
// Traditional replacement-refinement traversal: both thresholds collapse to
// the tileset's maximum screen space error, so no tiles are skipped.
function executeBaseTraversal(tileset, root, frameState) {
    var sse = tileset._maximumScreenSpaceError;
    executeTraversal(tileset, root, sse, sse, frameState);
}
// Pure skip-LOD traversal: an unbounded base threshold means every tile is in
// the skip traversal, then a separate pass performs the final selection.
function executeSkipTraversal(tileset, root, frameState) {
    executeTraversal(tileset, root, Number.MAX_VALUE, tileset._maximumScreenSpaceError, frameState);
    traverseAndSelect(tileset, root, frameState);
}
// Combined traversal: tiles coarser than baseScreenSpaceError follow the
// traditional base rules, finer tiles may be skipped; selection happens in a
// separate pass.
function executeBaseAndSkipTraversal(tileset, root, frameState) {
    var maximumScreenSpaceError = tileset.maximumScreenSpaceError;
    var baseScreenSpaceError = Math.max(tileset.baseScreenSpaceError, maximumScreenSpaceError);
    executeTraversal(tileset, root, baseScreenSpaceError, maximumScreenSpaceError, frameState);
    traverseAndSelect(tileset, root, frameState);
}
// Whether the tileset uses the skip-level-of-detail optimization; kept as a
// function so call sites read as a predicate.
function skipLevelOfDetail(tileset) {
    return tileset._skipLevelOfDetail;
}
// Record an empty tile for this frame (used e.g. to render its debug bounding
// volume). Note: some call sites pass a third frameState argument, which is
// ignored here.
function addEmptyTile(tileset, tile) {
    tileset._emptyTiles.push(tile);
}
/**
 * Adds a tile to this frame's selected set, provided its content's bounding
 * volume is not completely outside the view. Tiles that are newly selected or
 * whose feature properties changed are also queued for (re)styling.
 */
function selectTile(tileset, tile, frameState) {
    if (tile.contentVisibility(frameState) !== Intersect.OUTSIDE) {
        var tileContent = tile.content;
        if (tileContent.featurePropertiesDirty) {
            // A feature's property in this tile changed, the tile needs to be re-styled.
            tileContent.featurePropertiesDirty = false;
            tile.lastStyleTime = 0; // Force applying the style to this tile
            tileset._selectedTilesToStyle.push(tile);
        } else if ((tile._selectedFrame < frameState.frameNumber - 1)) {
            // Tile is newly selected; it is selected this frame, but was not selected last frame.
            tileset._selectedTilesToStyle.push(tile);
        }
        tile._selectedFrame = frameState.frameNumber;
        tileset._selectedTiles.push(tile);
    }
}
/**
 * When a desired tile and all its loaded ancestors are unavailable, walk a
 * bounded number of levels (descendantSelectionDepth) below it and select any
 * loaded, visible descendants to minimize holes in the scene.
 */
function selectDescendants(tileset, root, frameState) {
    var stack = descendantTraversal.stack;
    stack.push(root);
    while (stack.length > 0) {
        descendantTraversal.stackMaximumLength = Math.max(descendantTraversal.stackMaximumLength, stack.length);
        var tile = stack.pop();
        var children = tile.children;
        var childrenLength = children.length;
        for (var i = 0; i < childrenLength; ++i) {
            var child = children[i];
            if (isVisible(child)) {
                if (child.contentAvailable) {
                    // Loaded descendant found: select it and stop descending this branch.
                    updateTile(tileset, child, frameState);
                    touchTile(tileset, child, frameState);
                    selectTile(tileset, child, frameState);
                } else if (child._depth - root._depth < descendantSelectionDepth) {
                    // Continue traversing, but not too far
                    stack.push(child);
                }
            }
        }
    }
}
/**
 * Marks a tile for rendering. Without skip-LOD the tile is selected directly
 * (if loaded). With skip-LOD, selection is deferred to traverseAndSelect: the
 * tile, or its nearest loaded ancestor, is flagged instead; with neither
 * available, loaded descendants are selected to fill the gap.
 */
function selectDesiredTile(tileset, tile, frameState) {
    if (!skipLevelOfDetail(tileset)) {
        if (tile.contentAvailable) {
            // The tile can be selected right away and does not require traverseAndSelect
            selectTile(tileset, tile, frameState);
        }
        return;
    }
    // If this tile is not loaded attempt to select its ancestor instead
    var loadedTile = tile.contentAvailable ? tile : tile._ancestorWithContentAvailable;
    if (defined(loadedTile)) {
        // Tiles will actually be selected in traverseAndSelect
        loadedTile._shouldSelect = true;
    } else {
        // If no ancestors are ready traverse down and select tiles to minimize empty regions.
        // This happens often for immediatelyLoadDesiredLevelOfDetail where parent tiles are not necessarily loaded before zooming out.
        selectDescendants(tileset, tile, frameState);
    }
}
// Count the visit in the per-frame statistics and stamp the tile with the
// frame number it was visited in.
function visitTile(tileset, tile, frameState) {
    tileset._statistics.visited += 1;
    tile._visitedFrame = frameState.frameNumber;
}
// Move the tile to the head of the LRU cache, at most once per frame; later
// passes in the same frame are no-ops.
function touchTile(tileset, tile, frameState) {
    var frame = frameState.frameNumber;
    if (tile._touchedFrame !== frame) {
        tileset._cache.touch(tile);
        tile._touchedFrame = frame;
    }
}
// Widen the tileset's per-frame priority ranges (distance, depth, foveated
// factor, reverse SSE) with this tile's values; the ranges are later used to
// normalize request priorities.
function updateMinimumMaximumPriority(tileset, tile) {
    var minimum = tileset._minimumPriority;
    var maximum = tileset._maximumPriority;
    var holder = tile._priorityHolder;
    maximum.distance = Math.max(holder._distanceToCamera, maximum.distance);
    minimum.distance = Math.min(holder._distanceToCamera, minimum.distance);
    maximum.depth = Math.max(tile._depth, maximum.depth);
    minimum.depth = Math.min(tile._depth, minimum.depth);
    maximum.foveatedFactor = Math.max(holder._foveatedFactor, maximum.foveatedFactor);
    minimum.foveatedFactor = Math.min(holder._foveatedFactor, minimum.foveatedFactor);
    maximum.reverseScreenSpaceError = Math.max(tile._priorityReverseScreenSpaceError, maximum.reverseScreenSpaceError);
    minimum.reverseScreenSpaceError = Math.min(tile._priorityReverseScreenSpaceError, minimum.reverseScreenSpaceError);
}
// Heuristic to avoid requesting tiles while the camera is moving fast: compare
// recent camera travel (scaled by a multiplier) against the tile's physical
// size; requests are allowed only when that ratio stays below 1.
function isOnScreenLongEnough(tileset, tile, frameState) {
    if (!tileset.cullRequestsWhileMoving) {
        // Feature disabled: always allow the request.
        return true;
    }
    var diameter = Math.max(tile.boundingSphere.radius * 2.0, 1.0);
    var camera = frameState.camera;
    var delta = camera.positionWCDeltaMagnitude;
    if (delta === 0.0) {
        delta = camera.positionWCDeltaMagnitudeLastFrame;
    }
    var movementRatio = (tileset.cullRequestsWhileMovingMultiplier * delta) / diameter;
    return movementRatio < 1.0;
}
// Queue a tile for content loading, at most once per frame, skipping tiles
// with nothing to load, tiles culled by the camera-movement heuristic, and
// foveation-deferred tiles while the camera is still settling.
function loadTile(tileset, tile, frameState) {
    var frame = frameState.frameNumber;
    var needsLoad = hasUnloadedContent(tile) || tile.contentExpired;
    if (tile._requestedFrame === frame || !needsLoad) {
        return;
    }
    if (!isOnScreenLongEnough(tileset, tile, frameState)) {
        return;
    }
    var cameraSettled = frameState.camera.timeSinceMoved >= tileset.foveatedTimeDelay;
    if (tile.priorityDeferred && !cameraSettled) {
        return;
    }
    tile._requestedFrame = frame;
    tileset._requestedTiles.push(tile);
}
// Re-evaluate the tile's visibility, at most once per traversal; a tile may
// already have been evaluated by the cullWithChildrenBounds optimization.
function updateVisibility(tileset, tile, frameState) {
    var visibilityFrame = tileset._updatedVisibilityFrame;
    if (tile._updatedVisibilityFrame === visibilityFrame) {
        return;
    }
    tile.updateVisibility(frameState);
    tile._updatedVisibilityFrame = visibilityFrame;
}
// Refresh every child's visibility and report whether at least one is visible.
// Deliberately does not early-out so all children get stamped this traversal.
function anyChildrenVisible(tileset, tile, frameState) {
    var children = tile.children;
    var length = children.length;
    var visible = false;
    for (var i = 0; i < length; ++i) {
        var child = children[i];
        updateVisibility(tileset, child, frameState);
        if (isVisible(child)) {
            visible = true;
        }
    }
    return visible;
}
// Early-out check for children of additive-refinement parents: evaluate the
// child's bounds against the parent's geometric error to see if it already
// meets the SSE target and can be skipped.
function meetsScreenSpaceErrorEarly(tileset, tile, frameState) {
    var parent = tile.parent;
    var applicable = defined(parent) && !parent.hasTilesetContent && (parent.refine === Cesium3DTileRefine.ADD);
    if (!applicable) {
        return false;
    }
    // Use parent's geometric error with child's box to see if the tile already meets the SSE.
    return tile.getScreenSpaceError(frameState, true) <= tileset._maximumScreenSpaceError;
}
/**
 * Updates a tile's visibility, then applies two optimizations that can force
 * it invisible: the additive-parent early SSE check, and the
 * children-bounds-union cull for replacement-refinement tiles. External
 * tileset tiles inherit their root child's visibility.
 */
function updateTileVisibility(tileset, tile, frameState) {
    updateVisibility(tileset, tile, frameState);
    if (!isVisible(tile)) {
        return;
    }
    var hasChildren = tile.children.length > 0;
    if (tile.hasTilesetContent && hasChildren) {
        // Use the root tile's visibility instead of this tile's visibility.
        // The root tile may be culled by the children bounds optimization in which
        // case this tile should also be culled.
        var child = tile.children[0];
        updateTileVisibility(tileset, child, frameState);
        tile._visible = child._visible;
        return;
    }
    if (meetsScreenSpaceErrorEarly(tileset, tile, frameState)) {
        tile._visible = false;
        return;
    }
    // Optimization - if none of the tile's children are visible then this tile isn't visible
    var replace = tile.refine === Cesium3DTileRefine.REPLACE;
    var useOptimization = tile._optimChildrenWithinParent === Cesium3DTileOptimizationHint.USE_OPTIMIZATION;
    if (replace && useOptimization && hasChildren) {
        if (!anyChildrenVisible(tileset, tile, frameState)) {
            ++tileset._statistics.numberOfTilesCulledWithChildrenUnion;
            tile._visible = false;
            return;
        }
    }
}
/**
 * Per-traversal reset for a tile: re-evaluates visibility and expiration,
 * resets its request-priority linkage and skip-LOD flags, and folds its values
 * into the tileset's per-frame priority ranges.
 */
function updateTile(tileset, tile, frameState) {
    // Reset some of the tile's flags and re-evaluate visibility
    updateTileVisibility(tileset, tile, frameState);
    tile.updateExpiration();
    // Request priority
    tile._wasMinPriorityChild = false;
    tile._priorityHolder = tile;  // by default a tile holds its own priority values
    updateMinimumMaximumPriority(tileset, tile);
    // SkipLOD
    tile._shouldSelect = false;
    tile._finalResolution = true;
}
// Re-derive the tile's two ancestor links from its parent as the traversal
// descends the tree.
function updateTileAncestorContentLinks(tile, frameState) {
    tile._ancestorWithContent = undefined;
    tile._ancestorWithContentAvailable = undefined;
    var parent = tile.parent;
    if (!defined(parent)) {
        return;
    }
    // _ancestorWithContent: nearest ancestor that has content or has the potential
    // to have content (already requested this frame). Used together with
    // tileset.skipLevels to decide when a tile may be skipped.
    var parentHasContent = !hasUnloadedContent(parent) || (parent._requestedFrame === frameState.frameNumber);
    tile._ancestorWithContent = parentHasContent ? parent : parent._ancestorWithContent;
    // _ancestorWithContentAvailable: nearest ancestor that can be rendered in this
    // tile's place if the desired tile is not loaded.
    tile._ancestorWithContentAvailable = parent.contentAvailable ? parent : parent._ancestorWithContentAvailable;
}
// True for tiles with no renderable payload of their own: explicitly empty
// tiles and tiles that point at an external tileset.
function hasEmptyContent(tile) {
    var empty = tile.hasEmptyContent;
    return empty || tile.hasTilesetContent;
}
// True when the tile has real (non-empty, non-external) content that still
// needs to be requested.
function hasUnloadedContent(tile) {
    if (hasEmptyContent(tile)) {
        return false;
    }
    return tile.contentUnloaded;
}
// Decide whether this tile is far enough (in SSE ratio and tree depth) from
// its nearest content-bearing ancestor that the skip traversal may leave it
// unloaded. Never true when the tileset loads desired LODs immediately.
function reachedSkippingThreshold(tileset, tile) {
    if (tileset.immediatelyLoadDesiredLevelOfDetail) {
        return false;
    }
    if (tile._priorityProgressiveResolutionScreenSpaceErrorLeaf) {
        return true;
    }
    var ancestor = tile._ancestorWithContent;
    if (!defined(ancestor)) {
        return false;
    }
    var sseSkipped = tile._screenSpaceError < (ancestor._screenSpaceError / tileset.skipScreenSpaceErrorFactor);
    var levelsSkipped = tile._depth > (ancestor._depth + tileset.skipLevels);
    return sseSkipped && levelsSkipped;
}
// Comparator placing the farthest child first: children go onto a stack, so
// the nearest ends up processed first. Ties at distance zero fall back to
// depth along the view axis.
function sortChildrenByDistanceToCamera(a, b) {
    var distanceA = a._distanceToCamera;
    var distanceB = b._distanceToCamera;
    if (distanceB === 0 && distanceA === 0) {
        return b._centerZDepth - a._centerZDepth;
    }
    return distanceB - distanceA;
}
/**
 * Updates all children of a tile, pushes the visible ones onto the traversal
 * stack (sorted far-to-near), loads non-visible siblings when required, and
 * wires up the priority-holder chain toward the minimum-priority child.
 *
 * Returns whether the tile may refine: for traditional replacement refinement
 * this requires every child to be ready (loaded, or — for empty children —
 * able to refine through its own subtree).
 */
function updateAndPushChildren(tileset, tile, stack, frameState) {
    var i;
    var replace = tile.refine === Cesium3DTileRefine.REPLACE;
    var children = tile.children;
    var length = children.length;
    for (i = 0; i < length; ++i) {
        updateTile(tileset, children[i], frameState);
    }
    // Sort by distance to take advantage of early Z and reduce artifacts for skipLevelOfDetail
    children.sort(sortChildrenByDistanceToCamera);
    // For traditional replacement refinement only refine if all children are loaded.
    // Empty tiles are exempt since it looks better if children stream in as they are loaded to fill the empty space.
    var checkRefines = !skipLevelOfDetail(tileset) && replace && !hasEmptyContent(tile);
    var refines = true;
    var anyChildrenVisible = false;
    // Determining min child
    var minIndex = -1;
    var minimumPriority = Number.MAX_VALUE;
    var child;
    for (i = 0; i < length; ++i) {
        child = children[i];
        if (isVisible(child)) {
            stack.push(child);
            // Track the child with the smallest foveated factor (closest to screen center).
            if (child._foveatedFactor < minimumPriority) {
                minIndex = i;
                minimumPriority = child._foveatedFactor;
            }
            anyChildrenVisible = true;
        } else if (checkRefines || tileset.loadSiblings) {
            // Keep non-visible children loaded since they are still needed before the parent can refine.
            // Or loadSiblings is true so always load tiles regardless of visibility.
            if (child._foveatedFactor < minimumPriority) {
                minIndex = i;
                minimumPriority = child._foveatedFactor;
            }
            loadTile(tileset, child, frameState);
            touchTile(tileset, child, frameState);
        }
        if (checkRefines) {
            var childRefines;
            if (!child._inRequestVolume) {
                childRefines = false;
            } else if (hasEmptyContent(child)) {
                // Empty children refine only if their own subtree can refine.
                childRefines = executeEmptyTraversal(tileset, child, frameState);
            } else {
                childRefines = child.contentAvailable;
            }
            refines = refines && childRefines;
        }
    }
    if (!anyChildrenVisible) {
        refines = false;
    }
    if (minIndex !== -1 && !skipLevelOfDetail(tileset) && replace) {
        // An ancestor will hold the _foveatedFactor and _distanceToCamera for descendants between itself and its highest priority descendant. Siblings of a min children along the way use this ancestor as their priority holder as well.
        // Priority of all tiles that refer to the _foveatedFactor and _distanceToCamera stored in the common ancestor will be differentiated based on their _depth.
        var minPriorityChild = children[minIndex];
        minPriorityChild._wasMinPriorityChild = true;
        var priorityHolder = (tile._wasMinPriorityChild || tile === tileset.root) && minimumPriority <= tile._priorityHolder._foveatedFactor ? tile._priorityHolder : tile; // This is where priority dependency chains are wired up or started anew.
        priorityHolder._foveatedFactor = Math.min(minPriorityChild._foveatedFactor, priorityHolder._foveatedFactor);
        priorityHolder._distanceToCamera = Math.min(minPriorityChild._distanceToCamera, priorityHolder._distanceToCamera);
        for (i = 0; i < length; ++i) {
            child = children[i];
            child._priorityHolder = priorityHolder;
        }
    }
    return refines;
}
// Whether a tile belongs to the base (traditional replacement-refinement) part
// of a combined traversal, as opposed to the skip part.
function inBaseTraversal(tileset, tile, baseScreenSpaceError) {
    if (!skipLevelOfDetail(tileset)) {
        // Without skip-LOD everything is in the base traversal.
        return true;
    }
    if (tileset.immediatelyLoadDesiredLevelOfDetail) {
        // Pure skip traversal: nothing is in the base traversal.
        return false;
    }
    if (!defined(tile._ancestorWithContent)) {
        // Include root or near-root tiles in the base traversal so there is something to select up to.
        return true;
    }
    // Leaves report an SSE of 0.0; fall back to the parent's SSE in that case.
    var screenSpaceError = tile._screenSpaceError;
    if (screenSpaceError === 0.0) {
        screenSpaceError = tile.parent._screenSpaceError;
    }
    return screenSpaceError > baseScreenSpaceError;
}
// Whether traversal should descend into a tile's children.
function canTraverse(tileset, tile) {
    var hasChildren = tile.children.length > 0;
    if (!hasChildren) {
        return false;
    }
    if (tile.hasTilesetContent) {
        // Traverse external tilesets to reach their root tile, unless the
        // subtree is expired and about to be destroyed.
        return !tile.contentExpired;
    }
    // Otherwise refine only while the tile's error still exceeds the target.
    return tile._screenSpaceError > tileset._maximumScreenSpaceError;
}
function executeTraversal(tileset, root, baseScreenSpaceError, maximumScreenSpaceError, frameState) {
    // Depth-first traversal that traverses all visible tiles and marks tiles for selection.
    // If skipLevelOfDetail is off then a tile does not refine until all children are loaded.
    // This is the traditional replacement refinement approach and is called the base traversal.
    // Tiles that have a greater screen space error than the base screen space error are part of the base traversal,
    // all other tiles are part of the skip traversal. The skip traversal allows for skipping levels of the tree
    // and rendering children and parent tiles simultaneously.
    var stack = traversal.stack;
    stack.push(root);
    while (stack.length > 0) {
        traversal.stackMaximumLength = Math.max(traversal.stackMaximumLength, stack.length);
        var tile = stack.pop();
        updateTileAncestorContentLinks(tile, frameState);
        var baseTraversal = inBaseTraversal(tileset, tile, baseScreenSpaceError);
        var add = tile.refine === Cesium3DTileRefine.ADD;
        var replace = tile.refine === Cesium3DTileRefine.REPLACE;
        var parent = tile.parent;
        var parentRefines = !defined(parent) || parent._refines;
        var refines = false;
        if (canTraverse(tileset, tile)) {
            // A tile refines only if its children were pushed AND its whole ancestor chain refined.
            refines = updateAndPushChildren(tileset, tile, stack, frameState) && parentRefines;
        }
        // True when refinement stops at exactly this tile (ancestors refined, this one can't).
        var stoppedRefining = !refines && parentRefines;
        if (hasEmptyContent(tile)) {
            // Add empty tile just to show its debug bounding volume
            // If the tile has tileset content load the external tileset
            // If the tile cannot refine further select its nearest loaded ancestor
            addEmptyTile(tileset, tile, frameState);
            loadTile(tileset, tile, frameState);
            if (stoppedRefining) {
                selectDesiredTile(tileset, tile, frameState);
            }
        } else if (add) {
            // Additive tiles are always loaded and selected
            selectDesiredTile(tileset, tile, frameState);
            loadTile(tileset, tile, frameState);
        } else if (replace) {
            if (baseTraversal) {
                // Always load tiles in the base traversal
                // Select tiles that can't refine further
                loadTile(tileset, tile, frameState);
                if (stoppedRefining) {
                    selectDesiredTile(tileset, tile, frameState);
                }
            } else if (stoppedRefining) {
                // In skip traversal, load and select tiles that can't refine further
                selectDesiredTile(tileset, tile, frameState);
                loadTile(tileset, tile, frameState);
            } else if (reachedSkippingThreshold(tileset, tile)) {
                // In skip traversal, load tiles that aren't skipped. In practice roughly half the tiles stay unloaded.
                loadTile(tileset, tile, frameState);
            }
        }
        visitTile(tileset, tile, frameState);
        touchTile(tileset, tile, frameState);
        tile._refines = refines;
    }
}
function executeEmptyTraversal(tileset, root, frameState) {
    // Depth-first traversal that checks if all nearest descendants with content are loaded. Ignores visibility.
    // Returns true only when every content-bearing frontier tile is available.
    var allDescendantsLoaded = true;
    var stack = emptyTraversal.stack;
    stack.push(root);
    while (stack.length > 0) {
        // Track the stack high-water mark for traversal statistics.
        emptyTraversal.stackMaximumLength = Math.max(emptyTraversal.stackMaximumLength, stack.length);
        var tile = stack.pop();
        var children = tile.children;
        var childrenLength = children.length;
        // Only traverse if the tile is empty - traversal stops at descendants with content
        var traverse = hasEmptyContent(tile) && canTraverse(tileset, tile);
        // Traversal stops but the tile does not have content yet.
        // There will be holes if the parent tries to refine to its children, so don't refine.
        if (!traverse && !tile.contentAvailable) {
            allDescendantsLoaded = false;
        }
        updateTile(tileset, tile, frameState);
        if (!isVisible(tile)) {
            // Load tiles that aren't visible since they are still needed for the parent to refine
            loadTile(tileset, tile, frameState);
            touchTile(tileset, tile, frameState);
        }
        if (traverse) {
            for (var i = 0; i < childrenLength; ++i) {
                var child = children[i];
                stack.push(child);
            }
        }
    }
    return allDescendantsLoaded;
}
/**
 * Traverse the tree and check if their selected frame is the current frame. If so, add it to a selection queue.
 * This is a preorder traversal so children tiles are selected before ancestor tiles.
 *
 * The reason for the preorder traversal is so that tiles can easily be marked with their
 * selection depth. A tile's _selectionDepth is its depth in the tree where all non-selected tiles are removed.
 * This property is important for use in the stencil test because we want to render deeper tiles on top of their
 * ancestors. If a tileset is very deep, the depth is unlikely to fit into the stencil buffer.
 *
 * We want to select children before their ancestors because there is no guarantee on the relationship between
 * the children's z-depth and the ancestor's z-depth. We cannot rely on Z because we want the child to appear on top
 * of ancestor regardless of true depth. The stencil tests used require children to be drawn first.
 *
 * NOTE: 3D Tiles uses 3 bits from the stencil buffer meaning this will not work when there is a chain of
 * selected tiles that is deeper than 7. This is not very likely.
 */
function traverseAndSelect(tileset, root, frameState) {
    var stack = selectionTraversal.stack;
    // Ancestors whose selection is deferred until all of their descendants
    // have been popped off the main stack.
    var ancestorStack = selectionTraversal.ancestorStack;
    var lastAncestor;
    stack.push(root);
    while (stack.length > 0 || ancestorStack.length > 0) {
        selectionTraversal.stackMaximumLength = Math.max(selectionTraversal.stackMaximumLength, stack.length);
        selectionTraversal.ancestorStackMaximumLength = Math.max(selectionTraversal.ancestorStackMaximumLength, ancestorStack.length);
        if (ancestorStack.length > 0) {
            var waitingTile = ancestorStack.peek();
            // When the stack has shrunk back to the length recorded when this
            // ancestor was pushed, all of its descendants have been processed
            // and it can finally be selected.
            if (waitingTile._stackLength === stack.length) {
                ancestorStack.pop();
                if (waitingTile !== lastAncestor) {
                    waitingTile._finalResolution = false;
                }
                selectTile(tileset, waitingTile, frameState);
                continue;
            }
        }
        var tile = stack.pop();
        if (!defined(tile)) {
            // stack is empty but ancestorStack isn't
            continue;
        }
        var add = tile.refine === Cesium3DTileRefine.ADD;
        var shouldSelect = tile._shouldSelect;
        var children = tile.children;
        var childrenLength = children.length;
        var traverse = canTraverse(tileset, tile);
        if (shouldSelect) {
            if (add) {
                // Additive tiles are selected immediately.
                selectTile(tileset, tile, frameState);
            } else {
                tile._selectionDepth = ancestorStack.length;
                if (tile._selectionDepth > 0) {
                    // An ancestor and a descendant are both selected.
                    tileset._hasMixedContent = true;
                }
                lastAncestor = tile;
                if (!traverse) {
                    // Leaf of the selected set: select right away.
                    selectTile(tileset, tile, frameState);
                    continue;
                }
                // Defer selection until the subtree has been processed.
                ancestorStack.push(tile);
                tile._stackLength = stack.length;
            }
        }
        if (traverse) {
            for (var i = 0; i < childrenLength; ++i) {
                var child = children[i];
                if (isVisible(child)) {
                    stack.push(child);
                }
            }
        }
    }
}
return Cesium3DTilesetTraversal;
});
| geoscan/cesium | Source/Scene/Cesium3DTilesetTraversal.js | JavaScript | apache-2.0 | 28,819 |
/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.update;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.elastic.ElasticProfile;
import com.thoughtworks.go.plugin.access.elastic.ElasticAgentExtension;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.result.LocalizedOperationResult;
/**
 * Config-update command that adds a new {@link ElasticProfile} to the cruise
 * config. Shared validation, lookup and permission behavior comes from the
 * {@link ElasticAgentProfileCommand} base class.
 */
public class ElasticAgentProfileCreateCommand extends ElasticAgentProfileCommand {
    public ElasticAgentProfileCreateCommand(GoConfigService goConfigService, ElasticProfile elasticProfile, ElasticAgentExtension extension, Username currentUser, LocalizedOperationResult result) {
        super(goConfigService, elasticProfile, extension, currentUser, result);
    }

    // Appends the new profile to the elastic-profile collection of the
    // preprocessed config.
    @Override
    public void update(CruiseConfig preprocessedConfig) {
        getPluginProfiles(preprocessedConfig).add(elasticProfile);
    }

    @Override
    public boolean isValid(CruiseConfig preprocessedConfig) {
        return isValidForCreateOrUpdate(preprocessedConfig);
    }

    // Encrypts any secure plugin properties before the profile is persisted.
    @Override
    public void encrypt(CruiseConfig preProcessedConfig) {
        elasticProfile.encryptSecureProperties(preProcessedConfig);
    }
}
| ketan/gocd | server/src/main/java/com/thoughtworks/go/config/update/ElasticAgentProfileCreateCommand.java | Java | apache-2.0 | 1,817 |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.services;
import com.intellij.openapi.components.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.idea.maven.model.MavenArtifactInfo;
import org.jetbrains.idea.maven.model.MavenRepositoryInfo;
import org.jetbrains.idea.maven.services.artifactory.ArtifactoryRepositoryService;
import org.jetbrains.idea.maven.services.nexus.NexusRepositoryService;
import org.jetbrains.idea.maven.utils.MavenLog;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* @author Gregory.Shrago
*/
@State(
  name = "MavenServices",
  storages = {@Storage(
    file = StoragePathMacros.APP_CONFIG + "/mavenServices.xml")})
public class MavenRepositoryServicesManager implements PersistentStateComponent<Element> {
  // User-configured repository service URLs; when empty, built-in defaults
  // from getServiceUrls() are used instead.
  private final List<String> myUrls = new ArrayList<String>();

  @NotNull
  public static MavenRepositoryServicesManager getInstance() {
    return ServiceManager.getService(MavenRepositoryServicesManager.class);
  }

  // The supported repository service backends (Nexus and Artifactory);
  // fresh instances are created on every call.
  @NotNull
  public static MavenRepositoryService[] getServices() {
    return new MavenRepositoryService[]{new NexusRepositoryService(), new ArtifactoryRepositoryService()};
  }

  // Returns the configured URLs, or the hard-coded defaults when none are
  // configured. NOTE(review): two of the defaults are plain http -- confirm
  // whether they should be upgraded to https.
  public static String[] getServiceUrls() {
    final List<String> configured = getInstance().getUrls();
    if (!configured.isEmpty()) return ArrayUtil.toStringArray(configured);
    return new String[]{
      "http://oss.sonatype.org/service/local/",
      "http://repo.jfrog.org/artifactory/api/",
      "https://repository.jboss.org/nexus/service/local/"
    };
  }

  public List<String> getUrls() {
    return myUrls;
  }

  public void setUrls(List<String> urls) {
    myUrls.clear();
    myUrls.addAll(urls);
  }

  // Serializes the URL list as <maven-services><service-url>...</service-url>...
  @Override
  public Element getState() {
    final Element element = new Element("maven-services");
    for (String url : myUrls) {
      final Element child = new Element("service-url");
      child.setText(StringUtil.escapeXml(url));
      element.addContent(child);
    }
    return element;
  }

  // Restores the URL list from persisted XML, unescaping each entry.
  // NOTE(review): the cast relies on the JDOM version in use returning a raw
  // List from getChildren -- unchecked but safe for Element children.
  @Override
  public void loadState(Element state) {
    myUrls.clear();
    for (Element element : (List<Element>)state.getChildren("service-url")) {
      myUrls.add(StringUtil.unescapeXml(element.getTextTrim()));
    }
  }

  // Queries every known service type for the repositories hosted at the given
  // URL; services that fail with an IOException are logged and skipped.
  @NotNull
  public static List<MavenRepositoryInfo> getRepositories(String url) {
    List<MavenRepositoryInfo> result = new SmartList<MavenRepositoryInfo>();
    for (MavenRepositoryService service : getServices()) {
      try {
        result.addAll(service.getRepositories(url));
      }
      catch (IOException e) {
        MavenLog.LOG.info(e);
      }
    }
    return result;
  }

  // Searches every known service type for artifacts matching the template;
  // services that fail with an IOException are logged and skipped.
  @NotNull
  public static List<MavenArtifactInfo> findArtifacts(@NotNull MavenArtifactInfo template, @NotNull String url) {
    List<MavenArtifactInfo> result = new SmartList<MavenArtifactInfo>();
    for (MavenRepositoryService service : getServices()) {
      try {
        result.addAll(service.findArtifacts(url, template));
      }
      catch (IOException e) {
        MavenLog.LOG.info(e);
      }
    }
    return result;
  }
}
| android-ia/platform_tools_idea | plugins/maven/src/main/java/org/jetbrains/idea/maven/services/MavenRepositoryServicesManager.java | Java | apache-2.0 | 3,801 |
<?php
// Lists the bundled font files (without extension) as a JSON array.

// Cache lifetime in seconds for this endpoint (0 = always revalidate).
$max_age = 0; //600
header('Expires: ' . gmdate('D, d M Y H:i:s', time() + $max_age) . ' GMT');
header("Cache-Control: public, max-age=$max_age");
header("Content-Type: application/json");

$response = [];
// Only TrueType fonts are supported for now.
$included_fonts = glob("../fonts/*.ttf");
// glob() returns false on error (not an empty array); guard so the foreach
// below never iterates a boolean and the endpoint degrades to [].
if ($included_fonts !== false) {
    foreach ($included_fonts as $included_font) {
        $response[] = basename($included_font, ".ttf");
    }
}
//sort($response, SORT_NATURAL);
echo json_encode($response);
?>
package com.eas.client.utils.scalableui;
import java.awt.BorderLayout;
import java.awt.Component;
/**
*
* @author Marat
*/
/**
 * A {@link BorderLayout} that silently ignores
 * {@link ScalablePopup.ScalablePopupPanel} children: such panels are added to
 * the container but must not be positioned by the border layout, so they are
 * excluded from the layout's bookkeeping.
 */
public class ScalableBorderLayout extends BorderLayout {

    public ScalableBorderLayout() {
        super();
    }

    /**
     * Registers the component with the layout unless it is a scalable popup
     * panel, which manages its own bounds.
     */
    @Override
    public void addLayoutComponent(Component comp, Object constraints) {
        // Braces added around the guarded statement (the original brace-less
        // form is error-prone); behavior is unchanged.
        if (!(comp instanceof ScalablePopup.ScalablePopupPanel)) {
            super.addLayoutComponent(comp, constraints);
        }
    }
}
| altsoft/PlatypusJS | platypus-js-scalable-widget/src/main/java/com/eas/client/utils/scalableui/ScalableBorderLayout.java | Java | apache-2.0 | 467 |
/**
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.bpmn2;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashSet;
import java.util.List;
import java.util.LinkedList;
import org.custommonkey.xmlunit.Diff;
import org.custommonkey.xmlunit.Difference;
import org.custommonkey.xmlunit.DifferenceConstants;
import org.custommonkey.xmlunit.DifferenceListener;
import org.custommonkey.xmlunit.ElementNameAndAttributeQualifier;
import org.custommonkey.xmlunit.XMLTestCase;
import org.custommonkey.xmlunit.XMLUnit;
import org.drools.core.xml.SemanticModules;
import org.jbpm.bpmn2.xml.BPMNDISemanticModule;
import org.jbpm.bpmn2.xml.BPMNSemanticModule;
import org.jbpm.bpmn2.xml.XmlBPMNProcessDumper;
import org.jbpm.compiler.xml.XmlProcessReader;
import org.jbpm.ruleflow.core.RuleFlowProcess;
import org.kie.api.definition.process.Process;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
/**
 * Round-trip and parser-error tests for jBPM's BPMN2 XML support. Processes
 * are read with {@link XmlProcessReader}, dumped back with
 * {@link XmlBPMNProcessDumper}, and compared with XMLUnit; invalid documents
 * are checked for descriptive, location-bearing parser messages.
 */
public class BPMN2XMLTest extends XMLTestCase {
    private static final Logger logger = LoggerFactory.getLogger(BPMN2XMLTest.class);

    // Process definitions that must survive a read/dump round trip unchanged.
    private static final String[] processes = {
        "BPMN2-SimpleXMLProcess.bpmn2"
    };

    // Captures the last parser message produced while reading an invalid file.
    private String errorMessage;

    public void setUp() throws Exception {
        super.setUp();
        // Whitespace and comments are irrelevant to the XML comparison.
        XMLUnit.setIgnoreWhitespace(true);
        XMLUnit.setIgnoreComments(true);
        setErrorMessage(null);
    }

    // Round-trips each process through read + dump and asserts the output is
    // semantically identical to the input.
    public void testXML() throws IOException, SAXException {
        SemanticModules modules = new SemanticModules();
        modules.addSemanticModule(new BPMNSemanticModule());
        modules.addSemanticModule(new BPMNDISemanticModule());
        XmlProcessReader processReader = new XmlProcessReader(modules, getClass().getClassLoader());
        for (String processName: processes) {
            String original = slurp(BPMN2XMLTest.class.getResourceAsStream("/" + processName));
            List<Process> processes = processReader.read(BPMN2XMLTest.class.getResourceAsStream("/" + processName));
            assertNotNull(processes);
            assertEquals(1, processes.size());
            RuleFlowProcess p = (RuleFlowProcess) processes.get(0);
            String result = XmlBPMNProcessDumper.INSTANCE.dump(p, XmlBPMNProcessDumper.META_DATA_USING_DI);

            // Compare original with result using XMLUnit
            Diff diff = new Diff(original, result);

            // Ignore the sequence of nodes (or children nodes) when looking at these nodes
            final HashSet<String> sequenceDoesNotMatter = new HashSet<String>();
            sequenceDoesNotMatter.add("startEvent");
            sequenceDoesNotMatter.add("scriptTask");
            sequenceDoesNotMatter.add("endEvent");
            sequenceDoesNotMatter.add("bpmndi:BPMNShape");
            diff.overrideDifferenceListener(new DifferenceListener() {
                public int differenceFound(Difference diff) {
                    String nodeName = diff.getTestNodeDetail().getNode().getNodeName();
                    if( sequenceDoesNotMatter.contains(nodeName)
                        && diff.getId() == DifferenceConstants.CHILD_NODELIST_SEQUENCE_ID ) {
                        return RETURN_IGNORE_DIFFERENCE_NODES_IDENTICAL;
                    }
                    logger.info( "! {}", diff.getTestNodeDetail().getNode().getNodeName());
                    return RETURN_ACCEPT_DIFFERENCE;
                }
                public void skippedComparison(Node one, Node two) {
                    logger.info("{} : {}", one.getLocalName(), two.getLocalName()) ;
                }
            });

            // nodes should only be compared if their attributes are the same
            diff.overrideElementQualifier(new ElementNameAndAttributeQualifier());
            assertTrue("Original and generated output is not the same.", diff.identical());
        }
    }

    // Reads a process with an XSD violation at the process level and asserts
    // the parser message includes process and node context.
    public void testInvalidXML() throws Exception, SAXException {
        SemanticModules modules = new SemanticModules();
        modules.addSemanticModule(new BPMNSemanticModule());
        modules.addSemanticModule(new BPMNDISemanticModule());
        XmlProcessReader processReader = new XmlProcessReader(modules, getClass().getClassLoader()) {
            @Override
            protected String processParserMessage(LinkedList<Object> parents, org.xml.sax.Attributes attr, String errorMessage) {
                // Capture the enriched message for the assertion below.
                setErrorMessage(super.processParserMessage(parents, attr, errorMessage));
                return errorMessage;
            }
        };
        processReader.read(BPMN2XMLTest.class.getResourceAsStream("/BPMN2-XMLProcessWithError.bpmn2"));
        assertNotNull(getErrorMessage());
        assertEquals("Process Info: id:error.process, pkg:org.jbpm, name:errorprocess, version:1.0 \n" +
                "Node Info: id:_F8A89567-7416-4CCA-9CCD-BC1DDE870F1E name: \n" +
                "Parser message: (null: 45, 181): cvc-complex-type.2.4.a: Invalid content was found starting with element 'bpmn2:endEvent'. One of '{\"http://www.omg.org/spec/BPMN/20100524/MODEL\":artifact, \"http://www.omg.org/spec/BPMN/20100524/MODEL\":resourceRole, \"http://www.omg.org/spec/BPMN/20100524/MODEL\":correlationSubscription, \"http://www.omg.org/spec/BPMN/20100524/MODEL\":supports}' is expected.", getErrorMessage());
    }

    // Same as above, but the XSD violation occurs inside a composite node.
    public void testInvalidXMLInCompositeNode() throws Exception, SAXException {
        SemanticModules modules = new SemanticModules();
        modules.addSemanticModule(new BPMNSemanticModule());
        modules.addSemanticModule(new BPMNDISemanticModule());
        XmlProcessReader processReader = new XmlProcessReader(modules, getClass().getClassLoader()) {
            @Override
            protected String processParserMessage(LinkedList<Object> parents, org.xml.sax.Attributes attr, String errorMessage) {
                setErrorMessage(super.processParserMessage(parents, attr, errorMessage));
                return errorMessage;
            }
        };
        processReader.read(BPMN2XMLTest.class.getResourceAsStream("/BPMN2-XMLProcessWithErrorInCompositeNode.bpmn2"));
        assertNotNull(getErrorMessage());
        assertEquals("Process Info: id:abc.abc, pkg:org.drools.bpmn2, name:abc, version:1.0 \n" +
                "Node Info: id:_47489F3D-FEBD-4452-B62E-B04EF191C6C3 name: \n" +
                "Parser message: (null: 24, 185): cvc-complex-type.2.4.a: Invalid content was found starting with element 'bpmn2:subProcess'. One of '{\"http://www.omg.org/spec/BPMN/20100524/MODEL\":artifact}' is expected.", getErrorMessage());
    }

    private void setErrorMessage(String errorMessage) {
        this.errorMessage = errorMessage;
    }

    private String getErrorMessage() {
        return errorMessage;
    }

    // Reads the whole stream into a String using the platform default charset.
    // NOTE(review): decoding each 4096-byte chunk independently with
    // new String(b, 0, n) can split a multi-byte character across a buffer
    // boundary and corrupt it; works for the ASCII test resources used here,
    // but confirm before reusing with non-ASCII input.
    public static String slurp(InputStream in) throws IOException {
        StringBuffer out = new StringBuffer();
        byte[] b = new byte[4096];
        for (int n; (n = in.read(b)) != -1;) {
            out.append(new String(b, 0, n));
        }
        return out.toString();
    }
}
| pleacu/jbpm | jbpm-bpmn2/src/test/java/org/jbpm/bpmn2/BPMN2XMLTest.java | Java | apache-2.0 | 7,203 |
// Page controller for the per-domain CDN traffic/cache statistics page.
// Fetches domain info and flow data from the backend API and renders two
// Highcharts line charts: last 24 hours and last 30 days.
var Main = function(){
    this.domain = '';
    this.pname = '免费版';
    // Raw day/month series as returned by the getCdnFlow API.
    this.days = [];
    this.months = [];
    // Flattened series kept for the tooltip cache-hit-ratio lookups.
    this.flowDayData = [];
    this.cacheDayData = [];
    // NOTE(review): typo -- renderMonth() actually writes this.flowMonthData,
    // so this misspelled field is initialized but never read.
    this.flowMontyData = [];
    this.cacheMonthData = [];
    // Loads the current domain's info, updates the header, and kicks off the
    // flow-data request. Shows a popup prompting CDN setup when cdn_id is 0.
    this.getInfo = function() {
        var that = this;
        $.ajax({
            url : '/api/?c=record&a=getDomainInfo',
            dataType : 'json',
            success : function(a) {
                if (a.status.code != 1) {
                    that.renderLoginError();
                    return;
                }
                that.domain = a.domain.name;
                that.pname = a.domain.pname;
                $("#domain_name_show").html(that.domain);
                $("#domain_pname_show").html(that.pname).parent('a').attr('href','?c=product&a=index&domain='+that.domain);
                that.getFlow();
                //that.render();
                if(a.domain['cdn_id'] == 0){
                    $("#popup").css('display','block');
                    $("#msg").html("<a href='?c=cdn&a=index&domain="+that.domain+"' style='color:red'>请先增加CDN站点</a>");
                    $("#msg").css("display","block");
                }
            },
            error : function(e) {
                that.showError('后台数据出错' + e.responseText);
            }
        });
    }
    // Fetches the day/month flow series. Renders in both the success and the
    // logical-failure case so the charts are drawn even with empty data.
    this.getFlow = function(){
        var that = this;
        $.ajax({
            url:'/api/?c=record&a=getCdnFlow',
            dataType:'json',
            success:function(a){
                //alert('s');
                if(a.status.code == 1){
                    that.days = a.days;
                    that.months = a.months;
                    that.render();
                    return;
                }
                that.render();
                return;
            },
            error:function(a){
                alert('error');
            }
        });
    }
    // Draws both charts.
    this.render = function() {
        this.renderDay();
        this.renderMonth();
    }
    this.showError = function(msg){
        alert(msg);
    }
    // Cache hit ratio (percent, two decimals) for an hourly data point.
    // NOTE(review): flowval/cacheval are missing `var` and leak as globals.
    this.getDayRatio = function(index) {
        flowval = this.flowDayData[index];
        cacheval = this.cacheDayData[index];
        return Highcharts.numberFormat(cacheval*100/flowval,2);
    }
    // Cache hit ratio (percent, two decimals) for a daily data point.
    // NOTE(review): same implicit-global leak as getDayRatio.
    this.getMonthRatio = function(index) {
        flowval = this.flowMonthData[index];
        cacheval = this.cacheMonthData[index];
        return Highcharts.numberFormat(cacheval*100/flowval,2);
    }
    // Builds and renders the 24-hour flow/cache chart into #query_day.
    this.renderDay = function() {
        var that = this;
        var data = this.days;
        var flow = {};
        var cache = {};
        var options = {
            chart : {
                defaultSeriesType : 'line',
                renderTo : 'query_day',
                inverted : false
            },
            title : {
                text : '最近24小时流量(蓝色),缓存(绿色)统计图'
            },
            subtitle : {
                text : '',
                x : 80
            },
            xAxis : {
                categories : []
            },
            yAxis : {
                title : {
                    text : '流量'
                }
            },
            credits:{
                enabled: true,
                position: {
                    align: 'right',
                    x: -10,
                    y: -10
                },
                href: "https://www.cdnbest.com",
                style: {
                    color:'blue'
                },
                text: "CDN贝"
            },
            legend : {
                enabled : false
            },
            tooltip : {
                // NOTE(review): valueSuffix takes a plain string in Highcharts;
                // '{value}m' is not interpolated here (formatter below wins).
                valueSuffix:'{value}m',
                formatter : function() {
                    var str = '流量:' + Highcharts.numberFormat(this.y, 1) + ' byte<br/>当前时间:' + this.x + '点';
                    str += ",缓存命中率:" + that.getDayRatio(this.point.x) + "%";
                    return str
                }
            },
            series : []
        };
        flow.name = '流量';
        // line color
        //flow.color = '#89A54E';
        // display type: column, line, or other
        //flow.type = 'spline';
        flow.data = [];
        for ( var i in data.cate) {
            flow.data.push(data.nums[data.cate[i]]);
        }
        // Category labels: last two characters of each time key (the hour).
        for ( var i in data.cate) {
            options.xAxis.categories.push(data.cate[i].substr(-2));
        }
        this.flowDayData = flow.data;
        cache.name = "缓存";
        //cache.color = 'red',
        //cache.type = 'spline';
        cache.data = [];
        for ( var i in data.cate) {
            cache.data.push(data.cache[data.cate[i]]);
        }
        this.cacheDayData = cache.data;
        options.series.push(flow);
        options.series.push(cache);
        var chat = new Highcharts.Chart(options);
    }
    // Builds and renders the 30-day flow/cache chart into #query_month.
    this.renderMonth = function() {
        var that = this;
        var data = this.months;
        var options = {
            chart : {
                defaultSeriesType : 'line',
                renderTo : 'query_month',
                inverted : false
            },
            title : {
                text : '最近30天流量(蓝色),缓存(绿色)统计图'
            },
            subtitle : {
                text : '',
                x : 80
            },
            credits:{
                enabled: true,
                position: {
                    align: 'right',
                    x: -10,
                    y: -10
                },
                href: "https://www.cdnbest.com",
                style: {
                    color:'blue'
                },
                text: "CDN贝"
            },
            xAxis : {
                categories : []
            },
            yAxis : {
                title : {
                    text : '流量'
                }
            },
            legend : {
                enabled : false
            },
            tooltip : {
                formatter : function() {
                    var str = '流量:' + Highcharts.numberFormat(this.y, 1) + ' byte<br/>当前时间:' + this.x + '日';
                    str += ",缓存命中率:" + that.getMonthRatio(this.point.x) + "%";
                    return str
                }
            },
            series : []
        };
        var flow = {};
        flow.name = '流量';
        flow.data = [];
        for ( var i in data.cate) {
            flow.data.push(data.nums[data.cate[i]]);
        }
        this.flowMonthData = flow.data;
        // Category labels: last two characters of each date key (the day).
        for ( var i in data.cate) {
            options.xAxis.categories.push(data.cate[i].substr(-2));
        }
        options.series.push(flow);
        var cache = {};
        cache.name = "缓存";
        cache.data = [];
        for ( var i in data.cate) {
            cache.data.push(data.cache[data.cate[i]]);
        }
        this.cacheMonthData = cache.data;
        options.series.push(cache);
        var chat = new Highcharts.Chart(options);
    }
    // Shown when the API reports the user is not logged in; injects the
    // #site-nologin-template markup into #h.
    this.renderLoginError = function() {
        var template = $("#site-nologin-template").html();
        $("#h").append(template);
    }
}
$(document).ready(function () {
    // Highlight the navigation entries for this page.
    $('#record-operat #cdnflow a').addClass('cur');
    $('#nav_domain').addClass('cur');

    // Boot the page controller.
    new Main().getInfo();
});
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.streams.errors.DefaultProductionExceptionHandler;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.state.StateSerdes;
import org.apache.kafka.test.MockProcessorContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
public class SinkNodeTest {
private final Serializer anySerializer = Serdes.Bytes().serializer();
private final StateSerdes anyStateSerde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);
private final MockProcessorContext context = new MockProcessorContext(anyStateSerde,
new RecordCollectorImpl(new MockProducer<byte[], byte[]>(true, anySerializer, anySerializer), null, new LogContext("sinknode-test "), new DefaultProductionExceptionHandler()));
private final SinkNode sink = new SinkNode<>("anyNodeName", "any-output-topic", anySerializer, anySerializer, null);
@Before
public void before() {
sink.init(context);
}
@After
public void after() {
context.close();
}
@Test
@SuppressWarnings("unchecked")
public void shouldThrowStreamsExceptionOnInputRecordWithInvalidTimestamp() {
final Bytes anyKey = new Bytes("any key".getBytes());
final Bytes anyValue = new Bytes("any value".getBytes());
// When/Then
context.setTime(-1); // ensures a negative timestamp is set for the record we send next
try {
sink.process(anyKey, anyValue);
fail("Should have thrown StreamsException");
} catch (final StreamsException ignored) {
// expected
}
}
@Test
@SuppressWarnings("unchecked")
public void shouldThrowStreamsExceptionOnKeyValueTypeSerializerMismatch() {
final String keyOfDifferentTypeThanSerializer = "key with different type";
final String valueOfDifferentTypeThanSerializer = "value with different type";
// When/Then
context.setTime(0);
try {
sink.process(keyOfDifferentTypeThanSerializer, valueOfDifferentTypeThanSerializer);
fail("Should have thrown StreamsException");
} catch (final StreamsException e) {
assertThat(e.getCause(), instanceOf(ClassCastException.class));
}
}
@Test
@SuppressWarnings("unchecked")
public void shouldHandleNullKeysWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch() {
final String invalidValueToTriggerSerializerMismatch = "";
// When/Then
context.setTime(1);
try {
sink.process(null, invalidValueToTriggerSerializerMismatch);
fail("Should have thrown StreamsException");
} catch (final StreamsException e) {
assertThat(e.getCause(), instanceOf(ClassCastException.class));
assertThat(e.getMessage(), containsString("unknown because key is null"));
}
}
@Test
@SuppressWarnings("unchecked")
public void shouldHandleNullValuesWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch() {
final String invalidKeyToTriggerSerializerMismatch = "";
// When/Then
context.setTime(1);
try {
sink.process(invalidKeyToTriggerSerializerMismatch, null);
fail("Should have thrown StreamsException");
} catch (final StreamsException e) {
assertThat(e.getCause(), instanceOf(ClassCastException.class));
assertThat(e.getMessage(), containsString("unknown because value is null"));
}
}
}
| MyPureCloud/kafka | streams/src/test/java/org/apache/kafka/streams/processor/internals/SinkNodeTest.java | Java | apache-2.0 | 4,866 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/iot/model/DescribeAuditMitigationActionsTaskResult.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/UnreferencedParam.h>
#include <utility>
using namespace Aws::IoT::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws;
// Default constructor: the task status starts out unset. NOTE: this file is
// produced by the AWS SDK code generator; manual edits are normally overwritten.
DescribeAuditMitigationActionsTaskResult::DescribeAuditMitigationActionsTaskResult() : 
    m_taskStatus(AuditMitigationActionsTaskStatus::NOT_SET)
{
}
// Constructs the result directly from a service response by delegating to
// operator= below.
DescribeAuditMitigationActionsTaskResult::DescribeAuditMitigationActionsTaskResult(const Aws::AmazonWebServiceResult<JsonValue>& result) : 
    m_taskStatus(AuditMitigationActionsTaskStatus::NOT_SET)
{
  *this = result;
}
// Populates this result from the DescribeAuditMitigationActionsTask JSON
// payload. Every field is optional: only members present in the response are
// assigned. NOTE: generated code (AWS SDK code generator).
DescribeAuditMitigationActionsTaskResult& DescribeAuditMitigationActionsTaskResult::operator =(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
  JsonView jsonValue = result.GetPayload().View();
  if(jsonValue.ValueExists("taskStatus"))
  {
    m_taskStatus = AuditMitigationActionsTaskStatusMapper::GetAuditMitigationActionsTaskStatusForName(jsonValue.GetString("taskStatus"));

  }

  if(jsonValue.ValueExists("startTime"))
  {
    m_startTime = jsonValue.GetDouble("startTime");

  }

  if(jsonValue.ValueExists("endTime"))
  {
    m_endTime = jsonValue.GetDouble("endTime");

  }

  // taskStatistics: map of audit check name -> statistics object.
  if(jsonValue.ValueExists("taskStatistics"))
  {
    Aws::Map<Aws::String, JsonView> taskStatisticsJsonMap = jsonValue.GetObject("taskStatistics").GetAllObjects();
    for(auto& taskStatisticsItem : taskStatisticsJsonMap)
    {
      m_taskStatistics[taskStatisticsItem.first] = taskStatisticsItem.second.AsObject();
    }
  }

  if(jsonValue.ValueExists("target"))
  {
    m_target = jsonValue.GetObject("target");

  }

  // auditCheckToActionsMapping: map of audit check name -> list of mitigation
  // action names.
  if(jsonValue.ValueExists("auditCheckToActionsMapping"))
  {
    Aws::Map<Aws::String, JsonView> auditCheckToActionsMappingJsonMap = jsonValue.GetObject("auditCheckToActionsMapping").GetAllObjects();
    for(auto& auditCheckToActionsMappingItem : auditCheckToActionsMappingJsonMap)
    {
      Array<JsonView> mitigationActionNameListJsonList = auditCheckToActionsMappingItem.second.AsArray();
      Aws::Vector<Aws::String> mitigationActionNameListList;
      mitigationActionNameListList.reserve((size_t)mitigationActionNameListJsonList.GetLength());
      for(unsigned mitigationActionNameListIndex = 0; mitigationActionNameListIndex < mitigationActionNameListJsonList.GetLength(); ++mitigationActionNameListIndex)
      {
        mitigationActionNameListList.push_back(mitigationActionNameListJsonList[mitigationActionNameListIndex].AsString());
      }
      m_auditCheckToActionsMapping[auditCheckToActionsMappingItem.first] = std::move(mitigationActionNameListList);
    }
  }

  if(jsonValue.ValueExists("actionsDefinition"))
  {
    Array<JsonView> actionsDefinitionJsonList = jsonValue.GetArray("actionsDefinition");
    for(unsigned actionsDefinitionIndex = 0; actionsDefinitionIndex < actionsDefinitionJsonList.GetLength(); ++actionsDefinitionIndex)
    {
      m_actionsDefinition.push_back(actionsDefinitionJsonList[actionsDefinitionIndex].AsObject());
    }
  }

  return *this;
}
| awslabs/aws-sdk-cpp | aws-cpp-sdk-iot/source/model/DescribeAuditMitigationActionsTaskResult.cpp | C++ | apache-2.0 | 3,297 |
from __future__ import absolute_import
from __future__ import print_function
from typing import Any, Dict, List
from .template_parser import (
tokenize,
Token,
is_django_block_tag,
)
from six.moves import range
import os
def pretty_print_html(html, num_spaces=4):
    # type: (str, int) -> str
    """Return a re-indented copy of ``html``.

    Each line is nudged left or right so that nested HTML, Handlebars,
    and Django template tags are indented ``num_spaces`` per nesting
    level.  Only leading whitespace changes; the non-whitespace content
    of every line is preserved (asserted near the end).
    """
    # We use 1-based indexing for both rows and columns.
    tokens = tokenize(html)
    lines = html.split('\n')
    # We will keep a stack of "start" tags so that we know
    # when HTML ranges end. Note that some start tags won't
    # be blocks from an indentation standpoint.
    stack = [] # type: List[Dict[str, Any]]
    # Seed our stack with a pseudo entry to make depth calculations
    # easier.
    info = dict(
        block=False,
        depth=-1,
        line=-1,
        token_kind='html_start',
        tag='html',
        extra_indent=0) # type: Dict[str, Any]
    stack.append(info)
    # Our main job is to figure out offsets that we use to nudge lines
    # over by.  offsets maps a 1-based line number to the (possibly
    # negative) number of columns the line should be shifted.
    offsets = {} # type: Dict[int, int]
    # Loop through our start/end tokens, and calculate offsets. As
    # we proceed, we will push/pop info dictionaries on/off a stack.
    for token in tokens:
        if token.kind in ('html_start', 'handlebars_start',
                          'html_singleton', 'django_start') and stack[-1]['tag'] != 'pre':
            # An HTML start tag should only cause a new indent if we
            # are on a new line.
            if (token.tag not in ('extends', 'include', 'else', 'elif') and
                    (is_django_block_tag(token.tag) or
                     token.kind != 'django_start')):
                is_block = token.line > stack[-1]['line']
                if is_block:
                    # Consecutive handlebars/django start tags on separate
                    # lines: retroactively turn the parent into an
                    # indenting block one level deeper.
                    if (((token.kind == 'handlebars_start' and
                            stack[-1]['token_kind'] == 'handlebars_start') or
                            (token.kind == 'django_start' and
                             stack[-1]['token_kind'] == 'django_start')) and
                            not stack[-1]['indenting']):
                        info = stack.pop()
                        info['depth'] = info['depth'] + 1
                        info['indenting'] = True
                        info['adjust_offset_until'] = token.line
                        stack.append(info)
                    new_depth = stack[-1]['depth'] + 1
                    extra_indent = stack[-1]['extra_indent']
                    line = lines[token.line - 1]
                    # adjustment is the current 1-based column of the
                    # first non-whitespace character on the line.
                    adjustment = len(line)-len(line.lstrip()) + 1
                    offset = (1 + extra_indent + new_depth * num_spaces) - adjustment
                    info = dict(
                        block=True,
                        depth=new_depth,
                        actual_depth=new_depth,
                        line=token.line,
                        tag=token.tag,
                        token_kind=token.kind,
                        line_span=token.line_span,
                        offset=offset,
                        extra_indent=token.col - adjustment + extra_indent,
                        extra_indent_prev=extra_indent,
                        adjustment=adjustment,
                        indenting=True,
                        adjust_offset_until=token.line
                    )
                    if token.kind in ('handlebars_start', 'django_start'):
                        info.update(dict(depth=new_depth - 1, indenting=False))
                else:
                    # Start tag on the same line as its parent: record it
                    # for stack bookkeeping, but it adds no indentation.
                    info = dict(
                        block=False,
                        depth=stack[-1]['depth'],
                        actual_depth=stack[-1]['depth'],
                        line=token.line,
                        tag=token.tag,
                        token_kind=token.kind,
                        extra_indent=stack[-1]['extra_indent']
                    )
                stack.append(info)
        elif token.kind in ('html_end', 'handlebars_end',
                            'html_singleton_end', 'django_end') and (stack[-1]['tag'] != 'pre' or token.tag == 'pre'):
            info = stack.pop()
            if info['block']:
                # We are at the end of an indentation block. We
                # assume the whole block was formatted ok before, just
                # possibly at an indentation that we don't like, so we
                # nudge over all lines in the block by the same offset.
                start_line = info['line']
                end_line = token.line
                if token.tag == 'pre':
                    # Never reindent the contents of <pre> blocks.
                    offsets[start_line] = 0
                    offsets[end_line] = 0
                else:
                    offsets[start_line] = info['offset']
                    line = lines[token.line - 1]
                    adjustment = len(line)-len(line.lstrip()) + 1
                    if adjustment == token.col:
                        offsets[end_line] = (info['offset'] +
                                             info['adjustment'] -
                                             adjustment +
                                             info['extra_indent'] -
                                             info['extra_indent_prev'])
                    elif (start_line + info['line_span'] - 1 == end_line and
                            info['line_span'] > 2 and token.kind != 'html_singleton_end'):
                        offsets[end_line] = (1 + info['extra_indent'] + (info['depth'] + 1) * num_spaces) - adjustment
                    elif token.line != info['line']:
                        offsets[end_line] = info['offset']
                if token.tag != 'pre' and token.kind != 'html_singleton_end' and token.tag != 'script':
                    for line_num in range(start_line + 1, end_line):
                        # Be careful not to override offsets that happened
                        # deeper in the HTML within our block.
                        if line_num not in offsets:
                            line = lines[line_num - 1]
                            new_depth = info['depth'] + 1
                            # else/elif branches sit at the same depth as
                            # the tag that opened the block.
                            if (line.lstrip().startswith('{{else}}') or
                                    line.lstrip().startswith('{% else %}') or
                                    line.lstrip().startswith('{% elif')):
                                new_depth = info['actual_depth']
                            extra_indent = info['extra_indent']
                            adjustment = len(line)-len(line.lstrip()) + 1
                            offset = (1 + extra_indent + new_depth * num_spaces) - adjustment
                            offsets[line_num] = offset
                        elif (token.kind in ('handlebars_end', 'django_end') and
                                info['indenting'] and
                                line_num < info['adjust_offset_until']):
                            offsets[line_num] += num_spaces
                elif token.tag != 'pre':
                    for line_num in range(start_line + 1, end_line):
                        if line_num not in offsets:
                            offsets[line_num] = info['offset']
                else:
                    for line_num in range(start_line + 1, end_line):
                        if line_num not in offsets:
                            offsets[line_num] = 0
    # Now that we have all of our offsets calculated, we can just
    # join all our lines together, fixing up offsets as needed.
    formatted_lines = []
    for i, line in enumerate(html.split('\n')):
        row = i + 1
        offset = offsets.get(row, 0)
        pretty_line = line
        if line.strip() == '':
            # Whitespace-only lines are emptied entirely.
            pretty_line = ''
        else:
            if offset > 0:
                pretty_line = (' ' * offset) + pretty_line
            elif offset < 0:
                pretty_line = pretty_line[-1 * offset:]
            # Sanity check: indentation changes must never alter content.
            assert line.strip() == pretty_line.strip()
        formatted_lines.append(pretty_line)
    return '\n'.join(formatted_lines)
def validate_indent_html(fn):
    # type: (str) -> int
    """Check whether the template file ``fn`` is canonically indented.

    Returns 1 if the file already matches the output of
    pretty_print_html, and 0 otherwise.  On a mismatch, the expected
    content is written to /var/tmp/pretty_html.txt and a diff against
    it is printed to help the developer fix the file.
    """
    # Context managers ensure the handles are closed even if reading or
    # pretty-printing raises (the original leaked the handle on error).
    with open(fn) as f:
        html = f.read()
    phtml = pretty_print_html(html)
    if html.split('\n') != phtml.split('\n'):
        with open('/var/tmp/pretty_html.txt', 'w') as temp_file:
            temp_file.write(phtml)
        print('Invalid Indentation detected in file: %s\nDiff for the file against expected indented file:' % (fn))
        # Invoke diff without a shell so unusual characters in the file
        # name cannot break quoting or inject shell commands.
        subprocess.call(['diff', fn, '/var/tmp/pretty_html.txt'])
        return 0
    return 1
| christi3k/zulip | tools/lib/pretty_print.py | Python | apache-2.0 | 8,553 |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.filewatch.jdk7;
import com.google.common.base.Throwables;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import org.gradle.api.Action;
import org.gradle.api.internal.file.FileSystemSubset;
import org.gradle.internal.filewatch.FileWatcher;
import org.gradle.internal.filewatch.FileWatcherEvent;
import org.gradle.internal.filewatch.FileWatcherListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.ref.SoftReference;
import java.nio.file.ClosedWatchServiceException;
import java.nio.file.WatchService;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Adapts a JDK7 {@link WatchService} to Gradle's {@link FileWatcher} abstraction.
 * A single poller runnable (submitted via {@link #start}) pumps events from the
 * watch service and delivers them to the registered listener until stopped.
 */
public class WatchServiceFileWatcherBacking {
    private static final Logger LOGGER = LoggerFactory.getLogger(WatchServiceFileWatcherBacking.class);
    // Lifecycle flags: 'started' guards against a second start(), 'running'
    // reflects whether the poll loop is live, and 'stopped' latches
    // permanently once stop() has been requested.
    private final AtomicBoolean started = new AtomicBoolean();
    private final AtomicBoolean running = new AtomicBoolean();
    private final AtomicBoolean stopped = new AtomicBoolean();
    // Soft reference so that holding on to this backing object cannot pin the
    // poller thread in memory after it has gone away.
    private final AtomicReference<SoftReference<Thread>> pollerThreadReference = new AtomicReference<SoftReference<Thread>>();
    private final Action<? super Throwable> onError;
    private final WatchServiceRegistrar watchServiceRegistrar;
    private final WatchService watchService;
    private final WatchServicePoller poller;
    // Public face handed back from start(); delegates to the enclosing
    // instance's state and stop logic.
    private final FileWatcher fileWatcher = new FileWatcher() {
        @Override
        public boolean isRunning() {
            return running.get();
        }
        @Override
        public void watch(FileSystemSubset fileSystemSubset) throws IOException {
            WatchServiceFileWatcherBacking.this.watchServiceRegistrar.watch(fileSystemSubset);
        }
        @Override
        public void stop() {
            WatchServiceFileWatcherBacking.this.stop();
        }
    };
    WatchServiceFileWatcherBacking(Action<? super Throwable> onError, FileWatcherListener listener, WatchService watchService) throws IOException {
        this(onError, listener, watchService, new WatchServiceRegistrar(watchService, listener));
    }
    WatchServiceFileWatcherBacking(Action<? super Throwable> onError, FileWatcherListener listener, WatchService watchService, WatchServiceRegistrar watchServiceRegistrar) throws IOException {
        this.onError = onError;
        this.watchServiceRegistrar = watchServiceRegistrar;
        this.watchService = watchService;
        this.poller = new WatchServicePoller(watchService);
    }
    /**
     * Starts the poll loop on the given executor and returns the watcher handle.
     * May be called at most once; a second call throws {@link IllegalStateException}.
     */
    public FileWatcher start(ListeningExecutorService executorService) {
        if (started.compareAndSet(false, true)) {
            final ListenableFuture<?> runLoopFuture = executorService.submit(new Runnable() {
                @Override
                public void run() {
                    // stop() may have been requested before the executor ran us.
                    if (!stopped.get()) {
                        pollerThreadReference.set(new SoftReference<Thread>(Thread.currentThread()));
                        running.set(true);
                        try {
                            try {
                                pumpEvents();
                            } catch (InterruptedException e) {
                                // Restore the interrupt flag; shutdown proceeds in finally.
                                Thread.currentThread().interrupt();
                            } catch (Throwable t) {
                                // Interruption wrapped in another exception is still a
                                // normal shutdown; anything else is reported to onError.
                                if (!(Throwables.getRootCause(t) instanceof InterruptedException)) {
                                    stop();
                                    onError.execute(t);
                                }
                            }
                        } finally {
                            stop();
                        }
                    }
                }
            });
            // This is necessary so that the watcher indicates its not running if the runnable gets cancelled
            Futures.addCallback(runLoopFuture, new FutureCallback<Object>() {
                @Override
                public void onSuccess(Object result) {
                    running.set(false);
                }
                @Override
                public void onFailure(Throwable t) {
                    running.set(false);
                }
            }, MoreExecutors.directExecutor());
            return fileWatcher;
        } else {
            throw new IllegalStateException("file watcher is started");
        }
    }
    /**
     * Poll loop: takes batches of events from the watch service and delivers
     * them until stopped, interrupted, or the watch service is closed.
     */
    private void pumpEvents() throws InterruptedException {
        while (isRunning()) {
            try {
                List<FileWatcherEvent> events = poller.takeEvents();
                if (events != null) {
                    deliverEvents(events);
                }
            } catch (ClosedWatchServiceException e) {
                // The watch service was closed underneath us; shut down cleanly.
                LOGGER.debug("Received ClosedWatchServiceException, stopping");
                stop();
            }
        }
    }
    /** Forwards each event to the registrar, bailing out early if stopped. */
    private void deliverEvents(List<FileWatcherEvent> events) {
        for (FileWatcherEvent event : events) {
            if (!isRunning()) {
                LOGGER.debug("File watching isn't running, breaking out of event delivery.");
                break;
            }
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Received file system event: {}", event);
            }
            watchServiceRegistrar.onChange(fileWatcher, event);
        }
    }
    // Treat an interrupt on the poller thread as a stop request.
    private boolean isRunning() {
        return running.get() && !Thread.currentThread().isInterrupted();
    }
    /**
     * Idempotent shutdown: first caller interrupts the poller thread and
     * closes the watch service; subsequent calls are no-ops.
     */
    private void stop() {
        if (stopped.compareAndSet(false, true)) {
            if (running.compareAndSet(true, false)) {
                LOGGER.debug("Stopping file watching");
                interruptPollerThread();
                try {
                    watchService.close();
                } catch (IOException e) {
                    // ignore exception in shutdown
                } catch (ClosedWatchServiceException e) {
                    // ignore
                }
            }
        }
    }
    /** Interrupts the poller thread, if it is alive and not the caller. */
    private void interruptPollerThread() {
        SoftReference<Thread> threadSoftReference = pollerThreadReference.getAndSet(null);
        if (threadSoftReference != null) {
            Thread pollerThread = threadSoftReference.get();
            if (pollerThread != null && pollerThread != Thread.currentThread()) {
                // only interrupt poller thread if it's not current thread
                LOGGER.debug("Interrupting poller thread '{}'", pollerThread.getName());
                pollerThread.interrupt();
            }
        }
    }
}
| blindpirate/gradle | subprojects/core/src/main/java/org/gradle/internal/filewatch/jdk7/WatchServiceFileWatcherBacking.java | Java | apache-2.0 | 7,361 |
/**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
is3pThrottled,
getAmpAdRenderOutsideViewport,
incrementLoadingAds,
} from '../../amp-ad/0.1/concurrent-load';
import {adConfig} from '../../../ads/_config';
import {signingServerURLs} from '../../../ads/_a4a-config';
import {
removeChildren,
createElementWithAttributes,
} from '../../../src/dom';
import {cancellation, isCancellation} from '../../../src/error';
import {
installAnchorClickInterceptor,
} from '../../../src/anchor-click-interceptor';
import {
installFriendlyIframeEmbed,
setFriendlyIframeEmbedVisible,
} from '../../../src/friendly-iframe-embed';
import {isLayoutSizeDefined} from '../../../src/layout';
import {isAdPositionAllowed} from '../../../src/ad-helper';
import {dev, user} from '../../../src/log';
import {getMode} from '../../../src/mode';
import {isArray, isObject, isEnumValue} from '../../../src/types';
import {some} from '../../../src/utils/promise';
import {utf8Decode} from '../../../src/utils/bytes';
import {viewerForDoc} from '../../../src/viewer';
import {xhrFor} from '../../../src/xhr';
import {endsWith} from '../../../src/string';
import {platformFor} from '../../../src/platform';
import {cryptoFor} from '../../../src/crypto';
import {isExperimentOn} from '../../../src/experiments';
import {setStyle} from '../../../src/style';
import {handleClick} from '../../../ads/alp/handler';
import {AdDisplayState} from '../../../extensions/amp-ad/0.1/amp-ad-ui';
import {
getDefaultBootstrapBaseUrl,
generateSentinel,
} from '../../../src/3p-frame';
import {
installUrlReplacementsForEmbed,
} from '../../../src/service/url-replacements-impl';
import {extensionsFor} from '../../../src/extensions';
import {A4AVariableSource} from './a4a-variable-source';
// TODO(tdrl): Temporary. Remove when we migrate to using amp-analytics.
import {getTimingDataAsync} from '../../../src/service/variable-source';
import {getContextMetadata} from '../../../src/iframe-attributes';
/** @type {string} Marker for the amp-ad-metadata script tag in creatives. */
const METADATA_STRING = '<script type="application/json" amp-ad-metadata>';
/**
 * Same marker as METADATA_STRING, but with unquoted attribute values.
 * @type {string}
 */
const METADATA_STRING_NO_QUOTES =
    '<script type=application/json amp-ad-metadata>';
// TODO(tdrl): Temporary, while we're verifying whether SafeFrame is an
// acceptable solution to the 'Safari on iOS doesn't fetch iframe src from
// cache' issue.  See https://github.com/ampproject/amphtml/issues/5614
/** @type {string} */
const SAFEFRAME_VERSION = '1-0-5';
/** @type {string} @visibleForTesting */
export const SAFEFRAME_IMPL_PATH =
    'https://tpc.googlesyndication.com/safeframe/' + SAFEFRAME_VERSION +
    '/html/container.html';
/**
 * Response header used by the ad server to pick the cross-origin render mode.
 * @type {string} @visibleForTesting
 */
export const RENDERING_TYPE_HEADER = 'X-AmpAdRender';
/** @type {string} */
const TAG = 'amp-a4a';
/** @type {string} Sentinel rejection value used to signal slot collapse. */
const NO_CONTENT_RESPONSE = 'NO-CONTENT-RESPONSE';
/** @enum {string} Valid values for the RENDERING_TYPE_HEADER header. */
export const XORIGIN_MODE = {
  CLIENT_CACHE: 'client_cache',
  SAFEFRAME: 'safeframe',
  NAMEFRAME: 'nameframe',
};
/** @type {!Object} @private Attributes applied to every ad iframe variant. */
const SHARED_IFRAME_PROPERTIES = {
  frameborder: '0',
  allowfullscreen: '',
  allowtransparency: '',
  scrolling: 'no',
  marginwidth: '0',
  marginheight: '0',
};
/** @typedef {{
 *    creative: ArrayBuffer,
 *    signature: ?Uint8Array,
 *    size: ?Array<number>
 *  }} */
export let AdResponseDef;
/** @typedef {{
      minifiedCreative: string,
      customElementExtensions: !Array<string>,
      customStylesheets: !Array<{href: string}>
    }} */
let CreativeMetaDataDef;
/**
 * A set of public keys for a single AMP signing service.  A single service may
 * return more than one key if, e.g., they're rotating keys and they serve
 * the current and upcoming keys.  A CryptoKeysDef stores one or more
 * (promises to) keys, in the order given by the return value from the
 * signing service.
 *
 * @typedef {{serviceName: string, keys: !Array<!Promise<!../../../src/crypto.PublicKeyInfoDef>>}}
 */
let CryptoKeysDef;
/**
 * The public keys for all signing services.  This is an array of promises,
 * one per signing service, in the order given by the array returned by
 * #getSigningServiceNames().  Each entry resolves to the keys returned by
 * that service, represented by a `CryptoKeysDef` object.
 *
 * @typedef {Array<!Promise<!CryptoKeysDef>>}
 */
let AllServicesCryptoKeysDef;
/** @private Lifecycle ping codes emitted via emitLifecycleEvent. */
export const LIFECYCLE_STAGES = {
  // Note: Use strings as values here, rather than numbers, so that "0" does
  // not test as `false` later.
  adSlotCleared: '-1',
  urlBuilt: '1',
  adRequestStart: '2',
  adRequestEnd: '3',
  extractCreativeAndSignature: '4',
  adResponseValidateStart: '5',
  renderFriendlyStart: '6',  // TODO(dvoytenko): this signal and similar are actually "embed-create", not "render-start".
  renderCrossDomainStart: '7',
  renderFriendlyEnd: '8',
  renderCrossDomainEnd: '9',
  preAdThrottle: '10',
  renderSafeFrameStart: '11',
  throttled3p: '12',
  adResponseValidateEnd: '13',
  xDomIframeLoaded: '14',
  friendlyIframeLoaded: '15',
  adSlotCollapsed: '16',
  adSlotUnhidden: '17',
  layoutAdPromiseDelay: '18',
  signatureVerifySuccess: '19',
  networkError: '20',
  friendlyIframeIniLoad: '21',
};
/**
 * Wraps `fn` so that any exception it throws is routed to the optional
 * `onError` handler instead of propagating.  If no handler is supplied, or
 * the handler itself throws, the error is swallowed and the wrapped call
 * evaluates to undefined.
 * @param {!Function} fn to protect
 * @param {T=} inThis An optional object to use as the 'this' object
 *    when calling the function.  If not provided, undefined is bound as this
 *    when calling function.
 * @param {function(this:T, !Error, ...*):?=} onError function given error
 *    and arguments provided to function call.
 * @return {!Function} protected function
 * @template T
 * @visibleForTesting
 */
export function protectFunctionWrapper(
    fn, inThis = undefined, onError = undefined) {
  return (...callArgs) => {
    try {
      return fn.apply(inThis, callArgs);
    } catch (primaryError) {
      if (!onError) {
        // No handler: swallow the error and yield undefined.
        return undefined;
      }
      try {
        // Hand the handler the error followed by the original arguments.
        return onError.apply(inThis, [primaryError].concat(callArgs));
      } catch (unusedHandlerError) {
        // The handler itself threw; swallow that too.
        return undefined;
      }
    }
  };
}
export class AmpA4A extends AMP.BaseElement {
// TODO: Add more error handling throughout code.
// TODO: Handle creatives that do not fill.
  /**
   * Initializes all per-slot state; no network or DOM work happens here.
   * @param {!Element} element
   */
  constructor(element) {
    super(element);
    dev().assert(AMP.AmpAdUIHandler);
    dev().assert(AMP.AmpAdXOriginIframeHandler);
    /** @private {?Promise<?CreativeMetaDataDef>} */
    this.adPromise_ = null;
    /**
     * @private {number} unique ID of the currently executing promise to allow
     * for cancellation.
     */
    this.promiseId_ = 0;
    /** {?Object} per-network ad config; populated in buildCallback. */
    this.config = null;
    /** @private {?string} */
    this.adUrl_ = null;
    /** @private {?../../../src/friendly-iframe-embed.FriendlyIframeEmbed} */
    this.friendlyIframeEmbed_ = null;
    /** {?AMP.AmpAdUIHandler} */
    this.uiHandler = null;
    /** @private {?AMP.AmpAdXOriginIframeHandler} */
    this.xOriginIframeHandler_ = null;
    /** @private {boolean} whether layoutMeasure has been executed. */
    this.layoutMeasureExecuted_ = false;
    /** @const @private {!../../../src/service/vsync-impl.Vsync} */
    this.vsync_ = this.getVsync();
    /** @private {boolean} whether creative has been verified as AMP */
    this.isVerifiedAmpCreative_ = false;
    /** @private @const {!../../../src/service/crypto-impl.Crypto} */
    this.crypto_ = cryptoFor(this.win);
    // Validation keys are cached on the window so all slots share one fetch.
    if (!this.win.ampA4aValidationKeys) {
      // Without the following variable assignment, there's no way to apply a
      // type annotation to a win-scoped variable, so the type checker doesn't
      // catch type errors here. This no-op allows us to enforce some type
      // expectations. The const assignment will hopefully be optimized
      // away by the compiler. *fingers crossed*
      /** @type {!AllServicesCryptoKeysDef} */
      const forTypeSafety = this.getKeyInfoSets_();
      this.win.ampA4aValidationKeys = forTypeSafety;
    }
    /** @private {?ArrayBuffer} */
    this.creativeBody_ = null;
    /**
     * Note(keithwrightbos) - ensure the default here is null so that ios
     * uses safeframe when response header is not specified.
     * @private {?XORIGIN_MODE}
     */
    this.experimentalNonAmpCreativeRenderMethod_ =
        platformFor(this.win).isIos() ? XORIGIN_MODE.SAFEFRAME : null;
    /**
     * Gets a notion of current time, in ms. The value is not necessarily
     * absolute, so should be used only for computing deltas. When available,
     * the performance system will be used; otherwise Date.now() will be
     * returned.
     *
     * @const {function():number}
     * @private
     */
    this.getNow_ = (this.win.performance && this.win.performance.now) ?
        this.win.performance.now.bind(this.win.performance) : Date.now;
    /**
     * Protected version of emitLifecycleEvent that ensures error does not
     * cause promise chain to reject.
     * @private {function(string, !Object=)}
     */
    this.protectedEmitLifecycleEvent_ = protectFunctionWrapper(
        this.emitLifecycleEvent, this,
        (err, varArgs) => {
          dev().error(TAG, this.element.getAttribute('type'),
              'Error on emitLifecycleEvent', err, varArgs) ;
        });
    /** @const {string} */
    this.sentinel = generateSentinel(window);
    /**
     * Used to indicate whether this slot should be collapsed or not. Marked
     * true if the ad response has status 204, is null, or has a null
     * arrayBuffer.
     * @private {boolean}
     */
    this.isCollapsed_ = false;
  }
  /**
   * @override
   * @return {number} scheduling priority for preload/layout.
   */
  getPriority() {
    // Priority used for scheduling preload and layout callback. Because
    // AMP creatives will be injected as part of the promise chain created
    // within onLayoutMeasure, this is only relevant to non-AMP creatives
    // therefore we want this to match the 3p priority.
    return 2;
  }
  /** @override */
  isLayoutSupported(layout) {
    // Any size-defined layout (fixed, responsive, etc.) is acceptable.
    return isLayoutSizeDefined(layout);
  }
  /** @override */
  buildCallback() {
    // Look up the network-specific config keyed by the ad "type" attribute;
    // unknown types fall back to an empty config object.
    const adType = this.element.getAttribute('type');
    this.config = adConfig[adType] || {};
    this.uiHandler = new AMP.AmpAdUIHandler(this);
    this.uiHandler.init();
  }
  /** @override */
  renderOutsideViewport() {
    // Ensure non-verified AMP creatives are throttled.
    if (!this.isVerifiedAmpCreative_ && is3pThrottled(this.win)) {
      this.protectedEmitLifecycleEvent_('throttled3p');
      return false;
    }
    // Otherwise the ad is good to go.
    // Prefer an element-specified render-outside-viewport value when present;
    // fall back to the default implementation otherwise.
    const elementCheck = getAmpAdRenderOutsideViewport(this.element);
    return elementCheck !== null ?
        elementCheck : super.renderOutsideViewport();
  }
  /**
   * To be overridden by network specific implementation indicating if element
   * (and environment generally) are valid for sending XHR queries.
   * The default implementation accepts all elements.
   * @return {boolean} whether element is valid and ad request should be
   *    sent. If false, no ad request is sent and slot will be collapsed if
   *    possible.
   */
  isValidElement() {
    return true;
  }
/**
* Returns true if this element was loaded from an amp-ad element. For use by
* network-specific implementations that don't want to allow themselves to be
* embedded directly into a page.
* @return {boolean}
*/
isAmpAdElement() {
return this.element.tagName == 'AMP-AD' ||
this.element.tagName == 'AMP-EMBED';
}
/**
* Prefetches and preconnects URLs related to the ad using adPreconnect
* registration which assumes ad request domain used for 3p is applicable.
* @param {boolean=} unusedOnLayout
* @override
*/
preconnectCallback(unusedOnLayout) {
this.preconnect.url(SAFEFRAME_IMPL_PATH);
this.preconnect.url(getDefaultBootstrapBaseUrl(this.win, 'nameframe'));
if (!this.config) {
return;
}
const preconnect = this.config.preconnect;
// NOTE(keithwrightbos): using onLayout to indicate if preconnect should be
// given preferential treatment. Currently this would be false when
// relevant (i.e. want to preconnect on or before onLayoutMeasure) which
// causes preconnect to delay for 1 sec (see custom-element#preconnect)
// therefore hard coding to true.
// NOTE(keithwrightbos): Does not take isValidElement into account so could
// preconnect unnecessarily, however it is assumed that isValidElement
// matches amp-ad loader predicate such that A4A impl does not load.
if (typeof preconnect == 'string') {
this.preconnect.url(preconnect, true);
} else if (preconnect) {
preconnect.forEach(p => {
this.preconnect.url(p, true);
});
}
}
  /**
   * Entry point of the A4A flow. Builds the ad request URL, issues the XHR,
   * extracts and signature-verifies the creative, and stores the resulting
   * promise on this.adPromise_ for layoutCallback to consume. May be invoked
   * multiple times by the runtime; all but the first effective call bail out
   * early via layoutMeasureExecuted_.
   * @override
   */
  onLayoutMeasure() {
    if (this.xOriginIframeHandler_) {
      this.xOriginIframeHandler_.onLayoutMeasure();
    }
    if (this.layoutMeasureExecuted_ ||
        !this.crypto_.isCryptoAvailable()) {
      // onLayoutMeasure gets called multiple times.
      return;
    }
    // Zero-sized slots never trigger an ad request.
    const slotRect = this.getIntersectionElementLayoutBox();
    if (slotRect.height == 0 || slotRect.width == 0) {
      dev().fine(
          TAG, 'onLayoutMeasure canceled due height/width 0', this.element);
      return;
    }
    user().assert(isAdPositionAllowed(this.element, this.win),
        '<%s> is not allowed to be placed in elements with ' +
        'position:fixed: %s', this.element.tagName, this.element);
    this.layoutMeasureExecuted_ = true;
    // OnLayoutMeasure can be called when page is in prerender so delay until
    // visible. Assume that it is ok to call isValidElement as it should
    // only being looking at window, immutable properties (i.e. location) and
    // its element ancestry.
    if (!this.isValidElement()) {
      // TODO(kjwright): collapse?
      user().warn(TAG, this.element.getAttribute('type'),
          'Amp ad element ignored as invalid', this.element);
      return;
    }
    // Increment unique promise ID so that if its value changes within the
    // promise chain due to cancel from unlayout, the promise will be rejected.
    this.promiseId_++;
    const promiseId = this.promiseId_;
    // Shorthand for: reject promise if current promise chain is out of date.
    const checkStillCurrent = promiseId => {
      if (promiseId != this.promiseId_) {
        throw cancellation();
      }
    };
    // If in localDev `type=fake` Ad specifies `force3p`, it will be forced
    // to go via 3p.
    if (getMode().localDev &&
        this.element.getAttribute('type') == 'fake' &&
        this.element.getAttribute('force3p') == 'true') {
      this.adUrl_ = this.getAdUrl();
      this.adPromise_ = Promise.resolve();
      return;
    }
    // Return value from this chain: True iff rendering was "successful"
    // (i.e., shouldn't try to render later via iframe); false iff should
    // try to render later in iframe.
    // Cases to handle in this chain:
    // - Everything ok => Render; return true
    // - Empty network response returned => Don't render; return true
    // - Can't parse creative out of response => Don't render; return false
    // - Can parse, but creative is empty => Don't render; return true
    // - Validation fails => return false
    // - Rendering fails => return false
    // - Chain cancelled => don't return; drop error
    // - Uncaught error otherwise => don't return; percolate error up
    this.adPromise_ = viewerForDoc(this.getAmpDoc()).whenFirstVisible()
        // This block returns the ad URL, if one is available.
        /** @return {!Promise<?string>} */
        .then(() => {
          checkStillCurrent(promiseId);
          return /** @type {!Promise<?string>} */ (this.getAdUrl());
        })
        // This block returns the (possibly empty) response to the XHR request.
        /** @return {!Promise<?Response>} */
        .then(adUrl => {
          checkStillCurrent(promiseId);
          this.adUrl_ = adUrl;
          this.protectedEmitLifecycleEvent_('urlBuilt');
          return adUrl && this.sendXhrRequest_(adUrl);
        })
        // The following block returns either the response (as a {bytes, headers}
        // object), or null if no response is available / response is empty.
        /** @return {?Promise<?{bytes: !ArrayBuffer, headers: !Headers}>} */
        .then(fetchResponse => {
          checkStillCurrent(promiseId);
          this.protectedEmitLifecycleEvent_('adRequestEnd');
          // If the response is null, we want to return null so that
          // unlayoutCallback will attempt to render via x-domain iframe,
          // assuming ad url or creative exist.
          if (!fetchResponse) {
            return null;
          }
          // If the response has response code 204, or arrayBuffer is null,
          // collapse it.
          if (!fetchResponse.arrayBuffer || fetchResponse.status == 204) {
            this.forceCollapse();
            return Promise.reject(NO_CONTENT_RESPONSE);
          }
          // TODO(tdrl): Temporary, while we're verifying whether SafeFrame is
          // an acceptable solution to the 'Safari on iOS doesn't fetch
          // iframe src from cache' issue. See
          // https://github.com/ampproject/amphtml/issues/5614
          const method = fetchResponse.headers.get(RENDERING_TYPE_HEADER) ||
              this.experimentalNonAmpCreativeRenderMethod_;
          this.experimentalNonAmpCreativeRenderMethod_ = method;
          if (method && !isEnumValue(XORIGIN_MODE, method)) {
            dev().error('AMP-A4A', `cross-origin render mode header ${method}`);
          }
          // Note: Resolving a .then inside a .then because we need to capture
          // two fields of fetchResponse, one of which is, itself, a promise,
          // and one of which isn't. If we just return
          // fetchResponse.arrayBuffer(), the next step in the chain will
          // resolve it to a concrete value, but we'll lose track of
          // fetchResponse.headers.
          return fetchResponse.arrayBuffer().then(bytes => {
            if (bytes.byteLength == 0) {
              // The server returned no content. Instead of displaying a blank
              // rectangle, we collapse the slot instead.
              this.forceCollapse();
              return Promise.reject(NO_CONTENT_RESPONSE);
            }
            return {
              bytes,
              headers: fetchResponse.headers,
            };
          });
        })
        // This block returns the ad creative and signature, if available; null
        // otherwise.
        /**
         * @return {!Promise<?{creative: !ArrayBuffer, signature: !ArrayBuffer}>}
         */
        .then(responseParts => {
          checkStillCurrent(promiseId);
          if (responseParts) {
            this.protectedEmitLifecycleEvent_('extractCreativeAndSignature');
          }
          return responseParts && this.extractCreativeAndSignature(
              responseParts.bytes, responseParts.headers);
        })
        // This block returns the ad creative if it exists and validates as AMP;
        // null otherwise.
        /** @return {!Promise<?ArrayBuffer>} */
        .then(creativeParts => {
          checkStillCurrent(promiseId);
          // Keep a handle to the creative body so that we can render into
          // SafeFrame or NameFrame later, if necessary. TODO(tdrl): Temporary,
          // while we
          // assess whether this is the right solution to the Safari+iOS iframe
          // src cache issue. If we decide to keep a SafeFrame-like solution,
          // we should restructure the promise chain to pass this info along
          // more cleanly, without use of an object variable outside the chain.
          if (!creativeParts) {
            return Promise.resolve();
          }
          if (this.experimentalNonAmpCreativeRenderMethod_ !=
              XORIGIN_MODE.CLIENT_CACHE &&
              creativeParts.creative) {
            this.creativeBody_ = creativeParts.creative;
          }
          // A two-element size array from the response resizes the slot.
          if (creativeParts.size && creativeParts.size.length == 2) {
            this.handleResize(creativeParts.size[0], creativeParts.size[1]);
          }
          if (!creativeParts.signature) {
            return Promise.resolve();
          }
          this.protectedEmitLifecycleEvent_('adResponseValidateStart');
          return this.verifyCreativeSignature_(
              creativeParts.creative, creativeParts.signature)
              .then(creative => {
                if (creative) {
                  return creative;
                }
                user().error(TAG, this.element.getAttribute('type'),
                    'Unable to validate AMP creative against key providers');
                // Attempt to re-fetch the keys in case our locally cached
                // batch has expired.
                this.win.ampA4aValidationKeys = this.getKeyInfoSets_();
                return this.verifyCreativeSignature_(
                    creativeParts.creative, creativeParts.signature);
              });
        })
        .then(creative => {
          checkStillCurrent(promiseId);
          // Need to know if creative was verified as part of render outside
          // viewport but cannot wait on promise. Sadly, need a state a
          // variable.
          this.isVerifiedAmpCreative_ = !!creative;
          // TODO(levitzky) If creative comes back null, we should consider re-
          // fetching the signing server public keys and try the verification
          // step again.
          return creative && utf8Decode(creative);
        })
        // This block returns CreativeMetaDataDef iff the creative was verified
        // as AMP and could be properly parsed for friendly iframe render.
        /** @return {?CreativeMetaDataDef} */
        .then(creativeDecoded => {
          checkStillCurrent(promiseId);
          // Note: It's critical that #getAmpAdMetadata_ be called
          // on precisely the same creative that was validated
          // via #validateAdResponse_. See GitHub issue
          // https://github.com/ampproject/amphtml/issues/4187
          let creativeMetaDataDef;
          if (!creativeDecoded ||
              !(creativeMetaDataDef = this.getAmpAdMetadata_(creativeDecoded))) {
            return null;
          }
          // Update priority.
          this.updatePriority(0);
          // Load any extensions; do not wait on their promises as this
          // is just to prefetch.
          const extensions = extensionsFor(this.win);
          creativeMetaDataDef.customElementExtensions.forEach(
              extensionId => extensions.loadExtension(extensionId));
          return creativeMetaDataDef;
        })
        .catch(error => {
          if (error == NO_CONTENT_RESPONSE) {
            return {
              minifiedCreative: '',
              customElementExtensions: [],
              customStylesheets: [],
            };
          }
          // If error in chain occurs, report it and return null so that
          // layoutCallback can render via cross domain iframe assuming ad
          // url or creative exist.
          this.promiseErrorHandler_(error);
          return null;
        });
  }
/**
* Attempts to validate the creative signature against every key currently in
* our possession. This should never be called before at least one key fetch
* attempt is made.
*
* @param {!ArrayBuffer} creative
* @param {!Uint8Array} signature
* @return {!Promise<!ArrayBuffer>} The creative.
*/
verifyCreativeSignature_(creative, signature) {
  if (getMode().localDev) {
    // localDev mode allows "FAKESIG" signature for the "fake" network.
    if (signature == 'FAKESIG' &&
        this.element.getAttribute('type') == 'fake') {
      return Promise.resolve(creative);
    }
  }
  // For each signing service, we have exactly one Promise,
  // keyInfoSetPromise, that holds an Array of Promises of signing keys.
  // So long as any one of these signing services can verify the
  // signature, then the creative is valid AMP.
  /** @type {!AllServicesCryptoKeysDef} */
  const keyInfoSetPromises = this.win.ampA4aValidationKeys;
  // Track if verification found, as it will ensure that promises yet to
  // resolve will "cancel" as soon as possible saving unnecessary resource
  // allocation.
  let verified = false;
  // Outer some(): resolves with the first signing SERVICE that verifies.
  return some(keyInfoSetPromises.map(keyInfoSetPromise => {
    // Resolve Promise into an object containing a 'keys' field, which
    // is an Array of Promises of signing keys. *whew*
    return keyInfoSetPromise.then(keyInfoSet => {
      // As long as any one individual key of a particular signing
      // service, keyInfoPromise, can verify the signature, then the
      // creative is valid AMP.
      if (verified) {
        // Another service already succeeded; bail out cheaply.
        return Promise.reject('noop');
      }
      // Inner some(): resolves with the first KEY of this service that
      // verifies the signature.
      return some(keyInfoSet.keys.map(keyInfoPromise => {
        // Resolve Promise into signing key.
        return keyInfoPromise.then(keyInfo => {
          if (verified) {
            return Promise.reject('noop');
          }
          if (!keyInfo) {
            return Promise.reject('Promise resolved to null key.');
          }
          const signatureVerifyStartTime = this.getNow_();
          // If the key exists, try verifying with it.
          return this.crypto_.verifySignature(
              new Uint8Array(creative),
              signature,
              keyInfo)
              .then(isValid => {
                if (isValid) {
                  verified = true;
                  this.protectedEmitLifecycleEvent_(
                      'signatureVerifySuccess', {
                        'met.delta.AD_SLOT_ID': Math.round(
                            this.getNow_() - signatureVerifyStartTime),
                        'signingServiceName.AD_SLOT_ID': keyInfo.serviceName,
                      });
                  return creative;
                }
                // Only report if signature is expected to match, given that
                // multiple key providers could have been specified.
                // Note: the 'keyInfo &&' check here is not strictly
                // necessary, because we checked that above. But
                // Closure type compiler can't seem to recognize that, so
                // this guarantees it to the compiler.
                if (keyInfo &&
                    this.crypto_.verifyHashVersion(signature, keyInfo)) {
                  user().error(TAG, this.element.getAttribute('type'),
                      'Key failed to validate creative\'s signature',
                      keyInfo.serviceName, keyInfo.cryptoKey);
                }
                // Reject to ensure the some operation waits for other
                // possible providers to properly verify and resolve.
                return Promise.reject(
                    `${keyInfo.serviceName} key failed to verify`);
              },
              // NOTE(review): this rejection handler logs but resolves with
              // undefined rather than re-rejecting, so a crypto error counts
              // as a "success" with an undefined creative for the inner
              // some() — confirm whether this is intentional.
              err => {
                dev().error(
                    TAG, this.element.getAttribute('type'), keyInfo.serviceName,
                    err, this.element);
              });
        });
      }))
      // some() returns an array of which we only need a single value.
      .then(returnedArray => returnedArray[0], () => {
        // Rejection occurs if all keys for this provider fail to validate.
        return Promise.reject(
            `All keys for ${keyInfoSet.serviceName} failed to verify`);
      });
    });
  }))
  .then(returnedArray => {
    this.protectedEmitLifecycleEvent_('adResponseValidateEnd');
    return returnedArray[0];
  }, () => {
    // rejection occurs if all providers fail to verify.
    this.protectedEmitLifecycleEvent_('adResponseValidateEnd');
    return Promise.reject('No validation service could verify this key');
  });
}
/**
* Handles uncaught errors within promise flow.
* @param {*} error
* @private
*/
promiseErrorHandler_(error) {
  // Cancellations are control flow, not failures: let them propagate.
  if (isCancellation(error)) {
    throw error;
  }
  // Normalize non-Error / message-less values into a real Error.
  if (!error || !error.message) {
    error = new Error('unknown error ' + error);
  }
  // Prefix the message with tag and network type, keeping the stack.
  const slotType = this.element.getAttribute('type') || 'notype';
  error.message = `${TAG}: ${slotType}: ${error.message}`;
  // Attach up to 250 chars of the ad URL's query string for debugging.
  const queryStart = this.adUrl_ ? this.adUrl_.indexOf('?') : -1;
  let query = '';
  if (queryStart >= 0) {
    query = this.adUrl_.substring(queryStart + 1, queryStart + 251);
  }
  error.args = {'au': query};
  const mode = getMode();
  if (mode.development || mode.localDev || mode.log) {
    user().error(TAG, error);
    return;
  }
  user().warn(TAG, error);
  // Report with 1% sampling as an expected dev error.
  if (Math.random() < 0.01) {
    dev().expectedError(TAG, error);
  }
}
/** @override */
layoutCallback() {
  // Promise may be null if element was determined to be invalid for A4A.
  if (!this.adPromise_) {
    return Promise.resolve();
  }
  // There's no real throttling with A4A, but this is the signal that is
  // most comparable with the layout callback for 3p ads.
  this.protectedEmitLifecycleEvent_('preAdThrottle');
  const layoutCallbackStart = this.getNow_();
  // Promise chain will have determined if creative is valid AMP.
  return this.adPromise_.then(creativeMetaData => {
    // Record how long layout waited on the ad-fetch/validation chain.
    const delta = this.getNow_() - layoutCallbackStart;
    this.protectedEmitLifecycleEvent_('layoutAdPromiseDelay', {
      layoutAdPromiseDelay: Math.round(delta),
      isAmpCreative: !!creativeMetaData,
    });
    // Slot was collapsed (e.g. no-content response); nothing to render.
    if (this.isCollapsed_) {
      return Promise.resolve();
    }
    // Wrap the subclass hook so its exceptions can't break rendering.
    const protectedOnCreativeRender =
        protectFunctionWrapper(this.onCreativeRender, this, err => {
          dev().error(TAG, this.element.getAttribute('type'),
              'Error executing onCreativeRender', err);
        });
    if (!creativeMetaData) {
      // Non-AMP creative case, will verify ad url existence.
      return this.renderNonAmpCreative_()
          .then(() => protectedOnCreativeRender(false));
    }
    // Must be an AMP creative.
    return this.renderAmpCreative_(creativeMetaData)
        .then(() => protectedOnCreativeRender(true))
        .catch(err => {
          // Failed to render via AMP creative path so fallback to non-AMP
          // rendering within cross domain iframe.
          user().error(TAG, this.element.getAttribute('type'),
              'Error injecting creative in friendly frame', err);
          this.promiseErrorHandler_(err);
          return this.renderNonAmpCreative_()
              .then(() => protectedOnCreativeRender(false));
        });
  }).catch(error => {
    // Report and convert to cancellation so the runtime retries cleanly.
    this.promiseErrorHandler_(error);
    throw cancellation();
  });
}
/** @override */
unlayoutCallback() {
this.protectedEmitLifecycleEvent_('adSlotCleared');
this.uiHandler.setDisplayState(AdDisplayState.NOT_LAID_OUT);
this.isCollapsed_ = false;
// Allow embed to release its resources.
if (this.friendlyIframeEmbed_) {
this.friendlyIframeEmbed_.destroy();
this.friendlyIframeEmbed_ = null;
}
// Remove creative and reset to allow for creation of new ad.
if (!this.layoutMeasureExecuted_) {
return true;
}
removeChildren(this.element);
this.adPromise_ = null;
this.adUrl_ = null;
this.creativeBody_ = null;
this.isVerifiedAmpCreative_ = false;
this.experimentalNonAmpCreativeRenderMethod_ =
platformFor(this.win).isIos() ? XORIGIN_MODE.SAFEFRAME : null;
if (this.xOriginIframeHandler_) {
this.xOriginIframeHandler_.freeXOriginIframe();
this.xOriginIframeHandler_ = null;
}
this.layoutMeasureExecuted_ = false;
// Increment promiseId to cause any pending promise to cancel.
this.promiseId_++;
return true;
}
/** @override */
viewportCallback(inViewport) {
if (this.friendlyIframeEmbed_) {
setFriendlyIframeEmbedVisible(this.friendlyIframeEmbed_, inViewport);
}
if (this.xOriginIframeHandler_) {
this.xOriginIframeHandler_.viewportCallback(inViewport);
}
}
/** @override */
createPlaceholderCallback() {
  // Delegate placeholder creation entirely to the UI handler.
  return this.uiHandler.createPlaceholderCallback();
}
/**
 * Gets the Ad URL to send an XHR Request to. To be implemented
 * by network.
 *
 * Abstract: the base implementation always throws; every network
 * subclass must override this.
 * @return {!Promise<string>|string}
 */
getAdUrl() {
  throw new Error('getAdUrl not implemented!');
}
/**
* Extracts creative and verification signature (if present) from
* XHR response body and header. To be implemented by network.
*
* In the returned value, the `creative` field should be an `ArrayBuffer`
* containing the utf-8 encoded bytes of the creative itself, while the
* `signature` field should be a `Uint8Array` containing the raw signature
* bytes. The `signature` field may be null if no signature was available
* for this creative / the creative is not valid AMP.
*
* @param {!ArrayBuffer} unusedResponseArrayBuffer content as array buffer
* @param {!../../../src/service/xhr-impl.FetchResponseHeaders} unusedResponseHeaders
* XHR service FetchResponseHeaders object containing the response
* headers.
* @return {!Promise<!AdResponseDef>}
*/
// Abstract: the base implementation always throws; every network
// subclass must override this to parse its own response format.
extractCreativeAndSignature(unusedResponseArrayBuffer,
    unusedResponseHeaders) {
  throw new Error('extractCreativeAndSignature not implemented!');
}
/**
* This function is called if the ad response contains a creative size header
* indicating the size of the creative. It provides an opportunity to resize
* the creative, if desired, before it is rendered.
*
* To be implemented by network.
*
* @param {number} width
* @param {number} height
* */
handleResize(width, height) {
user().info('A4A', `Received creative with size ${width}x${height}.`);
}
/**
* Forces the UI Handler to collapse this slot.
* @visibleForTesting
*/
forceCollapse() {
  dev().assert(this.uiHandler);
  // Walk the UI handler through LOADING -> LOADED_NO_CONTENT so the slot
  // collapses as if the network returned no content.
  this.uiHandler.setDisplayState(AdDisplayState.LOADING);
  this.uiHandler.setDisplayState(AdDisplayState.LOADED_NO_CONTENT);
  this.isCollapsed_ = true;
}
/**
* Callback executed when creative has successfully rendered within the
* publisher page. To be overridden by network implementations as needed.
*
* @param {boolean} isVerifiedAmpCreative whether or not the creative was
* verified as AMP and therefore given preferential treatment.
*/
onCreativeRender(isVerifiedAmpCreative) {
if (isVerifiedAmpCreative) {
this.protectedEmitLifecycleEvent_('renderFriendlyEnd');
}
}
/**
* @param {!Element} iframe that was just created. To be overridden for
* testing.
* @visibleForTesting
*/
onCrossDomainIframeCreated(iframe) {
  // Default implementation only logs; test/network code may override.
  dev().info(TAG, this.element.getAttribute('type'),
      `onCrossDomainIframeCreated ${iframe}`);
}
/**
* Send ad request, extract the creative and signature from the response.
* @param {string} adUrl Request URL to send XHR to.
* @return {!Promise<?../../../src/service/xhr-impl.FetchResponse>}
* @private
*/
sendXhrRequest_(adUrl) {
this.protectedEmitLifecycleEvent_('adRequestStart');
const xhrInit = {
mode: 'cors',
method: 'GET',
credentials: 'include',
};
return xhrFor(this.win)
.fetch(adUrl, xhrInit)
.catch(unusedReason => {
// If an error occurs, let the ad be rendered via iframe after delay.
// TODO(taymonbeal): Figure out a more sophisticated test for deciding
// whether to retry with an iframe after an ad request failure or just
// give up and render the fallback content (or collapse the ad slot).
this.protectedEmitLifecycleEvent_('networkError');
return null;
});
}
/**
* To be overridden by network specific implementation indicating which
* signing service(s) is to be used.
* @return {!Array<string>} A list of signing services.
*/
getSigningServiceNames() {
return getMode().localDev ? ['google', 'google-dev'] : ['google'];
}
/**
* Retrieves all public keys, as specified in _a4a-config.js.
* None of the (inner or outer) promises returned by this function can reject.
*
* @return {!AllServicesCryptoKeysDef}
* @private
*/
getKeyInfoSets_() {
  // Without WebCrypto there is nothing to verify with; return no key sets.
  if (!this.crypto_.isCryptoAvailable()) {
    return [];
  }
  // One entry per signing service; each resolves to
  // {serviceName, keys: Array<Promise<CryptoKey|null>>} and never rejects.
  return this.getSigningServiceNames().map(serviceName => {
    dev().assert(getMode().localDev || !endsWith(serviceName, '-dev'));
    const url = signingServerURLs[serviceName];
    const currServiceName = serviceName;
    if (url) {
      // Set disableAmpSourceOrigin so that __amp_source_origin is not
      // included in XHR CORS request allowing for keyset to be cached
      // across pages.
      return xhrFor(this.win).fetchJson(url, {
        mode: 'cors',
        method: 'GET',
        ampCors: false,
        credentials: 'omit',
      }).then(jwkSetObj => {
        // Validate the JWKS shape; fall back to an empty key list so the
        // overall promise still resolves.
        const result = {serviceName: currServiceName};
        if (isObject(jwkSetObj) && Array.isArray(jwkSetObj.keys) &&
            jwkSetObj.keys.every(isObject)) {
          result.keys = jwkSetObj.keys;
        } else {
          user().error(TAG, this.element.getAttribute('type'),
              `Invalid response from signing server ${currServiceName}`,
              this.element);
          result.keys = [];
        }
        return result;
      }).then(jwkSet => {
        // Kick off (but do not await) import of each JWK; individual
        // import failures resolve to null rather than rejecting.
        return {
          serviceName: jwkSet.serviceName,
          keys: jwkSet.keys.map(jwk =>
              this.crypto_.importPublicKey(jwkSet.serviceName, jwk)
              .catch(err => {
                user().error(TAG, this.element.getAttribute('type'),
                    `error importing keys for service: ${jwkSet.serviceName}`,
                    err, this.element);
                return null;
              })),
        };
      }).catch(err => {
        user().error(
            TAG, this.element.getAttribute('type'), err, this.element);
        // TODO(a4a-team): This is a failure in the initial attempt to get
        // the keys, probably b/c of a network condition. We should
        // re-trigger key fetching later.
        return {serviceName: currServiceName, keys: []};
      });
    } else {
      // The given serviceName does not have a corresponding URL in
      // _a4a-config.js.
      const reason = `Signing service '${serviceName}' does not exist.`;
      user().error(
          TAG, this.element.getAttribute('type'), reason, this.element);
      return Promise.resolve({serviceName: currServiceName, keys: []});
    }
  });
}
/**
* Render non-AMP creative within cross domain iframe.
* @return {Promise<boolean>} Whether the creative was successfully rendered.
* @private
*/
renderNonAmpCreative_() {
this.promiseErrorHandler_(new Error('fallback to 3p'));
this.protectedEmitLifecycleEvent_('preAdThrottle');
incrementLoadingAds(this.win);
// Haven't rendered yet, so try rendering via one of our
// cross-domain iframe solutions.
const method = this.experimentalNonAmpCreativeRenderMethod_;
if ((method == XORIGIN_MODE.SAFEFRAME ||
method == XORIGIN_MODE.NAMEFRAME) &&
this.creativeBody_) {
const renderPromise = this.renderViaNameAttrOfXOriginIframe_(
this.creativeBody_);
this.creativeBody_ = null; // Free resources.
return renderPromise;
} else if (this.adUrl_) {
return this.renderViaCachedContentIframe_(this.adUrl_);
} else {
// Ad URL may not exist if buildAdUrl throws error or returns empty.
// If error occurred, it would have already been reported but let's
// report to user in case of empty.
user().warn(TAG, this.element.getAttribute('type'),
'No creative or URL available -- A4A can\'t render any ad');
return Promise.resolve(false);
}
}
/**
* Render a validated AMP creative directly in the parent page.
* @param {!CreativeMetaDataDef} creativeMetaData Metadata required to render
* AMP creative.
* @return {!Promise} Whether the creative was successfully rendered.
* @private
*/
renderAmpCreative_(creativeMetaData) {
  dev().assert(creativeMetaData.minifiedCreative,
      'missing minified creative');
  dev().assert(!!this.element.ownerDocument, 'missing owner document?!');
  this.protectedEmitLifecycleEvent_('renderFriendlyStart');
  // Create and setup friendly iframe.
  const iframe = /** @type {!HTMLIFrameElement} */(
      createElementWithAttributes(
          /** @type {!Document} */(this.element.ownerDocument), 'iframe', {
            frameborder: '0',
            allowfullscreen: '',
            allowtransparency: '',
            scrolling: 'no',
          }));
  this.applyFillContent(iframe);
  // Collect stylesheet hrefs declared in the creative's metadata so the
  // embed can prefetch the fonts.
  const fontsArray = [];
  if (creativeMetaData.customStylesheets) {
    creativeMetaData.customStylesheets.forEach(s => {
      const href = s['href'];
      if (href) {
        fontsArray.push(href);
      }
    });
  }
  return installFriendlyIframeEmbed(
      iframe, this.element, {
        host: this.element,
        url: this.adUrl_,
        html: creativeMetaData.minifiedCreative,
        extensionIds: creativeMetaData.customElementExtensions || [],
        fonts: fontsArray,
      }, embedWin => {
        // Give the embedded document its own URL-replacement service keyed
        // to A4A variables.
        installUrlReplacementsForEmbed(this.getAmpDoc(), embedWin,
            new A4AVariableSource(this.getAmpDoc(), embedWin));
      }).then(friendlyIframeEmbed => {
        this.friendlyIframeEmbed_ = friendlyIframeEmbed;
        setFriendlyIframeEmbedVisible(
            friendlyIframeEmbed, this.isInViewport());
        // Ensure visibility hidden has been removed (set by boilerplate).
        const frameDoc = friendlyIframeEmbed.iframe.contentDocument ||
            friendlyIframeEmbed.win.document;
        setStyle(frameDoc.body, 'visibility', 'visible');
        // Capture phase click handlers on the ad.
        installAnchorClickInterceptor(
            this.getAmpDoc(), friendlyIframeEmbed.win);
        // Bubble phase click handlers on the ad.
        this.registerAlpHandler_(friendlyIframeEmbed.win);
        // Capture timing info for friendly iframe load completion.
        // Fire-and-forget: failures only log, they never fail the render.
        getTimingDataAsync(friendlyIframeEmbed.win,
            'navigationStart', 'loadEventEnd').then(delta => {
          this.protectedEmitLifecycleEvent_('friendlyIframeLoaded', {
            'navStartToLoadEndDelta.AD_SLOT_ID': Math.round(delta),
          });
        }).catch(err => {
          dev().error(TAG, this.element.getAttribute('type'),
              'getTimingDataAsync for renderFriendlyEnd failed: ', err);
        });
        // It's enough to wait for "ini-load" signal because in a FIE case
        // we know that the embed no longer consumes significant resources
        // after the initial load.
        return friendlyIframeEmbed.whenIniLoaded();
      }).then(() => {
        // Capture ini-load ping.
        this.protectedEmitLifecycleEvent_('friendlyIframeIniLoad');
      });
}
/**
* Shared functionality for cross-domain iframe-based rendering methods.
* @param {!Element} iframe Iframe to render. Should be fully configured
* (all attributes set), but not yet attached to DOM.
* @return {!Promise} awaiting load event for ad frame
* @private
*/
iframeRenderHelper_(iframe) {
  // TODO(keithwrightbos): noContentCallback?
  // Hand the fully-configured iframe to the shared x-origin handler,
  // which attaches it to the DOM and resolves on load.
  this.xOriginIframeHandler_ = new AMP.AmpAdXOriginIframeHandler(this);
  return this.xOriginIframeHandler_.init(iframe, /* opt_isA4A */ true);
}
/**
* Creates iframe whose src matches that of the ad URL. The response should
* have been cached causing the browser to render without callout. However,
* it is possible for cache miss to occur which can be detected server-side
* by missing ORIGIN header.
*
* Note: As of 2016-10-18, the fill-from-cache assumption appears to fail on
* Safari-on-iOS, which issues a fresh network request, even though the
* content is already in cache.
*
* @param {string} adUrl Ad request URL, as sent to #sendXhrRequest_ (i.e.,
* before any modifications that XHR module does to it.)
* @return {!Promise} awaiting ad completed insertion.
* @private
*/
renderViaCachedContentIframe_(adUrl) {
this.protectedEmitLifecycleEvent_('renderCrossDomainStart');
/** @const {!Element} */
const iframe = createElementWithAttributes(
/** @type {!Document} */(this.element.ownerDocument),
'iframe', Object.assign({
'height': this.element.getAttribute('height'),
'width': this.element.getAttribute('width'),
// XHR request modifies URL by adding origin as parameter. Need to
// append ad URL, otherwise cache will miss.
// TODO: remove call to getCorsUrl and instead have fetch API return
// modified url.
'src': xhrFor(this.win).getCorsUrl(this.win, adUrl),
}, SHARED_IFRAME_PROPERTIES));
// Can't get the attributes until we have the iframe, then set it.
const attributes = getContextMetadata(
this.win, this.element, this.sentinel);
iframe.setAttribute('name', JSON.stringify(attributes));
iframe.setAttribute('data-amp-3p-sentinel', this.sentinel);
return this.iframeRenderHelper_(iframe);
}
/**
* Render the creative via some "cross domain iframe that accepts the creative
* in the name attribute". This could be SafeFrame or the AMP-native
* NameFrame.
*
* @param {!ArrayBuffer} creativeBody
* @return {!Promise} awaiting load event for ad frame
* @private
*/
renderViaNameAttrOfXOriginIframe_(creativeBody) {
  const method = this.experimentalNonAmpCreativeRenderMethod_;
  dev().assert(method == XORIGIN_MODE.SAFEFRAME ||
      method == XORIGIN_MODE.NAMEFRAME,
      'Unrecognized A4A cross-domain rendering mode: %s', method);
  this.protectedEmitLifecycleEvent_('renderSafeFrameStart');
  // The creative travels to the frame encoded in the iframe's name
  // attribute, so it must first be decoded from bytes to a string.
  return utf8Decode(creativeBody).then(creative => {
    let srcPath;
    let nameData;
    switch (method) {
      case XORIGIN_MODE.SAFEFRAME:
        srcPath = SAFEFRAME_IMPL_PATH + '?n=0';
        // SafeFrame's name format: version;length;payload.
        nameData = `${SAFEFRAME_VERSION};${creative.length};${creative}`;
        break;
      case XORIGIN_MODE.NAMEFRAME:
        srcPath = getDefaultBootstrapBaseUrl(this.win, 'nameframe');
        nameData = '';
        // Name will be set for real below in nameframe case.
        break;
      default:
        // Shouldn't be able to get here, but... Because of the assert, above,
        // we can only get here in non-dev mode, so give user feedback.
        user().error('A4A', 'A4A received unrecognized cross-domain name'
            + ' attribute iframe rendering mode request: %s. Unable to'
            + ' render a creative for'
            + ' slot %s.', method, this.element.getAttribute('id'));
        return Promise.reject('Unrecognized rendering mode request');
    }
    /** @const {!Element} */
    const iframe = createElementWithAttributes(
        /** @type {!Document} */(this.element.ownerDocument),
        'iframe', Object.assign({
          'height': this.element.getAttribute('height'),
          'width': this.element.getAttribute('width'),
          'src': srcPath,
          'name': nameData,
        }, SHARED_IFRAME_PROPERTIES));
    if (method == XORIGIN_MODE.NAMEFRAME) {
      // TODO(bradfrizzell): change name of function and var
      const attributes = getContextMetadata(
          this.win, this.element, this.sentinel);
      attributes['creative'] = creative;
      const name = JSON.stringify(attributes);
      // Need to reassign the name once we've generated the context
      // attributes off of the iframe. Need the iframe to generate.
      iframe.setAttribute('name', name);
      iframe.setAttribute('data-amp-3p-sentinel', this.sentinel);
    }
    return this.iframeRenderHelper_(iframe);
  });
}
/**
*
* Throws {@code SyntaxError} if the metadata block delimiters are missing
* or corrupted or if the metadata content doesn't parse as JSON.
* @param {string} creative from which CSS is extracted
* @return {?CreativeMetaDataDef} Object result of parsing JSON data blob inside
* the metadata markers on the ad text, or null if no metadata markers are
* found.
* @private
* TODO(keithwrightbos@): report error cases
*/
getAmpAdMetadata_(creative) {
let metadataString = METADATA_STRING;
let metadataStart = creative.lastIndexOf(METADATA_STRING);
if (metadataStart < 0) {
metadataString = METADATA_STRING_NO_QUOTES;
metadataStart = creative.lastIndexOf(METADATA_STRING_NO_QUOTES);
}
if (metadataStart < 0) {
// Couldn't find a metadata blob.
dev().warn(TAG, this.element.getAttribute('type'),
'Could not locate start index for amp meta data in: %s', creative);
return null;
}
const metadataEnd = creative.lastIndexOf('</script>');
if (metadataEnd < 0) {
// Couldn't find a metadata blob.
dev().warn(TAG, this.element.getAttribute('type'),
'Could not locate closing script tag for amp meta data in: %s',
creative);
return null;
}
try {
const metaDataObj = JSON.parse(
creative.slice(metadataStart + metadataString.length, metadataEnd));
const ampRuntimeUtf16CharOffsets =
metaDataObj['ampRuntimeUtf16CharOffsets'];
if (!isArray(ampRuntimeUtf16CharOffsets) ||
ampRuntimeUtf16CharOffsets.length != 2 ||
typeof ampRuntimeUtf16CharOffsets[0] !== 'number' ||
typeof ampRuntimeUtf16CharOffsets[1] !== 'number') {
throw new Error('Invalid runtime offsets');
}
const metaData = {};
if (metaDataObj['customElementExtensions']) {
metaData.customElementExtensions =
metaDataObj['customElementExtensions'];
if (!isArray(metaData.customElementExtensions)) {
throw new Error(
'Invalid extensions', metaData.customElementExtensions);
}
} else {
metaData.customElementExtensions = [];
}
if (metaDataObj['customStylesheets']) {
// Expect array of objects with at least one key being 'href' whose
// value is URL.
metaData.customStylesheets = metaDataObj['customStylesheets'];
const errorMsg = 'Invalid custom stylesheets';
if (!isArray(metaData.customStylesheets)) {
throw new Error(errorMsg);
}
metaData.customStylesheets.forEach(stylesheet => {
if (!isObject(stylesheet) || !stylesheet['href'] ||
typeof stylesheet['href'] !== 'string' ||
!/^https:\/\//i.test(stylesheet['href'])) {
throw new Error(errorMsg);
}
});
}
// TODO(keithwrightbos): OK to assume ampRuntimeUtf16CharOffsets is before
// metadata as its in the head?
metaData.minifiedCreative =
creative.slice(0, ampRuntimeUtf16CharOffsets[0]) +
creative.slice(ampRuntimeUtf16CharOffsets[1], metadataStart) +
creative.slice(metadataEnd + '</script>'.length);
return metaData;
} catch (err) {
dev().warn(
TAG, this.element.getAttribute('type'), 'Invalid amp metadata: %s',
creative.slice(metadataStart + METADATA_STRING.length, metadataEnd));
return null;
}
}
/**
* Registers a click handler for "A2A" (AMP-to-AMP navigation where the AMP
* viewer navigates to an AMP destination on our behalf.
* @param {!Window} iframeWin
*/
registerAlpHandler_(iframeWin) {
if (!isExperimentOn(this.win, 'alp-for-a4a')) {
return;
}
iframeWin.document.documentElement.addEventListener('click', event => {
handleClick(event, url => {
viewerForDoc(this.getAmpDoc()).navigateTo(url, 'a4a');
});
});
}
/**
* Receive collapse notifications and record lifecycle events for them.
*
* @param unusedElement {!AmpElement}
* @override
*/
collapsedCallback(unusedElement) {
  // Record the collapse for lifecycle analytics; no other action needed.
  this.protectedEmitLifecycleEvent_('adSlotCollapsed');
}
/**
 * To be overridden by network specific implementation.
 * This function will be called for each lifecycle event as specified in the
 * LIFECYCLE_STAGES enum declaration. It may additionally pass extra
 * variables of the form { name: val }. It is up to the subclass what to
 * do with those variables.
 *
 * The base implementation is intentionally a no-op.
 *
 * @param {string} unusedEventName
 * @param {!Object<string, string|number>=} opt_extraVariables
 */
emitLifecycleEvent(unusedEventName, opt_extraVariables) {}
}
| Adtoma/amphtml | extensions/amp-a4a/0.1/amp-a4a.js | JavaScript | apache-2.0 | 54,093 |
/**
* Copyright 2010-2014 Axel Fontaine
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Private API. No compatibility guarantees provided.
*/
package org.flywaydb.core.internal.resolver.jdbc; | typischmann/flyway | flyway-core/src/main/java/org/flywaydb/core/internal/resolver/jdbc/package-info.java | Java | apache-2.0 | 718 |
/*
* Copyright 2013 Séven Le Mesle
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package fr.xebia.extras.selma.beans;
import java.util.List;
/**
* Created by slemesle on 21/06/2014.
*/
/**
 * Data-transfer object for a library: a simple container for the list of
 * book DTOs it holds. Plain bean with getter/setter access only.
 */
public class LibraryDTO {

    // Books contained in this library; may be null until set by a mapper.
    private List<BookDTO> books;

    /** @return the books in this library (may be null if never set). */
    public List<BookDTO> getBooks() {
        return books;
    }

    /** @param books the books to store; replaces any previous list. */
    public void setBooks(List<BookDTO> books) {
        this.books = books;
    }
}
| zouabimourad/selma | processor/src/test/java/fr/xebia/extras/selma/beans/LibraryDTO.java | Java | apache-2.0 | 925 |
package com.material.widget;
import android.content.Context;
import android.content.res.AssetManager;
import android.content.res.TypedArray;
import android.graphics.*;
import android.os.Build;
import android.support.annotation.NonNull;
import android.text.TextPaint;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
/**
* Created by IntelliJ IDEA.
* User: keith.
* Date: 14-10-9.
* Time: 17:04.
*/
public class PaperButton extends View {

    private static final String TAG = PaperButton.class.getSimpleName();

    // Duration (ms) of the press ripple / shadow transition.
    private static final long ANIMATION_DURATION = 200;

    // Touch state machine values driving onDraw().
    private static final int StateNormal = 1;
    private static final int StateTouchDown = 2;
    private static final int StateTouchUp = 3;

    // Default shadow geometry; overridable via XML attributes.
    private static final float SHADOW_RADIUS = 8.0f;
    private static final float SHADOW_OFFSET_X = 0.0f;
    private static final float SHADOW_OFFSET_Y = 4.0f;

    // Shadow alpha range: resting (MIN) through fully pressed (MAX).
    private static final float MIN_SHADOW_COLOR_ALPHA = 0.1f;
    private static final float MAX_SHADOW_COLOR_ALPHA = 0.4f;

    private int mState = StateNormal;
    // Timestamp (ms) of the last state change; anchors the draw animation.
    private long mStartTime;
    private int mColor;
    private int mShadowColor;
    private int mCornerRadius;
    // Outer padding reserved so the drop shadow is not clipped.
    private int mPadding;
    private int mTextSize;
    private int mTextColor;
    private float mShadowRadius;
    private float mShadowOffsetX;
    private float mShadowOffsetY;
    private CharSequence mText;
    // Lazily created button bounds; see getRectF().
    private RectF backgroundRectF;
    // View bounds captured on ACTION_DOWN, used to detect drag-out.
    private Rect mFingerRect;
    // NOTE(review): appears unused in this part of the file — presumably
    // consumed by drawing code further down; confirm before removing.
    private Path rippleClipPath;
    private boolean mMoveOutside;
    // Last touch-down position; ripple origin.
    private Point mTouchPoint = new Point();
    private Paint backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private Paint ripplePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
/** Programmatic constructor; delegates with no attributes. */
public PaperButton(Context context) {
    this(context, null);
}
/** XML-inflation constructor; delegates with default style. */
public PaperButton(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
}
/**
 * Main constructor: reads styled attributes (colors, corner radius, text,
 * font, shadow geometry), configures the three paints, and enables
 * software layering so setShadowLayer renders on Honeycomb+.
 */
public PaperButton(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    mPadding = getResources().getDimensionPixelSize(R.dimen.paper_padding);
    TypedArray attributes = context.obtainStyledAttributes(attrs, R.styleable.PaperButton);
    mColor = attributes.getColor(R.styleable.PaperButton_paper_color,
            getResources().getColor(R.color.paper_button_color));
    mShadowColor = attributes.getColor(R.styleable.PaperButton_paper_shadow_color,
            getResources().getColor(R.color.paper_button_shadow_color));
    mCornerRadius = attributes.getDimensionPixelSize(R.styleable.PaperButton_paper_corner_radius,
            getResources().getDimensionPixelSize(R.dimen.paper_button_corner_radius));
    mText = attributes.getText(R.styleable.PaperButton_paper_text);
    mTextSize = attributes.getDimensionPixelSize(R.styleable.PaperButton_paper_text_size,
            getResources().getDimensionPixelSize(R.dimen.paper_text_size));
    mTextColor = attributes.getColor(R.styleable.PaperButton_paper_text_color,
            getResources().getColor(R.color.paper_text_color));
    // Optional custom font loaded from the app's assets.
    final String assetPath = attributes.getString(R.styleable.PaperButton_paper_font);
    if (assetPath != null) {
        AssetManager assets = context.getAssets();
        Typeface typeface = Typeface.createFromAsset(assets, assetPath);
        textPaint.setTypeface(typeface);
    }
    mShadowRadius = attributes.getFloat(R.styleable.PaperButton_paper_shadow_radius, SHADOW_RADIUS);
    mShadowOffsetX = attributes.getFloat(R.styleable.PaperButton_paper_shadow_offset_x, SHADOW_OFFSET_X);
    mShadowOffsetY = attributes.getFloat(R.styleable.PaperButton_paper_shadow_offset_y, SHADOW_OFFSET_Y);
    attributes.recycle();
    backgroundPaint.setColor(mColor);
    backgroundPaint.setStyle(Paint.Style.FILL);
    // Resting-state shadow uses the minimum alpha; onDraw animates it up.
    int shadowColor = changeColorAlpha(mShadowColor, MIN_SHADOW_COLOR_ALPHA);
    backgroundPaint.setShadowLayer(mShadowRadius, mShadowOffsetX, mShadowOffsetY, shadowColor);
    textPaint.setColor(mTextColor);
    textPaint.setTextSize(mTextSize);
    textPaint.setTextAlign(TextPaint.Align.CENTER);
    // Ripple uses a darkened variant of the base color.
    ripplePaint.setColor(darkenColor(mColor));
    setWillNotDraw(false);
    // setShadowLayer only works with software rendering for arbitrary paints.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        setLayerType(View.LAYER_TYPE_SOFTWARE, null);
    }
}
/**
 * Returns {@code color} with its alpha channel scaled by {@code value}
 * (0..1); the RGB channels are left untouched.
 */
private int changeColorAlpha(int color, float value) {
    final int scaledAlpha = Math.round(Color.alpha(color) * value);
    return Color.argb(
            scaledAlpha, Color.red(color), Color.green(color), Color.blue(color));
}
/**
 * Produces a pressed/ripple shade of {@code color} by reducing its HSV
 * "value" channel to 90%.
 */
private int darkenColor(int color) {
    final float[] hsv = new float[3];
    Color.colorToHSV(color, hsv);
    hsv[2] = hsv[2] * 0.9f;
    return Color.HSVToColor(hsv);
}
/** Sets the button's base color; the ripple tracks it as a darker shade. */
public void setColor(int color) {
    mColor = color;
    backgroundPaint.setColor(mColor);
    ripplePaint.setColor(darkenColor(mColor)); // keep ripple in sync
    invalidate();
}
/** Sets the drop-shadow color and reapplies the shadow layer. */
public void setShadowColor(int color) {
    mShadowColor = color;
    // NOTE(review): unlike the constructor, this applies the raw color with
    // no MIN_SHADOW_COLOR_ALPHA scaling — confirm onDraw re-applies the
    // state-dependent alpha before the next frame.
    backgroundPaint.setShadowLayer(mShadowRadius, mShadowOffsetX, mShadowOffsetY, mShadowColor);
    invalidate();
}
/**
 * Sets the label text size and redraws.
 *
 * @param pixel text size in raw pixels (not scaled sp)
 */
public void setTextSize(int pixel) {
    mTextSize = pixel;
    textPaint.setTextSize(mTextSize);
    invalidate();
}
/**
 * Sets the label text color and redraws.
 */
public void setTextColor(int color) {
    mTextColor = color;
    textPaint.setColor(mTextColor);
    invalidate();
}
/**
 * Sets the label text and redraws.
 */
public void setText(String text){
    mText = text;
    invalidate();
}
/**
 * Returns the rounded-rect bounds of the button body, inset by
 * {@code mPadding} on every side to leave room for the shadow.
 * NOTE(review): the rect is computed lazily on first use and then cached
 * forever — it will be stale if the view is resized afterwards; confirm
 * whether it should be reset in onSizeChanged().
 */
private RectF getRectF() {
    if (backgroundRectF == null) {
        backgroundRectF = new RectF();
        backgroundRectF.left = mPadding;
        backgroundRectF.top = mPadding;
        backgroundRectF.right = getWidth() - mPadding;
        backgroundRectF.bottom = getHeight() - mPadding;
    }
    return backgroundRectF;
}
/**
 * Drives the press/release ripple state machine. The view always claims
 * the touch stream (returns true) so it can render its own feedback.
 */
@Override
public boolean onTouchEvent(@NonNull MotionEvent event) {
    switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN:
            mMoveOutside = false;
            // This view's rect in parent coordinates, used below to detect
            // the finger sliding off the button while still pressed.
            mFingerRect = new Rect(getLeft(), getTop(), getRight(), getBottom());
            mTouchPoint.set(Math.round(event.getX()), Math.round(event.getY()));
            mState = StateTouchDown;
            mStartTime = System.currentTimeMillis(); // ripple animation start time
            invalidate();
            break;
        case MotionEvent.ACTION_MOVE:
            // Event coordinates are view-local; offset by the view's position
            // so they can be compared against the parent-space rect above.
            if (!mFingerRect.contains(getLeft() + (int) event.getX(),
                    getTop() + (int) event.getY())) {
                mMoveOutside = true;
                mState = StateNormal;
                invalidate();
            }
            break;
        case MotionEvent.ACTION_UP:
            // Only count the tap if the finger never left the button.
            if (!mMoveOutside) {
                mState = StateTouchUp;
                mStartTime = System.currentTimeMillis(); // release animation start time
                invalidate();
                performClick(); // fire the click listener like a normal button
            }
            break;
        case MotionEvent.ACTION_CANCEL:
            mState = StateNormal;
            invalidate();
            break;
    }
    return true;
}
/**
 * Renders the button: shadowed rounded-rect background, an expanding
 * ripple circle clipped to the rounded rect while pressed/releasing,
 * and the centered label. Press/release animations are time-driven from
 * {@code mStartTime}; postInvalidate() keeps frames coming while animating.
 */
@Override
protected void onDraw(@NonNull Canvas canvas) {
    super.onDraw(canvas);
    int radius = 0;
    int shadowColor = changeColorAlpha(mShadowColor, MIN_SHADOW_COLOR_ALPHA);
    long elapsed = System.currentTimeMillis() - mStartTime;
    switch (mState) {
        case StateNormal:
            // Resting state: minimum shadow, no ripple.
            shadowColor = changeColorAlpha(mShadowColor, MIN_SHADOW_COLOR_ALPHA);
            break;
        case StateTouchDown:
            // Ripple grows from the touch point while the shadow deepens.
            ripplePaint.setAlpha(255);
            if (elapsed < ANIMATION_DURATION) {
                radius = Math.round(elapsed * getWidth() / 2 / ANIMATION_DURATION);
                float shadowAlpha = (MAX_SHADOW_COLOR_ALPHA - MIN_SHADOW_COLOR_ALPHA)
                        * elapsed
                        / ANIMATION_DURATION
                        + MIN_SHADOW_COLOR_ALPHA;
                shadowColor = changeColorAlpha(mShadowColor, shadowAlpha);
            } else {
                // Animation finished: hold the fully-grown ripple and max shadow.
                radius = getWidth() / 2;
                shadowColor = changeColorAlpha(mShadowColor, MAX_SHADOW_COLOR_ALPHA);
            }
            postInvalidate();
            break;
        case StateTouchUp:
            // Ripple keeps expanding while fading out; shadow eases back down.
            if (elapsed < ANIMATION_DURATION) {
                int alpha = Math.round((ANIMATION_DURATION - elapsed) * 255 / ANIMATION_DURATION);
                ripplePaint.setAlpha(alpha);
                radius = getWidth() / 2 + Math.round(elapsed * getWidth() / 2 / ANIMATION_DURATION);
                float shadowAlpha = (MAX_SHADOW_COLOR_ALPHA - MIN_SHADOW_COLOR_ALPHA)
                        * (ANIMATION_DURATION - elapsed)
                        / ANIMATION_DURATION
                        + MIN_SHADOW_COLOR_ALPHA;
                shadowColor = changeColorAlpha(mShadowColor, shadowAlpha);
            } else {
                // Release animation done: return to the resting state.
                mState = StateNormal;
                radius = 0;
                ripplePaint.setAlpha(0);
                shadowColor = changeColorAlpha(mShadowColor, MIN_SHADOW_COLOR_ALPHA);
            }
            postInvalidate();
            break;
    }
    backgroundPaint.setShadowLayer(mShadowRadius, mShadowOffsetX, mShadowOffsetY, shadowColor);
    canvas.drawRoundRect(getRectF(), mCornerRadius, mCornerRadius, backgroundPaint);
    canvas.save();
    if (mState == StateTouchDown || mState == StateTouchUp) {
        // Clip the ripple to the rounded button shape; the path is cached.
        if (rippleClipPath == null) {
            rippleClipPath = new Path();
            rippleClipPath.addRoundRect(getRectF(), mCornerRadius, mCornerRadius, Path.Direction.CW);
        }
        canvas.clipPath(rippleClipPath);
    }
    canvas.drawCircle(mTouchPoint.x, mTouchPoint.y, radius, ripplePaint);
    canvas.restore();
    if (mText != null && mText.length() > 0) {
        // Vertically center the text using the paint's ascent/descent.
        int y = (int) (getHeight() / 2 - ((textPaint.descent() + textPaint.ascent()) / 2));
        canvas.drawText(mText.toString(), getWidth() / 2, y, textPaint);
    }
}
}
| 0359xiaodong/MaterialQQLite | MaterialWidget/src/main/java/com/material/widget/PaperButton.java | Java | apache-2.0 | 10,482 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks FilterDataset input pipeline op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.contrib.data.python.ops import optimization
from tensorflow.python.client import session
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class FilterBenchmark(test.Benchmark):
  # This benchmark compares the performance of pipeline with multiple chained
  # filter with and without filter fusion.

  def benchmarkFilters(self):
    """Runs the benchmark for each chain length, unoptimized then optimized."""
    chain_lengths = [0, 1, 2, 5, 10, 20, 50]
    for chain_length in chain_lengths:
      self._benchmarkFilters(chain_length, False)
      self._benchmarkFilters(chain_length, True)

  def _benchmarkFilters(self, chain_length, optimize_dataset):
    """Times one pipeline configuration and reports the median wall time.

    Args:
      chain_length: Number of identical `filter()` stages to chain.
      optimize_dataset: If True, apply the "filter_fusion" optimization.
    """
    with ops.Graph().as_default():
      # Infinite stream of the constant 5; every filter below passes it
      # through (5 - 5 >= 0), so timing measures filter overhead only.
      dataset = dataset_ops.Dataset.from_tensors(5).repeat(None)
      for _ in range(chain_length):
        dataset = dataset.filter(lambda x: math_ops.greater_equal(x - 5, 0))
      if optimize_dataset:
        dataset = dataset.apply(optimization.optimize(["filter_fusion"]))
      iterator = dataset.make_one_shot_iterator()
      next_element = iterator.get_next()
      with session.Session() as sess:
        # Warm up the session and pipeline before timing.
        for _ in range(10):
          sess.run(next_element.op)
        deltas = []
        # 100 timed repetitions of 100 iterations each; report the median
        # per-iteration time to damp scheduling noise.
        for _ in range(100):
          start = time.time()
          for _ in range(100):
            sess.run(next_element.op)
          end = time.time()
          deltas.append(end - start)
        median_wall_time = np.median(deltas) / 100
        opt_mark = "opt" if optimize_dataset else "no-opt"
        print("Filter dataset {} chain length: {} Median wall time: {}".format(
            opt_mark, chain_length, median_wall_time))
        self.report_benchmark(
            iters=1000,
            wall_time=median_wall_time,
            name="benchmark_filter_dataset_chain_latency_{}_{}".format(
                opt_mark, chain_length))
# Standard entry point so the benchmark can be run directly as a script.
if __name__ == "__main__":
  test.main()
| kobejean/tensorflow | tensorflow/contrib/data/python/kernel_tests/filter_dataset_op_test.py | Python | apache-2.0 | 2,828 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.core.graph.command.impl;
import java.util.HashSet;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.stunner.core.command.CommandResult;
import org.kie.workbench.common.stunner.core.command.exception.BadCommandArgumentsException;
import org.kie.workbench.common.stunner.core.graph.Node;
import org.kie.workbench.common.stunner.core.graph.content.Bounds;
import org.kie.workbench.common.stunner.core.graph.content.view.View;
import org.kie.workbench.common.stunner.core.rule.RuleEvaluationContext;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link UpdateElementPropertyValueCommand}: verifies that
 * allow() succeeds without rule evaluation, that execute() swaps the property
 * value while remembering the old one, and that both fail fast when the
 * target node cannot be found in the graph index.
 */
@RunWith(MockitoJUnitRunner.class)
public class UpdateElementPropertyValueCommandTest extends AbstractGraphCommandTest {

    private static final String UUID = "testUUID";
    private static final String DEF_ID = "defId";
    private static final String PROPERTY_ID = "pId";
    private static final String PROPERTY_VALUE = "testValue1";
    private static final String PROPERTY_OLD_VALUE = "testOldValue1";

    @Mock
    private Node candidate;
    private View content;
    @Mock
    private Object definition;
    private Object property = new PropertyStub(PROPERTY_ID);

    private UpdateElementPropertyValueCommand tested;

    @Before
    @SuppressWarnings("unchecked")
    public void setup() throws Exception {
        super.init(500,
                   500);
        // Node "testUUID" carries a definition with a single stubbed property.
        content = mockView(10,
                           10,
                           50,
                           50);
        when(candidate.getContent()).thenReturn(content);
        when(content.getDefinition()).thenReturn(definition);
        Set<Object> properties = new HashSet<Object>(1) {{
            add(property);
        }};
        when(definitionAdapter.getProperties(eq(definition))).thenReturn(properties);
        when(definitionAdapter.getId(eq(definition))).thenReturn(DEF_ID);
        when(propertyAdapter.getId(eq(property))).thenReturn(PROPERTY_ID);
        when(propertyAdapter.getValue(eq(property))).thenReturn(PROPERTY_OLD_VALUE);
        when(graphIndex.getNode(eq(UUID))).thenReturn(candidate);
        when(graphIndex.get(eq(UUID))).thenReturn(candidate);
        this.tested = new UpdateElementPropertyValueCommand(UUID,
                                                            PROPERTY_ID,
                                                            PROPERTY_VALUE);
    }

    /** allow() must succeed and must not trigger any rule evaluation. */
    @Test
    @SuppressWarnings("unchecked")
    public void testAllow() {
        CommandResult<RuleViolation> result = tested.allow(graphCommandExecutionContext);
        assertEquals(CommandResult.Type.INFO,
                     result.getType());
        verify(ruleManager,
               times(0)).evaluate(eq(ruleSet),
                                  any(RuleEvaluationContext.class));
    }

    /** allow() must fail with BadCommandArgumentsException when the node is missing. */
    @Test(expected = BadCommandArgumentsException.class)
    public void testAllowNodeNotFound() {
        when(graphIndex.getNode(eq(UUID))).thenReturn(null);
        tested.allow(graphCommandExecutionContext);
    }

    /** execute() must read the old value, store it, and set the new one. */
    @Test
    @SuppressWarnings("unchecked")
    public void testExecute() {
        CommandResult<RuleViolation> result = tested.execute(graphCommandExecutionContext);
        assertEquals(CommandResult.Type.INFO,
                     result.getType());
        assertEquals(PROPERTY_OLD_VALUE,
                     tested.getOldValue());
        verify(propertyAdapter,
               times(1)).getValue(eq(property));
        verify(propertyAdapter,
               times(1)).setValue(eq(property),
                                  eq(PROPERTY_VALUE));
    }

    /** execute() must fail with BadCommandArgumentsException when the node is missing. */
    @Test(expected = BadCommandArgumentsException.class)
    public void testExecuteNodeNotFound() {
        when(graphIndex.get(eq(UUID))).thenReturn(null);
        tested.execute(graphCommandExecutionContext);
    }

    /** Minimal stand-in for a property object; identity is irrelevant to the adapters. */
    private class PropertyStub {

        private final String uuid;

        private PropertyStub(String uuid) {
            this.uuid = uuid;
        }
    }
}
| etirelli/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-core/kie-wb-common-stunner-commons/kie-wb-common-stunner-core-common/src/test/java/org/kie/workbench/common/stunner/core/graph/command/impl/UpdateElementPropertyValueCommandTest.java | Java | apache-2.0 | 5,093 |
package com.lidroid.xutils.sample;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.*;
import com.lidroid.xutils.ViewUtils;
import com.lidroid.xutils.exception.DbException;
import com.lidroid.xutils.exception.HttpException;
import com.lidroid.xutils.http.HttpHandler;
import com.lidroid.xutils.http.ResponseInfo;
import com.lidroid.xutils.http.callback.RequestCallBack;
import com.lidroid.xutils.sample.download.DownloadInfo;
import com.lidroid.xutils.sample.download.DownloadManager;
import com.lidroid.xutils.sample.download.DownloadService;
import com.lidroid.xutils.util.LogUtils;
import com.lidroid.xutils.view.annotation.ViewInject;
import com.lidroid.xutils.view.annotation.event.OnClick;
import java.io.File;
import java.lang.ref.WeakReference;
/**
* Author: wyouflf
* Date: 13-11-20
* Time: 上午12:12
*/
public class DownloadListActivity extends Activity {
@ViewInject(R.id.download_list)
private ListView downloadList; // one row per download task
private DownloadManager downloadManager; // shared manager backing all rows
private DownloadListAdapter downloadListAdapter;
private Context mAppContext; // application context, safe to retain beyond the activity
/**
 * Inflates the download-list screen, injects annotated views and wires
 * the list adapter to the shared {@link DownloadManager}.
 */
@Override // added for consistency with onResume()/onDestroy(); onCreate overrides Activity
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.download_list);
    ViewUtils.inject(this);
    mAppContext = this.getApplicationContext();
    downloadManager = DownloadService.getDownloadManager(mAppContext);
    downloadListAdapter = new DownloadListAdapter(mAppContext);
    downloadList.setAdapter(downloadListAdapter);
}
/**
 * Refreshes all rows on return to the foreground, since downloads may
 * have progressed while the activity was paused.
 */
@Override
public void onResume() {
    super.onResume();
    downloadListAdapter.notifyDataSetChanged();
}
/**
 * Persists the current download list before the activity is destroyed;
 * persistence failures are logged but do not block destruction.
 */
@Override
public void onDestroy() {
    try {
        if (downloadListAdapter != null && downloadManager != null) {
            downloadManager.backupDownloadInfoList();
        }
    } catch (DbException e) {
        LogUtils.e(e.getMessage(), e);
    }
    super.onDestroy();
}
/**
 * Adapter exposing the DownloadManager's task list to the ListView.
 * Each row gets a DownloadItemViewHolder which is also attached to the
 * download callback (via a weak user tag) so progress events can refresh
 * the correct row without leaking the view.
 */
private class DownloadListAdapter extends BaseAdapter {
    private final Context mContext;
    private final LayoutInflater mInflater;

    private DownloadListAdapter(Context context) {
        mContext = context;
        mInflater = LayoutInflater.from(mContext);
    }

    @Override
    public int getCount() {
        if (downloadManager == null) return 0;
        return downloadManager.getDownloadInfoListCount();
    }

    @Override
    public Object getItem(int i) {
        return downloadManager.getDownloadInfo(i);
    }

    @Override
    public long getItemId(int i) {
        return i; // position doubles as a stable-enough row id
    }

    @SuppressWarnings("unchecked")
    @Override
    public View getView(int i, View view, ViewGroup viewGroup) {
        DownloadItemViewHolder holder = null;
        DownloadInfo downloadInfo = downloadManager.getDownloadInfo(i);
        if (view == null) {
            // First use: inflate the row and bind a fresh holder.
            view = mInflater.inflate(R.layout.download_item, null);
            holder = new DownloadItemViewHolder(downloadInfo);
            ViewUtils.inject(holder, view);
            view.setTag(holder);
            holder.refresh();
        } else {
            // Recycled row: rebind the holder to this position's info.
            holder = (DownloadItemViewHolder) view.getTag();
            holder.update(downloadInfo);
        }
        HttpHandler<File> handler = downloadInfo.getHandler();
        if (handler != null) {
            RequestCallBack callBack = handler.getRequestCallBack();
            if (callBack instanceof DownloadManager.ManagerCallBack) {
                // Lazily attach the UI callback behind the manager's callback.
                DownloadManager.ManagerCallBack managerCallBack = (DownloadManager.ManagerCallBack) callBack;
                if (managerCallBack.getBaseCallBack() == null) {
                    managerCallBack.setBaseCallBack(new DownloadRequestCallBack());
                }
            }
            // NOTE(review): callBack is dereferenced without a null check —
            // confirm getRequestCallBack() can never return null here.
            callBack.setUserTag(new WeakReference<DownloadItemViewHolder>(holder));
        }
        return view;
    }
}
/**
 * View holder for one download row: label, state text, progress bar and
 * the stop/resume and remove buttons. The stop button is overloaded by
 * state: stop while running, resume after cancel, retry after failure.
 */
public class DownloadItemViewHolder {
    @ViewInject(R.id.download_label)
    TextView label;
    @ViewInject(R.id.download_state)
    TextView state;
    @ViewInject(R.id.download_pb)
    ProgressBar progressBar;
    @ViewInject(R.id.download_stop_btn)
    Button stopBtn;
    @ViewInject(R.id.download_remove_btn)
    Button removeBtn;
    private DownloadInfo downloadInfo;

    public DownloadItemViewHolder(DownloadInfo downloadInfo) {
        this.downloadInfo = downloadInfo;
    }

    /**
     * Stop/resume button handler; the action depends on the task state:
     * running states are stopped, cancelled/failed states are resumed.
     */
    @OnClick(R.id.download_stop_btn)
    public void stop(View view) {
        HttpHandler.State state = downloadInfo.getState();
        switch (state) {
            case WAITING:
            case STARTED:
            case LOADING:
                try {
                    downloadManager.stopDownload(downloadInfo);
                } catch (DbException e) {
                    LogUtils.e(e.getMessage(), e);
                }
                break;
            case CANCELLED:
            case FAILURE:
                try {
                    downloadManager.resumeDownload(downloadInfo, new DownloadRequestCallBack());
                } catch (DbException e) {
                    LogUtils.e(e.getMessage(), e);
                }
                downloadListAdapter.notifyDataSetChanged();
                break;
            default:
                break;
        }
    }

    /** Removes the task from the manager and refreshes the list. */
    @OnClick(R.id.download_remove_btn)
    public void remove(View view) {
        try {
            downloadManager.removeDownload(downloadInfo);
            downloadListAdapter.notifyDataSetChanged();
        } catch (DbException e) {
            LogUtils.e(e.getMessage(), e);
        }
    }

    /** Rebinds this holder to a (possibly different) download and redraws. */
    public void update(DownloadInfo downloadInfo) {
        this.downloadInfo = downloadInfo;
        refresh();
    }

    /** Syncs all widgets with the current DownloadInfo state. */
    public void refresh() {
        label.setText(downloadInfo.getFileName());
        state.setText(downloadInfo.getState().toString());
        if (downloadInfo.getFileLength() > 0) {
            progressBar.setProgress((int) (downloadInfo.getProgress() * 100 / downloadInfo.getFileLength()));
        } else {
            // Total length unknown yet: show an empty bar.
            progressBar.setProgress(0);
        }
        stopBtn.setVisibility(View.VISIBLE);
        stopBtn.setText(mAppContext.getString(R.string.stop));
        HttpHandler.State state = downloadInfo.getState();
        switch (state) {
            case WAITING:
                stopBtn.setText(mAppContext.getString(R.string.stop));
                break;
            case STARTED:
                stopBtn.setText(mAppContext.getString(R.string.stop));
                break;
            case LOADING:
                stopBtn.setText(mAppContext.getString(R.string.stop));
                break;
            case CANCELLED:
                stopBtn.setText(mAppContext.getString(R.string.resume));
                break;
            case SUCCESS:
                // Finished downloads have nothing to stop/resume.
                stopBtn.setVisibility(View.INVISIBLE);
                break;
            case FAILURE:
                stopBtn.setText(mAppContext.getString(R.string.retry));
                break;
            default:
                break;
        }
    }
}
/**
 * UI-side download callback: on every lifecycle event it refreshes the
 * row holder stashed in the callback's user tag. The holder is held via
 * a WeakReference, so a recycled/destroyed row is silently skipped.
 */
private class DownloadRequestCallBack extends RequestCallBack<File> {
    @SuppressWarnings("unchecked")
    private void refreshListItem() {
        if (userTag == null) return;
        WeakReference<DownloadItemViewHolder> tag = (WeakReference<DownloadItemViewHolder>) userTag;
        DownloadItemViewHolder holder = tag.get();
        if (holder != null) {
            holder.refresh();
        }
    }

    @Override
    public void onStart() {
        refreshListItem();
    }

    @Override
    public void onLoading(long total, long current, boolean isUploading) {
        refreshListItem();
    }

    @Override
    public void onSuccess(ResponseInfo<File> responseInfo) {
        refreshListItem();
    }

    @Override
    public void onFailure(HttpException error, String msg) {
        refreshListItem();
    }

    @Override
    public void onCancelled() {
        refreshListItem();
    }
}
} | zzspuck/SmartCity | xUtils-master/sample/src/com/lidroid/xutils/sample/DownloadListActivity.java | Java | apache-2.0 | 8,716 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.deployment.uri.scanners.ftp;
import org.apache.ignite.*;
/**
 * Exception thrown when URI-based FTP deployment fails.
 */
class GridUriDeploymentFtpException extends IgniteCheckedException {
    /** */
    private static final long serialVersionUID = 0L;

    /**
     * Creates a new FTP deployment exception with the given error message.
     *
     * @param msg Error message.
     */
    GridUriDeploymentFtpException(String msg) {
        super(msg);
    }

    /**
     * Creates a new FTP deployment exception with the given error message
     * and an optional cause.
     *
     * @param msg Error message.
     * @param cause Optional nested exception (can be {@code null}).
     */
    GridUriDeploymentFtpException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
| gridgain/apache-ignite | modules/urideploy/src/main/java/org/apache/ignite/spi/deployment/uri/scanners/ftp/GridUriDeploymentFtpException.java | Java | apache-2.0 | 1,580 |
/*
* Copyright 2012 Amadeus s.a.s.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Aggregating test suite for the aria.html widgets: element, controllers,
 * text input, checkbox, radio button, select, textarea and template tests.
 */
Aria.classDefinition({
    $classpath : "test.aria.html.HTMLTestSuite",
    $extends : "aria.jsunit.TestSuite",
    $constructor : function () {
        this.$TestSuite.constructor.call(this);
        // Register every sub-suite / test case. Order is preserved as-is;
        // it only affects report ordering, not test behavior.
        this.addTests("test.aria.html.element.ElementTestSuite");
        this.addTests("test.aria.html.controllers.suggestions.ResourcesHandlerTest");
        this.addTests("test.aria.html.textinput.TextInputTestSuite");
        this.addTests("test.aria.html.checkbox.CheckBoxTest");
        this.addTests("test.aria.html.template.basic.HtmlTemplateTestCase");
        this.addTests("test.aria.html.template.submodule.SubModuleTestCase");
        this.addTests("test.aria.html.radioButton.RadioButtonTest");
        this.addTests("test.aria.html.select.SelectTest");
        this.addTests("test.aria.html.select.bodycontent.BodyContentTestCase");
        this.addTests("test.aria.html.select.onchange.DataModelOnChangeTestCase");
        this.addTests("test.aria.html.DisabledTraitTest");
        this.addTests("test.aria.html.radioButton.ieBug.RadioButtonTestCase");
        this.addTests("test.aria.html.textarea.TextAreaTestSuite");
        this.addTests("test.aria.html.template.prematureDisposal.PrematureDisposalTest");
        this.addTests("test.aria.html.radioButton.listenerAfterDestruction.ListenerCalledAfterDestructionTest");
        this.addTests("test.aria.html.radioButton.disabled.DisabledStateTest");
    }
});
| mlaval/ariatemplates | test/aria/html/HTMLTestSuite.js | JavaScript | apache-2.0 | 2,019 |
package com.alibaba.json.bvt.bug;
import java.util.ArrayList;
import org.junit.Assert;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import junit.framework.TestCase;
/**
 * Regression test for fastjson issue #389: a List subclass
 * ({@code Def extends ArrayList<User>}) must deserialize its elements as
 * {@code User}, both via the raw Class overload and via a TypeReference.
 */
public class Bug_for_issue_389 extends TestCase {

    public void test_for_issue() throws Exception {
        Def original = new Def();
        original.add(new User());

        // An empty User serializes as "{}" inside the list.
        String json = JSON.toJSONString(original);
        Assert.assertEquals("[{}]", json);

        // Deserializing through the Class overload must restore User elements.
        Def restored = JSON.parseObject(json, Def.class);
        Assert.assertEquals(User.class, restored.get(0).getClass());
    }

    public void test_for_issue_1() throws Exception {
        Def original = new Def();
        original.add(new User());

        String json = JSON.toJSONString(original);
        Assert.assertEquals("[{}]", json);

        // Same check through the TypeReference overload.
        Def restored = JSON.parseObject(json, new TypeReference<Def>() {});
        Assert.assertEquals(User.class, restored.get(0).getClass());
    }

    /** Simple bean; both fields are null here, so it serializes as "{}". */
    public static class User {

        String name;
        String value;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getValue() {
            return value;
        }

        public void setValue(String value) {
            this.value = value;
        }
    }

    /** List subclass whose element type must be recovered during parsing. */
    public static class Def extends ArrayList<User> {
    }
}
| alibaba/fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_389.java | Java | apache-2.0 | 1,419 |
import { Nullable } from "babylonjs/types";
import { serializeAsVector3, serializeAsTexture, serialize, expandToProperty, serializeAsColor3, SerializationHelper } from "babylonjs/Misc/decorators";
import { Matrix, Vector3 } from "babylonjs/Maths/math.vector";
import { Color3 } from "babylonjs/Maths/math.color";
import { IAnimatable } from 'babylonjs/Animations/animatable.interface';
import { Tags } from "babylonjs/Misc/tags";
import { BaseTexture } from "babylonjs/Materials/Textures/baseTexture";
import { Texture } from "babylonjs/Materials/Textures/texture";
import { DynamicTexture } from "babylonjs/Materials/Textures/dynamicTexture";
import { IEffectCreationOptions } from "babylonjs/Materials/effect";
import { MaterialDefines } from "babylonjs/Materials/materialDefines";
import { MaterialHelper } from "babylonjs/Materials/materialHelper";
import { PushMaterial } from "babylonjs/Materials/pushMaterial";
import { MaterialFlags } from "babylonjs/Materials/materialFlags";
import { VertexBuffer } from "babylonjs/Buffers/buffer";
import { AbstractMesh } from "babylonjs/Meshes/abstractMesh";
import { SubMesh } from "babylonjs/Meshes/subMesh";
import { Mesh } from "babylonjs/Meshes/mesh";
import { Scene } from "babylonjs/scene";
import { RegisterClass } from 'babylonjs/Misc/typeStore';
import { EffectFallbacks } from 'babylonjs/Materials/effectFallbacks';
import "./fur.fragment";
import "./fur.vertex";
/**
 * Shader defines consumed by the fur vertex/fragment shaders
 * (see ./fur.vertex and ./fur.fragment).
 */
class FurMaterialDefines extends MaterialDefines {
    public DIFFUSE = false;
    public HEIGHTMAP = false;
    public CLIPPLANE = false;
    public CLIPPLANE2 = false;
    public CLIPPLANE3 = false;
    public CLIPPLANE4 = false;
    public CLIPPLANE5 = false;
    public CLIPPLANE6 = false;
    public ALPHATEST = false;
    public DEPTHPREPASS = false;
    public POINTSIZE = false;
    public FOG = false;
    public NORMAL = false;
    public UV1 = false;
    public UV2 = false;
    public VERTEXCOLOR = false;
    public VERTEXALPHA = false;
    public NUM_BONE_INFLUENCERS = 0;
    public BonesPerMesh = 0;
    public INSTANCES = false;
    public INSTANCESCOLOR = false;
    // Enables the animated multi-shell ("high level") fur code path.
    public HIGHLEVEL = false;
    public IMAGEPROCESSINGPOSTPROCESS = false;
    public SKIPFINALCOLORCLAMP = false;

    constructor() {
        super();
        this.rebuild();
    }
}
export class FurMaterial extends PushMaterial {
// --- Textures ---
@serializeAsTexture("diffuseTexture")
private _diffuseTexture: BaseTexture;
@expandToProperty("_markAllSubMeshesAsTexturesDirty")
public diffuseTexture: BaseTexture;
@serializeAsTexture("heightTexture")
private _heightTexture: BaseTexture;
@expandToProperty("_markAllSubMeshesAsTexturesDirty")
public heightTexture: BaseTexture;
// --- Colors ---
@serializeAsColor3()
public diffuseColor = new Color3(1, 1, 1);
// --- Fur shape/animation parameters (uploaded as shader uniforms) ---
@serialize()
public furLength: number = 1;
@serialize()
public furAngle: number = 0;
@serializeAsColor3()
public furColor = new Color3(0.44, 0.21, 0.02);
@serialize()
public furOffset: number = 0.0;
@serialize()
public furSpacing: number = 12;
@serializeAsVector3()
public furGravity = new Vector3(0, 0, 0);
@serialize()
public furSpeed: number = 100;
@serialize()
public furDensity: number = 20;
@serialize()
public furOcclusion: number = 0.0;
// Noise texture driving per-strand variation in high-level mode.
public furTexture: DynamicTexture;
@serialize("disableLighting")
private _disableLighting = false;
@expandToProperty("_markAllSubMeshesAsLightsDirty")
public disableLighting: boolean;
@serialize("maxSimultaneousLights")
private _maxSimultaneousLights = 4;
@expandToProperty("_markAllSubMeshesAsLightsDirty")
public maxSimultaneousLights: number;
// When true, the animated multi-shell fur path is used.
@serialize()
public highLevelFur: boolean = true;
// Shell meshes for high-level fur; index 0 is the source mesh itself.
public _meshes: AbstractMesh[];
// Accumulated animation time (advanced each bind by deltaTime / furSpeed).
private _furTime: number = 0;
/**
 * Creates a new fur material.
 * @param name defines the material name
 * @param scene defines the hosting scene
 */
constructor(name: string, scene?: Scene) {
    super(name, scene);
}
// Accumulated fur animation time (serialized so animation state survives save/load).
@serialize()
public get furTime() {
    return this._furTime;
}

public set furTime(furTime: number) {
    this._furTime = furTime;
}
/** Blending is needed whenever the material is not fully opaque. */
public needAlphaBlending(): boolean {
    return (this.alpha < 1.0);
}
/** Fur never uses alpha testing. */
public needAlphaTesting(): boolean {
    return false;
}
/** No alpha-test texture, consistent with needAlphaTesting() above. */
public getAlphaTestTexture(): Nullable<BaseTexture> {
    return null;
}
/**
 * Propagates the current fur settings from this (root) material to the
 * materials of every shell mesh created for the high-level fur effect.
 * Index 0 is the source mesh itself, hence the loop starts at 1.
 */
public updateFur(): void {
    for (var i = 1; i < this._meshes.length; i++) {
        var offsetFur = <FurMaterial>this._meshes[i].material;
        offsetFur.furLength = this.furLength;
        offsetFur.furAngle = this.furAngle;
        offsetFur.furGravity = this.furGravity;
        offsetFur.furSpacing = this.furSpacing;
        offsetFur.furSpeed = this.furSpeed;
        offsetFur.furColor = this.furColor;
        offsetFur.diffuseTexture = this.diffuseTexture;
        offsetFur.furTexture = this.furTexture;
        offsetFur.highLevelFur = this.highLevelFur;
        offsetFur.furTime = this.furTime;
        offsetFur.furDensity = this.furDensity;
    }
}
// Methods
/**
 * Checks whether the sub mesh can be rendered with this material:
 * updates the shader defines from the current state, (re)creates the
 * effect when the defines changed, and returns false while textures or
 * the effect are still compiling/loading.
 */
public isReadyForSubMesh(mesh: AbstractMesh, subMesh: SubMesh, useInstances?: boolean): boolean {
    if (this.isFrozen) {
        if (subMesh.effect && subMesh.effect._wasPreviouslyReady) {
            return true;
        }
    }
    if (!subMesh.materialDefines) {
        subMesh.materialDefines = new FurMaterialDefines();
    }
    var defines = <FurMaterialDefines>subMesh.materialDefines;
    var scene = this.getScene();
    if (this._isReadyForSubMesh(subMesh)) {
        return true;
    }
    var engine = scene.getEngine();
    // Textures
    if (defines._areTexturesDirty) {
        if (scene.texturesEnabled) {
            if (this.diffuseTexture && MaterialFlags.DiffuseTextureEnabled) {
                if (!this.diffuseTexture.isReady()) {
                    return false;
                } else {
                    defines._needUVs = true;
                    defines.DIFFUSE = true;
                }
            }
            // Height map is sampled in the vertex shader, so it also needs
            // vertex-texture support on the device.
            if (this.heightTexture && engine.getCaps().maxVertexTextureImageUnits) {
                if (!this.heightTexture.isReady()) {
                    return false;
                } else {
                    defines._needUVs = true;
                    defines.HEIGHTMAP = true;
                }
            }
        }
    }
    // High level
    if (this.highLevelFur !== defines.HIGHLEVEL) {
        defines.HIGHLEVEL = true;
        defines.markAsUnprocessed();
    }
    // Misc.
    MaterialHelper.PrepareDefinesForMisc(mesh, scene, false, this.pointsCloud, this.fogEnabled, this._shouldTurnAlphaTestOn(mesh), defines);
    // Lights
    defines._needNormals = MaterialHelper.PrepareDefinesForLights(scene, mesh, defines, false, this._maxSimultaneousLights, this._disableLighting);
    // Values that need to be evaluated on every frame
    MaterialHelper.PrepareDefinesForFrameBoundValues(scene, engine, defines, useInstances ? true : false);
    // Attribs
    MaterialHelper.PrepareDefinesForAttributes(mesh, defines, true, true);
    // Get correct effect
    if (defines.isDirty) {
        defines.markAsProcessed();
        scene.resetCachedMaterial();
        // Fallbacks
        var fallbacks = new EffectFallbacks();
        if (defines.FOG) {
            fallbacks.addFallback(1, "FOG");
        }
        MaterialHelper.HandleFallbacksForShadows(defines, fallbacks, this.maxSimultaneousLights);
        if (defines.NUM_BONE_INFLUENCERS > 0) {
            fallbacks.addCPUSkinningFallback(0, mesh);
        }
        defines.IMAGEPROCESSINGPOSTPROCESS = scene.imageProcessingConfiguration.applyByPostProcess;
        //Attributes
        var attribs = [VertexBuffer.PositionKind];
        if (defines.NORMAL) {
            attribs.push(VertexBuffer.NormalKind);
        }
        if (defines.UV1) {
            attribs.push(VertexBuffer.UVKind);
        }
        if (defines.UV2) {
            attribs.push(VertexBuffer.UV2Kind);
        }
        if (defines.VERTEXCOLOR) {
            attribs.push(VertexBuffer.ColorKind);
        }
        MaterialHelper.PrepareAttributesForBones(attribs, mesh, defines, fallbacks);
        MaterialHelper.PrepareAttributesForInstances(attribs, defines);
        // Legacy browser patch
        var shaderName = "fur";
        var join = defines.toString();
        var uniforms = ["world", "view", "viewProjection", "vEyePosition", "vLightsType", "vDiffuseColor",
            "vFogInfos", "vFogColor", "pointSize",
            "vDiffuseInfos",
            "mBones",
            "vClipPlane", "vClipPlane2", "vClipPlane3", "vClipPlane4", "vClipPlane5", "vClipPlane6", "diffuseMatrix",
            "furLength", "furAngle", "furColor", "furOffset", "furGravity", "furTime", "furSpacing", "furDensity", "furOcclusion"
        ];
        var samplers = ["diffuseSampler",
            "heightTexture", "furTexture"
        ];
        var uniformBuffers = new Array<string>();
        MaterialHelper.PrepareUniformsAndSamplersList(<IEffectCreationOptions>{
            uniformsNames: uniforms,
            uniformBuffersNames: uniformBuffers,
            samplers: samplers,
            defines: defines,
            maxSimultaneousLights: this.maxSimultaneousLights
        });
        subMesh.setEffect(scene.getEngine().createEffect(shaderName,
            <IEffectCreationOptions>{
                attributes: attribs,
                uniformsNames: uniforms,
                uniformBuffersNames: uniformBuffers,
                samplers: samplers,
                defines: join,
                fallbacks: fallbacks,
                onCompiled: this.onCompiled,
                onError: this.onError,
                indexParameters: { maxSimultaneousLights: this.maxSimultaneousLights }
            }, engine), defines, this._materialContext);
    }
    if (!subMesh.effect || !subMesh.effect.isReady()) {
        return false;
    }
    defines._renderId = scene.getRenderId();
    subMesh.effect._wasPreviouslyReady = true;
    return true;
}
/**
 * Uploads matrices, textures, lighting and fur uniforms to the effect
 * for this sub mesh. In high-level mode it also advances the fur
 * animation clock by deltaTime / furSpeed before uploading it.
 */
public bindForSubMesh(world: Matrix, mesh: Mesh, subMesh: SubMesh): void {
    var scene = this.getScene();
    var defines = <FurMaterialDefines>subMesh.materialDefines;
    if (!defines) {
        return;
    }
    var effect = subMesh.effect;
    if (!effect) {
        return;
    }
    this._activeEffect = effect;
    // Matrices
    this.bindOnlyWorldMatrix(world);
    this._activeEffect.setMatrix("viewProjection", scene.getTransformMatrix());
    // Bones
    MaterialHelper.BindBonesParameters(mesh, this._activeEffect);
    if (scene.getCachedMaterial() !== this) {
        // Textures
        if (this._diffuseTexture && MaterialFlags.DiffuseTextureEnabled) {
            this._activeEffect.setTexture("diffuseSampler", this._diffuseTexture);
            this._activeEffect.setFloat2("vDiffuseInfos", this._diffuseTexture.coordinatesIndex, this._diffuseTexture.level);
            this._activeEffect.setMatrix("diffuseMatrix", this._diffuseTexture.getTextureMatrix());
        }
        if (this._heightTexture) {
            this._activeEffect.setTexture("heightTexture", this._heightTexture);
        }
        // Clip plane
        MaterialHelper.BindClipPlane(this._activeEffect, scene);
        // Point size
        if (this.pointsCloud) {
            this._activeEffect.setFloat("pointSize", this.pointSize);
        }
        scene.bindEyePosition(effect);
    }
    this._activeEffect.setColor4("vDiffuseColor", this.diffuseColor, this.alpha * mesh.visibility);
    if (scene.lightsEnabled && !this.disableLighting) {
        MaterialHelper.BindLights(scene, mesh, this._activeEffect, defines, this.maxSimultaneousLights);
    }
    // View
    if (scene.fogEnabled && mesh.applyFog && scene.fogMode !== Scene.FOGMODE_NONE) {
        this._activeEffect.setMatrix("view", scene.getViewMatrix());
    }
    // Fog
    MaterialHelper.BindFogParameters(scene, mesh, this._activeEffect);
    this._activeEffect.setFloat("furLength", this.furLength);
    this._activeEffect.setFloat("furAngle", this.furAngle);
    this._activeEffect.setColor4("furColor", this.furColor, 1.0);
    if (this.highLevelFur) {
        // High-level mode: extra uniforms driving animated shell fur.
        this._activeEffect.setVector3("furGravity", this.furGravity);
        this._activeEffect.setFloat("furOffset", this.furOffset);
        this._activeEffect.setFloat("furSpacing", this.furSpacing);
        this._activeEffect.setFloat("furDensity", this.furDensity);
        this._activeEffect.setFloat("furOcclusion", this.furOcclusion);
        // Advance the animation clock; larger furSpeed means slower motion.
        this._furTime += this.getScene().getEngine().getDeltaTime() / this.furSpeed;
        this._activeEffect.setFloat("furTime", this._furTime);
        this._activeEffect.setTexture("furTexture", this.furTexture);
    }
    this._afterBind(mesh, this._activeEffect);
}
/**
 * Returns the textures of this material that carry animations so the
 * animation engine can tick them each frame.
 * @returns the animatable textures (diffuse and/or height when animated)
 */
public getAnimatables(): IAnimatable[] {
    const animatables: IAnimatable[] = [];
    const isAnimated = (texture: any) => texture && texture.animations && texture.animations.length > 0;
    if (isAnimated(this.diffuseTexture)) {
        animatables.push(this.diffuseTexture);
    }
    if (isAnimated(this.heightTexture)) {
        animatables.push(this.heightTexture);
    }
    return animatables;
}
/**
 * Returns all textures currently used by this material, including those
 * collected by the base class.
 * @returns the list of active textures
 */
public getActiveTextures(): BaseTexture[] {
    const activeTextures = super.getActiveTextures();
    for (const texture of [this._diffuseTexture, this._heightTexture]) {
        if (texture) {
            activeTextures.push(texture);
        }
    }
    return activeTextures;
}
/**
 * Checks whether the given texture is used by this material.
 * @param texture the texture to look for
 * @returns true if the texture is the diffuse texture, the height texture,
 * or one used by the base class
 */
public hasTexture(texture: BaseTexture): boolean {
    return super.hasTexture(texture)
        || this.diffuseTexture === texture
        || this._heightTexture === texture;
}
/**
 * Disposes the material, its diffuse texture and all fur shell meshes (and
 * their materials) that were generated by FurifyMesh.
 * @param forceDisposeEffect forwarded to each disposed material
 */
public dispose(forceDisposeEffect?: boolean): void {
    if (this.diffuseTexture) {
        this.diffuseTexture.dispose();
    }
    if (this._meshes) {
        // Index 0 is the caller-owned source mesh; only dispose the shells.
        for (let shellIndex = 1; shellIndex < this._meshes.length; shellIndex++) {
            const shell = this._meshes[shellIndex];
            const shellMaterial = shell.material;
            if (shellMaterial) {
                shellMaterial.dispose(forceDisposeEffect);
            }
            shell.dispose();
        }
    }
    super.dispose(forceDisposeEffect);
}
/**
 * Creates a copy of this material with the given name, in the same scene.
 * @param name name of the cloned material
 * @returns the cloned FurMaterial
 */
public clone(name: string): FurMaterial {
    const creator = () => new FurMaterial(name, this.getScene());
    return SerializationHelper.Clone(creator, this);
}
/**
 * Serializes this material to a plain JS object. When shell meshes exist,
 * the source mesh name and the shell count (quality) are stored so the fur
 * can be rebuilt on parse.
 * @returns the serialization object
 */
public serialize(): any {
    const serializationObject = super.serialize();
    serializationObject.customType = "BABYLON.FurMaterial";
    const shells = this._meshes;
    if (shells) {
        serializationObject.sourceMeshName = shells[0].name;
        serializationObject.quality = shells.length;
    }
    return serializationObject;
}
/**
 * Gets the current class name, useful for serialization and dynamic coding.
 * @returns "FurMaterial"
 */
public getClassName(): string {
    return "FurMaterial";
}
// Statics
/**
 * Creates a FurMaterial from a serialized object. When the source names a
 * mesh and high-level fur is enabled, the fur texture and shell meshes are
 * regenerated once the scene is ready.
 * @param source the serialization object
 * @param scene the scene to create the material in
 * @param rootUrl root url for resolving resources
 * @returns the parsed material
 */
public static Parse(source: any, scene: Scene, rootUrl: string): FurMaterial {
    const material = SerializationHelper.Parse(() => new FurMaterial(source.name, scene), source, scene, rootUrl);
    if (source.sourceMeshName && material.highLevelFur) {
        scene.executeWhenReady(() => {
            const sourceMesh = <Mesh>scene.getMeshByName(source.sourceMeshName);
            if (!sourceMesh) {
                return;
            }
            material.furTexture = FurMaterial.GenerateTexture("Fur Texture", scene);
            FurMaterial.FurifyMesh(sourceMesh, source.quality);
        });
    }
    return material;
}
/**
 * Generates the random noise texture used by the high-level fur shader.
 * @param name suffix appended to the texture name
 * @param scene scene the texture belongs to
 * @returns a 256x256 wrapping DynamicTexture filled with random red-tinted dots
 */
public static GenerateTexture(name: string, scene: Scene): DynamicTexture {
    const texture = new DynamicTexture("FurTexture " + name, 256, scene, true);
    const context = texture.getContext();
    const size = texture.getSize();
    for (let dot = 0; dot < 20000; ++dot) {
        const green = Math.floor(Math.random() * 255);
        const blue = Math.floor(Math.random() * 255);
        context.fillStyle = "rgba(255, " + green + ", " + blue + ", 1)";
        context.fillRect(Math.random() * size.width, Math.random() * size.height, 2, 2);
    }
    texture.update(false);
    texture.wrapU = Texture.WRAP_ADDRESSMODE;
    texture.wrapV = Texture.WRAP_ADDRESSMODE;
    return texture;
}
/**
 * Creates the shell meshes used to render the fur effect and returns them
 * (together with the source mesh at index 0) so they can be disposed later.
 * The quality is in the interval [0, 100] and is the number of shells.
 * @param sourceMesh mesh to furify; its material must be a FurMaterial
 * @param quality number of shells to create
 * @returns the array [sourceMesh, shell1, shell2, ...]
 */
public static FurifyMesh(sourceMesh: Mesh, quality: number): Mesh[] {
    const sourceMaterial = <FurMaterial>sourceMesh.material;
    if (!(sourceMaterial instanceof FurMaterial)) {
        throw "The material of the source mesh must be a Fur Material";
    }
    const scene = sourceMesh.getScene();
    const meshes = [sourceMesh];
    for (let shell = 1; shell < quality; shell++) {
        const offsetFur = new FurMaterial(sourceMaterial.name + shell, scene);
        // The constructor registered the new material on the scene; remove it
        // again so shell materials are not tracked/serialized with the scene.
        scene.materials.pop();
        Tags.EnableFor(offsetFur);
        Tags.AddTagsTo(offsetFur, "furShellMaterial");
        // Copy the fur settings from the source material onto the shell.
        offsetFur.furLength = sourceMaterial.furLength;
        offsetFur.furAngle = sourceMaterial.furAngle;
        offsetFur.furGravity = sourceMaterial.furGravity;
        offsetFur.furSpacing = sourceMaterial.furSpacing;
        offsetFur.furSpeed = sourceMaterial.furSpeed;
        offsetFur.furColor = sourceMaterial.furColor;
        offsetFur.diffuseTexture = sourceMaterial.diffuseTexture;
        // Each shell sits at a different normalized offset along the fur.
        offsetFur.furOffset = shell / quality;
        offsetFur.furTexture = sourceMaterial.furTexture;
        offsetFur.highLevelFur = sourceMaterial.highLevelFur;
        offsetFur.furTime = sourceMaterial.furTime;
        offsetFur.furDensity = sourceMaterial.furDensity;
        const offsetMesh = sourceMesh.clone(sourceMesh.name + shell) as Mesh;
        offsetMesh.material = offsetFur;
        offsetMesh.skeleton = sourceMesh.skeleton;
        offsetMesh.position = Vector3.Zero();
        meshes.push(offsetMesh);
    }
    // Parent every shell to the source mesh so they follow it.
    for (let i = 1; i < meshes.length; i++) {
        meshes[i].parent = sourceMesh;
    }
    (<FurMaterial>sourceMesh.material)._meshes = meshes;
    return meshes;
}
}
RegisterClass("BABYLON.FurMaterial", FurMaterial); | BabylonJS/Babylon.js | materialsLibrary/src/fur/furMaterial.ts | TypeScript | apache-2.0 | 19,590 |
// (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Injectable } from '@angular/core';
import { CoreContentLinksModuleListHandler } from '@core/contentlinks/classes/module-list-handler';
import { CoreContentLinksHelperProvider } from '@core/contentlinks/providers/helper';
import { TranslateService } from '@ngx-translate/core';
import { AddonModBookProvider } from './book';
/**
 * Handler to treat links to the book list page (index of book activities in
 * a course). Most behavior is inherited from CoreContentLinksModuleListHandler.
 */
@Injectable()
export class AddonModBookListLinkHandler extends CoreContentLinksModuleListHandler {
    name = 'AddonModBookListLinkHandler';

    /**
     * @param linkHelper Helper used by the base class to navigate to the list page.
     * @param translate Translation service used by the base class.
     * @param bookProvider Book service, used to check plugin availability.
     */
    constructor(linkHelper: CoreContentLinksHelperProvider, translate: TranslateService,
            protected bookProvider: AddonModBookProvider) {
        // 'AddonModBook' is the addon name, 'book' the module name handled.
        super(linkHelper, translate, 'AddonModBook', 'book');
    }

    /**
     * Check if the handler is enabled for a certain site (site + user) and a URL.
     * If not defined, defaults to true.
     *
     * @param siteId The site ID.
     * @param url The URL to treat.
     * @param params The params of the URL. E.g. 'mysite.com?id=1' -> {id: 1}
     * @param courseId Course ID related to the URL. Optional but recommended.
     * @return Whether the handler is enabled for the URL and site.
     */
    isEnabled(siteId: string, url: string, params: any, courseId?: number): boolean | Promise<boolean> {
        // Enabled whenever the book plugin itself is available on the site.
        return this.bookProvider.isPluginEnabled();
    }
}
| FMCorz/moodlemobile2 | src/addon/mod/book/providers/list-link-handler.ts | TypeScript | apache-2.0 | 1,951 |
// Copyright 2008 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview The default renderer for a goog.dom.DimensionPicker. A
* dimension picker allows the user to visually select a row and column count.
* It looks like a palette but in order to minimize DOM load it is rendered.
* using CSS background tiling instead of as a grid of nodes.
*
* @author robbyw@google.com (Robby Walker)
*/
goog.provide('goog.ui.DimensionPickerRenderer');
goog.require('goog.a11y.aria.Announcer');
goog.require('goog.a11y.aria.LivePriority');
goog.require('goog.dom');
goog.require('goog.dom.TagName');
goog.require('goog.i18n.bidi');
goog.require('goog.style');
goog.require('goog.ui.ControlRenderer');
goog.require('goog.userAgent');
goog.forwardDeclare('goog.ui.DimensionPicker');
/**
 * Default renderer for {@link goog.ui.DimensionPicker}s.  Renders the
 * palette as two divs, one with the un-highlighted background, and one with the
 * highlighted background.
 *
 * @constructor
 * @extends {goog.ui.ControlRenderer}
 */
goog.ui.DimensionPickerRenderer = function() {
  goog.ui.ControlRenderer.call(this);

  // Used to announce the currently highlighted dimensions to screen readers.
  /** @private {goog.a11y.aria.Announcer} */
  this.announcer_ = new goog.a11y.aria.Announcer();
};
goog.inherits(goog.ui.DimensionPickerRenderer, goog.ui.ControlRenderer);
goog.addSingletonGetter(goog.ui.DimensionPickerRenderer);
/**
 * Default CSS class to be applied to the root element of components rendered
 * by this renderer. Also used as the prefix for sub-element class names
 * (e.g. '-mousecatcher', '-highlighted', '-status').
 * @type {string}
 */
goog.ui.DimensionPickerRenderer.CSS_CLASS =
    goog.getCssName('goog-dimension-picker');
/**
 * Returns the div showing the un-highlighted (deselected) grid background
 * for the given outer element.
 * @param {Element} element The root element.
 * @return {Element} The underlying div.
 * @private
 */
goog.ui.DimensionPickerRenderer.prototype.getUnderlyingDiv_ = function(
    element) {
  var layerStack = element.firstChild;
  return /** @type {Element} */ (layerStack.childNodes[1]);
};
/**
 * Returns the div showing the highlighted (selected) grid background for the
 * given outer element.
 * @param {Element} element The root element.
 * @return {Element} The highlight div.
 * @private
 */
goog.ui.DimensionPickerRenderer.prototype.getHighlightDiv_ = function(element) {
  var layerStack = element.firstChild;
  return /** @type {Element} */ (layerStack.lastChild);
};
/**
 * Returns the div holding the textual status message (e.g. "3 x 4") for the
 * given outer element.
 * @param {Element} element The root element.
 * @return {Element} The status message div.
 * @private
 */
goog.ui.DimensionPickerRenderer.prototype.getStatusDiv_ = function(element) {
  var statusDiv = element.lastChild;
  return /** @type {Element} */ (statusDiv);
};
/**
 * Returns the invisible mouse catching div for the given outer element.
 * @param {Element} element The root element.
 * @return {Element} The invisible mouse catching div.
 * @private
 */
goog.ui.DimensionPickerRenderer.prototype.getMouseCatcher_ = function(element) {
  var layerStack = element.firstChild;
  return /** @type {Element} */ (layerStack.firstChild);
};
/**
 * Overrides {@link goog.ui.ControlRenderer#canDecorate} to allow decorating
 * empty DIVs only.
 * @param {Element} element The element to check.
 * @return {boolean} Whether the element is an empty div.
 * @override
 */
goog.ui.DimensionPickerRenderer.prototype.canDecorate = function(element) {
  if (element.tagName != goog.dom.TagName.DIV) {
    return false;
  }
  // Only an element with no children can be decorated.
  return !element.firstChild;
};
/**
 * Overrides {@link goog.ui.ControlRenderer#decorate} to decorate empty DIVs:
 * runs the base decoration, then fills the div with the picker's layers and
 * sizes it.
 * @param {goog.ui.Control} control goog.ui.DimensionPicker to decorate.
 * @param {Element} element The element to decorate.
 * @return {Element} The decorated element.
 * @override
 */
goog.ui.DimensionPickerRenderer.prototype.decorate = function(
    control, element) {
  var picker = /** @type {goog.ui.DimensionPicker} */ (control);
  goog.ui.DimensionPickerRenderer.superClass_.decorate.call(
      this, picker, element);
  this.addElementContents_(picker, element);
  this.updateSize(picker, element);
  return element;
};
/**
 * Scales the root and underlying divs (in em units) to match the palette's
 * current size, and fixes up RTL positioning when needed.
 * @param {goog.ui.DimensionPicker} palette The palette object.
 * @param {Element} element The element to set the style of.
 */
goog.ui.DimensionPickerRenderer.prototype.updateSize = function(
    palette, element) {
  var size = palette.getSize();
  var widthEm = size.width + 'em';
  element.style.width = widthEm;
  var underlying = this.getUnderlyingDiv_(element);
  underlying.style.width = widthEm;
  underlying.style.height = size.height + 'em';
  if (palette.isRightToLeft()) {
    this.adjustParentDirection_(palette, element);
  }
};
/**
 * Adds the appropriate content elements to the given outer DIV: a stack of
 * three layered divs (mouse catcher, unhighlighted background, highlighted
 * background) plus a status div for the textual state.
 * @param {goog.ui.DimensionPicker} palette The palette object.
 * @param {Element} element The element to decorate.
 * @private
 */
goog.ui.DimensionPickerRenderer.prototype.addElementContents_ = function(
    palette, element) {
  var dom = palette.getDomHelper();
  var baseClass = this.getCssClass();
  // Bottom layer catches mouse events. Document-level mouse move detection is
  // not used because events could be lost to iframes (notably Firefox 2 where
  // TrogEdit creates iframes).
  var mouseCatcher = dom.createDom(
      goog.dom.TagName.DIV, goog.getCssName(baseClass, 'mousecatcher'));
  // Middle layer uses a css tiled background image for deselected tiles.
  var unhighlighted = dom.createDom(goog.dom.TagName.DIV, {
    'class': goog.getCssName(baseClass, 'unhighlighted'),
    'style': 'width:100%;height:100%'
  });
  // Top layer uses a different css tiled background image for selected tiles.
  var highlighted = dom.createDom(
      goog.dom.TagName.DIV, goog.getCssName(baseClass, 'highlighted'));
  element.appendChild(
      dom.createDom(
          goog.dom.TagName.DIV,
          {'style': 'width:100%;height:100%;touch-action:none;'},
          mouseCatcher, unhighlighted, highlighted));
  // Lastly, a div that stores the text version of the current state.
  element.appendChild(
      dom.createDom(
          goog.dom.TagName.DIV, goog.getCssName(baseClass, 'status')));
};
/**
 * Creates the root div, fills it with the picker's layers and sizes it.
 * @param {goog.ui.Control} control Picker to render.
 * @return {!Element} Root element for the palette.
 * @override
 */
goog.ui.DimensionPickerRenderer.prototype.createDom = function(control) {
  var picker = /** @type {goog.ui.DimensionPicker} */ (control);
  var classNames = this.getClassNames(picker);
  var attributes = {
    'class': classNames ? classNames.join(' ') : '',
    // Hide the element from screen readers so they don't announce "1 of 1"
    // for the perceived number of items in the palette.
    'aria-hidden': 'true'
  };
  var element =
      picker.getDomHelper().createDom(goog.dom.TagName.DIV, attributes);
  this.addElementContents_(picker, element);
  this.updateSize(picker, element);
  return element;
};
/**
 * Initializes the control's DOM when the control enters the document. Called
 * from {@link goog.ui.Control#enterDocument}. Syncs the highlight with the
 * picker's current value and positions the mouse catcher.
 * @param {goog.ui.Control} control Palette whose DOM is to be
 *     initialized as it enters the document.
 * @override
 */
goog.ui.DimensionPickerRenderer.prototype.initializeDom = function(control) {
  var picker = /** @type {goog.ui.DimensionPicker} */ (control);
  goog.ui.DimensionPickerRenderer.superClass_.initializeDom.call(this, picker);
  // Make the displayed highlighted size match the dimension picker's value.
  var value = picker.getValue();
  this.setHighlightedSize(picker, value.width, value.height);
  this.positionMouseCatcher(picker);
};
/**
 * Get the element to listen for mouse move events on (the layered stack div).
 * @param {goog.ui.DimensionPicker} palette The palette to listen on.
 * @return {Element} The element to listen for mouse move events on.
 */
goog.ui.DimensionPickerRenderer.prototype.getMouseMoveElement = function(
    palette) {
  var layerStack = palette.getElement().firstChild;
  return /** @type {Element} */ (layerStack);
};
/**
 * Returns the x offset into the grid for the given mouse x position,
 * capped at the palette's maximum column count.
 * @param {goog.ui.DimensionPicker} palette The table size palette.
 * @param {number} x The mouse event x position.
 * @return {number} The x offset into the grid.
 */
goog.ui.DimensionPickerRenderer.prototype.getGridOffsetX = function(
    palette, x) {
  // TODO(robbyw): Don't rely on magic 18 - measure each palette's em size.
  var column = Math.ceil(x / 18);
  return Math.min(palette.maxColumns, column);
};
/**
 * Returns the y offset into the grid for the given mouse y position,
 * capped at the palette's maximum row count.
 * @param {goog.ui.DimensionPicker} palette The table size palette.
 * @param {number} y The mouse event y position.
 * @return {number} The y offset into the grid.
 */
goog.ui.DimensionPickerRenderer.prototype.getGridOffsetY = function(
    palette, y) {
  var row = Math.ceil(y / 18);
  return Math.min(palette.maxRows, row);
};
/**
 * Sets the highlighted size, announces it to assistive technology, and
 * updates the status text. Does nothing if the palette hasn't been rendered.
 * @param {goog.ui.DimensionPicker} palette The table size palette.
 * @param {number} columns The number of columns to highlight.
 * @param {number} rows The number of rows to highlight.
 */
goog.ui.DimensionPickerRenderer.prototype.setHighlightedSize = function(
    palette, columns, rows) {
  var element = palette.getElement();
  if (!element) {
    // Can't update anything if DimensionPicker hasn't been rendered.
    return;
  }
  // Style the highlight div.
  var highlightStyle = this.getHighlightDiv_(element).style;
  highlightStyle.width = columns + 'em';
  highlightStyle.height = rows + 'em';
  // Explicitly set style.right so the element grows to the left when it
  // increases in width.
  if (palette.isRightToLeft()) {
    highlightStyle.right = '0';
  }

  /**
   * @desc The dimension of the columns and rows currently selected in the
   * dimension picker, as text that can be spoken by a screen reader.
   */
  var MSG_DIMENSION_PICKER_HIGHLIGHTED_DIMENSIONS = goog.getMsg(
      '{$numCols} by {$numRows}',
      {'numCols': String(columns), 'numRows': String(rows)});
  this.announcer_.say(
      MSG_DIMENSION_PICKER_HIGHLIGHTED_DIMENSIONS,
      goog.a11y.aria.LivePriority.ASSERTIVE);

  // Update the size text, forcing LTR so "3 x 4" reads correctly in RTL.
  goog.dom.setTextContent(
      this.getStatusDiv_(element),
      goog.i18n.bidi.enforceLtrInText(columns + ' x ' + rows));
};
/**
 * Position the mouse catcher such that it receives mouse events past the
 * selected size up to the maximum size, taking care not to introduce
 * scrollbars. Should be called on enter document and when the window
 * changes size.
 * @param {goog.ui.DimensionPicker} palette The table size palette.
 */
goog.ui.DimensionPickerRenderer.prototype.positionMouseCatcher = function(
    palette) {
  var mouseCatcher = this.getMouseCatcher_(palette.getElement());
  var doc = goog.dom.getOwnerDocument(mouseCatcher);
  var body = doc.body;
  var position = goog.style.getRelativePosition(mouseCatcher, body);

  // Hide the catcher while measuring so it doesn't affect the body's scroll
  // size.
  mouseCatcher.style.display = 'none';

  // Maximum width (in ems) the catcher can take without causing scrolling.
  var xAvailableEm;
  if (palette.isRightToLeft() && position.x > 0) {
    xAvailableEm = Math.floor(position.x / 18);
  } else {
    xAvailableEm = Math.floor((body.scrollWidth - position.x) / 18);
  }

  // Available height is trickier - the window's inner height matters too.
  var height;
  if (goog.userAgent.IE) {
    // Offset 20px to make up for scrollbar size.
    height = goog.style.getClientViewportElement(body).scrollHeight - 20;
  } else {
    var win = goog.dom.getWindow(doc);
    // Offset 20px to make up for scrollbar size.
    height = Math.max(win.innerHeight, body.scrollHeight) - 20;
  }
  var yAvailableEm = Math.floor((height - position.y) / 18);

  // Resize and re-display the mouse catcher.
  mouseCatcher.style.width = Math.min(palette.maxColumns, xAvailableEm) + 'em';
  mouseCatcher.style.height = Math.min(palette.maxRows, yAvailableEm) + 'em';
  mouseCatcher.style.display = '';

  // Explicitly set style.right so the mouse catcher is positioned on the left
  // side instead of right.
  if (palette.isRightToLeft()) {
    mouseCatcher.style.right = '0';
  }
};
/**
 * Returns the CSS class to be applied to the root element of components
 * rendered using this renderer.
 * @return {string} Renderer-specific CSS class.
 * @override
 */
goog.ui.DimensionPickerRenderer.prototype.getCssClass = function() {
  return goog.ui.DimensionPickerRenderer.CSS_CLASS;
};
/**
 * Adjusts the positioning from 'left'/'top' to 'right'/'top' as appropriate
 * for an RTL control, so that when the dimension picker grows in width the
 * containing element grows to the left instead of to the right. This would
 * be unnecessary if goog.ui.SubMenu rendering code positioned RTL controls
 * with 'right' and 'top'.
 * @private
 *
 * @param {goog.ui.DimensionPicker} palette The palette object.
 * @param {Element} element The palette's element.
 */
goog.ui.DimensionPickerRenderer.prototype.adjustParentDirection_ = function(
    palette, element) {
  var parent = palette.getParent();
  if (!parent) {
    goog.style.setStyle(element, 'right', '0px');
    return;
  }

  var parentElement = parent.getElement();
  // Anchor the containing element to the right so it grows to the left when
  // it increases in width.
  var right = goog.style.getStyle(parentElement, 'right');
  if (right == '') {
    var parentPos = goog.style.getPosition(parentElement);
    var parentSize = goog.style.getSize(parentElement);
    if (parentSize.width != 0 && parentPos.x != 0) {
      var visibleRect =
          goog.style.getBounds(goog.style.getClientViewportElement());
      right = visibleRect.width - parentPos.x - parentSize.width;
      goog.style.setStyle(parentElement, 'right', right + 'px');
    }
  }
  // When a table is inserted, the containing element's position is
  // recalculated the next time it shows; reset left to '' to prevent extra
  // white space on the left.
  var left = goog.style.getStyle(parentElement, 'left');
  if (left != '') {
    goog.style.setStyle(parentElement, 'left', '');
  }
};
| teppeis/closure-library | closure/goog/ui/dimensionpickerrenderer.js | JavaScript | apache-2.0 | 14,821 |
/*
* Copyright 2012 Timothy Lin <lzh9102@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package zxinggui.generator;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.SpringLayout;
import java.lang.String;
/**
 * Generator that encodes free-form plain text. Acts as the fallback
 * generator: its parsing priority is 0, so any non-empty text is accepted.
 */
public class PlainTextGenerator implements GeneratorInterface {
	private JPanel panel = new JPanel();
	private JTextArea textarea = new JTextArea();

	// Shorthand aliases for the SpringLayout edge constants.
	static final String NORTH = SpringLayout.NORTH;
	static final String SOUTH = SpringLayout.SOUTH;
	static final String EAST = SpringLayout.EAST;
	static final String WEST = SpringLayout.WEST;

	public PlainTextGenerator() {
		final SpringLayout springLayout = new SpringLayout();
		final JLabel caption = new JLabel("Text: ");
		final JScrollPane scroller = new JScrollPane(textarea);

		textarea.setLineWrap(true);
		panel.setLayout(springLayout);
		panel.add(caption);
		panel.add(scroller);

		// Pin the caption to the top-left corner of the panel.
		springLayout.putConstraint(NORTH, caption, 5, NORTH, panel);
		springLayout.putConstraint(WEST, caption, 5, WEST, panel);
		// The scroll pane fills the remaining space below the caption.
		springLayout.putConstraint(NORTH, scroller, 5, SOUTH, caption);
		springLayout.putConstraint(SOUTH, scroller, -5, SOUTH, panel);
		springLayout.putConstraint(EAST, scroller, -5, EAST, panel);
		springLayout.putConstraint(WEST, scroller, 5, WEST, panel);
	}

	/** @return the editor panel shown for this generator */
	@Override
	public JPanel getPanel() {
		return panel;
	}

	/** @return the display name of this generator */
	@Override
	public String getName() {
		return "Plain Text";
	}

	/**
	 * Returns the text to encode.
	 * @throws GeneratorException if the text area is empty
	 */
	@Override
	public String getText() throws GeneratorException {
		final String content = textarea.getText();
		if (content.isEmpty()) {
			throw new GeneratorException("Text cannot be empty.", textarea);
		}
		return content;
	}

	/** Moves keyboard focus into the text area. */
	@Override
	public void setFocus() {
		textarea.requestFocusInWindow();
	}

	/** Lowest priority: always fall back to plain text. */
	@Override
	public int getParsingPriority() {
		return 0;
	}

	/**
	 * Accepts any non-empty text; optionally writes it into the text area.
	 * @param text the candidate text
	 * @param write whether to populate the editor with the text
	 * @return true if the text was accepted (i.e. non-empty)
	 */
	@Override
	public boolean parseText(String text, boolean write) {
		if (text.isEmpty()) {
			return false;
		}
		if (write) {
			textarea.setText(text);
		}
		return true;
	}
}
| adrianommelo/qrcode-desktop | src/zxinggui/generator/PlainTextGenerator.java | Java | apache-2.0 | 2,466 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.allocation;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* A {@code ClusterAllocationExplanation} is an explanation of why a shard may or may not be allocated to nodes. It also includes weights
* for where the shard is likely to be assigned. It is an immutable class
*/
/**
 * A {@code ClusterAllocationExplanation} is an explanation of why a shard may or may not be allocated to nodes. It also includes weights
 * for where the shard is likely to be assigned. It is an immutable class.
 *
 * NOTE: the wire format is defined by the read order in the StreamInput
 * constructor and the write order in {@link #writeTo}; the two must stay in
 * sync.
 */
public final class ClusterAllocationExplanation implements ToXContent, Writeable<ClusterAllocationExplanation> {

    // The shard this explanation is about.
    private final ShardId shard;
    // Whether the explained shard is the primary copy.
    private final boolean primary;
    // Id of the node the shard is assigned to, or null when unassigned.
    private final String assignedNodeId;
    // Allocation decision per node.
    private final Map<DiscoveryNode, Decision> nodeToDecision;
    // Balancer weight per node; higher means the balancer prefers that node.
    private final Map<DiscoveryNode, Float> nodeWeights;
    // Unassignment metadata, or null when the shard is assigned.
    private final UnassignedInfo unassignedInfo;
    // Nanoseconds remaining of the configured allocation delay.
    private final long remainingDelayNanos;

    /**
     * Deserializing constructor. The read order must mirror the write order
     * in {@link #writeTo(StreamOutput)}.
     */
    public ClusterAllocationExplanation(StreamInput in) throws IOException {
        this.shard = ShardId.readShardId(in);
        this.primary = in.readBoolean();
        this.assignedNodeId = in.readOptionalString();
        this.unassignedInfo = in.readOptionalWriteable(UnassignedInfo::new);

        // Read the per-node decisions: a count followed by (node, decision) pairs.
        Map<DiscoveryNode, Decision> ntd = null;
        int size = in.readVInt();
        ntd = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            DiscoveryNode dn = new DiscoveryNode(in);
            Decision decision = Decision.readFrom(in);
            ntd.put(dn, decision);
        }
        this.nodeToDecision = ntd;

        // Read the per-node weights: a count followed by (node, weight) pairs.
        Map<DiscoveryNode, Float> ntw = null;
        size = in.readVInt();
        ntw = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            DiscoveryNode dn = new DiscoveryNode(in);
            float weight = in.readFloat();
            ntw.put(dn, weight);
        }
        this.nodeWeights = ntw;
        remainingDelayNanos = in.readVLong();
    }

    /**
     * Builds an explanation from already-computed values. Null maps are
     * normalized to empty maps.
     */
    public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId,
                                        UnassignedInfo unassignedInfo, Map<DiscoveryNode, Decision> nodeToDecision,
                                        Map<DiscoveryNode, Float> nodeWeights, long remainingDelayNanos) {
        this.shard = shard;
        this.primary = primary;
        this.assignedNodeId = assignedNodeId;
        this.unassignedInfo = unassignedInfo;
        this.nodeToDecision = nodeToDecision == null ? Collections.emptyMap() : nodeToDecision;
        this.nodeWeights = nodeWeights == null ? Collections.emptyMap() : nodeWeights;
        this.remainingDelayNanos = remainingDelayNanos;
    }

    /** Return the shard that this explanation is about. */
    public ShardId getShard() {
        return this.shard;
    }

    /** Return true if the explained shard is the primary. */
    public boolean isPrimary() {
        return this.primary;
    }

    /** Return true if the shard is assigned to a node */
    public boolean isAssigned() {
        return this.assignedNodeId != null;
    }

    /** Return the assigned node id or null if not assigned */
    @Nullable
    public String getAssignedNodeId() {
        return this.assignedNodeId;
    }

    /** Return the unassigned info for the shard or null if the shard is assigned */
    @Nullable
    public UnassignedInfo getUnassignedInfo() {
        return this.unassignedInfo;
    }

    /** Return a map of node to decision for shard allocation */
    public Map<DiscoveryNode, Decision> getNodeDecisions() {
        return this.nodeToDecision;
    }

    /**
     * Return a map of node to balancer "weight" for allocation. Higher weights mean the balancer wants to allocated the shard to that node
     * more
     */
    public Map<DiscoveryNode, Float> getNodeWeights() {
        return this.nodeWeights;
    }

    /** Return the remaining allocation delay for this shard in nanoseconds */
    public long getRemainingDelayNanos() {
        return this.remainingDelayNanos;
    }

    /** Renders the explanation as structured XContent (e.g. JSON). */
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(); {
            builder.startObject("shard"); {
                builder.field("index", shard.getIndexName());
                builder.field("index_uuid", shard.getIndex().getUUID());
                builder.field("id", shard.getId());
                builder.field("primary", primary);
            }
            builder.endObject(); // end shard
            builder.field("assigned", this.assignedNodeId != null);
            // If assigned, show the node id of the node it's assigned to
            if (assignedNodeId != null) {
                builder.field("assigned_node_id", this.assignedNodeId);
            }
            // If we have unassigned info, show that
            if (unassignedInfo != null) {
                unassignedInfo.toXContent(builder, params);
                long delay = unassignedInfo.getLastComputedLeftDelayNanos();
                builder.field("allocation_delay", TimeValue.timeValueNanos(delay));
                builder.field("allocation_delay_ms", TimeValue.timeValueNanos(delay).millis());
                builder.field("remaining_delay", TimeValue.timeValueNanos(remainingDelayNanos));
                builder.field("remaining_delay_ms", TimeValue.timeValueNanos(remainingDelayNanos).millis());
            }
            // One object per node with its name, attributes, decision and weight.
            builder.startObject("nodes");
            for (Map.Entry<DiscoveryNode, Float> entry : nodeWeights.entrySet()) {
                DiscoveryNode node = entry.getKey();
                builder.startObject(node.getId()); {
                    builder.field("node_name", node.getName());
                    builder.startObject("node_attributes"); {
                        for (Map.Entry<String, String> attrEntry : node.getAttributes().entrySet()) {
                            builder.field(attrEntry.getKey(), attrEntry.getValue());
                        }
                    }
                    builder.endObject(); // end attributes
                    Decision d = nodeToDecision.get(node);
                    if (node.getId().equals(assignedNodeId)) {
                        builder.field("final_decision", "CURRENTLY_ASSIGNED");
                    } else {
                        builder.field("final_decision", d.type().toString());
                    }
                    builder.field("weight", entry.getValue());
                    d.toXContent(builder, params);
                }
                builder.endObject(); // end node <uuid>
            }
            builder.endObject(); // end nodes
        }
        builder.endObject(); // end wrapping object
        return builder;
    }

    @Override
    public ClusterAllocationExplanation readFrom(StreamInput in) throws IOException {
        return new ClusterAllocationExplanation(in);
    }

    /** Serializes in the order mirrored by the StreamInput constructor. */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        this.getShard().writeTo(out);
        out.writeBoolean(this.isPrimary());
        out.writeOptionalString(this.getAssignedNodeId());
        out.writeOptionalWriteable(this.getUnassignedInfo());

        Map<DiscoveryNode, Decision> ntd = this.getNodeDecisions();
        out.writeVInt(ntd.size());
        for (Map.Entry<DiscoveryNode, Decision> entry : ntd.entrySet()) {
            entry.getKey().writeTo(out);
            Decision.writeTo(entry.getValue(), out);
        }
        Map<DiscoveryNode, Float> ntw = this.getNodeWeights();
        out.writeVInt(ntw.size());
        for (Map.Entry<DiscoveryNode, Float> entry : ntw.entrySet()) {
            entry.getKey().writeTo(out);
            out.writeFloat(entry.getValue());
        }
        out.writeVLong(remainingDelayNanos);
    }
}
| mmaracic/elasticsearch | core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java | Java | apache-2.0 | 8,925 |
// Copyright John Maddock 2012.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// This tests that cpp_dec_float_50 meets our
// conceptual requirements when used with Boost.Math.
//
#ifdef _MSC_VER
# define _SCL_SECURE_NO_WARNINGS
# pragma warning(disable:4800)
# pragma warning(disable:4512)
# pragma warning(disable:4127)
# pragma warning(disable:4512)
# pragma warning(disable:4503) // decorated name length exceeded, name was truncated
#endif
#if !defined(TEST_MPF_50) && !defined(TEST_BACKEND) && !defined(TEST_MPZ) \
&& !defined(TEST_CPP_DEC_FLOAT) && !defined(TEST_MPFR_50)\
&& !defined(TEST_MPFR_6) && !defined(TEST_MPFR_15) && !defined(TEST_MPFR_17) \
&& !defined(TEST_MPFR_30) && !defined(TEST_CPP_DEC_FLOAT_NO_ET) && !defined(TEST_LOGGED_ADAPTER)
# define TEST_MPF_50
# define TEST_BACKEND
# define TEST_MPZ
# define TEST_MPFR_50
# define TEST_MPFR_6
# define TEST_MPFR_15
# define TEST_MPFR_17
# define TEST_MPFR_30
# define TEST_CPP_DEC_FLOAT
# define TEST_CPP_DEC_FLOAT_NO_ET
# define TEST_LOGGED_ADAPTER
#ifdef _MSC_VER
#pragma message("CAUTION!!: No backend type specified so testing everything.... this will take some time!!")
#endif
#ifdef __GNUC__
#pragma warning "CAUTION!!: No backend type specified so testing everything.... this will take some time!!"
#endif
#endif
#if defined(TEST_MPF_50) || defined(TEST_MPZ)
#include <boost/multiprecision/gmp.hpp>
#endif
#ifdef TEST_BACKEND
#include <boost/multiprecision/concepts/mp_number_archetypes.hpp>
#endif
#if defined(TEST_CPP_DEC_FLOAT) || defined(TEST_CPP_DEC_FLOAT_NO_ET) || defined(TEST_LOGGED_ADAPTER)
#include <boost/multiprecision/cpp_dec_float.hpp>
#endif
#if defined(TEST_MPFR_50) || defined(TEST_MPFR_6) || defined(TEST_MPFR_15) || defined(TEST_MPFR_17) || defined(TEST_MPFR_30)
#include <boost/multiprecision/mpfr.hpp>
#endif
#ifdef TEST_LOGGED_ADAPTER
#include <boost/multiprecision/logged_adaptor.hpp>
#endif
#include <boost/math/special_functions.hpp>
// Concept check: every gamma-family special function in Boost.Math must
// compile when instantiated with the multiprecision type T. The values are
// deliberately left uninitialized -- this function is never executed, it
// only has to compile.
template <class T>
void test_extra(T)
{
   T a, b, c;
   int k;
   boost::math::tgamma(a);
   boost::math::tgamma1pm1(a);
   boost::math::lgamma(a);
   boost::math::lgamma(a, &k);
   boost::math::digamma(a);
   boost::math::tgamma_ratio(a, b);
   boost::math::tgamma_delta_ratio(a, b);
   boost::math::factorial<T>(k);
   boost::math::unchecked_factorial<T>(k);
   k = boost::math::max_factorial<T>::value;
   boost::math::double_factorial<T>(k);
   boost::math::rising_factorial(a, k);
   boost::math::falling_factorial(a, k);
#ifndef SLOW_COMPILER
   // The incomplete gamma / beta family is the expensive part of the
   // instantiation, so it can be skipped on slow compilers.
   boost::math::tgamma(a, b);
   boost::math::tgamma_lower(a, b);
   boost::math::gamma_p(a, b);
   boost::math::gamma_q(a, b);
   boost::math::gamma_p_inv(a, b);
   boost::math::gamma_q_inv(a, b);
   boost::math::gamma_p_inva(a, b);
   boost::math::gamma_q_inva(a, b);
   boost::math::erf(a);
   boost::math::erfc(a);
   boost::math::erf_inv(a);
   boost::math::erfc_inv(a);
   boost::math::beta(a, b);
   boost::math::beta(a, b, c);
   boost::math::betac(a, b, c);
   boost::math::ibeta(a, b, c);
   boost::math::ibetac(a, b, c);
   boost::math::ibeta_inv(a, b, c);
   boost::math::ibetac_inv(a, b, c);
   boost::math::ibeta_inva(a, b, c);
   boost::math::ibetac_inva(a, b, c);
   boost::math::ibeta_invb(a, b, c);
   boost::math::ibetac_invb(a, b, c);
   boost::math::gamma_p_derivative(b, c);
   boost::math::ibeta_derivative(a, b, c);
#endif
}
// Instantiates test_extra() once for each backend type selected via the
// TEST_* macros, forcing the compiler to check that each multiprecision
// type satisfies the conceptual requirements of Boost.Math's gamma family.
// This function is never called at runtime with meaningful effect; it
// exists purely to trigger template instantiation.
void foo()
{
#ifdef TEST_BACKEND
   test_extra(boost::multiprecision::concepts::mp_number_float_architype());
#endif
#ifdef TEST_MPF_50
   test_extra(boost::multiprecision::mpf_float_50());
#endif
#ifdef TEST_MPFR_50
   test_extra(boost::multiprecision::mpfr_float_50());
#endif
#ifdef TEST_MPFR_6
   test_extra(boost::multiprecision::number<boost::multiprecision::mpfr_float_backend<6> >());
#endif
#ifdef TEST_MPFR_15
   test_extra(boost::multiprecision::number<boost::multiprecision::mpfr_float_backend<15> >());
#endif
#ifdef TEST_MPFR_17
   test_extra(boost::multiprecision::number<boost::multiprecision::mpfr_float_backend<17> >());
#endif
#ifdef TEST_MPFR_30
   test_extra(boost::multiprecision::number<boost::multiprecision::mpfr_float_backend<30> >());
#endif
#ifdef TEST_CPP_DEC_FLOAT
   test_extra(boost::multiprecision::cpp_dec_float_50());
#endif
#ifdef TEST_CPP_DEC_FLOAT_NO_ET
   // Same backend as above but with expression templates disabled.
   test_extra(boost::multiprecision::number<boost::multiprecision::cpp_dec_float<100>, boost::multiprecision::et_off>());
#endif
#ifdef TEST_LOGGED_ADAPTER
   // Wraps cpp_dec_float in the logging adaptor to check the adaptor also
   // satisfies the concepts.
   typedef boost::multiprecision::number<boost::multiprecision::logged_adaptor<boost::multiprecision::cpp_dec_float<50> > > num_t;
   test_extra(num_t());
#endif
}
// Entry point: simply triggers the concept-check instantiations.
int main()
{
   foo();
   return 0;
}
| ryancoleman/autodock-vina | boost_1_54_0/libs/multiprecision/test/concepts/sf_concept_check_gamma.cpp | C++ | apache-2.0 | 4,825 |
/*
Derby - Class com.pivotal.gemfirexd.internal.impl.sql.compile.LOBTypeCompiler
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.sql.compile;
import java.sql.Types;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.reference.ClassName;
import com.pivotal.gemfirexd.internal.iapi.reference.JDBC20Translation;
import com.pivotal.gemfirexd.internal.iapi.services.loader.ClassFactory;
import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager;
import com.pivotal.gemfirexd.internal.iapi.sql.compile.TypeCompiler;
import com.pivotal.gemfirexd.internal.iapi.types.BitDataValue;
import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.DataValueFactory;
import com.pivotal.gemfirexd.internal.iapi.types.TypeId;
import com.pivotal.gemfirexd.internal.shared.common.StoredFormatIds;
/**
* This class implements TypeCompiler for the SQL LOB types.
*
*/
public class LOBTypeCompiler extends BaseTypeCompiler
{
        // GemStone changes BEGIN
        /**
         * Shared check used by both convertible() and storable(), which
         * previously duplicated it: a LOB may be converted to / stored
         * from the binary JDBC types BLOB, BINARY and VARBINARY.
         *
         * @param otherType the TypeId of the other type
         * @return true if the other type is BLOB, BINARY or VARBINARY
         */
        private static boolean isBinaryTypeId(TypeId otherType)
        {
          final int otherJDBCTypeId = otherType.getJDBCTypeId();
          return otherJDBCTypeId == Types.BLOB ||
              otherJDBCTypeId == Types.BINARY ||
              otherJDBCTypeId == Types.VARBINARY;
        }
        // GemStone changes END

        /**
         * Tell whether this type (LOB) can be converted to the given type.
         *
         * @see TypeCompiler#convertible
         */
        public boolean convertible(TypeId otherType,
                                   boolean forDataTypeFunction)
        {
          // GemStone changes BEGIN
          return isBinaryTypeId(otherType);
          /* (original code)
          return (otherType.isBlobTypeId());
          */
          // GemStone changes END
        }

        /**
         * Tell whether this type (LOB) is compatible with the given type.
         * Compatibility is defined as convertibility (outside a data type
         * function context).
         *
         * @param otherType The TypeId of the other type.
         */
        public boolean compatible(TypeId otherType)
        {
                return convertible(otherType, false);
        }

        /**
         * Tell whether this type (LOB) can be stored into from the given type.
         * No automatic conversions happen at store time; the accepted types
         * are the same binary types accepted by convertible().
         *
         * @param otherType The TypeId of the other type.
         * @param cf A ClassFactory
         */
        public boolean storable(TypeId otherType, ClassFactory cf)
        {
          // GemStone changes BEGIN
          return isBinaryTypeId(otherType);
          /* (original code)
          // no automatic conversions at store time
          return (otherType.isBlobTypeId());
          */
          // GemStone changes END
        }

        /** @see TypeCompiler#interfaceName */
        public String interfaceName()
        {
                return ClassName.BitDataValue;
        }

        /**
         * Returns the Java type corresponding to this SQL LOB type.
         *
         * @see TypeCompiler#getCorrespondingPrimitiveTypeName
         */
        public String getCorrespondingPrimitiveTypeName() {
            int formatId = getStoredFormatIdFromTypeId();
            switch (formatId) {
                case StoredFormatIds.BLOB_TYPE_ID: return "java.sql.Blob";
                default:
                    // Only BLOB is expected here; anything else is a coding error.
                    if (SanityManager.DEBUG)
                        SanityManager.THROWASSERT("unexpected formatId in getCorrespondingPrimitiveTypeName() - " + formatId);
                    return null;
            }
        }

        /**
         * @see TypeCompiler#getCastToCharWidth
         */
        public int getCastToCharWidth(DataTypeDescriptor dts)
        {
                return dts.getMaximumWidth();
        }

        /** Name of the factory method that produces a SQL NULL of this type. */
        String nullMethodName() {
            int formatId = getStoredFormatIdFromTypeId();
            switch (formatId) {
                case StoredFormatIds.BLOB_TYPE_ID: return "getNullBlob";
                default:
                    // Only BLOB is expected here; anything else is a coding error.
                    if (SanityManager.DEBUG)
                        SanityManager.THROWASSERT("unexpected formatId in nullMethodName() - " + formatId);
                    return null;
            }
        }

        /** Name of the factory method that produces a data value of this type. */
        String dataValueMethodName()
        {
            int formatId = getStoredFormatIdFromTypeId();
            switch (formatId) {
                case StoredFormatIds.BLOB_TYPE_ID: return "getBlobDataValue";
                default:
                    // Only BLOB is expected here; anything else is a coding error.
                    if (SanityManager.DEBUG)
                        SanityManager.THROWASSERT("unexpected formatId in dataValueMethodName() - " + formatId);
                    return null;
            }
        }
}
| papicella/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/compile/LOBTypeCompiler.java | Java | apache-2.0 | 5,995 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.provisioning.java.propagation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.syncope.common.lib.Attr;
import org.apache.syncope.common.lib.request.AbstractPatchItem;
import org.apache.syncope.common.lib.request.UserUR;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.ResourceOperation;
import org.apache.syncope.core.persistence.api.dao.ExternalResourceDAO;
import org.apache.syncope.core.persistence.api.dao.VirSchemaDAO;
import org.apache.syncope.core.persistence.api.entity.Any;
import org.apache.syncope.core.persistence.api.entity.AnyUtilsFactory;
import org.apache.syncope.core.persistence.api.entity.EntityFactory;
import org.apache.syncope.core.persistence.api.entity.Realm;
import org.apache.syncope.core.persistence.api.entity.VirSchema;
import org.apache.syncope.core.persistence.api.entity.resource.ExternalResource;
import org.apache.syncope.core.persistence.api.entity.resource.Item;
import org.apache.syncope.core.persistence.api.entity.resource.OrgUnit;
import org.apache.syncope.core.persistence.api.entity.resource.Provision;
import org.apache.syncope.core.persistence.api.entity.user.LinkedAccount;
import org.apache.syncope.core.persistence.api.entity.user.User;
import org.apache.syncope.core.provisioning.api.DerAttrHandler;
import org.apache.syncope.core.provisioning.api.MappingManager;
import org.apache.syncope.core.provisioning.api.PropagationByResource;
import org.apache.syncope.core.provisioning.api.propagation.PropagationManager;
import org.apache.syncope.core.provisioning.api.UserWorkflowResult;
import org.apache.syncope.core.provisioning.api.jexl.JexlUtils;
import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor;
import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskInfo;
import org.apache.syncope.core.provisioning.api.serialization.POJOHelper;
import org.apache.syncope.core.provisioning.java.utils.ConnObjectUtils;
import org.apache.syncope.core.provisioning.java.utils.MappingUtils;
import org.identityconnectors.framework.common.objects.Attribute;
import org.identityconnectors.framework.common.objects.AttributeBuilder;
import org.identityconnectors.framework.common.objects.AttributeUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
/**
* Manage the data propagation to external resources.
*/
@Transactional(rollbackFor = { Throwable.class })
public class DefaultPropagationManager implements PropagationManager {

    protected static final Logger LOG = LoggerFactory.getLogger(PropagationManager.class);

    // Resolves virtual schemas referenced by virtual attribute values.
    protected final VirSchemaDAO virSchemaDAO;

    // Resolves external resources by key.
    protected final ExternalResourceDAO resourceDAO;

    protected final EntityFactory entityFactory;

    protected final ConnObjectUtils connObjectUtils;

    // Prepares the connector attributes to propagate from anys / linked accounts / realms.
    protected final MappingManager mappingManager;

    protected final DerAttrHandler derAttrHandler;

    protected final AnyUtilsFactory anyUtilsFactory;

    public DefaultPropagationManager(
            final VirSchemaDAO virSchemaDAO,
            final ExternalResourceDAO resourceDAO,
            final EntityFactory entityFactory,
            final ConnObjectUtils connObjectUtils,
            final MappingManager mappingManager,
            final DerAttrHandler derAttrHandler,
            final AnyUtilsFactory anyUtilsFactory) {

        this.virSchemaDAO = virSchemaDAO;
        this.resourceDAO = resourceDAO;
        this.entityFactory = entityFactory;
        this.connObjectUtils = connObjectUtils;
        this.mappingManager = mappingManager;
        this.derAttrHandler = derAttrHandler;
        this.anyUtilsFactory = anyUtilsFactory;
    }

    /**
     * Builds CREATE propagation tasks for a generic any object (no password,
     * no linked accounts involved).
     */
    @Override
    public List<PropagationTaskInfo> getCreateTasks(
            final AnyTypeKind kind,
            final String key,
            final Boolean enable,
            final PropagationByResource<String> propByRes,
            final Collection<Attr> vAttrs,
            final Collection<String> noPropResourceKeys) {

        return getCreateTasks(
                anyUtilsFactory.getInstance(kind).dao().authFind(key),
                null,
                enable,
                propByRes,
                null,
                vAttrs,
                noPropResourceKeys);
    }

    /**
     * Builds CREATE propagation tasks for a user, optionally carrying the
     * clear-text password and linked-account operations.
     */
    @Override
    public List<PropagationTaskInfo> getUserCreateTasks(
            final String key,
            final String password,
            final Boolean enable,
            final PropagationByResource<String> propByRes,
            final PropagationByResource<Pair<String, String>> propByLinkedAccount,
            final Collection<Attr> vAttrs,
            final Collection<String> noPropResourceKeys) {

        return getCreateTasks(
                anyUtilsFactory.getInstance(AnyTypeKind.USER).dao().authFind(key),
                password,
                enable,
                propByRes,
                propByLinkedAccount,
                vAttrs,
                noPropResourceKeys);
    }

    /**
     * Common CREATE implementation: drops the excluded resources / accounts,
     * then delegates to createTasks(). Returns an empty list when there is
     * nothing to propagate.
     */
    protected List<PropagationTaskInfo> getCreateTasks(
            final Any<?> any,
            final String password,
            final Boolean enable,
            final PropagationByResource<String> propByRes,
            final PropagationByResource<Pair<String, String>> propByLinkedAccount,
            final Collection<Attr> vAttrs,
            final Collection<String> noPropResourceKeys) {

        if ((propByRes == null || propByRes.isEmpty())
                && (propByLinkedAccount == null || propByLinkedAccount.isEmpty())) {

            return List.of();
        }

        // exclude resources (and linked accounts on those resources) that shall not be propagated to
        if (noPropResourceKeys != null) {
            if (propByRes != null) {
                propByRes.get(ResourceOperation.CREATE).removeAll(noPropResourceKeys);
            }
            if (propByLinkedAccount != null) {
                propByLinkedAccount.get(ResourceOperation.CREATE).
                        removeIf(account -> noPropResourceKeys.contains(account.getLeft()));
            }
        }

        return createTasks(any, password, true, enable, propByRes, propByLinkedAccount, vAttrs);
    }

    /**
     * Builds UPDATE propagation tasks for a generic any object.
     */
    @Override
    public List<PropagationTaskInfo> getUpdateTasks(
            final AnyTypeKind kind,
            final String key,
            final boolean changePwd,
            final Boolean enable,
            final PropagationByResource<String> propByRes,
            final PropagationByResource<Pair<String, String>> propByLinkedAccount,
            final Collection<Attr> vAttrs,
            final Collection<String> noPropResourceKeys) {

        return getUpdateTasks(
                anyUtilsFactory.getInstance(kind).dao().authFind(key),
                null,
                changePwd,
                enable,
                propByRes,
                propByLinkedAccount,
                vAttrs,
                noPropResourceKeys);
    }

    /**
     * Builds UPDATE propagation tasks for a user out of a workflow result,
     * extracting password and virtual attributes from the user request.
     */
    @Override
    public List<PropagationTaskInfo> getUserUpdateTasks(
            final UserWorkflowResult<Pair<UserUR, Boolean>> wfResult,
            final boolean changePwd,
            final Collection<String> noPropResourceKeys) {

        return getUpdateTasks(
                anyUtilsFactory.getInstance(AnyTypeKind.USER).dao().authFind(wfResult.getResult().getLeft().getKey()),
                wfResult.getResult().getLeft().getPassword() == null
                ? null
                : wfResult.getResult().getLeft().getPassword().getValue(),
                changePwd,
                wfResult.getResult().getRight(),
                wfResult.getPropByRes(),
                wfResult.getPropByLinkedAccount(),
                wfResult.getResult().getLeft().getVirAttrs(),
                noPropResourceKeys);
    }

    /**
     * Builds UPDATE propagation tasks for a user, propagating the password
     * only to the resources explicitly listed in the password patch: tasks
     * with password first, then the remaining tasks without it.
     */
    @Override
    public List<PropagationTaskInfo> getUserUpdateTasks(final UserWorkflowResult<Pair<UserUR, Boolean>> wfResult) {
        UserUR userUR = wfResult.getResult().getLeft();

        // Propagate password update only to requested resources
        List<PropagationTaskInfo> tasks;
        if (userUR.getPassword() == null) {
            // a. no specific password propagation request: generate propagation tasks for any resource associated
            tasks = getUserUpdateTasks(wfResult, false, null);
        } else {
            tasks = new ArrayList<>();

            // b. generate the propagation task list in two phases: first the ones containing password,
            // then the rest (with no password)
            UserWorkflowResult<Pair<UserUR, Boolean>> pwdWFResult = new UserWorkflowResult<>(
                    wfResult.getResult(),
                    new PropagationByResource<>(),
                    wfResult.getPropByLinkedAccount(),
                    wfResult.getPerformedTasks());

            // only consider password resources actually assigned to the user
            Set<String> pwdResourceNames = new HashSet<>(userUR.getPassword().getResources());
            Collection<String> allResourceNames = anyUtilsFactory.getInstance(AnyTypeKind.USER).
                    dao().findAllResourceKeys(userUR.getKey());
            pwdResourceNames.retainAll(allResourceNames);

            pwdWFResult.getPropByRes().addAll(ResourceOperation.UPDATE, pwdResourceNames);
            if (!pwdWFResult.getPropByRes().isEmpty()) {
                // phase 1: password-carrying tasks, excluding every other resource
                Set<String> toBeExcluded = new HashSet<>(allResourceNames);
                toBeExcluded.addAll(userUR.getResources().stream().
                        map(AbstractPatchItem::getValue).collect(Collectors.toList()));
                toBeExcluded.removeAll(pwdResourceNames);

                tasks.addAll(getUserUpdateTasks(pwdWFResult, true, toBeExcluded));
            }

            // phase 2: tasks without password, for all remaining resources
            UserWorkflowResult<Pair<UserUR, Boolean>> noPwdWFResult = new UserWorkflowResult<>(
                    wfResult.getResult(),
                    new PropagationByResource<>(),
                    new PropagationByResource<>(),
                    wfResult.getPerformedTasks());

            noPwdWFResult.getPropByRes().merge(wfResult.getPropByRes());
            noPwdWFResult.getPropByRes().removeAll(pwdResourceNames);
            noPwdWFResult.getPropByRes().purge();

            if (!noPwdWFResult.getPropByRes().isEmpty()) {
                tasks.addAll(getUserUpdateTasks(noPwdWFResult, false, pwdResourceNames));
            }

            tasks = tasks.stream().distinct().collect(Collectors.toList());
        }

        return tasks;
    }

    /**
     * Common UPDATE implementation: drops the excluded resources / accounts
     * (for all operations), then delegates to createTasks().
     */
    protected List<PropagationTaskInfo> getUpdateTasks(
            final Any<?> any,
            final String password,
            final boolean changePwd,
            final Boolean enable,
            final PropagationByResource<String> propByRes,
            final PropagationByResource<Pair<String, String>> propByLinkedAccount,
            final Collection<Attr> vAttrs,
            final Collection<String> noPropResourceKeys) {

        if (noPropResourceKeys != null) {
            if (propByRes != null) {
                propByRes.removeAll(noPropResourceKeys);
            }

            if (propByLinkedAccount != null) {
                propByLinkedAccount.get(ResourceOperation.CREATE).
                        removeIf(account -> noPropResourceKeys.contains(account.getLeft()));
                propByLinkedAccount.get(ResourceOperation.UPDATE).
                        removeIf(account -> noPropResourceKeys.contains(account.getLeft()));
                propByLinkedAccount.get(ResourceOperation.DELETE).
                        removeIf(account -> noPropResourceKeys.contains(account.getLeft()));
            }
        }

        return createTasks(
                any,
                password,
                changePwd,
                enable,
                Optional.ofNullable(propByRes).orElseGet(PropagationByResource::new),
                propByLinkedAccount,
                vAttrs);
    }

    /**
     * Builds DELETE propagation tasks for a generic any object.
     */
    @Override
    public List<PropagationTaskInfo> getDeleteTasks(
            final AnyTypeKind kind,
            final String key,
            final PropagationByResource<String> propByRes,
            final PropagationByResource<Pair<String, String>> propByLinkedAccount,
            final Collection<String> noPropResourceKeys) {

        return getDeleteTasks(
                anyUtilsFactory.getInstance(kind).dao().authFind(key),
                propByRes, propByLinkedAccount, noPropResourceKeys);
    }

    /**
     * Common DELETE implementation: when no explicit operations are given,
     * deletion is propagated to every resource assigned to the given any.
     */
    protected List<PropagationTaskInfo> getDeleteTasks(
            final Any<?> any,
            final PropagationByResource<String> propByRes,
            final PropagationByResource<Pair<String, String>> propByLinkedAccount,
            final Collection<String> noPropResourceKeys) {

        PropagationByResource<String> localPropByRes = new PropagationByResource<>();

        if (propByRes == null || propByRes.isEmpty()) {
            // default: delete from all assigned resources
            localPropByRes.addAll(
                    ResourceOperation.DELETE,
                    anyUtilsFactory.getInstance(any).dao().findAllResourceKeys(any.getKey()));
        } else {
            localPropByRes.merge(propByRes);
        }

        if (noPropResourceKeys != null) {
            localPropByRes.removeAll(noPropResourceKeys);

            if (propByLinkedAccount != null) {
                propByLinkedAccount.get(ResourceOperation.CREATE).
                        removeIf(account -> noPropResourceKeys.contains(account.getLeft()));
                propByLinkedAccount.get(ResourceOperation.UPDATE).
                        removeIf(account -> noPropResourceKeys.contains(account.getLeft()));
                propByLinkedAccount.get(ResourceOperation.DELETE).
                        removeIf(account -> noPropResourceKeys.contains(account.getLeft()));
            }
        }

        return createTasks(any, null, false, false, localPropByRes, propByLinkedAccount, null);
    }

    /**
     * Assembles a single propagation task for the given any / resource /
     * operation, flagging mandatory mapping items that are missing or empty
     * so that the task executor can react to them.
     */
    @Override
    public PropagationTaskInfo newTask(
            final DerAttrHandler derAttrHandler,
            final Any<?> any,
            final ExternalResource resource,
            final ResourceOperation operation,
            final Provision provision,
            final Stream<? extends Item> mappingItems,
            final Pair<String, Set<Attribute>> preparedAttrs) {

        PropagationTaskInfo task = new PropagationTaskInfo(resource);
        task.setObjectClassName(provision.getObjectClass().getObjectClassValue());
        task.setAnyTypeKind(any.getType().getKind());
        task.setAnyType(any.getType().getKey());
        task.setEntityKey(any.getKey());
        task.setOperation(operation);
        task.setConnObjectKey(preparedAttrs.getLeft());

        // Check if any of mandatory attributes (in the mapping) is missing or not received any value:
        // if so, add special attributes that will be evaluated by PropagationTaskExecutor
        List<String> mandatoryMissing = new ArrayList<>();
        List<String> mandatoryNullOrEmpty = new ArrayList<>();
        mappingItems.filter(item -> (!item.isConnObjectKey()
                && JexlUtils.evaluateMandatoryCondition(item.getMandatoryCondition(), any, derAttrHandler))).
                forEach(item -> {

                    Attribute attr = AttributeUtil.find(item.getExtAttrName(), preparedAttrs.getRight());
                    if (attr == null) {
                        mandatoryMissing.add(item.getExtAttrName());
                    } else if (CollectionUtils.isEmpty(attr.getValue())) {
                        mandatoryNullOrEmpty.add(item.getExtAttrName());
                    }
                });
        if (!mandatoryMissing.isEmpty()) {
            preparedAttrs.getRight().add(AttributeBuilder.build(
                    PropagationTaskExecutor.MANDATORY_MISSING_ATTR_NAME, mandatoryMissing));
        }
        if (!mandatoryNullOrEmpty.isEmpty()) {
            preparedAttrs.getRight().add(AttributeBuilder.build(
                    PropagationTaskExecutor.MANDATORY_NULL_OR_EMPTY_ATTR_NAME, mandatoryNullOrEmpty));
        }

        task.setAttributes(POJOHelper.serialize(preparedAttrs.getRight()));

        return task;
    }

    /**
     * Create propagation tasks.
     *
     * @param any to be provisioned
     * @param password clear text password to be provisioned
     * @param changePwd whether password should be included for propagation attributes or not
     * @param enable whether user must be enabled or not
     * @param propByRes operation to be performed per resource
     * @param propByLinkedAccount operation to be performed on linked accounts
     * @param vAttrs virtual attributes to be set
     * @return list of propagation tasks created
     */
    protected List<PropagationTaskInfo> createTasks(
            final Any<?> any,
            final String password,
            final boolean changePwd,
            final Boolean enable,
            final PropagationByResource<String> propByRes,
            final PropagationByResource<Pair<String, String>> propByLinkedAccount,
            final Collection<Attr> vAttrs) {

        LOG.debug("Provisioning {}:\n{}", any, propByRes);

        // Avoid duplicates - see javadoc
        propByRes.purge();
        LOG.debug("After purge {}:\n{}", any, propByRes);

        // Virtual attributes
        //
        // Collect the resources where virtual attribute values could apply:
        // those being created/updated plus all currently assigned ones.
        Set<String> virtualResources = new HashSet<>();
        virtualResources.addAll(propByRes.get(ResourceOperation.CREATE));
        virtualResources.addAll(propByRes.get(ResourceOperation.UPDATE));
        virtualResources.addAll(anyUtilsFactory.getInstance(any).dao().findAllResourceKeys(any.getKey()));

        // Map each applicable resource key to the connector attributes built
        // from the provided virtual attribute values.
        Map<String, Set<Attribute>> vAttrMap = new HashMap<>();
        if (vAttrs != null) {
            vAttrs.forEach(vAttr -> {
                VirSchema schema = virSchemaDAO.find(vAttr.getSchema());
                if (schema == null) {
                    LOG.warn("Ignoring invalid {} {}", VirSchema.class.getSimpleName(), vAttr.getSchema());
                } else if (schema.isReadonly()) {
                    LOG.warn("Ignoring read-only {} {}", VirSchema.class.getSimpleName(), vAttr.getSchema());
                } else if (anyUtilsFactory.getInstance(any).dao().
                        findAllowedSchemas(any, VirSchema.class).contains(schema)
                        && virtualResources.contains(schema.getProvision().getResource().getKey())) {

                    Set<Attribute> values = vAttrMap.get(schema.getProvision().getResource().getKey());
                    if (values == null) {
                        values = new HashSet<>();
                        vAttrMap.put(schema.getProvision().getResource().getKey(), values);
                    }
                    values.add(AttributeBuilder.build(schema.getExtAttrName(), vAttr.getValues()));

                    // a virtual attribute change on an otherwise untouched resource forces an UPDATE
                    if (!propByRes.contains(ResourceOperation.CREATE, schema.getProvision().getResource().getKey())) {
                        propByRes.add(ResourceOperation.UPDATE, schema.getProvision().getResource().getKey());
                    }
                } else {
                    LOG.warn("{} not owned by or {} not allowed for {}",
                            schema.getProvision().getResource(), schema, any);
                }
            });
        }
        LOG.debug("With virtual attributes {}:\n{}\n{}", any, propByRes, vAttrMap);

        List<PropagationTaskInfo> tasks = new ArrayList<>();

        // one task per resource operation, provided the resource is valid and mapped
        propByRes.asMap().forEach((resourceKey, operation) -> {
            ExternalResource resource = resourceDAO.find(resourceKey);
            Provision provision = Optional.ofNullable(resource).
                    flatMap(externalResource -> externalResource.getProvision(any.getType())).orElse(null);
            Stream<? extends Item> mappingItems = provision == null
                    ? Stream.empty()
                    : MappingUtils.getPropagationItems(provision.getMapping().getItems().stream());

            if (resource == null) {
                LOG.error("Invalid resource name specified: {}, ignoring...", resourceKey);
            } else if (provision == null) {
                LOG.error("No provision specified on resource {} for type {}, ignoring...",
                        resource, any.getType());
            } else if (provision.getMapping() == null || provision.getMapping().getItems().isEmpty()) {
                LOG.warn("Requesting propagation for {} but no propagation mapping provided for {}",
                        any.getType(), resource);
            } else {
                Pair<String, Set<Attribute>> preparedAttrs =
                        mappingManager.prepareAttrsFromAny(any, password, changePwd, enable, provision);
                if (vAttrMap.containsKey(resourceKey)) {
                    preparedAttrs.getRight().addAll(vAttrMap.get(resourceKey));
                }

                PropagationTaskInfo task = newTask(
                        derAttrHandler,
                        any,
                        resource,
                        operation,
                        provision,
                        mappingItems,
                        preparedAttrs);
                task.setOldConnObjectKey(propByRes.getOldConnObjectKey(resourceKey));
                tasks.add(task);
                LOG.debug("PropagationTask created: {}", task);
            }
        });

        // additional tasks for linked accounts (users only)
        if (any instanceof User && propByLinkedAccount != null) {
            User user = (User) any;
            propByLinkedAccount.asMap().forEach((accountInfo, operation) -> {
                LinkedAccount account = user.getLinkedAccount(accountInfo.getLeft(), accountInfo.getRight()).
                        orElse(null);
                if (account == null && operation == ResourceOperation.DELETE) {
                    // the account was already removed from the user: build a stand-in so the
                    // remote entry can still be deleted
                    account = new DeletingLinkedAccount(
                            user, resourceDAO.find(accountInfo.getLeft()), accountInfo.getRight());
                }

                Provision provision = account == null || account.getResource() == null
                        ? null
                        : account.getResource().getProvision(AnyTypeKind.USER.name()).orElse(null);
                Stream<? extends Item> mappingItems = provision == null
                        ? Stream.empty()
                        : MappingUtils.getPropagationItems(provision.getMapping().getItems().stream());

                if (account == null) {
                    LOG.error("Invalid operation {} on deleted account {} on resource {}, ignoring...",
                            operation, accountInfo.getRight(), accountInfo.getLeft());
                } else if (account.getResource() == null) {
                    LOG.error("Invalid resource name specified: {}, ignoring...", accountInfo.getLeft());
                } else if (provision == null) {
                    LOG.error("No provision specified on resource {} for type {}, ignoring...",
                            account.getResource(), AnyTypeKind.USER.name());
                } else if (provision.getMapping() == null || provision.getMapping().getItems().isEmpty()) {
                    LOG.warn("Requesting propagation for {} but no propagation mapping provided for {}",
                            AnyTypeKind.USER.name(), account.getResource());
                } else {
                    PropagationTaskInfo accountTask = newTask(
                            derAttrHandler,
                            user,
                            account.getResource(),
                            operation,
                            provision,
                            mappingItems,
                            Pair.of(account.getConnObjectKeyValue(),
                                    mappingManager.prepareAttrsFromLinkedAccount(
                                            user, account, password, true, provision)));
                    tasks.add(accountTask);

                    LOG.debug("PropagationTask created for Linked Account {}: {}",
                            account.getConnObjectKeyValue(), accountTask);
                }
            });
        }

        return tasks;
    }

    /**
     * Builds propagation tasks for a realm towards the org units defined on
     * the involved resources.
     */
    @Override
    public List<PropagationTaskInfo> createTasks(
            final Realm realm,
            final PropagationByResource<String> propByRes,
            final Collection<String> noPropResourceKeys) {

        if (noPropResourceKeys != null) {
            propByRes.removeAll(noPropResourceKeys);
        }

        LOG.debug("Provisioning {}:\n{}", realm, propByRes);

        // Avoid duplicates - see javadoc
        propByRes.purge();
        LOG.debug("After purge {}:\n{}", realm, propByRes);

        List<PropagationTaskInfo> tasks = new ArrayList<>();

        propByRes.asMap().forEach((resourceKey, operation) -> {
            ExternalResource resource = resourceDAO.find(resourceKey);
            OrgUnit orgUnit = Optional.ofNullable(resource).map(ExternalResource::getOrgUnit).orElse(null);

            if (resource == null) {
                LOG.error("Invalid resource name specified: {}, ignoring...", resourceKey);
            } else if (orgUnit == null) {
                LOG.error("No orgUnit specified on resource {}, ignoring...", resource);
            } else if (StringUtils.isBlank(orgUnit.getConnObjectLink())) {
                LOG.warn("Requesting propagation for {} but no ConnObjectLink provided for {}",
                        realm.getFullPath(), resource);
            } else {
                PropagationTaskInfo task = new PropagationTaskInfo(resource);
                task.setObjectClassName(orgUnit.getObjectClass().getObjectClassValue());
                task.setEntityKey(realm.getKey());
                task.setOperation(operation);
                task.setOldConnObjectKey(propByRes.getOldConnObjectKey(resource.getKey()));

                Pair<String, Set<Attribute>> preparedAttrs = mappingManager.prepareAttrsFromRealm(realm, orgUnit);
                task.setConnObjectKey(preparedAttrs.getLeft());
                task.setAttributes(POJOHelper.serialize(preparedAttrs.getRight()));

                tasks.add(task);

                LOG.debug("PropagationTask created: {}", task);
            }
        });

        return tasks;
    }
}
| apache/syncope | core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/propagation/DefaultPropagationManager.java | Java | apache-2.0 | 27,232 |
/*
* Copyright 2002-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.binding.validation;
import org.springframework.richclient.core.Message;
/**
* A specific type of message that relates to a property.
* <code>ValidationMessage</code>s often find their origin in validation
* triggered by a constraint on a property. This information is additionally
* kept available in this <code>ValidationMessage</code>.
*/
public interface ValidationMessage extends Message {

    /**
     * Property name used by messages with global scope, i.e. messages that
     * do not apply to one specific property. Interface fields are implicitly
     * {@code public static final}.
     */
    String GLOBAL_PROPERTY = null;

    /**
     * Returns the property this validation message applies to, or
     * {@code GLOBAL_PROPERTY} when the message is not bound to a
     * specific property.
     */
    String getProperty();
}
| springrichclient/springrcp | spring-richclient-core/src/main/java/org/springframework/binding/validation/ValidationMessage.java | Java | apache-2.0 | 1,409 |
/*
* Copyright 2002-2004 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.springframework.binding.value.swing;
import javax.swing.JSpinner;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.springframework.binding.value.ValueModel;
import org.springframework.binding.value.support.AbstractValueModelAdapter;
/**
* Adapts a value model to a JSpinner control.
*
* @author Oliver Hutchison
*/
public class SpinnerAdapter extends AbstractValueModelAdapter {
private final SpinnerChangeListener listener = new SpinnerChangeListener();
private final JSpinner spinner;
public SpinnerAdapter(JSpinner spinner, ValueModel valueModel) {
super(valueModel);
this.spinner = spinner;
this.spinner.addChangeListener(listener);
initalizeAdaptedValue();
}
protected void valueModelValueChanged(Object newValue) {
if (newValue == null) {
spinner.setValue(new Integer(0));
}
else {
spinner.setValue(newValue);
}
}
private class SpinnerChangeListener implements ChangeListener {
public void stateChanged(ChangeEvent e) {
adaptedValueChanged(spinner.getValue());
}
}
} | springrichclient/springrcp | spring-richclient-core/src/main/java/org/springframework/binding/value/swing/SpinnerAdapter.java | Java | apache-2.0 | 1,821 |
# Copyright 2012 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011,2012 Akira YOSHIYAMA <akirayoshiyama@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This source code is based ./auth_token.py and ./ec2_token.py.
# See them for their copyright.
"""
-------------------
S3 Token Middleware
-------------------
s3token middleware is for authentication with s3api + keystone.
This middleware:
* Gets a request from the s3api middleware with an S3 Authorization
access key.
* Validates s3 token with Keystone.
* Transforms the account name to AUTH_%(tenant_name).
* Optionally can retrieve and cache secret from keystone
to validate signature locally
.. note::
If upgrading from swift3, the ``auth_version`` config option has been
removed, and the ``auth_uri`` option now includes the Keystone API
version. If you previously had a configuration like
.. code-block:: ini
[filter:s3token]
use = egg:swift3#s3token
auth_uri = https://keystonehost:35357
auth_version = 3
you should now use
.. code-block:: ini
[filter:s3token]
use = egg:swift#s3token
auth_uri = https://keystonehost:35357/v3
"""
import base64
import json
from keystoneclient.v3 import client as keystone_client
from keystoneauth1 import session as keystone_session
from keystoneauth1 import loading as keystone_loading
import requests
import six
from six.moves import urllib
from swift.common.swob import Request, HTTPBadRequest, HTTPUnauthorized, \
HTTPException
from swift.common.utils import config_true_value, split_path, get_logger, \
cache_from_env, append_underscore
from swift.common.wsgi import ConfigFileError
# Human-readable name for this auth scheme, used only in log messages.
PROTOCOL_NAME = 'S3 Token Authentication'
# Headers to purge if they came from (or may have come from) the client
KEYSTONE_AUTH_HEADERS = (
    'X-Identity-Status', 'X-Service-Identity-Status',
    'X-Domain-Id', 'X-Service-Domain-Id',
    'X-Domain-Name', 'X-Service-Domain-Name',
    'X-Project-Id', 'X-Service-Project-Id',
    'X-Project-Name', 'X-Service-Project-Name',
    'X-Project-Domain-Id', 'X-Service-Project-Domain-Id',
    'X-Project-Domain-Name', 'X-Service-Project-Domain-Name',
    'X-User-Id', 'X-Service-User-Id',
    'X-User-Name', 'X-Service-User-Name',
    'X-User-Domain-Id', 'X-Service-User-Domain-Id',
    'X-User-Domain-Name', 'X-Service-User-Domain-Name',
    'X-Roles', 'X-Service-Roles',
    'X-Is-Admin-Project',
    'X-Service-Catalog',
    # Deprecated headers, too...
    'X-Tenant-Id',
    'X-Tenant-Name',
    'X-Tenant',
    'X-User',
    'X-Role',
)
def parse_v2_response(token):
    """Extract identity headers and the tenant from a Keystone v2 reply.

    :param token: deserialized JSON body of a Keystone v2 token response,
                  keyed by 'access'
    :returns: a two-tuple of (identity headers dict, tenant dict)
    """
    access = token['access']
    user = access['user']
    tenant = access['token']['tenant']
    role_names = ','.join(role['name'] for role in user['roles'])
    headers = {
        'X-Identity-Status': 'Confirmed',
        'X-Roles': role_names,
        'X-User-Id': user['id'],
        'X-User-Name': user['name'],
        'X-Tenant-Id': tenant['id'],
        'X-Tenant-Name': tenant['name'],
        'X-Project-Id': tenant['id'],
        'X-Project-Name': tenant['name'],
    }
    return headers, tenant
def parse_v3_response(token):
    """Extract identity headers and the project from a Keystone v3 reply.

    :param token: deserialized JSON body of a Keystone v3 token response,
                  keyed by 'token'
    :returns: a two-tuple of (identity headers dict, project dict)
    """
    body = token['token']
    user = body['user']
    project = body['project']
    headers = {
        'X-Identity-Status': 'Confirmed',
        'X-Roles': ','.join(role['name'] for role in body['roles']),
        'X-User-Id': user['id'],
        'X-User-Name': user['name'],
        'X-User-Domain-Id': user['domain']['id'],
        'X-User-Domain-Name': user['domain']['name'],
        'X-Tenant-Id': project['id'],
        'X-Tenant-Name': project['name'],
        'X-Project-Id': project['id'],
        'X-Project-Name': project['name'],
        'X-Project-Domain-Id': project['domain']['id'],
        'X-Project-Domain-Name': project['domain']['name'],
    }
    return headers, project
class S3Token(object):
    """Middleware that handles S3 authentication."""

    def __init__(self, app, conf):
        """Common initialization code.

        :param app: the next WSGI app in the pipeline
        :param conf: middleware configuration dict (paste.deploy filter conf)
        :raises ValueError: if http_timeout or secret_cache_duration is out
                            of range
        :raises ConfigFileError: if auth_uri is malformed
        """
        self._app = app
        self._logger = get_logger(
            conf, log_route=conf.get('log_name', 's3token'))
        self._logger.debug('Starting the %s component', PROTOCOL_NAME)
        self._timeout = float(conf.get('http_timeout', '10.0'))
        if not (0 < self._timeout <= 60):
            raise ValueError('http_timeout must be between 0 and 60 seconds')
        self._reseller_prefix = append_underscore(
            conf.get('reseller_prefix', 'AUTH'))
        self._delay_auth_decision = config_true_value(
            conf.get('delay_auth_decision'))
        # where to find the auth service (we use this to validate tokens)
        self._request_uri = conf.get('auth_uri', '').rstrip('/') + '/s3tokens'
        parsed = urllib.parse.urlsplit(self._request_uri)
        if not parsed.scheme or not parsed.hostname:
            raise ConfigFileError(
                'Invalid auth_uri; must include scheme and host')
        if parsed.scheme not in ('http', 'https'):
            raise ConfigFileError(
                'Invalid auth_uri; scheme must be http or https')
        if parsed.query or parsed.fragment or '@' in parsed.netloc:
            raise ConfigFileError('Invalid auth_uri; must not include '
                                  'username, query, or fragment')
        # SSL
        # _verify becomes the requests `verify`/cert material: False to skip
        # verification, a (cert, key) tuple, a cert file path, or None for
        # the requests default behavior.
        insecure = config_true_value(conf.get('insecure'))
        cert_file = conf.get('certfile')
        key_file = conf.get('keyfile')
        if insecure:
            self._verify = False
        elif cert_file and key_file:
            self._verify = (cert_file, key_file)
        elif cert_file:
            self._verify = cert_file
        else:
            self._verify = None
        # Optional local secret caching: requires a working keystone auth
        # plugin so we can fetch EC2 credentials for signature validation.
        self._secret_cache_duration = int(conf.get('secret_cache_duration', 0))
        if self._secret_cache_duration < 0:
            raise ValueError('secret_cache_duration must be non-negative')
        if self._secret_cache_duration:
            try:
                auth_plugin = keystone_loading.get_plugin_loader(
                    conf.get('auth_type', 'password'))
                available_auth_options = auth_plugin.get_options()
                auth_options = {}
                for option in available_auth_options:
                    # Plugin option names use dashes; conf keys use
                    # underscores.
                    name = option.name.replace('-', '_')
                    value = conf.get(name)
                    if value:
                        auth_options[name] = value
                auth = auth_plugin.load_from_options(**auth_options)
                session = keystone_session.Session(auth=auth)
                self.keystoneclient = keystone_client.Client(
                    session=session,
                    region_name=conf.get('region_name'))
                self._logger.info("Caching s3tokens for %s seconds",
                                  self._secret_cache_duration)
            except Exception:
                # Misconfigured auth plugin is non-fatal: we simply fall
                # back to validating every request against Keystone.
                self._logger.warning("Unable to load keystone auth_plugin. "
                                     "Secret caching will be unavailable.",
                                     exc_info=True)
                self.keystoneclient = None
                self._secret_cache_duration = 0

    def _deny_request(self, code):
        """Build an S3-style XML error response for the given error code.

        :param code: one of 'AccessDenied' or 'InvalidURI'
        :returns: a swob error response object (callable as a WSGI app)
        """
        error_cls, message = {
            'AccessDenied': (HTTPUnauthorized, 'Access denied'),
            'InvalidURI': (HTTPBadRequest,
                           'Could not parse the specified URI'),
        }[code]
        resp = error_cls(content_type='text/xml')
        error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
                     '<Error>\r\n <Code>%s</Code>\r\n '
                     '<Message>%s</Message>\r\n</Error>\r\n' %
                     (code, message))
        if six.PY3:
            error_msg = error_msg.encode()
        resp.body = error_msg
        return resp

    def _json_request(self, creds_json):
        """POST the serialized credentials to Keystone's s3tokens endpoint.

        :param creds_json: JSON-encoded credentials payload
        :returns: the requests.Response on any 2xx status
        :raises HTTPException: (via _deny_request) on connection failure or
                               a non-2xx Keystone reply
        """
        headers = {'Content-Type': 'application/json'}
        try:
            response = requests.post(self._request_uri,
                                     headers=headers, data=creds_json,
                                     verify=self._verify,
                                     timeout=self._timeout)
        except requests.exceptions.RequestException as e:
            self._logger.info('HTTP connection exception: %s', e)
            raise self._deny_request('InvalidURI')
        if response.status_code < 200 or response.status_code >= 300:
            self._logger.debug('Keystone reply error: status=%s reason=%s',
                               response.status_code, response.reason)
            raise self._deny_request('AccessDenied')
        return response

    def __call__(self, environ, start_response):
        """Handle incoming request. authenticate and send downstream."""
        req = Request(environ)
        self._logger.debug('Calling S3Token middleware.')
        # Always drop auth headers if we're first in the pipeline
        if 'keystone.token_info' not in req.environ:
            req.headers.update({h: None for h in KEYSTONE_AUTH_HEADERS})
        try:
            parts = split_path(urllib.parse.unquote(req.path), 1, 4, True)
            version, account, container, obj = parts
        except ValueError:
            msg = 'Not a path query: %s, skipping.' % req.path
            self._logger.debug(msg)
            return self._app(environ, start_response)
        # Read request signature and access id.
        s3_auth_details = req.environ.get('s3api.auth_details')
        if not s3_auth_details:
            msg = 'No authorization details from s3api. skipping.'
            self._logger.debug(msg)
            return self._app(environ, start_response)
        access = s3_auth_details['access_key']
        if isinstance(access, six.binary_type):
            access = access.decode('utf-8')
        signature = s3_auth_details['signature']
        if isinstance(signature, six.binary_type):
            signature = signature.decode('utf-8')
        string_to_sign = s3_auth_details['string_to_sign']
        if isinstance(string_to_sign, six.text_type):
            string_to_sign = string_to_sign.encode('utf-8')
        # Keystone expects the string-to-sign base64-encoded as the "token".
        token = base64.urlsafe_b64encode(string_to_sign)
        if isinstance(token, six.binary_type):
            token = token.decode('ascii')
        # NOTE(chmou): This is to handle the special case with nova
        # when we have the option s3_affix_tenant. We will force it to
        # connect to another account than the one
        # authenticated. Before people start getting worried about
        # security, I should point that we are connecting with
        # username/token specified by the user but instead of
        # connecting to its own account we will force it to go to an
        # another account. In a normal scenario if that user don't
        # have the reseller right it will just fail but since the
        # reseller account can connect to every account it is allowed
        # by the swift_auth middleware.
        force_tenant = None
        if ':' in access:
            access, force_tenant = access.split(':')
        # Authenticate request.
        creds = {'credentials': {'access': access,
                                 'token': token,
                                 'signature': signature}}
        memcache_client = None
        memcache_token_key = 's3secret/%s' % access
        if self._secret_cache_duration > 0:
            memcache_client = cache_from_env(environ)
        cached_auth_data = None
        if memcache_client:
            cached_auth_data = memcache_client.get(memcache_token_key)
            if cached_auth_data:
                if len(cached_auth_data) == 4:
                    # Old versions of swift may have cached token, too,
                    # but we don't need it
                    headers, _token, tenant, secret = cached_auth_data
                else:
                    headers, tenant, secret = cached_auth_data
                # Validate the request signature locally with the cached
                # secret; on mismatch fall through to a Keystone round trip.
                if s3_auth_details['check_signature'](secret):
                    self._logger.debug("Cached creds valid")
                else:
                    self._logger.debug("Cached creds invalid")
                    cached_auth_data = None
        if not cached_auth_data:
            creds_json = json.dumps(creds)
            self._logger.debug('Connecting to Keystone sending this JSON: %s',
                               creds_json)
            # NOTE(vish): We could save a call to keystone by having
            #             keystone return token, tenant, user, and roles
            #             from this call.
            #
            # NOTE(chmou): We still have the same problem we would need to
            #              change token_auth to detect if we already
            #              identified and not doing a second query and just
            #              pass it through to swiftauth in this case.
            try:
                # NB: requests.Response, not swob.Response
                resp = self._json_request(creds_json)
            except HTTPException as e_resp:
                if self._delay_auth_decision:
                    msg = ('Received error, deferring rejection based on '
                           'error: %s')
                    self._logger.debug(msg, e_resp.status)
                    return self._app(environ, start_response)
                else:
                    msg = 'Received error, rejecting request with error: %s'
                    self._logger.debug(msg, e_resp.status)
                    # NB: swob.Response, not requests.Response
                    return e_resp(environ, start_response)
            self._logger.debug('Keystone Reply: Status: %d, Output: %s',
                               resp.status_code, resp.content)
            try:
                token = resp.json()
                # Dispatch on the reply shape: v2 replies are keyed by
                # 'access', v3 replies by 'token'.
                if 'access' in token:
                    headers, tenant = parse_v2_response(token)
                elif 'token' in token:
                    headers, tenant = parse_v3_response(token)
                else:
                    raise ValueError
                if memcache_client:
                    user_id = headers.get('X-User-Id')
                    if not user_id:
                        raise ValueError
                    # Fetch the EC2 secret from Keystone so future requests
                    # can be validated locally; failure to cache is benign.
                    try:
                        cred_ref = self.keystoneclient.ec2.get(
                            user_id=user_id,
                            access=access)
                        memcache_client.set(
                            memcache_token_key,
                            (headers, tenant, cred_ref.secret),
                            time=self._secret_cache_duration)
                        self._logger.debug("Cached keystone credentials")
                    except Exception:
                        self._logger.warning("Unable to cache secret",
                                             exc_info=True)
                # Populate the environment similar to auth_token,
                # so we don't have to contact Keystone again.
                #
                # Note that although the strings are unicode following json
                # deserialization, Swift's HeaderEnvironProxy handles ensuring
                # they're stored as native strings
                req.environ['keystone.token_info'] = token
            except (ValueError, KeyError, TypeError):
                if self._delay_auth_decision:
                    error = ('Error on keystone reply: %d %s - '
                             'deferring rejection downstream')
                    self._logger.debug(error, resp.status_code, resp.content)
                    return self._app(environ, start_response)
                else:
                    error = ('Error on keystone reply: %d %s - '
                             'rejecting request')
                    self._logger.debug(error, resp.status_code, resp.content)
                    return self._deny_request('InvalidURI')(
                        environ, start_response)
        req.headers.update(headers)
        tenant_to_connect = force_tenant or tenant['id']
        if six.PY2 and isinstance(tenant_to_connect, six.text_type):
            tenant_to_connect = tenant_to_connect.encode('utf-8')
        self._logger.debug('Connecting with tenant: %s', tenant_to_connect)
        # Rewrite the account component of the path to the reseller-prefixed
        # tenant, e.g. AUTH_<tenant_id>, for the downstream auth middleware.
        new_tenant_name = '%s%s' % (self._reseller_prefix, tenant_to_connect)
        environ['PATH_INFO'] = environ['PATH_INFO'].replace(account,
                                                            new_tenant_name)
        return self._app(environ, start_response)
def filter_factory(global_conf, **local_conf):
    """Returns a WSGI filter app for use with paste.deploy.

    Local (per-filter) options override the global paste.deploy options.
    """
    merged_conf = dict(global_conf)
    merged_conf.update(local_conf)

    def auth_filter(app):
        # Wrap the downstream WSGI app with the S3Token middleware.
        return S3Token(app, merged_conf)

    return auth_filter
| openstack/swift | swift/common/middleware/s3api/s3token.py | Python | apache-2.0 | 17,603 |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.lang;
import java.io.Serializable;
/**
 * The Identifiable interface defines a contract for classes whose Object
 * instances can be uniquely identified relative to other Object instances
 * within the same class type hierarchy.
 *
 * @author John Blum
 * @param <T> the class type of the identifier.
 * @see java.lang.Comparable
 * @since 7.0
 */
public interface Identifiable<T extends Comparable<T>> extends Serializable {

    /**
     * Gets the identifier uniquely identifying this Object instance.
     *
     * @return an identifier uniquely identifying this Object.
     */
    // NOTE: interface methods are implicitly public, so the redundant
    // modifier has been dropped (Checkstyle: RedundantModifier).
    T getId();
}
| papicella/snappy-store | gemfire-core/src/main/java/com/gemstone/gemfire/lang/Identifiable.java | Java | apache-2.0 | 1,301 |