repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
Imkal/Book-Catalogue | src/com/eleybourn/bookcatalogue/datamanager/BooleanValidator.java | 1973 | /*
* @copyright 2013 Philip Warner
* @license GNU General Public License
*
* This file is part of Book Catalogue.
*
* Book Catalogue is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Book Catalogue is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Book Catalogue. If not, see <http://www.gnu.org/licenses/>.
*/
package com.eleybourn.bookcatalogue.datamanager;
import com.eleybourn.bookcatalogue.R;
import com.eleybourn.bookcatalogue.utils.Utils;
/**
* Validator to apply a default value and validate as Boolean
*
* @author Philip Warner
*
*/
public class BooleanValidator extends DefaultFieldValidator {

    BooleanValidator() {
        super();
    }

    BooleanValidator(String defaultValue) {
        super(defaultValue);
    }

    /**
     * Validates the datum as a Boolean and normalizes the stored value:
     * Booleans are kept as-is, Integers are treated as "non-zero means true",
     * and any other value is parsed from its string form.
     *
     * @throws ValidatorException if the value cannot be interpreted as a Boolean
     */
    @Override
    public void validate(DataManager data, Datum datum, boolean crossValidating) {
        // Invisible fields require no validation.
        if (!datum.isVisible()) {
            return;
        }
        // This validator has nothing to check during cross-validation.
        if (crossValidating) {
            return;
        }
        // Apply the default value first; will throw on failure.
        super.validate(data, datum, crossValidating);
        try {
            final Object rawValue = data.get(datum);
            final Boolean parsed;
            if (rawValue instanceof Boolean) {
                parsed = (Boolean) rawValue;
            } else if (rawValue instanceof Integer) {
                parsed = ((Integer) rawValue) != 0;
            } else {
                parsed = Utils.stringToBoolean(rawValue.toString(), true);
            }
            // Store the normalized Boolean back into the data manager.
            data.putBoolean(datum, parsed);
        } catch (Exception e) {
            throw new ValidatorException(R.string.vldt_boolean_expected, new Object[]{datum.getKey()});
        }
    }
} | gpl-3.0 |
jtux270/translate | ovirt/backend/manager/modules/restapi/types/src/main/java/org/ovirt/engine/api/restapi/types/GroupMapper.java | 2642 | package org.ovirt.engine.api.restapi.types;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.api.model.Domain;
import org.ovirt.engine.api.model.Group;
import org.ovirt.engine.api.restapi.utils.DirectoryEntryIdUtils;
import org.ovirt.engine.api.restapi.utils.GuidUtils;
import org.ovirt.engine.core.aaa.DirectoryGroup;
import org.ovirt.engine.core.common.businessentities.aaa.DbGroup;
public class GroupMapper {

    /**
     * Builds a Domain model for the given name; the id is the encoded form of
     * the name itself.
     */
    private static Domain toDomainModel(String domainName) {
        Domain domain = new Domain();
        domain.setName(domainName);
        domain.setId(DirectoryEntryIdUtils.encode(domain.getName()));
        return domain;
    }

    /** Maps a database group entity onto the REST Group model. */
    @Mapping(from = DbGroup.class, to = Group.class)
    public static Group map(DbGroup entity, Group template) {
        Group group = (template == null) ? new Group() : template;
        group.setName(entity.getName());
        group.setId(entity.getId().toString());
        String domainName = entity.getDomain();
        if (!StringUtils.isEmpty(domainName)) {
            group.setDomain(toDomainModel(domainName));
        }
        group.setDomainEntryId(DirectoryEntryIdUtils.encode(entity.getExternalId()));
        group.setNamespace(entity.getNamespace());
        return group;
    }

    /** Maps a directory group entity onto the REST Group model. */
    @Mapping(from = DirectoryGroup.class, to = Group.class)
    public static Group map(DirectoryGroup entity, Group template) {
        Group group = (template == null) ? new Group() : template;
        group.setName(entity.getName());
        if (!StringUtils.isEmpty(entity.getDirectoryName())) {
            group.setDomain(toDomainModel(entity.getDirectoryName()));
        }
        group.setId(DirectoryEntryIdUtils.encode(entity.getId()));
        group.setNamespace(entity.getNamespace());
        return group;
    }

    /** Maps a REST Group model back onto a database group entity; only set fields are copied. */
    @Mapping(from = Group.class, to = DbGroup.class)
    public static DbGroup map(Group model, DbGroup template) {
        DbGroup dbGroup = (template == null) ? new DbGroup() : template;
        if (model.isSetName()) {
            dbGroup.setName(model.getName());
        }
        if (model.isSetId()) {
            dbGroup.setId(GuidUtils.asGuid(model.getId()));
        }
        if (model.isSetDomain() && model.getDomain().isSetName()) {
            dbGroup.setDomain(model.getDomain().getName());
        }
        if (model.isSetDomainEntryId()) {
            dbGroup.setExternalId(DirectoryEntryIdUtils.decode(model.getDomainEntryId()));
        }
        if (model.isSetNamespace()) {
            dbGroup.setNamespace(model.getNamespace());
        }
        return dbGroup;
    }
}
| gpl-3.0 |
dalaro/incubator-tinkerpop | gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/step/map/CoalesceStep.java | 3431 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal.step.map;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalParent;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.TraverserRequirement;
import org.apache.tinkerpop.gremlin.structure.util.StringFactory;
import org.apache.tinkerpop.gremlin.util.iterator.EmptyIterator;
import java.util.*;
/**
* @author Daniel Kuppitz (http://gremlin.guru)
*/
public final class CoalesceStep<S, E> extends FlatMapStep<S, E> implements TraversalParent {

    // Child traversals evaluated in declaration order; the first one that
    // yields any result supplies this step's output.
    private List<Traversal.Admin<S, E>> coalesceTraversals;

    @SafeVarargs
    public CoalesceStep(final Traversal.Admin traversal, final Traversal.Admin<S, E>... coalesceTraversals) {
        super(traversal);
        this.coalesceTraversals = Arrays.asList(coalesceTraversals);
        // Register each child with this parent so requirements/side-effects propagate.
        for (final Traversal.Admin<S, ?> conjunctionTraversal : this.coalesceTraversals) {
            this.integrateChild(conjunctionTraversal);
        }
    }

    /**
     * Feeds a split of the incoming traverser to each child traversal in turn;
     * returns the first child that produces at least one result, or an empty
     * iterator if none do. Each child is reset before being probed.
     */
    @Override
    protected Iterator<E> flatMap(final Traverser.Admin<S> traverser) {
        for (final Traversal.Admin<S, E> coalesceTraversal : this.coalesceTraversals) {
            coalesceTraversal.reset();
            coalesceTraversal.addStart(traverser.asAdmin().split());
            if (coalesceTraversal.hasNext())
                return coalesceTraversal;
        }
        return EmptyIterator.instance();
    }

    @Override
    public Set<TraverserRequirement> getRequirements() {
        return this.getSelfAndChildRequirements();
    }

    @Override
    public List<Traversal.Admin<S, E>> getLocalChildren() {
        return Collections.unmodifiableList(this.coalesceTraversals);
    }

    /** Deep-clones this step, cloning and re-integrating every child traversal. */
    @Override
    public CoalesceStep<S, E> clone() {
        final CoalesceStep<S, E> clone = (CoalesceStep<S, E>) super.clone();
        clone.coalesceTraversals = new ArrayList<>();
        for (final Traversal.Admin<S, ?> conjunctionTraversal : this.coalesceTraversals) {
            clone.coalesceTraversals.add(clone.integrateChild(conjunctionTraversal.clone()));
        }
        return clone;
    }

    @Override
    public String toString() {
        return StringFactory.stepString(this, this.coalesceTraversals);
    }

    /**
     * Hash combines the parent hash with each child's hash rotated by its
     * position, so that child order affects the result.
     */
    @Override
    public int hashCode() {
        int result = super.hashCode(), i = 0;
        for (final Traversal.Admin<S, E> traversal : this.coalesceTraversals) {
            result ^= Integer.rotateLeft(traversal.hashCode(), i++);
        }
        return result;
    }
}
| apache-2.0 |
jagguli/intellij-community | plugins/git4idea/tests/git4idea/test/GitExecutor.java | 5637 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.test;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.Executor;
import git4idea.repo.GitRepository;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertFalse;
/**
*
* @author Kirill Likhodedov
*/
public class GitExecutor extends Executor {

    private static final String GIT_EXECUTABLE_ENV = "IDEA_TEST_GIT_EXECUTABLE";
    private static final String TEAMCITY_GIT_EXECUTABLE_ENV = "TEAMCITY_GIT_PATH";

    // How many times a command is retried when it fails on a stale .git/index.lock.
    private static final int MAX_RETRIES = 3;

    private static boolean myVersionPrinted;

    private static String findGitExecutable() {
        return findExecutable("Git", "git", "git.exe", Arrays.asList(GIT_EXECUTABLE_ENV, TEAMCITY_GIT_EXECUTABLE_ENV));
    }

    //using inner class to avoid extra work during class loading of unrelated tests
    public static class PathHolder {
        public static final String GIT_EXECUTABLE = findGitExecutable();
    }

    public static String git(String command) {
        return git(command, false);
    }

    /**
     * Executes the given git command in the current test directory and returns its stdout.
     * Retries up to {@link #MAX_RETRIES} times when the failure looks like a stale
     * {@code .git/index.lock} file; any other failure is rethrown immediately.
     */
    public static String git(String command, boolean ignoreNonZeroExitCode) {
        printVersionTheFirstTime();
        List<String> split = splitCommandInParameters(command);
        split.add(0, PathHolder.GIT_EXECUTABLE);
        File workingDir = ourCurrentDir();
        debug("[" + workingDir.getName() + "] # git " + command);
        for (int attempt = 0; attempt < MAX_RETRIES; attempt++) {
            String stdout;
            try {
                stdout = run(workingDir, split, ignoreNonZeroExitCode);
                if (!isIndexLockFileError(stdout)) {
                    return stdout;
                }
            }
            catch (ExecutionException e) {
                stdout = e.getOutput();
                if (!isIndexLockFileError(stdout)) {
                    throw e;
                }
            }
            LOG.info("Index lock file error, attempt #" + attempt + ": " + stdout);
        }
        // Bug fix: the message used to contain the literal "$command" — a leftover
        // Groovy interpolation that Java never expands. Concatenate the real command.
        throw new RuntimeException("fatal error during execution of Git command: " + command);
    }

    private static boolean isIndexLockFileError(@NotNull String stdout) {
        return stdout.contains("fatal") && stdout.contains("Unable to create") && stdout.contains(".git/index.lock");
    }

    /** Runs the command inside the given repository's root (if non-null). */
    public static String git(GitRepository repository, String command) {
        if (repository != null) {
            cd(repository);
        }
        return git(command);
    }

    public static String git(String formatString, String... args) {
        return git(String.format(formatString, args));
    }

    public static void cd(GitRepository repository) {
        cd(repository.getRoot().getPath());
    }

    public static void add() {
        add(".");
    }

    public static void add(@NotNull String path) {
        git("add --verbose " + path);
    }

    /** Stages everything and commits; returns the new commit's hash. */
    @NotNull
    public static String addCommit(@NotNull String message) {
        add();
        return commit(message);
    }

    public static void checkout(@NotNull String... params) {
        git("checkout " + StringUtil.join(params, " "));
    }

    /** Commits staged changes and returns the hash of the new commit. */
    public static String commit(@NotNull String message) {
        git("commit -m '" + message + "'");
        return last();
    }

    /** "Touch and commit": writes random content to the file and commits it; returns the commit hash. */
    @NotNull
    public static String tac(@NotNull String file) {
        touch(file, "content" + Math.random());
        return addCommit("touched " + file);
    }

    /** Returns the hash of the current HEAD commit. */
    @NotNull
    public static String last() {
        return git("log -1 --pretty=%H");
    }

    @NotNull
    public static String log(String... params) {
        return git("log " + StringUtil.join(params, " "));
    }

    public static void mv(String fromPath, String toPath) {
        git("mv " + fromPath + " " + toPath);
    }

    public static void mv(File from, File to) {
        mv(from.getPath(), to.getPath());
    }

    // Logs "git version" once per test run, the first time any command executes.
    private static void printVersionTheFirstTime() {
        if (!myVersionPrinted) {
            myVersionPrinted = true;
            git("version");
        }
    }

    @NotNull
    public static TestFile file(@NotNull String fileName) {
        File f = child(fileName);
        return new TestFile(f);
    }

    /** Fluent helper around a file inside the test repository. */
    public static class TestFile {
        @NotNull private final File myFile;

        private TestFile(@NotNull File file) {
            myFile = file;
        }

        /** Appends content to the file, creating it if needed. */
        public TestFile append(@NotNull String content) throws IOException {
            FileUtil.writeToFile(myFile, content.getBytes(), true);
            return this;
        }

        /** Overwrites the file with the given content. */
        public TestFile write(@NotNull String content) throws IOException {
            FileUtil.writeToFile(myFile, content.getBytes(), false);
            return this;
        }

        /** Creates the file with the given content; fails if it already exists. */
        public TestFile create(@NotNull String content) throws IOException {
            assertNotExists();
            FileUtil.writeToFile(myFile, content.getBytes(), false);
            return this;
        }

        public TestFile assertNotExists() {
            assertFalse(myFile.exists());
            return this;
        }

        public TestFile add() {
            GitExecutor.add(myFile.getPath());
            return this;
        }

        public TestFile commit() {
            return commit("Some message");
        }

        public TestFile commit(String message) {
            GitExecutor.commit(message);
            return this;
        }

        public boolean exists() {
            return myFile.exists();
        }
    }
}
| apache-2.0 |
c3bd/cassandraxml | test/unit/org/apache/cassandra/utils/IntervalTest.java | 2440 | package org.apache.cassandra.utils;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.cassandra.utils.IntervalTree.Interval;
import org.junit.Test;
public class IntervalTest extends TestCase
{
    @Test
    public void testEncloses() throws Exception
    {
        Interval zeroToFive = new Interval(0, 5, null);
        Interval zeroToTen = new Interval(0, 10, null);
        Interval fiveToTen = new Interval(5, 10, null);
        Interval zeroToEleven = new Interval(0, 11, null);
        // An interval encloses itself.
        assertTrue(zeroToFive.encloses(zeroToFive));
        assertTrue(zeroToTen.encloses(zeroToFive));
        // A sub-range does not enclose a range extending past its end.
        assertFalse(zeroToFive.encloses(fiveToTen));
        assertTrue(zeroToTen.encloses(fiveToTen));
        assertFalse(zeroToTen.encloses(zeroToEleven));
    }

    @Test
    public void testContains() throws Exception
    {
        Interval zeroToFive = new Interval(0, 5, null);
        // Both endpoints are inclusive.
        assertTrue(zeroToFive.contains(0));
        assertTrue(zeroToFive.contains(5));
        // Points outside either endpoint are excluded.
        assertFalse(zeroToFive.contains(-1));
        assertFalse(zeroToFive.contains(6));
    }

    @Test
    public void testIntersects() throws Exception
    {
        Interval zeroToFive = new Interval(0, 5, null);
        Interval zeroToTen = new Interval(0, 10, null);
        Interval fiveToTen = new Interval(5, 10, null);
        Interval zeroToEleven = new Interval(0, 11, null);
        Interval sixToTwelve = new Interval(6, 12, null);
        assertTrue(zeroToFive.intersects(zeroToTen));
        // Sharing a single endpoint still counts as intersecting.
        assertTrue(zeroToFive.intersects(fiveToTen));
        assertTrue(zeroToFive.intersects(zeroToEleven));
        // Fully disjoint ranges do not intersect.
        assertFalse(zeroToFive.intersects(sixToTwelve));
    }
}
| apache-2.0 |
beebeandwer/TDDL | tddl-group/src/test/java/com/taobao/tddl/group/exception/TAtomDataSourceExceptionUnitTest.java | 699 | package com.taobao.tddl.group.exception;
import org.junit.Assert;
import org.junit.Test;
/**
* @author yangzhu
*/
public class TAtomDataSourceExceptionUnitTest {

    /** Exercises every constructor overload and checks message/cause propagation. */
    @Test
    public void all() {
        Throwable cause = new Throwable();
        String msg = "msg";

        // No-arg constructor: only has to be invocable.
        new TAtomDataSourceException();

        TAtomDataSourceException messageOnly = new TAtomDataSourceException(msg);
        Assert.assertEquals(msg, messageOnly.getMessage());

        TAtomDataSourceException causeOnly = new TAtomDataSourceException(cause);
        Assert.assertEquals(cause, causeOnly.getCause());

        TAtomDataSourceException messageAndCause = new TAtomDataSourceException(msg, cause);
        Assert.assertEquals(msg, messageAndCause.getMessage());
        Assert.assertEquals(cause, messageAndCause.getCause());
    }
}
| apache-2.0 |
cpsing/tddl | tddl-rule/src/test/java/com/taobao/tddl/rule/TestUtils.java | 4379 | package com.taobao.tddl.rule;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import com.taobao.tddl.rule.model.sqljep.Comparative;
import com.taobao.tddl.rule.model.sqljep.ComparativeAND;
import com.taobao.tddl.rule.model.sqljep.ComparativeOR;
public class TestUtils {

    // Short aliases for the comparison-operator codes defined on Comparative.
    public static final int GreaterThan = Comparative.GreaterThan;
    public static final int GreaterThanOrEqual = Comparative.GreaterThanOrEqual;
    public static final int Equivalent = Comparative.Equivalent;
    public static final int NotEquivalent = Comparative.NotEquivalent;
    public static final int LessThan = Comparative.LessThan;
    public static final int LessThanOrEqual = Comparative.LessThanOrEqual;

    /**
     * Combines {@code target} into {@code parent} with OR semantics: an existing
     * ComparativeOR parent is reused and extended; otherwise both operands are
     * wrapped in a new ComparativeOR.
     */
    public static Comparative gor(Comparative parent, Comparative target) {
        if (parent == null) {
            ComparativeOR or = new ComparativeOR();
            or.addComparative(target);
            return or;
        } else {
            if (parent instanceof ComparativeOR) {
                ((ComparativeOR) parent).addComparative(target);
                return parent;
            } else {
                ComparativeOR or = new ComparativeOR();
                or.addComparative(parent);
                or.addComparative(target);
                return or;
            }
        }
    }

    /**
     * Combines {@code target} into {@code parent} with AND semantics. An existing
     * ComparativeAND is reused only while it holds a single operand; once it has
     * two, a new ComparativeAND is created wrapping the old one and the target,
     * so the tree stays binary at each level.
     */
    public static Comparative gand(Comparative parent, Comparative target) {
        if (parent == null) {
            ComparativeAND and = new ComparativeAND();
            and.addComparative(target);
            return and;
        } else {
            if (parent instanceof ComparativeAND) {
                ComparativeAND and = ((ComparativeAND) parent);
                if (and.getList().size() == 1) {
                    and.addComparative(target);
                    return and;
                } else {
                    ComparativeAND andNew = new ComparativeAND();
                    andNew.addComparative(and);
                    andNew.addComparative(target);
                    return andNew;
                }
            } else {
                ComparativeAND and = new ComparativeAND();
                and.addComparative(parent);
                and.addComparative(target);
                return and;
            }
        }
    }

    /** Wraps a plain value and an operator code into a leaf Comparative. */
    public static Comparative gcomp(Comparable comp, int sym) {
        return new Comparative(sym, comp);
    }

    /** Asserts that {@code beTestedSet} contains exactly the values in {@code target}. */
    public static void testSet(Object[] target, Set<? extends Object> beTestedSet) {
        assertEquals(target.length, beTestedSet.size());
        int index = 0;
        for (Object obj : target) {
            assertTrue("index:" + String.valueOf(index) + "-value:" + obj + "|set:" + beTestedSet,
                beTestedSet.contains(obj));
            index++;
        }
    }

    /**
     * Asserts that the two date collections are equal after formatting every value
     * with "yyyy-MM-dd kk:mm:ss" (kk = 1-24 hour-of-day), i.e. the comparison is
     * done at second precision via the formatted strings.
     */
    public static void testSetDate(Date[] target, Set<Object> beTestedSet) {
        assertEquals(target.length, beTestedSet.size());
        Set<String> dateStr = new HashSet<String>();
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd kk:mm:ss");
        // Format the set under test.
        for (Object date : beTestedSet) {
            String formated = format.format(((Date) date));
            dateStr.add(formated);
        }
        // Format the expected dates.
        Set<String> targetDateString = new HashSet<String>();
        for (Date date : target) {
            String formated = format.format(((Date) date));
            targetDateString.add(formated);
        }
        // Every formatted value from the tested set must match some expected value;
        // matched entries are removed as they are found.
        int index = 0;
        Iterator<String> strs = dateStr.iterator();
        while (strs.hasNext()) {
            boolean isTrue = false;
            StringBuilder sb = new StringBuilder();
            String str = strs.next();
            sb.append(str).append("|");
            for (String obj : targetDateString) {
                if (str.trim().equals(obj.trim())) {
                    strs.remove();
                    isTrue = true;
                }
            }
            assertTrue("index:" + String.valueOf(index) + "-value:" + targetDateString + " target:" + sb.toString(),
                isTrue);
            index++;
        }
    }
}
| apache-2.0 |
hubaoyu/google-io-2014-compat | app/src/main/java/com/example/android/io2014/ui/Utils.java | 3449 | /**
* Copyright 2014 Rahul Parsani
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.io2014.ui;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.view.View;
import android.view.ViewTreeObserver;
/**
 * Static helpers for sampled bitmap decoding and for view operations that need
 * different implementations depending on the platform version.
 */
public class Utils {

    /**
     * Computes the largest power-of-two {@code inSampleSize} such that the decoded
     * dimensions stay at or above the requested width and height.
     */
    public static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
        // Raw height and width of image
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;
        if (height > reqHeight || width > reqWidth) {
            final int halfHeight = height / 2;
            final int halfWidth = width / 2;
            // Calculate the largest inSampleSize value that is a power of 2 and keeps both
            // height and width larger than the requested height and width.
            while ((halfHeight / inSampleSize) > reqHeight && (halfWidth / inSampleSize) > reqWidth) {
                inSampleSize *= 2;
            }
        }
        return inSampleSize;
    }

    /**
     * Decodes a bitmap resource downsampled to approximately the requested size,
     * to avoid loading the full-resolution image into memory.
     */
    public static Bitmap decodeSampledBitmapFromResource(Resources res, int resId, int reqWidth, int reqHeight) {
        // First decode with inJustDecodeBounds=true to check dimensions
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeResource(res, resId, options);
        // Calculate inSampleSize
        options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
        // Decode bitmap with inSampleSize set
        options.inJustDecodeBounds = false;
        return BitmapFactory.decodeResource(res, resId, options);
    }

    /** Removes a global-layout listener using the non-deprecated API where available. */
    @SuppressWarnings("deprecation")
    public static void removeOnGlobalLayoutListenerCompat(View v, ViewTreeObserver.OnGlobalLayoutListener listener) {
        if (hasJellyBean()) {
            v.getViewTreeObserver().removeOnGlobalLayoutListener(listener);
        } else {
            v.getViewTreeObserver().removeGlobalOnLayoutListener(listener);
        }
    }

    /** Sets a view background using the non-deprecated API where available. */
    @SuppressWarnings("deprecation")
    public static void setBackgroundCompat(View v, Drawable drawable) {
        if (hasJellyBean()) {
            v.setBackground(drawable);
        } else {
            v.setBackgroundDrawable(drawable);
        }
    }

    /**
     * Detects whether the device's platform version is Jelly Bean (API 16) or
     * later. (The previous javadoc incorrectly said "Lollipop".)
     */
    public static boolean hasJellyBean() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN;
    }

    /**
     * Detects whether the device's platform version is Lollipop (API 21) or later.
     */
    public static boolean hasLollipop() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
    }
}
| apache-2.0 |
nwnpallewela/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/commands/ForEachMediatorTargetOutputConnectorCreateCommand.java | 2972 | package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gmf.runtime.common.core.command.CommandResult;
import org.eclipse.gmf.runtime.common.core.command.ICommand;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.emf.type.core.commands.EditElementCommand;
import org.eclipse.gmf.runtime.emf.type.core.requests.ConfigureRequest;
import org.eclipse.gmf.runtime.emf.type.core.requests.CreateElementRequest;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.ForEachMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.ForEachMediatorTargetOutputConnector;
/**
* @generated
*/
/**
 * GMF edit command that creates the single target output connector owned by a
 * ForEachMediator and runs the element type's configure command on it.
 *
 * @generated
 */
public class ForEachMediatorTargetOutputConnectorCreateCommand extends EditElementCommand {
    /**
     * @generated
     */
    public ForEachMediatorTargetOutputConnectorCreateCommand(CreateElementRequest req) {
        super(req.getLabel(), null, req);
    }

    /**
     * Resolves the container of the create request, unwrapping a notation View
     * to its semantic element when necessary.
     *
     * FIXME: replace with setElementToEdit()
     * @generated
     */
    protected EObject getElementToEdit() {
        EObject container = ((CreateElementRequest) getRequest()).getContainer();
        if (container instanceof View) {
            container = ((View) container).getElement();
        }
        return container;
    }

    /**
     * A ForEachMediator may own at most one target output connector, so the
     * command is only executable while that slot is still empty.
     *
     * @generated
     */
    public boolean canExecute() {
        ForEachMediator container = (ForEachMediator) getElementToEdit();
        if (container.getTargetOutputConnector() != null) {
            return false;
        }
        return true;
    }

    /**
     * Creates the connector, attaches it to the owner, configures it, and
     * records it as the request's new element.
     *
     * @generated
     */
    protected CommandResult doExecuteWithResult(IProgressMonitor monitor, IAdaptable info) throws ExecutionException {
        ForEachMediatorTargetOutputConnector newElement = EsbFactory.eINSTANCE
            .createForEachMediatorTargetOutputConnector();
        ForEachMediator owner = (ForEachMediator) getElementToEdit();
        owner.setTargetOutputConnector(newElement);
        doConfigure(newElement, monitor, info);
        ((CreateElementRequest) getRequest()).setNewElement(newElement);
        return CommandResult.newOKCommandResult(newElement);
    }

    /**
     * Delegates further initialization of the new connector to the element
     * type's configure command, if one is provided and executable.
     *
     * @generated
     */
    protected void doConfigure(ForEachMediatorTargetOutputConnector newElement, IProgressMonitor monitor,
        IAdaptable info) throws ExecutionException {
        IElementType elementType = ((CreateElementRequest) getRequest()).getElementType();
        ConfigureRequest configureRequest = new ConfigureRequest(getEditingDomain(), newElement, elementType);
        configureRequest.setClientContext(((CreateElementRequest) getRequest()).getClientContext());
        configureRequest.addParameters(getRequest().getParameters());
        ICommand configureCommand = elementType.getEditCommand(configureRequest);
        if (configureCommand != null && configureCommand.canExecute()) {
            configureCommand.execute(monitor, info);
        }
    }
}
| apache-2.0 |
robin13/elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java | 5845 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters.InternalBucket;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.sameInstance;
public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase<InternalFilters> {

    // Randomized per test run: whether the filters aggregation uses explicit keys.
    private boolean keyed;
    // One key per bucket; either random strings (keyed) or "0", "1", ... (anonymous).
    private List<String> keys;

    @Override
    public void setUp() throws Exception {
        super.setUp();
        keyed = randomBoolean();
        keys = new ArrayList<>();
        int numBuckets = randomNumberOfBuckets();
        for (int i = 0; i < numBuckets; i++) {
            if (keyed) {
                keys.add(randomAlphaOfLength(5));
            } else {
                // this is what the FiltersAggregationBuilder ctor does when not providing KeyedFilter
                keys.add(String.valueOf(i));
            }
        }
    }

    /** Builds an InternalFilters with one randomly-counted bucket per prepared key. */
    @Override
    protected InternalFilters createTestInstance(String name, Map<String, Object> metadata, InternalAggregations aggregations) {
        final List<InternalFilters.InternalBucket> buckets = new ArrayList<>();
        for (int i = 0; i < keys.size(); ++i) {
            String key = keys.get(i);
            int docCount = randomIntBetween(0, 1000);
            buckets.add(new InternalFilters.InternalBucket(key, docCount, aggregations, keyed));
        }
        return new InternalFilters(name, buckets, keyed, metadata);
    }

    /** Reduction must sum doc counts per bucket key across all input aggregations. */
    @Override
    protected void assertReduced(InternalFilters reduced, List<InternalFilters> inputs) {
        final Map<String, Long> expectedCounts = new TreeMap<>();
        for (InternalFilters input : inputs) {
            for (InternalFilters.InternalBucket bucket : input.getBuckets()) {
                expectedCounts.compute(bucket.getKeyAsString(),
                    (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
            }
        }
        final Map<String, Long> actualCounts = new TreeMap<>();
        for (InternalFilters.InternalBucket bucket : reduced.getBuckets()) {
            actualCounts.compute(bucket.getKeyAsString(),
                (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
        }
        assertEquals(expectedCounts, actualCounts);
    }

    @Override
    protected Class<ParsedFilters> implementationClass() {
        return ParsedFilters.class;
    }

    /** Produces a copy differing in exactly one of: name, buckets, or metadata. */
    @Override
    protected InternalFilters mutateInstance(InternalFilters instance) {
        String name = instance.getName();
        List<InternalBucket> buckets = instance.getBuckets();
        Map<String, Object> metadata = instance.getMetadata();
        switch (between(0, 2)) {
        case 0:
            name += randomAlphaOfLength(5);
            break;
        case 1:
            buckets = new ArrayList<>(buckets);
            buckets.add(new InternalFilters.InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed));
            break;
        case 2:
        default:
            if (metadata == null) {
                metadata = new HashMap<>(1);
            } else {
                metadata = new HashMap<>(instance.getMetadata());
            }
            metadata.put(randomAlphaOfLength(15), randomInt());
            break;
        }
        return new InternalFilters(name, buckets, keyed, metadata);
    }

    // With no pipeline aggregations, reducePipelines must return the same instance.
    public void testReducePipelinesReturnsSameInstanceWithoutPipelines() {
        InternalFilters test = createTestInstance();
        assertThat(test.reducePipelines(test, emptyReduceContextBuilder().forFinalReduction(), PipelineTree.EMPTY), sameInstance(test));
    }

    public void testReducePipelinesReducesBucketPipelines() {
        /*
         * Tests that a pipeline buckets by creating a mock pipeline that
         * replaces "inner" with "dummy".
         */
        InternalFilters dummy = createTestInstance();
        InternalFilters inner = createTestInstance();
        InternalAggregations sub = InternalAggregations.from(List.of(inner));
        InternalFilters test = createTestInstance("test", emptyMap(), sub);
        // Mock pipeline: its reduce step unconditionally returns "dummy".
        PipelineAggregator mockPipeline = new PipelineAggregator(null, null, null) {
            @Override
            public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
                return dummy;
            }
        };
        PipelineTree tree = new PipelineTree(Map.of(inner.getName(), new PipelineTree(emptyMap(), List.of(mockPipeline))), emptyList());
        InternalFilters reduced = (InternalFilters) test.reducePipelines(test, emptyReduceContextBuilder().forFinalReduction(), tree);
        // Every bucket's sub-aggregation named like "dummy" must now be the dummy instance.
        for (InternalFilters.InternalBucket bucket : reduced.getBuckets()) {
            assertThat(bucket.getAggregations().get(dummy.getName()), sameInstance(dummy));
        }
    }
}
| apache-2.0 |
nikhilvibhav/camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedPollEnricher.java | 4822 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.management.mbean;
import java.util.Map;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeDataSupport;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;
import javax.management.openmbean.TabularDataSupport;
import org.apache.camel.CamelContext;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.api.management.mbean.CamelOpenMBeanTypes;
import org.apache.camel.api.management.mbean.ManagedPollEnricherMBean;
import org.apache.camel.model.PollEnrichDefinition;
import org.apache.camel.processor.PollEnricher;
import org.apache.camel.spi.EndpointUtilizationStatistics;
import org.apache.camel.spi.ManagementStrategy;
import org.apache.camel.util.URISupport;
@ManagedResource(description = "Managed PollEnricher")
public class ManagedPollEnricher extends ManagedProcessor implements ManagedPollEnricherMBean {

    /** The managed {@link PollEnricher} processor. */
    private final PollEnricher processor;
    /** The poll expression (endpoint URI), possibly masked. */
    private String uri;
    /** Whether URIs should be masked to hide sensitive details (e.g. passwords). */
    private boolean sanitize;

    public ManagedPollEnricher(CamelContext context, PollEnricher processor, PollEnrichDefinition definition) {
        super(context, processor, definition);
        this.processor = processor;
    }

    @Override
    public void init(ManagementStrategy strategy) {
        super.init(strategy);
        // getMask() may return null; read it once and treat null as "no masking"
        // (the previous ternary evaluated getMask() twice)
        Boolean mask = strategy.getManagementAgent().getMask();
        sanitize = mask != null && mask;
        uri = getDefinition().getExpression().getExpression();
        if (sanitize) {
            uri = URISupport.sanitizeUri(uri);
        }
    }

    @Override
    public void reset() {
        super.reset();
        // also clear the collected endpoint utilization statistics, when present
        if (processor.getEndpointUtilizationStatistics() != null) {
            processor.getEndpointUtilizationStatistics().clear();
        }
    }

    @Override
    public Boolean getSupportExtendedInformation() {
        return true;
    }

    @Override
    public PollEnrichDefinition getDefinition() {
        return (PollEnrichDefinition) super.getDefinition();
    }

    @Override
    public PollEnricher getProcessor() {
        return processor;
    }

    @Override
    public String getExpressionLanguage() {
        return getDefinition().getExpression().getLanguage();
    }

    @Override
    public String getExpression() {
        // returns the (possibly sanitized) URI captured during init
        return uri;
    }

    @Override
    public Long getTimeout() {
        return processor.getTimeout();
    }

    @Override
    public Integer getCacheSize() {
        return processor.getCacheSize();
    }

    @Override
    public Boolean isIgnoreInvalidEndpoint() {
        return processor.isIgnoreInvalidEndpoint();
    }

    @Override
    public Boolean isAggregateOnException() {
        return processor.isAggregateOnException();
    }

    @Override
    public TabularData extendedInformation() {
        try {
            // expose endpoint utilization statistics (endpoint url -> hit count) as JMX tabular data
            TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.endpointsUtilizationTabularType());
            EndpointUtilizationStatistics stats = processor.getEndpointUtilizationStatistics();
            if (stats != null) {
                for (Map.Entry<String, Long> entry : stats.getStatistics().entrySet()) {
                    CompositeType ct = CamelOpenMBeanTypes.endpointsUtilizationCompositeType();
                    String url = entry.getKey();
                    if (sanitize) {
                        url = URISupport.sanitizeUri(url);
                    }
                    Long hits = entry.getValue();
                    if (hits == null) {
                        hits = 0L;
                    }
                    CompositeData data
                            = new CompositeDataSupport(ct, new String[] { "url", "hits" }, new Object[] { url, hits });
                    answer.put(data);
                }
            }
            return answer;
        } catch (Exception e) {
            throw RuntimeCamelException.wrapRuntimeCamelException(e);
        }
    }
}
| apache-2.0 |
nectoc/analytics-apim | product/integration/tests-common/integration-test-utils/src/main/java/org/wso2/analytics/apim/analytics/rest/beans/ResponseBean.java | 2188 | /**
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.analytics.apim.analytics.rest.beans;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
 * JAXB-serializable bean describing the outcome of an analytics REST call:
 * a mandatory status string plus an optional human-readable message.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(propOrder = { "status", "message" })
@XmlRootElement(name = "response")
public class ResponseBean {

    // Outcome indicator; marked required in the XML payload.
    @XmlElement(required = true)
    private String status;

    // Optional detail message accompanying the status.
    @XmlElement(required = false)
    private String message;

    /** No-arg constructor required by JAXB. */
    public ResponseBean() {
    }

    /**
     * Creates a response carrying only a status.
     *
     * @param status the status value
     */
    public ResponseBean(String status) {
        this.status = status;
    }

    /**
     * Creates a response carrying both a status and a detail message.
     *
     * @param status  the status value
     * @param message the detail message
     */
    public ResponseBean(String status, String message) {
        this(status);
        this.message = message;
    }

    /**
     * Returns the status value.
     *
     * @return the status
     */
    public String getStatus() {
        return status;
    }

    /**
     * Replaces the status value.
     *
     * @param status the new status
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Returns the detail message, or {@code null} if none was set.
     *
     * @return the message
     */
    public String getMessage() {
        return message;
    }

    /**
     * Replaces the detail message.
     *
     * @param message the new message
     */
    public void setMessage(String message) {
        this.message = message;
    }
}
| apache-2.0 |
hequn8128/flink | flink-kubernetes/src/main/java/org/apache/flink/kubernetes/kubeclient/FlinkPod.java | 2441 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.kubernetes.kubeclient;
import io.fabric8.kubernetes.api.model.Container;
import io.fabric8.kubernetes.api.model.ContainerBuilder;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.PodBuilder;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * A collection of variables that composes a JobManager/TaskManager Pod. This can include
 * the Pod, the main Container, and the InitContainer, etc.
 */
public class FlinkPod {

	private final Pod pod;

	private final Container mainContainer;

	public FlinkPod(Pod pod, Container mainContainer) {
		this.pod = pod;
		this.mainContainer = mainContainer;
	}

	/** Returns the Kubernetes Pod definition. */
	public Pod getPod() {
		return pod;
	}

	/** Returns the main (Flink) container of the pod. */
	public Container getMainContainer() {
		return mainContainer;
	}

	/**
	 * Builder for creating a {@link FlinkPod}.
	 */
	public static class Builder {

		private Pod pod;
		private Container mainContainer;

		public Builder() {
			// Start from an empty pod (blank metadata + spec sections) and an
			// empty main container; callers refine both via the with* methods.
			final PodBuilder podBuilder = new PodBuilder();
			podBuilder.withNewMetadata().endMetadata();
			podBuilder.withNewSpec().endSpec();
			this.pod = podBuilder.build();
			this.mainContainer = new ContainerBuilder().build();
		}

		public Builder(FlinkPod flinkPod) {
			// Seed the builder from an existing FlinkPod; all parts must be non-null.
			checkNotNull(flinkPod);
			this.pod = checkNotNull(flinkPod.getPod());
			this.mainContainer = checkNotNull(flinkPod.getMainContainer());
		}

		/** Replaces the pod definition. */
		public Builder withPod(Pod pod) {
			this.pod = checkNotNull(pod);
			return this;
		}

		/** Replaces the main container definition. */
		public Builder withMainContainer(Container mainContainer) {
			this.mainContainer = checkNotNull(mainContainer);
			return this;
		}

		/** Assembles the immutable {@link FlinkPod}. */
		public FlinkPod build() {
			return new FlinkPod(this.pod, this.mainContainer);
		}
	}
}
| apache-2.0 |
wudingli/openfire | src/plugins/fastpath/src/java/org/jivesoftware/xmpp/workgroup/MessageHandler.java | 2073 | /**
* $RCSfile$
* $Revision: 18992 $
* $Date: 2005-06-06 16:20:13 -0700 (Mon, 06 Jun 2005) $
*
* Copyright (C) 2004-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.xmpp.workgroup;
import org.jivesoftware.xmpp.workgroup.chatbot.Chatbot;
import org.jivesoftware.xmpp.workgroup.chatbot.ChatbotSession;
import org.xmpp.packet.Message;
/**
 * <p>The Workgroup's message handler processes all incoming message packets sent to the workgroup.</p>
 *
 * <p>Messages without a body are silently dropped; all other messages are forwarded to the
 * workgroup's chatbot, when one is configured.</p>
 *
 * @author Derek DeMoro
 */
public class MessageHandler {

    /** The workgroup whose incoming messages this handler dispatches. */
    private final Workgroup workgroup;

    public MessageHandler(Workgroup workgroup) {
        this.workgroup = workgroup;
    }

    /**
     * Processes an incoming message packet addressed to the workgroup.
     *
     * @param packet the received message
     */
    public void process(Message packet) {
        if (packet.getBody() == null) {
            // TODO Handle statistics reported by the agents????
            // ignore this packet
            return;
        }

        // Get the chatbot of the workgroup. It is not mandatory for workgroups to have a chatbot
        // so if no chatbot was defined for the workgroup then do nothing
        Chatbot bot = workgroup.getChatBot();
        if (bot != null) {
            // Get the chatbot session of the user (create one if necessary)
            ChatbotSession session = bot.getSession(packet.getFrom(), true);
            // Let the bot process the received message
            bot.onMessage(session, packet);
        }
    }
}
| apache-2.0 |
dropbox/djinni | test-suite/handwritten-src/java/com/dropbox/djinni/test/WcharTest.java | 617 | package com.dropbox.djinni.test;
import junit.framework.TestCase;
public class WcharTest extends TestCase {
    // Sample strings covering a NUL byte, BMP symbols and surrogate-pair
    // (emoji) code points, used to exercise wide-character marshalling.
    private static final String RECORD_STRING = "some string with unicode \u0000, \u263A, \uD83D\uDCA9 symbols";
    private static final String PLAIN_STRING = "another string with unicode \u263B, \uD83D\uDCA8 symbols";

    public void test() {
        // Round-trip strings in both directions across the generated bindings.
        WcharTestRec record = WcharTestHelpers.getRecord();
        assertEquals(record.getS(), RECORD_STRING);
        assertEquals(WcharTestHelpers.getString(), PLAIN_STRING);
        assertTrue(WcharTestHelpers.checkString(PLAIN_STRING));
        assertTrue(WcharTestHelpers.checkRecord(new WcharTestRec(RECORD_STRING)));
    }
}
| apache-2.0 |
samaitra/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheInvokeResult.java | 3794 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.EntryProcessorResult;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.IgniteException;
import org.apache.ignite.internal.UnregisteredBinaryTypeException;
import org.apache.ignite.internal.UnregisteredClassException;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
/**
 * Implementation of {@link EntryProcessorResult}.
 */
public class CacheInvokeResult<T> implements EntryProcessorResult<T>, Externalizable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Computed result; {@code null} when the processor failed. */
    @GridToStringInclude(sensitive = true)
    private T res;

    /** Error thrown by the entry processor, mutually exclusive with {@link #res}. */
    private Exception err;

    /**
     * Empty constructor required by {@link Externalizable}.
     */
    public CacheInvokeResult() {
        // No-op.
    }

    /**
     * Static constructor.
     *
     * @param res Computed result.
     * @return New instance.
     */
    public static <T> CacheInvokeResult<T> fromResult(T res) {
        CacheInvokeResult<T> cacheRes = new CacheInvokeResult<>();

        cacheRes.res = res;

        return cacheRes;
    }

    /**
     * @return Result.
     */
    public T result() {
        return res;
    }

    /**
     * @return Entry processor error, or {@code null} if the processor succeeded.
     */
    public Exception error() {
        return err;
    }

    /**
     * Static constructor.
     *
     * @param err Exception thrown by {@link EntryProcessor#process(MutableEntry, Object...)}.
     * @return New instance.
     */
    public static <T> CacheInvokeResult<T> fromError(Exception err) {
        assert err != null;

        CacheInvokeResult<T> res = new CacheInvokeResult<>();

        res.err = err;

        return res;
    }

    /** {@inheritDoc} */
    @Override public T get() throws EntryProcessorException {
        if (err != null) {
            // Binary metadata/class registration errors must propagate unchanged
            // (not wrapped) — presumably so the caller can register and retry.
            if (err instanceof UnregisteredClassException || err instanceof UnregisteredBinaryTypeException)
                throw (IgniteException) err;

            if (err instanceof EntryProcessorException)
                throw (EntryProcessorException)err;

            throw new EntryProcessorException(err);
        }

        return res;
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(res);
        out.writeObject(err);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked") // readObject() returns Object; T is erased at runtime.
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        res = (T)in.readObject();
        err = (Exception)in.readObject();
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(CacheInvokeResult.class, this);
    }
}
| apache-2.0 |
s1monw/elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java | 9339 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Constant;
import org.elasticsearch.painless.Def;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Parameter;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.WriterConstants;
import org.elasticsearch.painless.node.SSource.Reserved;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Opcodes;
import java.lang.invoke.MethodType;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE;
/**
 * Represents a user-defined function.
 */
public final class SFunction extends AStatement {

    /**
     * Per-function book-keeping collected while parsing: which variables the
     * function body referenced, and the configured loop-counter limit used
     * for infinite-loop protection.
     */
    public static final class FunctionReserved implements Reserved {
        private final Set<String> usedVariables = new HashSet<>();
        private int maxLoopCounter = 0;

        @Override
        public void markUsedVariable(String name) {
            usedVariables.add(name);
        }

        @Override
        public Set<String> getUsedVariables() {
            return unmodifiableSet(usedVariables);
        }

        @Override
        public void addUsedVariables(FunctionReserved reserved) {
            usedVariables.addAll(reserved.getUsedVariables());
        }

        @Override
        public void setMaxLoopCounter(int max) {
            maxLoopCounter = max;
        }

        @Override
        public int getMaxLoopCounter() {
            return maxLoopCounter;
        }
    }

    final FunctionReserved reserved;
    private final String rtnTypeStr;          // return type name exactly as written in the script
    public final String name;
    private final List<String> paramTypeStrs; // parameter type names, parallel to paramNameStrs
    private final List<String> paramNameStrs;
    private final List<AStatement> statements;
    public final boolean synthetic;           // true for compiler-generated functions

    // Resolved by generateSignature(...) once type names are looked up in the Definition.
    Class<?> rtnType = null;
    List<Parameter> parameters = new ArrayList<>();
    Method method = null;

    // Loop-counter variable; only assigned when loop protection is enabled (see analyze).
    private Variable loop = null;

    public SFunction(FunctionReserved reserved, Location location, String rtnType, String name,
                     List<String> paramTypes, List<String> paramNames, List<AStatement> statements,
                     boolean synthetic) {
        super(location);

        this.reserved = Objects.requireNonNull(reserved);
        this.rtnTypeStr = Objects.requireNonNull(rtnType);
        this.name = Objects.requireNonNull(name);
        this.paramTypeStrs = Collections.unmodifiableList(paramTypes);
        this.paramNameStrs = Collections.unmodifiableList(paramNames);
        this.statements = Collections.unmodifiableList(statements);
        this.synthetic = synthetic;
    }

    @Override
    void extractVariables(Set<String> variables) {
        // we should never be extracting from a function, as functions are top-level!
        throw new IllegalStateException("Illegal tree structure");
    }

    /**
     * Resolves the textual return/parameter type names against the whitelist
     * {@link Definition} and builds the ASM method descriptor for this function.
     * Must run before {@link #analyze} / {@link #write}.
     */
    void generateSignature(Definition definition) {
        try {
            rtnType = Definition.TypeToClass(definition.getType(rtnTypeStr));
        } catch (IllegalArgumentException exception) {
            throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "]."));
        }

        if (paramTypeStrs.size() != paramNameStrs.size()) {
            throw createError(new IllegalStateException("Illegal tree structure."));
        }

        Class<?>[] paramClasses = new Class<?>[this.paramTypeStrs.size()];
        List<Class<?>> paramTypes = new ArrayList<>();

        for (int param = 0; param < this.paramTypeStrs.size(); ++param) {
            try {
                Class<?> paramType = Definition.TypeToClass(definition.getType(this.paramTypeStrs.get(param)));

                // defClassToObjectClass maps painless def types onto plain Object
                // for the JVM descriptor — TODO confirm against Definition.
                paramClasses[param] = Definition.defClassToObjectClass(paramType);
                paramTypes.add(paramType);
                parameters.add(new Parameter(location, paramNameStrs.get(param), paramType));
            } catch (IllegalArgumentException exception) {
                throw createError(new IllegalArgumentException(
                    "Illegal parameter type [" + this.paramTypeStrs.get(param) + "] for function [" + name + "]."));
            }
        }

        // User functions are emitted as private static methods of the script class.
        org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method(
                name, MethodType.methodType(Definition.defClassToObjectClass(rtnType), paramClasses).toMethodDescriptorString());
        this.method = new Method(name, null, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null);
    }

    @Override
    void analyze(Locals locals) {
        if (statements == null || statements.isEmpty()) {
            throw createError(new IllegalArgumentException("Cannot generate an empty function [" + name + "]."));
        }

        // Function body gets its own variable scope.
        locals = Locals.newLocalScope(locals);

        AStatement last = statements.get(statements.size() - 1);

        for (AStatement statement : statements) {
            // Note that we do not need to check after the last statement because
            // there is no statement that can be unreachable after the last.
            if (allEscape) {
                throw createError(new IllegalArgumentException("Unreachable statement."));
            }

            statement.lastSource = statement == last;

            statement.analyze(locals);

            // Track whether every path through the body ends in return/throw.
            methodEscape = statement.methodEscape;
            allEscape = statement.allEscape;
        }

        if (!methodEscape && rtnType != void.class) {
            throw createError(new IllegalArgumentException("Not all paths provide a return value for method [" + name + "]."));
        }

        if (reserved.getMaxLoopCounter() > 0) {
            loop = locals.getVariable(null, Locals.LOOP);
        }
    }

    /** Writes the function to given ClassVisitor. */
    void write (ClassVisitor writer, CompilerSettings settings, Globals globals) {
        int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC;
        if (synthetic) {
            access |= Opcodes.ACC_SYNTHETIC;
        }
        final MethodWriter function = new MethodWriter(access, method.method, writer, globals.getStatements(), settings);
        function.visitCode();
        write(function, globals);
        function.endMethod();
    }

    @Override
    void write(MethodWriter function, Globals globals) {
        if (reserved.getMaxLoopCounter() > 0) {
            // if there is infinite loop protection, we do this once:
            // int #loop = settings.getMaxLoopCounter()
            function.push(reserved.getMaxLoopCounter());
            function.visitVarInsn(Opcodes.ISTORE, loop.getSlot());
        }

        for (AStatement statement : statements) {
            statement.write(function, globals);
        }

        if (!methodEscape) {
            // analyze() already guaranteed this can only happen for void functions.
            if (rtnType == void.class) {
                function.returnValue();
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        // Register a static-field initializer holding a MethodHandle to this
        // function — presumably used to support function references; see Def.
        String staticHandleFieldName = Def.getUserFunctionHandleFieldName(name, parameters.size());
        globals.addConstantInitializer(new Constant(location, WriterConstants.METHOD_HANDLE_TYPE,
                staticHandleFieldName, this::initializeConstant));
    }

    /** Pushes a MethodHandle constant pointing at this (static) function. */
    private void initializeConstant(MethodWriter writer) {
        final Handle handle = new Handle(Opcodes.H_INVOKESTATIC,
                CLASS_TYPE.getInternalName(),
                name,
                method.method.getDescriptor(),
                false);
        writer.push(handle);
    }

    @Override
    public String toString() {
        List<Object> description = new ArrayList<>();
        description.add(rtnTypeStr);
        description.add(name);
        if (false == (paramTypeStrs.isEmpty() && paramNameStrs.isEmpty())) {
            description.add(joinWithName("Args", pairwiseToString(paramTypeStrs, paramNameStrs), emptyList()));
        }
        return multilineToString(description, statements);
    }
}
| apache-2.0 |
awhitford/Resteasy | testsuite/integration-tests/src/test/java/org/jboss/resteasy/test/resource/basic/resource/ResourceLocatorAnnotationFreeSubResource.java | 350 | package org.jboss.resteasy.test.resource.basic.resource;
/**
 * Annotation-free sub-resource used by the resource-locator tests; any JAX-RS
 * metadata comes from the superclass/interface it inherits from.
 */
public class ResourceLocatorAnnotationFreeSubResource extends ResourceLocatorAbstractAnnotationFreeResouce implements ResourceLocatorSubInterface {

    public String post(String s) {
        // Echo the request entity back with a marker prefix.
        String response = "posted: " + s;
        return response;
    }

    public Object getSubSubResource(String id) {
        // No deeper sub-resource is provided.
        return null;
    }
}
| apache-2.0 |
apache/camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SolrComponentBuilderFactory.java | 4606 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.solr.SolrComponent;
/**
* Perform operations against Apache Lucene Solr.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface SolrComponentBuilderFactory {
    /**
     * Solr (camel-solr)
     * Perform operations against Apache Lucene Solr.
     * 
     * Category: monitoring,search
     * Since: 2.9
     * Maven coordinates: org.apache.camel:camel-solr
     * 
     * @return the dsl builder
     */
    static SolrComponentBuilder solr() {
        // DSL entry point: every call returns a fresh, independent builder.
        return new SolrComponentBuilderImpl();
    }
    /**
     * Builder for the Solr component.
     */
    interface SolrComponentBuilder extends ComponentBuilder<SolrComponent> {
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         * 
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default SolrComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
            // stored by option name; later applied via setPropertyOnComponent
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether autowiring is enabled. This is used for automatic autowiring
         * options (the option must be marked as autowired) by looking up in the
         * registry to find if there is a single instance of matching type,
         * which then gets configured on the component. This can be used for
         * automatic configuring JDBC data sources, JMS connection factories,
         * AWS Clients, etc.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: true
         * Group: advanced
         * 
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default SolrComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
            // stored by option name; later applied via setPropertyOnComponent
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }
    }
    class SolrComponentBuilderImpl
            extends
                AbstractComponentBuilder<SolrComponent>
            implements
                SolrComponentBuilder {
        @Override
        protected SolrComponent buildConcreteComponent() {
            // instantiate the actual component this builder configures
            return new SolrComponent();
        }
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            // translate each stored option name into the matching typed setter;
            // unknown names return false (presumably handled by the base class)
            switch (name) {
            case "lazyStartProducer": ((SolrComponent) component).setLazyStartProducer((boolean) value); return true;
            case "autowiredEnabled": ((SolrComponent) component).setAutowiredEnabled((boolean) value); return true;
            default: return false;
            }
        }
    }
} | apache-2.0 |
utah-scs/RAMCloud | bindings/java/src/test/java/edu/stanford/ramcloud/test/TestCluster.java | 1207 | package edu.stanford.ramcloud.test;
import edu.stanford.ramcloud.Util;
public class TestCluster {
    static {
        // Load the JNI library that implements the native methods below.
        Util.loadLibrary("ramcloud_java_test");
    }

    // Addresses of the C++ objects backing this cluster, filled in by
    // createMockCluster. Slot 1 holds the C++ RamCloud client pointer; slot 0
    // is presumably the MockCluster itself — see the C++ side.
    private long[] nativeHandles;

    /**
     * Construct a new TestCluster with one master.
     */
    public TestCluster() {
        nativeHandles = new long[2];
        createMockCluster(nativeHandles);
    }

    // Native methods documented in corresponding C++ file.
    private static native void createMockCluster(long[] pointers);
    private static native void destroy(long[] pointers);

    /**
     * Get the pointer to the C++ RamCloud object.
     *
     * @return The memory address of the C++ RamCloud object, for use in
     *         constructing a Java RAMCloud object tied to the C++ RamCloud
     *         object.
     */
    public long getRamcloudClientPointer() {
        return nativeHandles[1];
    }

    /**
     * When the test is done with the TestCluster and RAMCloud object, call this
     * method to delete the pointers to the corresponding C++ objects.
     */
    public void destroy() {
        destroy(nativeHandles);
    }
}
| isc |
treejames/Android-Anim-Playground | AnimationPlayground/src/fr/northborders/AnimationPlayground/Utils/SvgHelper.java | 2913 | package fr.northborders.AnimationPlayground.Utils;
import android.content.Context;
import android.graphics.*;
import android.util.Log;
import com.caverock.androidsvg.PreserveAspectRatio;
import com.caverock.androidsvg.SVG;
import com.caverock.androidsvg.SVGParseException;
import java.util.ArrayList;
import java.util.List;
/**
 * Extracts the individual paths of an SVG document, scaled to fit a target
 * viewport, so callers can animate/draw them directly.
 *
 * Created by thibaultguegan on 29/05/2014.
 */
public class SvgHelper {
    private static final String LOG_TAG = "SVG";

    // Paths collected during the last call to getPathsForViewport().
    private final List<SvgPath> mPaths = new ArrayList<SvgPath>();
    // Template paint; each extracted path receives its own copy.
    private final Paint mSourcePaint;

    private SVG mSvg;

    public SvgHelper(Paint sourcePaint) {
        mSourcePaint = sourcePaint;
    }

    /**
     * Parses the given SVG resource. Does nothing if an SVG was already
     * loaded; parse failures are logged and leave this helper without a
     * document.
     */
    public void load(Context context, int svgResource) {
        if (mSvg != null) return;
        try {
            mSvg = SVG.getFromResource(context, svgResource);
            // Keep the document's own coordinates; the fit-to-viewport
            // scaling is applied manually in getPathsForViewport().
            mSvg.setDocumentPreserveAspectRatio(PreserveAspectRatio.UNSCALED);
        } catch (SVGParseException e) {
            Log.e(LOG_TAG, "Could not load specified SVG resource", e);
        }
    }

    /**
     * One path extracted from the SVG document, together with its paint,
     * its measured length and its bounding rectangle.
     */
    public static class SvgPath {
        private static final Region sRegion = new Region();
        private static final Region sMaxClip = new Region(
                Integer.MIN_VALUE, Integer.MIN_VALUE,
                Integer.MAX_VALUE, Integer.MAX_VALUE);

        public final Path path;
        public final Paint paint;
        public final float length;   // total path length, per PathMeasure
        public final Rect bounds;    // bounding box of the path

        public SvgPath(Path path, Paint paint) {
            this.path = path;
            this.paint = paint;

            // Measure the (un-closed) length of the path once, up front.
            PathMeasure measure = new PathMeasure(path, false);
            this.length = measure.getLength();

            // Derive the bounds by rasterizing the path into a region with an
            // effectively unlimited clip.
            sRegion.setPath(path, sMaxClip);
            bounds = sRegion.getBounds();
        }
    }

    /**
     * Renders the loaded SVG into a fake Canvas that, instead of drawing,
     * records every path — transformed into viewport coordinates — into the
     * returned list.
     */
    public List<SvgPath> getPathsForViewport(final int width, final int height) {
        mPaths.clear();
        Canvas canvas = new Canvas() {
            private final Matrix mMatrix = new Matrix();

            @Override
            public int getWidth() {
                return width;
            }

            @Override
            public int getHeight() {
                return height;
            }

            @Override
            public void drawPath(Path path, Paint paint) {
                // Capture the path (with the canvas' current transform baked
                // in) rather than actually rasterizing it.
                Path dst = new Path();
                //noinspection deprecation
                getMatrix(mMatrix);
                path.transform(mMatrix, dst);
                mPaths.add(new SvgPath(dst, new Paint(mSourcePaint)));
            }
        };

        // Uniformly scale the document's view box to fit, centered, inside
        // the requested viewport.
        RectF viewBox = mSvg.getDocumentViewBox();
        float scale = Math.min(width / viewBox.width(), height / viewBox.height());
        canvas.translate(
                (width - viewBox.width() * scale) / 2.0f,
                (height - viewBox.height() * scale) / 2.0f);
        canvas.scale(scale, scale);

        mSvg.renderToCanvas(canvas);

        return mPaths;
    }
}
| mit |
dhuebner/che | plugins/plugin-machine/che-plugin-machine-ext-client/src/main/java/org/eclipse/che/ide/extension/machine/client/perspective/widgets/tab/header/TabHeaderImpl.java | 2966 | /*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.extension.machine.client.perspective.widgets.tab.header;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.assistedinject.Assisted;
import org.eclipse.che.ide.extension.machine.client.MachineResources;
import javax.validation.constraints.NotNull;
/**
 * The class provides methods to control view representation of tab's header.
 *
 * @author Dmitry Shnurenko
 */
public class TabHeaderImpl extends Composite implements TabHeader, ClickHandler {

    interface TabHeaderImplUiBinder extends UiBinder<Widget, TabHeaderImpl> {
    }

    private final static TabHeaderImplUiBinder UI_BINDER = GWT.create(TabHeaderImplUiBinder.class);

    private final MachineResources resources;
    // Immutable tab title; also the identifier reported to the delegate on click.
    private final String name;

    // Label declared in the UiBinder template that displays the tab title.
    @UiField
    Label tabName;

    private ActionDelegate delegate;

    @Inject
    public TabHeaderImpl(MachineResources resources, @Assisted String tabName) {
        this.resources = resources;

        initWidget(UI_BINDER.createAndBindUi(this));

        this.name = tabName;
        this.tabName.setText(tabName);

        // Route DOM click events on this widget to onClick(...) below.
        addDomHandler(this, ClickEvent.getType());
    }

    /** {@inheritDoc} */
    @Override
    public void onClick(ClickEvent event) {
        // Notify the delegate, using the displayed title as the tab identifier.
        delegate.onTabClicked(tabName.getText());
    }

    /** {@inheritDoc} */
    @Override
    public void setDelegate(@NotNull ActionDelegate delegate) {
        this.delegate = delegate;
    }

    /** {@inheritDoc} */
    @Override
    public void setEnable() {
        // Swap CSS classes so the tab renders in its active state.
        removeStyleName(resources.getCss().disableTab());

        addStyleName(resources.getCss().activeTab());
        addStyleName(resources.getCss().activeTabText());
    }

    /** {@inheritDoc} */
    @Override
    public void setDisable() {
        // Swap CSS classes so the tab renders in its disabled state.
        removeStyleName(resources.getCss().activeTab());
        removeStyleName(resources.getCss().activeTabText());

        addStyleName(resources.getCss().disableTab());
    }

    /** {@inheritDoc} */
    @Override
    @NotNull
    public String getName() {
        return name;
    }
}
md-5/jdk10 | test/jdk/com/sun/jdi/RedefineChangeClassOrder.java | 4549 | /*
* Copyright (c) 2006, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 6270982
* @summary Redefine a class so that the order of external classes in
* the constant pool are changed.
* @comment converted from test/jdk/com/sun/jdi/RedefineChangeClassOrder.sh
*
* @library /test/lib
* @compile -g RedefineChangeClassOrder.java
* @run main/othervm RedefineChangeClassOrder
*/
import jdk.test.lib.process.OutputAnalyzer;
import lib.jdb.JdbCommand;
import lib.jdb.JdbTest;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
// Debuggee for the jdb redefine test. Lines tagged with "@1 ..." comments are
// rewritten by the test harness when the class is redefined; those marker
// comments must keep their exact form.
class RedefineChangeClassOrderTarg {
    public static void main(String[] args) {
        new RedefineChangeClassOrderTarg().hi(false);
        new RedefineChangeClassOrderTarg().hi(true); // @1 breakpoint
    }

    // Verifies that the loaded class version matches the caller's expectation:
    // 'expected' is false before redefinition and true afterwards. The
    // "@1 commentout"/"@1 uncomment" markers drive the harness's source edit.
    public void hi(boolean expected) {
        boolean isNewVersion = false; // @1 commentout
        // @1 uncomment boolean isNewVersion = true;

        if (expected == isNewVersion) {
            System.out.println("PASS: expected and isNewVersion match.");
        } else {
            System.out.println("FAIL: expected and isNewVersion do not match.");
            System.out.println("expected=" + expected
                + " isNewVersion=" + isNewVersion);
        }

        Foo1 foo1 = new Foo1(); // @1 commentout
        foo1.hi(); // @1 commentout

        // This Hack code block exists to force some verification_type_info
        // objects of subtype Object_variable_info into the StackMapTable.
        //
        // In the redefined code, the above Foo1 code is effectively
        // moved after the Foo2 code below which causes things to be
        // layed out in a different order in the constant pool. The
        // cpool_index in the Object_variable_info has to be updated
        // in the redefined code's StackMapTable to refer to right
        /// constant pool index in the merged constant pool.
        Hack hack = getClass().getAnnotation(Hack.class);
        if (hack != null) {
            String class_annotation = hack.value();
            System.out.println("class annotation is: " + class_annotation);
            if (isNewVersion) {
                if (class_annotation.equals("JUNK")) {
                    System.out.println("class_annotation is JUNK.");
                } else {
                    System.out.println("class_annotation is NOT JUNK.");
                }
            }
        }

        Foo2 foo2 = new Foo2();
        foo2.hi();
        // @1 uncomment Foo1 foo1 = new Foo1();
        // @1 uncomment foo1.hi();
    }
}
class Foo1 {
    /** Emits a greeting identifying this class's runtime type. */
    public void hi() {
        final String greeting = "Hello from " + getClass();
        System.out.println(greeting);
    }
}
class Foo2 {
    /** Emits a greeting identifying this class's runtime type. */
    public void hi() {
        System.out.println("Hello from ".concat(getClass().toString()));
    }
}
// Runtime-visible marker annotation used by the redefine test: the redefined
// class version reads its value() via getClass().getAnnotation(Hack.class).
@Retention(RetentionPolicy.RUNTIME)
@interface Hack {
    String value();
}
/**
 * jdb-driven test driver: sets the "@1" breakpoint, runs the debuggee,
 * redefines the target class (applying the "@1" source edits), and verifies
 * that the debuggee never printed a "FAIL:" line.
 */
public class RedefineChangeClassOrder extends JdbTest {
    public static void main(String argv[]) {
        new RedefineChangeClassOrder().run();
    }

    private RedefineChangeClassOrder() {
        super(DEBUGGEE_CLASS, SOURCE_FILE);
    }

    private static final String DEBUGGEE_CLASS = RedefineChangeClassOrderTarg.class.getName();
    private static final String SOURCE_FILE = "RedefineChangeClassOrder.java";

    @Override
    protected void runCases() {
        // Breakpoint at the debuggee line tagged "@1 breakpoint".
        setBreakpoints(1);
        jdb.command(JdbCommand.run());
        // Recompile with the "@1" edits applied and redefine in place.
        redefineClass(1, "-g");
        jdb.contToExit(1);

        new OutputAnalyzer(getDebuggeeOutput())
            .shouldNotContain("FAIL:");
    }
}
| gpl-2.0 |
seadsystem/SchemaSpy | src/com/mysql/jdbc/StandardLoadBalanceExceptionChecker.java | 3521 | /*
Copyright (c) 2010, 2014, Oracle and/or its affiliates. All rights reserved.
The MySQL Connector/J is licensed under the terms of the GPLv2
<http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most MySQL Connectors.
There are special exceptions to the terms and conditions of the GPLv2 as it is applied to
this software, see the FLOSS License Exception
<http://www.mysql.com/about/legal/licensing/foss-exception.html>.
This program is free software; you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation; version 2
of the License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this
program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth
Floor, Boston, MA 02110-1301 USA
*/
package com.mysql.jdbc;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
/**
 * Default {@link LoadBalanceExceptionChecker}: decides whether a SQLException
 * should cause a load-balanced connection to fail over to another host.
 * Failover is triggered by connection errors (SQLState class "08"),
 * communication exceptions, and any SQLState prefixes or exception classes
 * configured through the "loadBalanceSQLStateFailover" and
 * "loadBalanceSQLExceptionSubclassFailover" connection properties.
 */
public class StandardLoadBalanceExceptionChecker implements
		LoadBalanceExceptionChecker {

	/** Configured SQLState prefixes that trigger failover (null when unset). */
	private List<String> sqlStateList;

	/** Configured SQLException subclasses that trigger failover (null when unset). */
	private List<Class<?>> sqlExClassList;

	public boolean shouldExceptionTriggerFailover(SQLException ex) {
		String sqlState = ex.getSQLState();

		if (sqlState != null) {
			if (sqlState.startsWith("08")) {
				// SQLState class 08 = connection exception: always fail over.
				return true;
			}
			if (this.sqlStateList != null) {
				// Check against the configured SQLState prefix list.
				for (String statePrefix : this.sqlStateList) {
					if (sqlState.startsWith(statePrefix)) {
						return true;
					}
				}
			}
		}

		// Communication failures always trigger failover.
		if (ex instanceof CommunicationsException) {
			return true;
		}

		if (this.sqlExClassList != null) {
			// Check against the configured exception class list.
			for (Class<?> exClass : this.sqlExClassList) {
				if (exClass.isInstance(ex)) {
					return true;
				}
			}
		}

		// No rule matched: do not fail over.
		return false;
	}

	public void destroy() {
		// Nothing to release: this checker holds no external resources.
	}

	public void init(Connection conn, Properties props) throws SQLException {
		configureSQLStateList(props.getProperty("loadBalanceSQLStateFailover", null));
		configureSQLExceptionSubclassList(props.getProperty("loadBalanceSQLExceptionSubclassFailover", null));
	}

	/**
	 * Parses a comma-separated list of SQLState prefixes, dropping empty
	 * entries; stores the result only when at least one entry remains.
	 */
	private void configureSQLStateList(String sqlStates) {
		if (sqlStates == null || sqlStates.isEmpty()) {
			return;
		}
		List<String> states = StringUtils.split(sqlStates, ",", true);
		List<String> newStates = new ArrayList<String>();
		for (String state : states) {
			if (state.length() > 0) {
				newStates.add(state);
			}
		}
		if (newStates.size() > 0) {
			this.sqlStateList = newStates;
		}
	}

	/**
	 * Parses a comma-separated list of exception class names; names that
	 * cannot be loaded are skipped. Stores the result only when at least one
	 * class was resolved.
	 */
	private void configureSQLExceptionSubclassList(String sqlExClasses) {
		if (sqlExClasses == null || sqlExClasses.isEmpty()) {
			return;
		}
		List<String> classes = StringUtils.split(sqlExClasses, ",", true);
		List<Class<?>> newClasses = new ArrayList<Class<?>>();
		for (String exClass : classes) {
			try {
				newClasses.add(Class.forName(exClass));
			} catch (Exception ignored) {
				// Class not on the classpath: skip it rather than fail init.
			}
		}
		if (newClasses.size() > 0) {
			this.sqlExClassList = newClasses;
		}
	}
}
| gpl-2.0 |
karahiyo/btrace | samples/JMap.java | 1743 | /*
* Copyright 2008-2010 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package com.sun.btrace.samples;
import com.sun.btrace.annotations.*;
import static com.sun.btrace.BTraceUtils.*;
/*
* A simple sample that dumps heap of the target at start and exits.
* This BTrace program mimics the jmap tool (with -dump option).
*/
/*
 * A simple sample that dumps the heap of the target VM at attach time and
 * exits. This BTrace program mimics the jmap tool (with -dump option).
 */
@BTrace
public class JMap {
    static {
        // Default dump file name; overridden by the second script argument
        // when exactly three arguments were passed.
        String name = "heap.bin";
        if (Sys.$length() == 3) {
            name = Sys.$(2);
        }
        Sys.Memory.dumpHeap(name);
        println("heap dumped!");
        Sys.exit(0);
    }
}
| gpl-2.0 |
md-5/jdk10 | test/jdk/javax/swing/JSpinner/4788637/bug4788637.java | 5533 | /*
* Copyright (c) 2003, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.event.InputEvent;
import javax.swing.JFrame;
import javax.swing.JSpinner;
import javax.swing.SpinnerModel;
import javax.swing.SpinnerNumberModel;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import static javax.swing.UIManager.getInstalledLookAndFeels;
/**
* @test
* @bug 4788637 7124307
* @key headful
* @summary JSpinner buttons don't conform to most platform conventions
*/
/**
 * Drives a JSpinner's up-arrow button with java.awt.Robot for every installed
 * look and feel. Expected platform-conventional behaviour, checked in
 * destroy(): the value changes while the button is pressed (phase 0), stops
 * changing when the pressed pointer is dragged off the spinner (phase 1), and
 * resumes when it is moved back (phase 2).
 */
public final class bug4788637 {
    private static JSpinner spinner;
    private static JFrame fr;
    private static Robot robot;

    // Index of the current press/drag phase; selects the slot updated below.
    private int step;
    // Records whether the spinner value changed during each of the 3 phases.
    private boolean spinnerValueChanged[] = {false, false, false};
    private static Point p;        // spinner's on-screen location
    private static Rectangle rect; // spinner's bounds

    public static void main(final String[] args) throws Exception {
        robot = new Robot();
        robot.setAutoDelay(50);
        robot.setAutoWaitForIdle(true);
        for (final UIManager.LookAndFeelInfo laf : getInstalledLookAndFeels()) {
            SwingUtilities.invokeAndWait(() -> setLookAndFeel(laf));
            bug4788637 app = new bug4788637();
            try {
                SwingUtilities.invokeAndWait(app::createAndShowGUI);
                robot.waitForIdle();
                // Capture spinner geometry on the EDT before driving the robot.
                SwingUtilities.invokeAndWait(()-> {
                    spinner.requestFocus();
                    p = spinner.getLocationOnScreen();
                    rect = spinner.getBounds();
                });
                app.start();
            } finally {
                // Always dispose and evaluate the result, even on failure.
                SwingUtilities.invokeAndWait(app::destroy);
            }
        }
    }

    /** Builds the frame with a single spinner and records value changes. */
    public void createAndShowGUI() {
        fr = new JFrame("Test");
        fr.setLayout( new GridBagLayout() );
        SpinnerModel model = new SpinnerNumberModel(50, 1, 100, 1);
        spinner = new JSpinner(model);
        fr.add(spinner,new GridBagConstraints());

        spinner.addChangeListener(new ChangeListener() {
            public void stateChanged(ChangeEvent e) {
                // Mark the current phase as "value changed" and wake start().
                synchronized (bug4788637.this) {
                    spinnerValueChanged[step] = true;
                    bug4788637.this.notifyAll();
                }
            }
        });

        fr.setSize(200, 200);
        fr.setLocationRelativeTo(null);
        fr.setVisible(true);
        fr.toFront();
    }

    /** Performs the press / drag-off / drag-back sequence on the up arrow. */
    public void start() {
        try {
            Thread.sleep(1000);
            // Move mouse to the up arrow button
            robot.mouseMove(p.x+rect.width-3, p.y+3);
            robot.mousePress(InputEvent.BUTTON1_MASK);
            // Phase 0: wait (bounded) for the value to change while pressed.
            synchronized (bug4788637.this) {
                if (!spinnerValueChanged[step]) {
                    bug4788637.this.wait(3000);
                }
            }
            // Move mouse out of JSpinner
            robot.mouseMove(p.x+rect.width-3, p.y-3);
            // Phase 1: the value should NOT change while the pointer is off.
            synchronized (bug4788637.this) {
                step++;
                if (!spinnerValueChanged[step]) {
                    bug4788637.this.wait(3000);
                }
            }
            // Move mouse to the up arrow button
            robot.mouseMove(p.x+rect.width-3, p.y+3);
            // Phase 2: spinning should resume once back over the button.
            synchronized (bug4788637.this) {
                step++;
                if (!spinnerValueChanged[step]) {
                    bug4788637.this.wait(3000);
                }
            }
            robot.mouseRelease(InputEvent.BUTTON1_MASK);
        } catch(Throwable t) {
            throw new RuntimeException(t);
        }
    }

    /** Disposes the frame and verifies the changed/unchanged/changed pattern. */
    public void destroy() {
        fr.dispose();
        synchronized (bug4788637.this) {
            if (!spinnerValueChanged[0] ||
                    spinnerValueChanged[1] ||
                    !spinnerValueChanged[2]) {
                throw new Error("JSpinner buttons don't conform to most platform conventions");
            }
        }
    }

    /** Installs the given look and feel; wraps checked failures as runtime. */
    private static void setLookAndFeel(final UIManager.LookAndFeelInfo laf) {
        try {
            UIManager.setLookAndFeel(laf.getClassName());
            System.out.println("LookAndFeel: " + laf.getClassName());
        } catch (ClassNotFoundException | InstantiationException |
                UnsupportedLookAndFeelException | IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    }
}
| gpl-2.0 |
erpcya/adempierePOS | base/src/org/compiere/model/I_M_ProductPriceVendorBreak.java | 6448 | /******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2007 ComPiere, Inc. All Rights Reserved. *
* This program is free software, you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY, without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program, if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via info@compiere.org or http://www.compiere.org/license.html *
*****************************************************************************/
package org.compiere.model;
import java.math.BigDecimal;
import java.sql.Timestamp;
import org.compiere.util.KeyNamePair;
/** Generated Interface for M_ProductPriceVendorBreak
 *  Model interface for product price-break rows: list/standard/limit prices
 *  per product, price list version, break quantity and optional business
 *  partner.
 *  NOTE(review): generated by the Adempiere model generator -- manual edits
 *  are normally overwritten on regeneration. Audit columns (client, created,
 *  updated) expose getters only; presumably maintained by the persistence
 *  layer -- verify against the generator.
 *  @author Adempiere (generated)
 *  @version Release 3.8.0
 */
public interface I_M_ProductPriceVendorBreak
{

    /** TableName=M_ProductPriceVendorBreak */
    public static final String Table_Name = "M_ProductPriceVendorBreak";

    /** AD_Table_ID=53172 */
    public static final int Table_ID = MTable.getTable_ID(Table_Name);

    KeyNamePair Model = new KeyNamePair(Table_ID, Table_Name);

    /** AccessLevel = 3 - Client - Org
     */
    BigDecimal accessLevel = BigDecimal.valueOf(3);

    /** Load Meta Data */

    /** Column name AD_Client_ID */
    public static final String COLUMNNAME_AD_Client_ID = "AD_Client_ID";

    /** Get Client.
      * Client/Tenant for this installation.
      */
    public int getAD_Client_ID();

    /** Column name AD_Org_ID */
    public static final String COLUMNNAME_AD_Org_ID = "AD_Org_ID";

    /** Set Organization.
      * Organizational entity within client
      */
    public void setAD_Org_ID (int AD_Org_ID);

    /** Get Organization.
      * Organizational entity within client
      */
    public int getAD_Org_ID();

    /** Column name BreakValue */
    public static final String COLUMNNAME_BreakValue = "BreakValue";

    /** Set Break Value.
      * Low Value of trade discount break level
      */
    public void setBreakValue (BigDecimal BreakValue);

    /** Get Break Value.
      * Low Value of trade discount break level
      */
    public BigDecimal getBreakValue();

    /** Column name C_BPartner_ID */
    public static final String COLUMNNAME_C_BPartner_ID = "C_BPartner_ID";

    /** Set Business Partner .
      * Identifies a Business Partner
      */
    public void setC_BPartner_ID (int C_BPartner_ID);

    /** Get Business Partner .
      * Identifies a Business Partner
      */
    public int getC_BPartner_ID();

    public org.compiere.model.I_C_BPartner getC_BPartner() throws RuntimeException;

    /** Column name Created */
    public static final String COLUMNNAME_Created = "Created";

    /** Get Created.
      * Date this record was created
      */
    public Timestamp getCreated();

    /** Column name CreatedBy */
    public static final String COLUMNNAME_CreatedBy = "CreatedBy";

    /** Get Created By.
      * User who created this records
      */
    public int getCreatedBy();

    /** Column name IsActive */
    public static final String COLUMNNAME_IsActive = "IsActive";

    /** Set Active.
      * The record is active in the system
      */
    public void setIsActive (boolean IsActive);

    /** Get Active.
      * The record is active in the system
      */
    public boolean isActive();

    /** Column name M_PriceList_Version_ID */
    public static final String COLUMNNAME_M_PriceList_Version_ID = "M_PriceList_Version_ID";

    /** Set Price List Version.
      * Identifies a unique instance of a Price List
      */
    public void setM_PriceList_Version_ID (int M_PriceList_Version_ID);

    /** Get Price List Version.
      * Identifies a unique instance of a Price List
      */
    public int getM_PriceList_Version_ID();

    public org.compiere.model.I_M_PriceList_Version getM_PriceList_Version() throws RuntimeException;

    /** Column name M_ProductPriceVendorBreak_ID */
    public static final String COLUMNNAME_M_ProductPriceVendorBreak_ID = "M_ProductPriceVendorBreak_ID";

    /** Set Product Price Break */
    public void setM_ProductPriceVendorBreak_ID (int M_ProductPriceVendorBreak_ID);

    /** Get Product Price Break */
    public int getM_ProductPriceVendorBreak_ID();

    /** Column name M_Product_ID */
    public static final String COLUMNNAME_M_Product_ID = "M_Product_ID";

    /** Set Product.
      * Product, Service, Item
      */
    public void setM_Product_ID (int M_Product_ID);

    /** Get Product.
      * Product, Service, Item
      */
    public int getM_Product_ID();

    public org.compiere.model.I_M_Product getM_Product() throws RuntimeException;

    /** Column name PriceLimit */
    public static final String COLUMNNAME_PriceLimit = "PriceLimit";

    /** Set Limit Price.
      * Lowest price for a product
      */
    public void setPriceLimit (BigDecimal PriceLimit);

    /** Get Limit Price.
      * Lowest price for a product
      */
    public BigDecimal getPriceLimit();

    /** Column name PriceList */
    public static final String COLUMNNAME_PriceList = "PriceList";

    /** Set List Price.
      * List Price
      */
    public void setPriceList (BigDecimal PriceList);

    /** Get List Price.
      * List Price
      */
    public BigDecimal getPriceList();

    /** Column name PriceStd */
    public static final String COLUMNNAME_PriceStd = "PriceStd";

    /** Set Standard Price.
      * Standard Price
      */
    public void setPriceStd (BigDecimal PriceStd);

    /** Get Standard Price.
      * Standard Price
      */
    public BigDecimal getPriceStd();

    /** Column name Updated */
    public static final String COLUMNNAME_Updated = "Updated";

    /** Get Updated.
      * Date this record was updated
      */
    public Timestamp getUpdated();

    /** Column name UpdatedBy */
    public static final String COLUMNNAME_UpdatedBy = "UpdatedBy";

    /** Get Updated By.
      * User who updated this records
      */
    public int getUpdatedBy();
}
| gpl-2.0 |
GlowstonePlusPlus/Glowkit | src/main/javadoc/org/bukkit/event/server/package-info.java | 140 | /**
* {@link org.bukkit.event.Event Events} relating to programmatic state
* changes on the server.
*/
package org.bukkit.event.server;
| gpl-3.0 |
abbeyj/sonarqube | server/sonar-server/src/main/java/org/sonar/server/measure/custom/ws/MetricsAction.java | 3973 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.measure.custom.ws;
import com.google.common.io.Resources;
import java.util.List;
import org.sonar.api.server.ws.Request;
import org.sonar.api.server.ws.Response;
import org.sonar.api.server.ws.WebService;
import org.sonar.api.utils.text.JsonWriter;
import org.sonar.db.DbSession;
import org.sonar.db.MyBatis;
import org.sonar.db.component.ComponentDto;
import org.sonar.db.metric.MetricDto;
import org.sonar.server.component.ComponentFinder;
import org.sonar.server.db.DbClient;
import org.sonar.server.metric.ws.MetricJsonWriter;
import org.sonar.server.user.UserSession;
import static org.sonar.server.measure.custom.ws.CustomMeasureValidator.checkPermissions;
/**
 * Web service action listing the custom metrics that can still be used for a
 * new custom measure on a project, i.e. custom metrics for which no custom
 * measure already exists on that project.
 */
public class MetricsAction implements CustomMeasuresWsAction {

  public static final String ACTION = "metrics";
  public static final String PARAM_PROJECT_ID = "projectId";
  public static final String PARAM_PROJECT_KEY = "projectKey";

  private final DbClient dbClient;
  private final UserSession userSession;
  private final ComponentFinder componentFinder;

  public MetricsAction(DbClient dbClient, UserSession userSession, ComponentFinder componentFinder) {
    this.dbClient = dbClient;
    this.userSession = userSession;
    this.componentFinder = componentFinder;
  }

  /** Declares the "metrics" action, its parameters and its documentation. */
  @Override
  public void define(WebService.NewController context) {
    WebService.NewAction action = context.createAction(ACTION)
      .setSince("5.2")
      .setInternal(true)
      .setHandler(this)
      .setResponseExample(Resources.getResource(getClass(), "example-metrics.json"))
      .setDescription("List all custom metrics for which no custom measure already exists on a given project.<br /> " +
        "The project id or project key must be provided.<br />" +
        "Requires 'Administer System' permission or 'Administer' permission on the project.");

    action.createParam(PARAM_PROJECT_ID)
      .setDescription("Project id")
      .setExampleValue("ce4c03d6-430f-40a9-b777-ad877c00aa4d");

    action.createParam(PARAM_PROJECT_KEY)
      .setDescription("Project key")
      .setExampleValue("org.apache.hbas:hbase");
  }

  /**
   * Resolves the project, checks permissions and writes the list of available
   * custom metrics as JSON.
   */
  @Override
  public void handle(Request request, Response response) throws Exception {
    DbSession dbSession = dbClient.openSession(false);
    try {
      // NOTE(review): parameters are read via CreateAction's constants while
      // define() declares them through this class's PARAM_* constants --
      // presumably the values match ("projectId"/"projectKey"); verify.
      ComponentDto project = componentFinder.getByUuidOrKey(dbSession, request.param(CreateAction.PARAM_PROJECT_ID), request.param(CreateAction.PARAM_PROJECT_KEY));
      checkPermissions(userSession, project);
      List<MetricDto> metrics = searchMetrics(dbSession, project);

      writeResponse(response.newJsonWriter(), metrics);
    } finally {
      // Session is closed here; closeQuietly never throws.
      MyBatis.closeQuietly(dbSession);
    }
  }

  /** Serializes the metrics into the given writer and closes it. */
  private static void writeResponse(JsonWriter json, List<MetricDto> metrics) {
    json.beginObject();
    MetricJsonWriter.write(json, metrics, MetricJsonWriter.ALL_FIELDS);
    json.endObject();
    json.close();
  }

  /** Fetches custom metrics with no existing custom measure on the project. */
  private List<MetricDto> searchMetrics(DbSession dbSession, ComponentDto project) {
    return dbClient.metricDao().selectAvailableCustomMetricsByComponentUuid(dbSession, project.uuid());
  }
}
| lgpl-3.0 |
MatthiasMann/EnderIO | src/main/java/crazypants/enderio/rail/TeleportUtil.java | 6493 | package crazypants.enderio.rail;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.item.EntityMinecart;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.server.MinecraftServer;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
import cpw.mods.fml.common.network.NetworkRegistry.TargetPoint;
import crazypants.enderio.config.Config;
import crazypants.enderio.machine.transceiver.TileTransceiver;
import crazypants.enderio.network.PacketHandler;
/**
 * Helpers for teleporting minecarts (and their non-player passengers) between
 * EnderIO rail transceivers, possibly across dimensions.
 */
public class TeleportUtil {

  /**
   * Builds copies of the given cart and, when present, its non-player
   * passenger, positioned one block above the receiving transceiver and ready
   * to be spawned into the receiver's world via {@link #spawn}.
   * ("Reciever" spelling is kept for compatibility with existing callers.)
   *
   * @return the new entities (cart first, passenger second when copied), or
   *         null when the cart's entity type could not be re-created
   */
  public static List<Entity> createEntitiesForReciever(EntityMinecart cart, TileTransceiver sender, TileTransceiver reciever) {
    int toDimension = reciever.getWorldObj().provider.dimensionId;
    int toX = reciever.xCoord;
    int toY = reciever.yCoord + 1;
    int toZ = reciever.zCoord;

    MinecraftServer minecraftserver = MinecraftServer.getServer();
    WorldServer worldserver1 = minecraftserver.worldServerForDimension(toDimension);

    EntityMinecart newCart = (EntityMinecart) EntityList.createEntityByName(EntityList.getEntityString(cart), worldserver1);
    if(newCart == null) {
      return null;
    }

    // Clone the cart's state via NBT, then re-home it to the target position.
    NBTTagCompound nbttagcompound = new NBTTagCompound();
    cart.writeToNBT(nbttagcompound);
    newCart.readFromNBT(nbttagcompound);
    newCart.dimension = toDimension;
    newCart.setLocationAndAngles(toX + 0.5, toY, toZ + 0.5, cart.rotationYaw, cart.rotationPitch);
    newCart.isDead = false;

    List<Entity> result = new ArrayList<Entity>();
    result.add(newCart);

    // Copy a riding entity as well, except players (not handled here).
    Entity passenger = cart.riddenByEntity;
    if(passenger != null && !(passenger instanceof EntityPlayer)) {
      Entity newPas = EntityList.createEntityByName(EntityList.getEntityString(passenger), worldserver1);
      newPas.copyDataFrom(passenger, true);
      newPas.dimension = toDimension;
      newPas.setLocationAndAngles(toX + 0.5, toY, toZ + 0.5, cart.rotationYaw, cart.rotationPitch);
      newCart.riddenByEntity = newPas;
      newPas.ridingEntity = newCart;
      result.add(newPas);
    }
    return result;
  }

  /**
   * Removes the cart and any non-player passenger from the world. The cart's
   * inventory is cleared first so its contents are not dropped on removal.
   */
  public static void despawn(World world, EntityMinecart cart) {
    if(cart instanceof IInventory) {
      IInventory cont = (IInventory) cart;
      for (int i = 0; i < cont.getSizeInventory(); i++) {
        cont.setInventorySlotContents(i, null);
      }
    }
    MinecraftServer minecraftserver = MinecraftServer.getServer();
    WorldServer worldserver = minecraftserver.worldServerForDimension(world.provider.dimensionId);
    Entity passenger = cart.riddenByEntity;
    if(passenger != null && !(passenger instanceof EntityPlayer)) {
      worldserver.removeEntity(passenger);
      passenger.isDead = true;
    }
    worldserver.removeEntity(cart);
    cart.isDead = true;
  }

  /** Spawns the entity into the server world for the given world's dimension. */
  public static void spawn(World world, Entity entity) {
    if(entity != null) {
      MinecraftServer minecraftserver = MinecraftServer.getServer();
      WorldServer worldserver = minecraftserver.worldServerForDimension(world.provider.dimensionId);
      worldserver.spawnEntityInWorld(entity);
    }
  }

  /**
   * Sends teleport particle effects to clients within 64 blocks of the entity
   * and, when machine sounds are enabled, plays the enderman portal sound.
   */
  public static void spawnTeleportEffects(World world, Entity entity) {
    PacketHandler.INSTANCE.sendToAllAround(new PacketTeleportEffects(entity), new TargetPoint(world.provider.dimensionId, entity.posX, entity.posY,
        entity.posZ, 64));
    if(Config.machineSoundsEnabled) {
      world.playSoundEffect(entity.posX, entity.posY, entity.posZ, "mob.endermen.portal", 0.5F, 0.25F);
    }
  }
}
| unlicense |
apurtell/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ReplicationWork.java | 2800 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
import org.apache.hadoop.hdfs.server.protocol.BlockCommand;
import org.apache.hadoop.net.Node;
import java.util.List;
import java.util.Set;
/**
 * Reconstruction work for an under-replicated (non-striped) block: copies the
 * block from a single chosen source datanode to newly chosen target storages.
 */
class ReplicationWork extends BlockReconstructionWork {

  public ReplicationWork(BlockInfo block, BlockCollection bc,
      DatanodeDescriptor[] srcNodes, List<DatanodeDescriptor> containingNodes,
      List<DatanodeStorageInfo> liveReplicaStorages, int additionalReplRequired,
      int priority) {
    super(block, bc, srcNodes, containingNodes,
        liveReplicaStorages, additionalReplRequired, priority);
    assert getSrcNodes().length == 1 :
        "There should be exactly 1 source node that have been selected";
    // Reserve the source node until targets are chosen in chooseTargets().
    getSrcNodes()[0].incrementPendingReplicationWithoutTargets();
    // Parameterized form avoids string concatenation when debug is disabled.
    LOG.debug("Creating a ReplicationWork to reconstruct {}", block);
  }

  @Override
  void chooseTargets(BlockPlacementPolicy blockplacement,
      BlockStoragePolicySuite storagePolicySuite,
      Set<Node> excludedNodes) {
    assert getSrcNodes().length > 0
        : "At least 1 source node should have been selected";
    try {
      DatanodeStorageInfo[] chosenTargets = null;
      // HDFS-14720: a deleted block reports BlockCommand.NO_ACK
      // (Long.MAX_VALUE) as its size; such blocks must not be scheduled for
      // replication or reconstruction.
      if (getBlock().getNumBytes() != BlockCommand.NO_ACK) {
        chosenTargets = blockplacement.chooseTarget(getSrcPath(),
            getAdditionalReplRequired(), getSrcNodes()[0],
            getLiveReplicaStorages(), false, excludedNodes, getBlockSize(),
            storagePolicySuite.getPolicy(getStoragePolicyID()), null);
      }
      setTargets(chosenTargets);
    } finally {
      // Release the reservation taken in the constructor, whether or not
      // target selection succeeded.
      getSrcNodes()[0].decrementPendingReplicationWithoutTargets();
    }
  }

  @Override
  void addTaskToDatanode(NumberReplicas numberReplicas) {
    getSrcNodes()[0].addBlockToBeReplicated(getBlock(), getTargets());
  }
}
| apache-2.0 |
wwjiang007/flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java | 59280 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.factories;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.DelegatingConfiguration;
import org.apache.flink.configuration.FallbackKey;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.CommonCatalogOptions;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.connector.format.EncodingFormat;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.module.Module;
import org.apache.flink.table.utils.EncodingUtils;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static org.apache.flink.configuration.ConfigurationUtils.canBePrefixMap;
import static org.apache.flink.configuration.ConfigurationUtils.filterPrefixMapKey;
import static org.apache.flink.configuration.GlobalConfiguration.HIDDEN_CONTENT;
import static org.apache.flink.table.factories.ManagedTableFactory.DEFAULT_IDENTIFIER;
import static org.apache.flink.table.module.CommonModuleOptions.MODULE_TYPE;
/** Utility for working with {@link Factory}s. */
@PublicEvolving
public final class FactoryUtil {
    /** Shared logger for factory discovery diagnostics. */
    private static final Logger LOG = LoggerFactory.getLogger(FactoryUtil.class);
    /**
     * Describes the property version. This can be used for backwards compatibility in case the
     * property format changes.
     */
    public static final ConfigOption<Integer> PROPERTY_VERSION =
            ConfigOptions.key("property-version")
                    .intType()
                    .defaultValue(1)
                    .withDescription(
                            "Version of the overall property design. This option is meant for future backwards compatibility.");
    /** Option key ('connector') identifying the connector of a dynamic table. */
    public static final ConfigOption<String> CONNECTOR =
            ConfigOptions.key("connector")
                    .stringType()
                    .noDefaultValue()
                    .withDescription(
                            "Uniquely identifies the connector of a dynamic table that is used for accessing data in "
                                    + "an external system. Its value is used during table source and table sink discovery.")
    /** Option key ('format') selecting the (de)serialization format factory. */
    public static final ConfigOption<String> FORMAT =
            ConfigOptions.key("format")
                    .stringType()
                    .noDefaultValue()
                    .withDescription(
                            "Defines the format identifier for encoding data. "
                                    + "The identifier is used to discover a suitable format factory.");
    /** Option key ('sink.parallelism') for a per-sink parallelism override. */
    public static final ConfigOption<Integer> SINK_PARALLELISM =
            ConfigOptions.key("sink.parallelism")
                    .intType()
                    .noDefaultValue()
                    .withDescription(
                            "Defines a custom parallelism for the sink. "
                                    + "By default, if this option is not defined, the planner will derive the parallelism "
                                    + "for each statement individually by also considering the global configuration.");
    /**
     * Suffix for keys of {@link ConfigOption} in case a connector requires multiple formats (e.g.
     * for both key and value).
     *
     * <p>See {@link #createTableFactoryHelper(DynamicTableFactory, DynamicTableFactory.Context)}
     * for more information.
     */
    public static final String FORMAT_SUFFIX = ".format";
    /**
     * The placeholder symbol to be used for keys of options which can be templated. See {@link
     * Factory} for details.
     */
    public static final String PLACEHOLDER_SYMBOL = "#";
    /**
     * Creates a {@link DynamicTableSource} from a {@link CatalogTable}.
     *
     * <p>If {@code preferredFactory} is passed, the table source is created from that factory.
     * Otherwise, an attempt is made to discover a matching factory using Java SPI (see {@link
     * Factory} for details).
     */
    public static DynamicTableSource createDynamicTableSource(
            @Nullable DynamicTableSourceFactory preferredFactory,
            ObjectIdentifier objectIdentifier,
            ResolvedCatalogTable catalogTable,
            Map<String, String> enrichmentOptions,
            ReadableConfig configuration,
            ClassLoader classLoader,
            boolean isTemporary) {
        final DefaultDynamicTableContext context =
                new DefaultDynamicTableContext(
                        objectIdentifier,
                        catalogTable,
                        enrichmentOptions,
                        configuration,
                        classLoader,
                        isTemporary);
        try {
            // An explicitly provided factory (e.g. from the catalog) wins over SPI discovery.
            final DynamicTableSourceFactory factory =
                    preferredFactory != null
                            ? preferredFactory
                            : discoverTableFactory(DynamicTableSourceFactory.class, context);
            return factory.createDynamicTableSource(context);
        } catch (Throwable t) {
            // Catch Throwable (not just Exception) so even linkage errors surface
            // together with the table identifier and its (sorted) options.
            throw new ValidationException(
                    String.format(
                            "Unable to create a source for reading table '%s'.\n\n"
                                    + "Table options are:\n\n"
                                    + "%s",
                            objectIdentifier.asSummaryString(),
                            catalogTable.getOptions().entrySet().stream()
                                    .map(e -> stringifyOption(e.getKey(), e.getValue()))
                                    .sorted()
                                    .collect(Collectors.joining("\n"))),
                    t);
        }
    }
/**
* @deprecated Use {@link #createDynamicTableSource(DynamicTableSourceFactory, ObjectIdentifier,
* ResolvedCatalogTable, Map, ReadableConfig, ClassLoader, boolean)}
*/
@Deprecated
public static DynamicTableSource createDynamicTableSource(
@Nullable DynamicTableSourceFactory preferredFactory,
ObjectIdentifier objectIdentifier,
ResolvedCatalogTable catalogTable,
ReadableConfig configuration,
ClassLoader classLoader,
boolean isTemporary) {
return createDynamicTableSource(
preferredFactory,
objectIdentifier,
catalogTable,
Collections.emptyMap(),
configuration,
classLoader,
isTemporary);
}
/**
* Creates a {@link DynamicTableSource} from a {@link CatalogTable}.
*
* <p>It considers {@link Catalog#getFactory()} if provided.
*
* @deprecated Use {@link #createDynamicTableSource(DynamicTableSourceFactory, ObjectIdentifier,
* ResolvedCatalogTable, Map, ReadableConfig, ClassLoader, boolean)} instead.
*/
@Deprecated
public static DynamicTableSource createTableSource(
@Nullable Catalog catalog,
ObjectIdentifier objectIdentifier,
ResolvedCatalogTable catalogTable,
ReadableConfig configuration,
ClassLoader classLoader,
boolean isTemporary) {
final DefaultDynamicTableContext context =
new DefaultDynamicTableContext(
objectIdentifier,
catalogTable,
Collections.emptyMap(),
configuration,
classLoader,
isTemporary);
return createDynamicTableSource(
getDynamicTableFactory(DynamicTableSourceFactory.class, catalog, context),
objectIdentifier,
catalogTable,
Collections.emptyMap(),
configuration,
classLoader,
isTemporary);
}
    /**
     * Creates a {@link DynamicTableSink} from a {@link CatalogTable}.
     *
     * <p>If {@code preferredFactory} is passed, the table sink is created from that factory.
     * Otherwise, an attempt is made to discover a matching factory using Java SPI (see {@link
     * Factory} for details).
     */
    public static DynamicTableSink createDynamicTableSink(
            @Nullable DynamicTableSinkFactory preferredFactory,
            ObjectIdentifier objectIdentifier,
            ResolvedCatalogTable catalogTable,
            Map<String, String> enrichmentOptions,
            ReadableConfig configuration,
            ClassLoader classLoader,
            boolean isTemporary) {
        final DefaultDynamicTableContext context =
                new DefaultDynamicTableContext(
                        objectIdentifier,
                        catalogTable,
                        enrichmentOptions,
                        configuration,
                        classLoader,
                        isTemporary);
        try {
            // An explicitly provided factory (e.g. from the catalog) wins over SPI discovery.
            final DynamicTableSinkFactory factory =
                    preferredFactory != null
                            ? preferredFactory
                            : discoverTableFactory(DynamicTableSinkFactory.class, context);
            return factory.createDynamicTableSink(context);
        } catch (Throwable t) {
            // Catch Throwable (not just Exception) so even linkage errors surface
            // together with the table identifier and its (sorted) options.
            throw new ValidationException(
                    String.format(
                            "Unable to create a sink for writing table '%s'.\n\n"
                                    + "Table options are:\n\n"
                                    + "%s",
                            objectIdentifier.asSummaryString(),
                            catalogTable.getOptions().entrySet().stream()
                                    .map(e -> stringifyOption(e.getKey(), e.getValue()))
                                    .sorted()
                                    .collect(Collectors.joining("\n"))),
                    t);
        }
    }
/**
* @deprecated Use {@link #createDynamicTableSink(DynamicTableSinkFactory, ObjectIdentifier,
* ResolvedCatalogTable, Map, ReadableConfig, ClassLoader, boolean)}
*/
@Deprecated
public static DynamicTableSink createDynamicTableSink(
@Nullable DynamicTableSinkFactory preferredFactory,
ObjectIdentifier objectIdentifier,
ResolvedCatalogTable catalogTable,
ReadableConfig configuration,
ClassLoader classLoader,
boolean isTemporary) {
return createDynamicTableSink(
preferredFactory,
objectIdentifier,
catalogTable,
Collections.emptyMap(),
configuration,
classLoader,
isTemporary);
}
/**
* Creates a {@link DynamicTableSink} from a {@link CatalogTable}.
*
* <p>It considers {@link Catalog#getFactory()} if provided.
*
* @deprecated Use {@link #createDynamicTableSink(DynamicTableSinkFactory, ObjectIdentifier,
* ResolvedCatalogTable, Map, ReadableConfig, ClassLoader, boolean)} instead.
*/
@Deprecated
public static DynamicTableSink createTableSink(
@Nullable Catalog catalog,
ObjectIdentifier objectIdentifier,
ResolvedCatalogTable catalogTable,
ReadableConfig configuration,
ClassLoader classLoader,
boolean isTemporary) {
final DefaultDynamicTableContext context =
new DefaultDynamicTableContext(
objectIdentifier,
catalogTable,
Collections.emptyMap(),
configuration,
classLoader,
isTemporary);
return createDynamicTableSink(
getDynamicTableFactory(DynamicTableSinkFactory.class, catalog, context),
objectIdentifier,
catalogTable,
Collections.emptyMap(),
configuration,
classLoader,
isTemporary);
}
/**
* Creates a utility that helps validating options for a {@link CatalogFactory}.
*
* <p>Note: This utility checks for left-over options in the final step.
*/
public static CatalogFactoryHelper createCatalogFactoryHelper(
CatalogFactory factory, CatalogFactory.Context context) {
return new CatalogFactoryHelper(factory, context);
}
/**
* Creates a utility that helps validating options for a {@link ModuleFactory}.
*
* <p>Note: This utility checks for left-over options in the final step.
*/
public static ModuleFactoryHelper createModuleFactoryHelper(
ModuleFactory factory, ModuleFactory.Context context) {
return new ModuleFactoryHelper(factory, context);
}
    /**
     * Creates a utility that helps in discovering formats, merging options with {@link
     * DynamicTableFactory.Context#getEnrichmentOptions()} and validating them all for a {@link
     * DynamicTableFactory}.
     *
     * <p>The following example sketches the usage:
     *
     * <pre>{@code
     * // in createDynamicTableSource()
     * helper = FactoryUtil.createTableFactoryHelper(this, context);
     *
     * keyFormat = helper.discoverDecodingFormat(DeserializationFormatFactory.class, KEY_FORMAT);
     * valueFormat = helper.discoverDecodingFormat(DeserializationFormatFactory.class, VALUE_FORMAT);
     *
     * helper.validate();
     *
     * ... // construct connector with discovered formats
     * }</pre>
     *
     * <p>Note: The format option parameter of {@link
     * TableFactoryHelper#discoverEncodingFormat(Class, ConfigOption)} and {@link
     * TableFactoryHelper#discoverDecodingFormat(Class, ConfigOption)} must be {@link #FORMAT} or
     * end with {@link #FORMAT_SUFFIX}. The discovery logic will replace 'format' with the factory
     * identifier value as the format prefix. For example, assuming the identifier is 'json', if the
     * format option key is 'format', then the format prefix is 'json.'. If the format option key is
     * 'value.format', then the format prefix is 'value.json'. The format prefix is used to project
     * the options for the format factory.
     *
     * <p>Note: When created, this utility merges the options from {@link
     * DynamicTableFactory.Context#getEnrichmentOptions()} using {@link
     * DynamicTableFactory#forwardOptions()}. When invoking {@link TableFactoryHelper#validate()},
     * this utility checks for left-over options in the final step.
     */
    public static TableFactoryHelper createTableFactoryHelper(
            DynamicTableFactory factory, DynamicTableFactory.Context context) {
        // Merging of enrichment options happens in the helper's constructor.
        return new TableFactoryHelper(factory, context);
    }
    /**
     * Attempts to discover an appropriate catalog factory and creates an instance of the catalog.
     *
     * <p>This first uses the legacy {@link TableFactory} stack to discover a matching {@link
     * CatalogFactory}. If none is found, it falls back to the new stack using {@link Factory}
     * instead.
     */
    public static Catalog createCatalog(
            String catalogName,
            Map<String, String> options,
            ReadableConfig configuration,
            ClassLoader classLoader) {
        // Use the legacy mechanism first for compatibility
        try {
            final CatalogFactory legacyFactory =
                    TableFactoryService.find(CatalogFactory.class, options, classLoader);
            return legacyFactory.createCatalog(catalogName, options);
        } catch (NoMatchingTableFactoryException e) {
            // No matching legacy factory found, try using the new stack
            final DefaultCatalogContext discoveryContext =
                    new DefaultCatalogContext(catalogName, options, configuration, classLoader);
            try {
                final CatalogFactory factory = getCatalogFactory(discoveryContext);
                // The type option is only used for discovery, we don't actually want to forward it
                // to the catalog factory itself.
                final Map<String, String> factoryOptions =
                        options.entrySet().stream()
                                .filter(
                                        entry ->
                                                !CommonCatalogOptions.CATALOG_TYPE
                                                        .key()
                                                        .equals(entry.getKey()))
                                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
                final DefaultCatalogContext context =
                        new DefaultCatalogContext(
                                catalogName, factoryOptions, configuration, classLoader);
                return factory.createCatalog(context);
            } catch (Throwable t) {
                // Wrap any failure with the catalog name and its (sorted, secret-masked) options.
                throw new ValidationException(
                        String.format(
                                "Unable to create catalog '%s'.%n%nCatalog options are:%n%s",
                                catalogName,
                                options.entrySet().stream()
                                        .map(
                                                optionEntry ->
                                                        stringifyOption(
                                                                optionEntry.getKey(),
                                                                optionEntry.getValue()))
                                        .sorted()
                                        .collect(Collectors.joining("\n"))),
                        t);
            }
        }
    }
/**
* Discovers a matching module factory and creates an instance of it.
*
* <p>This first uses the legacy {@link TableFactory} stack to discover a matching {@link
* ModuleFactory}. If none is found, it falls back to the new stack using {@link Factory}
* instead.
*/
public static Module createModule(
String moduleName,
Map<String, String> options,
ReadableConfig configuration,
ClassLoader classLoader) {
if (options.containsKey(MODULE_TYPE.key())) {
throw new ValidationException(
String.format(
"Option '%s' = '%s' is not supported since module name "
+ "is used to find module",
MODULE_TYPE.key(), options.get(MODULE_TYPE.key())));
}
try {
final Map<String, String> optionsWithType = new HashMap<>(options);
optionsWithType.put(MODULE_TYPE.key(), moduleName);
final ModuleFactory legacyFactory =
TableFactoryService.find(ModuleFactory.class, optionsWithType, classLoader);
return legacyFactory.createModule(optionsWithType);
} catch (NoMatchingTableFactoryException e) {
final DefaultModuleContext discoveryContext =
new DefaultModuleContext(options, configuration, classLoader);
try {
final ModuleFactory factory =
discoverFactory(
((ModuleFactory.Context) discoveryContext).getClassLoader(),
ModuleFactory.class,
moduleName);
final DefaultModuleContext context =
new DefaultModuleContext(options, configuration, classLoader);
return factory.createModule(context);
} catch (Throwable t) {
throw new ValidationException(
String.format(
"Unable to create module '%s'.%n%nModule options are:%n%s",
moduleName,
options.entrySet().stream()
.map(
optionEntry ->
stringifyOption(
optionEntry.getKey(),
optionEntry.getValue()))
.sorted()
.collect(Collectors.joining("\n"))),
t);
}
}
}
    /**
     * Discovers a factory using the given factory base class and identifier.
     *
     * <p>This method is meant for cases where {@link #createTableFactoryHelper(DynamicTableFactory,
     * DynamicTableFactory.Context)} {@link #createTableSource(Catalog, ObjectIdentifier,
     * ResolvedCatalogTable, ReadableConfig, ClassLoader, boolean)}, and {@link
     * #createTableSink(Catalog, ObjectIdentifier, ResolvedCatalogTable, ReadableConfig,
     * ClassLoader, boolean)} are not applicable.
     *
     * @throws ValidationException if no factory of the requested base class exists, the identifier
     *     is unknown, or the identifier is ambiguous
     */
    @SuppressWarnings("unchecked")
    public static <T extends Factory> T discoverFactory(
            ClassLoader classLoader, Class<T> factoryClass, String factoryIdentifier) {
        final List<Factory> factories = discoverFactories(classLoader);
        // First narrow by base class, then by identifier, so that each error
        // message can report the respective candidate set.
        final List<Factory> foundFactories =
                factories.stream()
                        .filter(f -> factoryClass.isAssignableFrom(f.getClass()))
                        .collect(Collectors.toList());
        if (foundFactories.isEmpty()) {
            throw new ValidationException(
                    String.format(
                            "Could not find any factories that implement '%s' in the classpath.",
                            factoryClass.getName()));
        }
        final List<Factory> matchingFactories =
                foundFactories.stream()
                        .filter(f -> f.factoryIdentifier().equals(factoryIdentifier))
                        .collect(Collectors.toList());
        if (matchingFactories.isEmpty()) {
            // The managed-table default identifier is an implementation detail
            // and therefore hidden from the suggestion list.
            throw new ValidationException(
                    String.format(
                            "Could not find any factory for identifier '%s' that implements '%s' in the classpath.\n\n"
                                    + "Available factory identifiers are:\n\n"
                                    + "%s",
                            factoryIdentifier,
                            factoryClass.getName(),
                            foundFactories.stream()
                                    .map(Factory::factoryIdentifier)
                                    .filter(identifier -> !DEFAULT_IDENTIFIER.equals(identifier))
                                    .distinct()
                                    .sorted()
                                    .collect(Collectors.joining("\n"))));
        }
        if (matchingFactories.size() > 1) {
            throw new ValidationException(
                    String.format(
                            "Multiple factories for identifier '%s' that implement '%s' found in the classpath.\n\n"
                                    + "Ambiguous factory classes are:\n\n"
                                    + "%s",
                            factoryIdentifier,
                            factoryClass.getName(),
                            matchingFactories.stream()
                                    .map(f -> f.getClass().getName())
                                    .sorted()
                                    .collect(Collectors.joining("\n"))));
        }
        return (T) matchingFactories.get(0);
    }
/**
* Validates the required and optional {@link ConfigOption}s of a factory.
*
* <p>Note: It does not check for left-over options.
*/
public static void validateFactoryOptions(Factory factory, ReadableConfig options) {
validateFactoryOptions(factory.requiredOptions(), factory.optionalOptions(), options);
}
    /**
     * Validates the required options and optional options.
     *
     * <p>Note: It does not check for left-over options.
     *
     * @throws ValidationException if a required option is missing or any option value cannot be
     *     parsed
     */
    public static void validateFactoryOptions(
            Set<ConfigOption<?>> requiredOptions,
            Set<ConfigOption<?>> optionalOptions,
            ReadableConfig options) {
        // currently Flink's options have no validation feature which is why we access them eagerly
        // to provoke a parsing error
        final List<String> missingRequiredOptions =
                requiredOptions.stream()
                        // Templated options will never appear with their template key, so we need
                        // to ignore them as required properties here
                        .filter(
                                option ->
                                        allKeys(option)
                                                .noneMatch(k -> k.contains(PLACEHOLDER_SYMBOL)))
                        .filter(option -> readOption(options, option) == null)
                        .map(ConfigOption::key)
                        .sorted()
                        .collect(Collectors.toList());
        if (!missingRequiredOptions.isEmpty()) {
            throw new ValidationException(
                    String.format(
                            "One or more required options are missing.\n\n"
                                    + "Missing required options are:\n\n"
                                    + "%s",
                            String.join("\n", missingRequiredOptions)));
        }
        // Optional options are read for their side effect only: a bad value fails fast.
        optionalOptions.forEach(option -> readOption(options, option));
    }
    /**
     * Validates unconsumed option keys.
     *
     * @param factoryIdentifier identifier used in the error message
     * @param allOptionKeys every key present in the options
     * @param consumedOptionKeys keys known to (and consumed by) the factory
     * @param deprecatedOptionKeys subset of consumed keys reported as '(deprecated)'
     * @throws ValidationException if any key is neither consumed nor deprecated
     */
    public static void validateUnconsumedKeys(
            String factoryIdentifier,
            Set<String> allOptionKeys,
            Set<String> consumedOptionKeys,
            Set<String> deprecatedOptionKeys) {
        final Set<String> remainingOptionKeys = new HashSet<>(allOptionKeys);
        remainingOptionKeys.removeAll(consumedOptionKeys);
        if (!remainingOptionKeys.isEmpty()) {
            throw new ValidationException(
                    String.format(
                            "Unsupported options found for '%s'.\n\n"
                                    + "Unsupported options:\n\n"
                                    + "%s\n\n"
                                    + "Supported options:\n\n"
                                    + "%s",
                            factoryIdentifier,
                            remainingOptionKeys.stream().sorted().collect(Collectors.joining("\n")),
                            consumedOptionKeys.stream()
                                    .map(
                                            k -> {
                                                if (deprecatedOptionKeys.contains(k)) {
                                                    return String.format("%s (deprecated)", k);
                                                }
                                                return k;
                                            })
                                    .sorted()
                                    .collect(Collectors.joining("\n"))));
        }
    }
/** Validates unconsumed option keys. */
public static void validateUnconsumedKeys(
String factoryIdentifier, Set<String> allOptionKeys, Set<String> consumedOptionKeys) {
validateUnconsumedKeys(
factoryIdentifier, allOptionKeys, consumedOptionKeys, Collections.emptySet());
}
/** Returns the required option prefix for options of the given format. */
public static String getFormatPrefix(
ConfigOption<String> formatOption, String formatIdentifier) {
final String formatOptionKey = formatOption.key();
if (formatOptionKey.equals(FORMAT.key())) {
return formatIdentifier + ".";
} else if (formatOptionKey.endsWith(FORMAT_SUFFIX)) {
// extract the key prefix, e.g. extract 'key' from 'key.format'
String keyPrefix =
formatOptionKey.substring(0, formatOptionKey.length() - FORMAT_SUFFIX.length());
return keyPrefix + "." + formatIdentifier + ".";
} else {
throw new ValidationException(
"Format identifier key should be 'format' or suffix with '.format', "
+ "don't support format identifier key '"
+ formatOptionKey
+ "'.");
}
}
// --------------------------------------------------------------------------------------------
// Helper methods
// --------------------------------------------------------------------------------------------
private static <T extends DynamicTableFactory> T getDynamicTableFactory(
Class<T> factoryClass, @Nullable Catalog catalog, DynamicTableFactory.Context context) {
return getDynamicTableFactory(factoryClass, catalog)
.orElseGet(() -> discoverTableFactory(factoryClass, context));
}
@SuppressWarnings("unchecked")
private static <T extends DynamicTableFactory> Optional<T> getDynamicTableFactory(
Class<T> factoryClass, @Nullable Catalog catalog) {
if (catalog == null) {
return Optional.empty();
}
return catalog.getFactory()
.map(f -> factoryClass.isAssignableFrom(f.getClass()) ? (T) f : null);
}
private static <T extends DynamicTableFactory> T discoverTableFactory(
Class<T> factoryClass, DynamicTableFactory.Context context) {
final String connectorOption = context.getCatalogTable().getOptions().get(CONNECTOR.key());
if (connectorOption == null) {
return discoverManagedTableFactory(context.getClassLoader(), factoryClass);
}
try {
return discoverFactory(context.getClassLoader(), factoryClass, connectorOption);
} catch (ValidationException e) {
throw enrichNoMatchingConnectorError(factoryClass, context, connectorOption);
}
}
private static CatalogFactory getCatalogFactory(CatalogFactory.Context context) {
final String catalogType =
context.getOptions().get(CommonCatalogOptions.CATALOG_TYPE.key());
if (catalogType == null) {
throw new ValidationException(
String.format(
"Catalog options do not contain an option key '%s' for discovering a catalog.",
CommonCatalogOptions.CATALOG_TYPE.key()));
}
return discoverFactory(context.getClassLoader(), CatalogFactory.class, catalogType);
}
    /**
     * Builds a user-friendly error after factory discovery for a 'connector' option failed,
     * distinguishing "unknown connector" from "connector exists but is sink-only/source-only".
     */
    private static ValidationException enrichNoMatchingConnectorError(
            Class<?> factoryClass, DynamicTableFactory.Context context, String connectorOption) {
        final DynamicTableFactory factory;
        try {
            // Retry discovery against the generic base interface to find out
            // whether the connector exists at all.
            factory =
                    discoverFactory(
                            context.getClassLoader(), DynamicTableFactory.class, connectorOption);
        } catch (ValidationException e) {
            return new ValidationException(
                    String.format(
                            "Cannot discover a connector using option: %s",
                            stringifyOption(CONNECTOR.key(), connectorOption)),
                    e);
        }
        final Class<?> sourceFactoryClass = DynamicTableSourceFactory.class;
        final Class<?> sinkFactoryClass = DynamicTableSinkFactory.class;
        // for a better exception message
        if (sourceFactoryClass.equals(factoryClass)
                && sinkFactoryClass.isAssignableFrom(factory.getClass())) {
            // discovering source, but not found, and this is a sink connector.
            return new ValidationException(
                    String.format(
                            "Connector '%s' can only be used as a sink. It cannot be used as a source.",
                            connectorOption));
        } else if (sinkFactoryClass.equals(factoryClass)
                && sourceFactoryClass.isAssignableFrom(factory.getClass())) {
            // discovering sink, but not found, and this is a source connector.
            return new ValidationException(
                    String.format(
                            "Connector '%s' can only be used as a source. It cannot be used as a sink.",
                            connectorOption));
        } else {
            return new ValidationException(
                    String.format(
                            "Connector '%s' does neither implement the '%s' nor the '%s' interface.",
                            connectorOption,
                            sourceFactoryClass.getName(),
                            sinkFactoryClass.getName()));
        }
    }
    /**
     * Discovers the single {@link ManagedTableFactory} on the classpath that also implements the
     * requested source/sink interface. Used when a table declares no 'connector' option.
     *
     * @throws ValidationException if no such factory exists or more than one is found
     */
    @SuppressWarnings("unchecked")
    static <T extends DynamicTableFactory> T discoverManagedTableFactory(
            ClassLoader classLoader, Class<T> implementClass) {
        final List<Factory> factories = discoverFactories(classLoader);
        final List<Factory> foundFactories =
                factories.stream()
                        .filter(f -> ManagedTableFactory.class.isAssignableFrom(f.getClass()))
                        .filter(f -> implementClass.isAssignableFrom(f.getClass()))
                        .collect(Collectors.toList());
        if (foundFactories.isEmpty()) {
            throw new ValidationException(
                    String.format(
                            "Table options do not contain an option key 'connector' for discovering a connector. "
                                    + "Therefore, Flink assumes a managed table. However, a managed table factory "
                                    + "that implements %s is not in the classpath.",
                            implementClass.getName()));
        }
        if (foundFactories.size() > 1) {
            throw new ValidationException(
                    String.format(
                            "Multiple factories for managed table found in the classpath.\n\n"
                                    + "Ambiguous factory classes are:\n\n"
                                    + "%s",
                            foundFactories.stream()
                                    .map(f -> f.getClass().getName())
                                    .sorted()
                                    .collect(Collectors.joining("\n"))));
        }
        return (T) foundFactories.get(0);
    }
static List<Factory> discoverFactories(ClassLoader classLoader) {
final List<Factory> result = new LinkedList<>();
ServiceLoaderUtil.load(Factory.class, classLoader)
.forEach(
loadResult -> {
if (loadResult.hasFailed()) {
if (loadResult.getError() instanceof NoClassDefFoundError) {
LOG.debug(
"NoClassDefFoundError when loading a "
+ Factory.class
+ ". This is expected when trying to load a format dependency but no flink-connector-files is loaded.",
loadResult.getError());
// After logging, we just ignore this failure
return;
}
throw new TableException(
"Unexpected error when trying to load service provider for factories.",
loadResult.getError());
}
result.add(loadResult.getService());
});
return result;
}
private static String stringifyOption(String key, String value) {
if (GlobalConfiguration.isSensitive(key)) {
value = HIDDEN_CONTENT;
}
return String.format(
"'%s'='%s'",
EncodingUtils.escapeSingleQuotes(key), EncodingUtils.escapeSingleQuotes(value));
}
private static <T> T readOption(ReadableConfig options, ConfigOption<T> option) {
try {
return options.get(option);
} catch (Throwable t) {
throw new ValidationException(
String.format("Invalid value for option '%s'.", option.key()), t);
}
}
private static Set<String> allKeysExpanded(ConfigOption<?> option, Set<String> actualKeys) {
return allKeysExpanded("", option, actualKeys);
}
private static Set<String> allKeysExpanded(
String prefix, ConfigOption<?> option, Set<String> actualKeys) {
final Set<String> staticKeys =
allKeys(option).map(k -> prefix + k).collect(Collectors.toSet());
if (!canBePrefixMap(option)) {
return staticKeys;
}
// include all prefix keys of a map option by considering the actually provided keys
return Stream.concat(
staticKeys.stream(),
staticKeys.stream()
.flatMap(
k ->
actualKeys.stream()
.filter(c -> filterPrefixMapKey(k, c))))
.collect(Collectors.toSet());
}
private static Stream<String> allKeys(ConfigOption<?> option) {
return Stream.concat(Stream.of(option.key()), fallbackKeys(option));
}
private static Stream<String> fallbackKeys(ConfigOption<?> option) {
return StreamSupport.stream(option.fallbackKeys().spliterator(), false)
.map(FallbackKey::getKey);
}
private static Stream<String> deprecatedKeys(ConfigOption<?> option) {
return StreamSupport.stream(option.fallbackKeys().spliterator(), false)
.filter(FallbackKey::isDeprecated)
.map(FallbackKey::getKey);
}
// --------------------------------------------------------------------------------------------
// Helper classes
// --------------------------------------------------------------------------------------------
    /**
     * Base helper that tracks which option keys a factory consumes (required + optional +
     * implicitly consumed framework options) and validates the rest as left-overs.
     */
    private static class FactoryHelper<F extends Factory> {
        // Factory whose required/optional options define the consumed key set.
        protected final F factory;
        // All options as provided to the factory.
        protected final Configuration allOptions;
        // Keys considered consumed; validateExcept() may add further keys by prefix.
        protected final Set<String> consumedOptionKeys;
        // Consumed keys that are deprecated fallback keys (reported as such).
        protected final Set<String> deprecatedOptionKeys;
        /**
         * @param implicitOptions framework options (e.g. 'property-version') that are always
         *     treated as consumed even though the factory does not declare them
         */
        FactoryHelper(
                F factory, Map<String, String> configuration, ConfigOption<?>... implicitOptions) {
            this.factory = factory;
            this.allOptions = Configuration.fromMap(configuration);
            final List<ConfigOption<?>> consumedOptions = new ArrayList<>();
            consumedOptions.addAll(Arrays.asList(implicitOptions));
            consumedOptions.addAll(factory.requiredOptions());
            consumedOptions.addAll(factory.optionalOptions());
            consumedOptionKeys =
                    consumedOptions.stream()
                            .flatMap(
                                    option -> allKeysExpanded(option, allOptions.keySet()).stream())
                            .collect(Collectors.toSet());
            deprecatedOptionKeys =
                    consumedOptions.stream()
                            .flatMap(FactoryUtil::deprecatedKeys)
                            .collect(Collectors.toSet());
        }
        /** Validates the options of the factory. It checks for unconsumed option keys. */
        public void validate() {
            validateFactoryOptions(factory, allOptions);
            validateUnconsumedKeys(
                    factory.factoryIdentifier(),
                    allOptions.keySet(),
                    consumedOptionKeys,
                    deprecatedOptionKeys);
        }
        /**
         * Validates the options of the factory. It checks for unconsumed option keys while ignoring
         * the options with given prefixes.
         *
         * <p>The option keys that have given prefix {@code prefixToSkip} would just be skipped for
         * validation.
         *
         * @param prefixesToSkip Set of option key prefixes to skip validation
         */
        public void validateExcept(String... prefixesToSkip) {
            Preconditions.checkArgument(
                    prefixesToSkip.length > 0, "Prefixes to skip can not be empty.");
            final List<String> prefixesList = Arrays.asList(prefixesToSkip);
            // Mark every prefixed key as consumed so validate() will not flag it.
            consumedOptionKeys.addAll(
                    allOptions.keySet().stream()
                            .filter(key -> prefixesList.stream().anyMatch(key::startsWith))
                            .collect(Collectors.toSet()));
            validate();
        }
        /** Returns all options currently being consumed by the factory. */
        public ReadableConfig getOptions() {
            return allOptions;
        }
    }
    /**
     * Helper utility for validating all options for a {@link CatalogFactory}.
     *
     * @see #createCatalogFactoryHelper(CatalogFactory, CatalogFactory.Context)
     */
    @PublicEvolving
    public static class CatalogFactoryHelper extends FactoryHelper<CatalogFactory> {
        public CatalogFactoryHelper(CatalogFactory catalogFactory, CatalogFactory.Context context) {
            // 'property-version' is implicitly consumed and never reported as left-over.
            super(catalogFactory, context.getOptions(), PROPERTY_VERSION);
        }
    }
    /**
     * Helper utility for validating all options for a {@link ModuleFactory}.
     *
     * @see #createModuleFactoryHelper(ModuleFactory, ModuleFactory.Context)
     */
    @PublicEvolving
    public static class ModuleFactoryHelper extends FactoryHelper<ModuleFactory> {
        public ModuleFactoryHelper(ModuleFactory moduleFactory, ModuleFactory.Context context) {
            // 'property-version' is implicitly consumed and never reported as left-over.
            super(moduleFactory, context.getOptions(), PROPERTY_VERSION);
        }
    }
    /**
     * Helper utility for discovering formats and validating all options for a {@link
     * DynamicTableFactory}.
     *
     * @see #createTableFactoryHelper(DynamicTableFactory, DynamicTableFactory.Context)
     */
    @PublicEvolving
    public static class TableFactoryHelper extends FactoryHelper<DynamicTableFactory> {
        // Creation context of the table; also handed to format factories during discovery.
        private final DynamicTableFactory.Context context;
        // Options from DynamicTableFactory.Context#getEnrichmentOptions(), used to override
        // persisted option values for keys declared as forwardable.
        private final Configuration enrichingOptions;
        private TableFactoryHelper(
                DynamicTableFactory tableFactory, DynamicTableFactory.Context context) {
            // The validated option set is the catalog table's options; PROPERTY_VERSION and
            // CONNECTOR are passed to the base helper as additional keys (their exact
            // treatment is defined by the FactoryHelper constructor, outside this class).
            super(
                    tableFactory,
                    context.getCatalogTable().getOptions(),
                    PROPERTY_VERSION,
                    CONNECTOR);
            this.context = context;
            this.enrichingOptions = Configuration.fromMap(context.getEnrichmentOptions());
            // Merge forwardable enrichment options into allOptions before any validation or
            // format discovery takes place.
            this.forwardOptions();
        }
        /**
         * Returns all options currently being consumed by the factory. This method returns the
         * options already merged with {@link DynamicTableFactory.Context#getEnrichmentOptions()},
         * using {@link DynamicTableFactory#forwardOptions()} as reference of mergeable options.
         */
        @Override
        public ReadableConfig getOptions() {
            return super.getOptions();
        }
        /**
         * Discovers a {@link DecodingFormat} of the given type using the given option as factory
         * identifier.
         *
         * @throws ValidationException if no format is configured under the given option
         */
        public <I, F extends DecodingFormatFactory<I>> DecodingFormat<I> discoverDecodingFormat(
                Class<F> formatFactoryClass, ConfigOption<String> formatOption) {
            return discoverOptionalDecodingFormat(formatFactoryClass, formatOption)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Could not find required scan format '%s'.",
                                                    formatOption.key())));
        }
        /**
         * Discovers a {@link DecodingFormat} of the given type using the given option (if present)
         * as factory identifier.
         */
        public <I, F extends DecodingFormatFactory<I>>
                Optional<DecodingFormat<I>> discoverOptionalDecodingFormat(
                        Class<F> formatFactoryClass, ConfigOption<String> formatOption) {
            return discoverOptionalFormatFactory(formatFactoryClass, formatOption)
                    .map(
                            formatFactory -> {
                                String formatPrefix = formatPrefix(formatFactory, formatOption);
                                try {
                                    // Format options are exposed to the format factory under its
                                    // own prefix, merged with forwardable enrichment options.
                                    return formatFactory.createDecodingFormat(
                                            context,
                                            createFormatOptions(formatPrefix, formatFactory));
                                } catch (Throwable t) {
                                    throw new ValidationException(
                                            String.format(
                                                    "Error creating scan format '%s' in option space '%s'.",
                                                    formatFactory.factoryIdentifier(),
                                                    formatPrefix),
                                            t);
                                }
                            });
        }
        /**
         * Discovers a {@link EncodingFormat} of the given type using the given option as factory
         * identifier.
         *
         * @throws ValidationException if no format is configured under the given option
         */
        public <I, F extends EncodingFormatFactory<I>> EncodingFormat<I> discoverEncodingFormat(
                Class<F> formatFactoryClass, ConfigOption<String> formatOption) {
            return discoverOptionalEncodingFormat(formatFactoryClass, formatOption)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Could not find required sink format '%s'.",
                                                    formatOption.key())));
        }
        /**
         * Discovers a {@link EncodingFormat} of the given type using the given option (if present)
         * as factory identifier.
         */
        public <I, F extends EncodingFormatFactory<I>>
                Optional<EncodingFormat<I>> discoverOptionalEncodingFormat(
                        Class<F> formatFactoryClass, ConfigOption<String> formatOption) {
            return discoverOptionalFormatFactory(formatFactoryClass, formatOption)
                    .map(
                            formatFactory -> {
                                String formatPrefix = formatPrefix(formatFactory, formatOption);
                                try {
                                    // Mirrors discoverOptionalDecodingFormat, for the sink side.
                                    return formatFactory.createEncodingFormat(
                                            context,
                                            createFormatOptions(formatPrefix, formatFactory));
                                } catch (Throwable t) {
                                    throw new ValidationException(
                                            String.format(
                                                    "Error creating sink format '%s' in option space '%s'.",
                                                    formatFactory.factoryIdentifier(),
                                                    formatPrefix),
                                            t);
                                }
                            });
        }
        // ----------------------------------------------------------------------------------------
        /**
         * Forwards the options declared in {@link DynamicTableFactory#forwardOptions()} and
         * possibly {@link FormatFactory#forwardOptions()} from {@link
         * DynamicTableFactory.Context#getEnrichmentOptions()} to the final options, if present.
         */
        @SuppressWarnings({"unchecked"})
        private void forwardOptions() {
            for (ConfigOption<?> option : factory.forwardOptions()) {
                // Only keys explicitly declared as forwardable may be overridden by the
                // enrichment options; the unchecked cast is safe because the value was read
                // with the very same option.
                enrichingOptions
                        .getOptional(option)
                        .ifPresent(o -> allOptions.set((ConfigOption<? super Object>) option, o));
            }
        }
        private <F extends Factory> Optional<F> discoverOptionalFormatFactory(
                Class<F> formatFactoryClass, ConfigOption<String> formatOption) {
            final String identifier = allOptions.get(formatOption);
            // Fail fast if the persisted plan and the enrichment options disagree on the
            // format identifier.
            checkFormatIdentifierMatchesWithEnrichingOptions(formatOption, identifier);
            if (identifier == null) {
                return Optional.empty();
            }
            final F factory =
                    discoverFactory(context.getClassLoader(), formatFactoryClass, identifier);
            String formatPrefix = formatPrefix(factory, formatOption);
            // log all used options of other factories
            final List<ConfigOption<?>> consumedOptions = new ArrayList<>();
            consumedOptions.addAll(factory.requiredOptions());
            consumedOptions.addAll(factory.optionalOptions());
            // Register the format's (prefixed) option keys as consumed so that the outer
            // validation does not report them as unknown table options.
            consumedOptions.stream()
                    .flatMap(
                            option ->
                                    allKeysExpanded(formatPrefix, option, allOptions.keySet())
                                            .stream())
                    .forEach(consumedOptionKeys::add);
            // Deprecated format keys are tracked with the format prefix applied.
            consumedOptions.stream()
                    .flatMap(FactoryUtil::deprecatedKeys)
                    .map(k -> formatPrefix + k)
                    .forEach(deprecatedOptionKeys::add);
            return Optional.of(factory);
        }
        private String formatPrefix(Factory formatFactory, ConfigOption<String> formatOption) {
            String identifier = formatFactory.factoryIdentifier();
            return getFormatPrefix(formatOption, identifier);
        }
        @SuppressWarnings({"unchecked"})
        private ReadableConfig createFormatOptions(
                String formatPrefix, FormatFactory formatFactory) {
            Set<ConfigOption<?>> forwardableConfigOptions = formatFactory.forwardOptions();
            // View on the table options restricted to the format's key prefix.
            Configuration formatConf = new DelegatingConfiguration(allOptions, formatPrefix);
            if (forwardableConfigOptions.isEmpty()) {
                return formatConf;
            }
            // Overlay forwardable format options coming from the enrichment options.
            Configuration formatConfFromEnrichingOptions =
                    new DelegatingConfiguration(enrichingOptions, formatPrefix);
            for (ConfigOption<?> option : forwardableConfigOptions) {
                formatConfFromEnrichingOptions
                        .getOptional(option)
                        .ifPresent(o -> formatConf.set((ConfigOption<? super Object>) option, o));
            }
            return formatConf;
        }
        /**
         * This function assumes that the format config is used only and only if the original
         * configuration contains the format config option. It will fail if there is a mismatch of
         * the identifier between the format in the plan table map and the one in enriching table
         * map.
         */
        private void checkFormatIdentifierMatchesWithEnrichingOptions(
                ConfigOption<String> formatOption, String identifierFromPlan) {
            Optional<String> identifierFromEnrichingOptions =
                    enrichingOptions.getOptional(formatOption);
            if (!identifierFromEnrichingOptions.isPresent()) {
                return;
            }
            if (identifierFromPlan == null) {
                throw new ValidationException(
                        String.format(
                                "The persisted plan has no format option '%s' specified, while the catalog table has it with value '%s'. "
                                        + "This is invalid, as either only the persisted plan table defines the format, "
                                        + "or both the persisted plan table and the catalog table defines the same format.",
                                formatOption, identifierFromEnrichingOptions.get()));
            }
            if (!Objects.equals(identifierFromPlan, identifierFromEnrichingOptions.get())) {
                throw new ValidationException(
                        String.format(
                                "Both persisted plan table and catalog table define the format option '%s', "
                                        + "but they mismatch: '%s' != '%s'.",
                                formatOption,
                                identifierFromPlan,
                                identifierFromEnrichingOptions.get()));
            }
        }
    }
    /** Default implementation of {@link DynamicTableFactory.Context}. */
    @Internal
    public static class DefaultDynamicTableContext implements DynamicTableFactory.Context {
        private final ObjectIdentifier objectIdentifier;
        private final ResolvedCatalogTable catalogTable;
        private final Map<String, String> enrichmentOptions;
        private final ReadableConfig configuration;
        private final ClassLoader classLoader;
        private final boolean isTemporary;
        /**
         * Creates a context that simply stores the given values and returns them unchanged
         * from the corresponding accessors.
         *
         * @param objectIdentifier identifier of the table
         * @param catalogTable resolved catalog table
         * @param enrichmentOptions options used to enrich the persisted table options (consumed
         *     by {@link TableFactoryHelper})
         * @param configuration configuration exposed via {@link #getConfiguration()}
         * @param classLoader class loader used for factory discovery
         * @param isTemporary whether the table is temporary
         */
        public DefaultDynamicTableContext(
                ObjectIdentifier objectIdentifier,
                ResolvedCatalogTable catalogTable,
                Map<String, String> enrichmentOptions,
                ReadableConfig configuration,
                ClassLoader classLoader,
                boolean isTemporary) {
            this.objectIdentifier = objectIdentifier;
            this.catalogTable = catalogTable;
            this.enrichmentOptions = enrichmentOptions;
            this.configuration = configuration;
            this.classLoader = classLoader;
            this.isTemporary = isTemporary;
        }
        @Override
        public ObjectIdentifier getObjectIdentifier() {
            return objectIdentifier;
        }
        @Override
        public ResolvedCatalogTable getCatalogTable() {
            return catalogTable;
        }
        @Override
        public Map<String, String> getEnrichmentOptions() {
            return enrichmentOptions;
        }
        @Override
        public ReadableConfig getConfiguration() {
            return configuration;
        }
        @Override
        public ClassLoader getClassLoader() {
            return classLoader;
        }
        @Override
        public boolean isTemporary() {
            return isTemporary;
        }
    }
    /** Default implementation of {@link CatalogFactory.Context}. */
    @Internal
    public static class DefaultCatalogContext implements CatalogFactory.Context {
        private final String name;
        private final Map<String, String> options;
        private final ReadableConfig configuration;
        private final ClassLoader classLoader;
        /**
         * Creates a context that simply stores the given values and returns them unchanged
         * from the corresponding accessors.
         *
         * @param name catalog name
         * @param options catalog options to validate
         * @param configuration configuration exposed via {@link #getConfiguration()}
         * @param classLoader class loader used for factory discovery
         */
        public DefaultCatalogContext(
                String name,
                Map<String, String> options,
                ReadableConfig configuration,
                ClassLoader classLoader) {
            this.name = name;
            this.options = options;
            this.configuration = configuration;
            this.classLoader = classLoader;
        }
        @Override
        public String getName() {
            return name;
        }
        @Override
        public Map<String, String> getOptions() {
            return options;
        }
        @Override
        public ReadableConfig getConfiguration() {
            return configuration;
        }
        @Override
        public ClassLoader getClassLoader() {
            return classLoader;
        }
    }
    /** Default implementation of {@link ModuleFactory.Context}. */
    @Internal
    public static class DefaultModuleContext implements ModuleFactory.Context {
        private final Map<String, String> options;
        private final ReadableConfig configuration;
        private final ClassLoader classLoader;
        /**
         * Creates a context that simply stores the given values and returns them unchanged
         * from the corresponding accessors.
         *
         * @param options module options to validate
         * @param configuration configuration exposed via {@link #getConfiguration()}
         * @param classLoader class loader used for factory discovery
         */
        public DefaultModuleContext(
                Map<String, String> options,
                ReadableConfig configuration,
                ClassLoader classLoader) {
            this.options = options;
            this.configuration = configuration;
            this.classLoader = classLoader;
        }
        @Override
        public Map<String, String> getOptions() {
            return options;
        }
        @Override
        public ReadableConfig getConfiguration() {
            return configuration;
        }
        @Override
        public ClassLoader getClassLoader() {
            return classLoader;
        }
    }
// --------------------------------------------------------------------------------------------
    /** Hidden constructor: this class is a collection of static utilities only. */
    private FactoryUtil() {
        // no instantiation
    }
}
| apache-2.0 |
jinglining/flink | flink-core/src/test/java/org/apache/flink/types/RowTest.java | 2914 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class RowTest {

	/** Builds the five-field row that several of the tests below operate on. */
	private static Row createSampleRow() {
		Row sample = new Row(5);
		sample.setField(0, 1);
		sample.setField(1, "hello");
		sample.setField(2, null);
		sample.setField(3, new Tuple2<>(2, "hi"));
		sample.setField(4, "hello world");
		return sample;
	}

	@Test
	public void testRowToString() {
		// Null fields and nested tuples must be rendered as part of the string.
		assertEquals("1,hello,null,(2,hi),hello world", createSampleRow().toString());
	}

	@Test
	public void testRowOf() {
		// The static factory must produce the same row as manual setField calls.
		Row viaFactory = Row.of(1, "hello", null, Tuple2.of(2L, "hi"), true);
		Row viaSetters = new Row(5);
		viaSetters.setField(0, 1);
		viaSetters.setField(1, "hello");
		viaSetters.setField(2, null);
		viaSetters.setField(3, new Tuple2<>(2L, "hi"));
		viaSetters.setField(4, true);
		assertEquals(viaFactory, viaSetters);
	}

	@Test
	public void testRowCopy() {
		Row original = createSampleRow();
		Row copy = Row.copy(original);
		// The copy is equal in content ...
		assertEquals(original, copy);
		// ... but must be a distinct instance.
		assertTrue(original != copy);
	}

	@Test
	public void testRowProject() {
		// Projecting onto fields 0, 2 and 4 keeps those values in order.
		Row projected = Row.project(createSampleRow(), new int[]{0, 2, 4});
		Row expected = new Row(3);
		expected.setField(0, 1);
		expected.setField(1, null);
		expected.setField(2, "hello world");
		assertEquals(expected, projected);
	}

	@Test
	public void testRowJoin() {
		Row first = new Row(2);
		first.setField(0, 1);
		first.setField(1, "hello");
		Row second = new Row(2);
		second.setField(0, null);
		second.setField(1, new Tuple2<>(2, "hi"));
		Row third = new Row(1);
		third.setField(0, "hello world");
		// Joining the three parts must yield exactly the five-field sample row.
		assertEquals(createSampleRow(), Row.join(first, second, third));
	}
}
| apache-2.0 |
SupunS/carbon-identity | components/identity/org.wso2.carbon.identity.mgt/src/main/java/org/wso2/carbon/identity/mgt/policy/PolicyRegistry.java | 1305 | /*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.identity.mgt.policy;
import java.util.ArrayList;
/**
 * Registry of {@link PolicyEnforcer} instances. Password policies registered here are
 * evaluated in registration order by {@link #enforcePasswordPolicies(Object...)}.
 */
public class PolicyRegistry {

    // Registered enforcers, evaluated in insertion order.
    private final ArrayList<PolicyEnforcer> policyCollection = new ArrayList<PolicyEnforcer>();

    public PolicyRegistry() {
    }

    /**
     * Runs every registered password policy against the given arguments.
     *
     * <p>Only enforcers of type {@link AbstractPasswordPolicyEnforcer} participate; other
     * registered enforcers are skipped. A {@code null} argument array is a no-op.
     *
     * @param args arguments handed to each policy's {@code enforce} method
     * @throws PolicyViolationException if any password policy rejects the arguments; the
     *     exception carries the failing policy's error message
     */
    public void enforcePasswordPolicies(Object... args) throws PolicyViolationException {
        if (args == null) {
            return;
        }
        for (PolicyEnforcer policy : policyCollection) {
            if (policy instanceof AbstractPasswordPolicyEnforcer && !policy.enforce(args)) {
                throw new PolicyViolationException(policy.getErrorMessage());
            }
        }
    }

    /**
     * Registers a policy. Policies are evaluated in the order they were added.
     *
     * @param policy the enforcer to register
     */
    public void addPolicy(PolicyEnforcer policy) {
        policyCollection.add(policy);
    }
}
| apache-2.0 |
szpak/spock | spock-core/src/main/java/org/spockframework/mock/runtime/FailingRealMethodInvoker.java | 1120 | /*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.spockframework.mock.runtime;
import org.spockframework.mock.CannotInvokeRealMethodException;
import org.spockframework.mock.IMockInvocation;
import org.spockframework.mock.IResponseGenerator;
public class FailingRealMethodInvoker implements IResponseGenerator {
private final String message;
public FailingRealMethodInvoker(String message) {
this.message = message;
}
public Object respond(IMockInvocation invocation) {
throw new CannotInvokeRealMethodException(message);
}
}
| apache-2.0 |
Drifftr/devstudio-tooling-bps | plugins/org.eclipse.bpel.ui/src/org/eclipse/bpel/ui/uiextensionmodel/ActivityExtension.java | 2611 | /*******************************************************************************
* Copyright (c) 2005, 2012 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.bpel.ui.uiextensionmodel;
import org.eclipse.emf.ecore.EObject;
/**
 * UI extension data for an activity: position ({@code x}, {@code y}), size ({@code width},
 * {@code height}) and an {@code implicit} flag. The {@code @model} tags are EMF generation
 * directives and must be preserved.
 *
 * @model
 */
public interface ActivityExtension extends EObject {
	/**
	 * Returns the value of the '<em>X</em>' attribute.
	 *
	 * @return the value of the '<em>X</em>' attribute.
	 * @see #setX(int)
	 * @model
	 */
	public int getX();
	/**
	 * Sets the value of the '{@link org.eclipse.bpel.ui.uiextensionmodel.ActivityExtension#getX <em>X</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>X</em>' attribute.
	 * @see #getX()
	 * @generated
	 */
	void setX(int value);
	/**
	 * Returns the value of the '<em>Y</em>' attribute.
	 *
	 * @return the value of the '<em>Y</em>' attribute.
	 * @see #setY(int)
	 * @model
	 */
	public int getY();
	/**
	 * Sets the value of the '{@link org.eclipse.bpel.ui.uiextensionmodel.ActivityExtension#getY <em>Y</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Y</em>' attribute.
	 * @see #getY()
	 * @generated
	 */
	void setY(int value);
	/**
	 * Returns the value of the '<em>Width</em>' attribute.
	 *
	 * @return the value of the '<em>Width</em>' attribute.
	 * @see #setWidth(int)
	 * @model
	 */
	public int getWidth();
	/**
	 * Sets the value of the '{@link org.eclipse.bpel.ui.uiextensionmodel.ActivityExtension#getWidth <em>Width</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Width</em>' attribute.
	 * @see #getWidth()
	 * @generated
	 */
	void setWidth(int value);
	/**
	 * Returns the value of the '<em>Height</em>' attribute.
	 *
	 * @return the value of the '<em>Height</em>' attribute.
	 * @see #setHeight(int)
	 * @model
	 */
	public int getHeight();
	/**
	 * Sets the value of the '{@link org.eclipse.bpel.ui.uiextensionmodel.ActivityExtension#getHeight <em>Height</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Height</em>' attribute.
	 * @see #getHeight()
	 * @generated
	 */
	void setHeight(int value);
	/**
	 * Returns the value of the '<em>Implicit</em>' attribute.
	 *
	 * @return the value of the '<em>Implicit</em>' attribute.
	 * @see #setImplicit(boolean)
	 * @model
	 */
	public boolean isImplicit();
	/**
	 * Sets the value of the '{@link org.eclipse.bpel.ui.uiextensionmodel.ActivityExtension#isImplicit <em>Implicit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Implicit</em>' attribute.
	 * @see #isImplicit()
	 * @generated
	 */
	void setImplicit(boolean value);
}
| apache-2.0 |
droolsjbpm/drools | drools-traits/src/test/java/org/drools/traits/compiler/factmodel/traits/IPerson.java | 1035 | /*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.traits.compiler.factmodel.traits;
import org.drools.core.factmodel.traits.Thing;
import org.drools.core.factmodel.traits.Trait;
import org.drools.core.factmodel.traits.TraitableBean;
/**
 * Trait interface declaring name and age properties on top of a traitable core object.
 *
 * @param <K> the traitable bean type this trait wraps
 */
@Trait
public interface IPerson<K extends TraitableBean> extends Thing<K> {
    // Person's name.
    public String getName();
    public void setName( String name );
    // Person's age.
    public int getAge();
    public void setAge( int age );
}
| apache-2.0 |
spring-projects/spring-android | spring-android-core/src/main/java/org/springframework/util/StreamUtils.java | 5809 | /*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.util;
import java.io.ByteArrayOutputStream;
import java.io.FilterInputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
/**
 * Simple utility methods for dealing with streams. The copy methods of this class are
 * similar to those defined in {@link FileCopyUtils} except that all affected streams are
 * left open when done. All copy methods use a block size of 4096 bytes.
 *
 * <p>Mainly for use within the framework, but also useful for application code.
 *
 * @author Juergen Hoeller
 * @author Phillip Webb
 * @since 2.0
 * @see FileCopyUtils
 */
public abstract class StreamUtils {

	/** Block size used by all copy methods. */
	public static final int BUFFER_SIZE = 4096;

	/**
	 * Copy the contents of the given InputStream into a new byte array.
	 * Leaves the stream open when done.
	 * @param in the stream to copy from
	 * @return the new byte array that has been copied to
	 * @throws IOException in case of I/O errors
	 */
	public static byte[] copyToByteArray(InputStream in) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream(BUFFER_SIZE);
		copy(in, out);
		return out.toByteArray();
	}

	/**
	 * Copy the contents of the given InputStream into a String.
	 * Leaves the stream open when done.
	 * @param in the InputStream to copy from
	 * @param charset the charset used to decode the bytes
	 * @return the String that has been copied to
	 * @throws IOException in case of I/O errors
	 */
	public static String copyToString(InputStream in, Charset charset) throws IOException {
		Assert.notNull(in, "No InputStream specified");
		StringBuilder out = new StringBuilder();
		InputStreamReader reader = new InputStreamReader(in, charset);
		char[] buffer = new char[BUFFER_SIZE];
		int bytesRead = -1;
		while ((bytesRead = reader.read(buffer)) != -1) {
			out.append(buffer, 0, bytesRead);
		}
		return out.toString();
	}

	/**
	 * Copy the contents of the given byte array to the given OutputStream.
	 * Leaves the stream open when done.
	 * @param in the byte array to copy from
	 * @param out the OutputStream to copy to
	 * @throws IOException in case of I/O errors
	 */
	public static void copy(byte[] in, OutputStream out) throws IOException {
		Assert.notNull(in, "No input byte array specified");
		Assert.notNull(out, "No OutputStream specified");
		out.write(in);
	}

	/**
	 * Copy the contents of the given String to the given output OutputStream.
	 * Leaves the stream open when done.
	 * @param in the String to copy from
	 * @param charset the Charset used to encode the String
	 * @param out the OutputStream to copy to
	 * @throws IOException in case of I/O errors
	 */
	public static void copy(String in, Charset charset, OutputStream out) throws IOException {
		Assert.notNull(in, "No input String specified");
		Assert.notNull(charset, "No charset specified");
		Assert.notNull(out, "No OutputStream specified");
		Writer writer = new OutputStreamWriter(out, charset);
		writer.write(in);
		// Flush (but do not close) so the data reaches the underlying stream.
		writer.flush();
	}

	/**
	 * Copy the contents of the given InputStream to the given OutputStream.
	 * Leaves both streams open when done.
	 * @param in the InputStream to copy from
	 * @param out the OutputStream to copy to
	 * @return the number of bytes copied
	 * @throws IOException in case of I/O errors
	 */
	public static int copy(InputStream in, OutputStream out) throws IOException {
		Assert.notNull(in, "No InputStream specified");
		Assert.notNull(out, "No OutputStream specified");
		int byteCount = 0;
		byte[] buffer = new byte[BUFFER_SIZE];
		int bytesRead = -1;
		while ((bytesRead = in.read(buffer)) != -1) {
			out.write(buffer, 0, bytesRead);
			byteCount += bytesRead;
		}
		out.flush();
		return byteCount;
	}

	/**
	 * Returns a variant of the given {@link InputStream} where calling
	 * {@link InputStream#close() close()} has no effect.
	 * @param in the InputStream to decorate
	 * @return a version of the InputStream that ignores calls to close
	 */
	public static InputStream nonClosing(InputStream in) {
		Assert.notNull(in, "No InputStream specified");
		return new NonClosingInputStream(in);
	}

	/**
	 * Returns a variant of the given {@link OutputStream} where calling
	 * {@link OutputStream#close() close()} has no effect.
	 * @param out the OutputStream to decorate
	 * @return a version of the OutputStream that ignores calls to close
	 */
	public static OutputStream nonClosing(OutputStream out) {
		Assert.notNull(out, "No OutputStream specified");
		return new NonClosingOutputStream(out);
	}

	/** Wrapper that swallows {@code close()} calls on the delegate stream. */
	private static class NonClosingInputStream extends FilterInputStream {

		public NonClosingInputStream(InputStream in) {
			super(in);
		}

		@Override
		public void close() throws IOException {
		}
	}

	/** Wrapper that swallows {@code close()} calls on the delegate stream. */
	private static class NonClosingOutputStream extends FilterOutputStream {

		public NonClosingOutputStream(OutputStream out) {
			super(out);
		}

		@Override
		public void write(byte[] b, int off, int len) throws IOException {
			// It is critical that we override this method for performance:
			// FilterOutputStream's default implementation delegates byte by byte.
			out.write(b, off, len);
		}

		@Override
		public void close() throws IOException {
		}
	}
}
| apache-2.0 |
steveloughran/hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FutureDataInputStreamBuilder.java | 2288 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * Builder for input streams and subclasses whose return value is
 * actually a completable future: this allows for better asynchronous
 * operation.
 *
 * To be more generic, {@link #opt(String, int)} and {@link #must(String, int)}
 * variants provide implementation-agnostic way to customize the builder.
 * Each FS-specific builder implementation can interpret the FS-specific
 * options accordingly, for example:
 *
 * If the option is not related to the file system, the option will be ignored.
 * If the option is must, but not supported by the file system, a
 * {@link IllegalArgumentException} will be thrown.
 *
 */
@InterfaceAudience.Public
@InterfaceStability.Unstable
public interface FutureDataInputStreamBuilder
    extends FSBuilder<CompletableFuture<FSDataInputStream>, FutureDataInputStreamBuilder> {

  /**
   * Initiates the open operation.
   *
   * @return a future which completes with the opened stream
   * @throws IllegalArgumentException if the builder arguments are invalid
   * @throws UnsupportedOperationException if a mandatory option is not supported
   * @throws IOException if the operation fails eagerly
   */
  @Override
  CompletableFuture<FSDataInputStream> build()
      throws IllegalArgumentException, UnsupportedOperationException,
      IOException;

  /**
   * A FileStatus may be provided to the open request.
   * It is up to the implementation whether to use this or not.
   * The default implementation ignores the hint and returns this builder.
   * @param status status.
   * @return the builder.
   */
  default FutureDataInputStreamBuilder withFileStatus(FileStatus status) {
    return this;
  }
}
| apache-2.0 |
xiaoshi316/RememberEditText | library/src/androidTest/java/cn/zhaiyifan/rememberedittext/ApplicationTest.java | 372 | package cn.zhaiyifan.rememberedittext;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
 * Minimal {@link ApplicationTestCase} running against the stock {@link Application} class.
 *
 * <p>See <a href="http://d.android.com/tools/testing/testing_android.html">Testing
 * Fundamentals</a>.
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        super(Application.class);
    }
}
cmelchior/caliper | old/caliper/main/java/com/google/caliper/runner/ExperimentModule.java | 4964 | /*
* Copyright (C) 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.caliper.runner;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.caliper.Param;
import com.google.caliper.bridge.WorkerSpec;
import com.google.caliper.util.Parser;
import com.google.caliper.util.Parsers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedMap;
import com.google.inject.AbstractModule;
import com.google.inject.MembersInjector;
import com.google.inject.TypeLiteral;
import com.google.inject.matcher.AbstractMatcher;
import com.google.inject.spi.TypeEncounter;
import com.google.inject.spi.TypeListener;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.ParseException;
/**
* A module that binds data specific to a single experiment.
*/
public final class ExperimentModule extends AbstractModule {
  // Class declaring the benchmark method; instances of it receive @Param injection.
  private final Class<?> benchmarkClass;
  // User-chosen parameter values, keyed by @Param field name (values are unparsed strings).
  private final ImmutableSortedMap<String, String> parameters;
  // The concrete benchmark method to run.
  private final Method benchmarkMethod;
  private ExperimentModule(Class<?> benchmarkClass, Method benchmarkMethod,
      ImmutableSortedMap<String, String> parameters) {
    this.benchmarkClass = checkNotNull(benchmarkClass);
    this.parameters = checkNotNull(parameters);
    this.benchmarkMethod = benchmarkMethod;
  }
  /** Creates a module from an in-process {@link Experiment} description. */
  public static ExperimentModule forExperiment(Experiment experiment) {
    Method benchmarkMethod = experiment.instrumentation().benchmarkMethod();
    return new ExperimentModule(benchmarkMethod.getDeclaringClass(),
        benchmarkMethod,
        experiment.userParameters());
  }
  /**
   * Recreates the module inside a worker process from its serialized {@link WorkerSpec},
   * resolving the benchmark class and method reflectively.
   */
  public static ExperimentModule forWorkerSpec(WorkerSpec spec)
      throws ClassNotFoundException {
    Class<?> benchmarkClass = Class.forName(spec.benchmarkSpec.className());
    Method benchmarkMethod = findBenchmarkMethod(benchmarkClass, spec.benchmarkSpec.methodName(),
        spec.methodParameterClasses);
    // The method may be non-public; make it callable regardless.
    benchmarkMethod.setAccessible(true);
    return new ExperimentModule(benchmarkClass, benchmarkMethod, spec.benchmarkSpec.parameters());
  }
  @Override protected void configure() {
    binder().requireExplicitBindings();
    bind(benchmarkClass); // TypeListener doesn't fire without this
    bind(Object.class).annotatedWith(Running.Benchmark.class).to(benchmarkClass);
    bindConstant().annotatedWith(Running.BenchmarkMethod.class).to(benchmarkMethod.getName());
    bind(Method.class).annotatedWith(Running.BenchmarkMethod.class).toInstance(benchmarkMethod);
    // Inject parsed @Param values into any freshly constructed benchmark instance.
    bindListener(new BenchmarkTypeMatcher(), new BenchmarkParameterInjector());
  }
  /** Matches exactly the benchmark class, so the injector below only sees benchmarks. */
  private final class BenchmarkTypeMatcher extends AbstractMatcher<TypeLiteral<?>> {
    @Override
    public boolean matches(TypeLiteral<?> t) {
      return t.getType().equals(benchmarkClass);
    }
  }
  /**
   * Guice {@link TypeListener} that sets every {@code @Param}-annotated field from the
   * string value in {@code parameters}, parsed with the field type's conventional parser.
   */
  private final class BenchmarkParameterInjector implements TypeListener {
    @Override
    public <I> void hear(TypeLiteral<I> type, TypeEncounter<I> encounter) {
      for (final Field field : type.getRawType().getDeclaredFields()) {
        if (field.isAnnotationPresent(Param.class)) {
          encounter.register(new MembersInjector<I>() {
            @Override public void injectMembers(I instance) {
              try {
                field.setAccessible(true);
                Parser<?> parser = Parsers.conventionalParser(field.getType());
                field.set(instance, parser.parse(parameters.get(field.getName())));
              } catch (NoSuchMethodException e) {
                // No conventional parser (fromString/valueOf/String ctor) for the field type.
                throw new RuntimeException(e);
              } catch (ParseException e) {
                // The supplied parameter string could not be parsed to the field type.
                throw new RuntimeException(e);
              } catch (IllegalArgumentException e) {
                throw new AssertionError("types have been checked");
              } catch (IllegalAccessException e) {
                throw new AssertionError("already set access");
              }
            }
          });
        }
      }
    }
  }
  private static Method findBenchmarkMethod(Class<?> benchmark, String methodName,
      ImmutableList<Class<?>> methodParameterClasses) {
    Class<?>[] params = methodParameterClasses.toArray(new Class[methodParameterClasses.size()]);
    try {
      return benchmark.getDeclaredMethod(methodName, params);
    } catch (NoSuchMethodException e) {
      throw new RuntimeException(e);
    } catch (SecurityException e) {
      // assertion error?
      throw new RuntimeException(e);
    }
  }
}
| apache-2.0 |
corochoone/elasticsearch | src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java | 6122 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport.netty;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.util.concurrent.KeyedLock;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
public class KeyedLockTests extends ElasticsearchTestCase {
@Test
public void checkIfMapEmptyAfterLotsOfAcquireAndReleases() throws InterruptedException {
ConcurrentHashMap<String, Integer> counter = new ConcurrentHashMap<>();
ConcurrentHashMap<String, AtomicInteger> safeCounter = new ConcurrentHashMap<>();
KeyedLock<String> connectionLock = new KeyedLock<>();
String[] names = new String[randomIntBetween(1, 40)];
for (int i = 0; i < names.length; i++) {
names[i] = randomRealisticUnicodeOfLengthBetween(10, 20);
}
CountDownLatch startLatch = new CountDownLatch(1);
int numThreads = randomIntBetween(3, 10);
Thread[] threads = new Thread[numThreads];
for (int i = 0; i < numThreads; i++) {
threads[i] = new AcquireAndReleaseThread(startLatch, connectionLock, names, counter, safeCounter);
}
for (int i = 0; i < numThreads; i++) {
threads[i].start();
}
startLatch.countDown();
for (int i = 0; i < numThreads; i++) {
threads[i].join();
}
assertThat(connectionLock.hasLockedKeys(), equalTo(false));
Set<Entry<String, Integer>> entrySet = counter.entrySet();
assertThat(counter.size(), equalTo(safeCounter.size()));
for (Entry<String, Integer> entry : entrySet) {
AtomicInteger atomicInteger = safeCounter.get(entry.getKey());
assertThat(atomicInteger, not(Matchers.nullValue()));
assertThat(atomicInteger.get(), equalTo(entry.getValue()));
}
}
@Test(expected = ElasticsearchIllegalStateException.class)
public void checkCannotAcquireTwoLocks() throws InterruptedException {
ConcurrentHashMap<String, Integer> counters = new ConcurrentHashMap<>();
ConcurrentHashMap<String, AtomicInteger> safeCounter = new ConcurrentHashMap<>();
KeyedLock<String> connectionLock = new KeyedLock<>();
String[] names = new String[randomIntBetween(1, 40)];
connectionLock = new KeyedLock<>();
String name = randomRealisticUnicodeOfLength(scaledRandomIntBetween(10, 50));
connectionLock.acquire(name);
connectionLock.acquire(name);
}
@Test(expected = ElasticsearchIllegalStateException.class)
public void checkCannotReleaseUnacquiredLock() throws InterruptedException {
ConcurrentHashMap<String, Integer> counters = new ConcurrentHashMap<>();
ConcurrentHashMap<String, AtomicInteger> safeCounter = new ConcurrentHashMap<>();
KeyedLock<String> connectionLock = new KeyedLock<>();
String[] names = new String[randomIntBetween(1, 40)];
connectionLock = new KeyedLock<>();
String name = randomRealisticUnicodeOfLength(scaledRandomIntBetween(10, 50));
connectionLock.release(name);
}
public static class AcquireAndReleaseThread extends Thread {
private CountDownLatch startLatch;
KeyedLock<String> connectionLock;
String[] names;
ConcurrentHashMap<String, Integer> counter;
ConcurrentHashMap<String, AtomicInteger> safeCounter;
public AcquireAndReleaseThread(CountDownLatch startLatch, KeyedLock<String> connectionLock, String[] names,
ConcurrentHashMap<String, Integer> counter, ConcurrentHashMap<String, AtomicInteger> safeCounter) {
this.startLatch = startLatch;
this.connectionLock = connectionLock;
this.names = names;
this.counter = counter;
this.safeCounter = safeCounter;
}
public void run() {
try {
startLatch.await();
} catch (InterruptedException e) {
throw new RuntimeException();
}
int numRuns = scaledRandomIntBetween(500, 5000);
for (int i = 0; i < numRuns; i++) {
String curName = names[randomInt(names.length - 1)];
connectionLock.acquire(curName);
try {
Integer integer = counter.get(curName);
if (integer == null) {
counter.put(curName, 1);
} else {
counter.put(curName, integer.intValue() + 1);
}
} finally {
connectionLock.release(curName);
}
AtomicInteger atomicInteger = new AtomicInteger(0);
AtomicInteger value = safeCounter.putIfAbsent(curName, atomicInteger);
if (value == null) {
atomicInteger.incrementAndGet();
} else {
value.incrementAndGet();
}
}
}
}
}
| apache-2.0 |
Kast0rTr0y/jackrabbit | jackrabbit-spi-commons/src/main/java/org/apache/jackrabbit/spi/commons/query/qom/ColumnImpl.java | 3815 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.spi.commons.query.qom;
import javax.jcr.query.qom.Column;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.commons.conversion.NamePathResolver;
/**
* <code>ColumnImpl</code>...
*/
public class ColumnImpl extends AbstractQOMNode implements Column {
/**
* Empty <code>ColumnImpl</code> array.
*/
public static final ColumnImpl[] EMPTY_ARRAY = new ColumnImpl[0];
/**
* The name of the selector.
*/
private final Name selectorName;
/**
* The name of the property.
*/
private final Name propertyName;
/**
* The name of the column.
*/
private final String columnName;
ColumnImpl(NamePathResolver resolver,
Name selectorName,
Name propertyName,
String columnName) {
super(resolver);
this.selectorName = selectorName;
this.propertyName = propertyName;
this.columnName = columnName;
}
/**
* Gets the name of the selector.
*
* @return the selector name; non-null
*/
public Name getSelectorQName() {
return selectorName;
}
/**
* Gets the name of the property.
*
* @return the property name, or null to include a column for each
* single-value non-residual property of the selector's node type
*/
public Name getPropertyQName() {
return propertyName;
}
//---------------------------< Column >-------------------------------------
/**
* Gets the name of the selector.
*
* @return the selector name; non-null
*/
public String getSelectorName() {
return getJCRName(selectorName);
}
/**
* Gets the name of the property.
*
* @return the property name, or null to include a column for each
* single-value non-residual property of the selector's node type
*/
public String getPropertyName() {
return getJCRName(propertyName);
}
/**
* Gets the column name.
* <p>
*
* @return the column name; must be null if <code>getPropertyName</code> is
* null and non-null otherwise
*/
public String getColumnName() {
return columnName;
}
//------------------------< AbstractQOMNode >-------------------------------
/**
* Accepts a <code>visitor</code> and calls the appropriate visit method
* depending on the type of this QOM node.
*
* @param visitor the visitor.
*/
public Object accept(QOMTreeVisitor visitor, Object data) throws Exception {
return visitor.visit(this, data);
}
//------------------------< Object >----------------------------------------
public String toString() {
if (propertyName != null) {
return getSelectorName() + "." + getPropertyName()
+ " AS " + getColumnName();
} else {
return getSelectorName() + ".*";
}
}
}
| apache-2.0 |
four2five/hop | src/mapred/org/apache/hadoop/mapred/TaskLog.java | 19596 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.log4j.Appender;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
/**
* A simple logger to handle the task-specific user logs.
* This class uses the system property <code>hadoop.log.dir</code>.
*
*/
public class TaskLog {
private static final Log LOG =
LogFactory.getLog(TaskLog.class.getName());
private static final File LOG_DIR =
new File(System.getProperty("hadoop.log.dir"),
"userlogs").getAbsoluteFile();
static {
if (!LOG_DIR.exists()) {
LOG_DIR.mkdirs();
}
}
public static File getTaskLogFile(TaskAttemptID taskid, LogName filter) {
return new File(getBaseDir(taskid.toString()), filter.toString());
}
public static File getRealTaskLogFileLocation(TaskAttemptID taskid,
LogName filter) {
LogFileDetail l;
try {
l = getTaskLogFileDetail(taskid, filter);
} catch (IOException ie) {
LOG.error("getTaskLogFileDetail threw an exception " + ie);
return null;
}
return new File(getBaseDir(l.location), filter.toString());
}
private static class LogFileDetail {
final static String LOCATION = "LOG_DIR:";
String location;
long start;
long length;
}
private static LogFileDetail getTaskLogFileDetail(TaskAttemptID taskid,
LogName filter) throws IOException {
return getLogFileDetail(taskid, filter, false);
}
private static LogFileDetail getLogFileDetail(TaskAttemptID taskid,
LogName filter,
boolean isCleanup)
throws IOException {
File indexFile = getIndexFile(taskid.toString(), isCleanup);
BufferedReader fis = new BufferedReader(new java.io.FileReader(indexFile));
//the format of the index file is
//LOG_DIR: <the dir where the task logs are really stored>
//stdout:<start-offset in the stdout file> <length>
//stderr:<start-offset in the stderr file> <length>
//syslog:<start-offset in the syslog file> <length>
LogFileDetail l = new LogFileDetail();
String str = fis.readLine();
if (str == null) { //the file doesn't have anything
throw new IOException ("Index file for the log of " + taskid+" doesn't exist.");
}
l.location = str.substring(str.indexOf(LogFileDetail.LOCATION)+
LogFileDetail.LOCATION.length());
//special cases are the debugout and profile.out files. They are guaranteed
//to be associated with each task attempt since jvm reuse is disabled
//when profiling/debugging is enabled
if (filter.equals(LogName.DEBUGOUT) || filter.equals(LogName.PROFILE)) {
l.length = new File(getBaseDir(l.location), filter.toString()).length();
l.start = 0;
fis.close();
return l;
}
str = fis.readLine();
while (str != null) {
//look for the exact line containing the logname
if (str.contains(filter.toString())) {
str = str.substring(filter.toString().length()+1);
String[] startAndLen = str.split(" ");
l.start = Long.parseLong(startAndLen[0]);
l.length = Long.parseLong(startAndLen[1]);
break;
}
str = fis.readLine();
}
fis.close();
return l;
}
public static File getIndexFile(String taskid) {
return getIndexFile(taskid, false);
}
public static File getIndexFile(String taskid, boolean isCleanup) {
if (isCleanup) {
return new File(getBaseDir(taskid), "log.index.cleanup");
} else {
return new File(getBaseDir(taskid), "log.index");
}
}
private static File getBaseDir(String taskid) {
return new File(LOG_DIR, taskid);
}
private static long prevOutLength;
private static long prevErrLength;
private static long prevLogLength;
private static void writeToIndexFile(TaskAttemptID firstTaskid,
boolean isCleanup)
throws IOException {
File indexFile = getIndexFile(currentTaskid.toString(), isCleanup);
BufferedOutputStream bos =
new BufferedOutputStream(new FileOutputStream(indexFile,false));
DataOutputStream dos = new DataOutputStream(bos);
//the format of the index file is
//LOG_DIR: <the dir where the task logs are really stored>
//STDOUT: <start-offset in the stdout file> <length>
//STDERR: <start-offset in the stderr file> <length>
//SYSLOG: <start-offset in the syslog file> <length>
dos.writeBytes(LogFileDetail.LOCATION + firstTaskid.toString()+"\n"+
LogName.STDOUT.toString()+":");
dos.writeBytes(Long.toString(prevOutLength)+" ");
dos.writeBytes(Long.toString(getTaskLogFile(firstTaskid, LogName.STDOUT)
.length() - prevOutLength)+"\n"+LogName.STDERR+":");
dos.writeBytes(Long.toString(prevErrLength)+" ");
dos.writeBytes(Long.toString(getTaskLogFile(firstTaskid, LogName.STDERR)
.length() - prevErrLength)+"\n"+LogName.SYSLOG.toString()+":");
dos.writeBytes(Long.toString(prevLogLength)+" ");
dos.writeBytes(Long.toString(getTaskLogFile(firstTaskid, LogName.SYSLOG)
.length() - prevLogLength)+"\n");
dos.close();
}
private static void resetPrevLengths(TaskAttemptID firstTaskid) {
prevOutLength = getTaskLogFile(firstTaskid, LogName.STDOUT).length();
prevErrLength = getTaskLogFile(firstTaskid, LogName.STDERR).length();
prevLogLength = getTaskLogFile(firstTaskid, LogName.SYSLOG).length();
}
private volatile static TaskAttemptID currentTaskid = null;
public synchronized static void syncLogs(TaskAttemptID firstTaskid,
TaskAttemptID taskid)
throws IOException {
syncLogs(firstTaskid, taskid, false);
}
@SuppressWarnings("unchecked")
public synchronized static void syncLogs(TaskAttemptID firstTaskid,
TaskAttemptID taskid,
boolean isCleanup)
throws IOException {
System.out.flush();
System.err.flush();
Enumeration<Logger> allLoggers = LogManager.getCurrentLoggers();
while (allLoggers.hasMoreElements()) {
Logger l = allLoggers.nextElement();
Enumeration<Appender> allAppenders = l.getAllAppenders();
while (allAppenders.hasMoreElements()) {
Appender a = allAppenders.nextElement();
if (a instanceof TaskLogAppender) {
((TaskLogAppender)a).flush();
}
}
}
if (currentTaskid != taskid) {
currentTaskid = taskid;
resetPrevLengths(firstTaskid);
}
writeToIndexFile(firstTaskid, isCleanup);
}
/**
* The filter for userlogs.
*/
public static enum LogName {
/** Log on the stdout of the task. */
STDOUT ("stdout"),
/** Log on the stderr of the task. */
STDERR ("stderr"),
/** Log on the map-reduce system logs of the task. */
SYSLOG ("syslog"),
/** The java profiler information. */
PROFILE ("profile.out"),
/** Log the debug script's stdout */
DEBUGOUT ("debugout");
private String prefix;
private LogName(String prefix) {
this.prefix = prefix;
}
@Override
public String toString() {
return prefix;
}
}
private static class TaskLogsPurgeFilter implements FileFilter {
long purgeTimeStamp;
TaskLogsPurgeFilter(long purgeTimeStamp) {
this.purgeTimeStamp = purgeTimeStamp;
}
public boolean accept(File file) {
LOG.debug("PurgeFilter - file: " + file + ", mtime: " + file.lastModified() + ", purge: " + purgeTimeStamp);
return file.lastModified() < purgeTimeStamp;
}
}
/**
* Purge old user logs.
*
* @throws IOException
*/
public static synchronized void cleanup(int logsRetainHours
) throws IOException {
// Purge logs of tasks on this tasktracker if their
// mtime has exceeded "mapred.task.log.retain" hours
long purgeTimeStamp = System.currentTimeMillis() -
(logsRetainHours*60L*60*1000);
File[] oldTaskLogs = LOG_DIR.listFiles
(new TaskLogsPurgeFilter(purgeTimeStamp));
if (oldTaskLogs != null) {
for (int i=0; i < oldTaskLogs.length; ++i) {
FileUtil.fullyDelete(oldTaskLogs[i]);
}
}
}
static class Reader extends InputStream {
private long bytesRemaining;
private FileInputStream file;
public Reader(TaskAttemptID taskid, LogName kind,
long start, long end) throws IOException {
this(taskid, kind, start, end, false);
}
/**
* Read a log file from start to end positions. The offsets may be negative,
* in which case they are relative to the end of the file. For example,
* Reader(taskid, kind, 0, -1) is the entire file and
* Reader(taskid, kind, -4197, -1) is the last 4196 bytes.
* @param taskid the id of the task to read the log file for
* @param kind the kind of log to read
* @param start the offset to read from (negative is relative to tail)
* @param end the offset to read upto (negative is relative to tail)
* @param isCleanup whether the attempt is cleanup attempt or not
* @throws IOException
*/
public Reader(TaskAttemptID taskid, LogName kind,
long start, long end, boolean isCleanup) throws IOException {
// find the right log file
LogFileDetail fileDetail = getLogFileDetail(taskid, kind, isCleanup);
// calculate the start and stop
long size = fileDetail.length;
if (start < 0) {
start += size + 1;
}
if (end < 0) {
end += size + 1;
}
start = Math.max(0, Math.min(start, size));
end = Math.max(0, Math.min(end, size));
start += fileDetail.start;
end += fileDetail.start;
bytesRemaining = end - start;
file = new FileInputStream(new File(getBaseDir(fileDetail.location),
kind.toString()));
// skip upto start
long pos = 0;
while (pos < start) {
long result = file.skip(start - pos);
if (result < 0) {
bytesRemaining = 0;
break;
}
pos += result;
}
}
@Override
public int read() throws IOException {
int result = -1;
if (bytesRemaining > 0) {
bytesRemaining -= 1;
result = file.read();
}
return result;
}
@Override
public int read(byte[] buffer, int offset, int length) throws IOException {
length = (int) Math.min(length, bytesRemaining);
int bytes = file.read(buffer, offset, length);
if (bytes > 0) {
bytesRemaining -= bytes;
}
return bytes;
}
@Override
public int available() throws IOException {
return (int) Math.min(bytesRemaining, file.available());
}
@Override
public void close() throws IOException {
file.close();
}
}
private static final String bashCommand = "bash";
private static final String tailCommand = "tail";
/**
* Get the desired maximum length of task's logs.
* @param conf the job to look in
* @return the number of bytes to cap the log files at
*/
public static long getTaskLogLength(JobConf conf) {
return conf.getLong("mapred.userlog.limit.kb", 100) * 1024;
}
/**
* Wrap a command in a shell to capture stdout and stderr to files.
* If the tailLength is 0, the entire output will be saved.
* @param cmd The command and the arguments that should be run
* @param stdoutFilename The filename that stdout should be saved to
* @param stderrFilename The filename that stderr should be saved to
* @param tailLength The length of the tail to be saved.
* @return the modified command that should be run
*/
public static List<String> captureOutAndError(List<String> cmd,
File stdoutFilename,
File stderrFilename,
long tailLength
) throws IOException {
return captureOutAndError(null, cmd, stdoutFilename,
stderrFilename, tailLength, null );
}
/**
* Wrap a command in a shell to capture stdout and stderr to files.
* Setup commands such as setting memory limit can be passed which
* will be executed before exec.
* If the tailLength is 0, the entire output will be saved.
* @param setup The setup commands for the execed process.
* @param cmd The command and the arguments that should be run
* @param stdoutFilename The filename that stdout should be saved to
* @param stderrFilename The filename that stderr should be saved to
* @param tailLength The length of the tail to be saved.
* @return the modified command that should be run
*/
public static List<String> captureOutAndError(List<String> setup,
List<String> cmd,
File stdoutFilename,
File stderrFilename,
long tailLength
) throws IOException {
return captureOutAndError(setup, cmd, stdoutFilename, stderrFilename,
tailLength, null);
}
/**
* Wrap a command in a shell to capture stdout and stderr to files.
* Setup commands such as setting memory limit can be passed which
* will be executed before exec.
* If the tailLength is 0, the entire output will be saved.
* @param setup The setup commands for the execed process.
* @param cmd The command and the arguments that should be run
* @param stdoutFilename The filename that stdout should be saved to
* @param stderrFilename The filename that stderr should be saved to
* @param tailLength The length of the tail to be saved.
* @param pidFileName The name of the pid-file
* @return the modified command that should be run
*/
public static List<String> captureOutAndError(List<String> setup,
List<String> cmd,
File stdoutFilename,
File stderrFilename,
long tailLength,
String pidFileName
) throws IOException {
String stdout = FileUtil.makeShellPath(stdoutFilename);
String stderr = FileUtil.makeShellPath(stderrFilename);
List<String> result = new ArrayList<String>(3);
result.add(bashCommand);
result.add("-c");
StringBuffer mergedCmd = new StringBuffer();
// Spit out the pid to pidFileName
if (pidFileName != null) {
mergedCmd.append("echo $$ > ");
mergedCmd.append(pidFileName);
mergedCmd.append(" ;");
}
if (setup != null && setup.size() > 0) {
mergedCmd.append(addCommand(setup, false));
mergedCmd.append(";");
}
if (tailLength > 0) {
mergedCmd.append("(");
} else {
mergedCmd.append("exec ");
}
mergedCmd.append(addCommand(cmd, true));
mergedCmd.append(" < /dev/null ");
if (tailLength > 0) {
mergedCmd.append(" | ");
mergedCmd.append(tailCommand);
mergedCmd.append(" -c ");
mergedCmd.append(tailLength);
mergedCmd.append(" >> ");
mergedCmd.append(stdout);
mergedCmd.append(" ; exit $PIPESTATUS ) 2>&1 | ");
mergedCmd.append(tailCommand);
mergedCmd.append(" -c ");
mergedCmd.append(tailLength);
mergedCmd.append(" >> ");
mergedCmd.append(stderr);
mergedCmd.append(" ; exit $PIPESTATUS");
} else {
mergedCmd.append(" 1>> ");
mergedCmd.append(stdout);
mergedCmd.append(" 2>> ");
mergedCmd.append(stderr);
}
result.add(mergedCmd.toString());
return result;
}
/**
* Add quotes to each of the command strings and
* return as a single string
* @param cmd The command to be quoted
* @param isExecutable makes shell path if the first
* argument is executable
* @return returns The quoted string.
* @throws IOException
*/
public static String addCommand(List<String> cmd, boolean isExecutable)
throws IOException {
StringBuffer command = new StringBuffer();
for(String s: cmd) {
command.append('\'');
if (isExecutable) {
// the executable name needs to be expressed as a shell path for the
// shell to find it.
command.append(FileUtil.makeShellPath(new File(s)));
isExecutable = false;
} else {
command.append(s);
}
command.append('\'');
command.append(" ");
}
return command.toString();
}
/**
* Wrap a command in a shell to capture debug script's
* stdout and stderr to debugout.
* @param cmd The command and the arguments that should be run
* @param debugoutFilename The filename that stdout and stderr
* should be saved to.
* @return the modified command that should be run
* @throws IOException
*/
public static List<String> captureDebugOut(List<String> cmd,
File debugoutFilename
) throws IOException {
String debugout = FileUtil.makeShellPath(debugoutFilename);
List<String> result = new ArrayList<String>(3);
result.add(bashCommand);
result.add("-c");
StringBuffer mergedCmd = new StringBuffer();
mergedCmd.append("exec ");
boolean isExecutable = true;
for(String s: cmd) {
if (isExecutable) {
// the executable name needs to be expressed as a shell path for the
// shell to find it.
mergedCmd.append(FileUtil.makeShellPath(new File(s)));
isExecutable = false;
} else {
mergedCmd.append(s);
}
mergedCmd.append(" ");
}
mergedCmd.append(" < /dev/null ");
mergedCmd.append(" >");
mergedCmd.append(debugout);
mergedCmd.append(" 2>&1 ");
result.add(mergedCmd.toString());
return result;
}
} // TaskLog
| apache-2.0 |
AlanJinTS/zstack | sdk/src/main/java/org/zstack/sdk/UpdateDiskOfferingAction.java | 2546 | package org.zstack.sdk;
import java.util.HashMap;
import java.util.Map;
public class UpdateDiskOfferingAction extends AbstractAction {
private static final HashMap<String, Parameter> parameterMap = new HashMap<>();
public static class Result {
public ErrorCode error;
public UpdateDiskOfferingResult value;
public Result throwExceptionIfError() {
if (error != null) {
throw new ApiException(
String.format("error[code: %s, description: %s, details: %s]", error.code, error.description, error.details)
);
}
return this;
}
}
@Param(required = true, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
public java.lang.String uuid;
@Param(required = false, maxLength = 255, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
public java.lang.String name;
@Param(required = false, maxLength = 2048, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
public java.lang.String description;
@Param(required = false)
public java.util.List systemTags;
@Param(required = false)
public java.util.List userTags;
@Param(required = true)
public String sessionId;
public long timeout;
public long pollingInterval;
private Result makeResult(ApiResult res) {
Result ret = new Result();
if (res.error != null) {
ret.error = res.error;
return ret;
}
UpdateDiskOfferingResult value = res.getResult(UpdateDiskOfferingResult.class);
ret.value = value == null ? new UpdateDiskOfferingResult() : value;
return ret;
}
public Result call() {
ApiResult res = ZSClient.call(this);
return makeResult(res);
}
public void call(final Completion<Result> completion) {
ZSClient.call(this, new InternalCompletion() {
@Override
public void complete(ApiResult res) {
completion.complete(makeResult(res));
}
});
}
Map<String, Parameter> getParameterMap() {
return parameterMap;
}
RestInfo getRestInfo() {
RestInfo info = new RestInfo();
info.httpMethod = "PUT";
info.path = "/disk-offerings/{uuid}/actions";
info.needSession = true;
info.needPoll = true;
info.parameterName = "updateDiskOffering";
return info;
}
}
| apache-2.0 |
vroyer/elassandra | server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java | 6106 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fieldcaps;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Before;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
public class FieldCapabilitiesIT extends ESIntegTestCase {
@Before
public void setUp() throws Exception {
super.setUp();
XContentBuilder oldIndexMapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("distance")
.field("type", "double")
.endObject()
.startObject("route_length_miles")
.field("type", "alias")
.field("path", "distance")
.endObject()
.startObject("playlist")
.field("type", "text")
.endObject()
.startObject("secret_soundtrack")
.field("type", "alias")
.field("path", "playlist")
.endObject()
.endObject()
.endObject()
.endObject();
assertAcked(prepareCreate("old_index").addMapping("_doc", oldIndexMapping));
XContentBuilder newIndexMapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("distance")
.field("type", "text")
.endObject()
.startObject("route_length_miles")
.field("type", "double")
.endObject()
.endObject()
.endObject()
.endObject();
assertAcked(prepareCreate("new_index").addMapping("_doc", newIndexMapping));
}
public static class FieldFilterPlugin extends Plugin implements MapperPlugin {
@Override
public Function<String, Predicate<String>> getFieldFilter() {
return index -> field -> !field.equals("playlist");
}
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(FieldFilterPlugin.class);
}
/**
 * Requests capabilities for a concrete field and for a field alias and checks
 * that both are reported with the types and index lists from the two mappings.
 */
public void testFieldAlias() {
    FieldCapabilitiesResponse response = client().prepareFieldCaps()
        .setFields("distance", "route_length_miles")
        .execute().actionGet();

    // Both requested names must appear in the response.
    assertTrue(response.get().containsKey("distance"));
    assertTrue(response.get().containsKey("route_length_miles"));

    // 'distance' is mapped as double in old_index and as text in new_index.
    Map<String, FieldCapabilities> distanceCaps = response.getField("distance");
    assertEquals(2, distanceCaps.size());

    assertTrue(distanceCaps.containsKey("double"));
    assertEquals(
        new FieldCapabilities("distance", "double", true, true, new String[] {"old_index"}, null, null),
        distanceCaps.get("double"));

    assertTrue(distanceCaps.containsKey("text"));
    assertEquals(
        new FieldCapabilities("distance", "text", true, false, new String[] {"new_index"}, null, null),
        distanceCaps.get("text"));

    // 'route_length_miles' resolves to a single type entry: double.
    Map<String, FieldCapabilities> routeLengthCaps = response.getField("route_length_miles");
    assertEquals(1, routeLengthCaps.size());

    assertTrue(routeLengthCaps.containsKey("double"));
    assertEquals(
        new FieldCapabilities("route_length_miles", "double", true, true),
        routeLengthCaps.get("double"));
}
/**
 * A wildcard field pattern must expand to matching field aliases as well.
 */
public void testFieldAliasWithWildcard() {
    FieldCapabilitiesResponse response = client().prepareFieldCaps()
        .setFields("route*")
        .execute().actionGet();

    // Only the alias matches the pattern, so exactly one entry comes back.
    assertTrue(response.get().containsKey("route_length_miles"));
    assertEquals(1, response.get().size());
}
/**
 * {@link FieldFilterPlugin} hides the {@code playlist} field, so the
 * {@code secret_soundtrack} alias that targets it must also be filtered out
 * of the response; only {@code route_length_miles} should be returned.
 */
public void testFieldAliasFiltering() {
    // Bug fix: request the alias by its mapped name "secret_soundtrack".
    // The previous "secret-soundtrack" (hyphen) named a nonexistent field,
    // so the test passed trivially without exercising the filtering path.
    FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields(
            "secret_soundtrack", "route_length_miles")
        .execute().actionGet();

    assertEquals(1, response.get().size());
    assertTrue(response.get().containsKey("route_length_miles"));
}
/**
 * A wildcard that would match a filtered alias must not resurrect it: the
 * "secret*" pattern matches only the hidden alias, so just the concrete
 * 'distance' field is reported.
 */
public void testFieldAliasFilteringWithWildcard() {
    FieldCapabilitiesResponse response = client().prepareFieldCaps()
        .setFields("distance", "secret*")
        .execute().actionGet();

    assertTrue(response.get().containsKey("distance"));
    assertEquals(1, response.get().size());
}
}
| apache-2.0 |
smgoller/geode | geode-core/src/main/java/org/apache/geode/internal/cache/DestroyedEntry.java | 2174 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import org.apache.geode.cache.CacheStatistics;
import org.apache.geode.cache.EntryDestroyedException;
import org.apache.geode.cache.Region;
/**
* Represents a destroyed entry that can be returned from an <code>Iterator</code> instead of null.
* All methods throw {@link EntryDestroyedException} except for {@link #isDestroyed()}.
*
*/
/**
 * Stand-in {@link Region.Entry} that iterators can return instead of {@code null}
 * for an entry that has been destroyed. Every accessor throws
 * {@link EntryDestroyedException}; only {@link #isDestroyed()} returns normally.
 */
public class DestroyedEntry implements Region.Entry<Object, Object> {

  /** Message carried by every {@link EntryDestroyedException} thrown by this entry. */
  private final String message;

  public DestroyedEntry(String msg) {
    this.message = msg;
  }

  @Override
  public Object getKey() {
    throw destroyedException();
  }

  @Override
  public Object getValue() {
    throw destroyedException();
  }

  @Override
  public Region<Object, Object> getRegion() {
    throw destroyedException();
  }

  @Override
  public boolean isLocal() {
    throw destroyedException();
  }

  @Override
  public CacheStatistics getStatistics() {
    throw destroyedException();
  }

  @Override
  public Object getUserAttribute() {
    throw destroyedException();
  }

  @Override
  public Object setUserAttribute(Object userAttribute) {
    throw destroyedException();
  }

  @Override
  public boolean isDestroyed() {
    // The single operation that succeeds on a destroyed entry.
    return true;
  }

  @Override
  public Object setValue(Object value) {
    throw destroyedException();
  }

  /** Builds the exception thrown by every accessor above. */
  private EntryDestroyedException destroyedException() {
    return new EntryDestroyedException(message);
  }
}
| apache-2.0 |
gavinying/kura | target-platform/jdk.dio-parent/src/dev/test/jdk/dio/uart/UARTConfigTest.java | 11684 | /*
* Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package dio.uart;
import com.sun.javatest.Status;
import com.sun.javatest.Test;
import static dio.shared.TestBase.STATUS_OK;
import java.io.IOException;
import java.io.PrintWriter;
import java.security.Policy;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import jdk.dio.DeviceConfig;
import jdk.dio.DeviceDescriptor;
import jdk.dio.DeviceManager;
import jdk.dio.DeviceMgmtPermission;
import jdk.dio.uart.ModemUART;
import jdk.dio.uart.UART;
import jdk.dio.uart.UARTConfig;
import jdk.dio.uart.UARTPermission;
/**
*
* @test
* @sources UARTConfigTest.java
* @executeClass dio.uart.UARTConfigTest
*
* @title UART configuration testing
*
* @author stanislav.smirnov@oracle.com
*/
public class UARTConfigTest extends UARTTestBase implements Test {
/**
* Standard command-line entry point.
*
* @param args command line args (ignored)
*/
public static void main(String[] args) {
PrintWriter err = new PrintWriter(System.err, true);
Test t = new UARTConfigTest();
Status s = t.run(args, null, err);
s.exit();
}
/**
* Main test method. The test consists of a series of test cases; the test
* passes only if all the individual test cases pass.
*
* @param args ignored
* @param out ignored
* @param err a stream to which to write details about test failures
* @return a Status object indicating if the test passed or failed
*/
@Override
public Status run(String[] args, PrintWriter out, PrintWriter err) {
if(!decodeConfig(args)){
return printFailedStatus("Error occured while decoding input arguments");
}
Policy.setPolicy(new UARTPolicy(
new DeviceMgmtPermission("*:*", "open"),
new UARTPermission("*:*", "open")));
List<Integer> result = new LinkedList<>();
result.add(testUARTConfig().getType());
result.add(testUARTDesc().getType());
result.add(testUARTDefault().getType());
return (result.contains(Status.FAILED)
? printFailedStatus("some test cases failed") : Status.passed("OK"));
}
/**
* Testcase to verify uart configuration against initial configuration
* @return Status passed/failed
*/
private Status testUARTConfig() {
Status result = Status.passed(STATUS_OK);
Status resultFail = null;
start("Verify UART config against initial configuration");
Config initialConfig;
Iterator<Config> devicesList = UARTDevices.iterator();
while (devicesList.hasNext()) {
initialConfig = devicesList.next();
UARTConfig devCfg = initialConfig.getDevCfg();
try (UART device = (initialConfig.isModemUART() ? (ModemUART) initialConfig.open() : (UART) initialConfig.open())) {
int baudRate = device.getBaudRate();
int cfgBaudRate = devCfg.getBaudRate();
if (baudRate == cfgBaudRate) {
System.out.println("Correct BAUD RATE received: " + baudRate);
} else {
resultFail = printFailedStatus("Unexpected BAUD RATE returned: " + baudRate);
}
int dataBits = device.getDataBits();
int cfgDataBits = devCfg.getDataBits();
if (dataBits == cfgDataBits) {
System.out.println("Correct DATA BITS received: " + printDataBits(dataBits));
} else {
resultFail = printFailedStatus("Unexpected DATA BITS returned: " + printDataBits(dataBits));
}
int parity = device.getParity();
int cfgParity = devCfg.getParity();
if (parity == cfgParity) {
System.out.println("Correct PARITY received: " + printParity(parity));
} else {
resultFail = printFailedStatus("Unexpected PARITY returned: " + printParity(parity));
}
int stopBits = device.getStopBits();
int cfgStopBits = devCfg.getStopBits();
if (stopBits == cfgStopBits) {
System.out.println("Correct STOP BITS received: " + printStopBits(cfgStopBits));
} else {
resultFail = printFailedStatus("Unexpected STOP BITS returned: " + printStopBits(cfgStopBits));
}
System.out.println("UART was open/closed OK");
} catch(IOException ex){
resultFail = printFailedStatus("Unexpected IOException: " + ex.getClass().getName() + ":" + ex.getMessage());
}
}
stop();
if (resultFail != null) {
return resultFail;
}
return result;
}
/**
* Testcase to verify uart configuration against initial configuration using DeviceDescriptor methods
* @return Status passed/failed
*/
private Status testUARTDesc() {
Status result = Status.passed(STATUS_OK);
Status resultFail = null;
start("Verify UART config against initial configuration using DeviceDescriptor methods");
Config initialConfig;
Iterator<Config> devicesList = UARTDevices.iterator();
while(devicesList.hasNext()){
initialConfig = devicesList.next();
UARTConfig devCfg = initialConfig.getDevCfg();
try (UART device = (initialConfig.isModemUART() ? (ModemUART) initialConfig.open() : (UART) initialConfig.open())) {
DeviceDescriptor devDecriptor = (DeviceDescriptor) device.getDescriptor();
UARTConfig devConfig = (UARTConfig) devDecriptor.getConfiguration();
int controllerNumber = devConfig.getControllerNumber();
int cfgControllerNumber = devCfg.getControllerNumber();
if (controllerNumber == cfgControllerNumber) {
System.out.println("Device number is correct: " + controllerNumber);
} else {
resultFail = printFailedStatus("Unexpected device number returned: " + controllerNumber);
}
String controllerName = devConfig.getControllerName();
String cfgControllerName = devCfg.getControllerName();
if (controllerName == null) {
if (cfgControllerName == null) {
System.out.println("Device name is null");
}
} else if (controllerName.equals(cfgControllerName)) {
System.out.println("Device name is correct: " + controllerName);
} else {
resultFail = printFailedStatus("Not expected Device name: " + controllerName);
}
int baudRate = devConfig.getBaudRate();
int cfgBaudRate = devCfg.getBaudRate();
if (baudRate == cfgBaudRate) {
System.out.println("Correct BAUD RATE received: " + baudRate);
} else {
resultFail = printFailedStatus("Unexpected BAUD RATE returned: " + baudRate);
}
int dataBits = devConfig.getDataBits();
int cfgDataBits = devCfg.getDataBits();
if (dataBits == cfgDataBits) {
System.out.println("Correct DATA BITS received: " + printDataBits(dataBits));
} else {
resultFail = printFailedStatus("Unexpected DATA BITS returned: " + printDataBits(dataBits));
}
int parity = devConfig.getParity();
int cfgParity = devCfg.getParity();
if (parity == cfgParity) {
System.out.println("Correct PARITY received: " + printParity(parity));
} else {
resultFail = printFailedStatus("Unexpected PARITY returned: " + printParity(parity));
}
int stopBits = devConfig.getStopBits();
int cfgStopBits = devCfg.getStopBits();
if (stopBits == cfgStopBits) {
System.out.println("Correct STOP BITS received: " + printStopBits(cfgStopBits));
} else {
resultFail = printFailedStatus("Unexpected STOP BITS returned: " + printStopBits(cfgStopBits));
}
int flowControlMode = devConfig.getFlowControlMode();
int cfgflowControlMode = devCfg.getFlowControlMode();
if (flowControlMode == cfgflowControlMode) {
System.out.println("Correct FLOW CONTROL received: " + printFlowControl(devConfig.getFlowControlMode()));
} else {
resultFail = printFailedStatus("Unexpected STOP BITS returned: " + printFlowControl(devConfig.getFlowControlMode()));
}
} catch(IOException ex){
resultFail = printFailedStatus("Unexpected IOException: " + ex.getClass().getName() + ":" + ex.getMessage());
}
}
stop();
if (resultFail != null) {
return resultFail;
}
return result;
}
/**
* Testcase to verify uart configuration using DEFAULT parameters where it is possible
* @return Status passed/failed
*/
private Status testUARTDefault() {
Status result = Status.passed(STATUS_OK);
start("Creating UART configs using DEFAULT parameters where it is possible");
Config dev;
UARTConfig cfg;
UARTConfig defCfg;
Iterator<Config> devicesList = UARTDevices.iterator();
while (devicesList.hasNext()) {
dev = devicesList.next();
try {
cfg = dev.getDevCfg();
defCfg = new UARTConfig(DeviceConfig.DEFAULT, DeviceConfig.DEFAULT, cfg.getBaudRate(), cfg.getDataBits(), cfg.getParity(), cfg.getStopBits(), cfg.getFlowControlMode());
try (UART device = (dev.isModemUART() ? (ModemUART) DeviceManager.open(defCfg) : (UART) DeviceManager.open(defCfg))) {
System.out.println("New uart device was open/closed OK");
}
} catch (IOException e) {
result = printFailedStatus("Unexpected IOException: " + e.getClass().getName() + ":" + e.getMessage());
}
}
stop();
return result;
}
}
| epl-1.0 |
thenaesh/teammates | src/main/java/teammates/ui/template/ArchivedCoursesTable.java | 446 | package teammates.ui.template;
import java.util.ArrayList;
import java.util.List;
/**
 * View model backing the archived-courses table on the instructor pages:
 * a mutable holder for the table's rows.
 */
public class ArchivedCoursesTable {
    // Starts out empty; replaced wholesale via setRows(...).
    private List<ArchivedCoursesTableRow> rows = new ArrayList<ArchivedCoursesTableRow>();

    public List<ArchivedCoursesTableRow> getRows() {
        return rows;
    }

    public void setRows(List<ArchivedCoursesTableRow> rows) {
        this.rows = rows;
    }
}
| gpl-2.0 |
mabels/jmupdf | src/main/cpp/android/src/com/artifex/mupdf/OutlineItem.java | 251 | package com.artifex.mupdf;
/**
 * Immutable entry of a document outline (table of contents).
 */
public class OutlineItem {
    /** Nesting depth of this entry within the outline tree. */
    public final int level;
    /** Display text of this entry. */
    public final String title;
    /** Page this entry points at (indexing convention not visible here — set by native code). */
    public final int page;

    OutlineItem(int level, String title, int page) {
        this.level = level;
        this.title = title;
        this.page = page;
    }
}
| gpl-2.0 |
zhiqinghuang/core | src/com/dotcms/h2/FileVersionCheckTrigger.java | 893 | package com.dotcms.h2;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.h2.tools.TriggerAdapter;
/**
 * H2 trigger that runs after a {@code file_asset} row changes: if the old row's
 * identifier no longer has any versions left in {@code file_asset}, the
 * now-orphaned {@code identifier} row is deleted.
 */
public class FileVersionCheckTrigger extends TriggerAdapter {

    @Override
    public void fire(Connection conn, ResultSet oldF, ResultSet newF) throws SQLException {
        final String identifier = oldF.getString("identifier");

        // Count remaining versions for this identifier.
        // Bug fix: statements/result sets were previously closed manually with no
        // finally block, leaking them whenever a SQLException was thrown mid-way;
        // try-with-resources guarantees cleanup on every path.
        final int versions;
        try (PreparedStatement smt =
                conn.prepareStatement("select count(*) from file_asset where identifier = ?")) {
            smt.setString(1, identifier);
            try (ResultSet rs = smt.executeQuery()) {
                rs.next();
                versions = rs.getInt(1);
            }
        }

        // No versions left: remove the orphaned identifier row.
        if (versions == 0) {
            try (PreparedStatement smt =
                    conn.prepareStatement("delete from identifier where id =?")) {
                smt.setString(1, identifier);
                smt.executeUpdate();
            }
        }
    }
}
| gpl-3.0 |
abbeyj/sonarqube | server/sonar-server/src/main/java/org/sonar/server/user/GroupMembershipService.java | 2400 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.user;
import java.util.Map;
import org.sonar.api.server.ServerSide;
import org.sonar.db.user.GroupMembershipQuery;
import org.sonar.server.util.RubyUtils;
/**
 * Serves group-membership queries for the Ruby layer, which reaches this class
 * through <pre>Internal.group_membership</pre>.
 */
@ServerSide
public class GroupMembershipService {

  private static final String SELECTED_MEMBERSHIP = "selected";
  private static final String DESELECTED_MEMBERSHIP = "deselected";

  private final GroupMembershipFinder finder;

  public GroupMembershipService(GroupMembershipFinder finder) {
    this.finder = finder;
  }

  /** Executes the membership search described by the raw Ruby parameter map. */
  public GroupMembershipFinder.Membership find(Map<String, Object> params) {
    return finder.find(parseQuery(params));
  }

  /** Translates the Ruby-provided parameter map into a typed query object. */
  private static GroupMembershipQuery parseQuery(Map<String, Object> params) {
    GroupMembershipQuery.Builder queryBuilder = GroupMembershipQuery.builder();
    queryBuilder.membership(membership(params));
    queryBuilder.groupSearch((String) params.get("query"));
    queryBuilder.pageIndex(RubyUtils.toInteger(params.get("page")));
    queryBuilder.pageSize(RubyUtils.toInteger(params.get("pageSize")));
    queryBuilder.login((String) params.get("user"));
    return queryBuilder.build();
  }

  /** Maps the "selected" request parameter onto a membership filter constant. */
  private static String membership(Map<String, Object> params) {
    String selected = (String) params.get("selected");
    if (SELECTED_MEMBERSHIP.equals(selected)) {
      return GroupMembershipQuery.IN;
    }
    return DESELECTED_MEMBERSHIP.equals(selected) ? GroupMembershipQuery.OUT : GroupMembershipQuery.ANY;
  }
}
| lgpl-3.0 |
robin13/elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java | 3219 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.eql.expression.function.scalar.string;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.eql.EqlIllegalArgumentException;
import org.elasticsearch.xpack.ql.QlIllegalArgumentException;
import org.elasticsearch.xpack.ql.expression.Expression;
import java.util.ArrayList;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l;
import static org.elasticsearch.xpack.ql.tree.Source.EMPTY;
public class CIDRMatchFunctionProcessorTests extends ESTestCase {

    /** Evaluates cidrMatch(input, addresses) through the processor pipeline. */
    private static Object evalCidrMatch(Expression input, ArrayList<Expression> addresses) {
        return new CIDRMatch(EMPTY, input, addresses).makePipe().asProcessor().process(null);
    }

    public void testCIDRMatchFunctionValidInput() {
        // A null source yields null, with or without an address list.
        assertNull(evalCidrMatch(l(null), null));

        ArrayList<Expression> addresses = new ArrayList<>();
        assertNull(evalCidrMatch(l(null), addresses));

        // No CIDR ranges to match against: result is false.
        assertFalse((Boolean) evalCidrMatch(l("10.6.48.157"), addresses));

        // The address lies within 10.0.0.0/8, so the match succeeds.
        addresses.add(l("10.6.48.157/8"));
        assertTrue((Boolean) evalCidrMatch(l("10.6.48.157"), addresses));
    }

    public void testCIDRMatchFunctionInvalidInput() {
        ArrayList<Expression> addresses = new ArrayList<>();

        // Invalid source address
        EqlIllegalArgumentException e = expectThrows(EqlIllegalArgumentException.class,
            () -> evalCidrMatch(l("10.6.48"), addresses));
        assertEquals("'10.6.48' is not an IP string literal.", e.getMessage());

        // Invalid match ip address
        addresses.add(l("10.6.48"));
        e = expectThrows(EqlIllegalArgumentException.class,
            () -> evalCidrMatch(l("10.6.48.157"), addresses));
        assertEquals("'10.6.48' is not an IP string literal.", e.getMessage());
        addresses.clear();

        // Invalid CIDR
        addresses.add(l("10.6.12/12"));
        e = expectThrows(EqlIllegalArgumentException.class,
            () -> evalCidrMatch(l("10.6.48.157"), addresses));
        assertEquals("'10.6.12' is not an IP string literal.", e.getMessage());
        addresses.clear();

        // Invalid source type
        QlIllegalArgumentException eqe = expectThrows(QlIllegalArgumentException.class,
            () -> evalCidrMatch(l(12345), addresses));
        assertEquals("A string/char is required; received [12345]", eqe.getMessage());

        // Invalid cidr type
        addresses.add(l(5678));
        eqe = expectThrows(QlIllegalArgumentException.class,
            () -> evalCidrMatch(l("10.6.48.157"), addresses));
        assertEquals("A string/char is required; received [5678]", eqe.getMessage());
    }
}
| apache-2.0 |
bhupeshchawda/incubator-samoa | samoa-local/src/main/java/org/apache/samoa/topology/impl/SimpleProcessingItem.java | 2941 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.apache.samoa.topology.impl;
/*
* #%L
* SAMOA
* %%
* Copyright (C) 2014 - 2015 Apache Software Foundation
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.apache.samoa.core.ContentEvent;
import org.apache.samoa.core.Processor;
import org.apache.samoa.topology.AbstractProcessingItem;
import org.apache.samoa.topology.IProcessingItem;
import org.apache.samoa.topology.ProcessingItem;
import org.apache.samoa.topology.Stream;
import org.apache.samoa.utils.PartitioningScheme;
import org.apache.samoa.utils.StreamDestination;
/**
*
* @author abifet
*/
class SimpleProcessingItem extends AbstractProcessingItem {
    // Lazily created replicas of this processing item, one per unit of parallelism.
    private IProcessingItem[] arrayProcessingItem;

    SimpleProcessingItem(Processor processor) {
        super(processor);
    }

    SimpleProcessingItem(Processor processor, int parallelism) {
        super(processor);
        this.setParallelism(parallelism);
    }

    /** Returns the i-th replica; valid only after processEvent has initialized them. */
    public IProcessingItem getProcessingItem(int i) {
        return arrayProcessingItem[i];
    }

    @Override
    protected ProcessingItem addInputStream(Stream inputStream, PartitioningScheme scheme) {
        // Register this PI (with its parallelism and partitioning scheme) as a
        // destination of the given stream.
        StreamDestination destination = new StreamDestination(this, this.getParallelism(), scheme);
        ((SimpleStream) inputStream).addDestination(destination);
        return this;
    }

    /**
     * Creates a fresh replica of this item backed by a new processor instance.
     * NOTE(review): the copy does not inherit this item's parallelism — confirm
     * this is intended (replicas created in processEvent are used individually).
     */
    public SimpleProcessingItem copy() {
        Processor processor = this.getProcessor();
        return new SimpleProcessingItem(processor.newProcessor(processor));
    }

    /**
     * Routes the event to the replica selected by {@code counter}, lazily
     * creating all replicas on first use.
     */
    public void processEvent(ContentEvent event, int counter) {
        int parallelism = this.getParallelism();
        if (this.arrayProcessingItem == null && parallelism > 0) {
            // Init processing elements, the first time they are needed.
            this.arrayProcessingItem = new IProcessingItem[parallelism];
            for (int j = 0; j < parallelism; j++) {
                arrayProcessingItem[j] = this.copy();
                arrayProcessingItem[j].getProcessor().onCreate(j);
            }
        }
        if (this.arrayProcessingItem != null) {
            // Cleanup: removed dead locals and commented-out debug prints that
            // previously duplicated this lookup.
            this.getProcessingItem(counter).getProcessor().process(event);
        }
    }
}
| apache-2.0 |
sneivandt/elasticsearch | core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java | 12748 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.shards;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.health.ClusterShardHealth;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.gateway.AsyncShardFetch;
import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards;
import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards.NodeGatewayStartedShards;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* Transport action that reads the cluster state for shards with the requested criteria (see {@link ClusterHealthStatus}) of specific indices
* and fetches store information from all the nodes using {@link TransportNodesListGatewayStartedShards}
*/
public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAction<IndicesShardStoresRequest, IndicesShardStoresResponse> {
private final TransportNodesListGatewayStartedShards listShardStoresInfo;
@Inject
public TransportIndicesShardStoresAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, TransportNodesListGatewayStartedShards listShardStoresInfo) {
super(settings, IndicesShardStoresAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, IndicesShardStoresRequest::new);
this.listShardStoresInfo = listShardStoresInfo;
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected IndicesShardStoresResponse newResponse() {
return new IndicesShardStoresResponse();
}
@Override
protected void masterOperation(IndicesShardStoresRequest request, ClusterState state, ActionListener<IndicesShardStoresResponse> listener) {
final RoutingTable routingTables = state.routingTable();
final RoutingNodes routingNodes = state.getRoutingNodes();
final String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
final Set<ShardId> shardIdsToFetch = new HashSet<>();
logger.trace("using cluster state version [{}] to determine shards", state.version());
// collect relevant shard ids of the requested indices for fetching store infos
for (String index : concreteIndices) {
IndexRoutingTable indexShardRoutingTables = routingTables.index(index);
if (indexShardRoutingTables == null) {
continue;
}
for (IndexShardRoutingTable routing : indexShardRoutingTables) {
final int shardId = routing.shardId().id();
ClusterShardHealth shardHealth = new ClusterShardHealth(shardId, routing);
if (request.shardStatuses().contains(shardHealth.getStatus())) {
shardIdsToFetch.add(routing.shardId());
}
}
}
// async fetch store infos from all the nodes
// NOTE: instead of fetching shard store info one by one from every node (nShards * nNodes requests)
// we could fetch all shard store info from every node once (nNodes requests)
// we have to implement a TransportNodesAction instead of using TransportNodesListGatewayStartedShards
// for fetching shard stores info, that operates on a list of shards instead of a single shard
new AsyncShardStoresInfoFetches(state.nodes(), routingNodes, shardIdsToFetch, listener).start();
}
@Override
protected ClusterBlockException checkBlock(IndicesShardStoresRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
}
private class AsyncShardStoresInfoFetches {
private final DiscoveryNodes nodes;
private final RoutingNodes routingNodes;
private final Set<ShardId> shardIds;
private final ActionListener<IndicesShardStoresResponse> listener;
private CountDown expectedOps;
private final Queue<InternalAsyncFetch.Response> fetchResponses;
AsyncShardStoresInfoFetches(DiscoveryNodes nodes, RoutingNodes routingNodes, Set<ShardId> shardIds, ActionListener<IndicesShardStoresResponse> listener) {
this.nodes = nodes;
this.routingNodes = routingNodes;
this.shardIds = shardIds;
this.listener = listener;
this.fetchResponses = new ConcurrentLinkedQueue<>();
this.expectedOps = new CountDown(shardIds.size());
}
void start() {
if (shardIds.isEmpty()) {
listener.onResponse(new IndicesShardStoresResponse());
} else {
for (ShardId shardId : shardIds) {
InternalAsyncFetch fetch = new InternalAsyncFetch(logger, "shard_stores", shardId, listShardStoresInfo);
fetch.fetchData(nodes, Collections.<String>emptySet());
}
}
}
private class InternalAsyncFetch extends AsyncShardFetch<NodeGatewayStartedShards> {
InternalAsyncFetch(Logger logger, String type, ShardId shardId, TransportNodesListGatewayStartedShards action) {
super(logger, type, shardId, action);
}
@Override
protected synchronized void processAsyncFetch(List<NodeGatewayStartedShards> responses, List<FailedNodeException> failures, long fetchingRound) {
fetchResponses.add(new Response(shardId, responses, failures));
if (expectedOps.countDown()) {
finish();
}
}
void finish() {
ImmutableOpenMap.Builder<String, ImmutableOpenIntMap<java.util.List<IndicesShardStoresResponse.StoreStatus>>> indicesStoreStatusesBuilder = ImmutableOpenMap.builder();
java.util.List<IndicesShardStoresResponse.Failure> failureBuilder = new ArrayList<>();
for (Response fetchResponse : fetchResponses) {
ImmutableOpenIntMap<java.util.List<IndicesShardStoresResponse.StoreStatus>> indexStoreStatuses = indicesStoreStatusesBuilder.get(fetchResponse.shardId.getIndexName());
final ImmutableOpenIntMap.Builder<java.util.List<IndicesShardStoresResponse.StoreStatus>> indexShardsBuilder;
if (indexStoreStatuses == null) {
indexShardsBuilder = ImmutableOpenIntMap.builder();
} else {
indexShardsBuilder = ImmutableOpenIntMap.builder(indexStoreStatuses);
}
java.util.List<IndicesShardStoresResponse.StoreStatus> storeStatuses = indexShardsBuilder.get(fetchResponse.shardId.id());
if (storeStatuses == null) {
storeStatuses = new ArrayList<>();
}
for (NodeGatewayStartedShards response : fetchResponse.responses) {
if (shardExistsInNode(response)) {
IndicesShardStoresResponse.StoreStatus.AllocationStatus allocationStatus = getAllocationStatus(fetchResponse.shardId.getIndexName(), fetchResponse.shardId.id(), response.getNode());
storeStatuses.add(new IndicesShardStoresResponse.StoreStatus(response.getNode(), response.allocationId(), allocationStatus, response.storeException()));
}
}
CollectionUtil.timSort(storeStatuses);
indexShardsBuilder.put(fetchResponse.shardId.id(), storeStatuses);
indicesStoreStatusesBuilder.put(fetchResponse.shardId.getIndexName(), indexShardsBuilder.build());
for (FailedNodeException failure : fetchResponse.failures) {
failureBuilder.add(new IndicesShardStoresResponse.Failure(failure.nodeId(), fetchResponse.shardId.getIndexName(), fetchResponse.shardId.id(), failure.getCause()));
}
}
listener.onResponse(new IndicesShardStoresResponse(indicesStoreStatusesBuilder.build(), Collections.unmodifiableList(failureBuilder)));
}
/**
 * Resolves how the given node currently uses the shard's store: PRIMARY or
 * REPLICA when the routing table shows a matching shard on that node,
 * otherwise UNUSED.
 */
private IndicesShardStoresResponse.StoreStatus.AllocationStatus getAllocationStatus(String index, int shardID, DiscoveryNode node) {
    for (ShardRouting routing : routingNodes.node(node.getId())) {
        ShardId routedShardId = routing.shardId();
        // Skip routings that belong to a different shard or index.
        if (routedShardId.id() != shardID || !routedShardId.getIndexName().equals(index)) {
            continue;
        }
        if (routing.primary()) {
            return IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY;
        }
        return routing.assignedToNode()
            ? IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA
            : IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED;
    }
    // No routing entry on that node at all: the store is not in use.
    return IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED;
}
/**
 * A shard exists/existed in a node only if shard state file exists in the node.
 * A store exception also counts: it proves the node touched an on-disk store.
 */
private boolean shardExistsInNode(final NodeGatewayStartedShards response) {
    if (response.storeException() != null) {
        return true;
    }
    return response.allocationId() != null;
}
/**
 * Intentionally a no-op: this action only collects store metadata (see
 * {@code finish()}) and must not trigger any shard reallocation.
 */
@Override
protected void reroute(ShardId shardId, String reason) {
    // no-op
}
/**
 * Per-shard result of a store fetch round: the started-shard answers from
 * each node plus any node-level failures for that shard.
 */
public class Response {
    /** Shard the responses and failures below belong to. */
    private final ShardId shardId;
    /** One entry per node that answered the fetch for this shard. */
    private final List<NodeGatewayStartedShards> responses;
    /** Nodes that failed to answer the fetch for this shard. */
    private final List<FailedNodeException> failures;
    Response(ShardId shardId, List<NodeGatewayStartedShards> responses, List<FailedNodeException> failures) {
        this.shardId = shardId;
        this.responses = responses;
        this.failures = failures;
    }
}
}
}
}
| apache-2.0 |
Shashikanth-Huawei/bmp | apps/routing/src/test/java/org/onosproject/routing/bgp/PathSegmentTest.java | 4327 | /*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.routing.bgp;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.util.ArrayList;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;
/**
 * Unit tests for the BgpRouteEntry.PathSegment class.
 */
public class PathSegmentTest {
    /**
     * Builds the list of AS numbers [1, 2, 3] shared by the reference segments.
     *
     * @return a freshly allocated, mutable list of AS numbers
     */
    private static ArrayList<Long> referenceAsNumbers() {
        ArrayList<Long> asNumbers = new ArrayList<>();
        asNumbers.add(1L);
        asNumbers.add(2L);
        asNumbers.add(3L);
        return asNumbers;
    }

    /**
     * Generates a Path Segment.
     *
     * @return a generated PathSegment
     */
    private BgpRouteEntry.PathSegment generatePathSegment() {
        byte segmentType = (byte) BgpConstants.Update.AsPath.AS_SEQUENCE;
        return new BgpRouteEntry.PathSegment(segmentType, referenceAsNumbers());
    }

    /**
     * Tests valid class constructor.
     */
    @Test
    public void testConstructor() {
        BgpRouteEntry.PathSegment segment = generatePathSegment();
        assertThat(segment.toString(),
                   is("PathSegment{type=AS_SEQUENCE, segmentAsNumbers=[1, 2, 3]}"));
    }

    /**
     * Tests invalid class constructor for null Segment AS Numbers.
     */
    @Test(expected = NullPointerException.class)
    public void testInvalidConstructorNullSegmentAsNumbers() {
        byte segmentType = (byte) BgpConstants.Update.AsPath.AS_SEQUENCE;
        new BgpRouteEntry.PathSegment(segmentType, (ArrayList<Long>) null);
    }

    /**
     * Tests getting the fields of a Path Segment.
     */
    @Test
    public void testGetFields() {
        // Expected values to compare against
        byte expectedType = (byte) BgpConstants.Update.AsPath.AS_SEQUENCE;
        ArrayList<Long> expectedAsNumbers = referenceAsNumbers();
        // Generate the entry to test
        BgpRouteEntry.PathSegment segment = generatePathSegment();
        assertThat(segment.getType(), is(expectedType));
        assertThat(segment.getSegmentAsNumbers(), is(expectedAsNumbers));
    }

    /**
     * Tests equality of {@link BgpRouteEntry.PathSegment}.
     */
    @Test
    public void testEquality() {
        // Two independently generated segments must compare equal.
        assertThat(generatePathSegment(), is(generatePathSegment()));
    }

    /**
     * Tests non-equality of {@link BgpRouteEntry.PathSegment}.
     */
    @Test
    public void testNonEquality() {
        BgpRouteEntry.PathSegment segment1 = generatePathSegment();
        // Second segment differs from the reference only in the middle AS number.
        ArrayList<Long> otherAsNumbers = referenceAsNumbers();
        otherAsNumbers.set(1, 22L);
        BgpRouteEntry.PathSegment segment2 = new BgpRouteEntry.PathSegment(
            (byte) BgpConstants.Update.AsPath.AS_SEQUENCE, otherAsNumbers);
        assertThat(segment1, Matchers.is(not(segment2)));
    }

    /**
     * Tests object string representation.
     */
    @Test
    public void testToString() {
        BgpRouteEntry.PathSegment segment = generatePathSegment();
        assertThat(segment.toString(),
                   is("PathSegment{type=AS_SEQUENCE, segmentAsNumbers=[1, 2, 3]}"));
    }
}
| apache-2.0 |
jwren/intellij-community | plugins/kotlin/idea/tests/testData/runConfigurations/junit/Simple/module/test/MyJavaTest.java | 136 | import org.junit.Test;
/**
 * Fixture used by run-configuration tests: only the discoverable @Test
 * methods matter, so the bodies are intentionally empty.
 */
public class MyJavaTest {
    @Test
    public void testA() {
    }
    @Test
    public void testB() {
    }
}
droolsjbpm/drools | kie-pmml/src/test/java/org/kie/pmml/pmml_4_2/PMMLErrorTest.java | 4323 | /*
* Copyright 2011 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.pmml_4_2;
import org.junit.Ignore;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.Message;
import org.kie.api.io.ResourceType;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
// NOTE(review): the whole class is @Ignore'd — confirm whether these tests are
// still meant to run before investing in them.
@Ignore
public class PMMLErrorTest {
    // PMML 4.2 document whose model element ("IllegalModel") is not a valid
    // model type; building it is expected to report an error.
    String pmlm = "<PMML version=\"4.2\" xsi:schemaLocation=\"http://www.dmg.org/PMML-4_2 http://www.dmg.org/v4-1/pmml-4-2.xsd\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns=\"http://www.dmg.org/PMML-4_2\">\n" +
            " <Header copyright=\"opensource\" description=\"test\">\n" +
            " <Application name=\"handmade\" version=\"1.0\"/>\n" +
            " <Annotation>notes here</Annotation>\n" +
            " <Timestamp>now</Timestamp>\n" +
            " </Header>\n" +
            "<IllegalModel>\n" +
            "</IllegalModel>" +
            "</PMML>";
    // Minimal well-formed PMML 4.2 document (header + data dictionary only);
    // building it must NOT report errors.
    String pmml = "<PMML version=\"4.2\" xsi:schemaLocation=\"http://www.dmg.org/PMML-4_2 http://www.dmg.org/v4-1/pmml-4-2.xsd\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns=\"http://www.dmg.org/PMML-4_2\">\n" +
            " <Header copyright=\"opensource\" description=\"test\">\n" +
            " <Application name=\"handmade\" version=\"1.0\"/>\n" +
            " <Annotation>notes here</Annotation>\n" +
            " <Timestamp>now</Timestamp>\n" +
            " </Header>" +
            "<DataDictionary>\n" +
            " <DataField name=\"fld\" dataType=\"string\" optype=\"categorical\" />" +
            "</DataDictionary>\n" +
            "</PMML>";
    // Verifies the legacy KnowledgeBuilder API flags the bad document and
    // accepts the good one. (Method name typo "Genration" kept: renaming a
    // public test method is out of scope for a documentation pass.)
    @Test
    public void testErrorDuringGenrationAPICompatibility() {
        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( pmlm.getBytes() ), ResourceType.PMML );
        System.out.print( kbuilder.getErrors() );
        assertTrue( kbuilder.hasErrors() );
        KnowledgeBuilder kbuilder2 = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder2.add( ResourceFactory.newByteArrayResource( pmml.getBytes() ),ResourceType.PMML );
        System.out.print( kbuilder2.getErrors() );
        assertFalse( kbuilder2.hasErrors() );
    }
    // Bad document through the KieBuilder API: exactly one ERROR message expected.
    @Test
    public void testErrorDuringGeneration() {
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem();
        kfs.write( ResourceFactory.newByteArrayResource( pmlm.getBytes() )
                .setResourceType( ResourceType.PMML )
                .setSourcePath( "pmlm.pmml" )
        );
        KieBuilder kb = ks.newKieBuilder( kfs );
        kb.buildAll();
        assertEquals( 1, kb.getResults().getMessages( Message.Level.ERROR ).size() );
    }
    // Good document through the KieBuilder API: no ERROR messages expected.
    @Test
    public void testNoErrorDuringGeneration() {
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem();
        kfs.write( ResourceFactory.newByteArrayResource( pmml.getBytes() )
                .setResourceType( ResourceType.PMML )
                .setSourcePath( "pmml.pmml" )
        );
        KieBuilder kb = ks.newKieBuilder( kfs );
        kb.buildAll();
        assertEquals( 0, kb.getResults().getMessages( Message.Level.ERROR ).size() );
    }
}
| apache-2.0 |
nikhilvibhav/camel | components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanConstants.java | 1075 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.bean;
/**
 * Deprecated constants for the Camel Bean component, kept only for
 * backwards compatibility.
 */
@Deprecated
public final class BeanConstants {
    /**
     * Name of the bean-parameter-mapping-strategy option.
     *
     * @deprecated the value {@code "CamelBeanParameterMappingStrategy"} must
     *             not change while deprecated callers still reference it.
     */
    @Deprecated
    public static final String BEAN_PARAMETER_MAPPING_STRATEGY = "CamelBeanParameterMappingStrategy";
    private BeanConstants() {
        // Utility class
    }
}
| apache-2.0 |
jwren/intellij-community | plugins/kotlin/j2k/old/tests/testData/fileOrElement/postfixOperator/decrement.java | 16 | //statement
i--; | apache-2.0 |
apache/flink | flink-kubernetes/src/test/java/org/apache/flink/kubernetes/kubeclient/decorators/CmdJobManagerDecoratorTest.java | 3815 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.kubernetes.kubeclient.decorators;
import org.apache.flink.configuration.DeploymentOptions;
import org.apache.flink.kubernetes.configuration.KubernetesConfigOptions;
import org.apache.flink.kubernetes.configuration.KubernetesDeploymentTarget;
import org.apache.flink.kubernetes.kubeclient.FlinkPod;
import org.apache.flink.kubernetes.kubeclient.KubernetesJobManagerTestBase;
import org.apache.flink.kubernetes.utils.Constants;
import org.apache.flink.kubernetes.utils.KubernetesUtils;
import org.junit.Test;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
/** General tests for the {@link CmdJobManagerDecorator}. */
public class CmdJobManagerDecoratorTest extends KubernetesJobManagerTestBase {
    /** Decorator under test; rebuilt for every test via {@link #onSetup()}. */
    private CmdJobManagerDecorator cmdJobManagerDecorator;
    @Override
    protected void onSetup() throws Exception {
        super.onSetup();
        this.cmdJobManagerDecorator = new CmdJobManagerDecorator(kubernetesJobManagerParameters);
    }
    // The decorator must change only the main container, never the rest of the pod.
    @Test
    public void testContainerIsDecorated() {
        flinkConfig.set(DeploymentOptions.TARGET, KubernetesDeploymentTarget.SESSION.getName());
        final FlinkPod resultFlinkPod = cmdJobManagerDecorator.decorateFlinkPod(baseFlinkPod);
        assertThat(
                resultFlinkPod.getPodWithoutMainContainer(),
                is(equalTo(baseFlinkPod.getPodWithoutMainContainer())));
        assertThat(
                resultFlinkPod.getMainContainer(), not(equalTo(baseFlinkPod.getMainContainer())));
    }
    @Test
    public void testSessionClusterCommandsAndArgs() {
        testJobManagerCommandsAndArgs(KubernetesDeploymentTarget.SESSION.getName());
    }
    @Test
    public void testApplicationClusterCommandsAndArgs() {
        testJobManagerCommandsAndArgs(KubernetesDeploymentTarget.APPLICATION.getName());
    }
    // An unrecognized deployment target must be rejected, not silently accepted.
    @Test(expected = IllegalArgumentException.class)
    public void testUnsupportedDeploymentTargetShouldFail() {
        testJobManagerCommandsAndArgs("unsupported-deployment-target");
    }
    /**
     * Shared check: after decoration the container command is the configured
     * entry path and the args are the bash-wrapped JobManager start script for
     * the given deployment target.
     */
    private void testJobManagerCommandsAndArgs(String target) {
        flinkConfig.set(DeploymentOptions.TARGET, target);
        final FlinkPod resultFlinkPod = cmdJobManagerDecorator.decorateFlinkPod(baseFlinkPod);
        final String entryCommand = flinkConfig.get(KubernetesConfigOptions.KUBERNETES_ENTRY_PATH);
        assertThat(
                resultFlinkPod.getMainContainer().getCommand(), containsInAnyOrder(entryCommand));
        List<String> flinkCommands =
                KubernetesUtils.getStartCommandWithBashWrapper(
                        Constants.KUBERNETES_JOB_MANAGER_SCRIPT_PATH + " " + target);
        assertThat(resultFlinkPod.getMainContainer().getArgs(), contains(flinkCommands.toArray()));
    }
}
| apache-2.0 |
samaitra/ignite | modules/ml/src/main/java/org/apache/ignite/ml/tree/boosting/GDBBinaryClassifierOnTreesTrainer.java | 4191 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.ml.tree.boosting;
import org.apache.ignite.ml.composition.boosting.GDBBinaryClassifierTrainer;
import org.apache.ignite.ml.composition.boosting.GDBLearningStrategy;
import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;
import org.apache.ignite.ml.tree.DecisionTreeRegressionTrainer;
import org.jetbrains.annotations.NotNull;
/**
 * Implementation of Gradient Boosting Classifier Trainer on trees.
 *
 * Base models are {@link DecisionTreeRegressionTrainer}s configured with this
 * trainer's max depth and min impurity decrease.
 */
public class GDBBinaryClassifierOnTreesTrainer extends GDBBinaryClassifierTrainer {
    /** Max depth of each base decision tree. */
    private int maxDepth;
    /** Min impurity decrease for base-tree splits. */
    private double minImpurityDecrease;
    /** Use index structure instead of using sorting during the learning process. Enabled by default. */
    private boolean usingIdx = true;
    /**
     * Constructs instance of GDBBinaryClassifierOnTreesTrainer.
     *
     * @param gradStepSize Gradient step size.
     * @param cntOfIterations Count of iterations.
     * @param maxDepth Max depth.
     * @param minImpurityDecrease Min impurity decrease.
     */
    public GDBBinaryClassifierOnTreesTrainer(double gradStepSize, Integer cntOfIterations,
        int maxDepth, double minImpurityDecrease) {
        super(gradStepSize, cntOfIterations);
        this.maxDepth = maxDepth;
        this.minImpurityDecrease = minImpurityDecrease;
    }
    /** {@inheritDoc} */
    @NotNull @Override protected DecisionTreeRegressionTrainer buildBaseModelTrainer() {
        return new DecisionTreeRegressionTrainer(maxDepth, minImpurityDecrease).withUsingIdx(usingIdx);
    }
    /** {@inheritDoc} */
    @Override protected GDBLearningStrategy getLearningStrategy() {
        return new GDBOnTreesLearningStrategy(usingIdx);
    }
    /** {@inheritDoc} */
    // Delegates to super; declared return type is narrowed so fluent chains keep
    // the subclass type (assumes super returns this instance — confirm).
    @Override public GDBBinaryClassifierOnTreesTrainer withEnvironmentBuilder(LearningEnvironmentBuilder envBuilder) {
        return super.withEnvironmentBuilder(envBuilder);
    }
    /**
     * Set useIndex parameter and returns trainer instance.
     *
     * @param usingIdx Use index.
     * @return Decision tree trainer.
     */
    public GDBBinaryClassifierOnTreesTrainer withUsingIdx(boolean usingIdx) {
        this.usingIdx = usingIdx;
        return this;
    }
    /**
     * Get the max depth.
     *
     * @return The property value.
     */
    public int getMaxDepth() {
        return maxDepth;
    }
    /**
     * Set up the max depth.
     *
     * @param maxDepth The parameter value.
     * @return Decision tree trainer.
     */
    public GDBBinaryClassifierOnTreesTrainer setMaxDepth(int maxDepth) {
        this.maxDepth = maxDepth;
        return this;
    }
    /**
     * Get the min impurity decrease.
     *
     * @return The property value.
     */
    public double getMinImpurityDecrease() {
        return minImpurityDecrease;
    }
    /**
     * Set up the min impurity decrease.
     *
     * @param minImpurityDecrease The parameter value.
     * @return Decision tree trainer.
     */
    public GDBBinaryClassifierOnTreesTrainer setMinImpurityDecrease(double minImpurityDecrease) {
        this.minImpurityDecrease = minImpurityDecrease;
        return this;
    }
    /**
     * Get the using index structure property instead of using sorting during the learning process.
     *
     * @return The property value.
     */
    public boolean isUsingIdx() {
        return usingIdx;
    }
}
| apache-2.0 |
kidaa/incubator-ignite | modules/schedule/src/main/java/org/apache/ignite/internal/processors/schedule/IgniteScheduleProcessor.java | 3765 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.schedule;
import it.sauronsoftware.cron4j.*;
import org.apache.ignite.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.scheduler.*;
import org.jetbrains.annotations.*;
import java.util.*;
import java.util.concurrent.*;
/**
 * Schedules cron-based execution of grid tasks and closures.
 *
 * Wraps a cron4j {@link Scheduler} whose lifecycle follows this processor's
 * {@code start()}/{@code stop()}.
 */
public class IgniteScheduleProcessor extends IgniteScheduleProcessorAdapter {
    /** Cron scheduler. Created in {@code start()}, nulled in {@code stop()}. */
    private Scheduler sched;
    /** Schedule futures of currently active (not finished) tasks. */
    private Set<SchedulerFuture<?>> schedFuts = new GridConcurrentHashSet<>();
    /**
     * @param ctx Kernal context.
     */
    public IgniteScheduleProcessor(GridKernalContext ctx) {
        super(ctx);
    }
    /** {@inheritDoc} */
    @Override public SchedulerFuture<?> schedule(final Runnable c, String ptrn) {
        assert c != null;
        assert ptrn != null;
        ScheduleFutureImpl<Object> fut = new ScheduleFutureImpl<>(sched, ctx, ptrn);
        // Adapt the Runnable to the Callable-based future; result is always null.
        fut.schedule(new IgniteCallable<Object>() {
            @Nullable @Override public Object call() {
                c.run();
                return null;
            }
        });
        return fut;
    }
    /** {@inheritDoc} */
    @Override public <R> SchedulerFuture<R> schedule(Callable<R> c, String pattern) {
        assert c != null;
        assert pattern != null;
        ScheduleFutureImpl<R> fut = new ScheduleFutureImpl<>(sched, ctx, pattern);
        fut.schedule(c);
        return fut;
    }
    /**
     * @return Future objects of currently scheduled, active (not finished) tasks.
     */
    public Collection<SchedulerFuture<?>> getScheduledFutures() {
        // Snapshot copy so callers never observe concurrent modifications.
        return Collections.unmodifiableList(new ArrayList<>(schedFuts));
    }
    /**
     * Removes future object from the collection of scheduled futures.
     *
     * @param fut Future object.
     */
    void onDescheduled(SchedulerFuture<?> fut) {
        assert fut != null;
        schedFuts.remove(fut);
    }
    /**
     * Adds future object to the collection of scheduled futures.
     *
     * @param fut Future object.
     */
    void onScheduled(SchedulerFuture<?> fut) {
        assert fut != null;
        schedFuts.add(fut);
    }
    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        sched = new Scheduler();
        sched.start();
    }
    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        // Guard: stop() may be called before start() completed or twice.
        if (sched.isStarted())
            sched.stop();
        sched = null;
    }
    /** {@inheritDoc} */
    @Override public void printMemoryStats() {
        X.println(">>>");
        X.println(">>> Schedule processor memory stats [grid=" + ctx.gridName() + ']');
        X.println(">>> schedFutsSize: " + schedFuts.size());
    }
}
| apache-2.0 |
serDrem/incubator-trafficcontrol | traffic_router/core/src/main/java/com/comcast/cdn/traffic_control/traffic_router/core/dns/RRSetsBuilder.java | 1942 | /*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.cdn.traffic_control.traffic_router.core.dns;
import org.xbill.DNS.RRset;
import org.xbill.DNS.Record;
import org.xbill.DNS.Type;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Groups DNS {@link Record}s into {@link RRset}s and returns them in canonical
 * order: by owner name, then DNS class, with the SOA RRset first within a
 * name/class group, then by record type.
 */
public class RRSetsBuilder {

    /** Converts one group of identically-keyed records into a single RRset. */
    private final Function<List<Record>, RRset> recordsToRRSet = records -> {
        final RRset rrSet = new RRset();
        records.forEach(rrSet::addRR);
        return rrSet;
    };

    /**
     * Grouping key: records sharing name, class, type and TTL belong to the
     * same RRset.
     */
    private static String qualifier(final Record record) {
        return String.format("%s %d %d %d", record.getName().toString(), record.getDClass(), record.getType(), record.getTTL());
    }

    /**
     * Orders RRsets by name, then class, with SOA first, then type.
     *
     * Fix: the previous comparator returned -1 whenever the FIRST argument was
     * an SOA, regardless of the second. That made compare(SOA, SOA) == -1 and
     * both compare(A, SOA) and compare(SOA, A) negative, violating the
     * {@link Comparator} symmetry contract and risking TimSort's
     * "Comparison method violates its general contract!" error. The SOA rule
     * is now applied symmetrically, and int comparisons use
     * {@link Integer#compare} instead of overflow-prone subtraction.
     */
    private static final Comparator<RRset> RR_SET_COMPARATOR = (rrSet1, rrSet2) -> {
        int cmp = rrSet1.getName().compareTo(rrSet2.getName());
        if (cmp != 0) {
            return cmp;
        }
        cmp = Integer.compare(rrSet1.getDClass(), rrSet2.getDClass());
        if (cmp != 0) {
            return cmp;
        }
        final boolean soa1 = rrSet1.getType() == Type.SOA;
        final boolean soa2 = rrSet2.getType() == Type.SOA;
        if (soa1 != soa2) {
            return soa1 ? -1 : 1;
        }
        return Integer.compare(rrSet1.getType(), rrSet2.getType());
    };

    /**
     * Builds the ordered list of RRsets for the given records.
     *
     * @param records records to group; not modified
     * @return RRsets sorted as described on {@link #RR_SET_COMPARATOR}
     */
    public List<RRset> build(final List<Record> records) {
        final Map<String, List<Record>> map = records.stream().sorted().collect(
            Collectors.groupingBy(RRSetsBuilder::qualifier, Collectors.toList())
        );
        return map.values().stream().map(recordsToRRSet).sorted(RR_SET_COMPARATOR).collect(Collectors.toList());
    }
}
| apache-2.0 |
irudyak/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/datastructures/GridCacheCountDownLatchImpl.java | 12100 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.datastructures;
import java.io.Externalizable;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.ObjectStreamException;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.IgniteInternalCache;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.processors.cluster.IgniteChangeGlobalStateSupport;
import org.apache.ignite.lang.IgniteBiTuple;
import static org.apache.ignite.internal.util.typedef.internal.CU.retryTopologySafe;
import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
/**
* Cache count down latch implementation.
*/
public final class GridCacheCountDownLatchImpl extends AtomicDataStructureProxy<GridCacheCountDownLatchValue>
implements GridCacheCountDownLatchEx, IgniteChangeGlobalStateSupport, Externalizable {
/** */
private static final long serialVersionUID = 0L;
/** Internal latch is in unitialized state. */
private static final int UNINITIALIZED_LATCH_STATE = 0;
/** Internal latch is being created. */
private static final int CREATING_LATCH_STATE = 1;
/** Internal latch is ready for the usage. */
private static final int READY_LATCH_STATE = 2;
/** Deserialization stash. */
private static final ThreadLocal<IgniteBiTuple<GridKernalContext, String>> stash =
new ThreadLocal<IgniteBiTuple<GridKernalContext, String>>() {
@Override protected IgniteBiTuple<GridKernalContext, String> initialValue() {
return new IgniteBiTuple<>();
}
};
/** Initial count. */
private int initCnt;
/** Auto delete flag. */
private boolean autoDel;
/** Internal latch (transient). */
private CountDownLatch internalLatch;
/** Initialization guard. */
private final AtomicInteger initGuard = new AtomicInteger();
/** Initialization latch. */
private final CountDownLatch initLatch = new CountDownLatch(1);
/** Latest latch value that is used at the stage while the internal latch is being initialized. */
private Integer lastLatchVal = null;
/**
* Empty constructor required by {@link Externalizable}.
*/
public GridCacheCountDownLatchImpl() {
// No-op.
}
/**
* Constructor.
*
* @param name Latch name.
* @param initCnt Initial count.
* @param autoDel Auto delete flag.
* @param key Latch key.
* @param latchView Latch projection.
*/
public GridCacheCountDownLatchImpl(String name,
int initCnt,
boolean autoDel,
GridCacheInternalKey key,
IgniteInternalCache<GridCacheInternalKey, GridCacheCountDownLatchValue> latchView)
{
super(name, key, latchView);
assert name != null;
assert key != null;
assert latchView != null;
this.initCnt = initCnt;
this.autoDel = autoDel;
}
/** {@inheritDoc} */
@Override public int count() {
try {
GridCacheCountDownLatchValue latchVal = cacheView.get(key);
return latchVal == null ? 0 : latchVal.get();
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
}
/** {@inheritDoc} */
@Override public int initialCount() {
return initCnt;
}
/** {@inheritDoc} */
@Override public boolean autoDelete() {
return autoDel;
}
/** {@inheritDoc} */
@Override public void await() {
try {
initializeLatch();
U.await(internalLatch);
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
}
/** {@inheritDoc} */
@Override public boolean await(long timeout, TimeUnit unit) {
try {
initializeLatch();
return U.await(internalLatch, timeout, unit);
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
}
/** {@inheritDoc} */
@Override public boolean await(long timeout) {
return await(timeout, TimeUnit.MILLISECONDS);
}
/** {@inheritDoc} */
@Override public int countDown() {
return countDown(1);
}
/** {@inheritDoc} */
@Override public int countDown(int val) {
A.ensure(val > 0, "val should be positive");
try {
return retryTopologySafe(new CountDownCallable(val));
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
}
/** {@inheritDoc}*/
@Override public void countDownAll() {
try {
retryTopologySafe(new CountDownCallable(0));
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
}
/** {@inheritDoc} */
@Override public void needCheckNotRemoved() {
// No-op.
}
/** {@inheritDoc} */
@Override public void onUpdate(int cnt) {
assert cnt >= 0;
CountDownLatch latch0;
synchronized (initGuard) {
int state = initGuard.get();
if (state != READY_LATCH_STATE) {
/* Internal latch is not fully initialized yet. Remember latest latch value. */
lastLatchVal = cnt;
return;
}
/* 'synchronized' statement guarantees visibility of internalLatch. No need to make it volatile. */
latch0 = internalLatch;
}
/* Internal latch is fully initialized and ready for the usage. */
assert latch0 != null;
while (latch0.getCount() > cnt)
latch0.countDown();
}
/**
* @throws IgniteCheckedException If operation failed.
*/
private void initializeLatch() throws IgniteCheckedException {
if (initGuard.compareAndSet(UNINITIALIZED_LATCH_STATE, CREATING_LATCH_STATE)) {
try {
internalLatch = retryTopologySafe(new Callable<CountDownLatch>() {
@Override public CountDownLatch call() throws Exception {
try (GridNearTxLocal tx = CU.txStartInternal(ctx, cacheView, PESSIMISTIC, REPEATABLE_READ)) {
GridCacheCountDownLatchValue val = cacheView.get(key);
if (val == null) {
if (log.isDebugEnabled())
log.debug("Failed to find count down latch with given name: " + name);
return new CountDownLatch(0);
}
tx.commit();
return new CountDownLatch(val.get());
}
}
});
synchronized (initGuard) {
if (lastLatchVal != null) {
while (internalLatch.getCount() > lastLatchVal)
internalLatch.countDown();
}
initGuard.set(READY_LATCH_STATE);
}
if (log.isDebugEnabled())
log.debug("Initialized internal latch: " + internalLatch);
}
finally {
initLatch.countDown();
}
}
else {
U.await(initLatch);
if (internalLatch == null)
throw new IgniteCheckedException("Internal latch has not been properly initialized.");
}
}
/** {@inheritDoc} */
@Override public void close() {
if (!rmvd) {
try {
ctx.kernalContext().dataStructures().removeCountDownLatch(name, ctx.group().name());
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
}
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
out.writeObject(ctx.kernalContext());
out.writeUTF(name);
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
IgniteBiTuple<GridKernalContext, String> t = stash.get();
t.set1((GridKernalContext)in.readObject());
t.set2(in.readUTF());
}
    /**
     * Reconstructs object on unmarshalling.
     * <p>
     * Resolves the (context, name) pair stashed by {@code readExternal()} to the shared
     * latch instance registered in the data structures processor.
     *
     * @return Reconstructed object.
     * @throws ObjectStreamException Thrown in case of unmarshalling error.
     */
    @SuppressWarnings({"ConstantConditions"})
    private Object readResolve() throws ObjectStreamException {
        try {
            IgniteBiTuple<GridKernalContext, String> t = stash.get();

            // Look up the existing latch by name; presumably the trailing boolean
            // arguments disable creation -- TODO confirm against the API.
            return t.get1().dataStructures().countDownLatch(t.get2(), null, 0, false, false);
        }
        catch (IgniteCheckedException e) {
            throw U.withCause(new InvalidObjectException(e.getMessage()), e);
        }
        finally {
            // Always clear the thread-local stash to avoid leaking the kernal context.
            stash.remove();
        }
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        // Reflection-based string builder over the class' annotated fields.
        return S.toString(GridCacheCountDownLatchImpl.class, this);
    }
    /**
     * Transactionally decrements the distributed latch value stored in the cache
     * and returns the resulting count.
     */
    private class CountDownCallable implements Callable<Integer> {
        /** Value to count down on (if 0 then latch is counted down to 0). */
        private final int val;

        /**
         * @param val Value to count down on (if 0 is passed latch is counted down to 0).
         */
        private CountDownCallable(int val) {
            assert val >= 0;

            this.val = val;
        }

        /** {@inheritDoc} */
        @Override public Integer call() throws Exception {
            try (GridNearTxLocal tx = CU.txStartInternal(ctx, cacheView, PESSIMISTIC, REPEATABLE_READ)) {
                GridCacheCountDownLatchValue latchVal = cacheView.get(key);

                if (latchVal == null) {
                    if (log.isDebugEnabled())
                        log.debug("Failed to find count down latch with given name: " + name);

                    // Entry is gone (latch removed): report 0; tx rolls back on close.
                    return 0;
                }

                int retVal;

                if (val > 0) {
                    // Decrement by 'val', clamping at zero.
                    retVal = latchVal.get() - val;

                    if (retVal < 0)
                        retVal = 0;
                }
                else
                    // val == 0 means "count all the way down to zero".
                    retVal = 0;

                latchVal.set(retVal);

                cacheView.put(key, latchVal);

                tx.commit();

                return retVal;
            }
        }
    }
}
| apache-2.0 |
GlenRSmith/elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopySettingsStep.java | 4274 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ilm;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;

import java.util.Arrays;
import java.util.Locale;
import java.util.Objects;
/**
* Copy the provided settings from the source to the target index.
* <p>
* The target index is derived from the source index using the provided prefix.
* This is useful for actions like shrink or searchable snapshot that create a new index and migrate the ILM execution from the source
* to the target index.
*/
public class CopySettingsStep extends ClusterStateActionStep {
    public static final String NAME = "copy-settings";

    private static final Logger logger = LogManager.getLogger(CopySettingsStep.class);

    /** Names of the settings copied verbatim from the source index to the target index. */
    private final String[] settingsKeys;
    /** Prefix prepended to the source index name to derive the target index name. */
    private final String indexPrefix;

    public CopySettingsStep(StepKey key, StepKey nextStepKey, String indexPrefix, String... settingsKeys) {
        super(key, nextStepKey);
        Objects.requireNonNull(indexPrefix);
        Objects.requireNonNull(settingsKeys);
        this.indexPrefix = indexPrefix;
        this.settingsKeys = settingsKeys;
    }

    @Override
    public boolean isRetryable() {
        return true;
    }

    public String[] getSettingsKeys() {
        return settingsKeys;
    }

    public String getIndexPrefix() {
        return indexPrefix;
    }

    /**
     * Copies the configured settings keys from the source index onto the target index
     * (derived as {@code indexPrefix + sourceIndexName}) and bumps the target's
     * settings version.
     */
    @Override
    public ClusterState performAction(Index index, ClusterState clusterState) {
        String sourceIndexName = index.getName();
        IndexMetadata sourceIndexMetadata = clusterState.metadata().index(sourceIndexName);
        String targetIndexName = indexPrefix + sourceIndexName;
        IndexMetadata targetIndexMetadata = clusterState.metadata().index(targetIndexName);

        if (sourceIndexMetadata == null) {
            // Index must have been since deleted, ignore it
            logger.debug("[{}] lifecycle action for index [{}] executed but index no longer exists", getKey().getAction(), sourceIndexName);
            return clusterState;
        }

        // Nothing to copy; leave the cluster state untouched.
        if (settingsKeys == null || settingsKeys.length == 0) {
            return clusterState;
        }

        if (targetIndexMetadata == null) {
            String errorMessage = String.format(
                Locale.ROOT,
                "index [%s] is being referenced by ILM action [%s] on step [%s] but " + "it doesn't exist",
                targetIndexName,
                getKey().getAction(),
                getKey().getName()
            );
            logger.debug(errorMessage);
            throw new IllegalStateException(errorMessage);
        }

        Settings.Builder settings = Settings.builder().put(targetIndexMetadata.getSettings());
        for (String key : settingsKeys) {
            // NOTE(review): a key absent on the source yields a null value here -- presumably
            // acceptable to Settings.Builder.put; confirm if keys may be missing.
            String value = sourceIndexMetadata.getSettings().get(key);
            settings.put(key, value);
        }

        Metadata.Builder newMetaData = Metadata.builder(clusterState.getMetadata())
            .put(
                IndexMetadata.builder(targetIndexMetadata).settingsVersion(targetIndexMetadata.getSettingsVersion() + 1).settings(settings)
            );
        return ClusterState.builder(clusterState).metadata(newMetaData.build(false)).build();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (super.equals(o) == false) {
            return false;
        }
        CopySettingsStep that = (CopySettingsStep) o;
        // Arrays.equals compares array contents; Objects.equals on an array only
        // compares references and would report equal steps as different.
        return Arrays.equals(settingsKeys, that.settingsKeys) && Objects.equals(indexPrefix, that.indexPrefix);
    }

    @Override
    public int hashCode() {
        // Hash the array's contents (Arrays.hashCode) so hashCode stays consistent
        // with the content-based equals above.
        return Objects.hash(super.hashCode(), Arrays.hashCode(settingsKeys), indexPrefix);
    }
}
| apache-2.0 |
rmulvey/bridgepoint | src/org.xtuml.bp.xtext.masl.parent/org.xtuml.bp.xtext.masl/emf-gen/org/xtuml/bp/xtext/masl/masl/structure/impl/TransitionRowImpl.java | 7380 | /**
* generated by Xtext 2.9.2
*/
package org.xtuml.bp.xtext.masl.masl.structure.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.xtuml.bp.xtext.masl.masl.structure.StateDeclaration;
import org.xtuml.bp.xtext.masl.masl.structure.StructurePackage;
import org.xtuml.bp.xtext.masl.masl.structure.TransitionOption;
import org.xtuml.bp.xtext.masl.masl.structure.TransitionRow;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Transition Row</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link org.xtuml.bp.xtext.masl.masl.structure.impl.TransitionRowImpl#getStart <em>Start</em>}</li>
* <li>{@link org.xtuml.bp.xtext.masl.masl.structure.impl.TransitionRowImpl#isNonExistant <em>Non Existant</em>}</li>
* <li>{@link org.xtuml.bp.xtext.masl.masl.structure.impl.TransitionRowImpl#getOptions <em>Options</em>}</li>
* </ul>
*
* @generated
*/
public class TransitionRowImpl extends PragmatizedImpl implements TransitionRow {
  // NOTE(review): EMF/Xtext generated code. Manual edits to "@generated" members are
  // overwritten on regeneration unless the tag is changed to "@generated NOT".

  /**
   * The cached value of the '{@link #getStart() <em>Start</em>}' reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getStart()
   * @generated
   * @ordered
   */
  protected StateDeclaration start;
  /**
   * The default value of the '{@link #isNonExistant() <em>Non Existant</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isNonExistant()
   * @generated
   * @ordered
   */
  protected static final boolean NON_EXISTANT_EDEFAULT = false;
  /**
   * The cached value of the '{@link #isNonExistant() <em>Non Existant</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isNonExistant()
   * @generated
   * @ordered
   */
  protected boolean nonExistant = NON_EXISTANT_EDEFAULT;
  /**
   * The cached value of the '{@link #getOptions() <em>Options</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getOptions()
   * @generated
   * @ordered
   */
  protected EList<TransitionOption> options;
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected TransitionRowImpl() {
    super();
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass() {
    return StructurePackage.Literals.TRANSITION_ROW;
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public StateDeclaration getStart() {
    // Resolve a lazily-loaded proxy and notify adapters if resolution changed the instance.
    if (start != null && start.eIsProxy()) {
      InternalEObject oldStart = (InternalEObject)start;
      start = (StateDeclaration)eResolveProxy(oldStart);
      if (start != oldStart) {
        if (eNotificationRequired())
          eNotify(new ENotificationImpl(this, Notification.RESOLVE, StructurePackage.TRANSITION_ROW__START, oldStart, start));
      }
    }
    return start;
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public StateDeclaration basicGetStart() {
    // Raw accessor: returns the field without proxy resolution.
    return start;
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setStart(StateDeclaration newStart) {
    StateDeclaration oldStart = start;
    start = newStart;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, StructurePackage.TRANSITION_ROW__START, oldStart, start));
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public boolean isNonExistant() {
    return nonExistant;
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setNonExistant(boolean newNonExistant) {
    boolean oldNonExistant = nonExistant;
    nonExistant = newNonExistant;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, StructurePackage.TRANSITION_ROW__NON_EXISTANT, oldNonExistant, nonExistant));
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<TransitionOption> getOptions() {
    // Containment list is created on first access.
    if (options == null) {
      options = new EObjectContainmentEList<TransitionOption>(TransitionOption.class, this, StructurePackage.TRANSITION_ROW__OPTIONS);
    }
    return options;
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
      case StructurePackage.TRANSITION_ROW__OPTIONS:
        return ((InternalEList<?>)getOptions()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
      case StructurePackage.TRANSITION_ROW__START:
        if (resolve) return getStart();
        return basicGetStart();
      case StructurePackage.TRANSITION_ROW__NON_EXISTANT:
        return isNonExistant();
      case StructurePackage.TRANSITION_ROW__OPTIONS:
        return getOptions();
    }
    return super.eGet(featureID, resolve, coreType);
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case StructurePackage.TRANSITION_ROW__START:
        setStart((StateDeclaration)newValue);
        return;
      case StructurePackage.TRANSITION_ROW__NON_EXISTANT:
        setNonExistant((Boolean)newValue);
        return;
      case StructurePackage.TRANSITION_ROW__OPTIONS:
        getOptions().clear();
        getOptions().addAll((Collection<? extends TransitionOption>)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID) {
    switch (featureID) {
      case StructurePackage.TRANSITION_ROW__START:
        setStart((StateDeclaration)null);
        return;
      case StructurePackage.TRANSITION_ROW__NON_EXISTANT:
        setNonExistant(NON_EXISTANT_EDEFAULT);
        return;
      case StructurePackage.TRANSITION_ROW__OPTIONS:
        getOptions().clear();
        return;
    }
    super.eUnset(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID) {
    switch (featureID) {
      case StructurePackage.TRANSITION_ROW__START:
        return start != null;
      case StructurePackage.TRANSITION_ROW__NON_EXISTANT:
        return nonExistant != NON_EXISTANT_EDEFAULT;
      case StructurePackage.TRANSITION_ROW__OPTIONS:
        return options != null && !options.isEmpty();
    }
    return super.eIsSet(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString() {
    if (eIsProxy()) return super.toString();
    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (nonExistant: ");
    result.append(nonExistant);
    result.append(')');
    return result.toString();
  }
} //TransitionRowImpl
| apache-2.0 |
dahlstrom-g/intellij-community | platform/platform-api/src/com/intellij/ui/components/JBPanelWithEmptyText.java | 1922 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.components;
import com.intellij.util.ui.ComponentWithEmptyText;
import com.intellij.util.ui.JBSwingUtilities;
import com.intellij.util.ui.StatusText;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import java.awt.*;
/**
* @author gregsh
*/
/**
 * A {@link JBPanel} that paints its {@link StatusText} placeholder ("empty text")
 * whenever the panel has no visible child components.
 *
 * @author gregsh
 */
public class JBPanelWithEmptyText extends JBPanel<JBPanelWithEmptyText> implements ComponentWithEmptyText {

  private final StatusText myEmptyText = new StatusText(this) {
    @Override
    protected boolean isStatusVisible() {
      // The placeholder is shown only while none of the children are visible.
      return UIUtil.uiChildren(JBPanelWithEmptyText.this).filter(Component::isVisible).isEmpty();
    }
  };

  public JBPanelWithEmptyText() {
  }

  public JBPanelWithEmptyText(LayoutManager layout) {
    super(layout);
  }

  /** Builder-style setter for the placeholder text. */
  @NotNull
  public JBPanelWithEmptyText withEmptyText(@Nls String str) {
    myEmptyText.setText(str);
    return this;
  }

  @Override
  @NotNull
  public StatusText getEmptyText() {
    return myEmptyText;
  }

  @Override
  protected void paintComponent(Graphics g) {
    super.paintComponent(g);
    // Draw the placeholder on top of the regular panel background.
    myEmptyText.paint(this, g);
  }

  @Override
  protected Graphics getComponentGraphics(Graphics graphics) {
    return JBSwingUtilities.runGlobalCGTransform(this, super.getComponentGraphics(graphics));
  }
}
| apache-2.0 |
siosio/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/comparatorCanBeSimplified/afterComparingMapEntry.java | 890 | // "Fix all 'Comparator method can be simplified' problems in file" "true"
import java.util.*;
import java.util.Map.Entry;
import java.util.stream.Collectors;
class Test {
  // Quick-fix test fixture: each .sorted() call uses the simplified
  // Map.Entry.comparingBy* form that the inspection's fix is expected to produce.
  void test() {
    Map<String, Integer> unsortMap = new HashMap<>();
    unsortMap.put("z", 10);
    unsortMap.put("b", 5);
    unsortMap.put("a", 6);
    unsortMap.put("c", 20);
    unsortMap.put("d", 1);
    Map<String, Integer> result = unsortMap.entrySet().stream()
        .sorted(Entry.comparingByValue(Comparator.reverseOrder()))
        .sorted(Entry.comparingByValue())
        .sorted(Entry.comparingByKey())
        .sorted(Entry.comparingByKey(String.CASE_INSENSITIVE_ORDER))
        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue,
            (oldValue, newValue) -> oldValue, LinkedHashMap::new));
    System.out.println("Sorted...");
    System.out.println(result);
  }
}
pplatek/adempiere | base/src/org/compiere/util/HashcodeBuilder.java | 820 | package org.compiere.util;
import java.util.Map;
/**
* Helper class for building {@link Object#hashCode()} method.
*
* This class is similar with apache commons HashCodeBuilder, but much more simple.
*
* @author tsa
*
*/
/**
 * Incrementally accumulates a hash code value, similar to Apache commons-lang's
 * HashCodeBuilder but intentionally minimal.
 */
public class HashcodeBuilder
{
	/** Multiplier applied to the running hash before each value is folded in. */
	private static final int prime = 31;

	/** Running hash value; starts at 0. */
	private int hashcode = 0;

	public HashcodeBuilder()
	{
		super();
	}

	/**
	 * Folds {@code value}'s hash code into the accumulator; {@code null} contributes 0.
	 *
	 * @return this builder, for chaining
	 */
	public HashcodeBuilder append(Object value)
	{
		final int valueHash = (value == null) ? 0 : value.hashCode();
		hashcode = hashcode * prime + valueHash;
		return this;
	}

	/**
	 * Appends a map, optionally treating a {@code null} or empty map exactly like
	 * appending {@code null}.
	 */
	public HashcodeBuilder append(Map<?, ?> map, boolean handleEmptyAsNull)
	{
		final boolean treatAsNull = handleEmptyAsNull && (map == null || map.isEmpty());
		return treatAsNull ? append((Object)null) : append((Object)map);
	}

	/**
	 * @return the accumulated hash code
	 */
	public int toHashcode()
	{
		return hashcode;
	}
}
| gpl-2.0 |
jvanz/core | qadevOOo/tests/java/ifc/style/_CharacterStyle.java | 1421 | /*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
package ifc.style;
import lib.MultiPropertyTest;
/**
* Testing <code>com.sun.star.style.CharacterStyle</code>
* service properties :
* <ul>
* <li><code> CharDiffHeight</code></li>
* <li><code> CharPropHeight</code></li>
* <li><code> CharDiffHeightAsian</code></li>
* <li><code> CharPropHeightAsian</code></li>
* <li><code> CharDiffHeightComplex</code></li>
* <li><code> CharPropHeightComplex</code></li>
* </ul> <p>
* Properties testing is automated by <code>lib.MultiPropertyTest</code>.
* @see com.sun.star.style.CharacterStyle
*/
public class _CharacterStyle extends MultiPropertyTest {
    // Intentionally empty: all property tests for the CharacterStyle service are
    // driven automatically by the MultiPropertyTest base class.
}
| gpl-3.0 |
zhx828/cbioportal | security/security-spring/src/main/java/org/cbioportal/security/spring/PortalSavedRequestAwareAuthenticationSuccessHandler.java | 7019 | /*
* Copyright (c) 2017 Memorial Sloan-Kettering Cancer Center.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS
* FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder
* is on an "as is" basis, and Memorial Sloan-Kettering Cancer Center has no
* obligations to provide maintenance, support, updates, enhancements or
* modifications. In no event shall Memorial Sloan-Kettering Cancer Center be
* liable to any party for direct, indirect, special, incidental or
* consequential damages, including lost profits, arising out of the use of this
* software and its documentation, even if Memorial Sloan-Kettering Cancer
* Center has been advised of the possibility of such damage.
*/
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This file is part of cBioPortal.
*
* cBioPortal is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.cbioportal.security.spring;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.security.core.Authentication;
import org.springframework.security.web.authentication.SimpleUrlAuthenticationSuccessHandler;
import org.springframework.security.web.savedrequest.SavedRequest;
import org.springframework.security.web.savedrequest.RequestCache;
import org.springframework.security.web.savedrequest.HttpSessionRequestCache;
import org.springframework.util.StringUtils;
import org.springframework.util.Assert;
/**
* A reimplementation of SavedRequestAwareAuthenticationSuccessHandler that checks for
* targetUrlParameter on the SavedRequest rather than on the current request.
*
* @author Manda Wilson
* @see <a href="https://github.com/spring-projects/spring-security/blob/master/web/src/main/java/org/springframework/security/web/authentication/SavedRequestAwareAuthenticationSuccessHandler.java">SavedRequestAwareAuthenticationSuccessHandler</a>
*/
class PortalSavedRequestAwareAuthenticationSuccessHandler extends SimpleUrlAuthenticationSuccessHandler {

    protected final Log logger = LogFactory.getLog(this.getClass());

    // Cache holding the request that originally triggered authentication.
    private RequestCache requestCache = new HttpSessionRequestCache();

    /**
     * On successful authentication, redirects to (in priority order): the default
     * target URL, a relativized target-URL parameter taken from the SAVED request,
     * or the saved request's own redirect URL.
     */
    @Override
    public void onAuthenticationSuccess(HttpServletRequest request,
            HttpServletResponse response, Authentication authentication)
            throws ServletException, IOException {
        SavedRequest savedRequest = requestCache.getRequest(request, response);
        // No saved request: fall back to the parent's default handling.
        if (savedRequest == null) {
            super.onAuthenticationSuccess(request, response, authentication);
            return;
        }
        // Configured to always use the default target URL: discard the saved request.
        if (isAlwaysUseDefaultTargetUrl()) {
            requestCache.removeRequest(request, response);
            super.onAuthenticationSuccess(request, response, authentication);
            return;
        }
        // Unlike Spring's stock handler, read the target-URL parameter from the
        // SAVED request rather than the current one.
        String targetUrlParameter = getTargetUrlParameter();
        String targetUrl = null;
        String[] targetUrlParameterValues = savedRequest.getParameterValues(targetUrlParameter);
        if (targetUrlParameter != null
                && targetUrlParameterValues != null
                && targetUrlParameterValues.length > 0
                && StringUtils.hasText(targetUrlParameterValues[0])) {
            requestCache.removeRequest(request, response);
            // force this to be relative
            // Relativizing prevents open-redirects to attacker-controlled absolute URLs.
            targetUrl = getRelativeURI(request, savedRequest.getParameterValues(targetUrlParameter)[0]);
            Assert.notNull(targetUrl, "'" + savedRequest.getParameterValues(targetUrlParameter)[0] + "' could not be turned into a valid relative URI");
            Assert.isTrue(StringUtils.hasText(targetUrl), "'" + savedRequest.getParameterValues(targetUrlParameter)[0] + "' could not be turned into a valid relative URI");
        } else {
            clearAuthenticationAttributes(request);
            // Use the DefaultSavedRequest URL
            targetUrl = savedRequest.getRedirectUrl();
        }
        logger.debug("Redirecting to Url: " + targetUrl);
        getRedirectStrategy().sendRedirect(request, response, targetUrl);
    }

    public void setRequestCache(RequestCache requestCache) {
        this.requestCache = requestCache;
    }

    /**
     * Converts {@code targetURI} to a context-relative URI (leading "/", scheme,
     * authority and context path stripped), or {@code null} if it cannot be parsed.
     */
    private String getRelativeURI(HttpServletRequest request, String targetURI) {
        String relativeURI = null;
        try {
            URI originalURI = new URI(targetURI);
            logger.debug("getRelativeURI(): request.getServletContext() = '" + request.getServletContext() + "'");
            logger.debug("getRelativeURI(): testing '" + new URI(request.getContextPath()) + "'");
            // URI(String scheme, String authority, String path, String query, String fragment)
            // use relativize so we do not include context path e.g. /cbioportal/
            // use resolve to make sure we have a "/" at the front
            relativeURI = new URI("/").resolve(new URI(request.getContextPath()).relativize(new URI(null,
                                          null,
                                          originalURI.getRawPath(),
                                          originalURI.getRawQuery(),
                                          originalURI.getRawFragment()))).toString();
            logger.debug("getRelativeURI(): changing '" + targetURI + "' to '" + relativeURI + "'");
        } catch (URISyntaxException e) {
            return null;
        }
        return relativeURI;
    }
}
| agpl-3.0 |
nkhuyu/pinot | pinot-core/src/main/java/com/linkedin/pinot/core/data/manager/offline/OfflineTableDataManager.java | 12343 | /**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.core.data.manager.offline;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.io.FileUtils;
import org.apache.helix.ZNRecord;
import org.apache.helix.store.zk.ZkHelixPropertyStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.linkedin.pinot.common.config.AbstractTableConfig;
import com.linkedin.pinot.common.metadata.instance.InstanceZKMetadata;
import com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata;
import com.linkedin.pinot.common.metadata.segment.OfflineSegmentZKMetadata;
import com.linkedin.pinot.common.metadata.segment.SegmentZKMetadata;
import com.linkedin.pinot.common.segment.ReadMode;
import com.linkedin.pinot.common.segment.SegmentMetadata;
import com.linkedin.pinot.common.utils.CommonConstants;
import com.linkedin.pinot.common.utils.NamedThreadFactory;
import com.linkedin.pinot.core.data.manager.config.TableDataManagerConfig;
import com.linkedin.pinot.core.indexsegment.IndexSegment;
import com.linkedin.pinot.core.indexsegment.columnar.ColumnarSegmentLoader;
import com.linkedin.pinot.core.segment.index.SegmentMetadataImpl;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
/**
* An implemenation of offline TableDataManager.
* Provide add and remove segment functionality.
*
*
*/
public class OfflineTableDataManager implements TableDataManager {
  // Re-assigned in init() to a per-table logger.
  private Logger LOGGER = LoggerFactory.getLogger(OfflineTableDataManager.class);
  // Lifecycle flag flipped by start()/shutDown(); volatile so readers observe it promptly.
  private volatile boolean _isStarted = false;
  // Guards all multi-structure updates (_segmentsMap/_activeSegments/_referenceCounts).
  private final Object _globalLock = new Object();
  private String _tableName;
  private ReadMode _readMode;
  private ExecutorService _queryExecutorService;
  private TableDataManagerConfig _tableDataManagerConfig;
  // Single-threaded executor used for asynchronous segment-directory deletion.
  private final ExecutorService _segmentAsyncExecutorService = Executors
      .newSingleThreadExecutor(new NamedThreadFactory("SegmentAsyncExecutorService"));
  private String _tableDataDir;
  private int _numberOfTableQueryExecutorThreads;
  private IndexLoadingConfigMetadata _indexLoadingConfigMetadata;
  // segmentName -> data manager for all segments currently held by this table.
  private final Map<String, OfflineSegmentDataManager> _segmentsMap = new ConcurrentHashMap<String, OfflineSegmentDataManager>();
  private final List<String> _activeSegments = new ArrayList<String>();
  private final List<String> _loadingSegments = new ArrayList<String>();
  // segmentName -> reference count; see decrementCount() for the removal protocol.
  private Map<String, AtomicInteger> _referenceCounts = new ConcurrentHashMap<String, AtomicInteger>();
  // Metrics; re-created with table-prefixed names in init().
  private Counter _currentNumberOfSegments = Metrics.newCounter(OfflineTableDataManager.class,
      CommonConstants.Metric.Server.CURRENT_NUMBER_OF_SEGMENTS);
  private Counter _currentNumberOfDocuments = Metrics.newCounter(OfflineTableDataManager.class,
      CommonConstants.Metric.Server.CURRENT_NUMBER_OF_DOCUMENTS);
  private Counter _numDeletedSegments = Metrics.newCounter(OfflineTableDataManager.class,
      CommonConstants.Metric.Server.NUMBER_OF_DELETED_SEGMENTS);

  public OfflineTableDataManager() {
  }
  @Override
  public void init(TableDataManagerConfig tableDataManagerConfig) {
    _tableDataManagerConfig = tableDataManagerConfig;
    _tableName = _tableDataManagerConfig.getTableName();
    // Re-bind the logger and metrics with table-prefixed names so log lines and
    // counters are distinguishable per table.
    LOGGER = LoggerFactory.getLogger(_tableName + "-OfflineTableDataManager");
    _currentNumberOfSegments =
        Metrics.newCounter(OfflineTableDataManager.class, _tableName + "-"
            + CommonConstants.Metric.Server.CURRENT_NUMBER_OF_SEGMENTS);
    _currentNumberOfDocuments =
        Metrics.newCounter(OfflineTableDataManager.class, _tableName + "-"
            + CommonConstants.Metric.Server.CURRENT_NUMBER_OF_DOCUMENTS);
    _numDeletedSegments =
        Metrics.newCounter(OfflineTableDataManager.class, _tableName + "-"
            + CommonConstants.Metric.Server.NUMBER_OF_DELETED_SEGMENTS);
    _tableDataDir = _tableDataManagerConfig.getDataDir();
    // Ensure the on-disk data directory exists before any segment is loaded.
    if (!new File(_tableDataDir).exists()) {
      new File(_tableDataDir).mkdirs();
    }
    _numberOfTableQueryExecutorThreads = _tableDataManagerConfig.getNumberOfTableQueryExecutorThreads();
    //_numberOfTableQueryExecutorThreads = 1;
    // Positive thread count -> bounded pool; otherwise an unbounded cached pool.
    if (_numberOfTableQueryExecutorThreads > 0) {
      _queryExecutorService =
          Executors.newFixedThreadPool(_numberOfTableQueryExecutorThreads, new NamedThreadFactory(
              "parallel-query-executor-" + _tableName));
    } else {
      _queryExecutorService =
          Executors.newCachedThreadPool(new NamedThreadFactory("parallel-query-executor-" + _tableName));
    }
    _readMode = ReadMode.valueOf(_tableDataManagerConfig.getReadMode());
    _indexLoadingConfigMetadata = _tableDataManagerConfig.getIndexLoadingConfigMetadata();
    LOGGER
        .info("Initialized table : " + _tableName + " with :\n\tData Directory: " + _tableDataDir
            + "\n\tRead Mode : " + _readMode + "\n\tQuery Exeutor with "
            + ((_numberOfTableQueryExecutorThreads > 0) ? _numberOfTableQueryExecutorThreads : "cached")
            + " threads");
  }
@Override
public void start() {
LOGGER.info("Trying to start table : " + _tableName);
if (_tableDataManagerConfig != null) {
if (_isStarted) {
LOGGER.warn("Already start the OfflineTableDataManager for table : " + _tableName);
} else {
_isStarted = true;
}
} else {
LOGGER.error("The OfflineTableDataManager hasn't been initialized.");
}
}
@Override
public void shutDown() {
LOGGER.info("Trying to shutdown table : " + _tableName);
if (_isStarted) {
_queryExecutorService.shutdown();
_segmentAsyncExecutorService.shutdown();
_tableDataManagerConfig = null;
_isStarted = false;
} else {
LOGGER.warn("Already shutDown table : " + _tableName);
}
}
  @Override
  public void addSegment(SegmentMetadata segmentMetadata) throws Exception {
    // Load the columnar segment from its metadata, then delegate to the
    // IndexSegment overload for registration.
    IndexSegment indexSegment =
        ColumnarSegmentLoader.loadSegment(segmentMetadata, _readMode, _indexLoadingConfigMetadata);
    LOGGER.info("Added IndexSegment : " + indexSegment.getSegmentName() + " to table : " + _tableName);
    addSegment(indexSegment);
  }
  @Override
  public void addSegment(final IndexSegment indexSegmentToAdd) {
    LOGGER.info("Trying to add a new segment to table : " + _tableName);
    // All bookkeeping updates happen under the global lock.
    synchronized (getGlobalLock()) {
      if (!_segmentsMap.containsKey(indexSegmentToAdd.getSegmentName())) {
        // Fresh segment: register it, mark it active and seed its reference count.
        LOGGER.info("Trying to add segment - " + indexSegmentToAdd.getSegmentName());
        _segmentsMap.put(indexSegmentToAdd.getSegmentName(), new OfflineSegmentDataManager(indexSegmentToAdd));
        markSegmentAsLoaded(indexSegmentToAdd.getSegmentName());
        _referenceCounts.put(indexSegmentToAdd.getSegmentName(), new AtomicInteger(1));
      } else {
        // Refresh: swap in the new segment, adjust the document counter and
        // destroy the replaced segment.
        LOGGER.info("Trying to refresh segment - " + indexSegmentToAdd.getSegmentName());
        OfflineSegmentDataManager segment = _segmentsMap.get(indexSegmentToAdd.getSegmentName());
        _segmentsMap.put(indexSegmentToAdd.getSegmentName(), new OfflineSegmentDataManager(indexSegmentToAdd));
        if (segment != null) {
          _currentNumberOfDocuments.dec(segment.getSegment().getTotalDocs());
          _currentNumberOfDocuments.inc(indexSegmentToAdd.getTotalDocs());
          segment.getSegment().destroy();
        }
      }
    }
  }
  @Override
  public void addSegment(SegmentZKMetadata indexSegmentToAdd) throws Exception {
    // Build segment metadata from the ZK record, load the segment, then delegate
    // to the IndexSegment overload for registration.
    SegmentMetadata segmentMetadata = new SegmentMetadataImpl((OfflineSegmentZKMetadata) indexSegmentToAdd);
    IndexSegment indexSegment =
        ColumnarSegmentLoader.loadSegment(segmentMetadata, _readMode, _indexLoadingConfigMetadata);
    LOGGER.info("Added IndexSegment : " + indexSegment.getSegmentName() + " to table : " + _tableName);
    addSegment(indexSegment);
  }
  @Override
  public void addSegment(ZkHelixPropertyStore<ZNRecord> propertyStore, AbstractTableConfig tableConfig,
      InstanceZKMetadata instanceZKMetadata, SegmentZKMetadata segmentZKMetadata) throws Exception {
    // Offline tables only need the ZK segment metadata; the other arguments are
    // ignored here (they matter for realtime implementations of this interface).
    addSegment(segmentZKMetadata);
  }
  @Override
  public void removeSegment(String indexSegmentToRemove) {
    if (!_isStarted) {
      LOGGER.warn("Could not remove segment, as the tracker is already stopped");
      return;
    }
    // Removal is reference-counted; actual teardown happens in decrementCount().
    decrementCount(indexSegmentToRemove);
  }
  /**
   * Drops one reference to {@code segmentId}. When this was the last reference,
   * the segment is unregistered, its gauges adjusted, the in-memory segment
   * destroyed, and its on-disk index directory deleted asynchronously.
   *
   * NOTE(review): the {@code count.get() == 1} check is only re-validated under the
   * global lock on the deletion path; two concurrent callers decrementing from a
   * count of 2 take the non-locked branch and could both succeed — confirm callers
   * serialize releases if exact counting matters.
   */
  public void decrementCount(final String segmentId) {
    if (!_referenceCounts.containsKey(segmentId)) {
      LOGGER.warn("Received command to delete unexisting segment - " + segmentId);
      return;
    }
    AtomicInteger count = _referenceCounts.get(segmentId);
    if (count.get() == 1) {
      OfflineSegmentDataManager segment = null;
      // Re-check under the lock so only one thread performs the removal.
      synchronized (getGlobalLock()) {
        if (count.get() == 1) {
          segment = _segmentsMap.remove(segmentId);
          _activeSegments.remove(segmentId);
          _referenceCounts.remove(segmentId);
        }
      }
      // Destroy outside the lock; 'segment' is non-null only for the winning thread.
      if (segment != null) {
        _currentNumberOfSegments.dec();
        _currentNumberOfDocuments.dec(segment.getSegment().getTotalDocs());
        _numDeletedSegments.inc();
        segment.getSegment().destroy();
      }
      LOGGER.info("Segment " + segmentId + " has been deleted");
      // Index directory removal is I/O-heavy, so it runs on the async executor.
      _segmentAsyncExecutorService.execute(new Runnable() {
        @Override
        public void run() {
          FileUtils.deleteQuietly(new File(_tableDataDir, segmentId));
          LOGGER.info("The index directory for the segment " + segmentId + " has been deleted");
        }
      });
    } else {
      count.decrementAndGet();
    }
  }
  /** Returns the current value of the started flag ({@code _isStarted}). */
  @Override
  public boolean isStarted() {
    return _isStarted;
  }
  /** Lock object guarding segment-map mutations (see {@code addSegment}/{@code decrementCount}). */
  public Object getGlobalLock() {
    return _globalLock;
  }
private void markSegmentAsLoaded(String segmentId) {
_currentNumberOfSegments.inc();
_currentNumberOfDocuments.inc(_segmentsMap.get(segmentId).getSegment().getTotalDocs());
if (!_activeSegments.contains(segmentId)) {
_activeSegments.add(segmentId);
}
_loadingSegments.remove(segmentId);
if (!_referenceCounts.containsKey(segmentId)) {
_referenceCounts.put(segmentId, new AtomicInteger(1));
}
}
  /**
   * Names of segments currently serving queries.
   * NOTE(review): returns the internal mutable list — callers can corrupt state;
   * consider an unmodifiable view or a copy.
   */
  public List<String> getActiveSegments() {
    return _activeSegments;
  }
  /**
   * Names of segments still being loaded.
   * NOTE(review): also exposes the internal mutable list.
   */
  public List<String> getLoadingSegments() {
    return _loadingSegments;
  }
@Override
public List<SegmentDataManager> getAllSegments() {
List<SegmentDataManager> ret = new ArrayList<SegmentDataManager>();
for (OfflineSegmentDataManager segment : _segmentsMap.values()) {
incrementCount(segment.getSegmentName());
ret.add(segment);
}
return ret;
}
  /**
   * Takes one additional reference to {@code segmentId}.
   * NOTE(review): throws NullPointerException if the segment has no entry in
   * {@code _referenceCounts}; visible callers check registration first — confirm
   * that invariant for external callers.
   */
  public void incrementCount(final String segmentId) {
    _referenceCounts.get(segmentId).incrementAndGet();
  }
  /** Executor used to run queries against this table ({@code _queryExecutorService}). */
  @Override
  public ExecutorService getExecutorService() {
    return _queryExecutorService;
  }
@Override
public List<SegmentDataManager> getSegments(List<String> segmentList) {
List<SegmentDataManager> ret = new ArrayList<SegmentDataManager>();
for (String segmentName : segmentList) {
if (_segmentsMap.containsKey(segmentName)) {
incrementCount(segmentName);
ret.add(_segmentsMap.get(segmentName));
}
}
return ret;
}
@Override
public OfflineSegmentDataManager getSegment(String segmentName) {
if (_segmentsMap.containsKey(segmentName)) {
incrementCount(segmentName);
return _segmentsMap.get(segmentName);
} else {
return null;
}
}
  /**
   * Releases the references previously taken by {@link #getSegments} /
   * {@link #getAllSegments} for every segment in the list.
   */
  @Override
  public void returnSegmentReaders(List<String> segmentList) {
    for (String segmentId : segmentList) {
      decrementCount(segmentId);
    }
  }
}
| apache-2.0 |
honestrock/light-admin | lightadmin-core/src/main/java/org/lightadmin/core/view/LightAdminTilesView.java | 2074 | /*
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lightadmin.core.view;
import org.apache.tiles.TilesContainer;
import org.apache.tiles.access.TilesAccess;
import org.apache.tiles.renderer.DefinitionRenderer;
import org.apache.tiles.request.ApplicationContext;
import org.apache.tiles.request.render.Renderer;
import org.apache.tiles.request.servlet.ServletUtil;
import org.springframework.web.servlet.view.tiles3.TilesView;
import static org.lightadmin.core.view.LightAdminSpringTilesInitializer.LIGHT_ADMIN_TILES_CONTAINER_ATTRIBUTE;
import static org.springframework.beans.PropertyAccessorFactory.forDirectFieldAccess;
public class LightAdminTilesView extends TilesView {
@Override
public void afterPropertiesSet() throws Exception {
ApplicationContext applicationContext = ServletUtil.getApplicationContext(getServletContext());
setApplicationContext(applicationContext);
if (getRenderer() == null) {
TilesContainer container = TilesAccess.getContainer(applicationContext, LIGHT_ADMIN_TILES_CONTAINER_ATTRIBUTE);
Renderer renderer = new DefinitionRenderer(container);
setRenderer(renderer);
}
}
private void setApplicationContext(ApplicationContext applicationContext) {
forDirectFieldAccess(this).setPropertyValue("applicationContext", applicationContext);
}
private Renderer getRenderer() {
return (Renderer) forDirectFieldAccess(this).getPropertyValue("renderer");
}
} | apache-2.0 |
akosyakov/intellij-community | platform/platform-impl/src/com/intellij/openapi/components/ServiceDescriptor.java | 1126 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.components;
import com.intellij.util.xmlb.annotations.Attribute;
/**
 * Descriptor for a service registered in plugin.xml: the interface the service is
 * exposed under, its implementation class, and whether it overrides a previously
 * registered service with the same interface.
 */
public class ServiceDescriptor {
  @Attribute("serviceInterface")
  public String serviceInterface;
  @Attribute("serviceImplementation")
  public String serviceImplementation;
  @Attribute("overrides")
  public boolean overrides;
  /** The exposed interface name; falls back to the implementation class when unset. */
  public String getInterface() {
    if (serviceInterface != null) {
      return serviceInterface;
    }
    return getImplementation();
  }
  /** The implementation class name. */
  public String getImplementation() {
    return serviceImplementation;
  }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-compute/v1/1.31.0/com/google/api/services/compute/model/ConsistentHashLoadBalancerSettingsHttpCookie.java | 3283 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* The information about the HTTP Cookie on which the hash function is based for load balancing
* policies that use a consistent hash.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
// NOTE(review): generated model class (see file header) — prefer regenerating over
// hand-editing; fields mirror the Compute Engine Discovery document.
@SuppressWarnings("javadoc")
public final class ConsistentHashLoadBalancerSettingsHttpCookie extends com.google.api.client.json.GenericJson {
  /**
   * Name of the cookie.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * Path to set for the cookie.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String path;
  /**
   * Lifetime of the cookie.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Duration ttl;
  /**
   * Name of the cookie.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }
  /**
   * Name of the cookie.
   * @param name name or {@code null} for none
   */
  public ConsistentHashLoadBalancerSettingsHttpCookie setName(java.lang.String name) {
    this.name = name;
    return this;
  }
  /**
   * Path to set for the cookie.
   * @return value or {@code null} for none
   */
  public java.lang.String getPath() {
    return path;
  }
  /**
   * Path to set for the cookie.
   * @param path path or {@code null} for none
   */
  public ConsistentHashLoadBalancerSettingsHttpCookie setPath(java.lang.String path) {
    this.path = path;
    return this;
  }
  /**
   * Lifetime of the cookie.
   * @return value or {@code null} for none
   */
  public Duration getTtl() {
    return ttl;
  }
  /**
   * Lifetime of the cookie.
   * @param ttl ttl or {@code null} for none
   */
  public ConsistentHashLoadBalancerSettingsHttpCookie setTtl(Duration ttl) {
    this.ttl = ttl;
    return this;
  }
  // Generic setter from GenericJson; keeps the fluent covariant return type.
  @Override
  public ConsistentHashLoadBalancerSettingsHttpCookie set(String fieldName, Object value) {
    return (ConsistentHashLoadBalancerSettingsHttpCookie) super.set(fieldName, value);
  }
  @Override
  public ConsistentHashLoadBalancerSettingsHttpCookie clone() {
    return (ConsistentHashLoadBalancerSettingsHttpCookie) super.clone();
  }
}
| apache-2.0 |
jomarko/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-core/kie-wb-common-stunner-api/kie-wb-common-stunner-backend-api/src/main/java/org/kie/workbench/common/stunner/core/registry/BackendRegistryFactory.java | 1131 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.core.registry;
import org.kie.workbench.common.stunner.core.diagram.Diagram;
import org.kie.workbench.common.stunner.core.registry.diagram.DiagramRegistry;
/**
 * Factory for backend registries.
 */
public interface BackendRegistryFactory extends RegistryFactory {
    /**
     * Constructs a new synchronized diagram registry.
     * @param <T> The type of diagram the registry holds.
     * @return a new {@link DiagramRegistry} for diagrams of type {@code T}.
     */
    <T extends Diagram> DiagramRegistry<T> newDiagramSynchronizedRegistry();
}
| apache-2.0 |
mhd911/openfire | src/plugins/rayo/src/java/com/rayo/core/verb/PublicCommand.java | 94 | package com.rayo.core.verb;
/**
 * Marker verb command with no payload of its own; all state and behavior come
 * from {@link AbstractVerbCommand}.
 */
public class PublicCommand extends AbstractVerbCommand {
}
| apache-2.0 |
MichaelEvans/assertj-android | assertj-android/src/main/java/org/assertj/android/api/graphics/drawable/AnimationDrawableAssert.java | 1673 | // Copyright 2013 Square, Inc.
package org.assertj.android.api.graphics.drawable;
import android.graphics.drawable.AnimationDrawable;
import static org.assertj.core.api.Assertions.assertThat;
/** Assertions for {@link AnimationDrawable} instances. */
public class AnimationDrawableAssert
    extends AbstractDrawableAssert<AnimationDrawableAssert, AnimationDrawable> {
  public AnimationDrawableAssert(AnimationDrawable actual) {
    super(actual, AnimationDrawableAssert.class);
  }

  /** Verifies the drawable has exactly {@code count} frames. */
  public AnimationDrawableAssert hasFrameCount(int count) {
    isNotNull();
    int actualCount = actual.getNumberOfFrames();
    assertThat(actualCount) //
        .overridingErrorMessage("Expected frame count <%s> but was <%s>.", count, actualCount) //
        .isEqualTo(count);
    return this;
  }

  /** Verifies the animation is configured to play only once. */
  public AnimationDrawableAssert isOneShot() {
    isNotNull();
    // Fixed typo in the failure message: was "one-short".
    assertThat(actual.isOneShot()) //
        .overridingErrorMessage("Expected to be one-shot but was not.") //
        .isTrue();
    return this;
  }

  /** Verifies the animation is configured to loop (not one-shot). */
  public AnimationDrawableAssert isNotOneShot() {
    isNotNull();
    assertThat(actual.isOneShot()) //
        .overridingErrorMessage("Expected to not be one-shot but was.") //
        .isFalse();
    return this;
  }

  /** Verifies the animation is currently running. */
  public AnimationDrawableAssert isRunning() {
    isNotNull();
    assertThat(actual.isRunning()) //
        .overridingErrorMessage("Expected to be running but was not.") //
        .isTrue();
    return this;
  }

  /** Verifies the animation is not currently running. */
  public AnimationDrawableAssert isNotRunning() {
    isNotNull();
    assertThat(actual.isRunning()) //
        .overridingErrorMessage("Expected to not be running but was.") //
        .isFalse();
    return this;
  }
}
| apache-2.0 |
WangTaoTheTonic/flink | flink-streaming-java/src/main/java/org/apache/flink/streaming/api/windowing/assigners/TumblingProcessingTimeWindows.java | 4774 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.windowing.assigners;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ProcessingTimeTrigger;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import java.util.Collection;
import java.util.Collections;
/**
* A {@link WindowAssigner} that windows elements into windows based on the current
* system time of the machine the operation is running on. Windows cannot overlap.
*
* <p>For example, in order to window into windows of 1 minute, every 10 seconds:
* <pre> {@code
* DataStream<Tuple2<String, Integer>> in = ...;
* KeyedStream<String, Tuple2<String, Integer>> keyed = in.keyBy(...);
* WindowedStream<Tuple2<String, Integer>, String, TimeWindows> windowed =
* keyed.window(TumblingProcessingTimeWindows.of(Time.of(1, MINUTES), Time.of(10, SECONDS));
* } </pre>
*/
public class TumblingProcessingTimeWindows extends WindowAssigner<Object, TimeWindow> {
	private static final long serialVersionUID = 1L;

	/** Window size in milliseconds. */
	private final long size;

	/** Shift applied to every window start, in milliseconds. */
	private final long offset;

	private TumblingProcessingTimeWindows(long size, long offset) {
		// Offsets outside [0, size) would produce windows that never align.
		if (offset < 0 || size <= offset) {
			throw new IllegalArgumentException("TumblingProcessingTimeWindows parameters must satisfy 0 <= offset < size");
		}
		this.size = size;
		this.offset = offset;
	}

	@Override
	public Collection<TimeWindow> assignWindows(Object element, long timestamp, WindowAssignerContext context) {
		// Processing-time tumbling windows ignore the element timestamp; the
		// machine's wall clock alone determines the window.
		long processingTime = context.getCurrentProcessingTime();
		long windowStart = TimeWindow.getWindowStartWithOffset(processingTime, offset, size);
		return Collections.singletonList(new TimeWindow(windowStart, windowStart + size));
	}

	/** Returns the window size in milliseconds. */
	public long getSize() {
		return size;
	}

	@Override
	public Trigger<Object, TimeWindow> getDefaultTrigger(StreamExecutionEnvironment env) {
		return ProcessingTimeTrigger.create();
	}

	@Override
	public String toString() {
		return "TumblingProcessingTimeWindows(" + size + ")";
	}

	/**
	 * Creates a {@code TumblingProcessingTimeWindows} assigner producing
	 * non-overlapping windows of the given size, with no offset.
	 *
	 * @param size The size of the generated windows.
	 * @return The time policy.
	 */
	public static TumblingProcessingTimeWindows of(Time size) {
		return new TumblingProcessingTimeWindows(size.toMilliseconds(), 0);
	}

	/**
	 * Creates a {@code TumblingProcessingTimeWindows} assigner whose window starts
	 * are shifted by {@code offset}. Useful for windows aligned to sub-hour marks
	 * (e.g. {@code of(Time.hours(1), Time.minutes(15))} starts windows at :15) or
	 * for aligning daily windows to a non-UTC timezone (e.g. UTC+8 local midnight
	 * via {@code of(Time.days(1), Time.hours(-8))}).
	 *
	 * @param size The size of the generated windows.
	 * @param offset The offset which window start would be shifted by.
	 * @return The time policy.
	 */
	public static TumblingProcessingTimeWindows of(Time size, Time offset) {
		return new TumblingProcessingTimeWindows(size.toMilliseconds(), offset.toMilliseconds());
	}

	@Override
	public TypeSerializer<TimeWindow> getWindowSerializer(ExecutionConfig executionConfig) {
		return new TimeWindow.Serializer();
	}

	@Override
	public boolean isEventTime() {
		// This assigner is driven purely by processing time.
		return false;
	}
}
| apache-2.0 |
itamaker/android-vcard | src/a_vcard/android/text/SpannableStringBuilder.java | 35717 | /*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package a_vcard.android.text;
import a_vcard.com.android.internal.util.ArrayUtils;
//import android.graphics.Paint;
//import android.graphics.Canvas;
import java.lang.reflect.Array;
/**
* This is the class for text whose content and markup can both be changed.
*/
public class SpannableStringBuilder
implements /*CharSequence, GetChars,*/ Spannable, Editable//, Appendable,
// GraphicsOperations
{
    /**
     * Create a new SpannableStringBuilder with empty contents
     */
    public SpannableStringBuilder() {
        // Delegates to the copy constructor with an empty string.
        this("");
    }
    /**
     * Create a new SpannableStringBuilder containing a copy of the
     * specified text, including its spans if any.
     * Delegates to the range copy constructor over the full length of {@code text}.
     */
    public SpannableStringBuilder(CharSequence text) {
        this(text, 0, text.length());
    }
    /**
     * Create a new SpannableStringBuilder containing a copy of the
     * specified slice of the specified text, including its spans if any.
     */
    public SpannableStringBuilder(CharSequence text, int start, int end) {
        int srclen = end - start;
        // The backing store is a gap buffer; initially the gap sits at the end,
        // spanning [mGapStart, mGapStart + mGapLength).
        int len = ArrayUtils.idealCharArraySize(srclen + 1);
        mText = new char[len];
        mGapStart = srclen;
        mGapLength = len - srclen;
        TextUtils.getChars(text, start, end, mText, 0);
        // Span bookkeeping uses four parallel arrays indexed 0..mSpanCount-1.
        mSpanCount = 0;
        int alloc = ArrayUtils.idealIntArraySize(0);
        mSpans = new Object[alloc];
        mSpanStarts = new int[alloc];
        mSpanEnds = new int[alloc];
        mSpanFlags = new int[alloc];
        if (text instanceof Spanned) {
            Spanned sp = (Spanned) text;
            Object[] spans = sp.getSpans(start, end, Object.class);
            for (int i = 0; i < spans.length; i++) {
                // NoCopySpan markers are deliberately not carried over.
                if (spans[i] instanceof NoCopySpan) {
                    continue;
                }
                int st = sp.getSpanStart(spans[i]) - start;
                int en = sp.getSpanEnd(spans[i]) - start;
                int fl = sp.getSpanFlags(spans[i]);
                // Clamp span boundaries to the copied slice.
                if (st < 0)
                    st = 0;
                if (st > end - start)
                    st = end - start;
                if (en < 0)
                    en = 0;
                if (en > end - start)
                    en = end - start;
                setSpan(spans[i], st, en, fl);
            }
        }
    }
public static SpannableStringBuilder valueOf(CharSequence source) {
if (source instanceof SpannableStringBuilder) {
return (SpannableStringBuilder) source;
} else {
return new SpannableStringBuilder(source);
}
}
/**
* Return the char at the specified offset within the buffer.
*/
public char charAt(int where) {
int len = length();
if (where < 0) {
throw new IndexOutOfBoundsException("charAt: " + where + " < 0");
} else if (where >= len) {
throw new IndexOutOfBoundsException("charAt: " + where +
" >= length " + len);
}
if (where >= mGapStart)
return mText[where + mGapLength];
else
return mText[where];
}
    /**
     * Return the number of chars in the buffer.
     */
    public int length() {
        // Logical length excludes the gap.
        return mText.length - mGapLength;
    }
    /**
     * Reallocates the backing array so it can hold {@code size} chars, preserving
     * the text on both sides of the gap and growing the gap by the extra capacity.
     * Span offsets recorded past the gap are shifted to the new raw positions.
     */
    private void resizeFor(int size) {
        int newlen = ArrayUtils.idealCharArraySize(size + 1);
        char[] newtext = new char[newlen];
        // Text after the gap moves to the tail of the new array.
        int after = mText.length - (mGapStart + mGapLength);
        System.arraycopy(mText, 0, newtext, 0, mGapStart);
        System.arraycopy(mText, mText.length - after,
                         newtext, newlen - after, after);
        // Raw span offsets past the gap shift by the size delta.
        for (int i = 0; i < mSpanCount; i++) {
            if (mSpanStarts[i] > mGapStart)
                mSpanStarts[i] += newlen - mText.length;
            if (mSpanEnds[i] > mGapStart)
                mSpanEnds[i] += newlen - mText.length;
        }
        int oldlen = mText.length;
        mText = newtext;
        mGapLength += mText.length - oldlen;
        // Invariant check: the gap must never vanish entirely.
        if (mGapLength < 1)
            new Exception("mGapLength < 1").printStackTrace();
    }
    /**
     * Slides the gap so it starts at logical offset {@code where}, shifting the
     * intervening text and fixing up raw span offsets. POINT-mode (and, at the
     * buffer end, PARAGRAPH-mode) boundaries that land exactly on {@code where}
     * are placed after the gap so later insertions at that offset move them.
     */
    private void moveGapTo(int where) {
        if (where == mGapStart)
            return;
        boolean atend = (where == length());
        if (where < mGapStart) {
            // Gap moves left: shift the chars in [where, mGapStart) rightward past the gap.
            int overlap = mGapStart - where;
            System.arraycopy(mText, where,
                             mText, mGapStart + mGapLength - overlap, overlap);
        } else /* where > mGapStart */ {
            // Gap moves right: shift the chars after the gap leftward into it.
            int overlap = where - mGapStart;
            System.arraycopy(mText, where + mGapLength - overlap,
                             mText, mGapStart, overlap);
        }
        // XXX be more clever
        for (int i = 0; i < mSpanCount; i++) {
            // Convert each raw offset to logical, then re-encode against the new gap.
            int start = mSpanStarts[i];
            int end = mSpanEnds[i];
            if (start > mGapStart)
                start -= mGapLength;
            if (start > where)
                start += mGapLength;
            else if (start == where) {
                int flag = (mSpanFlags[i] & START_MASK) >> START_SHIFT;
                if (flag == POINT || (atend && flag == PARAGRAPH))
                    start += mGapLength;
            }
            if (end > mGapStart)
                end -= mGapLength;
            if (end > where)
                end += mGapLength;
            else if (end == where) {
                int flag = (mSpanFlags[i] & END_MASK);
                if (flag == POINT || (atend && flag == PARAGRAPH))
                    end += mGapLength;
            }
            mSpanStarts[i] = start;
            mSpanEnds[i] = end;
        }
        mGapStart = where;
    }
    // Documentation from interface
    public SpannableStringBuilder insert(int where, CharSequence tb, int start, int end) {
        // Insertion is modeled as replacing the empty range [where, where).
        return replace(where, where, tb, start, end);
    }
    // Documentation from interface
    public SpannableStringBuilder insert(int where, CharSequence tb) {
        return replace(where, where, tb, 0, tb.length());
    }
    // Documentation from interface
    public SpannableStringBuilder delete(int start, int end) {
        SpannableStringBuilder ret = replace(start, end, "", 0, 0);
        // Shrink the backing array once the gap exceeds twice the remaining text.
        if (mGapLength > 2 * length())
            resizeFor(length());
        return ret; // == this
    }
    // Documentation from interface
    public void clear() {
        // Replace the entire contents with the empty string.
        replace(0, length(), "", 0, 0);
    }
    // Documentation from interface
    public void clearSpans() {
        // Iterate backwards so shrinking mSpanCount never skips an entry; listeners
        // are notified with offsets converted back to logical (pre-gap) positions.
        for (int i = mSpanCount - 1; i >= 0; i--) {
            Object what = mSpans[i];
            int ostart = mSpanStarts[i];
            int oend = mSpanEnds[i];
            if (ostart > mGapStart)
                ostart -= mGapLength;
            if (oend > mGapStart)
                oend -= mGapLength;
            mSpanCount = i;
            mSpans[i] = null; // release the reference for GC
            sendSpanRemoved(what, ostart, oend);
        }
    }
    // Documentation from interface
    public SpannableStringBuilder append(CharSequence text) {
        // Append is a replacement of the empty range at the end of the buffer.
        int length = length();
        return replace(length, length, text, 0, text.length());
    }
    // Documentation from interface
    public SpannableStringBuilder append(CharSequence text, int start, int end) {
        int length = length();
        return replace(length, length, text, start, end);
    }
    // Documentation from interface
    public SpannableStringBuilder append(char text) {
        return append(String.valueOf(text));
    }
    /** Replaces [start, end) with tb[tbstart, tbend), notifying watchers. */
    private int change(int start, int end,
                       CharSequence tb, int tbstart, int tbend) {
        return change(true, start, end, tb, tbstart, tbend);
    }
    /**
     * Core edit primitive: replaces [start, end) with tb[tbstart, tbend) by moving
     * the gap to {@code end} and resizing the gap to absorb the length delta, then
     * fixes up span boundaries and (optionally) notifies TextWatchers.
     *
     * @return the number of chars inserted ({@code tbend - tbstart})
     */
    private int change(boolean notify, int start, int end,
                       CharSequence tb, int tbstart, int tbend) {
        checkRange("replace", start, end);
        int ret = tbend - tbstart;
        TextWatcher[] recipients = null;
        if (notify)
            recipients = sendTextWillChange(start, end - start,
                                            tbend - tbstart);
        // Extend PARAGRAPH-mode spans whose boundary falls inside the replaced
        // range out to the next '\n' (or buffer end) so they stay on paragraph
        // boundaries after the edit.
        for (int i = mSpanCount - 1; i >= 0; i--) {
            if ((mSpanFlags[i] & SPAN_PARAGRAPH) == SPAN_PARAGRAPH) {
                int st = mSpanStarts[i];
                if (st > mGapStart)
                    st -= mGapLength;
                int en = mSpanEnds[i];
                if (en > mGapStart)
                    en -= mGapLength;
                int ost = st;
                int oen = en;
                int clen = length();
                if (st > start && st <= end) {
                    for (st = end; st < clen; st++)
                        if (st > end && charAt(st - 1) == '\n')
                            break;
                }
                if (en > start && en <= end) {
                    for (en = end; en < clen; en++)
                        if (en > end && charAt(en - 1) == '\n')
                            break;
                }
                if (st != ost || en != oen)
                    setSpan(mSpans[i], st, en, mSpanFlags[i]);
            }
        }
        // Position the gap at the end of the replaced range, grow the array if the
        // insertion will not fit, then adjust the gap to absorb the length delta.
        moveGapTo(end);
        if (tbend - tbstart >= mGapLength + (end - start))
            resizeFor(mText.length - mGapLength +
                      tbend - tbstart - (end - start));
        mGapStart += tbend - tbstart - (end - start);
        mGapLength -= tbend - tbstart - (end - start);
        if (mGapLength < 1)
            new Exception("mGapLength < 1").printStackTrace();
        TextUtils.getChars(tb, tbstart, tbend, mText, start);
        // Copy over any spans attached to the inserted text (clamped to the slice),
        // skipping spans already present on this buffer.
        if (tb instanceof Spanned) {
            Spanned sp = (Spanned) tb;
            Object[] spans = sp.getSpans(tbstart, tbend, Object.class);
            for (int i = 0; i < spans.length; i++) {
                int st = sp.getSpanStart(spans[i]);
                int en = sp.getSpanEnd(spans[i]);
                if (st < tbstart)
                    st = tbstart;
                if (en > tbend)
                    en = tbend;
                if (getSpanStart(spans[i]) < 0) {
                    setSpan(false, spans[i],
                            st - tbstart + start,
                            en - tbstart + start,
                            sp.getSpanFlags(spans[i]));
                }
            }
        }
        // no need for span fixup on pure insertion
        if (tbend > tbstart && end - start == 0) {
            if (notify) {
                sendTextChange(recipients, start, end - start, tbend - tbstart);
                sendTextHasChanged(recipients);
            }
            return ret;
        }
        // Re-anchor span boundaries that landed inside the (new) gap, honoring each
        // boundary's MARK/POINT/PARAGRAPH mode.
        boolean atend = (mGapStart + mGapLength == mText.length);
        for (int i = mSpanCount - 1; i >= 0; i--) {
            if (mSpanStarts[i] >= start &&
                mSpanStarts[i] < mGapStart + mGapLength) {
                int flag = (mSpanFlags[i] & START_MASK) >> START_SHIFT;
                if (flag == POINT || (flag == PARAGRAPH && atend))
                    mSpanStarts[i] = mGapStart + mGapLength;
                else
                    mSpanStarts[i] = start;
            }
            if (mSpanEnds[i] >= start &&
                mSpanEnds[i] < mGapStart + mGapLength) {
                int flag = (mSpanFlags[i] & END_MASK);
                if (flag == POINT || (flag == PARAGRAPH && atend))
                    mSpanEnds[i] = mGapStart + mGapLength;
                else
                    mSpanEnds[i] = start;
            }
            // remove 0-length SPAN_EXCLUSIVE_EXCLUSIVE
            // XXX send notification on removal
            if (mSpanEnds[i] < mSpanStarts[i]) {
                System.arraycopy(mSpans, i + 1,
                                 mSpans, i, mSpanCount - (i + 1));
                System.arraycopy(mSpanStarts, i + 1,
                                 mSpanStarts, i, mSpanCount - (i + 1));
                System.arraycopy(mSpanEnds, i + 1,
                                 mSpanEnds, i, mSpanCount - (i + 1));
                System.arraycopy(mSpanFlags, i + 1,
                                 mSpanFlags, i, mSpanCount - (i + 1));
                mSpanCount--;
            }
        }
        if (notify) {
            sendTextChange(recipients, start, end - start, tbend - tbstart);
            sendTextHasChanged(recipients);
        }
        return ret;
    }
    // Documentation from interface
    public SpannableStringBuilder replace(int start, int end, CharSequence tb) {
        return replace(start, end, tb, 0, tb.length());
    }
    // Documentation from interface
    public SpannableStringBuilder replace(final int start, final int end,
                        CharSequence tb, int tbstart, int tbend) {
        // Give each InputFilter a chance to substitute the incoming text.
        int filtercount = mFilters.length;
        for (int i = 0; i < filtercount; i++) {
            CharSequence repl = mFilters[i].filter(tb, tbstart, tbend,
                                                   this, start, end);
            if (repl != null) {
                tb = repl;
                tbstart = 0;
                tbend = repl.length();
            }
        }
        if (end == start && tbstart == tbend) {
            return this; // nothing removed, nothing inserted
        }
        if (end == start || tbstart == tbend) {
            // Pure insertion or pure deletion can be done in one change() call.
            change(start, end, tb, tbstart, tbend);
        } else {
            // Mixed replace: done as insert-then-delete around a temporary padding
            // char so span fixups behave; watchers see one composite change.
            int selstart = Selection.getSelectionStart(this);
            int selend = Selection.getSelectionEnd(this);
            // XXX just make the span fixups in change() do the right thing
            // instead of this madness!
            checkRange("replace", start, end);
            moveGapTo(end);
            TextWatcher[] recipients;
            recipients = sendTextWillChange(start, end - start,
                                            tbend - tbstart);
            int origlen = end - start;
            if (mGapLength < 2)
                resizeFor(length() + 1);
            for (int i = mSpanCount - 1; i >= 0; i--) {
                if (mSpanStarts[i] == mGapStart)
                    mSpanStarts[i]++;
                if (mSpanEnds[i] == mGapStart)
                    mSpanEnds[i]++;
            }
            // Temporary padding char keeps boundaries apart during the swap.
            mText[mGapStart] = ' ';
            mGapStart++;
            mGapLength--;
            if (mGapLength < 1)
                new Exception("mGapLength < 1").printStackTrace();
            int oldlen = (end + 1) - start;
            int inserted = change(false, start + 1, start + 1,
                                  tb, tbstart, tbend);
            change(false, start, start + 1, "", 0, 0);
            change(false, start + inserted, start + inserted + oldlen - 1,
                   "", 0, 0);
            /*
             * Special case to keep the cursor in the same position
             * if it was somewhere in the middle of the replaced region.
             * If it was at the start or the end or crossing the whole
             * replacement, it should already be where it belongs.
             * TODO: Is there some more general mechanism that could
             * accomplish this?
             */
            if (selstart > start && selstart < end) {
                long off = selstart - start;
                off = off * inserted / (end - start);
                selstart = (int) off + start;
                setSpan(false, Selection.SELECTION_START, selstart, selstart,
                        Spanned.SPAN_POINT_POINT);
            }
            if (selend > start && selend < end) {
                long off = selend - start;
                off = off * inserted / (end - start);
                selend = (int) off + start;
                setSpan(false, Selection.SELECTION_END, selend, selend,
                        Spanned.SPAN_POINT_POINT);
            }
            sendTextChange(recipients, start, origlen, inserted);
            sendTextHasChanged(recipients);
        }
        return this;
    }
    /**
     * Mark the specified range of text with the specified object.
     * The flags determine how the span will behave when text is
     * inserted at the start or end of the span's range.
     */
    public void setSpan(Object what, int start, int end, int flags) {
        setSpan(true, what, start, end, flags);
    }
    /**
     * Attaches or repositions {@code what} over [start, end). Converts the logical
     * offsets to raw gap-buffer offsets, updates the span in place if already
     * attached (growing the parallel arrays otherwise), and notifies listeners
     * when {@code send} is true.
     *
     * @throws RuntimeException if a PARAGRAPH-mode boundary is not at a '\n' or
     *         at the buffer edges
     */
    private void setSpan(boolean send,
                         Object what, int start, int end, int flags) {
        int nstart = start; // keep logical offsets for the change notification
        int nend = end;
        checkRange("setSpan", start, end);
        if ((flags & START_MASK) == (PARAGRAPH << START_SHIFT)) {
            if (start != 0 && start != length()) {
                char c = charAt(start - 1);
                if (c != '\n')
                    throw new RuntimeException(
                            "PARAGRAPH span must start at paragraph boundary");
            }
        }
        if ((flags & END_MASK) == PARAGRAPH) {
            if (end != 0 && end != length()) {
                char c = charAt(end - 1);
                if (c != '\n')
                    throw new RuntimeException(
                            "PARAGRAPH span must end at paragraph boundary");
            }
        }
        // Convert logical offsets to raw offsets; POINT boundaries exactly at the
        // gap are placed after it so insertions there push them along.
        if (start > mGapStart)
            start += mGapLength;
        else if (start == mGapStart) {
            int flag = (flags & START_MASK) >> START_SHIFT;
            if (flag == POINT || (flag == PARAGRAPH && start == length()))
                start += mGapLength;
        }
        if (end > mGapStart)
            end += mGapLength;
        else if (end == mGapStart) {
            int flag = (flags & END_MASK);
            if (flag == POINT || (flag == PARAGRAPH && end == length()))
                end += mGapLength;
        }
        // Already attached? Update in place and notify with old logical offsets.
        int count = mSpanCount;
        Object[] spans = mSpans;
        for (int i = 0; i < count; i++) {
            if (spans[i] == what) {
                int ostart = mSpanStarts[i];
                int oend = mSpanEnds[i];
                if (ostart > mGapStart)
                    ostart -= mGapLength;
                if (oend > mGapStart)
                    oend -= mGapLength;
                mSpanStarts[i] = start;
                mSpanEnds[i] = end;
                mSpanFlags[i] = flags;
                if (send)
                    sendSpanChanged(what, ostart, oend, nstart, nend);
                return;
            }
        }
        // Grow the four parallel span arrays together when full.
        if (mSpanCount + 1 >= mSpans.length) {
            int newsize = ArrayUtils.idealIntArraySize(mSpanCount + 1);
            Object[] newspans = new Object[newsize];
            int[] newspanstarts = new int[newsize];
            int[] newspanends = new int[newsize];
            int[] newspanflags = new int[newsize];
            System.arraycopy(mSpans, 0, newspans, 0, mSpanCount);
            System.arraycopy(mSpanStarts, 0, newspanstarts, 0, mSpanCount);
            System.arraycopy(mSpanEnds, 0, newspanends, 0, mSpanCount);
            System.arraycopy(mSpanFlags, 0, newspanflags, 0, mSpanCount);
            mSpans = newspans;
            mSpanStarts = newspanstarts;
            mSpanEnds = newspanends;
            mSpanFlags = newspanflags;
        }
        mSpans[mSpanCount] = what;
        mSpanStarts[mSpanCount] = start;
        mSpanEnds[mSpanCount] = end;
        mSpanFlags[mSpanCount] = flags;
        mSpanCount++;
        if (send)
            sendSpanAdded(what, nstart, nend);
    }
    /**
     * Remove the specified markup object from the buffer.
     */
    public void removeSpan(Object what) {
        for (int i = mSpanCount - 1; i >= 0; i--) {
            if (mSpans[i] == what) {
                // Convert raw offsets back to logical for the removal notification.
                int ostart = mSpanStarts[i];
                int oend = mSpanEnds[i];
                if (ostart > mGapStart)
                    ostart -= mGapLength;
                if (oend > mGapStart)
                    oend -= mGapLength;
                // Compact all four parallel arrays over the removed slot.
                int count = mSpanCount - (i + 1);
                System.arraycopy(mSpans, i + 1, mSpans, i, count);
                System.arraycopy(mSpanStarts, i + 1, mSpanStarts, i, count);
                System.arraycopy(mSpanEnds, i + 1, mSpanEnds, i, count);
                System.arraycopy(mSpanFlags, i + 1, mSpanFlags, i, count);
                mSpanCount--;
                mSpans[mSpanCount] = null; // release the trailing reference
                sendSpanRemoved(what, ostart, oend);
                return;
            }
        }
    }
/**
 * Return the buffer offset of the beginning of the specified
 * markup object, or -1 if it is not attached to this buffer.
 */
public int getSpanStart(Object what) {
    final int n = mSpanCount;
    final Object[] spans = mSpans;

    // Identity match, scanning newest-to-oldest.
    for (int i = n - 1; i >= 0; i--) {
        if (spans[i] == what) {
            final int offset = mSpanStarts[i];
            // Offsets stored past the gap include the gap length; undo that.
            return (offset > mGapStart) ? (offset - mGapLength) : offset;
        }
    }

    return -1;
}
/**
 * Return the buffer offset of the end of the specified
 * markup object, or -1 if it is not attached to this buffer.
 */
public int getSpanEnd(Object what) {
    final int n = mSpanCount;
    final Object[] spans = mSpans;

    // Identity match, scanning newest-to-oldest.
    for (int i = n - 1; i >= 0; i--) {
        if (spans[i] == what) {
            final int offset = mSpanEnds[i];
            // Offsets stored past the gap include the gap length; undo that.
            return (offset > mGapStart) ? (offset - mGapLength) : offset;
        }
    }

    return -1;
}
/**
 * Return the flags that were specified when the given markup
 * object was attached, or 0 if it is not attached to this buffer.
 * (Note: the original doc said "flags of the end", a copy-paste
 * remnant from getSpanEnd.)
 */
public int getSpanFlags(Object what) {
int count = mSpanCount;
Object[] spans = mSpans;
// Identity match, scanning newest-to-oldest.
for (int i = count - 1; i >= 0; i--) {
if (spans[i] == what) {
return mSpanFlags[i];
}
}
return 0;
}
/**
 * Return an array of the spans of the specified type that overlap
 * the specified range of the buffer. The kind may be Object.class to get
 * a list of all the spans regardless of type.
 *
 * NOTE(review): if {@code kind} is null and more than one span matches,
 * Array.newInstance(null, ...) below will throw NullPointerException —
 * confirm callers always pass a non-null class.
 */
public <T> T[] getSpans(int queryStart, int queryEnd, Class<T> kind) {
// Snapshot fields into locals for the duration of the scan.
int spanCount = mSpanCount;
Object[] spans = mSpans;
int[] starts = mSpanStarts;
int[] ends = mSpanEnds;
int[] flags = mSpanFlags;
int gapstart = mGapStart;
int gaplen = mGapLength;
int count = 0;
Object[] ret = null;
Object ret1 = null; // holds the first match so no array is allocated for 0/1 results
for (int i = 0; i < spanCount; i++) {
// Translate stored offsets out of gap-buffer space.
int spanStart = starts[i];
int spanEnd = ends[i];
if (spanStart > gapstart) {
spanStart -= gaplen;
}
if (spanEnd > gapstart) {
spanEnd -= gaplen;
}
// Reject spans entirely outside the query range.
if (spanStart > queryEnd) {
continue;
}
if (spanEnd < queryStart) {
continue;
}
// For non-empty span and query, merely touching at an endpoint
// does not count as overlapping.
if (spanStart != spanEnd && queryStart != queryEnd) {
if (spanStart == queryEnd)
continue;
if (spanEnd == queryStart)
continue;
}
if (kind != null && !kind.isInstance(spans[i])) {
continue;
}
if (count == 0) {
ret1 = spans[i];
count++;
} else {
if (count == 1) {
// Second match: allocate the result array (upper bound on the
// number of remaining candidates) and move the first match in.
ret = (Object[]) Array.newInstance(kind, spanCount - i + 1);
ret[0] = ret1;
}
// Prioritized spans are inserted before lower-priority ones;
// unprioritized spans are appended in buffer order.
int prio = flags[i] & SPAN_PRIORITY;
if (prio != 0) {
int j;
for (j = 0; j < count; j++) {
int p = getSpanFlags(ret[j]) & SPAN_PRIORITY;
if (prio > p) {
break;
}
}
System.arraycopy(ret, j, ret, j + 1, count - j);
ret[j] = spans[i];
count++;
} else {
ret[count++] = spans[i];
}
}
}
if (count == 0) {
return (T[]) ArrayUtils.emptyArray(kind);
}
if (count == 1) {
// Single match: allocate an exactly-sized array now.
ret = (Object[]) Array.newInstance(kind, 1);
ret[0] = ret1;
return (T[]) ret;
}
if (count == ret.length) {
return (T[]) ret;
}
// Trim the over-allocated array down to the actual match count.
Object[] nret = (Object[]) Array.newInstance(kind, count);
System.arraycopy(ret, 0, nret, 0, count);
return (T[]) nret;
}
/**
 * Return the next offset after <code>start</code> but less than or
 * equal to <code>limit</code> where a span of the specified type
 * begins or ends.
 */
public int nextSpanTransition(int start, int limit, Class kind) {
    if (kind == null) {
        kind = Object.class;
    }

    final int count = mSpanCount;
    final Object[] spans = mSpans;
    final int[] starts = mSpanStarts;
    final int[] ends = mSpanEnds;
    final int gapstart = mGapStart;
    final int gaplen = mGapLength;

    for (int i = 0; i < count; i++) {
        if (!kind.isInstance(spans[i])) {
            continue;
        }

        // Translate stored offsets out of gap-buffer space.
        int st = starts[i];
        if (st > gapstart) {
            st -= gaplen;
        }
        int en = ends[i];
        if (en > gapstart) {
            en -= gaplen;
        }

        // Shrink the limit toward the nearest following transition.
        if (st > start && st < limit) {
            limit = st;
        }
        if (en > start && en < limit) {
            limit = en;
        }
    }

    return limit;
}
/**
 * Return a new CharSequence containing a copy of the specified
 * range of this buffer, including the overlapping spans.
 */
public CharSequence subSequence(int start, int end) {
// Range validation is delegated to the copy constructor —
// presumably it rejects out-of-bounds ranges; not visible here.
return new SpannableStringBuilder(this, start, end);
}
/**
 * Copy the specified range of chars from this buffer into the
 * specified array, beginning at the specified offset.
 */
public void getChars(int start, int end, char[] dest, int destoff) {
checkRange("getChars", start, end);
// Three cases relative to the gap buffer:
if (end <= mGapStart) {
// 1) Range lies entirely before the gap: one straight copy.
System.arraycopy(mText, start, dest, destoff, end - start);
} else if (start >= mGapStart) {
// 2) Range lies entirely after the gap: shift the source index
// past the mGapLength unused chars.
System.arraycopy(mText, start + mGapLength,
dest, destoff, end - start);
} else {
// 3) Range straddles the gap: copy the part before the gap, then
// the part after it, skipping the gap itself.
System.arraycopy(mText, start, dest, destoff, mGapStart - start);
System.arraycopy(mText, mGapStart + mGapLength,
dest, destoff + (mGapStart - start),
end - mGapStart);
}
}
/**
 * Return a String containing a copy of the chars in this buffer.
 */
@Override
public String toString() {
    // getChars handles the gap, so the result is contiguous text.
    final char[] chars = new char[length()];
    getChars(0, chars.length, chars, 0);
    return String.valueOf(chars);
}
/**
 * Notifies all TextWatchers overlapping the affected range that the
 * text is about to change, and returns them so the same set can be
 * reused for the follow-up notifications.
 */
private TextWatcher[] sendTextWillChange(int start, int before, int after) {
    final TextWatcher[] watchers = getSpans(start, start + before, TextWatcher.class);

    for (TextWatcher watcher : watchers) {
        watcher.beforeTextChanged(this, start, before, after);
    }

    return watchers;
}
/** Delivers onTextChanged to the previously collected watchers. */
private void sendTextChange(TextWatcher[] recip, int start, int before,
        int after) {
    for (TextWatcher watcher : recip) {
        watcher.onTextChanged(this, start, before, after);
    }
}
/** Delivers afterTextChanged to the previously collected watchers. */
private void sendTextHasChanged(TextWatcher[] recip) {
    for (TextWatcher watcher : recip) {
        watcher.afterTextChanged(this);
    }
}
/** Notifies SpanWatchers overlapping [start, end) that a span was added. */
private void sendSpanAdded(Object what, int start, int end) {
    final SpanWatcher[] watchers = getSpans(start, end, SpanWatcher.class);

    for (SpanWatcher watcher : watchers) {
        watcher.onSpanAdded(this, what, start, end);
    }
}
/** Notifies SpanWatchers overlapping [start, end) that a span was removed. */
private void sendSpanRemoved(Object what, int start, int end) {
    final SpanWatcher[] watchers = getSpans(start, end, SpanWatcher.class);

    for (SpanWatcher watcher : watchers) {
        watcher.onSpanRemoved(this, what, start, end);
    }
}
/**
 * Notifies SpanWatchers that a span moved from (s, e) to (st, en);
 * watchers are gathered over the union of the old and new extents.
 */
private void sendSpanChanged(Object what, int s, int e, int st, int en) {
    final SpanWatcher[] watchers =
            getSpans(Math.min(s, st), Math.max(e, en), SpanWatcher.class);

    for (SpanWatcher watcher : watchers) {
        watcher.onSpanChanged(this, what, s, e, st, en);
    }
}
/** Formats a range as "(start ... end)" for exception messages. */
private static String region(int start, int end) {
    return new StringBuilder()
            .append('(')
            .append(start)
            .append(" ... ")
            .append(end)
            .append(')')
            .toString();
}
/**
 * Validates that [start, end) is a well-formed range within this buffer,
 * throwing IndexOutOfBoundsException (tagged with the operation name)
 * otherwise. Check order is preserved: inverted range, then overflow,
 * then negative offsets.
 */
private void checkRange(final String operation, int start, int end) {
    if (end < start) {
        throw new IndexOutOfBoundsException(
                operation + " " + region(start, end) + " has end before start");
    }

    final int len = length();
    if (start > len || end > len) {
        throw new IndexOutOfBoundsException(
                operation + " " + region(start, end) + " ends beyond length " + len);
    }

    if (start < 0 || end < 0) {
        throw new IndexOutOfBoundsException(
                operation + " " + region(start, end) + " starts before 0");
    }
}
/**
 * Returns true if {@code c} is a printable ASCII character
 * (space ' ' through tilde '~'). Used only by the (commented-out)
 * dump() debugging helper below.
 */
private boolean isprint(char c) { // XXX
    // Direct boolean expression instead of if/else returning literals.
    return c >= ' ' && c <= '~';
}
/*
private static final int startFlag(int flag) {
return (flag >> 4) & 0x0F;
}
private static final int endFlag(int flag) {
return flag & 0x0F;
}
public void dump() { // XXX
for (int i = 0; i < mGapStart; i++) {
System.out.print('|');
System.out.print(' ');
System.out.print(isprint(mText[i]) ? mText[i] : '.');
System.out.print(' ');
}
for (int i = mGapStart; i < mGapStart + mGapLength; i++) {
System.out.print('|');
System.out.print('(');
System.out.print(isprint(mText[i]) ? mText[i] : '.');
System.out.print(')');
}
for (int i = mGapStart + mGapLength; i < mText.length; i++) {
System.out.print('|');
System.out.print(' ');
System.out.print(isprint(mText[i]) ? mText[i] : '.');
System.out.print(' ');
}
System.out.print('\n');
for (int i = 0; i < mText.length + 1; i++) {
int found = 0;
int wfound = 0;
for (int j = 0; j < mSpanCount; j++) {
if (mSpanStarts[j] == i) {
found = 1;
wfound = j;
break;
}
if (mSpanEnds[j] == i) {
found = 2;
wfound = j;
break;
}
}
if (found == 1) {
if (startFlag(mSpanFlags[wfound]) == MARK)
System.out.print("( ");
if (startFlag(mSpanFlags[wfound]) == PARAGRAPH)
System.out.print("< ");
else
System.out.print("[ ");
} else if (found == 2) {
if (endFlag(mSpanFlags[wfound]) == POINT)
System.out.print(") ");
if (endFlag(mSpanFlags[wfound]) == PARAGRAPH)
System.out.print("> ");
else
System.out.print("] ");
} else {
System.out.print(" ");
}
}
System.out.print("\n");
}
*/
//
// /**
// * Don't call this yourself -- exists for Canvas to use internally.
// * {@hide}
// */
// public void drawText(Canvas c, int start, int end,
// float x, float y, Paint p) {
// checkRange("drawText", start, end);
//
// if (end <= mGapStart) {
// c.drawText(mText, start, end - start, x, y, p);
// } else if (start >= mGapStart) {
// c.drawText(mText, start + mGapLength, end - start, x, y, p);
// } else {
// char[] buf = TextUtils.obtain(end - start);
//
// getChars(start, end, buf, 0);
// c.drawText(buf, 0, end - start, x, y, p);
// TextUtils.recycle(buf);
// }
// }
//
// /**
// * Don't call this yourself -- exists for Paint to use internally.
// * {@hide}
// */
// public float measureText(int start, int end, Paint p) {
// checkRange("measureText", start, end);
//
// float ret;
//
// if (end <= mGapStart) {
// ret = p.measureText(mText, start, end - start);
// } else if (start >= mGapStart) {
// ret = p.measureText(mText, start + mGapLength, end - start);
// } else {
// char[] buf = TextUtils.obtain(end - start);
//
// getChars(start, end, buf, 0);
// ret = p.measureText(buf, 0, end - start);
// TextUtils.recycle(buf);
// }
//
// return ret;
// }
//
// /**
// * Don't call this yourself -- exists for Paint to use internally.
// * {@hide}
// */
// public int getTextWidths(int start, int end, float[] widths, Paint p) {
// checkRange("getTextWidths", start, end);
//
// int ret;
//
// if (end <= mGapStart) {
// ret = p.getTextWidths(mText, start, end - start, widths);
// } else if (start >= mGapStart) {
// ret = p.getTextWidths(mText, start + mGapLength, end - start,
// widths);
// } else {
// char[] buf = TextUtils.obtain(end - start);
//
// getChars(start, end, buf, 0);
// ret = p.getTextWidths(buf, 0, end - start, widths);
// TextUtils.recycle(buf);
// }
//
// return ret;
// }
//
// Documentation from interface
/**
 * Sets the input filters applied to this buffer. The array reference is
 * stored directly (no defensive copy).
 *
 * @throws IllegalArgumentException if {@code filters} is null
 */
public void setFilters(InputFilter[] filters) {
    if (filters == null) {
        // Include a message so the failure is diagnosable from the stack trace.
        throw new IllegalArgumentException("filters cannot be null");
    }
    mFilters = filters;
}
// Documentation from interface
// Returns the internal filter array directly (no defensive copy);
// callers should treat the result as read-only.
public InputFilter[] getFilters() {
return mFilters;
}
// Shared empty array used as the default so mFilters is never null.
private static final InputFilter[] NO_FILTERS = new InputFilter[0];
private InputFilter[] mFilters = NO_FILTERS;
// Gap-buffer storage: mText holds the characters with an unused "gap" of
// mGapLength chars beginning at offset mGapStart. Span offsets stored at
// or past the gap are kept shifted up by mGapLength and translated back
// on read (see getSpanStart/getChars).
private char[] mText;
private int mGapStart;
private int mGapLength;
// Parallel arrays describing the attached spans; only the first
// mSpanCount entries are valid.
private Object[] mSpans;
private int[] mSpanStarts;
private int[] mSpanEnds;
private int[] mSpanFlags;
private int mSpanCount;
// Span-edge flag values packed into mSpanFlags via START_MASK (high
// nibble, see START_SHIFT) and END_MASK (low nibble). POINT edges
// sitting exactly at the gap are stored on the far side of it, MARK
// edges on the near side (see setSpan); PARAGRAPH edges must coincide
// with a paragraph boundary, which setSpan enforces.
private static final int MARK = 1;
private static final int POINT = 2;
private static final int PARAGRAPH = 3;
private static final int START_MASK = 0xF0;
private static final int END_MASK = 0x0F;
private static final int START_SHIFT = 4;
}
| apache-2.0 |
asposewordscloud/Aspose.Words_Cloud_SDK_For_Java | src/main/java/com/aspose/words/model/LineStyle.java | 237 | package com.aspose.words.model;
public class LineStyle {

    /**
     * Renders this object in the conventional generated-model debug
     * format: {@code "class LineStyle {\n}\n"}.
     */
    @Override
    public String toString() {
        return "class LineStyle {\n"
                + "}\n";
    }
}
| mit |
WeRockStar/java-design-patterns | factory-kit/src/main/java/com/iluwatar/factorykit/WeaponType.java | 1269 | /**
* The MIT License
* Copyright (c) 2014 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.factorykit;
/**
 * Enumerates the kinds of {@link Weapon} that the factory kit can
 * produce.
 */
public enum WeaponType {
SWORD, AXE, BOW, SPEAR
}
| mit |
xiaohanz/softcontroller | third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java | 10002 | /*
* Copyright (c) 2003, the JUNG Project and the Regents of the University
* of California
* All rights reserved.
*
* This software is open-source under the BSD license; see either
* "license.txt" or
* http://jung.sourceforge.net/license.txt for a description.
*/
/*
*
* Created on Oct 29, 2003
*/
package edu.uci.ics.jung.algorithms.util;
import java.util.AbstractCollection;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.Vector;
import org.apache.commons.collections15.IteratorUtils;
/**
* An array-based binary heap implementation of a priority queue,
* which also provides
* efficient <code>update()</code> and <code>contains</code> operations.
* It contains extra infrastructure (a hash table) to keep track of the
* position of each element in the array; thus, if the key value of an element
* changes, it may be "resubmitted" to the heap via <code>update</code>
* so that the heap can reposition it efficiently, as necessary.
*
* @author Joshua O'Madadhain
*/
public class MapBinaryHeap<T>
extends AbstractCollection<T>
implements Queue<T>
{
private Vector<T> heap = new Vector<T>(); // holds the heap as an implicit binary tree
private Map<T,Integer> object_indices = new HashMap<T,Integer>(); // maps each object in the heap to its index in the heap
private Comparator<T> comp;
private final static int TOP = 0; // the index of the top of the heap
/**
* Creates a <code>MapBinaryHeap</code> whose heap ordering
* is based on the ordering of the elements specified by <code>c</code>.
*/
public MapBinaryHeap(Comparator<T> comp)
{
initialize(comp);
}
/**
* Creates a <code>MapBinaryHeap</code> whose heap ordering
* will be based on the <i>natural ordering</i> of the elements,
* which must be <code>Comparable</code>.
*/
public MapBinaryHeap()
{
initialize(new ComparableComparator());
}
/**
* Creates a <code>MapBinaryHeap</code> based on the specified
* collection whose heap ordering
* will be based on the <i>natural ordering</i> of the elements,
* which must be <code>Comparable</code>.
*/
public MapBinaryHeap(Collection<T> c)
{
this();
addAll(c);
}
/**
* Creates a <code>MapBinaryHeap</code> based on the specified collection
* whose heap ordering
* is based on the ordering of the elements specified by <code>c</code>.
*/
public MapBinaryHeap(Collection<T> c, Comparator<T> comp)
{
this(comp);
addAll(c);
}
private void initialize(Comparator<T> comp)
{
this.comp = comp;
clear();
}
/**
* @see Collection#clear()
*/
@Override
public void clear()
{
object_indices.clear();
heap.clear();
}
/**
* Inserts <code>o</code> into this collection.
*/
@Override
public boolean add(T o)
{
int i = heap.size(); // index 1 past the end of the heap
heap.setSize(i+1);
percolateUp(i, o);
return true;
}
/**
* Returns <code>true</code> if this collection contains no elements, and
* <code>false</code> otherwise.
*/
@Override
public boolean isEmpty()
{
return heap.isEmpty();
}
/**
* Returns the element at the top of the heap; does not
* alter the heap.
*/
public T peek()
{
if (heap.size() > 0)
return heap.elementAt(TOP);
else
return null;
}
/**
* Removes the element at the top of this heap, and returns it.
* @deprecated Use {@link MapBinaryHeap#poll()}
* or {@link MapBinaryHeap#remove()} instead.
*/
@Deprecated
public T pop() throws NoSuchElementException
{
return this.remove();
}
/**
* Returns the size of this heap.
*/
@Override
public int size()
{
return heap.size();
}
/**
* Informs the heap that this object's internal key value has been
* updated, and that its place in the heap may need to be shifted
* (up or down).
* @param o
*/
public void update(T o)
{
// Since we don't know whether the key value increased or
// decreased, we just percolate up followed by percolating down;
// one of the two will have no effect.
int cur = object_indices.get(o).intValue(); // current index
int new_idx = percolateUp(cur, o);
percolateDown(new_idx);
}
/**
* @see Collection#contains(java.lang.Object)
*/
@Override
public boolean contains(Object o)
{
return object_indices.containsKey(o);
}
/**
* Moves the element at position <code>cur</code> closer to
* the bottom of the heap, or returns if no further motion is
* necessary. Calls itself recursively if further motion is
* possible.
*/
private void percolateDown(int cur)
{
int left = lChild(cur);
int right = rChild(cur);
int smallest;
if ((left < heap.size()) &&
(comp.compare(heap.elementAt(left), heap.elementAt(cur)) < 0)) {
smallest = left;
} else {
smallest = cur;
}
if ((right < heap.size()) &&
(comp.compare(heap.elementAt(right), heap.elementAt(smallest)) < 0)) {
smallest = right;
}
if (cur != smallest)
{
swap(cur, smallest);
percolateDown(smallest);
}
}
/**
* Moves the element <code>o</code> at position <code>cur</code>
* as high as it can go in the heap. Returns the new position of the
* element in the heap.
*/
private int percolateUp(int cur, T o)
{
int i = cur;
while ((i > TOP) && (comp.compare(heap.elementAt(parent(i)), o) > 0))
{
T parentElt = heap.elementAt(parent(i));
heap.setElementAt(parentElt, i);
object_indices.put(parentElt, new Integer(i)); // reset index to i (new location)
i = parent(i);
}
// place object in heap at appropriate place
object_indices.put(o, new Integer(i));
heap.setElementAt(o, i);
return i;
}
/**
* Returns the index of the left child of the element at
* index <code>i</code> of the heap.
* @param i
* @return the index of the left child of the element at
* index <code>i</code> of the heap
*/
private int lChild(int i)
{
return (i<<1) + 1;
}
/**
* Returns the index of the right child of the element at
* index <code>i</code> of the heap.
* @param i
* @return the index of the right child of the element at
* index <code>i</code> of the heap
*/
private int rChild(int i)
{
return (i<<1) + 2;
}
/**
* Returns the index of the parent of the element at
* index <code>i</code> of the heap.
* @param i
* @return the index of the parent of the element at index i of the heap
*/
private int parent(int i)
{
return (i-1)>>1;
}
/**
* Swaps the positions of the elements at indices <code>i</code>
* and <code>j</code> of the heap.
* @param i
* @param j
*/
private void swap(int i, int j)
{
T iElt = heap.elementAt(i);
T jElt = heap.elementAt(j);
heap.setElementAt(jElt, i);
object_indices.put(jElt, new Integer(i));
heap.setElementAt(iElt, j);
object_indices.put(iElt, new Integer(j));
}
/**
* Comparator used if none is specified in the constructor.
* @author Joshua O'Madadhain
*/
private class ComparableComparator implements Comparator<T>
{
/**
* @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
*/
@SuppressWarnings("unchecked")
public int compare(T arg0, T arg1)
{
if (!(arg0 instanceof Comparable) || !(arg1 instanceof Comparable))
throw new IllegalArgumentException("Arguments must be Comparable");
return ((Comparable<T>)arg0).compareTo(arg1);
}
}
/**
* Returns an <code>Iterator</code> that does not support modification
* of the heap.
*/
@Override
public Iterator<T> iterator()
{
return IteratorUtils.<T>unmodifiableIterator(heap.iterator());
}
/**
* This data structure does not support the removal of arbitrary elements.
*/
@Override
public boolean remove(Object o)
{
throw new UnsupportedOperationException();
}
/**
* This data structure does not support the removal of arbitrary elements.
*/
@Override
public boolean removeAll(Collection<?> c)
{
throw new UnsupportedOperationException();
}
/**
* This data structure does not support the removal of arbitrary elements.
*/
@Override
public boolean retainAll(Collection<?> c)
{
throw new UnsupportedOperationException();
}
public T element() throws NoSuchElementException
{
T top = this.peek();
if (top == null)
throw new NoSuchElementException();
return top;
}
public boolean offer(T o)
{
return add(o);
}
public T poll()
{
T top = this.peek();
if (top != null)
{
T bottom_elt = heap.lastElement();
heap.setElementAt(bottom_elt, TOP);
object_indices.put(bottom_elt, new Integer(TOP));
heap.setSize(heap.size() - 1); // remove the last element
if (heap.size() > 1)
percolateDown(TOP);
object_indices.remove(top);
}
return top;
}
public T remove()
{
T top = this.poll();
if (top == null)
throw new NoSuchElementException();
return top;
}
}
| epl-1.0 |
FauxFaux/jdk9-jaxws | src/java.xml.bind/share/classes/com/sun/xml/internal/bind/v2/schemagen/xmlschema/SimpleTypeHost.java | 1573 | /*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.xml.internal.bind.v2.schemagen.xmlschema;
import com.sun.xml.internal.txw2.TypedXmlWriter;
import com.sun.xml.internal.txw2.annotation.XmlElement;
/**
 * <p><b>
 * Auto-generated, do not edit.
 * </b></p>
 */
public interface SimpleTypeHost
extends TypeHost, TypedXmlWriter
{
// Writes a nested <xs:simpleType> element and returns its typed writer.
@XmlElement
public SimpleType simpleType();
}
| gpl-2.0 |
malaporte/kaziranga | test/src/jdk/nashorn/internal/parser/test/ParserTest.java | 6651 | /*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.nashorn.internal.parser.test;
import static jdk.nashorn.internal.runtime.Source.readFully;
import static jdk.nashorn.internal.runtime.Source.sourceFor;
import java.io.File;
import jdk.nashorn.internal.parser.Parser;
import jdk.nashorn.internal.runtime.Context;
import jdk.nashorn.internal.runtime.ErrorManager;
import jdk.nashorn.internal.runtime.Source;
import jdk.nashorn.internal.runtime.options.Options;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
 * Run tests to check Nashorn's parser.
 *
 * Walks one or two directories of .js files (the basic suite and,
 * optionally, the test262 suite) and asserts that every non-excluded
 * file parses without errors.
 */
@SuppressWarnings("javadoc")
public class ParserTest {
// Both switches come from system properties set by the test harness.
private static final boolean VERBOSE = Boolean.valueOf(System.getProperty("parsertest.verbose"));
private static final boolean TEST262 = Boolean.valueOf(System.getProperty("parsertest.test262"));
private static final String TEST_BASIC_DIR = System.getProperty("test.basic.dir");
private static final String TEST262_SUITE_DIR = System.getProperty("test262.suite.dir");
// Decides which files/directories to skip; content is null when the
// filter is asked about a directory.
interface TestFilter {
public boolean exclude(File file, String content);
}
private static void log(final String msg) {
org.testng.Reporter.log(msg, true);
}
// Nashorn context shared by all parses in one run; built in setupTest.
private Context context;
@BeforeClass
public void setupTest() {
final Options options = new Options("nashorn");
options.set("anon.functions", true);
options.set("parse.only", true);
options.set("scripting", true);
options.set("const.as.var", true);
final ErrorManager errors = new ErrorManager();
this.context = new Context(options, errors, Thread.currentThread().getContextClassLoader());
}
@AfterClass
public void tearDownTest() {
this.context = null;
}
@Test
public void parseAllTests() {
if (TEST262) {
// test262 files marked @negative are expected to fail parsing, so
// they are excluded here.
parseTestSet(TEST262_SUITE_DIR, new TestFilter() {
@Override
public boolean exclude(final File file, final String content) {
return content != null && content.contains("@negative");
}
});
}
// The "es6" subdirectory of the basic suite is skipped entirely.
parseTestSet(TEST_BASIC_DIR, new TestFilter() {
@Override
public boolean exclude(final File file, final String content) {
return file.getName().equals("es6");
}
});
}
// Parses every .js file under testSet, then fails the TestNG test if
// any file failed to parse. Counters are reset per test set.
private void parseTestSet(final String testSet, final TestFilter filter) {
passed = 0;
failed = 0;
skipped = 0;
final File testSetDir = new File(testSet);
if (! testSetDir.isDirectory()) {
log("WARNING: " + testSetDir + " not found or not a directory");
return;
}
log(testSetDir.getAbsolutePath());
parseJSDirectory(testSetDir, filter);
log(testSet + " parse done!");
log("parse ok: " + passed);
log("parse failed: " + failed);
log("parse skipped: " + skipped);
if (failed != 0) {
Assert.fail(failed + " tests failed to compile in " + testSetDir.getAbsolutePath());
}
}
// number of scripts that parsed fine
private int passed;
// number of scripts resulting in parse failure
private int failed;
// scripts that were skipped - all tests with @negative are
// skipped for now.
private int skipped;
// Recursively walks dir, parsing each .js file found.
private void parseJSDirectory(final File dir, final TestFilter filter) {
if (filter != null && filter.exclude(dir, null)) {
return;
}
for (final File f : dir.listFiles()) {
if (f.isDirectory()) {
parseJSDirectory(f, filter);
} else if (f.getName().endsWith(".js")) {
parseJSFile(f, filter);
}
}
}
// Parses a single file, updating the passed/failed/skipped counters.
// Any throwable (not just parse errors) counts as a failure.
private void parseJSFile(final File file, final TestFilter filter) {
if (VERBOSE) {
log("Begin parsing " + file.getAbsolutePath());
}
try {
final char[] buffer = readFully(file);
boolean excluded = false;
if (filter != null) {
final String content = new String(buffer);
excluded = filter.exclude(file, content);
}
if (excluded) {
if (VERBOSE) {
log("Skipping " + file.getAbsolutePath());
}
skipped++;
return;
}
// Collect error messages in the log; limit 0 means errors are
// reported rather than thrown.
final ErrorManager errors = new ErrorManager() {
@Override
public void error(final String msg) {
log(msg);
}
};
errors.setLimit(0);
final Source source = sourceFor(file.getAbsolutePath(), buffer);
new Parser(context.getEnv(), source, errors, context.getEnv()._strict, null).parse();
if (errors.getNumberOfErrors() > 0) {
log("Parse failed: " + file.getAbsolutePath());
failed++;
} else {
passed++;
}
} catch (final Throwable exp) {
exp.printStackTrace();
log("Parse failed: " + file.getAbsolutePath() + " : " + exp);
if (VERBOSE) {
exp.printStackTrace(System.out);
}
failed++;
}
if (VERBOSE) {
log("Done parsing " + file.getAbsolutePath());
}
}
}
| gpl-2.0 |
siosio/intellij-community | platform/lang-impl/src/com/intellij/openapi/roots/libraries/ui/RootDetector.java | 2214 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.libraries.ui;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
/**
 * Provides automatic detection of root type for files added to a library. Implementations of this class should be returned from
 * {@link LibraryRootsComponentDescriptor#getRootDetectors} method
 *
 * @see RootFilter
 * @see DescendentBasedRootFilter
 */
public abstract class RootDetector {
private final OrderRootType myRootType;
private final boolean myJarDirectory;
private final String myPresentableRootTypeName;
/**
 * @param rootType type of roots produced by this detector
 * @param jarDirectory whether the detected roots are directories of jars rather than roots themselves
 * @param presentableRootTypeName root type name shown to the user
 */
protected RootDetector(OrderRootType rootType, boolean jarDirectory, String presentableRootTypeName) {
myRootType = rootType;
myJarDirectory = jarDirectory;
myPresentableRootTypeName = presentableRootTypeName;
}
/** @return true if the detected roots are directories containing jars */
public boolean isJarDirectory() {
return myJarDirectory;
}
/** @return the type of roots this detector produces */
public OrderRootType getRootType() {
return myRootType;
}
/** @return the root type name shown to the user */
public String getPresentableRootTypeName() {
return myPresentableRootTypeName;
}
/**
 * Find suitable roots in {@code rootCandidate} or its descendants.
 * @param rootCandidate file selected in the file chooser by user
 * @param progressIndicator can be used to show information about the progress and to abort searching if process is cancelled
 * @return suitable roots
 */
@NotNull
public abstract Collection<VirtualFile> detectRoots(@NotNull VirtualFile rootCandidate, @NotNull ProgressIndicator progressIndicator);
}
| apache-2.0 |
tottiyq/Mycat-Server | src/main/java/io/mycat/server/config/loader/zkloader/ZkUserConfigLoader.java | 1767 | package io.mycat.server.config.loader.zkloader;
import com.alibaba.fastjson.JSON;
import io.mycat.server.config.ConfigException;
import io.mycat.server.config.node.UserConfig;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.utils.ZKPaths;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toMap;
/**
 * <p>
 * load user configuration from Zookeeper.
 * </p>
 * Created by v1.lion on 2015/9/27.
 */
public class ZkUserConfigLoader extends AbstractZKLoaders {
private static final Logger LOGGER = LoggerFactory.getLogger(ZkUserConfigLoader.class);
//directory name of user config in zookeeper
private static final String USERS_DIRECTORY = "user";
//hold user name mapping to UserConfig; populated by fetchConfig
private Map<String, UserConfig> userConfigs;
public ZkUserConfigLoader(final String clusterID) {
super(clusterID, ZkSystemConfigLoader.SERVER_CONFIG_DIRECTORY);
}
// Reads every child node under the "user" directory, deserializes each
// one from JSON into a UserConfig, and indexes the results by user name.
// NOTE(review): toMap without a merge function throws on duplicate user
// names — presumably ZooKeeper node names make that impossible; confirm.
@Override
public void fetchConfig(CuratorFramework zkConnection) {
//user config path in zookeeper
//example: /mycat-cluster-1/server-config/user
this.userConfigs = super
.fetchChildren(zkConnection, USERS_DIRECTORY)
.stream()
.map(username -> (UserConfig) JSON.parseObject(
super.fetchData(zkConnection, USERS_DIRECTORY, username), UserConfig.class))
.collect(toMap(UserConfig::getName, Function.identity()));
LOGGER.trace("done fetch user config : {}", this.userConfigs);
}
// Returns the map built by the last fetchConfig call (null before then).
public Map<String, UserConfig> getUserConfig() {
return userConfigs;
}
}
| apache-2.0 |
Phaneendra-Huawei/demo | protocols/ospf/api/src/main/java/org/onosproject/ospf/controller/OspfRouterListener.java | 1306 | /*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ospf.controller;
/**
 * Abstraction of an OSPF Router Listener.
 * Allows providers interested in OSPF router events to be notified.
 */
public interface OspfRouterListener {

    /**
     * Notifies that a router is added.
     *
     * @param ospfRouter OSPF router instance
     */
    void routerAdded(OspfRouter ospfRouter);

    /**
     * Notifies that a router is removed.
     *
     * @param ospfRouter OSPF router instance
     */
    void routerRemoved(OspfRouter ospfRouter);

    /**
     * Notifies that the router has changed in some way.
     *
     * @param ospfRouter OSPF router instance
     */
    void routerChanged(OspfRouter ospfRouter);
} | apache-2.0 |
jlpedrosa/camel | components/camel-elasticsearch/src/main/java/org/apache/camel/component/elasticsearch/converter/ElasticsearchActionRequestConverter.java | 8691 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.elasticsearch.converter;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.camel.Converter;
import org.apache.camel.Exchange;
import org.apache.camel.component.elasticsearch.ElasticsearchConstants;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.exists.ExistsRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequest.Item;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.xcontent.XContentBuilder;
@Converter
public final class ElasticsearchActionRequestConverter {

    private ElasticsearchActionRequestConverter() {
    }

    // Update requests

    /**
     * Wraps a message body in an {@link UpdateRequest}, copying index name, type,
     * parent and write-consistency level from the exchange headers.
     *
     * @param document the message body: an UpdateRequest (returned as-is), byte[],
     *                 Map, String or XContentBuilder
     * @param exchange source of the Elasticsearch header values
     * @return the update request, or {@code null} if the body type is unsupported
     */
    @SuppressWarnings("unchecked")
    private static UpdateRequest createUpdateRequest(Object document, Exchange exchange) {
        if (document instanceof UpdateRequest) {
            return (UpdateRequest) document;
        }
        UpdateRequest updateRequest = new UpdateRequest();
        if (document instanceof byte[]) {
            updateRequest.doc((byte[]) document);
        } else if (document instanceof Map) {
            updateRequest.doc((Map<String, Object>) document);
        } else if (document instanceof String) {
            updateRequest.doc((String) document);
        } else if (document instanceof XContentBuilder) {
            updateRequest.doc((XContentBuilder) document);
        } else {
            return null;
        }
        return updateRequest
                .consistencyLevel(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_CONSISTENCY_LEVEL, WriteConsistencyLevel.class))
                .parent(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARENT, String.class))
                .index(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_INDEX_NAME, String.class))
                .type(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_INDEX_TYPE, String.class))
                .id(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_INDEX_ID, String.class));
    }

    // Index requests

    /**
     * Wraps a message body in an {@link IndexRequest}, copying index name, type,
     * parent and write-consistency level from the exchange headers. The document
     * id is NOT set here; callers add it where appropriate.
     *
     * @param document the message body: an IndexRequest (returned as-is), byte[],
     *                 Map, String or XContentBuilder
     * @param exchange source of the Elasticsearch header values
     * @return the index request, or {@code null} if the body type is unsupported
     */
    @SuppressWarnings("unchecked")
    private static IndexRequest createIndexRequest(Object document, Exchange exchange) {
        if (document instanceof IndexRequest) {
            return (IndexRequest) document;
        }
        IndexRequest indexRequest = new IndexRequest();
        if (document instanceof byte[]) {
            indexRequest.source((byte[]) document);
        } else if (document instanceof Map) {
            indexRequest.source((Map<String, Object>) document);
        } else if (document instanceof String) {
            indexRequest.source((String) document);
        } else if (document instanceof XContentBuilder) {
            indexRequest.source((XContentBuilder) document);
        } else {
            return null;
        }
        return indexRequest
                .consistencyLevel(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_CONSISTENCY_LEVEL, WriteConsistencyLevel.class))
                .parent(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARENT, String.class))
                .index(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_INDEX_NAME, String.class))
                .type(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_INDEX_TYPE, String.class));
    }

    /**
     * Converts a message body to an {@link IndexRequest} whose id comes from the
     * {@code PARAM_INDEX_ID} header.
     * NOTE: throws NullPointerException for unsupported body types because
     * {@link #createIndexRequest} returns {@code null} (pre-existing behaviour,
     * kept for compatibility).
     */
    @Converter
    public static IndexRequest toIndexRequest(Object document, Exchange exchange) {
        return createIndexRequest(document, exchange)
                .id(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_ID, String.class));
    }

    /**
     * Converts a message body to an {@link UpdateRequest} whose id comes from the
     * {@code PARAM_INDEX_ID} header. Same null-body caveat as {@link #toIndexRequest}.
     */
    @Converter
    public static UpdateRequest toUpdateRequest(Object document, Exchange exchange) {
        return createUpdateRequest(document, exchange)
                .id(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_ID, String.class));
    }

    /** Builds a {@link GetRequest} for the given document id using the index/type headers. */
    @Converter
    public static GetRequest toGetRequest(String id, Exchange exchange) {
        return new GetRequest(exchange.getIn().getHeader(
                ElasticsearchConstants.PARAM_INDEX_NAME, String.class))
                        .type(exchange.getIn().getHeader(
                                ElasticsearchConstants.PARAM_INDEX_TYPE,
                                String.class)).id(id);
    }

    /**
     * Builds an {@link ExistsRequest} for the configured index.
     * NOTE(review): the {@code id} parameter is ignored; the request only checks
     * index existence — confirm this is intended by callers.
     */
    @Converter
    public static ExistsRequest toExistsRequest(String id, Exchange exchange) {
        return new ExistsRequest(exchange.getIn().getHeader(
                ElasticsearchConstants.PARAM_INDEX_NAME, String.class));
    }

    /** Aggregates a list of {@link Item}s into a single {@link MultiGetRequest}. */
    @Converter
    @SuppressWarnings("unchecked")
    public static MultiGetRequest toMultiGetRequest(Object document, Exchange exchange) {
        List<Item> items = (List<Item>) document;
        MultiGetRequest multiGetRequest = new MultiGetRequest();
        for (Item item : items) {
            multiGetRequest.add(item);
        }
        return multiGetRequest;
    }

    /** Aggregates a list of {@link SearchRequest}s into a single {@link MultiSearchRequest}. */
    @Converter
    @SuppressWarnings("unchecked")
    public static MultiSearchRequest toMultiSearchRequest(Object document, Exchange exchange) {
        List<SearchRequest> items = (List<SearchRequest>) document;
        MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
        for (SearchRequest item : items) {
            multiSearchRequest.add(item);
        }
        return multiSearchRequest;
    }

    /** Builds a {@link DeleteRequest} for the given document id using the index/type headers. */
    @Converter
    public static DeleteRequest toDeleteRequest(String id, Exchange exchange) {
        return new DeleteRequest()
                .index(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_INDEX_NAME,
                        String.class))
                .type(exchange.getIn().getHeader(
                        ElasticsearchConstants.PARAM_INDEX_TYPE,
                        String.class)).id(id);
    }

    /**
     * Builds a {@link SearchRequest} from a query payload (byte[], Map, String or
     * XContentBuilder), targeting the index/type named in the exchange headers.
     *
     * @return the search request, or {@code null} if the query type is unsupported
     */
    @SuppressWarnings("unchecked")
    @Converter
    public static SearchRequest toSearchRequest(Object queryObject, Exchange exchange) {
        SearchRequest searchRequest = new SearchRequest(exchange.getIn()
                .getHeader(ElasticsearchConstants.PARAM_INDEX_NAME, String.class))
                        .types(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_TYPE, String.class));
        // Setup the query object into the search request
        if (queryObject instanceof byte[]) {
            searchRequest.source((byte[]) queryObject);
        } else if (queryObject instanceof Map) {
            searchRequest.source((Map<String, Object>) queryObject);
        } else if (queryObject instanceof String) {
            searchRequest.source((String) queryObject);
        } else if (queryObject instanceof XContentBuilder) {
            searchRequest.source((XContentBuilder) queryObject);
        } else {
            // Cannot convert the queryObject into SearchRequest
            return null;
        }
        return searchRequest;
    }

    /**
     * Converts a list of documents into one {@link BulkRequest} of index operations.
     * NOTE: an unsupported document type makes {@link #createIndexRequest} return
     * {@code null}, which {@code BulkRequest.add} rejects (pre-existing behaviour).
     */
    @Converter
    public static BulkRequest toBulkRequest(List<Object> documents,
            Exchange exchange) {
        BulkRequest request = new BulkRequest();
        for (Object document : documents) {
            request.add(createIndexRequest(document, exchange));
        }
        return request;
    }
}
| apache-2.0 |
siosio/intellij-community | java/compiler/impl/src/com/intellij/packaging/impl/ui/properties/DirectoryElementPropertiesPanel.java | 1042 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.packaging.impl.ui.properties;
import com.intellij.packaging.impl.elements.DirectoryPackagingElement;
import com.intellij.packaging.ui.ArtifactEditorContext;
/**
 * Properties panel shown in the artifact editor for a {@link DirectoryPackagingElement}.
 * All behaviour is inherited from {@link ElementWithManifestPropertiesPanel};
 * this subclass only fixes the element type parameter.
 */
public class DirectoryElementPropertiesPanel extends ElementWithManifestPropertiesPanel<DirectoryPackagingElement> {
  public DirectoryElementPropertiesPanel(DirectoryPackagingElement element, ArtifactEditorContext context) {
    super(element, context);
  }
}
| apache-2.0 |
nikhilvibhav/camel | core/camel-core/src/test/java/org/apache/camel/processor/EventNotifierExchangeSentExampleTest.java | 2185 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.CamelContext;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.DefaultCamelContext;
import org.junit.jupiter.api.Test;
/**
 * Example test showing how to register a custom {@code EventNotifier}
 * (here {@code MyLoggingSentEventNotifer}) on the CamelContext so that
 * exchange-sent events can be observed. The START/END SNIPPET markers are
 * referenced by the documentation and must be kept as-is.
 */
public class EventNotifierExchangeSentExampleTest extends ContextTestSupport {

    @Override
    protected CamelContext createCamelContext() throws Exception {
        DefaultCamelContext context = (DefaultCamelContext) super.createCamelContext();
        // START SNIPPET: e1
        // add event notifier where we can log the times it took to process
        // exchanges sent to an endpoint
        context.getManagementStrategy().addEventNotifier(new MyLoggingSentEventNotifer());
        // END SNIPPET: e1
        return context;
    }

    /**
     * Sends one message through the route and verifies it arrives; the
     * registered notifier observes the sends as a side effect.
     */
    @Test
    public void testExchangeSent() throws Exception {
        getMockEndpoint("mock:result").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // second route adds a delay so the notifier has a measurable duration
                from("direct:start").to("direct:bar").to("mock:result");
                from("direct:bar").delay(1000);
            }
        };
    }
}
| apache-2.0 |
nikhilvibhav/camel | components/camel-file-watch/src/test/java/org/apache/camel/component/file/watch/FileWatchComponentRecursiveTest.java | 2405 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.watch;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.UUID;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.jupiter.api.Test;
/**
 * Verifies that the file-watch component honours the {@code recursive} option:
 * the recursive endpoint must see events in nested subdirectories while the
 * non-recursive endpoint only sees events at the watched root.
 */
public class FileWatchComponentRecursiveTest extends FileWatchComponentTestBase {

    @Override
    protected RoutesBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // same watched directory, once with and once without recursion
                from("file-watch://" + testPath() + "?recursive=true").to("mock:recursive");
                from("file-watch://" + testPath() + "?recursive=false").to("mock:nonRecursive");
            }
        };
    }

    /**
     * Creates a file four directories deep and checks the expected event counts
     * on both endpoints.
     */
    @Test
    public void testCreateFileInSubdirectories() throws Exception {
        Path newDir = Paths.get(testPath(), "a", "b", "c", "d");
        newDir = Files.createDirectories(newDir);
        File newFile = new File(newDir.toFile(), UUID.randomUUID().toString());
        if (!newFile.createNewFile()) {
            throw new RuntimeException("cannot create file");
        }
        MockEndpoint recursive = getMockEndpoint("mock:recursive");
        recursive.expectedMessageCount(5); // 4 directories and one file
        recursive.assertIsSatisfied();
        MockEndpoint nonRecursive = getMockEndpoint("mock:nonRecursive");
        nonRecursive.expectedMessageCount(1); // 1 directory
        nonRecursive.assertIsSatisfied();
    }
}
| apache-2.0 |
bhathiya/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.persistence/src/main/java/org/wso2/carbon/apimgt/persistence/LCManagerFactory.java | 2897 | /*
* Copyright (c) 2020, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.apimgt.persistence;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.cache.Cache;
import javax.cache.Caching;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLStreamException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.apimgt.persistence.exceptions.PersistenceException;
import org.wso2.carbon.apimgt.persistence.utils.RegistryLCManager;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.xml.sax.SAXException;
public class LCManagerFactory {
    private static final Log log = LogFactory.getLog(LCManagerFactory.class);

    // Lazily created shared instance; guarded by the class lock in getInstance().
    private static LCManagerFactory instance;

    private LCManagerFactory() {
    }

    /**
     * Returns the shared factory instance, creating it on first use.
     * <p>
     * Synchronized so that two threads cannot race through the null check and
     * publish separate instances (the previous unsynchronized lazy init was not
     * thread-safe).
     *
     * @return the singleton {@code LCManagerFactory}
     */
    public static synchronized LCManagerFactory getInstance() {
        if (instance == null) {
            instance = new LCManagerFactory();
        }
        return instance;
    }

    /**
     * Returns the {@link RegistryLCManager} for the current tenant, serving it
     * from the shared lifecycle cache when an entry already exists and otherwise
     * building it from the registry and caching it.
     *
     * @return lifecycle manager for the tenant resolved from the thread-local carbon context
     * @throws PersistenceException if the lifecycle registry resource cannot be read or parsed
     */
    public RegistryLCManager getLCManager() throws PersistenceException {
        String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain();
        // Cache key is tenant-scoped so tenants never share lifecycle data.
        String cacheName = tenantDomain + "_" + APIConstants.LC_CACHE_NAME;
        int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        Cache lcCache = Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
                .getCache(APIConstants.LC_CACHE_NAME);
        RegistryLCManager lcManager = (RegistryLCManager) lcCache.get(cacheName);
        if (lcManager != null) {
            log.debug("Lifecycle info servered from Cache.");
            return lcManager;
        } else {
            try {
                log.debug("Lifecycle info not found in Cache.");
                lcManager = new RegistryLCManager(tenantId);
                lcCache.put(cacheName, lcManager);
                return lcManager;
            } catch (RegistryException | XMLStreamException | ParserConfigurationException | SAXException
                    | IOException e) {
                throw new PersistenceException("Error while accessing the lifecycle resource ", e);
            }
        }
    }
}
| apache-2.0 |
amyvmiwei/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java | 8372 | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALProvider;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.SequenceFile.Metadata;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
/**
* Implementation of {@link WALProvider.Writer} that delegates to
* SequenceFile.Writer. Legacy implementation only used for compat tests.
*
* Note that because this class writes to the legacy hadoop-specific SequenceFile
* format, users of it must write {@link HLogKey} keys and not arbitrary
* {@link WALKey}s because the latter are not Writables (nor made to work with
* Hadoop serialization).
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class SequenceFileLogWriter extends WriterBase {
  private final Log LOG = LogFactory.getLog(this.getClass());
  // The sequence file we delegate to.
  private SequenceFile.Writer writer;
  // This is the FSDataOutputStream instance that is the 'out' instance
  // in the SequenceFile.Writer 'writer' instance above. Obtained via
  // reflection in init() so sync() callers can reach the underlying stream.
  private FSDataOutputStream writer_out;

  // Legacy stuff from pre-PB WAL metadata.
  private static final Text WAL_VERSION_KEY = new Text("version");
  private static final Text WAL_COMPRESSION_TYPE_KEY = new Text("compression.type");
  private static final Text DICTIONARY_COMPRESSION_TYPE = new Text("dictionary");

  /**
   * Default constructor.
   */
  public SequenceFileLogWriter() {
    super();
  }

  /**
   * Create sequence file Metadata for our WAL file with version and compression
   * type (if any).
   * @param conf
   * @param compress
   * @return Metadata instance.
   */
  private static Metadata createMetadata(final Configuration conf,
      final boolean compress) {
    TreeMap<Text, Text> metaMap = new TreeMap<Text, Text>();
    metaMap.put(WAL_VERSION_KEY, new Text("1"));
    if (compress) {
      // Currently we only do one compression type.
      metaMap.put(WAL_COMPRESSION_TYPE_KEY, DICTIONARY_COMPRESSION_TYPE);
    }
    return new Metadata(metaMap);
  }

  // Opens the underlying SequenceFile.Writer. First tries, via reflection, the
  // createWriter overload that does NOT auto-create parent directories
  // (HADOOP-6840 / HBASE-2312); if that overload is unavailable it falls back
  // to the older createWriter API.
  @Override
  public void init(FileSystem fs, Path path, Configuration conf, boolean overwritable)
  throws IOException {
    super.init(fs, path, conf, overwritable);
    boolean compress = initializeCompressionContext(conf, path);
    // Create a SF.Writer instance.
    try {
      // reflection for a version of SequenceFile.createWriter that doesn't
      // automatically create the parent directory (see HBASE-2312)
      this.writer = (SequenceFile.Writer) SequenceFile.class
        .getMethod("createWriter", new Class[] {FileSystem.class,
            Configuration.class, Path.class, Class.class, Class.class,
            Integer.TYPE, Short.TYPE, Long.TYPE, Boolean.TYPE,
            CompressionType.class, CompressionCodec.class, Metadata.class})
        .invoke(null, new Object[] {fs, conf, path, HLogKey.class, WALEdit.class,
            Integer.valueOf(FSUtils.getDefaultBufferSize(fs)),
            Short.valueOf((short)
              conf.getInt("hbase.regionserver.hlog.replication",
              FSUtils.getDefaultReplication(fs, path))),
            Long.valueOf(conf.getLong("hbase.regionserver.hlog.blocksize",
                FSUtils.getDefaultBlockSize(fs, path))),
            Boolean.valueOf(false) /*createParent*/,
            SequenceFile.CompressionType.NONE, new DefaultCodec(),
            createMetadata(conf, compress)
            });
    } catch (InvocationTargetException ite) {
      // function was properly called, but threw it's own exception
      throw new IOException(ite.getCause());
    } catch (Exception e) {
      // ignore all other exceptions. related to reflection failure
      // (e.g. NoSuchMethodException on older Hadoop); fall back below
    }
    // if reflection failed, use the old createWriter
    if (this.writer == null) {
      LOG.debug("new createWriter -- HADOOP-6840 -- not available");
      this.writer = SequenceFile.createWriter(fs, conf, path,
        HLogKey.class, WALEdit.class,
        FSUtils.getDefaultBufferSize(fs),
        (short) conf.getInt("hbase.regionserver.hlog.replication",
          FSUtils.getDefaultReplication(fs, path)),
        conf.getLong("hbase.regionserver.hlog.blocksize",
          FSUtils.getDefaultBlockSize(fs, path)),
        SequenceFile.CompressionType.NONE,
        new DefaultCodec(),
        null,
        createMetadata(conf, compress));
    } else {
      if (LOG.isTraceEnabled()) LOG.trace("Using new createWriter -- HADOOP-6840");
    }

    this.writer_out = getSequenceFilePrivateFSDataOutputStreamAccessible();
    if (LOG.isTraceEnabled()) LOG.trace("Path=" + path + ", compression=" + compress);
  }

  // Get at the private FSDataOutputStream inside in SequenceFile so we can
  // call sync on it. Make it accessible.
  // Returns null if no field named "out" is found on the writer class.
  private FSDataOutputStream getSequenceFilePrivateFSDataOutputStreamAccessible()
  throws IOException {
    FSDataOutputStream out = null;
    final Field fields [] = this.writer.getClass().getDeclaredFields();
    final String fieldName = "out";
    for (int i = 0; i < fields.length; ++i) {
      if (fieldName.equals(fields[i].getName())) {
        try {
          // Make the 'out' field up in SF.Writer accessible.
          fields[i].setAccessible(true);
          out = (FSDataOutputStream)fields[i].get(this.writer);
          break;
        } catch (IllegalAccessException ex) {
          throw new IOException("Accessing " + fieldName, ex);
        } catch (SecurityException e) {
          LOG.warn("Does not have access to out field from FSDataOutputStream",
              e);
        }
      }
    }
    return out;
  }

  // NOTE: append/sync/getLength all translate the NPE raised by a concurrent
  // close() (which nulls this.writer) into an IOException for callers.
  @Override
  public void append(WAL.Entry entry) throws IOException {
    entry.setCompressionContext(compressionContext);
    try {
      this.writer.append(entry.getKey(), entry.getEdit());
    } catch (NullPointerException npe) {
      // Concurrent close...
      throw new IOException(npe);
    }
  }

  @Override
  public void close() throws IOException {
    if (this.writer != null) {
      try {
        this.writer.close();
      } catch (NullPointerException npe) {
        // Can get a NPE coming up from down in DFSClient$DFSOutputStream#close
        LOG.warn(npe);
      }
      this.writer = null;
    }
  }

  @Override
  public void sync() throws IOException {
    try {
      this.writer.syncFs();
    } catch (NullPointerException npe) {
      // Concurrent close...
      throw new IOException(npe);
    }
  }

  @Override
  public long getLength() throws IOException {
    try {
      return this.writer.getLength();
    } catch (NullPointerException npe) {
      // Concurrent close...
      throw new IOException(npe);
    }
  }

  /**
   * @return The dfsclient out stream up inside SF.Writer made accessible, or
   * null if not available.
   */
  public FSDataOutputStream getWriterFSDataOutputStream() {
    return this.writer_out;
  }
}
| apache-2.0 |
vaibhavbhatnagar/CrossoverProb | src/uk/co/jemos/podam/test/dto/annotations/CharValuePojo.java | 6363 | /**
*
*/
package uk.co.jemos.podam.test.dto.annotations;
import java.io.Serializable;
import uk.co.jemos.podam.common.PodamCharValue;
import uk.co.jemos.podam.test.utils.PodamTestConstants;
/**
* POJO to test {@link PodamCharValue} annotation
*
* @author mtedone
*
*/
public class CharValuePojo implements Serializable {

	// ------------------->> Constants

	// Serialization version; bump only on incompatible changes.
	private static final long serialVersionUID = 1L;

	// ------------------->> Instance / Static variables

	// Each field below exercises a different combination of the
	// @PodamCharValue attributes (minValue / maxValue / charValue),
	// for both the primitive char and the Character wrapper.

	@PodamCharValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE)
	private char charFieldWithMinValueOnly;

	@PodamCharValue(maxValue = PodamTestConstants.NUMBER_INT_ONE_HUNDRED)
	private char charFieldWithMaxValueOnly;

	@PodamCharValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE, maxValue = PodamTestConstants.NUMBER_INT_ONE_HUNDRED)
	private char charFieldWithMinAndMaxValue;

	// Exercises an explicit blank character as the precise value.
	@PodamCharValue(charValue = ' ')
	private char charFieldWithBlankInPreciseValue;

	@PodamCharValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE)
	private Character charObjectFieldWithMinValueOnly;

	@PodamCharValue(maxValue = PodamTestConstants.NUMBER_INT_ONE_HUNDRED)
	private Character charObjectFieldWithMaxValueOnly;

	@PodamCharValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE, maxValue = PodamTestConstants.NUMBER_INT_ONE_HUNDRED)
	private Character charObjectFieldWithMinAndMaxValue;

	@PodamCharValue(charValue = PodamTestConstants.CHAR_PRECISE_VALUE)
	private char charFieldWithPreciseValue;

	// ------------------->> Constructors

	// ------------------->> Public methods

	// ------------------->> Getters / Setters

	/**
	 * @return the charFieldWithMinValueOnly
	 */
	public char getCharFieldWithMinValueOnly() {
		return charFieldWithMinValueOnly;
	}

	/**
	 * @param charFieldWithMinValueOnly
	 *            the charFieldWithMinValueOnly to set
	 */
	public void setCharFieldWithMinValueOnly(char charFieldWithMinValueOnly) {
		this.charFieldWithMinValueOnly = charFieldWithMinValueOnly;
	}

	/**
	 * @return the charFieldWithMaxValueOnly
	 */
	public char getCharFieldWithMaxValueOnly() {
		return charFieldWithMaxValueOnly;
	}

	/**
	 * @param charFieldWithMaxValueOnly
	 *            the charFieldWithMaxValueOnly to set
	 */
	public void setCharFieldWithMaxValueOnly(char charFieldWithMaxValueOnly) {
		this.charFieldWithMaxValueOnly = charFieldWithMaxValueOnly;
	}

	/**
	 * @return the charFieldWithMinAndMaxValue
	 */
	public char getCharFieldWithMinAndMaxValue() {
		return charFieldWithMinAndMaxValue;
	}

	/**
	 * @param charFieldWithMinAndMaxValue
	 *            the charFieldWithMinAndMaxValue to set
	 */
	public void setCharFieldWithMinAndMaxValue(char charFieldWithMinAndMaxValue) {
		this.charFieldWithMinAndMaxValue = charFieldWithMinAndMaxValue;
	}

	/**
	 * @return the charObjectFieldWithMinValueOnly
	 */
	public Character getCharObjectFieldWithMinValueOnly() {
		return charObjectFieldWithMinValueOnly;
	}

	/**
	 * @param charObjectFieldWithMinValueOnly
	 *            the charObjectFieldWithMinValueOnly to set
	 */
	public void setCharObjectFieldWithMinValueOnly(
			Character charObjectFieldWithMinValueOnly) {
		this.charObjectFieldWithMinValueOnly = charObjectFieldWithMinValueOnly;
	}

	/**
	 * @return the charObjectFieldWithMaxValueOnly
	 */
	public Character getCharObjectFieldWithMaxValueOnly() {
		return charObjectFieldWithMaxValueOnly;
	}

	/**
	 * @param charObjectFieldWithMaxValueOnly
	 *            the charObjectFieldWithMaxValueOnly to set
	 */
	public void setCharObjectFieldWithMaxValueOnly(
			Character charObjectFieldWithMaxValueOnly) {
		this.charObjectFieldWithMaxValueOnly = charObjectFieldWithMaxValueOnly;
	}

	/**
	 * @return the charObjectFieldWithMinAndMaxValue
	 */
	public Character getCharObjectFieldWithMinAndMaxValue() {
		return charObjectFieldWithMinAndMaxValue;
	}

	/**
	 * @param charObjectFieldWithMinAndMaxValue
	 *            the charObjectFieldWithMinAndMaxValue to set
	 */
	public void setCharObjectFieldWithMinAndMaxValue(
			Character charObjectFieldWithMinAndMaxValue) {
		this.charObjectFieldWithMinAndMaxValue = charObjectFieldWithMinAndMaxValue;
	}

	/**
	 * @return the charFieldWithPreciseValue
	 */
	public char getCharFieldWithPreciseValue() {
		return charFieldWithPreciseValue;
	}

	/**
	 * @param charFieldWithPreciseValue
	 *            the charFieldWithPreciseValue to set
	 */
	public void setCharFieldWithPreciseValue(char charFieldWithPreciseValue) {
		this.charFieldWithPreciseValue = charFieldWithPreciseValue;
	}

	// ------------------->> Private methods

	// ------------------->> equals() / hashcode() / toString()

	/**
	 * @return the charFieldWithBlankInPreciseValue
	 */
	public char getCharFieldWithBlankInPreciseValue() {
		return charFieldWithBlankInPreciseValue;
	}

	/**
	 * @param charFieldWithBlankInPreciseValue
	 *            the charFieldWithBlankInPreciseValue to set
	 */
	public void setCharFieldWithBlankInPreciseValue(
			char charFieldWithBlankInPreciseValue) {
		this.charFieldWithBlankInPreciseValue = charFieldWithBlankInPreciseValue;
	}

	/**
	 * Constructs a <code>String</code> with all attributes
	 * in name = value format.
	 *
	 * @return a <code>String</code> representation
	 *         of this object.
	 */
	public String toString()
	{
		final String TAB = "    ";

		StringBuilder retValue = new StringBuilder();

		retValue.append("CharValuePojo ( ")
				.append("charFieldWithMinValueOnly = ").append(this.charFieldWithMinValueOnly).append(TAB)
				.append("charFieldWithMaxValueOnly = ").append(this.charFieldWithMaxValueOnly).append(TAB)
				.append("charFieldWithMinAndMaxValue = ").append(this.charFieldWithMinAndMaxValue).append(TAB)
				.append("charFieldWithBlankInPreciseValue = ").append(this.charFieldWithBlankInPreciseValue).append(TAB)
				.append("charObjectFieldWithMinValueOnly = ").append(this.charObjectFieldWithMinValueOnly).append(TAB)
				.append("charObjectFieldWithMaxValueOnly = ").append(this.charObjectFieldWithMaxValueOnly).append(TAB)
				.append("charObjectFieldWithMinAndMaxValue = ").append(this.charObjectFieldWithMinAndMaxValue).append(TAB)
				.append("charFieldWithPreciseValue = ").append(this.charFieldWithPreciseValue).append(TAB)
				.append(" )");

		return retValue.toString();
	}

	// ------------------->> Inner classes

}
| mit |
greghaskins/openjdk-jdk7u-jdk | src/macosx/classes/com/apple/laf/AquaButtonCheckBoxUI.java | 2674 | /*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.apple.laf;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import apple.laf.JRSUIConstants.*;
import com.apple.laf.AquaUtilControlSize.*;
import com.apple.laf.AquaUtils.*;
/**
 * Aqua look-and-feel UI delegate for check boxes.  A single delegate
 * instance is shared by every {@code JCheckBox} (standard Swing
 * {@code createUI} pattern), held in a recyclable singleton.
 */
public class AquaButtonCheckBoxUI extends AquaButtonLabeledUI {
    // The one shared delegate handed out by createUI().
    protected static final RecyclableSingleton<AquaButtonCheckBoxUI> instance = new RecyclableSingletonFromDefaultConstructor<AquaButtonCheckBoxUI>(AquaButtonCheckBoxUI.class);

    // Icon used purely for size calculations, backed by the native
    // radio-button sizer image.
    protected static final RecyclableSingleton<ImageIcon> sizingIcon = new RecyclableSingleton<ImageIcon>() {
        protected ImageIcon getInstance() {
            return new ImageIcon(AquaNativeResources.getRadioButtonSizerImage());
        }
    };

    /** Swing entry point: returns the shared delegate for any check box. */
    public static ComponentUI createUI(final JComponent c) {
        return instance.get();
    }

    /** Icon whose dimensions drive check-box size computations. */
    public static Icon getSizingCheckBoxIcon() {
        return sizingIcon.get();
    }

    /** UIManager defaults key prefix for this component ("CheckBox."). */
    public String getPropertyPrefix() {
        return "CheckBox" + ".";
    }

    /** Border/painter configured to render the native check-box widget. */
    protected AquaButtonBorder getPainter() {
        return new CheckBoxButtonBorder();
    }

    public static class CheckBoxButtonBorder extends LabeledButtonBorder {
        /** Pulls margins from UI defaults and selects the check-box widget. */
        public CheckBoxButtonBorder() {
            super(new SizeDescriptor(new SizeVariant().replaceMargins("CheckBox.margin")));
            painter.state.set(Widget.BUTTON_CHECK_BOX);
        }

        /** Copy constructor delegating to the labeled-border copy logic. */
        public CheckBoxButtonBorder(final CheckBoxButtonBorder other) {
            super(other);
        }
    }
}
| gpl-2.0 |
greghaskins/openjdk-jdk7u-jdk | src/macosx/classes/com/apple/eawt/FullScreenAdapter.java | 2062 | /*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.apple.eawt;
import com.apple.eawt.AppEvent.FullScreenEvent;
/**
* Abstract adapter class for receiving fullscreen events. This class is provided
* as a convenience for creating listeners.
*
* Subclasses registered with {@link FullScreenUtilities#addFullScreenListenerTo(javax.swing.RootPaneContainer, FullScreenListener)}
* will receive all entering/entered/exiting/exited full screen events.
*
* @see FullScreenUtilities
*
* @since Java for Mac OS X 10.7 Update 1
*/
public abstract class FullScreenAdapter implements FullScreenListener {
    // All four callbacks are deliberate no-ops so subclasses only need to
    // override the full screen transitions they care about.

    /** Called as the window begins its transition into full screen. */
    public void windowEnteringFullScreen(final FullScreenEvent e) {}
    /** Called once the window has finished entering full screen. */
    public void windowEnteredFullScreen(final FullScreenEvent e) {}
    /** Called as the window begins its transition out of full screen. */
    public void windowExitingFullScreen(final FullScreenEvent e) {}
    /** Called once the window has finished leaving full screen. */
    public void windowExitedFullScreen(final FullScreenEvent e) {}
}
| gpl-2.0 |
andre-nunes/fenixedu-academic | src/main/java/org/fenixedu/academic/ui/struts/action/administrativeOffice/studentEnrolment/CurriculumLinesLocationManagementDA.java | 2367 | /**
* Copyright © 2002 Instituto Superior Técnico
*
* This file is part of FenixEdu Academic.
*
* FenixEdu Academic is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FenixEdu Academic is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
*/
package org.fenixedu.academic.ui.struts.action.administrativeOffice.studentEnrolment;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.fenixedu.academic.ui.struts.action.administrativeOffice.student.SearchForStudentsDA;
import org.fenixedu.academic.ui.struts.action.commons.student.AbstractCurriculumLinesLocationManagementDA;
import org.fenixedu.bennu.struts.annotations.Forward;
import org.fenixedu.bennu.struts.annotations.Forwards;
import org.fenixedu.bennu.struts.annotations.Mapping;
@Mapping(path = "/curriculumLinesLocationManagement", module = "academicAdministration",
functionality = SearchForStudentsDA.class)
@Forwards({
@Forward(name = "showCurriculum", path = "/academicAdminOffice/curriculum/curriculumLines/location/showCurriculum.jsp"),
@Forward(name = "chooseNewLocation",
path = "/academicAdminOffice/curriculum/curriculumLines/location/chooseNewLocation.jsp"),
@Forward(name = "backToStudentEnrolments", path = "/academicAdministration/studentEnrolments.do?method=prepare") })
public class CurriculumLinesLocationManagementDA extends AbstractCurriculumLinesLocationManagementDA {

    /**
     * Sends the user back to the student enrolments screen by resolving
     * the "backToStudentEnrolments" forward declared on this action.
     */
    public ActionForward backToStudentEnrolments(ActionMapping mapping, ActionForm form, HttpServletRequest request,
            HttpServletResponse response) {
        final String forwardName = "backToStudentEnrolments";
        return mapping.findForward(forwardName);
    }

}
| lgpl-3.0 |
jomarko/kie-wb-common | kie-wb-common-widgets/kie-wb-metadata-widget/src/main/java/org/kie/workbench/common/widgets/metadata/client/widget/TagWidget.java | 2542 | /*
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.widgets.metadata.client.widget;
import javax.inject.Inject;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.Widget;
import org.guvnor.common.services.shared.metadata.model.Metadata;
/**
* This is a viewer/selector for tags.
* It will show a list of tags currently applicable, and allow you to
* remove/add to them.
* <p/>
* It is intended to work with the meta data form.
*/
public class TagWidget implements IsWidget {

    // Metadata whose tag collection this widget displays and edits.
    private Metadata data;
    // MVP view; may also be created lazily in asWidget() (see initView).
    private TagWidgetView view;
    // When true, tags are rendered without edit controls.
    private boolean readOnly;

    @Inject
    public void setView( TagWidgetView view ) {
        this.view = view;
        view.setPresenter( this );
    }

    /**
     * @param d The meta data.
     * @param readOnly If it is to be non editable.
     */
    public void setContent( Metadata d,
                            boolean readOnly ) {
        this.data = d;
        this.readOnly = readOnly;
        view.setReadOnly(readOnly);
        loadData();
    }

    /**
     * Splits the given text into whitespace-separated tags and adds each
     * one not already present to both the metadata and the view.
     * <p/>
     * Splitting on a single space (the previous behaviour) produced
     * empty-string "tags" for blank input or leading/consecutive spaces;
     * any whitespace run is now a separator and blank fragments are skipped.
     * @param text whitespace-separated tags; {@code null} or blank is a no-op
     */
    public void onAddTags( String text ) {
        if (text != null) {
            String[] tags = text.trim().split( "\\s+" );
            for (String tag : tags) {
                if (!tag.isEmpty() && !data.getTags().contains( tag )) {
                    data.addTag( tag );
                    view.addTag( tag, readOnly );
                }
            }
        }
    }

    /**
     * Removes the tag from the metadata and rebuilds the view so it shows
     * only the remaining tags.
     * @param tag the tag to remove; an unknown tag leaves the data unchanged
     */
    public void onRemoveTag( String tag ) {
        data.getTags().remove( tag );
        loadData();
    }

    /** Clears the view and re-renders every tag currently in the metadata. */
    public void loadData( ) {
        view.clear();
        for (String tag : data.getTags()) {
            view.addTag( tag, readOnly );
        }
    }

    @Override
    public Widget asWidget() {
        // Lazily build a default view if none was injected.
        if (view == null) initView();
        return view.asWidget();
    }

    // TODO: remove this method when the MetaDataWidget is moved to MVP
    private void initView() {
        setView( new TagWidgetViewImpl() );
    }
}
| apache-2.0 |