repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
xiangyuan/googleio
src/com/google/android/apps/iosched/io/LocalExecutor.java
2372
/* * Copyright 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.iosched.io; import com.google.android.apps.iosched.io.XmlHandler.HandlerException; import com.google.android.apps.iosched.util.ParserUtils; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; import android.content.ContentResolver; import android.content.Context; import android.content.res.Resources; import android.content.res.XmlResourceParser; import java.io.IOException; import java.io.InputStream; /** * Opens a local {@link Resources#getXml(int)} and passes the resulting * {@link XmlPullParser} to the given {@link XmlHandler}. 
*/ public class LocalExecutor { private Resources mRes; private ContentResolver mResolver; public LocalExecutor(Resources res, ContentResolver resolver) { mRes = res; mResolver = resolver; } public void execute(Context context, String assetName, XmlHandler handler) throws HandlerException { try { final InputStream input = context.getAssets().open(assetName); final XmlPullParser parser = ParserUtils.newPullParser(input); handler.parseAndApply(parser, mResolver); } catch (HandlerException e) { throw e; } catch (XmlPullParserException e) { throw new HandlerException("Problem parsing local asset: " + assetName, e); } catch (IOException e) { throw new HandlerException("Problem parsing local asset: " + assetName, e); } } public void execute(int resId, XmlHandler handler) throws HandlerException { final XmlResourceParser parser = mRes.getXml(resId); try { handler.parseAndApply(parser, mResolver); } finally { parser.close(); } } }
apache-2.0
gfyoung/elasticsearch
server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java
10451
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.similarity; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.Version; import org.elasticsearch.script.SimilarityScript; import org.elasticsearch.script.SimilarityWeightScript; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import 
java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; public class ScriptedSimilarityTests extends ESTestCase { public void testSameNormsAsBM25CountOverlaps() { doTestSameNormsAsBM25(false); } public void testSameNormsAsBM25DiscountOverlaps() { doTestSameNormsAsBM25(true); } private void doTestSameNormsAsBM25(boolean discountOverlaps) { ScriptedSimilarity sim1 = new ScriptedSimilarity("foobar", null, "foobaz", null, discountOverlaps); BM25Similarity sim2 = new BM25Similarity(); sim2.setDiscountOverlaps(discountOverlaps); for (int iter = 0; iter < 100; ++iter) { final int length = TestUtil.nextInt(random(), 1, 100); final int position = random().nextInt(length); final int numOverlaps = random().nextInt(length); int maxTermFrequency = TestUtil.nextInt(random(), 1, 10); int uniqueTermCount = TestUtil.nextInt(random(), 1, 10); FieldInvertState state = new FieldInvertState(Version.LATEST.major, "foo", IndexOptions.DOCS_AND_FREQS, position, length, numOverlaps, 100, maxTermFrequency, uniqueTermCount); assertEquals( sim2.computeNorm(state), sim1.computeNorm(state), 0f); } } public void testBasics() throws IOException { final AtomicBoolean called = new AtomicBoolean(); SimilarityScript.Factory scriptFactory = () -> { return new SimilarityScript() { @Override public double execute(double weight, ScriptedSimilarity.Query query, ScriptedSimilarity.Field field, ScriptedSimilarity.Term term, ScriptedSimilarity.Doc doc) { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); if (Arrays.stream(stackTraceElements).anyMatch(ste -> { return ste.getClassName().endsWith(".TermScorer") && ste.getMethodName().equals("score"); }) == false) { // this might happen when computing max scores return Float.MAX_VALUE; } assertEquals(1, weight, 0); assertNotNull(doc); assertEquals(2f, doc.getFreq(), 0); assertEquals(3, doc.getLength(), 0); assertNotNull(field); assertEquals(3, field.getDocCount()); assertEquals(5, field.getSumDocFreq()); assertEquals(6, 
field.getSumTotalTermFreq()); assertNotNull(term); assertEquals(2, term.getDocFreq()); assertEquals(3, term.getTotalTermFreq()); assertNotNull(query); assertEquals(3.2f, query.getBoost(), 0); called.set(true); return 42f; } }; }; ScriptedSimilarity sim = new ScriptedSimilarity("foobar", null, "foobaz", scriptFactory, true); Directory dir = new RAMDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim)); Document doc = new Document(); doc.add(new TextField("f", "foo bar", Store.NO)); doc.add(new StringField("match", "no", Store.NO)); w.addDocument(doc); doc = new Document(); doc.add(new TextField("f", "foo foo bar", Store.NO)); doc.add(new StringField("match", "yes", Store.NO)); w.addDocument(doc); doc = new Document(); doc.add(new TextField("f", "bar", Store.NO)); doc.add(new StringField("match", "no", Store.NO)); w.addDocument(doc); IndexReader r = DirectoryReader.open(w); w.close(); IndexSearcher searcher = new IndexSearcher(r); searcher.setSimilarity(sim); Query query = new BoostQuery(new BooleanQuery.Builder() .add(new TermQuery(new Term("f", "foo")), Occur.SHOULD) .add(new TermQuery(new Term("match", "yes")), Occur.FILTER) .build(), 3.2f); TopDocs topDocs = searcher.search(query, 1); assertEquals(1, topDocs.totalHits.value); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); w.close(); dir.close(); } public void testInitScript() throws IOException { final AtomicBoolean initCalled = new AtomicBoolean(); SimilarityWeightScript.Factory weightScriptFactory = () -> { return new SimilarityWeightScript() { @Override public double execute(ScriptedSimilarity.Query query, ScriptedSimilarity.Field field, ScriptedSimilarity.Term term) { assertEquals(3, field.getDocCount()); assertEquals(5, field.getSumDocFreq()); assertEquals(6, field.getSumTotalTermFreq()); assertNotNull(term); assertEquals(1, term.getDocFreq()); assertEquals(2, term.getTotalTermFreq()); assertNotNull(query); assertEquals(3.2f, query.getBoost(), 
0); initCalled.set(true); return 28; } }; }; final AtomicBoolean called = new AtomicBoolean(); SimilarityScript.Factory scriptFactory = () -> { return new SimilarityScript() { @Override public double execute(double weight, ScriptedSimilarity.Query query, ScriptedSimilarity.Field field, ScriptedSimilarity.Term term, ScriptedSimilarity.Doc doc) { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); if (Arrays.stream(stackTraceElements).anyMatch(ste -> { return ste.getClassName().endsWith(".TermScorer") && ste.getMethodName().equals("score"); }) == false) { // this might happen when computing max scores return Float.MAX_VALUE; } assertEquals(28, weight, 0d); assertNotNull(doc); assertEquals(2f, doc.getFreq(), 0); assertEquals(3, doc.getLength(), 0); assertNotNull(field); assertEquals(3, field.getDocCount()); assertEquals(5, field.getSumDocFreq()); assertEquals(6, field.getSumTotalTermFreq()); assertNotNull(term); assertEquals(1, term.getDocFreq()); assertEquals(2, term.getTotalTermFreq()); assertNotNull(query); assertEquals(3.2f, query.getBoost(), 0); called.set(true); return 42; } }; }; ScriptedSimilarity sim = new ScriptedSimilarity("foobar", weightScriptFactory, "foobaz", scriptFactory, true); Directory dir = new RAMDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim)); Document doc = new Document(); doc.add(new TextField("f", "bar baz", Store.NO)); w.addDocument(doc); doc = new Document(); doc.add(new TextField("f", "foo foo bar", Store.NO)); doc.add(new StringField("match", "yes", Store.NO)); w.addDocument(doc); doc = new Document(); doc.add(new TextField("f", "bar", Store.NO)); w.addDocument(doc); IndexReader r = DirectoryReader.open(w); w.close(); IndexSearcher searcher = new IndexSearcher(r); searcher.setSimilarity(sim); Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f); TopDocs topDocs = searcher.search(query, 1); assertEquals(1, topDocs.totalHits.value); 
assertTrue(initCalled.get()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); w.close(); dir.close(); } }
apache-2.0
zhiqinghuang/elasticsearch
plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java
9075
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.discovery.ec2; import com.amazonaws.AmazonClientException; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.*; import org.elasticsearch.Version; import org.elasticsearch.cloud.aws.AwsEc2Service; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.zen.ping.unicast.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.transport.TransportService; import java.util.*; /** * */ public class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider { private static enum HostType { PRIVATE_IP, PUBLIC_IP, PRIVATE_DNS, PUBLIC_DNS } private final TransportService transportService; private final AmazonEC2 client; private final Version version; private final boolean bindAnyGroup; private final Set<String> groups; private final Map<String, String> tags; private final Set<String> 
availabilityZones; private final HostType hostType; @Inject public AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service, Version version) { super(settings); this.transportService = transportService; this.client = awsEc2Service.client(); this.version = version; this.hostType = HostType.valueOf(settings.get("discovery.ec2.host_type", "private_ip").toUpperCase(Locale.ROOT)); this.bindAnyGroup = settings.getAsBoolean("discovery.ec2.any_group", true); this.groups = new HashSet<>(); groups.addAll(Arrays.asList(settings.getAsArray("discovery.ec2.groups"))); this.tags = settings.getByPrefix("discovery.ec2.tag.").getAsMap(); Set<String> availabilityZones = new HashSet<>(); availabilityZones.addAll(Arrays.asList(settings.getAsArray("discovery.ec2.availability_zones"))); if (settings.get("discovery.ec2.availability_zones") != null) { availabilityZones.addAll(Strings.commaDelimitedListToSet(settings.get("discovery.ec2.availability_zones"))); } this.availabilityZones = availabilityZones; if (logger.isDebugEnabled()) { logger.debug("using host_type [{}], tags [{}], groups [{}] with any_group [{}], availability_zones [{}]", hostType, tags, groups, bindAnyGroup, availabilityZones); } } @Override public List<DiscoveryNode> buildDynamicNodes() { List<DiscoveryNode> discoNodes = new ArrayList<>(); DescribeInstancesResult descInstances; try { // Query EC2 API based on AZ, instance state, and tag. // NOTE: we don't filter by security group during the describe instances request for two reasons: // 1. differences in VPCs require different parameters during query (ID vs Name) // 2. We want to use two different strategies: (all security groups vs. 
any security groups) descInstances = client.describeInstances(buildDescribeInstancesRequest()); } catch (AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); return discoNodes; } logger.trace("building dynamic unicast discovery nodes..."); for (Reservation reservation : descInstances.getReservations()) { for (Instance instance : reservation.getInstances()) { // lets see if we can filter based on groups if (!groups.isEmpty()) { List<GroupIdentifier> instanceSecurityGroups = instance.getSecurityGroups(); ArrayList<String> securityGroupNames = new ArrayList<String>(); ArrayList<String> securityGroupIds = new ArrayList<String>(); for (GroupIdentifier sg : instanceSecurityGroups) { securityGroupNames.add(sg.getGroupName()); securityGroupIds.add(sg.getGroupId()); } if (bindAnyGroup) { // We check if we can find at least one group name or one group id in groups. if (Collections.disjoint(securityGroupNames, groups) && Collections.disjoint(securityGroupIds, groups)) { logger.trace("filtering out instance {} based on groups {}, not part of {}", instance.getInstanceId(), instanceSecurityGroups, groups); // continue to the next instance continue; } } else { // We need tp match all group names or group ids, otherwise we ignore this instance if (!(securityGroupNames.containsAll(groups) || securityGroupIds.containsAll(groups))) { logger.trace("filtering out instance {} based on groups {}, does not include all of {}", instance.getInstanceId(), instanceSecurityGroups, groups); // continue to the next instance continue; } } } String address = null; switch (hostType) { case PRIVATE_DNS: address = instance.getPrivateDnsName(); break; case PRIVATE_IP: address = instance.getPrivateIpAddress(); break; case PUBLIC_DNS: address = instance.getPublicDnsName(); break; case PUBLIC_IP: address = instance.getPublicIpAddress(); break; } if (address != null) { try { // we only limit to 1 port per 
address, makes no sense to ping 100 ports TransportAddress[] addresses = transportService.addressesFromString(address, 1); for (int i = 0; i < addresses.length; i++) { logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]); discoNodes.add(new DiscoveryNode("#cloud-" + instance.getInstanceId() + "-" + i, addresses[i], version.minimumCompatibilityVersion())); } } catch (Exception e) { logger.warn("failed ot add {}, address {}", e, instance.getInstanceId(), address); } } else { logger.trace("not adding {}, address is null, host_type {}", instance.getInstanceId(), hostType); } } } logger.debug("using dynamic discovery nodes {}", discoNodes); return discoNodes; } private DescribeInstancesRequest buildDescribeInstancesRequest() { DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest() .withFilters( new Filter("instance-state-name").withValues("running", "pending") ); for (Map.Entry<String, String> tagFilter : tags.entrySet()) { // for a given tag key, OR relationship for multiple different values describeInstancesRequest.withFilters( new Filter("tag:" + tagFilter.getKey()).withValues(tagFilter.getValue()) ); } if (!availabilityZones.isEmpty()) { // OR relationship amongst multiple values of the availability-zone filter describeInstancesRequest.withFilters( new Filter("availability-zone").withValues(availabilityZones) ); } return describeInstancesRequest; } }
apache-2.0
wangcy6/storm_app
frame/storm-master/external/storm-elasticsearch/src/main/java/org/apache/storm/elasticsearch/response/PercolateResponse.java
2082
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.storm.elasticsearch.response;

import java.util.List;

import org.apache.storm.elasticsearch.doc.Shards;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Jackson-mapped response body for an Elasticsearch percolate request.
 * Field names mirror the JSON payload; underscore-prefixed JSON keys are
 * bound via {@link JsonProperty}.
 */
public class PercolateResponse {

    /** Time the percolation took, in milliseconds. */
    private long took;

    /** Total number of matching queries. */
    private long total;

    /** The individual query matches returned by the percolator. */
    private List<Match> matches;

    @JsonProperty("_shards")
    private Shards shards;

    public long getTook() {
        return took;
    }

    public void setTook(long took) {
        this.took = took;
    }

    public long getTotal() {
        return total;
    }

    public void setTotal(long total) {
        this.total = total;
    }

    public List<Match> getMatches() {
        return matches;
    }

    public void setMatches(List<Match> matches) {
        this.matches = matches;
    }

    /** A single percolator match: the index and id of the matching query. */
    public static class Match {

        @JsonProperty("_index")
        private String index;

        @JsonProperty("_id")
        private String id;

        public String getIndex() {
            return index;
        }

        public void setIndex(String index) {
            this.index = index;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }
    }
}
apache-2.0
dxxfire/nutz
test/org/nutz/ioc/loader/annotation/meta/ClassB.java
335
package org.nutz.ioc.loader.annotation.meta;

import org.nutz.dao.Dao;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;

/**
 * Annotation-loader test fixture: a bean whose {@code dao} field is injected
 * by reference from the Ioc container entry named "dao".
 */
@IocBean
public class ClassB {

    // Injected via "refer:dao" — resolved by name from the container.
    @Inject("refer:dao")
    public Dao dao;

    // Setter kept alongside the public field so setter-style injection also works.
    public void setDao(Dao dao) {
        this.dao = dao;
    }
}
apache-2.0
android-ia/platform_external_robolectric
src/test/java/com/xtremelabs/robolectric/util/TestRunnable.java
191
package com.xtremelabs.robolectric.util; public class TestRunnable implements Runnable { public boolean wasRun = false; @Override public void run() { wasRun = true; } }
mit
md-5/jdk10
src/java.xml/share/classes/com/sun/org/apache/xerces/internal/impl/xs/XSWildcardDecl.java
24285
/* * reserved comment block * DO NOT REMOVE OR ALTER! */ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sun.org.apache.xerces.internal.impl.xs; import com.sun.org.apache.xerces.internal.impl.xs.util.StringListImpl; import com.sun.org.apache.xerces.internal.impl.xs.util.XSObjectListImpl; import com.sun.org.apache.xerces.internal.xs.StringList; import com.sun.org.apache.xerces.internal.xs.XSAnnotation; import com.sun.org.apache.xerces.internal.xs.XSConstants; import com.sun.org.apache.xerces.internal.xs.XSNamespaceItem; import com.sun.org.apache.xerces.internal.xs.XSWildcard; import com.sun.org.apache.xerces.internal.xs.XSObjectList; /** * The XML representation for a wildcard declaration * schema component is an <any> or <anyAttribute> element information item * * @xerces.internal * * @author Sandy Gao, IBM * @author Rahul Srivastava, Sun Microsystems Inc. 
* */ public class XSWildcardDecl implements XSWildcard { public static final String ABSENT = null; // the type of wildcard: any, other, or list public short fType = NSCONSTRAINT_ANY; // the type of process contents: strict, lax, or skip public short fProcessContents = PC_STRICT; // the namespace list: // for NSCONSTRAINT_LIST, it means one of the namespaces in the list // for NSCONSTRAINT_NOT, it means not any of the namespaces in the list public String[] fNamespaceList; // optional annotation public XSObjectList fAnnotations = null; // I'm trying to implement the following constraint exactly as what the // spec describes. Sometimes it seems redundant, and sometimes there seems // to be much easier solutions. But it makes it easy to understand, // easy to maintain, and easy to find a bug (either in the code, or in the // spec). -SG // // NOTE: Schema spec only requires that ##other not(tNS,absent). // The way we store ##other is not(NS1,NS2,...,NSN), which covers // what's required by Schema, and allows future enhanced features. // // In the following in-line comments: // - Bullet removed from w3c specification. // + Bullet added as proposed by Sandy Gao, IBM. // / Since we store ##other as not(NS1,NS2,...,NSN), we need to put some // comments on where we didn't follow the spec exactly. // * When we really support not(NS1,NS2,...,NSN), we need to revisit these items. /** * Validation Rule: Wildcard allows Namespace Name */ public boolean allowNamespace(String namespace) { // For a value which is either a namespace name or absent to be valid with respect to a wildcard constraint (the value of a {namespace constraint}) one of the following must be true: // 1 The constraint must be any. if (fType == NSCONSTRAINT_ANY) return true; // 2 All of the following must be true: // 2.1 The constraint is a pair of not and a namespace name or absent ([Definition:] call this the namespace test). // 2.2 The value must not be identical to the namespace test. 
// 2.3 The value must not be absent. // / we store ##other as not(list), so our actual rule is // / 2 The constraint is a pair of not and a set, and the value is not in such set. if (fType == NSCONSTRAINT_NOT) { boolean found = false; int listNum = fNamespaceList.length; for (int i = 0; i < listNum && !found; i++) { if (namespace == fNamespaceList[i]) found = true; } if (!found) return true; } // 3 The constraint is a set, and the value is identical to one of the members of the set. if (fType == NSCONSTRAINT_LIST) { int listNum = fNamespaceList.length; for (int i = 0; i < listNum; i++) { if (namespace == fNamespaceList[i]) return true; } } // none of the above conditions applied, so return false. return false; } /** * Schema Component Constraint: Wildcard Subset */ public boolean isSubsetOf(XSWildcardDecl superWildcard) { // if the super is null (not expressible), return false if (superWildcard == null) return false; // For a namespace constraint (call it sub) to be an intensional subset of another // namespace constraint (call it super) one of the following must be true: // 1 super must be any. if (superWildcard.fType == NSCONSTRAINT_ANY) { return true; } // 2 All of the following must be true: // 2.1 sub must be a pair of not and a namespace name or absent. // 2.2 super must be a pair of not and the same value. // * we can't just compare whether the namespace are the same value // since we store other as not(list) if (fType == NSCONSTRAINT_NOT) { if (superWildcard.fType == NSCONSTRAINT_NOT && fNamespaceList[0] == superWildcard.fNamespaceList[0]) { return true; } } // 3 All of the following must be true: // 3.1 sub must be a set whose members are either namespace names or absent. // 3.2 One of the following must be true: // 3.2.1 super must be the same set or a superset thereof. // -3.2.2 super must be a pair of not and a namespace name or absent and // that value must not be in sub's set. 
// +3.2.2 super must be a pair of not and a namespace name or absent and // either that value or absent must not be in sub's set. // * since we store ##other as not(list), we acturally need to make sure // that none of the namespaces in super.list is in sub.list. if (fType == NSCONSTRAINT_LIST) { if (superWildcard.fType == NSCONSTRAINT_LIST && subset2sets(fNamespaceList, superWildcard.fNamespaceList)) { return true; } if (superWildcard.fType == NSCONSTRAINT_NOT && !elementInSet(superWildcard.fNamespaceList[0], fNamespaceList) && !elementInSet(ABSENT, fNamespaceList)) { return true; } } // none of the above conditions applied, so return false. return false; } // isSubsetOf /** * Check whether this wildcard has a weaker process contents than the super. */ public boolean weakerProcessContents(XSWildcardDecl superWildcard) { return fProcessContents == XSWildcardDecl.PC_LAX && superWildcard.fProcessContents == XSWildcardDecl.PC_STRICT || fProcessContents == XSWildcardDecl.PC_SKIP && superWildcard.fProcessContents != XSWildcardDecl.PC_SKIP; } /** * Schema Component Constraint: Attribute Wildcard Union */ public XSWildcardDecl performUnionWith(XSWildcardDecl wildcard, short processContents) { // if the other wildcard is not expressible, the result is still not expressible if (wildcard == null) return null; // For a wildcard's {namespace constraint} value to be the intensional union of two // other such values (call them O1 and O2): the appropriate case among the following // must be true: XSWildcardDecl unionWildcard = new XSWildcardDecl(); unionWildcard.fProcessContents = processContents; // 1 If O1 and O2 are the same value, then that value must be the value. if (areSame(wildcard)) { unionWildcard.fType = fType; unionWildcard.fNamespaceList = fNamespaceList; } // 2 If either O1 or O2 is any, then any must be the value. 
else if ( (fType == NSCONSTRAINT_ANY) || (wildcard.fType == NSCONSTRAINT_ANY) ) { unionWildcard.fType = NSCONSTRAINT_ANY; } // 3 If both O1 and O2 are sets of (namespace names or absent), then the union of // those sets must be the value. else if ( (fType == NSCONSTRAINT_LIST) && (wildcard.fType == NSCONSTRAINT_LIST) ) { unionWildcard.fType = NSCONSTRAINT_LIST; unionWildcard.fNamespaceList = union2sets(fNamespaceList, wildcard.fNamespaceList); } // -4 If the two are negations of different namespace names, then the intersection // is not expressible. // +4 If the two are negations of different namespace names or absent, then // a pair of not and absent must be the value. // * now we store ##other as not(list), the result should be // not(intersection of two lists). else if (fType == NSCONSTRAINT_NOT && wildcard.fType == NSCONSTRAINT_NOT) { unionWildcard.fType = NSCONSTRAINT_NOT; unionWildcard.fNamespaceList = new String[2]; unionWildcard.fNamespaceList[0] = ABSENT; unionWildcard.fNamespaceList[1] = ABSENT; } // 5 If either O1 or O2 is a pair of not and a namespace name and the other is a set of // (namespace names or absent), then The appropriate case among the following must be true: // -5.1 If the set includes the negated namespace name, then any must be the value. // -5.2 If the set does not include the negated namespace name, then whichever of O1 or O2 // is a pair of not and a namespace name must be the value. // +5.1 If the negated value is a namespace name, then The appropriate case // among the following must be true: // +5.1.1 If the set includes both the namespace name and absent, then any // must be the value. // +5.1.2 If the set includes the namespace name but does not include // absent, then a pair of not and absent must be the value. // +5.1.3 If the set does not include the namespace name but includes // absent, then the union is not expressible. 
// +5.1.4 If the set does not include either the namespace name or absent,
        // then whichever of O1 or O2 is a pair of not and a namespace name must be
        // the value.
        // +5.2 If the negated value is absent, then The appropriate case among the
        // following must be true:
        // +5.2.1 If the set includes absent, then any must be the value.
        // +5.2.2 If the set does not include absent, then whichever of O1 or O2 is
        // a pair of not and a namespace name must be the value.
        // * when we have not(list), the operation is just not(otherlist-list)
        // NOTE(review): throughout this class, namespace-name strings are compared
        // with == / != rather than equals(); this presumes all namespace names are
        // interned (e.g. via the parser's SymbolTable) — confirm before touching.
        else if ( ((fType == NSCONSTRAINT_NOT) && (wildcard.fType == NSCONSTRAINT_LIST)) ||
                  ((fType == NSCONSTRAINT_LIST) && (wildcard.fType == NSCONSTRAINT_NOT)) ) {
            String[] other = null;
            String[] list = null;
            // Normalize so that "other" is the negated pair and "list" is the set.
            if (fType == NSCONSTRAINT_NOT) {
                other = fNamespaceList;
                list = wildcard.fNamespaceList;
            }
            else {
                other = wildcard.fNamespaceList;
                list = fNamespaceList;
            }
            boolean foundAbsent = elementInSet(ABSENT, list);
            if (other[0] != ABSENT) {
                boolean foundNS = elementInSet(other[0], list);
                if (foundNS && foundAbsent) {
                    // 5.1.1: set contains both the negated name and absent -> any
                    unionWildcard.fType = NSCONSTRAINT_ANY;
                } else if (foundNS && !foundAbsent) {
                    // 5.1.2: not(absent); represented here as a 2-slot list of ABSENT
                    unionWildcard.fType = NSCONSTRAINT_NOT;
                    unionWildcard.fNamespaceList = new String[2];
                    unionWildcard.fNamespaceList[0] = ABSENT;
                    unionWildcard.fNamespaceList[1] = ABSENT;
                } else if (!foundNS && foundAbsent) {
                    // 5.1.3: union is not expressible
                    return null;
                } else { // !foundNS && !foundAbsent
                    // 5.1.4: the negated pair wins
                    unionWildcard.fType = NSCONSTRAINT_NOT;
                    unionWildcard.fNamespaceList = other;
                }
            } else { // other[0] == ABSENT
                if (foundAbsent) {
                    // 5.2.1
                    unionWildcard.fType = NSCONSTRAINT_ANY;
                } else { // !foundAbsent
                    // 5.2.2
                    unionWildcard.fType = NSCONSTRAINT_NOT;
                    unionWildcard.fNamespaceList = other;
                }
            }
        }

        return unionWildcard;
    } // performUnionWith

    /**
     * Schema Component Constraint: Attribute Wildcard Intersection
     */
    public XSWildcardDecl performIntersectionWith(XSWildcardDecl wildcard,
                                                  short processContents) {
        // if the other wildcard is not expressible, the result is still not expressible
        if (wildcard == null)
            return null;

        // For a wildcard's {namespace constraint} value to be the intensional intersection of
        // two other such values (call them O1 and O2): the appropriate case among the following
        // must be true:

        XSWildcardDecl intersectWildcard = new XSWildcardDecl();
        intersectWildcard.fProcessContents = processContents;

        // 1 If O1 and O2 are the same value, then that value must be the value.
        if (areSame(wildcard)) {
            intersectWildcard.fType = fType;
            intersectWildcard.fNamespaceList = fNamespaceList;
        }
        // 2 If either O1 or O2 is any, then the other must be the value.
        else if ( (fType == NSCONSTRAINT_ANY) || (wildcard.fType == NSCONSTRAINT_ANY) ) {
            // both cannot be ANY, if we have reached here.
            XSWildcardDecl other = this;

            if (fType == NSCONSTRAINT_ANY)
                other = wildcard;

            intersectWildcard.fType = other.fType;
            intersectWildcard.fNamespaceList = other.fNamespaceList;
        }
        // -3 If either O1 or O2 is a pair of not and a namespace name and the other is a set of
        // (namespace names or absent), then that set, minus the negated namespace name if
        // it was in the set, must be the value.
        // +3 If either O1 or O2 is a pair of not and a namespace name and the other
        // is a set of (namespace names or absent), then that set, minus the negated
        // namespace name if it was in the set, then minus absent if it was in the
        // set, must be the value.
        // * when we have not(list), the operation is just list-otherlist
        else if ( ((fType == NSCONSTRAINT_NOT) && (wildcard.fType == NSCONSTRAINT_LIST)) ||
                  ((fType == NSCONSTRAINT_LIST) && (wildcard.fType == NSCONSTRAINT_NOT)) ) {
            String[] list = null;
            String[] other = null;

            if (fType == NSCONSTRAINT_NOT) {
                other = fNamespaceList;
                list = wildcard.fNamespaceList;
            }
            else {
                other = wildcard.fNamespaceList;
                list = fNamespaceList;
            }

            // Copy every list member that is neither the negated name nor absent.
            int listSize = list.length;
            String[] intersect = new String[listSize];
            int newSize = 0;
            for (int i = 0; i < listSize; i++) {
                if (list[i] != other[0] && list[i] != ABSENT)
                    intersect[newSize++] = list[i];
            }

            intersectWildcard.fType = NSCONSTRAINT_LIST;
            intersectWildcard.fNamespaceList = new String[newSize];
            System.arraycopy(intersect, 0, intersectWildcard.fNamespaceList, 0, newSize);
        }
        // 4 If both O1 and O2 are sets of (namespace names or absent), then the intersection of those
        // sets must be the value.
        else if ( (fType == NSCONSTRAINT_LIST) && (wildcard.fType == NSCONSTRAINT_LIST) ) {
            intersectWildcard.fType = NSCONSTRAINT_LIST;
            intersectWildcard.fNamespaceList = intersect2sets(fNamespaceList, wildcard.fNamespaceList);
        }
        // -5 If the two are negations of different namespace names, then the intersection is not expressible.
        // +5 If the two are negations of namespace names or absent, then The
        // appropriate case among the following must be true:
        // +5.1 If the two are negations of different namespace names, then the
        // intersection is not expressible.
        // +5.2 If one of the two is a pair of not and absent, the other must be
        // the value.
        // * when we have not(list), the operation is just not(onelist+otherlist)
        else if (fType == NSCONSTRAINT_NOT && wildcard.fType == NSCONSTRAINT_NOT) {
            // 5.1: negations of two different (non-absent) names -> not expressible
            if (fNamespaceList[0] != ABSENT && wildcard.fNamespaceList[0] != ABSENT)
                return null;

            // 5.2: whichever side is not(absent) loses; the other is the value.
            XSWildcardDecl other = this;

            if (fNamespaceList[0] == ABSENT)
                other = wildcard;

            intersectWildcard.fType = other.fType;
            intersectWildcard.fNamespaceList = other.fNamespaceList;
        }

        return intersectWildcard;
    } // performIntersectionWith

    /**
     * Returns true when this wildcard's {namespace constraint} is identical
     * to the other wildcard's (same kind, and for lists the same members in
     * any order; duplicates are assumed absent).
     */
    private boolean areSame(XSWildcardDecl wildcard) {
        if (fType == wildcard.fType) {
            // ##any, true
            if (fType == NSCONSTRAINT_ANY)
                return true;

            // ##other, only check the negated value
            // * when we support not(list), we need to check in the same way
            //   as for NSCONSTRAINT_LIST.
            if (fType == NSCONSTRAINT_NOT)
                return fNamespaceList[0] == wildcard.fNamespaceList[0];

            // ## list, must have the same length,
            // and each item in one list must appear in the other one
            // (we are assuming that there are no duplicate items in a list)
            if (fNamespaceList.length == wildcard.fNamespaceList.length) {
                for (int i=0; i<fNamespaceList.length; i++) {
                    if (!elementInSet(fNamespaceList[i], wildcard.fNamespaceList))
                        return false;
                }
                return true;
            }
        }

        return false;
    } // areSame

    /** Set intersection: members of {@code one} that also occur in {@code theOther}. */
    String[] intersect2sets(String[] one, String[] theOther){
        String[] result = new String[Math.min(one.length,theOther.length)];

        // simple implemention,
        int count = 0;
        for (int i=0; i<one.length; i++) {
            if (elementInSet(one[i], theOther))
                result[count++] = one[i];
        }

        String[] result2 = new String[count];
        System.arraycopy(result, 0, result2, 0, count);

        return result2;
    }

    /** Set union: members unique to {@code one}, followed by all of {@code theOther}. */
    String[] union2sets(String[] one, String[] theOther){
        String[] result1 = new String[one.length];

        // simple implemention,
        int count = 0;
        for (int i=0; i<one.length; i++) {
            if (!elementInSet(one[i], theOther))
                result1[count++] = one[i];
        }

        String[] result2 = new String[count+theOther.length];
        System.arraycopy(result1, 0, result2, 0, count);
        System.arraycopy(theOther, 0, result2, count, theOther.length);

        return result2;
    }

    /** Returns true when every member of {@code subSet} occurs in {@code superSet}. */
    boolean subset2sets(String[] subSet, String[] superSet){
        for (int i=0; i<subSet.length; i++) {
            if (!elementInSet(subSet[i], superSet))
                return false;
        }

        return true;
    }

    /** Membership test by string identity (namespace names are assumed interned). */
    boolean elementInSet(String ele, String[] set){
        boolean found = false;
        for (int i=0; i<set.length && !found; i++) {
            if (ele==set[i])
                found = true;
        }

        return found;
    }

    /**
     * get the string description of this wildcard
     */
    // Lazily built, cached description. NOTE(review): the cache is not
    // synchronized; concurrent first calls may each build the string, which is
    // benign as long as the wildcard's fields are no longer mutated.
    private String fDescription = null;
    public String toString() {
        if (fDescription == null) {
            StringBuffer buffer = new StringBuffer();
            buffer.append("WC[");
            switch (fType) {
            case NSCONSTRAINT_ANY:
                buffer.append(SchemaSymbols.ATTVAL_TWOPOUNDANY);
                break;
            case NSCONSTRAINT_NOT:
                buffer.append(SchemaSymbols.ATTVAL_TWOPOUNDOTHER);
                buffer.append(":\"");
                if (fNamespaceList[0] != null)
                    buffer.append(fNamespaceList[0]);
                buffer.append("\"");
                break;
            case NSCONSTRAINT_LIST:
                if (fNamespaceList.length == 0)
                    break;
                buffer.append("\"");
                if (fNamespaceList[0] != null)
                    buffer.append(fNamespaceList[0]);
                buffer.append("\"");
                for (int i = 1; i < fNamespaceList.length; i++) {
                    buffer.append(",\"");
                    if (fNamespaceList[i] != null)
                        buffer.append(fNamespaceList[i]);
                    buffer.append("\"");
                }
                break;
            }
            buffer.append(']');
            fDescription = buffer.toString();
        }

        return fDescription;
    }

    /**
     * Get the type of the object, i.e ELEMENT_DECLARATION.
     */
    public short getType() {
        return XSConstants.WILDCARD;
    }

    /**
     * The <code>name</code> of this <code>XSObject</code> depending on the
     * <code>XSObject</code> type.
     */
    public String getName() {
        // Wildcards are anonymous schema components.
        return null;
    }

    /**
     * The namespace URI of this node, or <code>null</code> if it is
     * unspecified.  defines how a namespace URI is attached to schema
     * components.
     */
    public String getNamespace() {
        return null;
    }

    /**
     * Namespace constraint: A constraint type: any, not, list.
     */
    public short getConstraintType() {
        return fType;
    }

    /**
     * Namespace constraint. For <code>constraintType</code>
     * LIST_NSCONSTRAINT, the list contains allowed namespaces. For
     * <code>constraintType</code> NOT_NSCONSTRAINT, the list contains
     * disallowed namespaces.
     */
    public StringList getNsConstraintList() {
        return new StringListImpl(fNamespaceList,
                                  fNamespaceList == null ? 0 : fNamespaceList.length);
    }

    /**
     * {process contents} One of skip, lax or strict. Valid constants values
     * are: PC_SKIP, PC_LAX, PC_STRICT.
     */
    public short getProcessContents() {
        return fProcessContents;
    }

    /**
     * String valid of {process contents}. One of "skip", "lax" or "strict".
     */
    public String getProcessContentsAsString() {
        switch (fProcessContents) {
        case XSWildcardDecl.PC_SKIP:
            return "skip";
        case XSWildcardDecl.PC_LAX:
            return "lax";
        case XSWildcardDecl.PC_STRICT:
            return "strict";
        default:
            return "invalid value";
        }
    }

    /**
     * Optional. Annotation.
     */
    public XSAnnotation getAnnotation() {
        // Returns only the first annotation; use getAnnotations() for all.
        return (fAnnotations != null) ? (XSAnnotation) fAnnotations.item(0) : null;
    }

    /**
     * Optional. Annotations.
     */
    public XSObjectList getAnnotations() {
        return (fAnnotations != null) ? fAnnotations : XSObjectListImpl.EMPTY_LIST;
    }

    /**
     * @see org.apache.xerces.xs.XSObject#getNamespaceItem()
     */
    public XSNamespaceItem getNamespaceItem() {
        return null;
    }

} // class XSWildcardDecl
gpl-2.0
md-5/jdk10
test/jdk/java/awt/Mixing/AWT_Mixing/JScrollBarOverlapping.java
2568
/*
 * Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

import java.awt.Point;
import java.awt.Dimension;
import java.awt.event.AdjustmentEvent;
import java.awt.event.AdjustmentListener;
import javax.swing.*;

/**
 * AWT/Swing overlapping test for {@link javax.swing.JScrollBar } component.
 * <p>See base class for details.
 */
/*
 * @test
 * @key headful
 * @summary Simple Overlapping test for javax.swing.JScrollBar
 * @author sergey.grinev@oracle.com: area=awt.mixing
 * @library /java/awt/patchlib ../../regtesthelpers
 * @modules java.desktop/sun.awt
 *          java.desktop/java.awt.peer
 * @build java.desktop/java.awt.Helper
 * @build Util
 * @run main JScrollBarOverlapping
 */
public class JScrollBarOverlapping extends SimpleOverlappingTestBase {

    public JScrollBarOverlapping() {
        // "false" is forwarded to SimpleOverlappingTestBase's constructor;
        // NOTE(review): its meaning is defined in the base class — confirm there.
        super(false);
    }

    @Override
    protected JComponent getSwingComponent() {
        // The lightweight component under test: a vertical scrollbar.
        JScrollBar ch = new JScrollBar(JScrollBar.VERTICAL);
        ch.setPreferredSize(new Dimension(50, 50));
        ch.setValue(50);
        // The listener flips the base-class flag so the harness can tell the
        // lightweight component actually received the interaction.
        ch.addAdjustmentListener(new AdjustmentListener() {
            public void adjustmentValueChanged(AdjustmentEvent e) {
                wasLWClicked = true;
            }
        });
        // Offset applied by the harness when targeting the component;
        // presumably positions the click on the scrollbar track — TODO confirm
        // against OverlappingTestBase.
        OverlappingTestBase.shift = new Point(20, 16);
        return ch;
    }

    // this strange plumbing stuff is required due to "Standard Test Machinery" in base class
    public static void main(String args[]) throws InterruptedException {
        instance = new JScrollBarOverlapping();
        OverlappingTestBase.doMain(args);
    }
}
gpl-2.0
md-5/jdk10
src/java.desktop/share/classes/java/awt/image/BandCombineOp.java
11471
/* * Copyright (c) 1997, 2005, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.awt.image; import java.awt.GraphicsEnvironment; import java.awt.color.ICC_Profile; import java.awt.geom.Rectangle2D; import java.awt.Rectangle; import java.awt.geom.Point2D; import java.awt.RenderingHints; import sun.awt.image.ImagingLib; import java.util.Arrays; /** * This class performs an arbitrary linear combination of the bands * in a {@code Raster}, using a specified matrix. * <p> * The width of the matrix must be equal to the number of bands in the * source {@code Raster}, optionally plus one. If there is one more * column in the matrix than the number of bands, there is an implied 1 at the * end of the vector of band samples representing a pixel. The height * of the matrix must be equal to the number of bands in the destination. 
* <p> * For example, a 3-banded {@code Raster} might have the following * transformation applied to each pixel in order to invert the second band of * the {@code Raster}. * <pre> * [ 1.0 0.0 0.0 0.0 ] [ b1 ] * [ 0.0 -1.0 0.0 255.0 ] x [ b2 ] * [ 0.0 0.0 1.0 0.0 ] [ b3 ] * [ 1 ] * </pre> * * <p> * Note that the source and destination can be the same object. */ public class BandCombineOp implements RasterOp { float[][] matrix; int nrows = 0; int ncols = 0; RenderingHints hints; /** * Constructs a {@code BandCombineOp} with the specified matrix. * The width of the matrix must be equal to the number of bands in * the source {@code Raster}, optionally plus one. If there is one * more column in the matrix than the number of bands, there is an implied * 1 at the end of the vector of band samples representing a pixel. The * height of the matrix must be equal to the number of bands in the * destination. * <p> * The first subscript is the row index and the second * is the column index. This operation uses none of the currently * defined rendering hints; the {@code RenderingHints} argument can be * null. * * @param matrix The matrix to use for the band combine operation. * @param hints The {@code RenderingHints} object for this operation. * Not currently used so it can be null. */ public BandCombineOp (float[][] matrix, RenderingHints hints) { nrows = matrix.length; ncols = matrix[0].length; this.matrix = new float[nrows][]; for (int i=0; i < nrows; i++) { /* Arrays.copyOf is forgiving of the source array being * too short, but it is also faster than other cloning * methods, so we provide our own protection for short * matrix rows. */ if (ncols > matrix[i].length) { throw new IndexOutOfBoundsException("row "+i+" too short"); } this.matrix[i] = Arrays.copyOf(matrix[i], ncols); } this.hints = hints; } /** * Returns a copy of the linear combination matrix. * * @return The matrix associated with this band combine operation. 
*/ public final float[][] getMatrix() { float[][] ret = new float[nrows][]; for (int i = 0; i < nrows; i++) { ret[i] = Arrays.copyOf(matrix[i], ncols); } return ret; } /** * Transforms the {@code Raster} using the matrix specified in the * constructor. An {@code IllegalArgumentException} may be thrown if * the number of bands in the source or destination is incompatible with * the matrix. See the class comments for more details. * <p> * If the destination is null, it will be created with a number of bands * equalling the number of rows in the matrix. No exception is thrown * if the operation causes a data overflow. * * @param src The {@code Raster} to be filtered. * @param dst The {@code Raster} in which to store the results * of the filter operation. * * @return The filtered {@code Raster}. * * @throws IllegalArgumentException If the number of bands in the * source or destination is incompatible with the matrix. */ public WritableRaster filter(Raster src, WritableRaster dst) { int nBands = src.getNumBands(); if (ncols != nBands && ncols != (nBands+1)) { throw new IllegalArgumentException("Number of columns in the "+ "matrix ("+ncols+ ") must be equal to the number"+ " of bands ([+1]) in src ("+ nBands+")."); } if (dst == null) { dst = createCompatibleDestRaster(src); } else if (nrows != dst.getNumBands()) { throw new IllegalArgumentException("Number of rows in the "+ "matrix ("+nrows+ ") must be equal to the number"+ " of bands ([+1]) in dst ("+ nBands+")."); } if (ImagingLib.filter(this, src, dst) != null) { return dst; } int[] pixel = null; int[] dstPixel = new int[dst.getNumBands()]; float accum; int sminX = src.getMinX(); int sY = src.getMinY(); int dminX = dst.getMinX(); int dY = dst.getMinY(); int sX; int dX; if (ncols == nBands) { for (int y=0; y < src.getHeight(); y++, sY++, dY++) { dX = dminX; sX = sminX; for (int x=0; x < src.getWidth(); x++, sX++, dX++) { pixel = src.getPixel(sX, sY, pixel); for (int r=0; r < nrows; r++) { accum = 0.f; for (int c=0; c < 
ncols; c++) { accum += matrix[r][c]*pixel[c]; } dstPixel[r] = (int) accum; } dst.setPixel(dX, dY, dstPixel); } } } else { // Need to add constant for (int y=0; y < src.getHeight(); y++, sY++, dY++) { dX = dminX; sX = sminX; for (int x=0; x < src.getWidth(); x++, sX++, dX++) { pixel = src.getPixel(sX, sY, pixel); for (int r=0; r < nrows; r++) { accum = 0.f; for (int c=0; c < nBands; c++) { accum += matrix[r][c]*pixel[c]; } dstPixel[r] = (int) (accum+matrix[r][nBands]); } dst.setPixel(dX, dY, dstPixel); } } } return dst; } /** * Returns the bounding box of the transformed destination. Since * this is not a geometric operation, the bounding box is the same for * the source and destination. * An {@code IllegalArgumentException} may be thrown if the number of * bands in the source is incompatible with the matrix. See * the class comments for more details. * * @param src The {@code Raster} to be filtered. * * @return The {@code Rectangle2D} representing the destination * image's bounding box. * * @throws IllegalArgumentException If the number of bands in the source * is incompatible with the matrix. */ public final Rectangle2D getBounds2D (Raster src) { return src.getBounds(); } /** * Creates a zeroed destination {@code Raster} with the correct size * and number of bands. * An {@code IllegalArgumentException} may be thrown if the number of * bands in the source is incompatible with the matrix. See * the class comments for more details. * * @param src The {@code Raster} to be filtered. * * @return The zeroed destination {@code Raster}. 
*/ public WritableRaster createCompatibleDestRaster (Raster src) { int nBands = src.getNumBands(); if ((ncols != nBands) && (ncols != (nBands+1))) { throw new IllegalArgumentException("Number of columns in the "+ "matrix ("+ncols+ ") must be equal to the number"+ " of bands ([+1]) in src ("+ nBands+")."); } if (src.getNumBands() == nrows) { return src.createCompatibleWritableRaster(); } else { throw new IllegalArgumentException("Don't know how to create a "+ " compatible Raster with "+ nrows+" bands."); } } /** * Returns the location of the corresponding destination point given a * point in the source {@code Raster}. If {@code dstPt} is * specified, it is used to hold the return value. * Since this is not a geometric operation, the point returned * is the same as the specified {@code srcPt}. * * @param srcPt The {@code Point2D} that represents the point in * the source {@code Raster} * @param dstPt The {@code Point2D} in which to store the result. * * @return The {@code Point2D} in the destination image that * corresponds to the specified point in the source image. */ public final Point2D getPoint2D (Point2D srcPt, Point2D dstPt) { if (dstPt == null) { dstPt = new Point2D.Float(); } dstPt.setLocation(srcPt.getX(), srcPt.getY()); return dstPt; } /** * Returns the rendering hints for this operation. * * @return The {@code RenderingHints} object associated with this * operation. Returns null if no hints have been set. */ public final RenderingHints getRenderingHints() { return hints; } }
gpl-2.0
bhasudha/voldemort
src/java/voldemort/store/stats/Histogram.java
5108
/*
 * Copyright 2012 LinkedIn, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package voldemort.store.stats;

import java.util.Arrays;

import org.apache.log4j.Logger;

import voldemort.annotations.concurrency.Threadsafe;

/**
 * A class for computing percentiles based on a simple histogram.
 *
 * The histogram starts at 0 and then has uniformly sized buckets. The number of
 * buckets and width of each bucket is specified upon construction. Each bucket
 * in the histogram "counts" the number of values inserted into the histogram
 * that fall into the bucket's range.
 *
 * All interfaces for adding data to the histogram or querying the histogram for
 * quantiles are synchronized to make this object threadsafe.
 *
 * @deprecated Use Tehuti Histograms instead...
 */
@Threadsafe
@Deprecated
public class Histogram {

    private final int nBuckets;
    private final int step;
    // Per-bucket counts; index i covers the half-open range [i*step, (i+1)*step).
    private final int[] buckets;
    private final long upperBound;

    // Number of samples since the last reset.
    private int size;
    // Running total of inserted values since the last reset (used for getAverage).
    private long sum;

    private static final Logger logger = Logger.getLogger(Histogram.class);

    // -1 means "never auto-reset".
    private long resetIntervalMs = -1;
    private long lastResetTimeMs;

    /**
     * Initialize an empty histogram
     *
     * @param nBuckets The number of buckets to use
     * @param step The size of each bucket
     * @param resetIntervalMs Automatically reset the histogram after this many
     *        milliseconds have elapsed since the last reset
     */
    public Histogram(int nBuckets, int step, long resetIntervalMs) {
        this(nBuckets, step);
        this.resetIntervalMs = resetIntervalMs;
        this.lastResetTimeMs = System.currentTimeMillis();
    }

    /**
     * Initialize an empty histogram
     *
     * @param nBuckets The number of buckets to use
     * @param step The size (width) of each bucket
     */
    public Histogram(int nBuckets, int step) {
        this.nBuckets = nBuckets;
        this.step = step;
        this.upperBound = step * nBuckets;
        this.buckets = new int[nBuckets];
        reset();

        if(logger.isDebugEnabled()) {
            logger.debug("Constructed a histogram with " + nBuckets + " buckets.");
        }
    }

    /**
     * Reset the histogram back to empty (set all values to 0)
     */
    public synchronized void reset() {
        Arrays.fill(buckets, 0);
        size = 0;
        sum = 0;
        this.lastResetTimeMs = System.currentTimeMillis();
    }

    /**
     * Insert a value into the right bucket of the histogram. If the value is
     * larger than any bound, insert into the last bucket. If the value is less
     * than zero, then ignore it.
     *
     * @param data The value to insert into the histogram
     */
    public synchronized void insert(long data) {
        resetIfNeeded();
        long index = 0;
        if(data >= this.upperBound) {
            // Clamp overflowing values into the last bucket.
            index = nBuckets - 1;
        } else if(data < 0) {
            logger.error(data + " can't be bucketed because it is negative!");
            return;
        } else {
            index = data / step;
        }
        if(index < 0 || index >= nBuckets) {
            // This should be dead code. Defending against code changes in
            // future.
            logger.error(data + " can't be bucketed because index is not in range [0,nBuckets).");
            return;
        }
        buckets[(int) index]++;
        sum += data;
        size++;
    }

    /**
     * Find the a value <em>n</em> such that the percentile falls within [
     * <em>n</em>, <em>n + step</em>). This method does a <em>LINEAR</em> probe
     * of the histogram. I.e., this method is O(nBuckets).
     *
     * @param quantile The percentile to find
     * @return Lower bound associated with the percentile, or 0 if the
     *         histogram is empty
     */
    public synchronized long getQuantile(double quantile) {
        resetIfNeeded();
        // Fixed: previously an empty histogram computed 0/0.0 == NaN and
        // relied on "NaN >= quantile" being false to fall through to the
        // final "return 0". Make the empty case explicit (same result).
        if(size == 0) {
            return 0;
        }
        int total = 0;
        for(int i = 0; i < nBuckets; i++) {
            total += buckets[i];
            double currQuantile = ((double) total) / ((double) size);
            if(currQuantile >= quantile) {
                return i * step;
            }
        }
        return 0;
    }

    /**
     * Obtain the average of the data in the histogram
     *
     * Note: Caller is responsible for making sure 'sum' does not overflow
     * within the reset interval
     *
     * @return the average over the current samples
     */
    public synchronized double getAverage() {
        if(size == 0)
            return 0.0;
        return (sum * 1.0) / size;
    }

    // Clears the histogram when the configured reset interval has elapsed.
    // Only ever called from synchronized methods, so no extra locking needed.
    private void resetIfNeeded() {
        if(resetIntervalMs > -1) {
            if((System.currentTimeMillis() - lastResetTimeMs) >= this.resetIntervalMs) {
                this.reset();
            }
        }
    }
}
apache-2.0
asedunov/intellij-community
platform/platform-impl/src/com/intellij/openapi/diff/impl/incrementalMerge/Change.java
8471
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.diff.impl.incrementalMerge;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.DiffBundle;
import com.intellij.openapi.diff.impl.highlighting.FragmentSide;
import com.intellij.openapi.diff.impl.util.DocumentUtil;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Comparator;

/**
 * Represents a change in diff or merge view.
 * A change has two {@link com.intellij.openapi.diff.impl.incrementalMerge.Change.SimpleChangeSide sides} (left and right),
 * each of them representing the text which has been changed and the original text shown in the diff/merge.
 * Change can be applied, then its sides would be equal.
 */
public abstract class Change {
  private static final Logger LOG = Logger.getInstance(Change.class);

  public abstract ChangeSide getChangeSide(FragmentSide side);

  public abstract ChangeType getType();

  public abstract ChangeList getChangeList();

  protected abstract void removeFromList();

  /**
   * Called when a change has been applied.
   */
  public abstract void onApplied();

  /**
   * Called when a change has been removed from the list.
   */
  public abstract void onRemovedFromList();

  public abstract boolean isValid();

  /**
   * Apply the change, i.e. change the "Merge result" document and update range markers, highlighting, gutters, etc.
   * @param original The source side of the change, which is being applied.
   */
  private void apply(@NotNull FragmentSide original) {
    FragmentSide targetSide = original.otherSide();
    RangeMarker originalRangeMarker = getRangeMarker(original);
    RangeMarker rangeMarker = getRangeMarker(targetSide);

    // Mutate the target document first; only then (and only if the change is
    // still valid afterwards) move the target marker to the freshly written range.
    TextRange textRange = modifyDocument(getProject(), originalRangeMarker, rangeMarker);
    if (textRange != null && isValid()) {
      updateTargetRangeMarker(targetSide, textRange);
    }

    onApplied();
  }

  /**
   * Updates the target marker of a change after the change has been applied
   * to allow highlighting of the document modification which has been performed.
   * @param targetFragmentSide The side to be changed.
   * @param updatedTextRange   New text range to be applied to the side.
   */
  protected final void updateTargetRangeMarker(@NotNull FragmentSide targetFragmentSide, @NotNull TextRange updatedTextRange) {
    ChangeSide targetSide = getChangeSide(targetFragmentSide);
    DiffRangeMarker originalRange = targetSide.getRange();
    DiffRangeMarker updatedRange = new DiffRangeMarker(originalRange.getDocument(), updatedTextRange, null);
    changeSide(targetSide, updatedRange);
  }

  /**
   * Substitutes the specified side of this change to a new side which contains the given range.
   * @param sideToChange The side to be changed.
   * @param newRange     New text range of the new side.
   */
  protected abstract void changeSide(ChangeSide sideToChange, DiffRangeMarker newRange);

  /**
   * Applies the text from the original marker to the target marker.
   * @return the resulting TextRange from the target document, or null if the document if not writable.
   */
  @Nullable
  private static TextRange modifyDocument(@Nullable Project project, @NotNull RangeMarker original, @NotNull RangeMarker target) {
    Document document = target.getDocument();
    if (project != null && !ReadonlyStatusHandler.ensureDocumentWritable(project, document)) {
      return null;
    }
    if (DocumentUtil.isEmpty(original)) {
      // Original side is empty: applying it means deleting the target range.
      // NOTE(review): the marker offsets are presumably re-adjusted by the
      // deletion, so the insert of the (empty) text below is a no-op — confirm
      // against RangeMarker semantics.
      int offset = target.getStartOffset();
      document.deleteString(offset, target.getEndOffset());
    }
    CharSequence text = original.getDocument().getImmutableCharSequence().subSequence(original.getStartOffset(), original.getEndOffset());
    int startOffset = target.getStartOffset();
    if (DocumentUtil.isEmpty(target)) {
      document.insertString(startOffset, text);
    }
    else {
      document.replaceString(startOffset, target.getEndOffset(), text);
    }
    return new TextRange(startOffset, startOffset + text.length());
  }

  public void addMarkup(Editor[] editors) {
    LOG.assertTrue(editors.length == 2);
    highlight(editors, FragmentSide.SIDE1);
    highlight(editors, FragmentSide.SIDE2);
  }

  private void highlight(Editor[] editors, FragmentSide side) {
    getHighlighterHolder(side).highlight(getChangeSide(side), editors[side.getIndex()], getType());
  }

  private void updateHighlighter(FragmentSide side) {
    getHighlighterHolder(side).updateHighlighter(getChangeSide(side), getType());
  }

  @Nullable
  private Project getProject() {
    return getChangeList().getProject();
  }

  @NotNull
  private ChangeHighlighterHolder getHighlighterHolder(FragmentSide side) {
    return getChangeSide(side).getHighlighterHolder();
  }

  @NotNull
  private RangeMarker getRangeMarker(FragmentSide side) {
    ChangeSide changeSide = getChangeSide(side);
    LOG.assertTrue(changeSide != null);
    return changeSide.getRange();
  }

  /**
   * Applies the given change inside a write action and an undoable command,
   * as required for document modifications.
   */
  public static void apply(final Change change, final FragmentSide fromSide) {
    ApplicationManager.getApplication().runWriteAction(() -> CommandProcessor.getInstance().executeCommand(change.getProject(), () -> doApply(change, fromSide), null, DiffBundle.message("save.merge.result.command.name")));
  }

  public static void doApply(final Change change, final FragmentSide fromSide) {
    change.apply(fromSide);
  }

  public void updateMarkup() {
    updateHighlighter(FragmentSide.SIDE1);
    updateHighlighter(FragmentSide.SIDE2);
  }

  // Returns whether actions may target the side opposite to fromSide:
  // the target document must be writable and its editor must not be a viewer.
  public boolean canHasActions(FragmentSide fromSide) {
    FragmentSide targetSide = fromSide.otherSide();
    Document targetDocument = getChangeList().getDocument(targetSide);
    if (!targetDocument.isWritable()) return false;
    Editor targetEditor = getHighlighterHolder(targetSide).getEditor();
    return !targetEditor.isViewer();
  }

  /**
   * Orders changes by their range start offsets, first on the main side,
   * breaking ties on the other side.
   */
  public static class ChangeOrder implements Comparator<Change> {
    private final FragmentSide myMainSide;

    public ChangeOrder(FragmentSide mainSide) {
      myMainSide = mainSide;
    }

    @Override
    public int compare(@NotNull Change change, @NotNull Change change1) {
      int result1 = compareSide(change, change1, myMainSide);
      if (result1 != 0) return result1;
      return compareSide(change, change1, myMainSide.otherSide());
    }

    private static int compareSide(Change change, Change change1, FragmentSide side) {
      return RangeMarker.BY_START_OFFSET.compare(change.getRangeMarker(side), change1.getRangeMarker(side));
    }
  }

  /** A side of a change: a fragment side plus its text range and highlighting state. */
  protected static class SimpleChangeSide extends ChangeSide {
    private final FragmentSide mySide;
    private final DiffRangeMarker myRange;
    private final ChangeHighlighterHolder myHighlighterHolder;

    public SimpleChangeSide(FragmentSide side, DiffRangeMarker rangeMarker) {
      mySide = side;
      myRange = rangeMarker;
      myHighlighterHolder = new ChangeHighlighterHolder();
    }

    // Re-wraps an existing side with a new range, keeping its highlighter state.
    public SimpleChangeSide(@NotNull ChangeSide originalSide, @NotNull DiffRangeMarker newRange) {
      mySide = ((SimpleChangeSide)originalSide).getFragmentSide();
      myRange = newRange;
      myHighlighterHolder = originalSide.getHighlighterHolder();
    }

    @NotNull
    public FragmentSide getFragmentSide() {
      return mySide;
    }

    @Override
    @NotNull
    public DiffRangeMarker getRange() {
      return myRange;
    }

    @NotNull
    @Override
    public ChangeHighlighterHolder getHighlighterHolder() {
      return myHighlighterHolder;
    }
  }
}
apache-2.0
Stratehm/openhab2
addons/binding/org.openhab.binding.toon/src/main/java/org/openhab/binding/toon/ToonBindingConstants.java
3026
/** * Copyright (c) 2010-2017 by the respective copyright holders. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.binding.toon; import java.util.Set; import org.eclipse.smarthome.core.thing.ThingTypeUID; import com.google.common.collect.ImmutableSet; /** * The {@link ToonBindingConstants} class defines common constants, which are * used across the whole binding. * * @author Jorg de Jong - Initial contribution */ public class ToonBindingConstants { public static final String BINDING_ID = "toon"; // List of all Thing Type UIDs public static final ThingTypeUID APIBRIDGE_THING_TYPE = new ThingTypeUID(BINDING_ID, "toonapi"); public static final ThingTypeUID MAIN_THING_TYPE = new ThingTypeUID(BINDING_ID, "main"); public static final ThingTypeUID PLUG_THING_TYPE = new ThingTypeUID(BINDING_ID, "plug"); public static final ThingTypeUID SMOKE_THING_TYPE = new ThingTypeUID(BINDING_ID, "smoke"); // List of all Channel ids public static final String CHANNEL_TEMPERATURE = "Temperature"; public static final String CHANNEL_SETPOINT = "Setpoint"; public static final String CHANNEL_SETPOINT_MODE = "SetpointMode"; public static final String CHANNEL_MODULATION_LEVEL = "ModulationLevel"; public static final String CHANNEL_HEATING_SWITCH = "Heating"; public static final String CHANNEL_TAPWATER_SWITCH = "Tapwater"; public static final String CHANNEL_PREHEAT_SWITCH = "Preheat"; // gasUsage public static final String CHANNEL_GAS_METER_READING = "GasMeterReading"; // powerUsage public static final String CHANNEL_POWER_METER_READING = "PowerMeterReading"; public static final String CHANNEL_POWER_METER_READING_LOW = "PowerMeterReadingLow"; public static final String CHANNEL_POWER_CONSUMPTION = "PowerConsumption"; // plug channels public static final String 
CHANNEL_SWITCH_BINARY = "SwitchBinary"; // main unit property names public static final String PROPERTY_AGREEMENT_ID = "agreementId"; public static final String PROPERTY_COMMON_NAME = "toon_displayCommonName"; public static final String PROPERTY_ADDRESS = "toon_address"; // plug property names public static final String PROPERTY_DEV_UUID = "devUUID"; public static final String PROPERTY_DEV_TYPE = "devType"; // List of all supported physical devices and modules public static final Set<ThingTypeUID> SUPPORTED_DEVICE_THING_TYPES_UIDS = ImmutableSet.of(MAIN_THING_TYPE, SMOKE_THING_TYPE, PLUG_THING_TYPE); // List of all adressable things in OH = SUPPORTED_DEVICE_THING_TYPES_UIDS + the virtual bridge public static final Set<ThingTypeUID> SUPPORTED_THING_TYPES_UIDS = ImmutableSet.of(MAIN_THING_TYPE, SMOKE_THING_TYPE, PLUG_THING_TYPE, APIBRIDGE_THING_TYPE); }
epl-1.0
siosio/intellij-community
plugins/kotlin/j2k/old/tests/testData/fileOrElement/assignmentExpression/assignment.java
17
//statement x = 2
apache-2.0
twalpole/selenium
java/client/src/com/thoughtworks/selenium/SeleneseTestNgHelper.java
4644
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package com.thoughtworks.selenium;

import org.testng.ITestContext;
import org.testng.TestRunner;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.internal.IResultListener;

import java.io.File;
import java.lang.reflect.Method;

/**
 * TestNG-flavoured companion to {@link SeleneseTestBase}: wires Selenium
 * start/stop into the TestNG lifecycle annotations and re-declares the
 * JUnit-convention static assert helpers of the superclass with the
 * (actual, expected) argument order flipped back to (expected, actual).
 */
public class SeleneseTestNgHelper extends SeleneseTestBase {
  // Shared across test classes so @BeforeClass can hand the @BeforeTest-created
  // session to each class's inherited "selenium" field.
  private static Selenium staticSelenium;

  @BeforeTest
  @Override
  @Parameters({"selenium.url", "selenium.browser"})
  public void setUp(@Optional String url, @Optional String browserString)
      throws Exception {
    if (browserString == null) {
      // Fall back to the superclass's runtime-detected browser string.
      browserString = runtimeBrowserString();
    }
    super.setUp(url, browserString);
    staticSelenium = selenium;
  }

  @BeforeClass
  @Parameters({"selenium.restartSession"})
  public void getSelenium(@Optional("false") boolean restartSession) {
    selenium = staticSelenium;
    if (restartSession) {
      // Cycle the session so each class starts from a clean browser state.
      selenium.stop();
      selenium.start();
    }
  }

  @BeforeMethod
  public void setTestContext(Method method) {
    // Label server-side logs with "ClassName.methodName" of the running test.
    selenium.setContext(
        method.getDeclaringClass().getSimpleName() + "." + method.getName());
  }

  @BeforeSuite
  @Parameters({"selenium.host", "selenium.port"})
  public void attachScreenshotListener(@Optional("localhost") String host,
      @Optional("4444") String port, ITestContext context) {
    // Screenshots are only attempted against a local server.
    if (!"localhost".equals(host)) {
      return;
    }
    Selenium screenshotTaker = new DefaultSelenium(host, Integer.parseInt(port), "", "");
    TestRunner tr = (TestRunner) context;
    File outputDirectory = new File(context.getOutputDirectory());
    tr.addListener((IResultListener) new ScreenshotListener(outputDirectory,
        screenshotTaker));
  }

  @AfterMethod
  @Override
  public void checkForVerificationErrors() {
    super.checkForVerificationErrors();
  }

  @AfterMethod(alwaysRun = true)
  public void selectDefaultWindow() {
    // Re-select the default window so a test that switched windows cannot
    // leave the next test stranded.
    if (selenium != null) {
      selenium.selectWindow("null");
    }
  }

  @AfterTest(alwaysRun = true)
  @Override
  public void tearDown() throws Exception {
    super.tearDown();
  }

  // @Override static method of super class (which assumes JUnit conventions)
  public static void assertEquals(Object actual, Object expected) {
    SeleneseTestBase.assertEquals(expected, actual);
  }

  // @Override static method of super class (which assumes JUnit conventions)
  public static void assertEquals(String actual, String expected) {
    SeleneseTestBase.assertEquals(expected, actual);
  }

  // @Override static method of super class (which assumes JUnit conventions)
  public static void assertEquals(String actual, String[] expected) {
    SeleneseTestBase.assertEquals(expected, actual);
  }

  // @Override static method of super class (which assumes JUnit conventions)
  public static void assertEquals(String[] actual, String[] expected) {
    SeleneseTestBase.assertEquals(expected, actual);
  }

  // @Override static method of super class (which assumes JUnit conventions)
  public static boolean seleniumEquals(Object actual, Object expected) {
    return SeleneseTestBase.seleniumEquals(expected, actual);
  }

  // @Override static method of super class (which assumes JUnit conventions)
  public static boolean seleniumEquals(String actual, String expected) {
    return SeleneseTestBase.seleniumEquals(expected, actual);
  }

  @Override
  public void verifyEquals(Object actual, Object expected) {
    super.verifyEquals(expected, actual);
  }

  @Override
  public void verifyEquals(String[] actual, String[] expected) {
    super.verifyEquals(expected, actual);
  }
}
apache-2.0
jinlongliu/onos
core/api/src/main/java/org/onosproject/net/topology/TopologyStoreDelegate.java
822
/*
 * Copyright 2014 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.net.topology;

import org.onosproject.store.StoreDelegate;

/**
 * Topology store delegate abstraction.
 *
 * <p>Marker interface that fixes the generic event type of
 * {@link StoreDelegate} to {@link TopologyEvent}; it adds no members of
 * its own.
 */
public interface TopologyStoreDelegate extends StoreDelegate<TopologyEvent> {
}
apache-2.0
maxkondr/onos-porta
core/store/serializers/src/main/java/org/onosproject/store/serializers/KryoSerializer.java
2299
/*
 * Copyright 2014 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.store.serializers;

import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;

import org.onlab.util.KryoNamespace;

import com.google.common.base.MoreObjects;

/**
 * {@link StoreSerializer} implementation that delegates every encode/decode
 * call to a shared {@link KryoNamespace} pool.
 */
public class KryoSerializer implements StoreSerializer {

    protected KryoNamespace serializerPool;

    /**
     * Creates a serializer backed by the common Kryo pool.
     */
    public KryoSerializer() {
        setupKryoPool();
    }

    /**
     * Sets up the common serializers pool; subclasses may override to
     * register additional types.
     */
    protected void setupKryoPool() {
        serializerPool = KryoNamespace.newBuilder()
                .register(KryoNamespaces.API)
                .nextId(KryoNamespaces.BEGIN_USER_CUSTOM_ID)
                .build();
    }

    @Override
    public byte[] encode(final Object obj) {
        return serializerPool.serialize(obj);
    }

    @Override
    public void encode(Object obj, ByteBuffer buffer) {
        serializerPool.serialize(obj, buffer);
    }

    @Override
    public void encode(Object obj, OutputStream stream) {
        serializerPool.serialize(obj, stream);
    }

    @Override
    public <T> T decode(final byte[] bytes) {
        // A null payload decodes to null rather than failing inside Kryo.
        return bytes == null ? null : serializerPool.<T>deserialize(bytes);
    }

    @Override
    public <T> T decode(ByteBuffer buffer) {
        return serializerPool.deserialize(buffer);
    }

    @Override
    public <T> T decode(InputStream stream) {
        return serializerPool.deserialize(stream);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("serializerPool", serializerPool)
                .toString();
    }
}
apache-2.0
jpike88/crosswalk
runtime/android/core_internal/src/org/xwalk/core/internal/XWalkNotificationServiceImpl.java
10352
// Copyright (c) 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.xwalk.core.internal;

import java.util.HashMap;

import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.util.AndroidRuntimeException;
import android.util.Log;

import org.xwalk.core.internal.XWalkContentsClientBridge;
import org.xwalk.core.internal.XWalkNotificationService;
import org.xwalk.core.internal.XWalkViewInternal;

/**
 * Web Notification service for an XWalkView: shows/cancels Android status-bar
 * notifications on behalf of web content and reports show/click/close events
 * back through {@link XWalkContentsClientBridge}.
 *
 * @hide
 */
public class XWalkNotificationServiceImpl implements XWalkNotificationService {
    private static final String TAG = "XWalkNotificationServiceImpl";

    // Intent action suffixes appended to the host package name, and the
    // intent extra / category naming used to route click/close events back
    // to the right notification.
    private static final String XWALK_ACTION_CLICK_NOTIFICATION_SUFFIX = ".notification.click";
    private static final String XWALK_ACTION_CLOSE_NOTIFICATION_SUFFIX = ".notification.close";
    private static final String XWALK_INTENT_EXTRA_KEY_NOTIFICATION_ID = "xwalk.NOTIFICATION_ID";
    private static final String XWALK_INTENT_CATEGORY_NOTIFICATION_PREFIX = "notification_";

    // Bookkeeping for one live web notification. When a "replace id" (web
    // notification tag) is reused, the same builder is reused and
    // mMessageNum counts how many messages were collapsed into it.
    private class WebNotification {
        WebNotification() {
            mMessageNum = 1;
        }

        public Integer mNotificationId;
        public String mReplaceId;
        public Notification.Builder mBuilder;
        public Integer mMessageNum;
    }

    private Context mContext;
    private XWalkContentsClientBridge mBridge;
    private XWalkViewInternal mView;
    private NotificationManager mNotificationManager;
    private BroadcastReceiver mNotificationCloseReceiver;
    // Live notifications indexed by Android notification id and, when the web
    // page supplied one, by its replace id.
    private HashMap<Integer, WebNotification> mExistNotificationIds;
    private HashMap<String, WebNotification> mExistReplaceIds;

    public XWalkNotificationServiceImpl(Context context, XWalkViewInternal view) {
        mContext = context;
        mView = view;
        mNotificationManager =
                (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
        // "Close" broadcasts are funnelled back into the view as a new intent
        // so maybeHandleIntent() can dispatch them.
        mNotificationCloseReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                mView.onNewIntent(intent);
            }
        };
        mExistNotificationIds = new HashMap<Integer, WebNotification>();
        mExistReplaceIds = new HashMap<String, WebNotification>();
    }

    private static String getCategoryFromNotificationId(int id) {
        return XWALK_INTENT_CATEGORY_NOTIFICATION_PREFIX + id;
    }

    @Override
    public void setBridge(XWalkContentsClientBridge bridge) {
        mBridge = bridge;
    }

    @Override
    public void shutdown() {
        // Receiver is only registered while notifications exist (see
        // notificationChanged()), so only unregister in that case.
        if (!mExistNotificationIds.isEmpty()) {
            unregisterReceiver();
        }
        mBridge = null;
    }

    /**
     * Dispatches a click/close intent produced by this service.
     * Returns true when the intent was one of ours and was handled.
     */
    @Override
    public boolean maybeHandleIntent(Intent intent) {
        if (intent.getAction() == null) return false;
        // -1 is the getIntExtra default, i.e. the extra was absent.
        int notificationId = intent.getIntExtra(XWALK_INTENT_EXTRA_KEY_NOTIFICATION_ID, -1);
        if (notificationId <= 0) return false;
        if (intent.getAction().equals(
                mView.getActivity().getPackageName() + XWALK_ACTION_CLOSE_NOTIFICATION_SUFFIX)) {
            onNotificationClose(notificationId, true);
            return true;
        } else if (intent.getAction().equals(
                mView.getActivity().getPackageName() + XWALK_ACTION_CLICK_NOTIFICATION_SUFFIX)) {
            onNotificationClick(notificationId);
            return true;
        }
        return false;
    }

    /**
     * Scales the supplied bitmap down to the platform's large-icon size,
     * preserving aspect ratio. Icons already small enough are returned as-is.
     */
    public Bitmap getNotificationIcon(Bitmap icon) {
        if (icon == null) return null;
        int originalWidth = icon.getWidth();
        int originalHeight = icon.getHeight();
        if (originalWidth == 0 || originalHeight == 0) {
            return icon;
        }
        int targetWidth = mContext.getResources().getDimensionPixelSize(
                android.R.dimen.notification_large_icon_width);
        int targetHeight = mContext.getResources().getDimensionPixelSize(
                android.R.dimen.notification_large_icon_height);
        if (originalWidth > targetWidth && originalHeight > targetHeight) {
            // Shrink the longer-proportioned dimension so the aspect ratio holds.
            if (originalWidth * targetHeight > originalHeight * targetWidth) {
                targetHeight = originalHeight * targetWidth / originalWidth;
            } else {
                targetWidth = originalWidth * targetHeight / originalHeight;
            }
        }
        return Bitmap.createScaledBitmap(icon, targetWidth, targetHeight, true);
    }

    @Override
    @SuppressWarnings("deprecation")
    public void showNotification(String title, String message, String replaceId,
            Bitmap icon, int notificationId) {
        Notification.Builder builder;
        if (!replaceId.isEmpty() && mExistReplaceIds.containsKey(replaceId)) {
            // Same web tag: reuse the existing notification slot and bump its
            // collapsed-message counter.
            WebNotification webNotification = mExistReplaceIds.get(replaceId);
            notificationId = webNotification.mNotificationId;
            builder = webNotification.mBuilder;
            builder.setNumber(++webNotification.mMessageNum);
        } else {
            builder = new Notification.Builder(mContext.getApplicationContext())
                    .setAutoCancel(true);
            WebNotification webNotification = new WebNotification();
            webNotification.mNotificationId = notificationId;
            webNotification.mReplaceId = replaceId;
            webNotification.mBuilder = builder;
            mExistNotificationIds.put(notificationId, webNotification);
            if (!replaceId.isEmpty()) {
                mExistReplaceIds.put(replaceId, webNotification);
            }
        }
        builder.setContentTitle(title);
        builder.setContentText(message);
        // Fall back to the generic Android app icon when the host app has none.
        int iconRes = mContext.getApplicationInfo().icon;
        if (iconRes == 0) {
            iconRes = android.R.drawable.sym_def_app_icon;
        }
        builder.setSmallIcon(iconRes);
        Bitmap bigIcon = getNotificationIcon(icon);
        if (bigIcon != null) builder.setLargeIcon(bigIcon);
        Context activity = mView.getActivity();
        String category = getCategoryFromNotificationId(notificationId);
        // Click re-enters the hosting activity; close is broadcast to our
        // receiver. Both carry the notification id and a per-id category so
        // PendingIntents for different notifications stay distinct.
        Intent clickIntent = new Intent(activity, activity.getClass())
                .setAction(activity.getPackageName() + XWALK_ACTION_CLICK_NOTIFICATION_SUFFIX)
                .putExtra(XWALK_INTENT_EXTRA_KEY_NOTIFICATION_ID, notificationId)
                .setFlags(Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY |
                        Intent.FLAG_ACTIVITY_SINGLE_TOP)
                .addCategory(category);
        Intent closeIntent =
                new Intent(activity.getPackageName() + XWALK_ACTION_CLOSE_NOTIFICATION_SUFFIX)
                        .putExtra(XWALK_INTENT_EXTRA_KEY_NOTIFICATION_ID, notificationId)
                        .addCategory(category);
        builder.setContentIntent(PendingIntent.getActivity(
                activity, 0, clickIntent, PendingIntent.FLAG_UPDATE_CURRENT));
        builder.setDeleteIntent(PendingIntent.getBroadcast(
                activity, 0, closeIntent, PendingIntent.FLAG_UPDATE_CURRENT));
        // build() exists from JELLY_BEAN; older platforms use the deprecated
        // getNotification().
        doShowNotification(notificationId,
                VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN ?
                        builder.build() : builder.getNotification());
        notificationChanged();
        onNotificationShown(notificationId);
    }

    @Override
    public void cancelNotification(int notificationId) {
        mNotificationManager.cancel(notificationId);
        // byUser=false: cancellation came from web content, not the user.
        onNotificationClose(notificationId, false);
    }

    public void doShowNotification(int notificationId, Notification notification) {
        mNotificationManager.notify(notificationId, notification);
    }

    public void onNotificationShown(int notificationId) {
        WebNotification webNotification = mExistNotificationIds.get(notificationId);
        if (webNotification == null) {
            return;
        }
        if (mBridge != null) {
            mBridge.notificationDisplayed(notificationId);
        }
    }

    public void onNotificationClick(int notificationId) {
        WebNotification webNotification = mExistNotificationIds.get(notificationId);
        if (webNotification == null) {
            return;
        }
        mExistNotificationIds.remove(notificationId);
        mExistReplaceIds.remove(webNotification.mReplaceId);
        notificationChanged();
        if (mBridge != null) {
            mBridge.notificationClicked(notificationId);
        }
    }

    public void onNotificationClose(int notificationId, boolean byUser) {
        WebNotification webNotification = mExistNotificationIds.get(notificationId);
        if (webNotification == null) {
            return;
        }
        mExistNotificationIds.remove(notificationId);
        mExistReplaceIds.remove(webNotification.mReplaceId);
        notificationChanged();
        if (mBridge != null) {
            mBridge.notificationClosed(notificationId, byUser);
        }
    }

    // Keeps the close-broadcast receiver registered exactly while at least
    // one notification is alive, with its filter covering all live ids.
    private void notificationChanged() {
        if (mExistNotificationIds.isEmpty()) {
            Log.i(TAG, "notifications are all cleared," +
                    "unregister broadcast receiver for close pending intent");
            unregisterReceiver();
        } else {
            registerReceiver();
        }
    }

    private void registerReceiver() {
        IntentFilter filter = new IntentFilter(
                mView.getActivity().getPackageName() + XWALK_ACTION_CLOSE_NOTIFICATION_SUFFIX);
        for (Integer id : mExistNotificationIds.keySet()) {
            filter.addCategory(getCategoryFromNotificationId(id));
        }
        try {
            mView.getActivity().registerReceiver(mNotificationCloseReceiver, filter);
        } catch (AndroidRuntimeException e) {
            //FIXME(wang16): The exception will happen when there are multiple xwalkviews in one activity.
            //               Remove it after notification service supports multi-views.
            Log.w(TAG, e.getLocalizedMessage());
        }
    }

    private void unregisterReceiver() {
        mView.getActivity().unregisterReceiver(mNotificationCloseReceiver);
    }
}
bsd-3-clause
rscrimojr/incubator-trafficcontrol
traffic_router/core/src/main/java/com/comcast/cdn/traffic_control/traffic_router/core/util/ResourceUrl.java
692
/*
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.comcast.cdn.traffic_control.traffic_router.core.util;

/**
 * Supplies resource URLs one at a time; each call yields the next URL
 * in the implementation's sequence.
 */
public interface ResourceUrl {
    /**
     * Returns the next URL.
     */
    String nextUrl();
}
apache-2.0
hurricup/intellij-community
platform/lang-api/src/com/intellij/psi/codeStyle/extractor/processor/GenProcessor.java
2547
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.codeStyle.extractor.processor; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.Project; import com.intellij.psi.PsiFile; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.extractor.Utils; import com.intellij.psi.codeStyle.extractor.differ.Differ; import com.intellij.psi.codeStyle.extractor.differ.LangCodeStyleExtractor; import com.intellij.psi.codeStyle.extractor.values.Gens; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; /** * @author Roman.Shein * @since 29.07.2015. 
*/ public class GenProcessor extends CodeStyleDeriveProcessor { private static DateFormat formatter = new SimpleDateFormat("mm:ss"); public GenProcessor(LangCodeStyleExtractor langExtractor) { super(langExtractor); } @Override public Gens runWithProgress(Project project, CodeStyleSettings settings, PsiFile file, ProgressIndicator indicator) { final Gens origGens = new Gens(getFormattingValues(settings, file.getLanguage())); final Gens forSelection = origGens.copy(); final Differ differ = myLangExtractor.getDiffer(project, file, settings); forSelection.dropToInitial(); Utils.resetRandom(); long startTime = System.nanoTime(); Utils.adjustValuesGA(forSelection, differ, indicator); reportResult("GA", forSelection, differ, startTime, file.getName()); startTime = System.nanoTime(); Utils.adjustValuesMin(forSelection, differ, indicator); reportResult("MIN", forSelection, differ, startTime, file.getName()); return forSelection; } private void reportResult(String label, Gens gens, Differ differ, long startTime, String fileName) { Date date = new Date((System.nanoTime() - startTime) / 1000000); System.out.println(fileName + ": " + label + " range:" + differ.getDifference(gens) + " Execution Time: " + formatter.format(date)); } }
apache-2.0
jknguyen/josephknguyen-selenium
java/client/src/org/openqa/selenium/WebDriverException.java
3504
/*
Copyright 2007-2009 Selenium committers

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
 */

package org.openqa.selenium;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;

import org.openqa.selenium.internal.BuildInfo;

/**
 * Base runtime exception for WebDriver. Augments the plain message with a
 * support URL (when a subclass provides one), build information, host/system
 * information, and any extra key/value info attached via {@link #addInfo}.
 */
public class WebDriverException extends RuntimeException {

  public static final String SESSION_ID = "Session ID";
  public static final String DRIVER_INFO = "Driver info";

  // Extra diagnostic key/value pairs appended to the message.
  private Map<String, String> extraInfo = new HashMap<String, String>();

  public WebDriverException() {
    super();
  }

  public WebDriverException(String message) {
    super(message);
  }

  public WebDriverException(Throwable cause) {
    super(cause);
  }

  public WebDriverException(String message, Throwable cause) {
    super(message, cause);
  }

  @Override
  public String getMessage() {
    return createMessage(super.getMessage());
  }

  // Assembles the full multi-line message from the original message plus
  // support URL, build, system and additional information.
  private String createMessage(String originalMessageString) {
    String supportMessage = getSupportUrl() == null ?
        "" : "For documentation on this error, please visit: " + getSupportUrl() + "\n";

    return (originalMessageString == null ? "" : originalMessageString + "\n")
        + supportMessage
        + getBuildInformation() + "\n"
        + getSystemInformation()
        + getAdditionalInformation();
  }

  /**
   * Returns a one-line description of the local host and JVM/OS properties.
   * Host name/address lookup is best-effort; "N/A" is reported on failure.
   */
  public String getSystemInformation() {
    String host = "N/A";
    String ip = "N/A";

    try {
      host = InetAddress.getLocalHost().getHostName();
      ip = InetAddress.getLocalHost().getHostAddress();
    } catch (UnknownHostException ignored) {
      // Deliberately ignored: diagnostics must not mask the real exception.
    }

    return String.format(
        "System info: host: '%s', ip: '%s', os.name: '%s', os.arch: '%s', os.version: '%s', java.version: '%s'",
        host,
        ip,
        System.getProperty("os.name"),
        System.getProperty("os.arch"),
        System.getProperty("os.version"),
        System.getProperty("java.version"));
  }

  /**
   * Subclasses may return a documentation URL for this error; null means none.
   */
  public String getSupportUrl() {
    return null;
  }

  public BuildInfo getBuildInformation() {
    return new BuildInfo();
  }

  /**
   * Scans a stack trace for class names ending in "Driver" and returns the
   * simple name of the last (deepest-listed) match, or "unknown".
   */
  public static String getDriverName(StackTraceElement[] stackTraceElements) {
    String driverName = "unknown";
    for (StackTraceElement e : stackTraceElements) {
      if (e.getClassName().endsWith("Driver")) {
        String[] bits = e.getClassName().split("\\.");
        driverName = bits[bits.length - 1];
      }
    }
    return driverName;
  }

  public void addInfo(String key, String value) {
    extraInfo.put(key, value);
  }

  /**
   * Renders the extra info map, defaulting DRIVER_INFO from the stack trace
   * when the caller did not supply it. Values that already start with their
   * key are emitted verbatim to avoid "key: key..." duplication.
   */
  public String getAdditionalInformation() {
    if (!extraInfo.containsKey(DRIVER_INFO)) {
      extraInfo.put(DRIVER_INFO, "driver.version: " + getDriverName(getStackTrace()));
    }

    // StringBuilder instead of repeated String concatenation in the loop.
    StringBuilder result = new StringBuilder();
    for (Map.Entry<String, String> entry : extraInfo.entrySet()) {
      if (entry.getValue() != null && entry.getValue().startsWith(entry.getKey())) {
        result.append("\n").append(entry.getValue());
      } else {
        result.append("\n").append(entry.getKey()).append(": ").append(entry.getValue());
      }
    }
    return result.toString();
  }
}
apache-2.0
ztx1491/jwatch
jwatch/src/main/java/org/jwatch/servlet/ActionConstants.java
1629
/**
 * JWatch - Quartz Monitor: http://code.google.com/p/jwatch/
 * Copyright (C) 2011 Roy Russo and the original author or authors.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General
 * Public License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301 USA
 **/
package org.jwatch.servlet;

/**
 * Action-name constants dispatched by the JWatch servlet layer.
 *
 * @author <a href="mailto:royrusso@gmail.com">Roy Russo</a>
 *         Date: Apr 8, 2011 4:11:45 PM
 */
public class ActionConstants
{
   public static final String LOAD_INSTANCES = "get_all_instances";
   public static final String LOAD_INSTANCE_DETAILS = "get_instance_details";
   public static final String CREATE_INSTANCE = "create_instance";
   public static final String LOAD_SCHEDULERS = "get_schedulers";
   public static final String LOAD_JOBS = "get_jobs";
   public static final String LOAD_SCHEDULERINFO = "get_scheduler_info";
   public static final String LOAD_TRIGGERS_FOR_JOB = "get_job_triggers";
   public static final String MONITOR_JOBS = "monitor_jobs";

   // Constants holder: not meant to be instantiated.
   private ActionConstants()
   {
   }
}
lgpl-3.0
e3tar/opengse
testing/server-side/webapps-src/servlet-tests/web/WEB-INF/java/tests/javax_servlet/ServletRequest/GetParameterValues_1TestServlet.java
3884
/*
 * $Header: /home/cvs/jakarta-watchdog-4.0/src/server/servlet-tests/WEB-INF/classes/tests/javax_servlet/ServletRequest/GetParameterValues_1TestServlet.java,v 1.2 2002/01/11 22:20:57 rlubke Exp $
 * $Revision: 1.2 $
 * $Date: 2002/01/11 22:20:57 $
 *
 * ====================================================================
 * The Apache Software License, Version 1.1
 *
 * Copyright (c) 1999-2002 The Apache Software Foundation.  All rights
 * reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. The end-user documentation included with the redistribution, if
 *    any, must include the following acknowlegement:
 *       "This product includes software developed by the
 *        Apache Software Foundation (http://www.apache.org/)."
 *    Alternately, this acknowlegement may appear in the software itself,
 *    if and wherever such third-party acknowlegements normally appear.
 *
 * 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
 *    Foundation" must not be used to endorse or promote products derived
 *    from this software without prior written permission. For written
 *    permission, please contact apache@apache.org.
 *
 * 5. Products derived from this software may not be called "Apache"
 *    nor may "Apache" appear in their names without prior written
 *    permission of the Apache Group.
 *
 * THIS SOFTWARE IS PROVIDED AS IS'' AND ANY EXPRESSED OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
 * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package tests.javax_servlet.ServletRequest;

import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.GenericServlet;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;

import java.io.IOException;
import java.io.PrintWriter;

/**
 * A Negative test for getParameterValues(String)
 */
public class GetParameterValues_1TestServlet extends GenericServlet {

    public void service ( ServletRequest request, ServletResponse response ) throws ServletException, IOException {
        PrintWriter out = response.getWriter();

        // no parameter was set with name zero
        try {
            String[] vals = request.getParameterValues( "doesnotexist" );

            if ( vals == null ) {
                out.println( "GetParameterValues_1Test test PASSED" );
            } else {
                // Previously this branch printed nothing, leaving the harness
                // with an empty response on failure; report it explicitly.
                out.println( "GetParameterValues_1Test test FAILED <BR>" );
                out.println( "Expected null for a nonexistent parameter" );
            }
        } catch ( Exception e ) {
            out.println( "GetParameterValues_1Test test FAILED <BR>" );
            e.printStackTrace();
        }
    }
}
apache-2.0
aakashysharma/opengse
testing/server-side/webapps-src/servlet-tests/web/WEB-INF/java/tests/javax_servlet/ServletContextListener/ContextListener.java
3659
/*
 * $Header: /home/cvs/jakarta-watchdog-4.0/src/server/servlet-tests/WEB-INF/classes/tests/javax_servlet/ServletContextListener/ContextListener.java,v 1.1 2002/01/11 22:20:57 rlubke Exp $
 * $Revision: 1.1 $
 * $Date: 2002/01/11 22:20:57 $
 *
 * ====================================================================
 * The Apache Software License, Version 1.1
 *
 * Copyright (c) 1999-2002 The Apache Software Foundation.  All rights
 * reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. The end-user documentation included with the redistribution, if
 *    any, must include the following acknowlegement:
 *       "This product includes software developed by the
 *        Apache Software Foundation (http://www.apache.org/)."
 *    Alternately, this acknowlegement may appear in the software itself,
 *    if and wherever such third-party acknowlegements normally appear.
 *
 * 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
 *    Foundation" must not be used to endorse or promote products derived
 *    from this software without prior written permission. For written
 *    permission, please contact apache@apache.org.
 *
 * 5. Products derived from this software may not be called "Apache"
 *    nor may "Apache" appear in their names without prior written
 *    permission of the Apache Group.
 *
 * THIS SOFTWARE IS PROVIDED AS IS'' AND ANY EXPRESSED OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
 * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package tests.javax_servlet.ServletContextListener;

import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;

import java.util.ArrayList;

/**
 * Watchdog fixture: records that contextInitialized() ran by storing a
 * message list under the "ServletContextListener" context attribute.
 */
public final class ContextListener implements ServletContextListener {

    // The servlet context with which we are associated.
    private ServletContext context = null;

    // Messages recorded by lifecycle callbacks; was a raw ArrayList,
    // now parameterized (erasure-identical at runtime).
    private ArrayList<String> list = new ArrayList<String>();

    public void contextDestroyed( ServletContextEvent event ) {
        // Intentionally empty: this fixture only observes initialization.
    }

    public void contextInitialized( ServletContextEvent event ) {
        this.context = event.getServletContext();
        list.add( "In contextInitialized() of ContextListener<BR>" );
        context.setAttribute( "ServletContextListener", list );
    }
}
apache-2.0
rokn/Count_Words_2015
testing/drools-master/drools-verifier/src/main/java/org/drools/verifier/report/VerifierReportConfiguration.java
709
/* * Copyright 2010 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.verifier.report; public interface VerifierReportConfiguration { }
mit
Xcorpio/spring-security
test/src/main/java/org/springframework/security/test/context/support/WithUserDetails.java
2406
/*
 * Copyright 2002-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.security.test.context.support;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.test.web.servlet.MockMvc;

/**
 * When used with {@link WithSecurityContextTestExecutionListener} this annotation can be
 * added to a test method to emulate running with a {@link UserDetails} returned from the
 * {@link UserDetailsService}. In order to work with {@link MockMvc} the
 * {@link SecurityContext} that is used will have the following properties:
 *
 * <ul>
 * <li>The {@link SecurityContext} created will be that of
 * {@link SecurityContextHolder#createEmptyContext()}</li>
 * <li>It will be populated with an {@link UsernamePasswordAuthenticationToken} that uses
 * the username of {@link #value()}.</li>
 * </ul>
 *
 * @see WithMockUser
 *
 * @author Rob Winch
 * @since 4.0
 */
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@Documented
@WithSecurityContext(factory = WithUserDetailsSecurityContextFactory.class)
public @interface WithUserDetails {

	/**
	 * The username to look up in the {@link UserDetailsService}.
	 *
	 * @return the username to look up; defaults to {@code "user"}
	 */
	String value() default "user";
}
apache-2.0
jimzucker/hygieia-temp
core/src/main/java/com/capitalone/dashboard/model/Dashboard.java
1480
package com.capitalone.dashboard.model;

import org.springframework.data.mongodb.core.mapping.Document;

import java.util.ArrayList;
import java.util.List;

/**
 * A collection of widgets, collectors and application components that represent a software
 * project under development and/or in production use.
 */
@Document(collection="dashboards")
public class Dashboard extends BaseModel {

    private String template;
    private String title;
    private Application application;
    private List<Widget> widgets = new ArrayList<>();
    private String owner;

    /** Package-private no-arg constructor — presumably required by the persistence layer (TODO confirm). */
    Dashboard() {
    }

    /**
     * Creates a dashboard from its core attributes.
     *
     * @param template    the layout template the dashboard is based on
     * @param title       the display title
     * @param application the application this dashboard represents
     * @param owner       the owning user
     */
    public Dashboard(String template, String title, Application application, String owner) {
        this.template = template;
        this.title = title;
        this.application = application;
        this.owner = owner;
    }

    public String getTemplate() { return template; }

    public void setTemplate(String template) { this.template = template; }

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public Application getApplication() { return application; }

    public void setApplication(Application application) { this.application = application; }

    /** Returns the live widget list; callers may mutate it in place. */
    public List<Widget> getWidgets() { return widgets; }

    public String getOwner() { return owner; }

    public void setOwner(String owner) { this.owner = owner; }
}
apache-2.0
bravevu/javaemvreader
src/main/java/sasc/emv/SignedDynamicApplicationData.java
5466
/*
 * Copyright 2010 sasc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package sasc.emv;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import sasc.util.Log;
import sasc.util.Util;

/**
 * Holds the RSA-recovered "Signed Dynamic Application Data" produced during
 * EMV Dynamic Data Authentication (internal authenticate) and validates its
 * format bytes and SHA-1 hash against EMV Book 2, table 15.
 *
 * @author sasc
 */
public class SignedDynamicApplicationData {

    private byte header;                  // expected to be 0x6a (checked in validate())
    private byte signedDataFormat;        // expected to be 0x05
    private byte hashAlgorithmIndicator;  // only SHA-1 is supported by this implementation
    private byte iccDynamicDataLenght;    // [sic] length byte of the ICC dynamic data field
    private byte[] iccDynamicNumber;      // ICC dynamic data read from the recovered message
    private byte[] hashResult;            // 20-byte SHA-1 value carried in the message
    private byte[] decipheredData;        // full plaintext recovered by the RSA operation
    private byte trailer;                 // expected to be 0xbc
    private byte[] terminalDynamicData;   // terminal-supplied bytes folded into the hash
    private boolean isValid = false;
    private boolean validationPerformed = false;

    // Private: instances are created via parseSignedData() only.
    private SignedDynamicApplicationData(byte[] data, byte[] terminalDynamicData) {
        this.decipheredData = data;
        this.terminalDynamicData = terminalDynamicData;
    }

    //DDA/Internal Auth must be performed after all app records have been read.
    /**
     * Parses the deciphered data, checking header/format/trailer bytes and
     * recomputing the SHA-1 hash over the fields mandated by EMV Book 2,
     * table 15. Runs at most once; later calls return the cached verdict.
     * Throws {@link SignedDataException} on any format or hash mismatch.
     */
    public boolean validate() {
        if (validationPerformed) {
            //Validation already run
            return isValid();
        }
        validationPerformed = true; //'isValid' flag set further down

        ByteArrayInputStream stream = new ByteArrayInputStream(decipheredData);

        header = (byte) stream.read();
        if (header != (byte) 0x6a) {
            throw new SignedDataException("Header != 0x6a");
        }

        signedDataFormat = (byte) stream.read();
        if (signedDataFormat != (byte) 0x05) {
            throw new SignedDataException("Signed Data Format != 0x05");
        }

        hashAlgorithmIndicator = (byte) stream.read(); //We currently only support SHA-1

        iccDynamicDataLenght = (byte) stream.read();
        // NOTE(review): a length byte > 0x7f becomes negative here and the
        // allocation below would throw NegativeArraySizeException — confirm
        // inputs are bounded, or guard before allocating.
        iccDynamicNumber = new byte[iccDynamicDataLenght];
        // NOTE(review): read() return values are ignored throughout; safe for
        // ByteArrayInputStream only while enough bytes remain — TODO confirm.
        stream.read(iccDynamicNumber, 0, iccDynamicDataLenght);

        //Now read padding bytes (0xbb), if available
        //The padding bytes are used in hash validation
        // 21 = the 20-byte SHA-1 hash plus the 1 trailer byte at the end.
        byte[] padding = new byte[stream.available()-21];
        stream.read(padding, 0, padding.length);
        hashResult = new byte[20];
        stream.read(hashResult, 0, 20);

        ByteArrayOutputStream hashStream = new ByteArrayOutputStream();
        //EMV Book 2, page 67, table 15
        //Header not included in hash
        hashStream.write(signedDataFormat);
        hashStream.write(hashAlgorithmIndicator);
        hashStream.write((byte)iccDynamicDataLenght);
        hashStream.write(iccDynamicNumber, 0, iccDynamicNumber.length);
        hashStream.write(padding, 0, padding.length);
        hashStream.write(terminalDynamicData, 0, terminalDynamicData.length);
        //Trailer not included in hash

        byte[] sha1Result = null;
        try {
            sha1Result = Util.calculateSHA1(hashStream.toByteArray());
        } catch (NoSuchAlgorithmException ex) {
            throw new SignedDataException("SHA-1 hash algorithm not available", ex);
        }

        if(!Arrays.equals(sha1Result, hashResult)){
            throw new SignedDataException("Hash is not valid");
        }

        // Trailer byte is checked after the hash comparison; both must pass.
        trailer = (byte) stream.read();
        if (trailer != (byte) 0xbc) {
            throw new SignedDataException("Trailer != 0xbc");
        }

        isValid = true;

        return true;
    }

    /** True only after validate() has run and every check passed. */
    public boolean isValid() {
        return isValid;
    }

    /**
     * Recovers the plaintext by applying the ICC public key (raw RSA) to the
     * signed data and wraps it for later validation. The ciphertext length
     * must equal the key modulus length.
     */
    public static SignedDynamicApplicationData parseSignedData(byte[] data, ICCPublicKey iccPublicKey, byte[] terminalDynamicData) {
        byte[] expBytesICC = iccPublicKey.getExponent();
        byte[] modBytesICC = iccPublicKey.getModulus();

        if (data.length != modBytesICC.length) {
            throw new SignedDataException("Data length does not equal key length. Data length=" + data.length + " Key length="+modBytesICC.length);
        }
        byte[] decipheredBytes = Util.performRSA(data, expBytesICC, modBytesICC);

        return new SignedDynamicApplicationData(decipheredBytes, terminalDynamicData);
    }

    @Override
    public String toString() {
        StringWriter sw = new StringWriter();
        dump(new PrintWriter(sw), 0);
        return sw.toString();
    }

    /**
     * Pretty-prints the parsed fields; triggers validate() lazily if it has
     * not been run yet (and may therefore throw SignedDataException).
     */
    public void dump(PrintWriter pw, int indent) {
        pw.println(Util.getSpaces(indent) + "Signed Dynamic Application Data");
        String indentStr = Util.getSpaces(indent + Log.INDENT_SIZE);

        if(!validationPerformed){
            validate();
        }

        if (isValid()) {
            pw.println(indentStr + "Hash Algorithm Indicator: " + hashAlgorithmIndicator +" (=SHA-1)");
            pw.println(indentStr + "ICC Dynamic Data: " + Util.byteArrayToHexString(iccDynamicNumber));
            pw.println(indentStr + "Hash: " + Util.byteArrayToHexString(hashResult));
        } else {
            pw.println(indentStr + "SIGNED DYNAMIC DATA NOT VALID");
        }
    }
}
apache-2.0
caoanhhao/AndEngine
src/org/andengine/util/adt/list/concurrent/SynchronizedList.java
2820
package org.andengine.util.adt.list.concurrent; import org.andengine.util.adt.list.IList; import org.andengine.util.adt.list.ListUtils; /** * (c) 2012 Zynga Inc. * * @author Nicolas Gramlich <ngramlich@zynga.com> * @since 14:23:50 - 01.02.2012 */ public class SynchronizedList<T> implements IList<T> { // =========================================================== // Constants // =========================================================== // =========================================================== // Fields // =========================================================== protected final IList<T> mList; // =========================================================== // Constructors // =========================================================== public SynchronizedList(final IList<T> pList) { this.mList = pList; } // =========================================================== // Getter & Setter // =========================================================== // =========================================================== // Methods for/from SuperClass/Interfaces // =========================================================== @Override public synchronized boolean isEmpty() { return this.mList.isEmpty(); } @Override public synchronized T get(final int pIndex) throws IndexOutOfBoundsException { return this.mList.get(pIndex); } @Override public synchronized void set(final int pIndex, final T pItem) throws IndexOutOfBoundsException { this.mList.set(pIndex, pItem); } @Override public synchronized int indexOf(final T pItem) { return this.mList.indexOf(pItem); } @Override public synchronized void add(final T pItem) { this.mList.add(pItem); } @Override public synchronized void add(final int pIndex, final T pItem) throws IndexOutOfBoundsException { this.mList.add(pIndex, pItem); } @Override public synchronized T removeFirst() { return this.mList.removeFirst(); } @Override public synchronized T removeLast() { return this.mList.removeLast(); } @Override public synchronized boolean 
remove(final T pItem) { return this.mList.remove(pItem); } @Override public synchronized T remove(final int pIndex) throws IndexOutOfBoundsException { return this.mList.remove(pIndex); } @Override public synchronized int size() { return this.mList.size(); } @Override public synchronized void clear() { this.mList.clear(); } @Override public synchronized String toString() { return ListUtils.toString(this); } // =========================================================== // Methods // =========================================================== // =========================================================== // Inner and Anonymous Classes // =========================================================== }
apache-2.0
mahaliachante/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/CreateVpnGatewayResultStaxUnmarshaller.java
2452
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model.transform;

import java.util.Map;
import java.util.Map.Entry;

import javax.xml.stream.events.XMLEvent;

import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;

/**
 * Create Vpn Gateway Result StAX Unmarshaller.
 * Walks StAX events and populates a {@link CreateVpnGatewayResult} from the
 * {@code vpnGateway} child element.
 */
public class CreateVpnGatewayResultStaxUnmarshaller implements Unmarshaller<CreateVpnGatewayResult, StaxUnmarshallerContext> {

    /**
     * Consumes events from the context until the result element (or the
     * document) ends, delegating the {@code vpnGateway} element to
     * {@link VpnGatewayStaxUnmarshaller}.
     */
    public CreateVpnGatewayResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        CreateVpnGatewayResult createVpnGatewayResult = new CreateVpnGatewayResult();
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        // At document start the payload sits one level deeper (response wrapper).
        if (context.isStartOfDocument()) targetDepth += 1;

        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument()) return createVpnGatewayResult;

            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("vpnGateway", targetDepth)) {
                    createVpnGatewayResult.setVpnGateway(VpnGatewayStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Climbed back above the starting depth: the result element is done.
                if (context.getCurrentDepth() < originalDepth) {
                    return createVpnGatewayResult;
                }
            }
        }
    }

    private static CreateVpnGatewayResultStaxUnmarshaller instance;

    // Lazily created singleton.
    // NOTE(review): unsynchronized lazy init — presumably benign because
    // construction is stateless, but two instances may briefly exist under
    // contention; confirm this matches the SDK-wide convention.
    public static CreateVpnGatewayResultStaxUnmarshaller getInstance() {
        if (instance == null) instance = new CreateVpnGatewayResultStaxUnmarshaller();
        return instance;
    }
}
apache-2.0
sheofir/aws-sdk-java
aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/UploadSigningCertificateResult.java
3634
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.identitymanagement.model;

import java.io.Serializable;

/**
 * <p>
 * Contains the response to a successful UploadSigningCertificate
 * request.
 * </p>
 * <p>
 * NOTE(review): this appears to follow the SDK's generated-model pattern
 * (getter/setter/fluent-with triple plus toString/hashCode/equals/clone) —
 * keep any edits consistent with sibling model classes.
 * </p>
 */
public class UploadSigningCertificateResult implements Serializable, Cloneable {

    /**
     * Information about the certificate.
     */
    private SigningCertificate certificate;

    /**
     * Information about the certificate.
     *
     * @return Information about the certificate.
     */
    public SigningCertificate getCertificate() {
        return certificate;
    }

    /**
     * Information about the certificate.
     *
     * @param certificate Information about the certificate.
     */
    public void setCertificate(SigningCertificate certificate) {
        this.certificate = certificate;
    }

    /**
     * Information about the certificate.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param certificate Information about the certificate.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public UploadSigningCertificateResult withCertificate(SigningCertificate certificate) {
        this.certificate = certificate;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCertificate() != null) sb.append("Certificate: " + getCertificate() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getCertificate() == null) ? 0 : getCertificate().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof UploadSigningCertificateResult == false) return false;
        UploadSigningCertificateResult other = (UploadSigningCertificateResult)obj;

        // XOR detects the "exactly one side is null" mismatch before the
        // null-safe equals comparison below.
        if (other.getCertificate() == null ^ this.getCertificate() == null) return false;
        if (other.getCertificate() != null && other.getCertificate().equals(this.getCertificate()) == false) return false;
        return true;
    }

    @Override
    public UploadSigningCertificateResult clone() {
        try {
            return (UploadSigningCertificateResult) super.clone();

        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }

    }

}
apache-2.0
sheofir/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/InstanceNetworkInterfaceAssociation.java
6701
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; /** * <p> * Describes association information for an Elastic IP address. * </p> */ public class InstanceNetworkInterfaceAssociation implements Serializable, Cloneable { /** * The public IP address or Elastic IP address bound to the network * interface. */ private String publicIp; /** * The public DNS name. */ private String publicDnsName; /** * The ID of the owner of the Elastic IP address. */ private String ipOwnerId; /** * The public IP address or Elastic IP address bound to the network * interface. * * @return The public IP address or Elastic IP address bound to the network * interface. */ public String getPublicIp() { return publicIp; } /** * The public IP address or Elastic IP address bound to the network * interface. * * @param publicIp The public IP address or Elastic IP address bound to the network * interface. */ public void setPublicIp(String publicIp) { this.publicIp = publicIp; } /** * The public IP address or Elastic IP address bound to the network * interface. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param publicIp The public IP address or Elastic IP address bound to the network * interface. * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public InstanceNetworkInterfaceAssociation withPublicIp(String publicIp) { this.publicIp = publicIp; return this; } /** * The public DNS name. * * @return The public DNS name. */ public String getPublicDnsName() { return publicDnsName; } /** * The public DNS name. * * @param publicDnsName The public DNS name. */ public void setPublicDnsName(String publicDnsName) { this.publicDnsName = publicDnsName; } /** * The public DNS name. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param publicDnsName The public DNS name. * * @return A reference to this updated object so that method calls can be chained * together. */ public InstanceNetworkInterfaceAssociation withPublicDnsName(String publicDnsName) { this.publicDnsName = publicDnsName; return this; } /** * The ID of the owner of the Elastic IP address. * * @return The ID of the owner of the Elastic IP address. */ public String getIpOwnerId() { return ipOwnerId; } /** * The ID of the owner of the Elastic IP address. * * @param ipOwnerId The ID of the owner of the Elastic IP address. */ public void setIpOwnerId(String ipOwnerId) { this.ipOwnerId = ipOwnerId; } /** * The ID of the owner of the Elastic IP address. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param ipOwnerId The ID of the owner of the Elastic IP address. * * @return A reference to this updated object so that method calls can be chained * together. */ public InstanceNetworkInterfaceAssociation withIpOwnerId(String ipOwnerId) { this.ipOwnerId = ipOwnerId; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getPublicIp() != null) sb.append("PublicIp: " + getPublicIp() + ","); if (getPublicDnsName() != null) sb.append("PublicDnsName: " + getPublicDnsName() + ","); if (getIpOwnerId() != null) sb.append("IpOwnerId: " + getIpOwnerId() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getPublicIp() == null) ? 0 : getPublicIp().hashCode()); hashCode = prime * hashCode + ((getPublicDnsName() == null) ? 0 : getPublicDnsName().hashCode()); hashCode = prime * hashCode + ((getIpOwnerId() == null) ? 0 : getIpOwnerId().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof InstanceNetworkInterfaceAssociation == false) return false; InstanceNetworkInterfaceAssociation other = (InstanceNetworkInterfaceAssociation)obj; if (other.getPublicIp() == null ^ this.getPublicIp() == null) return false; if (other.getPublicIp() != null && other.getPublicIp().equals(this.getPublicIp()) == false) return false; if (other.getPublicDnsName() == null ^ this.getPublicDnsName() == null) return false; if (other.getPublicDnsName() != null && other.getPublicDnsName().equals(this.getPublicDnsName()) == false) return false; if (other.getIpOwnerId() == null ^ this.getIpOwnerId() == null) return false; if (other.getIpOwnerId() != null && other.getIpOwnerId().equals(this.getIpOwnerId()) == false) return false; return true; } @Override public InstanceNetworkInterfaceAssociation clone() { try { return (InstanceNetworkInterfaceAssociation) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
apache-2.0
rokn/Count_Words_2015
testing/openjdk2/jaxws/src/share/jaxws_classes/com/sun/xml/internal/rngom/digested/Annotation.java
3383
/* * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * Copyright (C) 2004-2011 * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. 
*
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.sun.xml.internal.rngom.digested;

import com.sun.xml.internal.rngom.ast.builder.Annotations;
import com.sun.xml.internal.rngom.ast.builder.BuildException;
import com.sun.xml.internal.rngom.ast.util.LocatorImpl;

import javax.xml.namespace.QName;

/**
 * Collects annotation attributes and elements reported through the
 * {@link Annotations} callbacks into a {@link DAnnotation}.
 *
 * @author Kohsuke Kawaguchi (kk@kohsuke.org)
 */
class Annotation implements Annotations<ElementWrapper, LocatorImpl, CommentListImpl> {

    /** Accumulates everything reported through the callbacks. */
    private final DAnnotation annotation = new DAnnotation();

    public void addAttribute(String ns, String localName, String prefix, String value, LocatorImpl loc) throws BuildException {
        QName key = new QName(ns, localName, prefix);
        DAnnotation.Attribute attribute = new DAnnotation.Attribute(ns, localName, prefix, value, loc);
        annotation.attributes.put(key, attribute);
    }

    public void addElement(ElementWrapper ea) throws BuildException {
        annotation.contents.add(ea.element);
    }

    /** Comments are intentionally discarded by this builder. */
    public void addComment(CommentListImpl comments) throws BuildException {
        // no-op
    }

    /** Leading comments are intentionally discarded by this builder. */
    public void addLeadingComment(CommentListImpl comments) throws BuildException {
        // no-op
    }

    /** Returns the annotation assembled so far. */
    DAnnotation getResult() {
        return annotation;
    }
}
mit
rokn/Count_Words_2015
testing/openjdk2/langtools/test/tools/doclint/MissingThrowsTest.java
371
/*
 * @test /nodynamiccopyright/
 * @bug 8004832
 * @summary Add new doclint package
 * @build DocLintTester
 * @run main DocLintTester -Xmsgs:-missing MissingThrowsTest.java
 * @run main DocLintTester -Xmsgs:missing -ref MissingThrowsTest.out MissingThrowsTest.java
 */

/** */
public class MissingThrowsTest {
    /** */
    void missingThrows() throws Exception { }
}
// NOTE(review): the empty javadoc comments above are the test fixture itself —
// DocLint must report the undocumented 'throws Exception'. Do not "fix" them by
// adding an @throws tag, and avoid inserting lines before the class: the
// expected output in MissingThrowsTest.out is presumably position-sensitive
// (TODO confirm), which is why this note is appended at the end of the file.
mit
irudyak/ignite
modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/index/DynamicColumnsConcurrentTransactionalPartitionedSelfTest.java
1383
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.index;

import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;

/**
 * Test to check concurrent operations on dynamic columns on TRANSACTIONAL PARTITIONED cache with flat key.
 * All test logic lives in the abstract parent; this subclass only fixes the
 * cache mode/atomicity combination under test.
 */
public class DynamicColumnsConcurrentTransactionalPartitionedSelfTest extends DynamicColumnsAbstractConcurrentSelfTest {
    /**
     * Constructor.
     */
    public DynamicColumnsConcurrentTransactionalPartitionedSelfTest() {
        // Parameterize the shared test suite for PARTITIONED + TRANSACTIONAL.
        super(CacheMode.PARTITIONED, CacheAtomicityMode.TRANSACTIONAL);
    }
}
apache-2.0
fernandozhu/elasticsearch
core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java
2653
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.rest.action.admin.indices;

import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener;

import java.io.IOException;

import static org.elasticsearch.rest.RestRequest.Method.DELETE;

/**
 * REST handler that removes the named alias(es) from the given index or
 * indices via {@code DELETE /{index}/_alias/{name}}.
 */
public class RestIndexDeleteAliasesAction extends BaseRestHandler {

    public RestIndexDeleteAliasesAction(Settings settings, RestController controller) {
        super(settings);
        // Both path spellings are equivalent.
        controller.registerHandler(DELETE, "/{index}/_alias/{name}", this);
        controller.registerHandler(DELETE, "/{index}/_aliases/{name}", this);
    }

    @Override
    public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
        final IndicesAliasesRequest removeAliasesRequest = new IndicesAliasesRequest();
        removeAliasesRequest.timeout(request.paramAsTime("timeout", removeAliasesRequest.timeout()));
        removeAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", removeAliasesRequest.masterNodeTimeout()));

        // Both path parameters accept comma-separated lists.
        final String[] targetIndices = Strings.splitStringByCommaToArray(request.param("index"));
        final String[] targetAliases = Strings.splitStringByCommaToArray(request.param("name"));
        removeAliasesRequest.addAliasAction(AliasActions.remove().indices(targetIndices).aliases(targetAliases));

        return channel -> client.admin().indices().aliases(removeAliasesRequest, new AcknowledgedRestListener<>(channel));
    }
}
apache-2.0
colczr/sakai
kernel/kernel-impl/src/main/java/org/sakaiproject/tool/impl/RebuildBreakdownServiceImpl.java
45294
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2014 The Apereo Foundation. * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.impl; import com.google.common.collect.MapMaker; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.azeckoski.reflectutils.ConstructorUtils; import org.sakaiproject.component.api.ServerConfigurationService; import org.sakaiproject.event.api.UsageSession; import org.sakaiproject.event.api.UsageSessionService; import org.sakaiproject.memory.api.Cache; import org.sakaiproject.memory.api.MemoryService; import org.sakaiproject.tool.api.*; import org.sakaiproject.tool.api.Breakdownable.BreakdownableSize; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import java.io.Serializable; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.ConcurrentMap; /** * Implements the handling of Session and other bean breakdowns and rebuilds * * Session related handling config settings (defaults shown): * session.cluster.replication=false * session.cluster.minSecsOldToStore=20 * session.cluster.minSecsBetweenStores=10 * 
session.cluster.minSecsAfterRebuild=30 * * NOTE that org.sakaiproject.tool.impl.RebuildBreakdownService.cache must be set to a distributed store (like terracotta) */ public class RebuildBreakdownServiceImpl implements RebuildBreakdownService { final static String SPECIAL_SESSION_KEY_PREFIX = "_sakai_session_"; final static String SESSION_USER_ID_KEY = SPECIAL_SESSION_KEY_PREFIX+"UserId"; final static String SESSION_USER_EID_KEY = SPECIAL_SESSION_KEY_PREFIX+"UserEid"; final static String SESSION_CREATION_TIME_KEY = SPECIAL_SESSION_KEY_PREFIX+"CreationTime"; final static String SESSION_LAST_ACCESSED_TIME_KEY = SPECIAL_SESSION_KEY_PREFIX+"LastAccessedTime"; final static String SESSION_CURRENT_TOOLSESSION_PLACEMENT_KEY = SPECIAL_SESSION_KEY_PREFIX+"CurrentToolSessionPlacement"; final static String SESSION_CURRENT_USAGESESSION_KEY = SPECIAL_SESSION_KEY_PREFIX+"CurrentUsegeSessionId"; final static String SESSION_TOOL_SESSIONS_KEY = SPECIAL_SESSION_KEY_PREFIX+"ToolSessions"; final static String SESSION_CONTEXT_SESSIONS_KEY = SPECIAL_SESSION_KEY_PREFIX+"ContextSessions"; final static String SESSION_LAST_BREAKDOWN_KEY = SPECIAL_SESSION_KEY_PREFIX+"LastBreakdownTime"; final static String SESSION_LAST_REBUILD_KEY = SPECIAL_SESSION_KEY_PREFIX+"LastRebuildTime"; private static final Log log = LogFactory.getLog(RebuildBreakdownServiceImpl.class); private final int minSecondsBetweenStoresDefault = 10; private final int minSecondsAfterRebuildDefault = 30; private final int smallestMinSecondsBetweenStores = 1; private final int minAgeToStoreSecondsDefault = 10; /** * sessionClassWhitelist contains a list of classnames that are known safe to serialize * and store in sessions (since they can be deserialized safely) */ private Set<String> sessionClassWhitelist; /** * a list of all attributes in sessions (MySession or MyLittleSession) that should be skipped * when doing session breakdowns or rebuilds */ private Set<String> sessionAttributeBlacklist; /** * Map that contains all the 
Breakdownable which were manually registered from the services */ private ConcurrentMap<String, Breakdownable> breakdownableHandlers; private MemoryService memoryService; private SessionManager sessionManager; private ServerConfigurationService serverConfigurationService; private UsageSessionService usageSessionService; private Cache sessionCache; // Replicated, long lived private Cache stashingCache; // NON-replicated, short lived public void init() { log.info("INIT: session clustering=" + isSessionClusteringEnabled()); if (isSessionClusteringEnabled()) { sessionCache = memoryService.newCache("org.sakaiproject.tool.impl.RebuildBreakdownService.cache"); stashingCache = memoryService.newCache("org.sakaiproject.tool.impl.RebuildBreakdownService.stash"); sessionClassWhitelist = new HashSet<String>(4); // number should match items count below sessionClassWhitelist.add(Locale.class.getName()); sessionClassWhitelist.add("org.sakaiproject.event.api.SimpleEvent"); sessionClassWhitelist.add("org.sakaiproject.authz.api.SimpleRole"); sessionClassWhitelist.add("org.apache.commons.lang.mutable.MutableLong"); sessionAttributeBlacklist = new HashSet<String>(6); // number should match items count below sessionAttributeBlacklist.add(SESSION_LAST_BREAKDOWN_KEY); sessionAttributeBlacklist.add(SESSION_LAST_REBUILD_KEY); /* from BasePreferencesService.ATTR_PREFERENCE_IS_NULL * This controls whether the session cached version of prefs is reloaded or assumed to be populated, * when it is true the processing assumes it is populated (very weird logic and dual-caching) */ sessionAttributeBlacklist.add("attr_preference_is_null"); /* from BasePreferencesService.ATTR_PREFERENCE * rebuild this manually on demand from the cache instead of storing it */ sessionAttributeBlacklist.add("attr_preference"); /** should be re-detected on rebuild of the session */ sessionAttributeBlacklist.add("is_mobile_device"); /** this is normally only set on login, we handle it specially on breakdown and rebuild */ 
sessionAttributeBlacklist.add(UsageSessionService.USAGE_SESSION_KEY); } /* Create a map with weak references to the values */ breakdownableHandlers = new MapMaker().weakValues().makeMap(); } @Override public boolean storeSession(Session s, HttpServletRequest request) { if (!isSessionClusteringEnabled()) { if (log.isDebugEnabled()) log.debug("Session Clustering not enabled"); return false; } if (s == null) { throw new IllegalArgumentException("session cannot be null"); } if (!(s instanceof MySession)) { // yes, this is kind of dumb but unless we change the class structures it is necessary throw new IllegalArgumentException("session ("+s.getId()+") MUST be a MySession implementation"); } MySession ms = (MySession) s; if (!isSessionValid(ms)) { if (log.isDebugEnabled()) log.debug("Session ("+s.getId()+") not valid for clustering, not a MySession"); return false; } if (request == null) { request = ms.currentRequest(); } if (!isSessionBreakdownAllowed(ms, request)) { // logging in the check method itself return false; } String sessionId = ms.getId(); if (log.isDebugEnabled()) log.debug("RebuildBreakdownServiceImpl.storeSession, for sessionId: [" + sessionId + "]"); Map<String,Serializable> sessionMap = new HashMap<String,Serializable>(); storeSessionSpecialAttributes(ms, sessionMap); storeSessionAttributes(ms, sessionMap); if (log.isDebugEnabled()) log.debug("RebuildBreakdownServiceImpl.storeSession, for sessionId: [" + sessionId + "] completed"); sessionCache.put(sessionId, sessionMap); ms.setAttribute(SESSION_LAST_BREAKDOWN_KEY, System.currentTimeMillis()); return true; } @Override public boolean rebuildSession(Session s) { if (!isSessionClusteringEnabled()) { if (log.isDebugEnabled()) log.debug("Session Clustering not enabled"); return false; } if (s == null) { throw new IllegalArgumentException("session cannot be null"); } if (!(s instanceof MySession)) { // yes, this is kind of dumb but unless we change the class structures it is necessary throw new 
IllegalArgumentException("session ("+s.getId()+") MUST be a MySession implementation"); } MySession ms = (MySession) s; if (!ms.isValid() || ms.isInactive()) { throw new IllegalArgumentException("session cannot be invalid (valid="+ms.isValid()+") or inactive (inactive="+ms.isInactive()+")"); } boolean rebuilt; String sessionId = ms.getId(); @SuppressWarnings("unchecked") Map<String, Serializable> sessionMap = (Map<String, Serializable>) sessionCache.get(sessionId); if (sessionMap == null || sessionMap.isEmpty()) { // no data available to rebuild this session if (log.isDebugEnabled()) log.debug("rebuildSession, sessionId: [" + sessionId + "] data not found in store, cannot rebuild"); rebuilt = false; } else { // REBUILD the session if (log.isDebugEnabled()) log.debug("rebuildSession, sessionId: [" + sessionId + "] from map("+sessionMap.size()+")"); processMySessionMap(ms, sessionMap); // now that the session is fully rebuilt we need to make sure we reactivate it and make it current ms.setActive(); sessionManager.setCurrentSession(ms); // this has to be set for some of the stuff below to work // SPECIAL cases related to the rebuild // Repopulate the current ToolSession if (sessionMap.containsKey(SESSION_CURRENT_TOOLSESSION_PLACEMENT_KEY)) { String currentToolSessionPlacementId = (String) sessionMap.get(SESSION_CURRENT_TOOLSESSION_PLACEMENT_KEY); if (currentToolSessionPlacementId != null) { ToolSession ts = ms.getToolSession(currentToolSessionPlacementId); if (ts != null) { sessionManager.setCurrentToolSession(ts); if (log.isDebugEnabled()) log.debug("rebuildSession, sessionId: [" + sessionId + "], updated current tool session("+ts.getId()+") for placement: "+currentToolSessionPlacementId); } } } // Repopulate the current UsageSession and reassign to this server if (sessionMap.containsKey(SESSION_CURRENT_USAGESESSION_KEY)) { String currentUsageSessionId = (String) sessionMap.get(SESSION_CURRENT_USAGESESSION_KEY); UsageSession us = 
usageSessionService.getSession(currentUsageSessionId); if (us == null) { // likely have to create a new one, warn and then attempt it (maybe should have failed here instead) // TODO UsageSession startSession(String userId, String remoteAddress, String userAgent) if (log.isDebugEnabled()) log.debug("rebuildSession, sessionId: [" + sessionId + "], made new usage session: "+currentUsageSessionId); } // NOTE that this usageSession will be realigned to the current server (as needed) by code in the RequestFilter ms.setAttribute(UsageSessionService.USAGE_SESSION_KEY, us); if (log.isDebugEnabled()) log.debug("rebuildSession, sessionId: [" + sessionId + "], reloaded usage session: "+currentUsageSessionId); } ms.setAttribute(SESSION_LAST_REBUILD_KEY, System.currentTimeMillis()); rebuilt = true; } log.info("RBS rebuildSession, sessionId: [" + sessionId + "] complete, rebuilt: "+rebuilt); return rebuilt; } @Override public boolean isSessionHandlingEnabled() { return isSessionClusteringEnabled(); } @Override public void purgeSessionFromStorageById(String sessionId) { if (this.sessionCache != null) { this.sessionCache.remove(sessionId); } } @Override public StoreableBreakdown retrieveCallbackSessionData(String attributeKey, String sessionId) { if (attributeKey == null) { throw new IllegalArgumentException("attributeKey must be set"); } if (sessionId == null) { try { sessionId = sessionManager.getCurrentToolSession().getId(); } catch (Exception e) { throw new IllegalStateException("no current tool session found: "+e, e); } } String storedKey = StoreableBreakdown.makeStashKey(sessionId, attributeKey); StoreableBreakdown data = (StoreableBreakdown) this.stashingCache.get(storedKey); if (data != null) { // clears the cache after retrieving the data if successful this.stashingCache.remove(storedKey); } return data; } // SUPPORT METHODS @Override public void registerBreakdownHandler(Breakdownable<?> handler) { if (log.isDebugEnabled()) log.debug("registering a BreakdownableHandler"); 
if (handler == null) { throw new IllegalArgumentException("handler cannot be null"); } try { String handlerName = handler.defineHandledClass().getName(); if (log.isDebugEnabled()) log.debug("handler: [" + handlerName + "]"); handler.defineClassLoader(); // just call to make sure it is not going to die breakdownableHandlers.putIfAbsent(handlerName, handler); if (log.isDebugEnabled()) log.debug("breakdownableHandlers now has [" + breakdownableHandlers.size() + "]"); } catch (Exception e) { throw new IllegalStateException("Failure in the handler that was attempting to be registered: "+handler+" :: "+e, e); } } @Override public void unregisterBreakdownHandler(String fullClassName) { if (fullClassName == null) { throw new IllegalArgumentException("fullClassName cannot be null"); } breakdownableHandlers.remove(fullClassName); } @Override public StoreableBreakdown breakdownObject(Object breakdownable, BreakdownableSize size) { if (breakdownable == null) { throw new IllegalArgumentException("breakdownable cannot be null"); } if (size == null) { size = BreakdownableSize.TINY; // default to the smallest one } String className = breakdownable.getClass().getName(); @SuppressWarnings("unchecked") Breakdownable<Object> handler = breakdownableHandlers.get(className); if (handler == null) { throw new IllegalStateException("No Breakdownable handler found for object class: "+className); } StoreableBreakdown sb; try { Serializable data = handler.makeBreakdown(breakdownable, size); sb = new StoreableBreakdown(className, size, data); } catch (Exception e) { sb = null; log.warn("Failure attempting to breakdown object (to size="+size+"): "+breakdownable+" :: "+e, e); } return sb; } @Override public Object rebuildObject(String className, BreakdownableSize size, Serializable data) { if (className == null) { throw new IllegalArgumentException("rebuildObject className cannot be null"); } if (data == null) { throw new IllegalArgumentException("rebuildObject data cannot be null"); } if (size 
== null) { size = BreakdownableSize.TINY; // Default } ClassLoader currentCL = Thread.currentThread().getContextClassLoader(); // likely the kernel CL (shared) Object rebuilt = null; try { Breakdownable<?> handler = breakdownableHandlers.get(className); if (handler != null) { // only rebuild if we have a handler ClassLoader objectCL; try { objectCL = handler.defineClassLoader(); } catch (Exception e) { objectCL = null; log.warn("Failure in defineClassLoader: "+e); } if (objectCL == null) { objectCL = handler.getClass().getClassLoader(); } Thread.currentThread().setContextClassLoader(objectCL); rebuilt = handler.doRebuild(data, size); } } catch (Exception e) { log.warn("Failure ("+e.getMessage()+") attempting to rebuild object: class="+className+", size:"+size+", data: "+data+" :: "+e, e); } finally { // reset back to the current CL Thread.currentThread().setContextClassLoader(currentCL); } return rebuilt; } /** * storeSessionSpecialAttributes() adds session attributes that need to be stored in the * cluster in the sessionMap. These are attributes that are not part of session.getAttributes(). * For example, it includes the sessions User EID and User ID, as well as data about the session's * ToolSessions and ContextSessions if the session is of type MySession. 
* @param s session that will have special attributes stored * @param sessionMap the Map that will contain the attributes that will eventually stored in the cluster */ private void storeSessionSpecialAttributes(MySession s, Map<String, Serializable> sessionMap) { // special case ids from the session sessionMap.put(SESSION_USER_ID_KEY, s.getUserId()); sessionMap.put(SESSION_USER_EID_KEY, s.getUserEid()); sessionMap.put(SESSION_CREATION_TIME_KEY, s.getCreationTime()); sessionMap.put(SESSION_LAST_ACCESSED_TIME_KEY, s.getLastAccessedTime()); // special cases related to the session ToolSession ts = sessionManager.getCurrentToolSession(); if (ts != null) { sessionMap.put(SESSION_CURRENT_TOOLSESSION_PLACEMENT_KEY, ts.getPlacementId()); } UsageSession us = usageSessionService.getSession(); if (us != null) { sessionMap.put(SESSION_CURRENT_USAGESESSION_KEY, us.getId()); } // sub-sessions Map<String, MyLittleSession> toolSessions = s.m_toolSessions; if (toolSessions != null && toolSessions.size() > 0) { Map<String, Map<String, Serializable>> toolSessionMaps = new HashMap<String, Map<String, Serializable>>(); for (Entry<String, MyLittleSession> entry: toolSessions.entrySet()) { String toolSessionId = entry.getKey(); MyLittleSession toolSession = entry.getValue(); if (!storeSubSession(toolSession)) { if (log.isDebugEnabled()) log.debug("store toolSessions skipping subSession: "+toolSession); continue; } Map<String, Serializable> toolSessionMap = new HashMap<String, Serializable>(); storeSessionAttributes(toolSession, toolSessionMap); if (toolSessionMap.isEmpty()) { if (log.isDebugEnabled()) log.debug("store toolSessions skipping subSession (no attributes included): "+toolSession); continue; } toolSessionMap.put(SESSION_CREATION_TIME_KEY, toolSession.getCreationTime()); toolSessionMap.put(SESSION_LAST_ACCESSED_TIME_KEY, toolSession.getLastAccessedTime()); toolSessionMaps.put(toolSessionId, toolSessionMap); } sessionMap.put(SESSION_TOOL_SESSIONS_KEY, (Serializable) 
toolSessionMaps); } Map<String, MyLittleSession> contextSessions = s.m_contextSessions; if (contextSessions != null && contextSessions.size() > 0) { Map<String, Map<String, Serializable>> contextSessionMaps = new HashMap<String, Map<String, Serializable>>(); for (Entry<String, MyLittleSession> entry: contextSessions.entrySet()) { String contextSessionId = entry.getKey(); MyLittleSession contextSession = entry.getValue(); if (!storeSubSession(contextSession)) { if (log.isDebugEnabled()) log.debug("store contextSessions skipping subSession: "+contextSession); continue; } Map<String, Serializable> contextSessionMap = new HashMap<String, Serializable>(); storeSessionAttributes(contextSession, contextSessionMap); if (contextSessionMap.isEmpty()) { if (log.isDebugEnabled()) log.debug("store contextSessions skipping subSession (no attributes included): "+contextSession); continue; } contextSessionMap.put(SESSION_CREATION_TIME_KEY, contextSession.getCreationTime()); contextSessionMap.put(SESSION_LAST_ACCESSED_TIME_KEY, contextSession.getLastAccessedTime()); contextSessionMaps.put(contextSessionId, contextSessionMap); } sessionMap.put(SESSION_CONTEXT_SESSIONS_KEY, (Serializable) contextSessionMaps); } } /** * Checks if a subSession should be processed and stored in the distributed store * * @param subSession the sub session which we are checking to see if it should be stored * @return true if the session should be stored OR false if it should be skipped */ private boolean storeSubSession(MyLittleSession subSession) { if (subSession == null) { return false; } if (MyLittleSession.TYPE_TOOL.equals(subSession.getSessionType())) { if (subSession.getSessionToolId() == null) { // don't store tool sessions if they have no toolId return false; } else if (StringUtils.contains(subSession.getSessionToolId(), "synoptic")) { // don't store tool sessions for synoptic tools return false; } } if (subSession.getContextId() != null && 
subSession.getContextId().equals(serverConfigurationService.getGatewaySiteId())) { // do not store context sessions if they are related to the gateway site return false; } if (!subSession.getAttributeNames().hasMoreElements()) { // don't store empty sessions return false; } return true; } /** * storeSessionAttributes() puts all of the attributes that are available from session.getAttribute() * into the sessionMap which will be stored in the cluster * @param s Session that is being stored * @param sessionMap the Map that will contain the attributes that will eventually stored in the cluster */ private void storeSessionAttributes(HttpSession s, Map<String, Serializable> sessionMap) { @SuppressWarnings("unchecked") Enumeration<String> keys = s.getAttributeNames(); while (keys.hasMoreElements()) { String key = keys.nextElement(); if (sessionAttributeBlacklist.contains(key)) { // skip processing on this key continue; } if (log.isDebugEnabled()) log.debug("attempting to store session attribute key [" + key + "] in cache"); Object object = s.getAttribute(key); Serializable toStore = serializeSessionAttribute(object); // now store it if we were successful if (toStore != null) { sessionMap.put(key, toStore); if (log.isDebugEnabled()) log.debug("RebuildBreakdownServiceImpl.storeSession, putting key [" + key + "], class: [" + object.getClass().getName() + "], value: [" + object + "]"); } } } /** * serializeSessionAttribute() takes an object and returns a Serialized version of it, if * the object can be serialized. 
Objects of type StoreableBreakdown are serialized here, * as well as objects that are either primitives or are part of a known list of classes * that can be serialized * @param object Object to be serialized * @return a Serialized version of the Object, or null if the object cannot be serialized */ private Serializable serializeSessionAttribute(Object object) { // Convert object into something that can be stored Serializable toStore; if (object == null) { toStore = null; } else { try { @SuppressWarnings("UnnecessaryLocalVariable") StoreableBreakdown sb = breakdownObject(object, BreakdownableSize.TINY); toStore = sb; } catch (IllegalStateException e) { // no handler for this type of object if (isObjectSimple(object)) { toStore = (Serializable) object; } else if (sessionClassWhitelist.contains(object.getClass().getName())) { toStore = (Serializable) object; } else { // do not store, maybe log trace or debug message toStore = null; } } } return toStore; } /** * iterates through all of the items in the sessionMap and * sets the relevant properties of the session. Some session attributes will be * set directly (via session.setAttribute()). Others will require special handling, * such as ToolSessions contained within a Session, or session properties, such as User ID * or User EID. * @param s MySession that will be updated * @param sessionMap the map of attributes that will be applied to the session. */ private void processMySessionMap(MySession s, Map<String, Serializable> sessionMap) { if (sessionMap != null && !sessionMap.isEmpty()) { for (Entry<String, Serializable> entry : sessionMap.entrySet()) { if (!handleMySessionSpecialKey(s, entry.getKey(), entry.getValue())) { handleSessionStandardKey(s, entry.getKey(), entry.getValue()); } } } } /** * iterates through all of the items in the sessionMap and * sets the relevant properties of the session. Some session attributes will be * set directly (via session.setAttribute()). 
Others will require special handling, * such as ToolSessions contained within a Session, or session properties, such as User ID * or User EID. * @param s ToolSession or ContextSession that will be updated * @param sessionMap the map of attributes that will be applied to the session. */ private void processMLSessionMap(MyLittleSession s, Map<String, Serializable> sessionMap) { if (sessionMap != null && !sessionMap.isEmpty()) { for (Entry<String, Serializable> entry : sessionMap.entrySet()) { if (!handleMLSessionSpecialKey(s, entry.getKey(), entry.getValue())) { handleSessionStandardKey(s, entry.getKey(), entry.getValue()); } } } } /** * performs any special processing for keys found in the session cache. These keys could * be attributes of the session itself, or objects like ToolSessions or ContextSessions * @param s Session being processed * @param key name of the object stored in the session cache * @param object the object that must be handled separately * @return true if the key was specially processed, false if key uses default processing */ private boolean handleMySessionSpecialKey(MySession s, String key, Serializable object) { if (object != null) { if (SESSION_USER_ID_KEY.equals(key)) { s.setUserId((String) object); return true; } else if (SESSION_USER_EID_KEY.equals(key)) { s.setUserEid((String) object); return true; } else if (SESSION_CREATION_TIME_KEY.equals(key)) { if (object instanceof Long) { s.m_created = (Long) object; } } else if (SESSION_LAST_ACCESSED_TIME_KEY.equals(key)) { if (object instanceof Long) { s.m_accessed = (Long) object; } } else if (SESSION_TOOL_SESSIONS_KEY.equals(key)) { if (isObjectMap(object)) { //noinspection unchecked rebuildToolSessions(s, (Map<String, Serializable>)object); } return true; } else if (SESSION_CONTEXT_SESSIONS_KEY.equals(key)) { if (isObjectMap(object)) { //noinspection unchecked rebuildContextSessions(s, (Map<String, Serializable>)object); } return true; } else if 
(SESSION_CURRENT_TOOLSESSION_PLACEMENT_KEY.equals(key)) { // ORDER is critical for this key so we actually skip it here and then process it later in the rebuild return true; } else if (sessionAttributeBlacklist.contains(key)) { // skip this key entirely return true; } } return false; } /** * performs any special processing for keys found in the session cache. These keys could * be attributes of the session itself, or objects like ToolSessions or ContextSessions * @param s Session being processed * @param key name of the object stored in the session cache * @param object the object that must be handled separately * @return true if the key was specially processed, false if key uses default processing */ private boolean handleMLSessionSpecialKey(MyLittleSession s, String key, Serializable object) { if (object != null) { if (SESSION_CREATION_TIME_KEY.equals(key)) { if (object instanceof Long) { s.m_created = (Long) object; } return true; } else if (SESSION_LAST_ACCESSED_TIME_KEY.equals(key)) { if (object instanceof Long) { s.m_accessed = (Long) object; } return true; } else if (sessionAttributeBlacklist.contains(key)) { // skip this key entirely return true; } } return false; } /** * handleSessionStandardKey() set's the session attribute for the given key and object. 
* The object will be reconstructed if it is of type StoreableBreakdown; otherwise, it * is stored as the attributes value directly * @param s the Session that will contain the attribute * @param key the key for the attribute * @param object the value of the attribute, which will be reconstituted if of type StoreableBreakdown */ private void handleSessionStandardKey(HttpSession s, String key, Serializable object) { if (s != null && key != null) { String className = object.getClass().getName(); if (object instanceof StoreableBreakdown) { if (log.isDebugEnabled()) log.debug("rebuilding StoreableBreakdown, key: [" + key + "], className: [" + className + "]"); StoreableBreakdown storedBreakdown = (StoreableBreakdown) object; Breakdownable<?> handler = breakdownableHandlers.get(storedBreakdown.getClassName()); if (handler != null && handler instanceof BreakdownRebuildCallback) { // Skip the rebuilding and only call the stashing function boolean stashed = ((BreakdownRebuildCallback)handler).makeStash(storedBreakdown, key, s); if (!stashed) { String stashKey = storedBreakdown.makeStash(s.getId(), key); stashingCache.put(stashKey, storedBreakdown); } } else { Object thing = this.rebuildObject(storedBreakdown.getClassName(), storedBreakdown.getSize(), storedBreakdown.getData()); s.setAttribute(key, thing); } } else { if (log.isDebugEnabled()) { log.debug("rebuilding Serializable, key: [" + key + "], className: [" + className + "], value: [" + object + "]"); } s.setAttribute(key, object); } } } /** * rebuildToolSessions() expects to find a Map of Maps. The outer map contains * the ToolSession ID's, and for each ToolSessionId, the inner map contains the * attributes of that tool session * @param mySession a Session that can be resolved to a MySession, giving access to the ToolSession property * @param toolSessionMap a Serialized map of maps. 
The outer map containing the ToolSession ID, and the inner * map containing the details of the ToolSession */ private void rebuildToolSessions(MySession mySession, Map<String, Serializable> toolSessionMap) { for (Entry<String, Serializable> entry : toolSessionMap.entrySet()) { String toolSessionKey = entry.getKey(); // if a tool session doesn't exist for this key, a new one will be created automatically MyLittleSession toolSession = (MyLittleSession) mySession.getToolSession(toolSessionKey); Serializable serializable = entry.getValue(); if (!(serializable instanceof Map)) { log.warn("inner object for toolSession [" + toolSessionKey + "] should be [Map], found [" + serializable.getClass().getName() + "]"); continue; } @SuppressWarnings("unchecked") Map<String, Serializable> toolAttributes = (Map<String, Serializable>) serializable; processMLSessionMap(toolSession, toolAttributes); } } /** * rebuildContextSessions() expects to find a Map of Maps. The outer map contains * the ContextSession ID's, and for each ContextSessionId, the inner map contains the * attributes of that context session * @param mySession a Session that can be resolved to a MySession, giving access to the ToolSession property * @param contextSessionMap a Serialized map of maps. 
The outer map containing the ContextSession ID, and the inner * map containing the details of the ContextSession */ private void rebuildContextSessions(MySession mySession, Map<String, Serializable> contextSessionMap) { for (Entry<String, Serializable> entry : contextSessionMap.entrySet()) { String contextSessionKey = entry.getKey(); MyLittleSession contextSession = (MyLittleSession) mySession.getContextSession(contextSessionKey); Serializable serializable = entry.getValue(); if (!(serializable instanceof Map)) { log.warn("inner object for contextSession [" + contextSessionKey + "] should be [Map], found [" + serializable.getClass().getName() + "]"); continue; } @SuppressWarnings("unchecked") Map<String, Serializable> contextAttributes = (Map<String, Serializable>) serializable; processMLSessionMap(contextSession, contextAttributes); } } /** * We only want to breakdown a session as needed. Sakai will typically have 4+ requests per user click. * It also has pings which happen routinely to keep the session alive and update things in the portal. * If we actually have a likely chance of updates or there has been enough time then we will do the breakdown * and store it in the distributed cache. Otherwise we will skip it. 
* NOTE: this will be tricky to get right and could dramatically affect load and performance * * @param ms the MySession to check * @param req [OPTIONAL] the current request * @return true if the session should be processed now, false if processing should be skipped until later */ private boolean isSessionBreakdownAllowed(MySession ms, HttpServletRequest req) { boolean allowed = false; boolean done = false; int minSecondsBetweenStores = minSecondsBetweenStoresDefault; // DEFAULT int minSecsOldToStore = minAgeToStoreSecondsDefault; long now = System.currentTimeMillis(); // don't store invalidated or inactive or empty sessions if (ms == null || !ms.isValid() || ms.isInactive()) { allowed = false; done = true; } if (!done) { // first check for freshly created sessions, we don't store them until they are at least X seconds old (avoid server thrashing) if (serverConfigurationService != null) { // only try if we have a SCS AND the min has not been forced to the smallest value as an override minSecsOldToStore = serverConfigurationService.getInt("session.cluster.minSecsOldToStore", minAgeToStoreSecondsDefault); } long minMSOldToStore = minSecsOldToStore * 1000l; long sessionCreationMS = ms.getCreationTime(); long sessionMSOld = now - sessionCreationMS; if (sessionMSOld > minMSOldToStore) { allowed = true; } else { allowed = false; done = true; } } if (req != null && !done) { // requests through access or direct should not result in storing the session ever String contextPath = req.getContextPath(); if (StringUtils.startsWith(contextPath, "/direct") || StringUtils.startsWith(contextPath, "/xlogin") || StringUtils.startsWith(contextPath, "/access") ) { if (log.isDebugEnabled()) log.debug("isSessionBreakdownAllowed("+ms.getId()+"): found direct or access: "+contextPath); allowed = false; done = true; } //noinspection ConstantConditions if (!done && !allowed) { // we will assume that POSTs changed something and therefore should be allowed to always update the session String 
method = req.getMethod().toUpperCase(); if ("POST".equals(method)) { if (log.isDebugEnabled()) log.debug("isSessionBreakdownAllowed("+ms.getId()+"): found POST: "+req.getRequestURI()); minSecondsBetweenStores = smallestMinSecondsBetweenStores; // reset to the shortest reasonable minimum allowed = true; } } } if (!done) { // recently rebuilt sessions should not be stored for at least 30 seconds, check timing for all cases Long lastRebuild = (Long) ms.getAttribute(SESSION_LAST_REBUILD_KEY); if (lastRebuild != null) { int minSecondsAfterRebuild = minSecondsAfterRebuildDefault; if (serverConfigurationService != null) { // only try if we have a SCS minSecondsAfterRebuild = serverConfigurationService.getInt("session.cluster.minSecsAfterRebuild", minSecondsAfterRebuildDefault); if (minSecondsAfterRebuild < 1) { minSecondsAfterRebuild = minSecondsAfterRebuildDefault; } } long minMSAfterRebuild = minSecondsAfterRebuild * 1000l; long msSinceLastRebuild = (now - lastRebuild); if (msSinceLastRebuild > minMSAfterRebuild) { if (log.isDebugEnabled()) log.debug("isSessionBreakdownAllowed("+ms.getId()+"): rebuild min ("+minSecondsAfterRebuild+" s) passed: "+msSinceLastRebuild+" > "+minMSAfterRebuild); allowed = true; } } } if (!done) { // session breakdown should not happen if it happened recently, check the timing in ALL cases Long lastBreakdown = (Long) ms.getAttribute(SESSION_LAST_BREAKDOWN_KEY); if (lastBreakdown != null) { //noinspection ConstantConditions if (serverConfigurationService != null && minSecondsBetweenStores != smallestMinSecondsBetweenStores) { // only try if we have a SCS AND the min has not been forced to the smallest value as an override minSecondsBetweenStores = serverConfigurationService.getInt("session.cluster.minSecsBetweenStores", minSecondsBetweenStoresDefault); if (minSecondsBetweenStores < smallestMinSecondsBetweenStores) { minSecondsBetweenStores = smallestMinSecondsBetweenStores; } } long minMSBetweenStores = minSecondsBetweenStores * 1000l; long 
msSinceLastBreakdown = (now - lastBreakdown); if (msSinceLastBreakdown > minMSBetweenStores) { if (log.isDebugEnabled()) log.debug("isSessionBreakdownAllowed("+ms.getId()+"): store min ("+minSecondsBetweenStores+" s) passed: "+msSinceLastBreakdown+" > "+minMSBetweenStores); allowed = true; } } else { // not stored before so store it if (log.isDebugEnabled()) log.debug("isSessionBreakdownAllowed("+ms.getId()+"): not stored before"); allowed = true; } } return allowed; } /** * @return true if the session clustering is enabled */ private boolean isSessionClusteringEnabled() { boolean enabled = false; if (serverConfigurationService != null) { enabled = serverConfigurationService.getBoolean("session.cluster.replication", false); } return enabled; } /** * isSessionValid() checks the properties of the session * @param mySession a Session * @return true if the session is valid or false otherwise */ private boolean isSessionValid(MySession mySession) { if (mySession == null) { return false; } if (!mySession.isValid()) { return false; } if (mySession.isInactive()) { return false; } if (StringUtils.isBlank(mySession.getUserEid())) { return false; } if (StringUtils.isBlank(mySession.getUserId())) { return false; } return true; } /** * Determine if an object is a primitive type * @param object any object * @return true if the object is a String or primitive class type */ private boolean isObjectSimple(Object object) { boolean primitive = false; if (object != null) { Class clazz = object.getClass(); primitive = clazz.isPrimitive() || ConstructorUtils.isClassSimple(clazz); } return primitive; } /** * Determine if an object is a map * @param object any object * @return true if the object is a map */ private boolean isObjectMap(Object object) { boolean map = false; if (object != null) { Class clazz = object.getClass(); map = ConstructorUtils.isClassMap(clazz); } return map; } // SPRING SETTERS public void setMemoryService(MemoryService memoryService) { this.memoryService = 
memoryService; } public void setServerConfigurationService(ServerConfigurationService serverConfigurationService) { this.serverConfigurationService = serverConfigurationService; } public void setSessionManager(SessionManager sessionManager) { this.sessionManager = sessionManager; } public void setUsageSessionService(UsageSessionService usageSessionService) { this.usageSessionService = usageSessionService; } }
apache-2.0
dulvac/sling
contrib/extensions/collection/src/main/java/org/apache/sling/resource/collection/impl/ResourceCollectionAdapterFactory.java
4085
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.resource.collection.impl; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.sling.api.adapter.AdapterFactory; import org.apache.sling.api.resource.Resource; import org.apache.sling.api.resource.ResourceResolver; import org.apache.sling.resource.collection.ResourceCollection; import org.apache.sling.resource.collection.ResourceCollectionManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * AdapterFactory that adapts Resources to: {@link ResourceCollection} * And ResourceResolver to: {@link ResourceCollectionManager) */ @Component @Service @Property(name = "service.description", value = "Collection Adapter Factory") public class ResourceCollectionAdapterFactory implements AdapterFactory { private static final Logger log = LoggerFactory.getLogger(ResourceCollectionAdapterFactory.class); private static final Class<ResourceCollection> COLLECTION_CLASS = ResourceCollection.class; private static final Class<ResourceCollectionManager> COLLECTION_MGR_CLASS = ResourceCollectionManager.class; 
@Property(name = "adapters") public static final String[] ADAPTER_CLASSES = { COLLECTION_CLASS.getName(), COLLECTION_MGR_CLASS.getName() }; @Property(name = "adaptables") public static final String[] ADAPTABLE_CLASSES = { Resource.class.getName(), ResourceResolver.class.getName() }; @Reference private ResourceCollectionManager collectionManager; // ---------- AdapterFactory ----------------------------------------------- public <AdapterType> AdapterType getAdapter(Object adaptable, Class<AdapterType> type) { if (adaptable instanceof Resource) { return getAdapter((Resource) adaptable, type); } else if (adaptable instanceof ResourceResolver) { return getAdapter((ResourceResolver) adaptable, type); } else { log.warn("Unable to handle adaptable {}", adaptable.getClass().getName()); return null; } } @SuppressWarnings("unchecked") private <AdapterType> AdapterType getAdapter(Resource resource, Class<AdapterType> type) { if (resource != null) { if (type == COLLECTION_CLASS) { if (resource.isResourceType(ResourceCollection.RESOURCE_TYPE)) { return (AdapterType) new ResourceCollectionImpl(resource); } } log.debug("Unable to adapt resource of {} to type {}", resource.getResourceType(), type.getName()); } log.debug("Unable to adapt null resource to type {}", type.getName()); return null; } @SuppressWarnings("unchecked") private <AdapterType> AdapterType getAdapter(ResourceResolver resolver, Class<AdapterType> type) { if (COLLECTION_MGR_CLASS == type) { return (AdapterType) collectionManager; } else { log.warn("Unable to adapt resolver to requested type {}", type.getName()); return null; } } }
apache-2.0
stain/jdk8u
test/java/awt/print/PrinterJob/ImagePrinting/NullClipARGB.java
2677
/*
 * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/**
 * @test
 * @bug 8061392
 * @summary Test no NPE when printing transparency with null clip.
 */

import java.awt.*;
import java.awt.image.*;
import java.awt.print.*;

public class NullClipARGB implements Printable {

    public static void main(String[] args) {
        try {
            PrinterJob job = PrinterJob.getPrinterJob();
            job.setPrintable(new NullClipARGB());
            job.print();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    public int print(Graphics g, PageFormat pf, int pageIndex)
            throws PrinterException {
        // Single-page document: any index beyond the first ends the job.
        if (pageIndex != 0) {
            return NO_SUCH_PAGE;
        }
        Graphics2D g2d = (Graphics2D) g;
        System.out.println("original clip="+g2d.getClip());
        g2d.translate(pf.getImageableX(), pf.getImageableY());
        g2d.rotate(0.2);
        // The null clip is the condition under test (bug 8061392).
        g2d.setClip(null);
        g2d.setColor( Color.BLACK );
        g2d.drawString("This text should be visible through the image", 0, 20);
        BufferedImage tile = makeTranslucentImage();
        g2d.drawImage(tile, 10, 0, 90, 90, null );
        g2d.translate(100, 100);
        g2d.drawString("This text should also be visible through the image", 0, 20);
        g2d.drawImage(tile, 10, 0, 90, 90, null );
        return PAGE_EXISTS;
    }

    /** Builds a 100x100 translucent gray square with a black outline. */
    private static BufferedImage makeTranslucentImage() {
        BufferedImage img = new BufferedImage(100, 100, BufferedImage.TYPE_INT_ARGB );
        Graphics painter = img.createGraphics();
        painter.setColor( new Color( 192, 192, 192, 80 ) );
        painter.fillRect( 0, 0, 100, 100 );
        painter.setColor( Color.BLACK );
        painter.drawRect( 0, 0, 99, 99 );
        painter.dispose();
        return img;
    }
}
gpl-2.0
asedunov/intellij-community
java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/surroundWithArray/afterGenericArrayCreation.java
161
// "Surround with array initialization" "true"
import java.util.List;

class A {
    // Wraps the given list into a one-element (raw-typed) List array.
    public List<?>[] test(List<Number> list) {
        final List<?>[] wrapped = new List[]{list};
        return wrapped;
    }
}
apache-2.0
carlesls2/sitappandroidv1
prova/facebook/src/com/facebook/GraphRequestBatch.java
8736
/**
 * Copyright (c) 2014-present, Facebook, Inc. All rights reserved.
 *
 * You are hereby granted a non-exclusive, worldwide, royalty-free license to use,
 * copy, modify, and distribute this software in source code or binary form for use
 * in connection with the web services and APIs provided by Facebook.
 *
 * As with any software that integrates with the Facebook platform, your use of
 * this software is subject to the Facebook Developer Principles and Policies
 * [http://developers.facebook.com/policy/]. This copyright notice shall be
 * included in all copies or substantial portions of the software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
package com.facebook;

import android.os.Handler;

import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * RequestBatch contains a list of Request objects that can be sent to Facebook in a single
 * round-trip.
 */
public class GraphRequestBatch extends AbstractList<GraphRequest> {
    // Process-wide counter used to hand out unique batch ids.
    private static AtomicInteger idGenerator = new AtomicInteger();

    private Handler callbackHandler;
    private List<GraphRequest> requests = new ArrayList<GraphRequest>();
    private int timeoutInMilliseconds = 0;
    private final String id = Integer.valueOf(idGenerator.incrementAndGet()).toString();
    private List<Callback> callbacks = new ArrayList<Callback>();
    private String batchApplicationId;

    /**
     * Constructor. Creates an empty batch.
     */
    public GraphRequestBatch() {
        this.requests = new ArrayList<GraphRequest>();
    }

    /**
     * Constructor.
     * @param requests the requests to add to the batch
     */
    public GraphRequestBatch(Collection<GraphRequest> requests) {
        this.requests = new ArrayList<GraphRequest>(requests);
    }

    /**
     * Constructor.
     * @param requests the requests to add to the batch
     */
    public GraphRequestBatch(GraphRequest... requests) {
        // BUGFIX: Arrays.asList returns a fixed-size view backed by the varargs array;
        // the inherited mutators (add/remove/clear) on such a list throw
        // UnsupportedOperationException. Copy into a resizable ArrayList so this
        // constructor behaves like the other constructors.
        this.requests = new ArrayList<GraphRequest>(Arrays.asList(requests));
    }

    /**
     * Constructor.
     * @param requests the requests to add to the batch
     */
    public GraphRequestBatch(GraphRequestBatch requests) {
        this.requests = new ArrayList<GraphRequest>(requests);
        this.callbackHandler = requests.callbackHandler;
        this.timeoutInMilliseconds = requests.timeoutInMilliseconds;
        this.callbacks = new ArrayList<Callback>(requests.callbacks);
        // NOTE(review): batchApplicationId is not copied by this copy constructor —
        // confirm whether that omission is intentional.
    }

    /**
     * Gets the timeout to wait for responses from the server before a timeout error occurs.
     * @return the timeout, in milliseconds; 0 (the default) means do not timeout
     */
    public int getTimeout() {
        return timeoutInMilliseconds;
    }

    /**
     * Sets the timeout to wait for responses from the server before a timeout error occurs.
     * @param timeoutInMilliseconds the timeout, in milliseconds; 0 means do not timeout
     */
    public void setTimeout(int timeoutInMilliseconds) {
        if (timeoutInMilliseconds < 0) {
            throw new IllegalArgumentException("Argument timeoutInMilliseconds must be >= 0.");
        }
        this.timeoutInMilliseconds = timeoutInMilliseconds;
    }

    /**
     * Adds a batch-level callback which will be called when the entire batch has finished
     * executing.
     *
     * @param callback the callback
     */
    public void addCallback(Callback callback) {
        // De-duplicated: registering the same callback twice only notifies once.
        if (!callbacks.contains(callback)) {
            callbacks.add(callback);
        }
    }

    /**
     * Removes a batch-level callback.
     *
     * @param callback the callback
     */
    public void removeCallback(Callback callback) {
        callbacks.remove(callback);
    }

    @Override
    public final boolean add(GraphRequest request) {
        return requests.add(request);
    }

    @Override
    public final void add(int location, GraphRequest request) {
        requests.add(location, request);
    }

    @Override
    public final void clear() {
        requests.clear();
    }

    @Override
    public final GraphRequest get(int i) {
        return requests.get(i);
    }

    @Override
    public final GraphRequest remove(int location) {
        return requests.remove(location);
    }

    @Override
    public final GraphRequest set(int location, GraphRequest request) {
        return requests.set(location, request);
    }

    @Override
    public final int size() {
        return requests.size();
    }

    final String getId() {
        return id;
    }

    final Handler getCallbackHandler() {
        return callbackHandler;
    }

    final void setCallbackHandler(Handler callbackHandler) {
        this.callbackHandler = callbackHandler;
    }

    final List<GraphRequest> getRequests() {
        return requests;
    }

    final List<Callback> getCallbacks() {
        return callbacks;
    }

    /**
     * Getter for the batch application id.
     * @return the batch application id.
     */
    final public String getBatchApplicationId() {
        return batchApplicationId;
    }

    /**
     * Setter for the batch application id.
     * @param batchApplicationId The batch application id.
     */
    final public void setBatchApplicationId(String batchApplicationId) {
        this.batchApplicationId = batchApplicationId;
    }

    /**
     * Executes this batch on the current thread and returns the responses.
     * <p/>
     * This should only be used if you have transitioned off the UI thread.
     *
     * @return a list of Response objects representing the results of the requests; responses are
     * returned in the same order as the requests were specified.
     *
     * @throws FacebookException        If there was an error in the protocol used to communicate
     *                                  with the service
     * @throws IllegalArgumentException if the passed in RequestBatch is empty
     * @throws NullPointerException     if the passed in RequestBatch or any of its contents are
     *                                  null
     */
    public final List<GraphResponse> executeAndWait() {
        return executeAndWaitImpl();
    }

    /**
     * Executes this batch asynchronously. This function will return immediately, and the batch will
     * be processed on a separate thread. In order to process results of a request, or determine
     * whether a request succeeded or failed, a callback must be specified (see
     * {@link GraphRequest#setCallback(GraphRequest.Callback)})
     * <p/>
     * This should only be called from the UI thread.
     *
     * @return a RequestAsyncTask that is executing the request
     *
     * @throws IllegalArgumentException if this batch is empty
     * @throws NullPointerException     if any of the contents of this batch are null
     */
    public final GraphRequestAsyncTask executeAsync() {
        return executeAsyncImpl();
    }

    /**
     * Specifies the interface that consumers of the RequestBatch class can implement in order to be
     * notified when the entire batch completes execution. It will be called after all per-Request
     * callbacks are called.
     */
    public interface Callback {
        /**
         * The method that will be called when a batch completes.
         *
         * @param batch the RequestBatch containing the Requests which were executed
         */
        void onBatchCompleted(GraphRequestBatch batch);
    }

    /**
     * Specifies the interface that consumers of the RequestBatch class can implement in order to be
     * notified when the batch makes progress. The frequency of the callbacks can be controlled
     * using {@link FacebookSdk#setOnProgressThreshold(long)}.
     */
    public interface OnProgressCallback extends Callback {
        /**
         * The method that will be called when a batch makes progress.
         *
         * @param batch   the RequestBatch containing the Requests which were executed
         * @param current the current value of the progress
         * @param max     the max (target) value of the progress
         */
        void onBatchProgress(GraphRequestBatch batch, long current, long max);
    }

    // Package-private seams so tests can stub out the actual network execution.
    List<GraphResponse> executeAndWaitImpl() {
        return GraphRequest.executeBatchAndWait(this);
    }

    GraphRequestAsyncTask executeAsyncImpl() {
        return GraphRequest.executeBatchAsync(this);
    }
}
lgpl-3.0
carlesls2/sitappandroidv1
prova/facebook/src/com/facebook/internal/LoginAuthorizationType.java
1434
/** * Copyright (c) 2014-present, Facebook, Inc. All rights reserved. * * You are hereby granted a non-exclusive, worldwide, royalty-free license to use, * copy, modify, and distribute this software in source code or binary form for use * in connection with the web services and APIs provided by Facebook. * * As with any software that integrates with the Facebook platform, your use of * this software is subject to the Facebook Developer Principles and Policies * [http://developers.facebook.com/policy/]. This copyright notice shall be * included in all copies or substantial portions of the software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.facebook.internal; /** * com.facebook.internal is solely for the use of other packages within the Facebook SDK for * Android. Use of any of the classes in this package is unsupported, and they may be modified or * removed without warning at any time. */ public enum LoginAuthorizationType { READ, PUBLISH }
lgpl-3.0
WangTaoTheTonic/flink
flink-runtime/src/test/java/org/apache/flink/runtime/operators/CachedMatchTaskTest.java
16379
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.operators; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.FlatJoinFunction; import org.apache.flink.api.common.functions.RichFlatJoinFunction; import org.apache.flink.runtime.testutils.recordutils.RecordComparator; import org.apache.flink.runtime.testutils.recordutils.RecordPairComparatorFactory; import org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator; import org.apache.flink.runtime.operators.testutils.DriverTestBase; import org.apache.flink.runtime.operators.testutils.ExpectedTestException; import org.apache.flink.runtime.operators.testutils.NirvanaOutputList; import org.apache.flink.runtime.operators.testutils.TaskCancelThread; import org.apache.flink.runtime.operators.testutils.UniformRecordGenerator; import org.apache.flink.types.Value; import org.apache.flink.types.IntValue; import org.apache.flink.types.Record; import org.apache.flink.util.Collector; import org.junit.Assert; import org.junit.Test; public class CachedMatchTaskTest extends DriverTestBase<FlatJoinFunction<Record, 
Record, Record>> { private static final long HASH_MEM = 6*1024*1024; private static final long SORT_MEM = 3*1024*1024; @SuppressWarnings("unchecked") private final RecordComparator comparator1 = new RecordComparator( new int[]{0}, (Class<? extends Value>[])new Class[]{ IntValue.class }); @SuppressWarnings("unchecked") private final RecordComparator comparator2 = new RecordComparator( new int[]{0}, (Class<? extends Value>[])new Class[]{ IntValue.class }); private final List<Record> outList = new ArrayList<Record>(); public CachedMatchTaskTest(ExecutionConfig config) { super(config, HASH_MEM, 2, SORT_MEM); } @Test public void testHash1MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 10; int valCnt2 = 2; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>(); try { testResettableDriver(testTask, MockMatchStub.class, 3); } catch (Exception e) { e.printStackTrace(); Assert.fail("Test caused an exception."); } final int expCnt = valCnt1*valCnt2*Math.min(keyCnt1, keyCnt2); Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size()); this.outList.clear(); } @Test public void testHash2MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 1; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); 
setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>(); try { testResettableDriver(testTask, MockMatchStub.class, 3); } catch (Exception e) { e.printStackTrace(); Assert.fail("Test caused an exception."); } final int expCnt = valCnt1*valCnt2*Math.min(keyCnt1, keyCnt2); Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size()); this.outList.clear(); } @Test public void testHash3MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 20; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>(); try { testResettableDriver(testTask, MockMatchStub.class, 3); } catch (Exception e) { e.printStackTrace(); Assert.fail("Test caused an exception."); } final int expCnt = valCnt1*valCnt2*Math.min(keyCnt1, keyCnt2); Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size()); this.outList.clear(); } @Test public void testHash4MatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 1; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); 
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>(); try { testResettableDriver(testTask, MockMatchStub.class, 3); } catch (Exception e) { e.printStackTrace(); Assert.fail("Test caused an exception."); } final int expCnt = valCnt1*valCnt2*Math.min(keyCnt1, keyCnt2); Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size()); this.outList.clear(); } @Test public void testHash5MatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>(); try { testResettableDriver(testTask, MockMatchStub.class, 3); } catch (Exception e) { e.printStackTrace(); Assert.fail("Test caused an exception."); } final int expCnt = valCnt1*valCnt2*Math.min(keyCnt1, keyCnt2); Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size()); this.outList.clear(); } @Test public void testFailingHashFirstMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); 
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>(); try { testResettableDriver(testTask, MockFailingMatchStub.class, 3); Assert.fail("Function exception was not forwarded."); } catch (ExpectedTestException etex) { // good! } catch (Exception e) { e.printStackTrace(); Assert.fail("Test caused an exception."); } } @Test public void testFailingHashSecondMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>(); try { testResettableDriver(testTask, MockFailingMatchStub.class, 3); Assert.fail("Function exception was not forwarded."); } catch (ExpectedTestException etex) { // good! 
} catch (Exception e) { e.printStackTrace(); Assert.fail("Test caused an exception."); } } @Test public void testCancelHashMatchTaskWhileBuildFirst() { int keyCnt = 20; int valCnt = 20; addInput(new DelayingInfinitiveInputIterator(100)); addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); final BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>(); final AtomicBoolean success = new AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockFailingMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this); tct.start(); try { tct.join(); taskRunner.join(); } catch(InterruptedException ie) { Assert.fail("Joining threads failed"); } Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get()); } @Test public void testHashCancelMatchTaskWhileBuildSecond() { int keyCnt = 20; int valCnt = 20; addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(new DelayingInfinitiveInputIterator(100)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED); getTaskConfig().setRelativeMemoryDriver(1.0f); final BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>(); final AtomicBoolean success = new 
AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this); tct.start(); try { tct.join(); taskRunner.join(); } catch(InterruptedException ie) { Assert.fail("Joining threads failed"); } Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get()); } @Test public void testHashFirstCancelMatchTaskWhileMatching() { int keyCnt = 20; int valCnt = 20; addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST); getTaskConfig().setRelativeMemoryDriver(1.0f); final BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>(); final AtomicBoolean success = new AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this); tct.start(); try { tct.join(); taskRunner.join(); } catch(InterruptedException ie) { Assert.fail("Joining threads failed"); } Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get()); } @Test public void testHashSecondCancelMatchTaskWhileMatching() { int keyCnt = 20; int valCnt = 20; addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addDriverComparator(this.comparator1); 
addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(1.0f); final BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>(); final AtomicBoolean success = new AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this); tct.start(); try { tct.join(); taskRunner.join(); } catch(InterruptedException ie) { Assert.fail("Joining threads failed"); } Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get()); } // ================================================================================================= public static final class MockMatchStub extends RichFlatJoinFunction<Record, Record, Record> { private static final long serialVersionUID = 1L; @Override public void join(Record record1, Record record2, Collector<Record> out) throws Exception { out.collect(record1); } } public static final class MockFailingMatchStub extends RichFlatJoinFunction<Record, Record, Record> { private static final long serialVersionUID = 1L; private int cnt = 0; @Override public void join(Record record1, Record record2, Collector<Record> out) throws Exception { if (++this.cnt >= 10) { throw new ExpectedTestException(); } out.collect(record1); } } public static final class MockDelayingMatchStub extends RichFlatJoinFunction<Record, Record, Record> { private static final long serialVersionUID = 1L; @Override public void join(Record record1, Record record2, Collector<Record> out) { try { Thread.sleep(100); } catch (InterruptedException e) { } } } }
apache-2.0
gorcz/Hystrix
hystrix-core/src/test/java/com/netflix/hystrix/collapser/CollapsedRequestObservableFunctionTest.java
6477
/** * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.hystrix.collapser; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import org.junit.Test; import rx.Observable; public class CollapsedRequestObservableFunctionTest { @Test public void testSetResponseSuccess() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> v = o.toBlocking().toFuture(); cr.setResponse("theResponse"); // fetch value assertEquals("theResponse", v.get()); } @Test public void testSetNullResponseSuccess() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> v = o.toBlocking().toFuture(); cr.setResponse(null); // fetch value assertEquals(null, v.get()); } @Test public void testSetException() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> v = 
o.toBlocking().toFuture(); cr.setException(new RuntimeException("anException")); // fetch value try { v.get(); fail("expected exception"); } catch (ExecutionException e) { assertEquals("anException", e.getCause().getMessage()); } } @Test public void testSetExceptionAfterResponse() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> v = o.toBlocking().toFuture(); cr.setResponse("theResponse"); try { cr.setException(new RuntimeException("anException")); fail("expected IllegalState"); } catch (IllegalStateException e) { } assertEquals("theResponse", v.get()); } @Test public void testSetResponseAfterException() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> v = o.toBlocking().toFuture(); cr.setException(new RuntimeException("anException")); try { cr.setResponse("theResponse"); fail("expected IllegalState"); } catch (IllegalStateException e) { } try { v.get(); fail("expected exception"); } catch (ExecutionException e) { assertEquals("anException", e.getCause().getMessage()); } } @Test public void testSetResponseDuplicate() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> v = o.toBlocking().toFuture(); cr.setResponse("theResponse"); try { cr.setResponse("theResponse2"); fail("expected IllegalState"); } catch (IllegalStateException e) { } assertEquals("theResponse", v.get()); } @Test(expected = CancellationException.class) public void testSetResponseAfterUnsubscribe() throws InterruptedException, ExecutionException { 
CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> f = o.toBlocking().toFuture(); // cancel/unsubscribe f.cancel(true); try { cr.setResponse("theResponse"); } catch (IllegalStateException e) { fail("this should have done nothing as it was unsubscribed already"); } // expect CancellationException after cancelling f.get(); } @Test(expected = CancellationException.class) public void testSetExceptionAfterUnsubscribe() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> f = o.toBlocking().toFuture(); // cancel/unsubscribe f.cancel(true); try { cr.setException(new RuntimeException("anException")); } catch (IllegalStateException e) { fail("this should have done nothing as it was unsubscribed already"); } // expect CancellationException after cancelling f.get(); } @Test public void testUnsubscribeAfterSetResponse() throws InterruptedException, ExecutionException { CollapsedRequestObservableFunction<String, String> cr = new CollapsedRequestObservableFunction<String, String>("hello"); Observable<String> o = Observable.create(cr); Future<String> v = o.toBlocking().toFuture(); cr.setResponse("theResponse"); // unsubscribe after the value is sent v.cancel(true); // still get value as it was set before canceling assertEquals("theResponse", v.get()); } }
apache-2.0
573196010/powermock
examples/simple/src/test/java/demo/org/powermock/examples/simple/LoggerTest.java
2332
/* * Copyright 2008 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package demo.org.powermock.examples.simple; import org.easymock.EasyMock; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.powermock.reflect.Whitebox; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.io.Writer; import static org.powermock.api.easymock.PowerMock.*; import static org.powermock.api.support.membermodification.MemberMatcher.constructor; import static org.powermock.api.support.membermodification.MemberModifier.suppress; @RunWith(PowerMockRunner.class) @PrepareForTest( { Logger.class }) public class LoggerTest { @Test(expected = IllegalStateException.class) public void testException() throws Exception { expectNew(FileWriter.class, "target/logger.log").andThrow(new IOException()); replayAll(); new Logger(); } @Test public void testLogger() throws Exception { PrintWriter printWriter = createMock(PrintWriter.class); printWriter.println("qwe"); expectNew(PrintWriter.class, new Class[] { Writer.class }, EasyMock.anyObject()).andReturn(printWriter); replayAll(); Logger logger = new Logger(); logger.log("qwe"); verifyAll(); } @Test public void testLogger2() throws Exception { PrintWriter printWriter = createMock(PrintWriter.class); printWriter.println("qwe"); suppress(constructor(Logger.class)); 
replayAll(); Logger logger = new Logger(); Whitebox.setInternalState(logger, printWriter); logger.log("qwe"); verifyAll(); } }
apache-2.0
diegotori/robolectric
robolectric-processor/src/test/resources/org/robolectric/Robolectric_Anything.java
1579
package org.robolectric; import java.util.HashMap; import java.util.Map; import javax.annotation.Generated; import org.robolectric.annotation.processing.objects.AnyObject; import org.robolectric.annotation.processing.objects.Dummy; import org.robolectric.annotation.processing.shadows.ShadowAnything; import org.robolectric.annotation.processing.shadows.ShadowDummy; import org.robolectric.internal.ShadowExtractor; import org.robolectric.internal.ShadowProvider; @Generated("org.robolectric.annotation.processing.RobolectricProcessor") @SuppressWarnings({"unchecked","deprecation"}) public class Shadows implements ShadowProvider { private static final Map<String, String> SHADOW_MAP = new HashMap<>(2); static { SHADOW_MAP.put("org.robolectric.annotation.processing.objects.AnyObject", "org.robolectric.annotation.processing.shadows.ShadowAnything"); SHADOW_MAP.put("org.robolectric.annotation.processing.objects.Dummy", "org.robolectric.annotation.processing.shadows.ShadowDummy"); } public static ShadowAnything shadowOf(AnyObject actual) { return (ShadowAnything) ShadowExtractor.extract(actual); } public static ShadowDummy shadowOf(Dummy actual) { return (ShadowDummy) ShadowExtractor.extract(actual); } public void reset() { ShadowAnything.anotherResetter(); ShadowDummy.resetter_method(); } @Override public Map<String, String> getShadowMap() { return SHADOW_MAP; } @Override public String[] getProvidedPackageNames() { return new String[] {"org.robolectric.annotation.processing.objects"}; } }
mit
jackalchen/dex2jar
dex-ir/src/main/java/com/googlecode/dex2jar/ir/stmt/TableSwitchStmt.java
2232
/* * Copyright (c) 2009-2012 Panxiaobo * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.dex2jar.ir.stmt; import com.googlecode.dex2jar.ir.LabelAndLocalMapper; import com.googlecode.dex2jar.ir.expr.Value; /** * Represent a TABLE_SWITCH statement * * @see ST#TABLE_SWITCH * * @author <a href="mailto:pxb1988@gmail.com">Panxiaobo</a> * @version $Rev: 9fd8005bbaa4 $ */ public class TableSwitchStmt extends BaseSwitchStmt { public int lowIndex; public TableSwitchStmt() { super(ST.TABLE_SWITCH, null); } public TableSwitchStmt(Value key, int lowIndex, LabelStmt[] targets, LabelStmt defaultTarget) { super(ST.TABLE_SWITCH, key); this.lowIndex = lowIndex; this.targets = targets; this.defaultTarget = defaultTarget; } @Override public Stmt clone(LabelAndLocalMapper mapper) { LabelStmt[] nTargets = new LabelStmt[targets.length]; for (int i = 0; i < nTargets.length; i++) { nTargets[i] = mapper.map(targets[i]); } return new TableSwitchStmt(op.clone(mapper), lowIndex, nTargets, mapper.map(defaultTarget)); } @Override public String toString() { StringBuilder sb = new StringBuilder("switch(").append(op).append(") {"); for (int i = 0; i < targets.length; i++) { sb.append("\n case ").append(lowIndex + i).append(": GOTO ").append(targets[i].getDisplayName()) .append(";"); } sb.append("\n default : GOTO ").append(defaultTarget.getDisplayName()).append(";"); sb.append("\n}"); return sb.toString(); } }
apache-2.0
AlexSikia/dotty
tests/pos/java-interop/t1230/J.java
29
class J { public int foo ; }
bsd-3-clause
raphanda/ExoPlayer
demo/src/main/java/com/google/android/exoplayer/demo/Samples.java
8405
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer.demo; import java.util.Locale; /** * Holds statically defined sample definitions. */ /* package */ class Samples { public static class Sample { public final String name; public final String contentId; public final String uri; public final int type; public Sample(String name, String uri, int type) { this(name, name.toLowerCase(Locale.US).replaceAll("\\s", ""), uri, type); } public Sample(String name, String contentId, String uri, int type) { this.name = name; this.contentId = contentId; this.uri = uri; this.type = type; } } public static final Sample[] YOUTUBE_DASH_MP4 = new Sample[] { new Sample("Google Glass", "http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?" + "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&" + "ipbits=0&expire=19000000000&signature=51AF5F39AB0CEC3E5497CD9C900EBFEAECCCB5C7." + "8506521BFC350652163895D4C26DEE124209AA9E&key=ik0", PlayerActivity.TYPE_DASH), new Sample("Google Play", "http://www.youtube.com/api/manifest/dash/id/3aa39fa2cc27967f/source/youtube?" + "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&" + "ipbits=0&expire=19000000000&signature=A2716F75795F5D2AF0E88962FFCD10DB79384F29." 
+ "84308FF04844498CE6FBCE4731507882B8307798&key=ik0", PlayerActivity.TYPE_DASH), }; public static final Sample[] YOUTUBE_DASH_WEBM = new Sample[] { new Sample("Google Glass", "http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?" + "as=fmp4_audio_clear,webm2_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&" + "ipbits=0&expire=19000000000&signature=249B04F79E984D7F86B4D8DB48AE6FAF41C17AB3." + "7B9F0EC0505E1566E59B8E488E9419F253DDF413&key=ik0", PlayerActivity.TYPE_DASH), new Sample("Google Play", "http://www.youtube.com/api/manifest/dash/id/3aa39fa2cc27967f/source/youtube?" + "as=fmp4_audio_clear,webm2_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&" + "ipbits=0&expire=19000000000&signature=B1C2A74783AC1CC4865EB312D7DD2D48230CC9FD." + "BD153B9882175F1F94BFE5141A5482313EA38E8D&key=ik0", PlayerActivity.TYPE_DASH), }; public static final Sample[] SMOOTHSTREAMING = new Sample[] { new Sample("Super speed", "http://playready.directtaps.net/smoothstreaming/SSWSS720H264/SuperSpeedway_720.ism", PlayerActivity.TYPE_SS), new Sample("Super speed (PlayReady)", "http://playready.directtaps.net/smoothstreaming/SSWSS720H264PR/SuperSpeedway_720.ism", PlayerActivity.TYPE_SS), }; public static final Sample[] WIDEVINE_GTS = new Sample[] { new Sample("WV: HDCP not specified", "d286538032258a1c", "http://www.youtube.com/api/manifest/dash/id/d286538032258a1c/source/youtube?" + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0" + "&ipbits=0&expire=19000000000&signature=477CF7D478BE26C205045D507E9358F85F84C065." + "8971631EB657BC33EC2F48A2FF4211956760C3E9&key=ik0", PlayerActivity.TYPE_DASH), new Sample("WV: HDCP not required", "48fcc369939ac96c", "http://www.youtube.com/api/manifest/dash/id/48fcc369939ac96c/source/youtube?" + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0" + "&ipbits=0&expire=19000000000&signature=171DAE48D00B5BE7434BC1A9F84DAE0463C7EA7A." 
+ "0925B4DBB5605BEE9F5D088C48F25F5108E96191&key=ik0", PlayerActivity.TYPE_DASH), new Sample("WV: HDCP required", "e06c39f1151da3df", "http://www.youtube.com/api/manifest/dash/id/e06c39f1151da3df/source/youtube?" + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0" + "&ipbits=0&expire=19000000000&signature=8D3B8AF4E3F72B7F127C8D0D39B7AFCF37B30519." + "A118BADEBF3582AD2CC257B0EE6E579C6955D8AA&key=ik0", PlayerActivity.TYPE_DASH), new Sample("WV: Secure video path required", "0894c7c8719b28a0", "http://www.youtube.com/api/manifest/dash/id/0894c7c8719b28a0/source/youtube?" + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0" + "&ipbits=0&expire=19000000000&signature=A41D835C7387885A4A820628F57E481E00095931." + "9D50DBEEB5E37344647EE11BDA129A7FCDE8B7B9&key=ik0", PlayerActivity.TYPE_DASH), new Sample("WV: HDCP + secure video path required", "efd045b1eb61888a", "http://www.youtube.com/api/manifest/dash/id/efd045b1eb61888a/source/youtube?" + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0" + "&ipbits=0&expire=19000000000&signature=A97C9032C9D0C74F1643DB17C178873887C229E4." + "0A657BF6F23C8BC1538F276137383478330B76DE&key=ik0", PlayerActivity.TYPE_DASH), new Sample("WV: 30s license duration (fails at ~30s)", "f9a34cab7b05881a", "http://www.youtube.com/api/manifest/dash/id/f9a34cab7b05881a/source/youtube?" + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0" + "&ipbits=0&expire=19000000000&signature=80648A12A7D5FC1FA02B52B4250E4EB74CF0C5FD." 
+ "66A261130CA137AA5C541EA9CED2DBF240829EE6&key=ik0", PlayerActivity.TYPE_DASH), }; public static final Sample[] HLS = new Sample[] { new Sample("Apple master playlist", "https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_4x3/" + "bipbop_4x3_variant.m3u8", PlayerActivity.TYPE_HLS), new Sample("Apple master playlist advanced", "https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_16x9/" + "bipbop_16x9_variant.m3u8", PlayerActivity.TYPE_HLS), new Sample("Apple TS media playlist", "https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_4x3/gear1/" + "prog_index.m3u8", PlayerActivity.TYPE_HLS), new Sample("Apple AAC media playlist", "https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_4x3/gear0/" + "prog_index.m3u8", PlayerActivity.TYPE_HLS), new Sample("Apple ID3 metadata", "http://devimages.apple.com/samplecode/adDemo/ad.m3u8", PlayerActivity.TYPE_HLS), }; public static final Sample[] MISC = new Sample[] { new Sample("Dizzy", "http://html5demos.com/assets/dizzy.mp4", PlayerActivity.TYPE_OTHER), new Sample("Apple AAC 10s", "https://devimages.apple.com.edgekey.net/" + "streaming/examples/bipbop_4x3/gear0/fileSequence0.aac", PlayerActivity.TYPE_OTHER), new Sample("Apple TS 10s", "https://devimages.apple.com.edgekey.net/streaming/examples/" + "bipbop_4x3/gear1/fileSequence0.ts", PlayerActivity.TYPE_OTHER), new Sample("Android screens (Matroska)", "http://storage.googleapis.com/exoplayer-test-media-1/" + "mkv/android-screens-lavf-56.36.100-aac-avc-main-1280x720.mkv", PlayerActivity.TYPE_OTHER), new Sample("Big Buck Bunny (MP4 Video)", "http://redirector.c.youtube.com/videoplayback?id=604ed5ce52eda7ee&itag=22&source=youtube&" + "sparams=ip,ipbits,expire,source,id&ip=0.0.0.0&ipbits=0&expire=19000000000&signature=" + "513F28C7FDCBEC60A66C86C9A393556C99DC47FB.04C88036EEE12565A1ED864A875A58F15D8B5300" + "&key=ik0", PlayerActivity.TYPE_OTHER), new Sample("Google Play (MP3 Audio)", 
"http://storage.googleapis.com/exoplayer-test-media-0/play.mp3", PlayerActivity.TYPE_OTHER), new Sample("Google Glass (WebM Video with Vorbis Audio)", "http://demos.webmproject.org/exoplayer/glass_vp9_vorbis.webm", PlayerActivity.TYPE_OTHER), }; private Samples() {} }
apache-2.0
dahlstrom-g/intellij-community
java/java-tests/testData/refactoring/extractMethodNew/ParametrizedDuplicateNestedSubexpression.java
325
class C { public void foo(C c, long d, int i, String s) { <selection>b(s, 1).m(d, i())</selection>; b(s, i).m(d, k(d)); c.m(d, i); } private C b(String s, int i) { return new C(); } void m(long d, int i) { } private int i() { return 0; } private int k(long d) { return 0; } }
apache-2.0
weiwenqiang/GitHub
expert/realm-java/examples/moduleExample/library/src/main/java/io/realm/examples/librarymodules/model/Dog.java
867
/* * Copyright 2015 Realm Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.realm.examples.librarymodules.model; import io.realm.RealmObject; public class Dog extends RealmObject { private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } }
apache-2.0
jonathanmcelroy/DataCommunicationsProgram456
twitter4j/twitter4j-appengine/src/main/java/twitter4j/AppEngineTwitterImpl.java
1107
/* * Copyright 2007 Yusuke Yamamoto * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package twitter4j; import twitter4j.auth.Authorization; import twitter4j.conf.Configuration; /** * @author Yusuke Yamamoto - yusuke at mac.com * @since Twitter4J 2.2.4 */ class AppEngineTwitterImpl extends TwitterImpl { private static final long serialVersionUID = -2740925801854937207L; AppEngineTwitterImpl(Configuration conf, Authorization auth) { super(conf, auth); } @Override protected void setFactory() { factory = new LazyJSONImplFactory(conf); } }
gpl-2.0
rokn/Count_Words_2015
testing/openjdk2/langtools/src/share/classes/com/sun/source/tree/BinaryTree.java
1723
/* * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.source.tree; /** * A tree node for a binary expression. * Use {@link #getKind getKind} to determine the kind of operator. * * For example: * <pre> * <em>leftOperand</em> <em>operator</em> <em>rightOperand</em> * </pre> * * @jls sections 15.17 to 15.24 * * @author Peter von der Ah&eacute; * @author Jonathan Gibbons * @since 1.6 */ @jdk.Exported public interface BinaryTree extends ExpressionTree { ExpressionTree getLeftOperand(); ExpressionTree getRightOperand(); }
mit
YolandaMDavis/nifi
nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/main/java/org/apache/nifi/processors/evtx/ResultProcessor.java
2188
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.evtx; import com.google.common.net.MediaType; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; public class ResultProcessor { private final Relationship successRelationship; private final Relationship failureRelationship; public static final String UNABLE_TO_PROCESS_DUE_TO = "Unable to process {} due to {}"; public ResultProcessor(Relationship successRelationship, Relationship failureRelationship) { this.successRelationship = successRelationship; this.failureRelationship = failureRelationship; } public void process(ProcessSession session, ComponentLog logger, FlowFile updated, Exception exception, String name) { updated = session.putAttribute(updated, CoreAttributes.FILENAME.key(), name); updated = session.putAttribute(updated, CoreAttributes.MIME_TYPE.key(), MediaType.APPLICATION_XML_UTF_8.toString()); if (exception == null) { session.transfer(updated, successRelationship); } else { logger.error(UNABLE_TO_PROCESS_DUE_TO, new Object[]{name, exception}, exception); 
session.transfer(updated, failureRelationship); } } }
apache-2.0
akosyakov/intellij-community
platform/lang-impl/src/com/intellij/ide/impl/dataRules/VirtualFileRule.java
1985
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.impl.dataRules; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.DataProvider; import com.intellij.openapi.actionSystem.LangDataKeys; import com.intellij.openapi.actionSystem.PlatformDataKeys; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.util.PsiUtilBase; public class VirtualFileRule implements GetDataRule { @Override public Object getData(final DataProvider dataProvider) { // Try to detect multiselection. PsiElement[] psiElements = LangDataKeys.PSI_ELEMENT_ARRAY.getData(dataProvider); if (psiElements != null) { for (PsiElement elem : psiElements) { VirtualFile virtualFile = PsiUtilBase.getVirtualFile(elem); if (virtualFile != null) return virtualFile; } } VirtualFile[] virtualFiles = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataProvider); if (virtualFiles != null && virtualFiles.length == 1) { return virtualFiles[0]; } PsiFile psiFile = CommonDataKeys.PSI_FILE.getData(dataProvider); if (psiFile != null) { return psiFile.getVirtualFile(); } PsiElement elem = CommonDataKeys.PSI_ELEMENT.getData(dataProvider); if (elem == null) { return null; } return PsiUtilBase.getVirtualFile(elem); } }
apache-2.0
tomwscott/GoCD
config/config-api/test/com/thoughtworks/go/util/ArtifactLogUtilTest.java
1311
/*************************GO-LICENSE-START********************************* * Copyright 2014 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.util; import org.junit.Test; import static com.thoughtworks.go.util.ArtifactLogUtil.isConsoleOutput; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; public class ArtifactLogUtilTest { @Test public void shouldIdentifyConsoleLog() throws Exception { assertThat(isConsoleOutput("cruise-output/console.log"), is(true)); } @Test public void shouldNotIdentifyAnyOtherArtifactAsConsoleLog() throws Exception { assertThat(isConsoleOutput("artifact"), is(false)); } }
apache-2.0
YolandaMDavis/nifi
nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/ClientIdParameter.java
1475
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.web.api.request; import java.util.UUID; import org.apache.commons.lang3.StringUtils; /** * Class for parsing handling client ids. If the client id is not specified, one will be generated. */ public class ClientIdParameter { private final String clientId; public ClientIdParameter(String clientId) { if (StringUtils.isBlank(clientId)) { this.clientId = UUID.randomUUID().toString(); } else { this.clientId = clientId; } } public ClientIdParameter() { this.clientId = UUID.randomUUID().toString(); } public String getClientId() { return clientId; } }
apache-2.0
snadakuduru/camel
components/camel-kestrel/src/main/java/org/apache/camel/component/kestrel/KestrelConsumer.java
13377
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.kestrel;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Exchanger;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;

import net.spy.memcached.MemcachedClient;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.ShutdownRunningTask;
import org.apache.camel.impl.DefaultConsumer;
import org.apache.camel.spi.ShutdownAware;

/**
 * A Camel consumer that polls a kestrel queue.
 *
 * <p>Threading model: a single {@link Poller} thread reads from kestrel (via the
 * memcached protocol). With {@code concurrentConsumers > 1}, a pool of
 * {@link Handler} threads processes the polled values; work is handed from the
 * poller to a handler through an {@link Exchanger} so the poller never reads
 * from kestrel unless a handler is already waiting. With a single consumer the
 * poller processes exchanges inline.
 *
 * <p>Shutdown: {@link #prepareShutdown(boolean)} sets {@code shutdownPending}
 * and blocks on {@code shutdownLatch}, which each worker thread counts down as
 * it exits its run loop.
 */
public class KestrelConsumer extends DefaultConsumer implements ShutdownAware {
    private final KestrelEndpoint endpoint;
    private final MemcachedClient memcachedClient;
    // Handlers park their Exchanger here to signal "ready for work"; the poller
    // takes one before each kestrel read.
    private final BlockingQueue<Exchanger<?>> exchangerQueue = new LinkedBlockingQueue<Exchanger<?>>();
    private ExecutorService pollerExecutor;
    private ExecutorService handlerExecutor;
    // volatile: written by the shutdown thread, read by poller/handler threads.
    private volatile boolean shutdownPending;
    // Counted down once per worker thread (handlers + poller) on exit.
    private CountDownLatch shutdownLatch;
    // Number of exchanges currently in flight; reported to the shutdown strategy.
    private AtomicInteger pendingExchangeCount = new AtomicInteger(0);

    public KestrelConsumer(final KestrelEndpoint endpoint, Processor processor, final MemcachedClient memcachedClient) {
        super(endpoint, processor);
        this.endpoint = endpoint;
        this.memcachedClient = memcachedClient;
    }

    /**
     * Starts the handler pool (when concurrentConsumers > 1) and the single
     * poller thread, sizing the shutdown latch to the number of threads started.
     */
    @Override
    protected void doStart() throws Exception {
        log.info("Starting consumer for " + endpoint.getEndpointUri());

        int poolSize = endpoint.getConfiguration().getConcurrentConsumers();

        shutdownPending = false;

        if (poolSize > 1) {
            // We'll set the shutdown latch to poolSize + 1, since we'll also
            // wait for the poller thread when shutting down.
            shutdownLatch = new CountDownLatch(poolSize + 1);

            // Fire up the handler thread pool
            handlerExecutor = endpoint.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, "Handlers-" + endpoint.getEndpointUri(), poolSize);
            for (int k = 0; k < poolSize; ++k) {
                handlerExecutor.execute(new Handler());
            }
        } else {
            // Since we only have concurrentConsumers=1, we'll do the handling
            // inside the poller thread, so there will only be one thread to
            // wait for on this latch.
            shutdownLatch = new CountDownLatch(1);
        }

        // Fire up the single poller thread
        pollerExecutor = endpoint.getCamelContext().getExecutorServiceManager().newSingleThreadExecutor(this, "Poller-" + endpoint.getEndpointUri());
        pollerExecutor.submit(new Poller(poolSize > 1));

        super.doStart();
    }

    /**
     * Shuts down both executors. Safe to call more than once: the executor
     * fields are nulled after shutdown.
     */
    @Override
    protected void doStop() throws Exception {
        log.info("Stopping consumer for " + endpoint.getEndpointUri());

        if (pollerExecutor != null) {
            endpoint.getCamelContext().getExecutorServiceManager().shutdown(pollerExecutor);
            pollerExecutor = null;
        }
        if (handlerExecutor != null) {
            endpoint.getCamelContext().getExecutorServiceManager().shutdown(handlerExecutor);
            handlerExecutor = null;
        }

        super.doStop();
    }

    // ShutdownAware: never defer; prepareShutdown below waits for workers itself.
    public boolean deferShutdown(ShutdownRunningTask shutdownRunningTask) {
        return false;
    }

    // ShutdownAware: exchanges currently being processed or handed off.
    public int getPendingExchangesSize() {
        return pendingExchangeCount.get();
    }

    /**
     * Signals the worker threads to stop and blocks until they have all
     * counted down the shutdown latch.
     */
    @Override
    public void prepareShutdown(boolean forced) {
        // Signal to our threads that shutdown is happening
        shutdownPending = true;

        if (log.isDebugEnabled()) {
            log.debug("Preparing to shutdown, waiting for {} threads to complete.", shutdownLatch.getCount());
        }

        // Wait for all threads to end
        try {
            shutdownLatch.await();
        } catch (InterruptedException e) {
            // ignore
        }
    }

    /**
     * This single thread is responsible for reading objects from kestrel and
     * dispatching them to the handler threads.  The catch is that we don't
     * want to poll kestrel until we know we have a handler thread available
     * and waiting to handle whatever comes up.  So the way we deal with that
     * is...each handler thread has an exchanger used to "receive" objects
     * from the kestrel reader thread.  When a handler thread is ready for
     * work, it simply puts its exchanger in the queue.  The kestrel reader
     * thread takes an exchanger from the queue (which will block until one
     * is there), and *then* it can poll kestrel.  Once an object is received
     * from kestrel, it gets exchanged with the handler thread, which can
     * take the object and process it.  Repeat...
     */
    @SuppressWarnings("unchecked")
    private final class Poller implements Runnable {
        // true when handler threads exist; false means process inline.
        private boolean concurrent;

        private Poller(boolean concurrent) {
            this.concurrent = concurrent;
        }

        public void run() {
            log.trace("Kestrel poller is running");

            // Construct the target key that we'll be requesting from kestrel.
            // Include the /t=... wait time as applicable.
            String target;
            if (endpoint.getConfiguration().getWaitTimeMs() > 0) {
                target = endpoint.getQueue() + "/t=" + endpoint.getConfiguration().getWaitTimeMs();
            } else {
                target = endpoint.getQueue();
            }

            @SuppressWarnings("rawtypes")
            Exchanger exchanger = null;
            while (isRunAllowed() && !shutdownPending) {
                if (concurrent) {
                    // Wait until an exchanger is available, indicating that a
                    // handler thread is ready to handle the next request.
                    // Don't read from kestrel until we know a handler is ready.
                    try {
                        exchanger = exchangerQueue.take();
                    } catch (InterruptedException e) {
                        if (log.isDebugEnabled()) {
                            log.debug("Interrupted, are we stopping? {}", isStopping() || isStopped());
                        }
                        continue;
                    }
                    // We have the exchanger, so there's a handler thread ready
                    // to handle whatever we may read...so read the next object
                    // from the queue.
                }

                // Poll kestrel until we get an object back
                Object value = null;
                while (isRunAllowed() && !shutdownPending) {
                    log.trace("Polling {}", target);
                    try {
                        value = memcachedClient.get(target);
                        if (value != null) {
                            break;
                        }
                    } catch (Exception e) {
                        if (isRunAllowed() && !shutdownPending) {
                            getExceptionHandler().handleException("Failed to get object from kestrel", e);
                        }
                    }
                    // We didn't get a value back from kestrel
                    if (isRunAllowed() && !shutdownPending) {
                        if (endpoint.getConfiguration().getWaitTimeMs() > 0) {
                            // Kestrel did the blocking for us
                        } else {
                            // We're doing non-blocking get, so in between we
                            // should at least sleep some short period of time
                            // so this loop doesn't go nuts so tightly.
                            try {
                                Thread.sleep(100);
                            } catch (InterruptedException ignored) {
                            }
                        }
                    }
                }
                log.trace("Got object from {}", target);

                if (concurrent) {
                    // Pass the object to the handler thread via the exchanger.
                    // The handler will take it from there.
                    try {
                        exchanger.exchange(value);
                    } catch (InterruptedException e) {
                        if (log.isDebugEnabled()) {
                            log.debug("Interrupted, are we stopping? {}", isStopping() || isStopped());
                        }
                        continue;
                    }
                } else {
                    // We're non-concurrent, so handle it right here
                    pendingExchangeCount.incrementAndGet();
                    try {
                        // Create the exchange and let camel process/route it
                        Exchange exchange = null;
                        try {
                            exchange = endpoint.createExchange();
                            exchange.getIn().setBody(value);
                            getProcessor().process(exchange);
                        } catch (Exception e) {
                            if (exchange != null) {
                                getExceptionHandler().handleException("Error processing exchange", exchange, e);
                            } else {
                                getExceptionHandler().handleException(e);
                            }
                        }
                    } finally {
                        // Decrement our pending exchange counter
                        pendingExchangeCount.decrementAndGet();
                    }
                }
            }

            log.trace("Finished polling {}", target);

            // Decrement the shutdown countdown latch
            shutdownLatch.countDown();
        }
    }

    /**
     * Worker thread used when concurrentConsumers > 1. Advertises readiness by
     * queueing its Exchanger, blocks on the exchange for a value from the
     * poller, then routes the value through the Camel processor.
     */
    private final class Handler implements Runnable {
        private Exchanger<Handler> exchanger = new Exchanger<Handler>();

        public void run() {
            if (log.isTraceEnabled()) {
                log.trace("{} is starting", Thread.currentThread().getName());
            }

            while (isRunAllowed() && !shutdownPending) {
                // First things first, add our exchanger to the queue,
                // indicating that we're ready for a hand-off of work
                try {
                    exchangerQueue.put(exchanger);
                } catch (InterruptedException e) {
                    if (log.isDebugEnabled()) {
                        log.debug("Interrupted, are we stopping? {}", isStopping() || isStopped());
                    }
                    continue;
                }

                // Optimistically increment our internal pending exchange
                // counter, anticipating getting a value back from the exchanger
                pendingExchangeCount.incrementAndGet();
                try {
                    // Now wait for an object to come through the exchanger
                    Object value;
                    try {
                        value = exchanger.exchange(this);
                    } catch (InterruptedException e) {
                        if (log.isDebugEnabled()) {
                            log.debug("Interrupted, are we stopping? {}", isStopping() || isStopped());
                        }
                        continue;
                    }

                    log.trace("Got a value from the exchanger");

                    // Create the exchange and let camel process/route it
                    Exchange exchange = null;
                    try {
                        exchange = endpoint.createExchange();
                        exchange.getIn().setBody(value);
                        getProcessor().process(exchange);
                    } catch (Exception e) {
                        if (exchange != null) {
                            getExceptionHandler().handleException("Error processing exchange", exchange, e);
                        } else {
                            getExceptionHandler().handleException(e);
                        }
                    }
                } finally {
                    // Decrement our pending exchange counter
                    pendingExchangeCount.decrementAndGet();
                }
            }

            // Decrement the shutdown countdown latch
            shutdownLatch.countDown();

            if (log.isTraceEnabled()) {
                log.trace("{} is finished", Thread.currentThread().getName());
            }
        }
    }
}
apache-2.0
akosyakov/intellij-community
java/java-psi-impl/src/com/intellij/psi/impl/light/LightVariableBase.java
4345
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.light;

import com.intellij.psi.OriginInfoAwareElement;
import com.intellij.lang.Language;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.ElementPresentationUtil;
import com.intellij.psi.impl.PsiImplUtil;
import com.intellij.ui.RowIcon;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.PlatformIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;

/**
 * Base class for light (non-physical) PSI variables: synthetic variables that are
 * not backed by a source file but behave like {@link PsiVariable}s.
 *
 * <p>The declared type may be computed lazily: when {@code myType} is null,
 * {@link #getType()} initializes it via {@link #computeType()}.
 *
 * @author ven
 */
public abstract class LightVariableBase extends LightElement implements PsiVariable, OriginInfoAwareElement {
  protected PsiElement myScope;
  protected PsiIdentifier myNameIdentifier;
  // May be null until first getType() call; always read it through getType().
  protected PsiType myType;
  protected final PsiModifierList myModifierList;
  protected boolean myWritable;
  private String myOriginInfo = null;

  public LightVariableBase(PsiManager manager, PsiIdentifier nameIdentifier, PsiType type, boolean writable, PsiElement scope) {
    this(manager, nameIdentifier, JavaLanguage.INSTANCE, type, writable, scope);
  }

  public LightVariableBase(PsiManager manager, PsiIdentifier nameIdentifier, Language language, PsiType type, boolean writable, PsiElement scope) {
    super(manager, language);
    myNameIdentifier = nameIdentifier;
    myWritable = writable;
    myType = type;
    myScope = scope;
    myModifierList = createModifierList();
  }

  /** Subclasses may override to supply a modifier list with actual modifiers. */
  protected PsiModifierList createModifierList() {
    return new LightModifierList(getManager());
  }

  /** @return the scope element this light variable is declared in. */
  @NotNull
  public PsiElement getDeclarationScope() {
    return myScope;
  }

  @Override
  public PsiIdentifier getNameIdentifier() {
    return myNameIdentifier;
  }

  @Override
  public boolean isValid() {
    // A light variable with no identifier is considered always valid.
    return myNameIdentifier == null || myNameIdentifier.isValid();
  }

  @Override
  @NotNull
  public String getName() {
    return StringUtil.notNullize(getNameIdentifier().getText());
  }

  @Override
  public PsiElement setName(@NotNull String name) throws IncorrectOperationException{
    PsiImplUtil.setName(getNameIdentifier(), name);
    return this;
  }

  @Override
  @NotNull
  public PsiType getType() {
    // Lazy initialization: subclasses can defer type computation to computeType().
    if (myType == null) {
      myType = computeType();
    }
    return myType;
  }

  /** Computes the type on first access when none was supplied; defaults to void. */
  @NotNull
  protected PsiType computeType() {
    return PsiType.VOID;
  }

  @Override
  @NotNull
  public PsiTypeElement getTypeElement() {
    // FIX: go through getType() rather than the raw myType field, so the lazy
    // computeType() path is honored and we never pass null to createTypeElement().
    return JavaPsiFacade.getInstance(getProject()).getElementFactory().createTypeElement(getType());
  }

  @Override
  public PsiModifierList getModifierList() {
    return myModifierList;
  }

  @Override
  public boolean hasModifierProperty(@NotNull String name) {
    return getModifierList().hasModifierProperty(name);
  }

  /** Light variables have no initializer expression. */
  @Override
  public PsiExpression getInitializer() {
    return null;
  }

  @Override
  public boolean hasInitializer() {
    return false;
  }

  @Override
  public String getText() {
    return myNameIdentifier.getText();
  }

  /** Light variables are never compile-time constants. */
  @Override
  public Object computeConstantValue() {
    return null;
  }

  @Override
  public void normalizeDeclaration() throws IncorrectOperationException {
  }

  @Override
  public boolean isWritable() {
    return myWritable;
  }

  @Override
  protected boolean isVisibilitySupported() {
    return true;
  }

  @Override
  public Icon getElementIcon(final int flags) {
    final RowIcon baseIcon = ElementPresentationUtil.createLayeredIcon(PlatformIcons.VARIABLE_ICON, this, false);
    return ElementPresentationUtil.addVisibilityIcon(this, flags, baseIcon);
  }

  @Nullable
  @Override
  public String getOriginInfo() {
    return myOriginInfo;
  }

  /** Attaches free-form origin info (e.g. which framework synthesized this variable). */
  public void setOriginInfo(String originInfo) {
    myOriginInfo = originInfo;
  }
}
apache-2.0
akosyakov/intellij-community
python/psi-api/src/com/jetbrains/python/psi/Property.java
2217
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.psi;

import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import com.jetbrains.python.toolbox.Maybe;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Describes a Python property, the result of either a call to {@code property()} or
 * application of the {@code @property} decorator and friends.
 * This is <i>not</i> a node of the PSI tree.
 * <p>
 * Accessors are wrapped in {@link Maybe}: "a method or null if defined" means the
 * accessor is explicitly present (possibly as null), while "something else callable
 * if undefined" means the accessor was never specified.
 * <br/>
 * User: dcheryasov
 * Date: May 31, 2010 5:18:10 PM
 */
public interface Property {

  /** @return the property's name. */
  String getName();

  /**
   * @return the setter: a method or null if defined, or something else callable if undefined.
   */
  @NotNull
  Maybe<PyCallable> getSetter();

  /**
   * @return the getter: a method or null if defined, or something else callable if undefined.
   */
  @NotNull
  Maybe<PyCallable> getGetter();

  /**
   * @return the deleter: a method or null if defined, or something else callable if undefined.
   */
  @NotNull
  Maybe<PyCallable> getDeleter();

  /**
   * @return doc string as known to the property() call. If null, see the getter's doc.
   */
  @Nullable
  String getDoc();

  /**
   * @return the target to which the result of the property() call is assigned.
   * For things defined via @property, it is null.
   */
  @Nullable
  PyTargetExpression getDefinitionSite();

  /**
   * Selects the accessor matching the given access direction.
   *
   * @param direction how the property is accessed
   * @return getter, setter, or deleter.
   */
  @NotNull
  Maybe<PyCallable> getByDirection(@NotNull AccessDirection direction);

  /**
   * Get the return type of the property getter.
   */
  @Nullable
  PyType getType(@NotNull TypeEvalContext context);
}
apache-2.0
odpi/hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java
19060
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements.  See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership.  The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License.  You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;

import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter;
import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerImpl;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Verifies that a CapacityScheduler {@code ParentQueue} offers containers to its
 * child queues in used-capacity order: after driving queues a/b/c/d to different
 * utilizations with stubbed allocations, the least-used queue must be asked to
 * assign first (checked with Mockito {@link InOrder}).
 */
public class TestChildQueueOrder {

  private static final Log LOG = LogFactory.getLog(TestChildQueueOrder.class);

  RMContext rmContext;
  YarnConfiguration conf;
  CapacitySchedulerConfiguration csConf;
  CapacitySchedulerContext csContext;

  final static int GB = 1024;
  final static String DEFAULT_RACK = "/default";

  private final ResourceCalculator resourceComparator =
    new DefaultResourceCalculator();

  /** Builds a mocked CapacitySchedulerContext with fixed cluster/container sizes. */
  @Before
  public void setUp() throws Exception {
    rmContext = TestUtils.getMockRMContext();
    conf = new YarnConfiguration();
    csConf = new CapacitySchedulerConfiguration();
    csContext = mock(CapacitySchedulerContext.class);
    when(csContext.getConf()).thenReturn(conf);
    when(csContext.getConfiguration()).thenReturn(csConf);
    when(csContext.getMinimumResourceCapability()).thenReturn(
        Resources.createResource(GB, 1));
    when(csContext.getMaximumResourceCapability()).thenReturn(
        Resources.createResource(16*GB, 32));
    when(csContext.getClusterResource()).
    thenReturn(Resources.createResource(100 * 16 * GB, 100 * 32));
    when(csContext.getNonPartitionedQueueComparator()).
    thenReturn(CapacityScheduler.nonPartitionedQueueComparator);
    when(csContext.getResourceCalculator()).
    thenReturn(resourceComparator);
    when(csContext.getRMContext()).thenReturn(rmContext);
  }

  /** Mock app with a fixed user and zero headroom; appId is currently unused. */
  private FiCaSchedulerApp getMockApplication(int appId, String user) {
    FiCaSchedulerApp application = mock(FiCaSchedulerApp.class);
    doReturn(user).when(application).getUser();
    doReturn(Resources.createResource(0, 0)).when(application).getHeadroom();
    return application;
  }

  // Convenience overload: default to NODE_LOCAL allocations.
  private void stubQueueAllocation(final CSQueue queue,
      final Resource clusterResource, final FiCaSchedulerNode node,
      final int allocation) {
    stubQueueAllocation(queue, clusterResource, node, allocation,
        NodeType.NODE_LOCAL);
  }

  /**
   * Stubs queue.assignContainers(...) so that its FIRST invocation "allocates"
   * the given amount (updating queue and node bookkeeping), and — when
   * allocation > 0 — re-stubs itself so subsequent invocations return an
   * empty assignment.
   */
  private void stubQueueAllocation(final CSQueue queue,
      final Resource clusterResource, final FiCaSchedulerNode node,
      final int allocation, final NodeType type) {

    // Simulate the queue allocation
    doAnswer(new Answer<CSAssignment>() {
      @Override
      public CSAssignment answer(InvocationOnMock invocation) throws Throwable {
        // NOTE(review): this throw/catch exists only to reach the log call;
        // presumably a leftover debugging aid (possibly for a stack trace) —
        // a plain LOG.info would appear equivalent. Confirm before changing.
        try {
          throw new Exception();
        } catch (Exception e) {
          LOG.info("FOOBAR q.assignContainers q=" + queue.getQueueName() +
              " alloc=" + allocation + " node=" + node.getNodeName());
        }
        final Resource allocatedResource = Resources.createResource(allocation);
        if (queue instanceof ParentQueue) {
          ((ParentQueue)queue).allocateResource(clusterResource,
              allocatedResource, RMNodeLabelsManager.NO_LABEL);
        } else {
          FiCaSchedulerApp app1 = getMockApplication(0, "");
          ((LeafQueue)queue).allocateResource(clusterResource, app1,
              allocatedResource, null, null);
        }

        // Next call - nothing
        if (allocation > 0) {
          doReturn(new CSAssignment(Resources.none(), type)).
          when(queue).assignContainers(eq(clusterResource), eq(node),
              any(ResourceLimits.class), any(SchedulingMode.class));

          // Mock the node's resource availability
          Resource available = node.getAvailableResource();
          doReturn(Resources.subtractFrom(available, allocatedResource)).
          when(node).getAvailableResource();
        }

        return new CSAssignment(allocatedResource, type);
      }
    }).
    when(queue).assignContainers(eq(clusterResource), eq(node),
        any(ResourceLimits.class), any(SchedulingMode.class));
    doNothing().when(node).releaseContainer(any(Container.class));
  }

  // Expected absolute used capacity = used memory / total cluster memory.
  private float computeQueueAbsoluteUsedCapacity(CSQueue queue,
      int expectedMemory, Resource clusterResource) {
    return (
        ((float)expectedMemory / (float)clusterResource.getMemory())
      );
  }

  // Expected used capacity relative to the queue's own absolute capacity.
  private float computeQueueUsedCapacity(CSQueue queue,
      int expectedMemory, Resource clusterResource) {
    return (expectedMemory /
        (clusterResource.getMemory() * queue.getAbsoluteCapacity()));
  }

  final static float DELTA = 0.0001f;

  /** Asserts a queue's used and absolute-used capacity match expectedMemory. */
  private void verifyQueueMetrics(CSQueue queue,
      int expectedMemory, Resource clusterResource) {
    assertEquals(
        computeQueueAbsoluteUsedCapacity(queue, expectedMemory, clusterResource),
        queue.getAbsoluteUsedCapacity(),
        DELTA);
    assertEquals(
        computeQueueUsedCapacity(queue, expectedMemory, clusterResource),
        queue.getUsedCapacity(),
        DELTA);
  }

  private static final String A = "a";
  private static final String B = "b";
  private static final String C = "c";
  private static final String D = "d";

  /**
   * Defines four sibling queues a/b/c/d, each at 25% capacity.
   * NOTE(review): the queue list is set on the csConf field but the capacities
   * on the conf parameter; the only caller passes csConf for both, so they are
   * the same object — verify before adding other callers.
   */
  private void setupSortedQueues(CapacitySchedulerConfiguration conf) {

    // Define queues
    csConf.setQueues(CapacitySchedulerConfiguration.ROOT,
        new String[] {A, B, C, D});

    final String Q_A = CapacitySchedulerConfiguration.ROOT + "." + A;
    conf.setCapacity(Q_A, 25);

    final String Q_B = CapacitySchedulerConfiguration.ROOT + "." + B;
    conf.setCapacity(Q_B, 25);

    final String Q_C = CapacitySchedulerConfiguration.ROOT + "." + C;
    conf.setCapacity(Q_C, 25);

    final String Q_D = CapacitySchedulerConfiguration.ROOT + "." + D;
    conf.setCapacity(Q_D, 25);
  }

  /**
   * Drives queues to used capacities a=3, b=2, c=3, d=1 (GB), then has both b
   * and d request a container: d (least used) must be offered the assignment
   * before b, proving child queues are consulted in sorted order.
   */
  @Test
  public void testSortedQueues() throws Exception {
    // Setup queue configs
    setupSortedQueues(csConf);
    Map<String, CSQueue> queues = new HashMap<String, CSQueue>();
    CSQueue root =
      CapacityScheduler.parseQueue(csContext, csConf, null,
          CapacitySchedulerConfiguration.ROOT, queues, queues,
          TestUtils.spyHook);

    // Setup some nodes
    final int memoryPerNode = 10;
    final int coresPerNode = 16;
    final int numNodes = 1;

    FiCaSchedulerNode node_0 =
      TestUtils.getMockNode("host_0", DEFAULT_RACK, 0, memoryPerNode*GB);
    doNothing().when(node_0).releaseContainer(any(Container.class));

    final Resource clusterResource =
      Resources.createResource(numNodes * (memoryPerNode*GB),
          numNodes * coresPerNode);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Start testing
    CSQueue a = queues.get(A);
    CSQueue b = queues.get(B);
    CSQueue c = queues.get(C);
    CSQueue d = queues.get(D);

    // Make a/b/c/d has >0 pending resource, so that allocation will continue.
    queues.get(CapacitySchedulerConfiguration.ROOT).getQueueResourceUsage()
        .incPending(Resources.createResource(1 * GB));
    a.getQueueResourceUsage().incPending(Resources.createResource(1 * GB));
    b.getQueueResourceUsage().incPending(Resources.createResource(1 * GB));
    c.getQueueResourceUsage().incPending(Resources.createResource(1 * GB));
    d.getQueueResourceUsage().incPending(Resources.createResource(1 * GB));

    final String user_0 = "user_0";

    // Stub an App and its containerCompleted
    FiCaSchedulerApp app_0 = getMockApplication(0,user_0);
    doReturn(true).when(app_0).containerCompleted(any(RMContainer.class),
        any(ContainerStatus.class), any(RMContainerEventType.class),
        any(String.class));

    Priority priority = TestUtils.createMockPriority(1);
    ContainerAllocationExpirer expirer =
      mock(ContainerAllocationExpirer.class);
    DrainDispatcher drainDispatcher = new DrainDispatcher();
    RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class);
    SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class);
    RMContext rmContext = mock(RMContext.class);
    when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer);
    when(rmContext.getDispatcher()).thenReturn(drainDispatcher);
    when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer);
    when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher);
    when(rmContext.getYarnConfiguration()).thenReturn(new YarnConfiguration());
    ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
        app_0.getApplicationId(), 1);
    ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 1);
    Container container=TestUtils.getMockContainer(containerId,
        node_0.getNodeID(), Resources.createResource(1*GB), priority);
    RMContainer rmContainer = new RMContainerImpl(container, appAttemptId,
        node_0.getNodeID(), "user", rmContext);

    // Assign {1,2,3,4} 1GB containers respectively to queues
    stubQueueAllocation(a, clusterResource, node_0, 1*GB);
    stubQueueAllocation(b, clusterResource, node_0, 0*GB);
    stubQueueAllocation(c, clusterResource, node_0, 0*GB);
    stubQueueAllocation(d, clusterResource, node_0, 0*GB);
    root.assignContainers(clusterResource, node_0, new ResourceLimits(
        clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);

    for(int i=0; i < 2; i++)
    {
      stubQueueAllocation(a, clusterResource, node_0, 0*GB);
      stubQueueAllocation(b, clusterResource, node_0, 1*GB);
      stubQueueAllocation(c, clusterResource, node_0, 0*GB);
      stubQueueAllocation(d, clusterResource, node_0, 0*GB);
      root.assignContainers(clusterResource, node_0, new ResourceLimits(
          clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
    }
    for(int i=0; i < 3; i++)
    {
      stubQueueAllocation(a, clusterResource, node_0, 0*GB);
      stubQueueAllocation(b, clusterResource, node_0, 0*GB);
      stubQueueAllocation(c, clusterResource, node_0, 1*GB);
      stubQueueAllocation(d, clusterResource, node_0, 0*GB);
      root.assignContainers(clusterResource, node_0, new ResourceLimits(
          clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
    }
    for(int i=0; i < 4; i++)
    {
      stubQueueAllocation(a, clusterResource, node_0, 0*GB);
      stubQueueAllocation(b, clusterResource, node_0, 0*GB);
      stubQueueAllocation(c, clusterResource, node_0, 0*GB);
      stubQueueAllocation(d, clusterResource, node_0, 1*GB);
      root.assignContainers(clusterResource, node_0, new ResourceLimits(
          clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
    }
    verifyQueueMetrics(a, 1*GB, clusterResource);
    verifyQueueMetrics(b, 2*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 4*GB, clusterResource);
    LOG.info("status child-queues: " + ((ParentQueue)root).
        getChildQueuesToPrint());

    //Release 3 x 1GB containers from D
    for(int i=0; i < 3;i++)
    {
      d.completedContainer(clusterResource, app_0, node_0, rmContainer,
          null, RMContainerEventType.KILL, null, true);
    }
    verifyQueueMetrics(a, 1*GB, clusterResource);
    verifyQueueMetrics(b, 2*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 1*GB, clusterResource);

    //reset manually resources on node
    node_0 = TestUtils.getMockNode("host_0", DEFAULT_RACK, 0,
        (memoryPerNode-1-2-3-1)*GB);
    LOG.info("status child-queues: " +
        ((ParentQueue)root).getChildQueuesToPrint());

    // Assign 2 x 1GB Containers to A
    for(int i=0; i < 2; i++)
    {
      stubQueueAllocation(a, clusterResource, node_0, 1*GB);
      stubQueueAllocation(b, clusterResource, node_0, 0*GB);
      stubQueueAllocation(c, clusterResource, node_0, 0*GB);
      stubQueueAllocation(d, clusterResource, node_0, 0*GB);
      root.assignContainers(clusterResource, node_0, new ResourceLimits(
          clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
    }
    verifyQueueMetrics(a, 3*GB, clusterResource);
    verifyQueueMetrics(b, 2*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 1*GB, clusterResource);
    LOG.info("status child-queues: " +
        ((ParentQueue)root).getChildQueuesToPrint());

    //Release 1GB Container from A
    a.completedContainer(clusterResource, app_0, node_0, rmContainer,
        null, RMContainerEventType.KILL, null, true);
    verifyQueueMetrics(a, 2*GB, clusterResource);
    verifyQueueMetrics(b, 2*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 1*GB, clusterResource);

    //reset manually resources on node
    node_0 = TestUtils.getMockNode("host_0", DEFAULT_RACK, 0,
        (memoryPerNode-2-2-3-1)*GB);
    LOG.info("status child-queues: " +
        ((ParentQueue)root).getChildQueuesToPrint());

    // Assign 1GB container to B
    stubQueueAllocation(a, clusterResource, node_0, 0*GB);
    stubQueueAllocation(b, clusterResource, node_0, 1*GB);
    stubQueueAllocation(c, clusterResource, node_0, 0*GB);
    stubQueueAllocation(d, clusterResource, node_0, 0*GB);
    root.assignContainers(clusterResource, node_0, new ResourceLimits(
        clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
    verifyQueueMetrics(a, 2*GB, clusterResource);
    verifyQueueMetrics(b, 3*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 1*GB, clusterResource);
    LOG.info("status child-queues: " +
        ((ParentQueue)root).getChildQueuesToPrint());

    //Release 1GB container resources from B
    b.completedContainer(clusterResource, app_0, node_0, rmContainer,
        null, RMContainerEventType.KILL, null, true);
    verifyQueueMetrics(a, 2*GB, clusterResource);
    verifyQueueMetrics(b, 2*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 1*GB, clusterResource);

    //reset manually resources on node
    node_0 = TestUtils.getMockNode("host_0", DEFAULT_RACK, 0,
        (memoryPerNode-2-2-3-1)*GB);
    LOG.info("status child-queues: " +
        ((ParentQueue)root).getChildQueuesToPrint());

    // Assign 1GB container to A
    stubQueueAllocation(a, clusterResource, node_0, 1*GB);
    stubQueueAllocation(b, clusterResource, node_0, 0*GB);
    stubQueueAllocation(c, clusterResource, node_0, 0*GB);
    stubQueueAllocation(d, clusterResource, node_0, 0*GB);
    root.assignContainers(clusterResource, node_0, new ResourceLimits(
        clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
    verifyQueueMetrics(a, 3*GB, clusterResource);
    verifyQueueMetrics(b, 2*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 1*GB, clusterResource);
    LOG.info("status child-queues: " +
        ((ParentQueue)root).getChildQueuesToPrint());

    // Now do the real test, where B and D request a 1GB container
    // D should get the next container if the order is correct
    stubQueueAllocation(a, clusterResource, node_0, 0*GB);
    stubQueueAllocation(b, clusterResource, node_0, 1*GB);
    stubQueueAllocation(c, clusterResource, node_0, 0*GB);
    stubQueueAllocation(d, clusterResource, node_0, 1*GB);
    root.assignContainers(clusterResource, node_0, new ResourceLimits(
        clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
    InOrder allocationOrder = inOrder(d,b);
    allocationOrder.verify(d).assignContainers(eq(clusterResource),
        any(FiCaSchedulerNode.class), any(ResourceLimits.class),
        any(SchedulingMode.class));
    allocationOrder.verify(b).assignContainers(eq(clusterResource),
        any(FiCaSchedulerNode.class), any(ResourceLimits.class),
        any(SchedulingMode.class));
    verifyQueueMetrics(a, 3*GB, clusterResource);
    verifyQueueMetrics(b, 2*GB, clusterResource);
    verifyQueueMetrics(c, 3*GB, clusterResource);
    verifyQueueMetrics(d, 2*GB, clusterResource); //D got the container
    LOG.info("status child-queues: " +
        ((ParentQueue)root).getChildQueuesToPrint());
  }

  @After
  public void tearDown() throws Exception {
  }
}
apache-2.0
shreejay/elasticsearch
core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
51859
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.geo; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LineString; import com.vividsolutions.jts.geom.LinearRing; import com.vividsolutions.jts.geom.MultiLineString; import com.vividsolutions.jts.geom.Point; import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.Circle; import org.locationtech.spatial4j.shape.Rectangle; import org.locationtech.spatial4j.shape.Shape; import org.locationtech.spatial4j.shape.ShapeCollection; import org.locationtech.spatial4j.shape.jts.JtsGeometry; import 
org.locationtech.spatial4j.shape.jts.JtsPoint; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT; /** * Tests for {@code GeoJSONShapeParser} */ public class GeoJSONShapeParserTests extends ESTestCase { private static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory(); public void testParseSimplePoint() throws IOException { XContentBuilder pointGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Point") .startArray("coordinates").value(100.0).value(0.0).endArray() .endObject(); Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0)); assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson); } public void testParseLineString() throws IOException { XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "LineString") .startArray("coordinates") .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject(); List<Coordinate> lineCoordinates = new ArrayList<>(); lineCoordinates.add(new Coordinate(100, 0)); lineCoordinates.add(new Coordinate(101, 1)); LineString expected = GEOMETRY_FACTORY.createLineString( lineCoordinates.toArray(new Coordinate[lineCoordinates.size()])); assertGeometryEquals(jtsGeom(expected), lineGeoJson); } public void testParseMultiLineString() throws IOException { XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "MultiLineString") .startArray("coordinates") .startArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .startArray() .startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .endArray() .endArray() .endObject(); MultiLineString expected = GEOMETRY_FACTORY.createMultiLineString(new 
LineString[]{ GEOMETRY_FACTORY.createLineString(new Coordinate[]{ new Coordinate(100, 0), new Coordinate(101, 1), }), GEOMETRY_FACTORY.createLineString(new Coordinate[]{ new Coordinate(102, 2), new Coordinate(103, 3), }), }); assertGeometryEquals(jtsGeom(expected), multilinesGeoJson); } public void testParseCircle() throws IOException { XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "circle") .startArray("coordinates").value(100.0).value(0.0).endArray() .field("radius", "100m") .endObject(); Circle expected = SPATIAL_CONTEXT.makeCircle(100.0, 0.0, 360 * 100 / GeoUtils.EARTH_EQUATOR); assertGeometryEquals(expected, multilinesGeoJson); } public void testParseMultiDimensionShapes() throws IOException { // multi dimension point XContentBuilder pointGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Point") .startArray("coordinates").value(100.0).value(0.0).value(15.0).value(18.0).endArray() .endObject(); Point expectedPt = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0)); assertGeometryEquals(new JtsPoint(expectedPt, SPATIAL_CONTEXT), pointGeoJson); // multi dimension linestring XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "LineString") .startArray("coordinates") .startArray().value(100.0).value(0.0).value(15.0).endArray() .startArray().value(101.0).value(1.0).value(18.0).value(19.0).endArray() .endArray() .endObject(); List<Coordinate> lineCoordinates = new ArrayList<>(); lineCoordinates.add(new Coordinate(100, 0)); lineCoordinates.add(new Coordinate(101, 1)); LineString expectedLS = GEOMETRY_FACTORY.createLineString( lineCoordinates.toArray(new Coordinate[lineCoordinates.size()])); assertGeometryEquals(jtsGeom(expectedLS), lineGeoJson); } public void testParseEnvelope() throws IOException { // test #1: envelope with expected coordinate order (TopLeft, BottomRight) XContentBuilder multilinesGeoJson = 
XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .startArray().value(-50).value(30).endArray() .startArray().value(50).value(-30).endArray() .endArray() .endObject(); Rectangle expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30); assertGeometryEquals(expected, multilinesGeoJson); // test #2: envelope with agnostic coordinate order (TopRight, BottomLeft) multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .startArray().value(50).value(30).endArray() .startArray().value(-50).value(-30).endArray() .endArray() .endObject(); expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30); assertGeometryEquals(expected, multilinesGeoJson); // test #3: "envelope" (actually a triangle) with invalid number of coordinates (TopRight, BottomLeft, BottomRight) multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .startArray().value(50).value(30).endArray() .startArray().value(-50).value(-30).endArray() .startArray().value(50).value(-39).endArray() .endArray() .endObject(); XContentParser parser = createParser(multilinesGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test #4: "envelope" with empty coordinates multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .endArray() .endObject(); parser = createParser(multilinesGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParsePolygonNoHoles() throws IOException { XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(101.0).value(1.0).endArray() 
.startArray().value(101.0).value(0.0).endArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(100.0).value(1.0).endArray() .endArray() .endArray() .endObject(); List<Coordinate> shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0)); shellCoordinates.add(new Coordinate(101, 0)); shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(100, 1)); shellCoordinates.add(new Coordinate(100, 0)); LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); assertGeometryEquals(jtsGeom(expected), polygonGeoJson); } public void testParseInvalidPoint() throws IOException { // test case 1: create an invalid point object with multipoint data format XContentBuilder invalidPoint1 = XContentFactory.jsonBuilder() .startObject() .field("type", "point") .startArray("coordinates") .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject(); XContentParser parser = createParser(invalidPoint1); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid point object with an empty number of coordinates XContentBuilder invalidPoint2 = XContentFactory.jsonBuilder() .startObject() .field("type", "point") .startArray("coordinates") .endArray() .endObject(); parser = createParser(invalidPoint2); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParseInvalidMultipoint() throws IOException { // test case 1: create an invalid multipoint object with single coordinate XContentBuilder invalidMultipoint1 = XContentFactory.jsonBuilder() .startObject() .field("type", "multipoint") .startArray("coordinates").value(-74.011).value(40.753).endArray() .endObject(); XContentParser parser = createParser(invalidMultipoint1); 
parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid multipoint object with null coordinate XContentBuilder invalidMultipoint2 = XContentFactory.jsonBuilder() .startObject() .field("type", "multipoint") .startArray("coordinates") .endArray() .endObject(); parser = createParser(invalidMultipoint2); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates XContentBuilder invalidMultipoint3 = XContentFactory.jsonBuilder() .startObject() .field("type", "multipoint") .startArray("coordinates") .startArray().endArray() .endArray() .endObject(); parser = createParser(invalidMultipoint3); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParseInvalidMultiPolygon() throws IOException { // test invalid multipolygon (an "accidental" polygon with inner rings outside outer ring) String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon") .startArray("coordinates") .startArray()//one poly (with two holes) .startArray() .startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .startArray().value(102.0).value(3.0).endArray() .startArray().value(102.0).value(2.0).endArray() .endArray() .startArray()// first hole .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(100.0).value(0.0).endArray() .endArray() .startArray()//second hole .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() 
.startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); } public void testParseOGCPolygonWithoutHoles() throws IOException { // test 1: ccw poly not crossing dateline String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); Shape shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: ccw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 3: cw poly not crossing dateline polygonGeoJson = 
XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(180.0).value(10.0).endArray() .startArray().value(180.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(184.0).value(15.0).endArray() .startArray().value(184.0).value(0.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(174.0).value(-10.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } public void testParseOGCPolygonWithHoles() throws IOException { // test 1: ccw poly not crossing dateline String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() 
.startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); Shape shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: ccw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .endArray() .startArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(-180.0).value(-8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 3: cw poly not crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(180.0).value(10.0).endArray() .startArray().value(179.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(177.0).value(8.0).endArray() .startArray().value(179.0).value(10.0).endArray() .startArray().value(179.0).value(-8.0).endArray() 
.startArray().value(177.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(183.0).value(10.0).endArray() .startArray().value(183.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(183.0).value(10.0).endArray() .endArray() .startArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(182.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } public void testParseInvalidPolygon() throws IOException { /** * The following 3 test cases ensure proper error handling of invalid polygons * per the GeoJSON specification */ // test case 1: create an invalid polygon with only 2 points String invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() .startArray().value(-75.022).value(41.783).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid polygon with only 1 point invalidPoly = 
XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 3: create an invalid polygon with 0 points invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 4: create an invalid polygon with null value points invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().nullValue().nullValue().endArray() .endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); // test case 5: create an invalid polygon with 1 invalid LinearRing invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .nullValue().nullValue() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); // test case 6: create an invalid polygon with 0 LinearRings invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates").endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); 
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 7: create an invalid polygon with 0 LinearRings invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject().string(); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParsePolygonWithHole() throws IOException { XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(100.0).value(1.0).endArray() .endArray() .startArray() .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endObject(); // add 3d point to test ISSUE #10501 List<Coordinate> shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0, 15.0)); shellCoordinates.add(new Coordinate(101, 0)); shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(100, 1, 10.0)); shellCoordinates.add(new Coordinate(100, 0)); List<Coordinate> holeCoordinates = new ArrayList<>(); holeCoordinates.add(new Coordinate(100.2, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.2)); LinearRing shell = GEOMETRY_FACTORY.createLinearRing( 
shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); LinearRing[] holes = new LinearRing[1]; holes[0] = GEOMETRY_FACTORY.createLinearRing( holeCoordinates.toArray(new Coordinate[holeCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, holes); assertGeometryEquals(jtsGeom(expected), polygonGeoJson); } public void testParseSelfCrossingPolygon() throws IOException { // test self crossing ccw poly not crossing dateline String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(-177.0).value(15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); } public void testParseMultiPoint() throws IOException { XContentBuilder multiPointGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "MultiPoint") .startArray("coordinates") .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject(); ShapeCollection expected = shapeCollection( SPATIAL_CONTEXT.makePoint(100, 0), SPATIAL_CONTEXT.makePoint(101, 1.0)); assertGeometryEquals(expected, multiPointGeoJson); } public void testParseMultiPolygon() throws IOException { // test #1: two polygons; one without hole, one with hole XContentBuilder multiPolygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "MultiPolygon") .startArray("coordinates") .startArray()//first poly (without holes) .startArray() 
.startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .startArray().value(102.0).value(3.0).endArray() .startArray().value(102.0).value(2.0).endArray() .endArray() .endArray() .startArray()//second poly (with hole) .startArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(100.0).value(0.0).endArray() .endArray() .startArray()//hole .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endArray() .endObject(); List<Coordinate> shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0)); shellCoordinates.add(new Coordinate(101, 0)); shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(100, 1)); shellCoordinates.add(new Coordinate(100, 0)); List<Coordinate> holeCoordinates = new ArrayList<>(); holeCoordinates.add(new Coordinate(100.2, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.2)); LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); LinearRing[] holes = new LinearRing[1]; holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()])); Polygon withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes); shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(102, 3)); shellCoordinates.add(new Coordinate(103, 3)); shellCoordinates.add(new Coordinate(103, 2)); 
shellCoordinates.add(new Coordinate(102, 2)); shellCoordinates.add(new Coordinate(102, 3)); shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon withoutHoles = GEOMETRY_FACTORY.createPolygon(shell, null); Shape expected = shapeCollection(withoutHoles, withHoles); assertGeometryEquals(expected, multiPolygonGeoJson); // test #2: multipolygon; one polygon with one hole // this test converting the multipolygon from a ShapeCollection type // to a simple polygon (jtsGeom) multiPolygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "MultiPolygon") .startArray("coordinates") .startArray() .startArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(100.0).value(1.0).endArray() .endArray() .startArray() // hole .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endArray() .endObject(); shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 1)); shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(101, 0)); shellCoordinates.add(new Coordinate(100, 0)); shellCoordinates.add(new Coordinate(100, 1)); holeCoordinates = new ArrayList<>(); holeCoordinates.add(new Coordinate(100.2, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.8)); shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); holes = new LinearRing[1]; holes[0] = 
GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()])); withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes); assertGeometryEquals(jtsGeom(withHoles), multiPolygonGeoJson); } public void testParseGeometryCollection() throws IOException { XContentBuilder geometryCollectionGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "GeometryCollection") .startArray("geometries") .startObject() .field("type", "LineString") .startArray("coordinates") .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject() .startObject() .field("type", "Point") .startArray("coordinates").value(102.0).value(2.0).endArray() .endObject() .endArray() .endObject(); Shape[] expected = new Shape[2]; LineString expectedLineString = GEOMETRY_FACTORY.createLineString(new Coordinate[]{ new Coordinate(100, 0), new Coordinate(101, 1), }); expected[0] = jtsGeom(expectedLineString); Point expectedPoint = GEOMETRY_FACTORY.createPoint(new Coordinate(102.0, 2.0)); expected[1] = new JtsPoint(expectedPoint, SPATIAL_CONTEXT); //equals returns true only if geometries are in the same order assertGeometryEquals(shapeCollection(expected), geometryCollectionGeoJson); } public void testThatParserExtractsCorrectTypeAndCoordinatesFromArbitraryJson() throws IOException { XContentBuilder pointGeoJson = XContentFactory.jsonBuilder() .startObject() .startObject("crs") .field("type", "name") .startObject("properties") .field("name", "urn:ogc:def:crs:OGC:1.3:CRS84") .endObject() .endObject() .field("bbox", "foobar") .field("type", "point") .field("bubu", "foobar") .startArray("coordinates").value(100.0).value(0.0).endArray() .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject() .startObject("lala").field("type", "NotAPoint").endObject() .endObject(); Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0)); assertGeometryEquals(new 
JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson); } public void testParseOrientationOption() throws IOException { // test 1: valid ccw (right handed system) poly not crossing dateline (with 'right' field) XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .field("orientation", "right") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() .startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject(); XContentParser parser = createParser(polygonGeoJson); parser.nextToken(); Shape shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: valid ccw (right handed system) poly not crossing dateline (with 'ccw' field) polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .field("orientation", "ccw") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() .startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject(); parser = createParser(polygonGeoJson); parser.nextToken(); 
shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 3: valid ccw (right handed system) poly not crossing dateline (with 'counterclockwise' field) polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .field("orientation", "counterclockwise") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() .startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject(); parser = createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: valid cw (left handed system) poly crossing dateline (with 'left' field) polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .field("orientation", "left") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .endArray() .endArray() .endObject(); parser = createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); 
ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 5: valid cw multipoly (left handed system) poly crossing dateline (with 'cw' field) polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .field("orientation", "cw") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .endArray() .endArray() .endObject(); parser = createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 6: valid cw multipoly (left handed system) poly crossing dateline (with 'clockwise' field) polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .field("orientation", "clockwise") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .endArray() .endArray() .endObject(); parser = createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); 
ElasticsearchGeoAssertions.assertMultiPolygon(shape); } private void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException { XContentParser parser = createParser(geoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertEquals(expected, ShapeBuilder.parse(parser).build()); } private ShapeCollection<Shape> shapeCollection(Shape... shapes) { return new ShapeCollection<>(Arrays.asList(shapes), SPATIAL_CONTEXT); } private ShapeCollection<Shape> shapeCollection(Geometry... geoms) { List<Shape> shapes = new ArrayList<>(geoms.length); for (Geometry geom : geoms) { shapes.add(jtsGeom(geom)); } return new ShapeCollection<>(shapes, SPATIAL_CONTEXT); } private JtsGeometry jtsGeom(Geometry geom) { return new JtsGeometry(geom, SPATIAL_CONTEXT, false, false); } }
apache-2.0
idea4bsd/idea4bsd
platform/vcs-impl/src/com/intellij/openapi/vcs/changes/committed/CacheSettingsDialog.java
1763
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vcs.changes.committed;

import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.vcs.VcsBundle;

import javax.swing.*;

/**
 * Modal dialog wrapping a {@link CacheSettingsPanel} for editing the committed-changes
 * cache settings of a project.
 *
 * @author yole
 */
public class CacheSettingsDialog extends DialogWrapper {
  private final CacheSettingsPanel myPanel;
  private final Project myProject; // assigned for parity with sibling dialogs; not read after construction

  public CacheSettingsDialog(Project project) {
    super(project, false);
    myProject = project;
    setTitle(VcsBundle.message("cache.settings.dialog.title"));
    myPanel = new CacheSettingsPanel();
    myPanel.initPanel(project);
    myPanel.reset(); // load the current settings into the panel before showing
    init();
  }

  @Override
  protected JComponent createCenterPanel() {
    return myPanel.getPanel();
  }

  @Override
  protected void doOKAction() {
    try {
      myPanel.apply();
    }
    catch (ConfigurationException e) {
      // Deliberately ignored: invalid settings are dropped and the previous
      // configuration stays in effect; the dialog still closes via OK.
    }
    super.doOKAction();
  }

  /**
   * Shows the settings dialog modally.
   *
   * @param project the project whose committed-changes cache settings are edited
   * @return {@code true} if the user accepted the dialog, {@code false} if it was cancelled
   */
  public static boolean showSettingsDialog(final Project project) {
    // Collapsed the former `if (!showAndGet()) return false; return true;` into a direct return.
    return new CacheSettingsDialog(project).showAndGet();
  }
}
apache-2.0
sintjuri/openmrs-core
api/src/test/java/org/openmrs/propertyeditor/WorkflowCollectionEditorTest.java
1437
/**
 * This Source Code Form is subject to the terms of the Mozilla Public License,
 * v. 2.0. If a copy of the MPL was not distributed with this file, You can
 * obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
 * the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
 *
 * Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
 * graphic logo is a trademark of OpenMRS Inc.
 */
package org.openmrs.propertyeditor;

import org.junit.Assert;
import org.junit.Test;
import org.openmrs.Program;
import org.openmrs.api.context.Context;
import org.openmrs.test.BaseContextSensitiveTest;

/**
 * Tests {@link WorkflowCollectionEditor}
 */
public class WorkflowCollectionEditorTest extends BaseContextSensitiveTest {

	/**
	 * Verifies that feeding a "programId:conceptId" string to the editor replaces the
	 * program's active workflow set accordingly.
	 *
	 * @see WorkflowCollectionEditor#setAsText(String)
	 * @verifies update workflows in program
	 */
	@Test
	public void setAsText_shouldUpdateWorkflowsInProgram() throws Exception {
		// Program 1 comes from the standard context-sensitive test dataset.
		Program program = Context.getProgramWorkflowService().getProgram(1);
		WorkflowCollectionEditor editor = new WorkflowCollectionEditor();
		// Precondition from the dataset: program 1 starts with two workflows.
		Assert.assertEquals(2, program.getWorkflows().size());
		// NOTE(review): the editor is never explicitly bound to 'program'; it presumably
		// resolves program 1 from the "1:" prefix and mutates it in place — confirm
		// against WorkflowCollectionEditor#setAsText.
		editor.setAsText("1:3");
		Assert.assertEquals(1, program.getWorkflows().size());
		// The single remaining active workflow should be backed by concept 3.
		Assert.assertEquals(3, program.getWorkflows().iterator().next().getConcept().getConceptId().intValue());
		// NOTE(review): getAllWorkflows() returning 3 suggests replaced workflows are
		// retained (e.g. retired) rather than deleted — verify against the Program model.
		Assert.assertEquals(3, program.getAllWorkflows().size());
	}
}
mpl-2.0
android-ia/platform_tools_idea
jps/jps-builders/src/org/jetbrains/jps/javac/CompilationCanceledException.java
919
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.jps.javac; /** * @author Eugene Zhuravlev * Date: 4/2/12 */ class CompilationCanceledException extends RuntimeException{ CompilationCanceledException() { super("Compilation canceled"); } @Override public synchronized Throwable fillInStackTrace() { return this; } }
apache-2.0
qwerty4030/elasticsearch
server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
4812
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.action.termvectors;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

/**
 * Shard-level transport action for the multi-term-vectors API: executes every
 * per-document term-vectors request that routes to a single shard and collects
 * the results (or per-item failures) into one shard response.
 */
public class TransportShardMultiTermsVectorAction extends
        TransportSingleShardAction<MultiTermVectorsShardRequest, MultiTermVectorsShardResponse> {

    private final IndicesService indicesService;

    // Derived from the parent action name so it shows up as "<mtv>[shard]" in task lists.
    private static final String ACTION_NAME = MultiTermVectorsAction.NAME + "[shard]";

    @Inject
    public TransportShardMultiTermsVectorAction(Settings settings, ClusterService clusterService,
            TransportService transportService, IndicesService indicesService, ThreadPool threadPool,
            ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters,
                indexNameExpressionResolver, MultiTermVectorsShardRequest::new, ThreadPool.Names.GET);
        this.indicesService = indicesService;
    }

    @Override
    protected boolean isSubAction() {
        // This action is always spawned by the top-level multi-term-vectors action.
        return true;
    }

    @Override
    protected MultiTermVectorsShardResponse newResponse() {
        return new MultiTermVectorsShardResponse();
    }

    @Override
    protected boolean resolveIndex(MultiTermVectorsShardRequest request) {
        // Index names were already resolved by the parent action.
        return false;
    }

    @Override
    protected ShardIterator shards(ClusterState state, InternalRequest request) {
        return clusterService.operationRouting()
                .getShards(state, request.concreteIndex(), request.request().shardId(), request.request().preference());
    }

    @Override
    protected MultiTermVectorsShardResponse shardOperation(MultiTermVectorsShardRequest request, ShardId shardId) {
        final MultiTermVectorsShardResponse response = new MultiTermVectorsShardResponse();
        final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
        final IndexShard indexShard = indexService.getShard(shardId.id());
        // locations[i] pairs each sub-request with its slot in the overall multi response,
        // so results can be re-assembled in the caller's original order.
        for (int i = 0; i < request.locations.size(); i++) {
            TermVectorsRequest termVectorsRequest = request.requests.get(i);
            try {
                TermVectorsResponse termVectorsResponse = TermVectorsService.getTermVectors(indexShard, termVectorsRequest);
                response.add(request.locations.get(i), termVectorsResponse);
            } catch (Exception t) {
                if (TransportActions.isShardNotAvailableException(t)) {
                    // Shard-unavailable errors abort the whole shard request so the
                    // caller can retry on another copy.
                    throw (ElasticsearchException) t;
                } else {
                    // Any other failure is recorded per item; remaining requests still run.
                    logger.debug((Supplier<?>) () -> new ParameterizedMessage(
                            "{} failed to execute multi term vectors for [{}]/[{}]",
                            shardId, termVectorsRequest.type(), termVectorsRequest.id()), t);
                    response.add(request.locations.get(i),
                            new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(),
                                    termVectorsRequest.id(), t));
                }
            }
        }
        return response;
    }
}
apache-2.0
qq83387856/spring-boot
spring-boot-tools/spring-boot-loader/src/main/java/org/springframework/boot/loader/archive/JarFileArchive.java
5376
/*
 * Copyright 2012-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.boot.loader.archive;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.Manifest;

import org.springframework.boot.loader.data.RandomAccessData.ResourceAccess;
import org.springframework.boot.loader.jar.JarEntryData;
import org.springframework.boot.loader.jar.JarEntryFilter;
import org.springframework.boot.loader.jar.JarFile;
import org.springframework.boot.loader.util.AsciiBytes;

/**
 * {@link Archive} implementation backed by a {@link JarFile}. Nested jar entries whose
 * comment starts with {@code UNPACK:} are extracted to a temp folder instead of being
 * read in place.
 *
 * @author Phillip Webb
 */
public class JarFileArchive extends Archive {

	// Entry-comment prefix marking nested jars that must be unpacked to disk
	// (the remainder of the comment is a content hash used in the file name).
	private static final AsciiBytes UNPACK_MARKER = new AsciiBytes("UNPACK:");

	private static final int BUFFER_SIZE = 32 * 1024;

	private final JarFile jarFile;

	// Snapshot of the jar's entries, built once in the constructor.
	private final List<Entry> entries;

	// Optional explicit URL; when null, the jar file's own URL is used.
	private URL url;

	public JarFileArchive(File file) throws IOException {
		this(file, null);
	}

	public JarFileArchive(File file, URL url) throws IOException {
		this(new JarFile(file));
		this.url = url;
	}

	public JarFileArchive(JarFile jarFile) {
		this.jarFile = jarFile;
		ArrayList<Entry> jarFileEntries = new ArrayList<Entry>();
		for (JarEntryData data : jarFile) {
			jarFileEntries.add(new JarFileEntry(data));
		}
		this.entries = Collections.unmodifiableList(jarFileEntries);
	}

	@Override
	public URL getUrl() throws MalformedURLException {
		if (this.url != null) {
			return this.url;
		}
		return this.jarFile.getUrl();
	}

	@Override
	public Manifest getManifest() throws IOException {
		return this.jarFile.getManifest();
	}

	@Override
	public List<Archive> getNestedArchives(EntryFilter filter) throws IOException {
		List<Archive> nestedArchives = new ArrayList<Archive>();
		for (Entry entry : getEntries()) {
			if (filter.matches(entry)) {
				nestedArchives.add(getNestedArchive(entry));
			}
		}
		return Collections.unmodifiableList(nestedArchives);
	}

	@Override
	public Collection<Entry> getEntries() {
		return Collections.unmodifiableCollection(this.entries);
	}

	/**
	 * Returns an archive for the given nested entry, unpacking it to disk first when
	 * its comment carries the {@code UNPACK:} marker.
	 */
	protected Archive getNestedArchive(Entry entry) throws IOException {
		JarEntryData data = ((JarFileEntry) entry).getJarEntryData();
		// NOTE(review): assumes getComment() never returns null for an entry —
		// confirm against JarEntryData before relying on this elsewhere.
		if (data.getComment().startsWith(UNPACK_MARKER)) {
			return getUnpackedNestedArchive(data);
		}
		JarFile jarFile = this.jarFile.getNestedJarFile(data);
		return new JarFileArchive(jarFile);
	}

	/**
	 * Unpacks the entry to {@code <tmp>/spring-boot-libs/<hash>-<simpleName>} (skipped
	 * when a file of the expected size already exists) and wraps it as an archive.
	 */
	private Archive getUnpackedNestedArchive(JarEntryData data) throws IOException {
		// Comment layout: "UNPACK:" + hash; the hash keeps unpacked names stable per content.
		AsciiBytes hash = data.getComment().substring(UNPACK_MARKER.length());
		String name = data.getName().toString();
		if (name.lastIndexOf("/") != -1) {
			name = name.substring(name.lastIndexOf("/") + 1);
		}
		File file = new File(getTempUnpackFolder(), hash.toString() + "-" + name);
		// Size comparison doubles as a cheap staleness/corruption check.
		if (!file.exists() || file.length() != data.getSize()) {
			unpack(data, file);
		}
		return new JarFileArchive(file, file.toURI().toURL());
	}

	private File getTempUnpackFolder() {
		File tempFolder = new File(System.getProperty("java.io.tmpdir"));
		File unpackFolder = new File(tempFolder, "spring-boot-libs");
		// NOTE(review): mkdirs() result is ignored and the folder is shared between
		// applications/users on the same machine — verify this is acceptable.
		unpackFolder.mkdirs();
		return unpackFolder;
	}

	/** Streams the entry's bytes to {@code file} using a fixed-size copy buffer. */
	private void unpack(JarEntryData data, File file) throws IOException {
		InputStream inputStream = data.getData().getInputStream(ResourceAccess.ONCE);
		try {
			OutputStream outputStream = new FileOutputStream(file);
			try {
				byte[] buffer = new byte[BUFFER_SIZE];
				int bytesRead = -1;
				while ((bytesRead = inputStream.read(buffer)) != -1) {
					outputStream.write(buffer, 0, bytesRead);
				}
				outputStream.flush();
			}
			finally {
				outputStream.close();
			}
		}
		finally {
			inputStream.close();
		}
	}

	@Override
	public Archive getFilteredArchive(final EntryRenameFilter filter) throws IOException {
		JarFile filteredJar = this.jarFile.getFilteredJarFile(new JarEntryFilter() {
			@Override
			public AsciiBytes apply(AsciiBytes name, JarEntryData entryData) {
				return filter.apply(name, new JarFileEntry(entryData));
			}
		});
		return new JarFileArchive(filteredJar);
	}

	/**
	 * {@link Archive.Entry} implementation backed by a {@link JarEntry}.
	 */
	private static class JarFileEntry implements Entry {

		private final JarEntryData entryData;

		JarFileEntry(JarEntryData entryData) {
			this.entryData = entryData;
		}

		public JarEntryData getJarEntryData() {
			return this.entryData;
		}

		@Override
		public boolean isDirectory() {
			return this.entryData.isDirectory();
		}

		@Override
		public AsciiBytes getName() {
			return this.entryData.getName();
		}
	}
}
apache-2.0
dgrif/binnavi
src/test/java/com/google/security/zynamics/binnavi/disassembly/CProjectFactory.java
1168
/*
Copyright 2014 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.disassembly;

import java.util.Date;

import com.google.security.zynamics.binnavi.Database.MockClasses.MockSqlProvider;
import com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplate;
import com.google.security.zynamics.binnavi.disassembly.CProject;
import com.google.security.zynamics.zylib.types.lists.FilledList;

/**
 * Test-support factory that builds a {@link CProject} populated with fixed dummy values.
 */
public class CProjectFactory {
  /**
   * Creates a fresh dummy project backed by a mock SQL provider.
   *
   * @return a new {@link CProject} with fixed id, name, comment and module count
   */
  public static CProject get() {
    final Date creationDate = new Date();
    final Date modificationDate = new Date();
    return new CProject(123, "Name", "Comment", creationDate, modificationDate, 55,
        new FilledList<DebuggerTemplate>(), new MockSqlProvider());
  }
}
apache-2.0
asedunov/intellij-community
jps/model-api/src/org/jetbrains/jps/model/JpsDummyElement.java
714
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.jps.model;

/**
 * Marker {@link JpsElement} that carries no data of its own: it declares no members
 * beyond what it inherits. Use it where the model API requires an element type but
 * there are no properties to store.
 *
 * @author nik
 */
public interface JpsDummyElement extends JpsElement {
}
apache-2.0
jmandawg/camel
camel-core/src/test/java/org/apache/camel/impl/DefaultClassResolverTest.java
5472
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.impl;

import java.io.InputStream;
import java.net.URL;

import junit.framework.TestCase;

/**
 * Exercises {@link DefaultClassResolver}: class resolution by name (with and without an
 * explicit class loader or expected type), mandatory resolution failures, and resource
 * loading from the classpath.
 *
 * @version
 */
public class DefaultClassResolverTest extends TestCase {

    /** Each test works on its own freshly created resolver. */
    private DefaultClassResolver newResolver() {
        return new DefaultClassResolver();
    }

    public void testResolveClass() {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveClass("java.lang.Integer"));
    }

    public void testResolveClassType() {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveClass("java.lang.Integer", Integer.class));
    }

    public void testResolveClassClassLoader() {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveClass("java.lang.Integer",
                DefaultClassResolverTest.class.getClassLoader()));
    }

    public void testResolveClassClassLoaderType() {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveClass("java.lang.Integer", Integer.class,
                DefaultClassResolverTest.class.getClassLoader()));
    }

    public void testResolveMandatoryClass() throws Exception {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveMandatoryClass("java.lang.Integer"));
    }

    public void testResolveMandatoryClassType() throws Exception {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveMandatoryClass("java.lang.Integer", Integer.class));
    }

    public void testResolveMandatorySimpleClassType() throws Exception {
        DefaultClassResolver resolver = newResolver();

        // Simple names should resolve the same as their java.lang equivalents.
        Class<Byte> byteType = resolver.resolveMandatoryClass("Byte", Byte.class);
        assertNotNull(byteType);
        byteType = resolver.resolveMandatoryClass("java.lang.Byte", Byte.class);
        assertNotNull(byteType);

        Class<Long> longType = resolver.resolveMandatoryClass("Long", Long.class);
        assertNotNull(longType);
        longType = resolver.resolveMandatoryClass("java.lang.Long", Long.class);
        assertNotNull(longType);

        Class<String> stringType = resolver.resolveMandatoryClass("String", String.class);
        assertNotNull(stringType);
        stringType = resolver.resolveMandatoryClass("java.lang.String", String.class);
        assertNotNull(stringType);

        // Array notation is supported as well.
        Class<Byte[]> byteArrayType = resolver.resolveMandatoryClass("Byte[]", Byte[].class);
        assertNotNull(byteArrayType);
        byteArrayType = resolver.resolveMandatoryClass("java.lang.Byte[]", Byte[].class);
        assertNotNull(byteArrayType);

        Class<Object[]> objectArrayType = resolver.resolveMandatoryClass("Object[]", Object[].class);
        assertNotNull(objectArrayType);
        objectArrayType = resolver.resolveMandatoryClass("java.lang.Object[]", Object[].class);
        assertNotNull(objectArrayType);

        Class<String[]> stringArrayType = resolver.resolveMandatoryClass("String[]", String[].class);
        assertNotNull(stringArrayType);
        stringArrayType = resolver.resolveMandatoryClass("java.lang.String[]", String[].class);
        assertNotNull(stringArrayType);
    }

    public void testResolveMandatoryClassClassLoader() throws Exception {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveMandatoryClass("java.lang.Integer",
                DefaultClassResolverTest.class.getClassLoader()));
    }

    public void testResolveMandatoryClassClassLoaderType() throws Exception {
        DefaultClassResolver resolver = newResolver();
        assertNotNull(resolver.resolveMandatoryClass("java.lang.Integer", Integer.class,
                DefaultClassResolverTest.class.getClassLoader()));
    }

    public void testResolveMandatoryClassNotFound() {
        DefaultClassResolver resolver = newResolver();
        try {
            resolver.resolveMandatoryClass("com.FooBar");
            fail("Should thrown an exception");
        } catch (ClassNotFoundException e) {
            // expected: the class does not exist on the classpath
        }
    }

    public void testLoadResourceAsUri() {
        DefaultClassResolver resolver = newResolver();
        URL url = resolver.loadResourceAsURL("log4j.properties");
        assertNotNull(url);
    }

    public void testLoadResourceAsStream() {
        DefaultClassResolver resolver = newResolver();
        InputStream is = resolver.loadResourceAsStream("log4j.properties");
        assertNotNull(is);
    }
}
apache-2.0
actframework/FrameworkBenchmarks
frameworks/Java/dropwizard/src/main/java/com/example/helloworld/db/FortuneDAO.java
168
package com.example.helloworld.db;

import com.example.helloworld.db.model.Fortune;

import java.util.List;

/**
 * Data-access contract for {@link Fortune} rows.
 */
public interface FortuneDAO {

    // Returns all fortunes; implementations presumably query the backing store — see
    // the concrete DAO for ordering guarantees (none are promised by this interface).
    List<Fortune> list();
}
bsd-3-clause
Greblys/openhab
bundles/persistence/org.openhab.persistence.exec/src/main/java/org/openhab/persistence/exec/internal/ExecService.java
3239
/**
 * Copyright (c) 2010-2015, openHAB.org and others.
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 */
package org.openhab.persistence.exec.internal;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Calendar;
import java.util.Formatter;

import org.openhab.core.items.Item;
import org.openhab.core.persistence.PersistenceService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This is the implementation of the Exec {@link PersistenceService}: it persists an item
 * state by running an external command built from the persistence alias.
 *
 * @author Henrik Sjöstrand
 * @author Thomas.Eichstaedt-Engelen
 * @since 1.1.0
 */
public class ExecService implements PersistenceService {

	private static final Logger logger = LoggerFactory.getLogger(ExecService.class);

	/**
	 * @{inheritDoc
	 */
	public String getName() {
		return "exec";
	}

	/**
	 * Formats the alias into a shell command (state, timestamp and item name are the
	 * format arguments), executes it, logs any stdout/stderr output, and waits for the
	 * process to finish. All failures are logged rather than propagated.
	 *
	 * @{inheritDoc
	 */
	public void store(Item item, String alias) {
		String execCmd = null;
		try {
			execCmd = formatAlias(alias, item.getState().toString(),
					Calendar.getInstance().getTime(), item.getName());
			logger.debug("Executing command [{}]", execCmd);
			Process process = Runtime.getRuntime().exec(execCmd);
			logger.debug("Stored item '{}' as '{}' using Exec at {}.",
					new String[] { item.getName(), item.getState().toString(),
							(new java.util.Date()).toString() });

			// Drain stdout first, then stderr, mirroring the original behavior.
			// NOTE(review): a command writing large amounts to stderr before closing
			// stdout could block here — acceptable for the short commands this
			// service is meant for.
			String output = readAll(process.getInputStream());
			if (output.length() > 0) {
				logger.debug("Output from exec command is: {}", output);
			}
			String error = readAll(process.getErrorStream());
			if (error.length() > 0) {
				logger.debug("Error from exec command is: {}", error);
			}
			process.waitFor();
		} catch (Exception e) {
			logger.error("Could not execute command [" + execCmd + "]", e);
		}
	}

	/**
	 * Reads the stream to exhaustion, concatenating all lines, and always closes it.
	 * Replaces the former in-line loops that built the output via repeated String
	 * concatenation and closed a possibly-null reader in a swallowing finally block.
	 *
	 * @param stream the process stream to drain (never null for Process streams)
	 * @return the concatenated output, empty if the stream produced nothing
	 * @throws IOException if reading the stream fails
	 */
	private static String readAll(InputStream stream) throws IOException {
		StringBuilder output = new StringBuilder();
		BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
		try {
			String line;
			while ((line = reader.readLine()) != null) {
				output.append(line);
			}
		} finally {
			reader.close();
		}
		return output.toString();
	}

	/**
	 * @{inheritDoc
	 */
	public void store(Item item) {
		throw new UnsupportedOperationException(
				"The Exec service requires aliases for persistence configurations that should match the Exec statement. Please configure exec.persist properly.");
	}

	/**
	 * Formats the given <code>alias</code> by utilizing {@link Formatter}.
	 *
	 * @param alias the alias String which contains format strings
	 * @param values the values which will be replaced in the alias String
	 *
	 * @return the formatted value. All format strings are replaced by
	 * appropriate values
	 * @see java.util.Formatter for detailed information on format Strings.
	 */
	protected String formatAlias(String alias, Object... values) {
		return String.format(alias, values);
	}
}
epl-1.0
alsmadi/CSCI-6617
src/main/java/net/floodlightcontroller/devicemanager/internal/DeviceIndex.java
4021
/**
 * Copyright 2012 Big Switch Networks, Inc.
 * Originally created by David Erickson, Stanford University
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 **/
package net.floodlightcontroller.devicemanager.internal;

import java.util.Collection;
import java.util.EnumSet;
import java.util.Iterator;

import net.floodlightcontroller.devicemanager.IDeviceService.DeviceField;

/**
 * An index that maps key fields of an entity to device keys
 */
public abstract class DeviceIndex {
    /**
     * The key fields for this index
     */
    protected EnumSet<DeviceField> keyFields;

    /**
     * Construct a new device index using the provided key fields
     * @param keyFields the key fields to use
     */
    public DeviceIndex(EnumSet<DeviceField> keyFields) {
        super();
        this.keyFields = keyFields;
    }

    /**
     * Find all device keys in the index that match the given entity
     * on all the key fields for this index
     * @param entity the entity to search for
     * @return an iterator over device keys
     */
    public abstract Iterator<Long> queryByEntity(Entity entity);

    /**
     * Get all device keys in the index. If certain devices exist
     * multiple times, then these devices may be returned multiple times
     * @return an iterator over device keys
     */
    public abstract Iterator<Long> getAll();

    /**
     * Attempt to update an index with the entities in the provided
     * {@link Device}. If the update fails because of a concurrent update,
     * will return false.
     * @param device the device to update
     * @param deviceKey the device key for the device
     * @return true if the update succeeded, false otherwise.
     */
    public abstract boolean updateIndex(Device device, Long deviceKey);

    /**
     * Add a mapping from the given entity to the given device key. This
     * update will not fail because of a concurrent update
     * @param entity the entity whose key fields to map
     * @param deviceKey the device key for the device
     */
    public abstract void updateIndex(Entity entity, Long deviceKey);

    /**
     * Remove the entry for the given entity
     * @param entity the entity to remove
     */
    public abstract void removeEntity(Entity entity);

    /**
     * Remove the given device key from the index for the given entity
     * @param entity the entity to search for
     * @param deviceKey the key to remove
     */
    public abstract void removeEntity(Entity entity, Long deviceKey);

    /**
     * Remove the given device key from the index only if the collection
     * of others does not contain an entity that is identical on all the key
     * fields for this index.
     * @param entity the entity to search for
     * @param deviceKey the key to remove
     * @param others the others against which to check
     */
    public void removeEntityIfNeeded(Entity entity, Long deviceKey,
                                     Collection<Entity> others) {
        // If any remaining entity matches this one on the index's key fields,
        // the mapping is still needed and must not be removed.
        IndexedEntity ie = new IndexedEntity(keyFields, entity);
        for (Entity o : others) {
            IndexedEntity oio = new IndexedEntity(keyFields, o);
            if (oio.equals(ie)) return;
        }

        // Otherwise remove the mapping, but only if it actually points at deviceKey.
        Iterator<Long> keyiter = this.queryByEntity(entity);
        while (keyiter.hasNext()) {
            Long key = keyiter.next();
            if (key.equals(deviceKey)) {
                removeEntity(entity, deviceKey);
                break;
            }
        }
    }
}
apache-2.0
jerome-jacob/selenium
java/server/test/org/openqa/selenium/remote/server/FakeClock.java
1039
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.remote.server; public class FakeClock implements Clock { private long now = 0; public long now() { return now; } public void pass(long durationInMillis) { now += durationInMillis; } }
apache-2.0
rokn/Count_Words_2015
testing/openjdk2/jdk/test/javax/swing/JFileChooser/6484091/bug6484091.java
2086
/* * Copyright (c) 2009, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* @test * @bug 6484091 * @summary FileSystemView leaks directory info * @author Pavel Porvatov @run main bug6484091 */ import javax.swing.filechooser.FileSystemView; import java.io.File; import java.security.AccessControlException; public class bug6484091 { public static void main(String[] args) { File dir = FileSystemView.getFileSystemView().getDefaultDirectory(); printDirContent(dir); System.setSecurityManager(new SecurityManager()); // The next test cases use 'dir' obtained without SecurityManager try { printDirContent(dir); throw new RuntimeException("Dir content was derived bypass SecurityManager"); } catch (AccessControlException e) { // It's a successful situation } } private static void printDirContent(File dir) { System.out.println("Files in " + dir.getAbsolutePath() + ":"); for (File file : dir.listFiles()) { System.out.println(file.getName()); } } }
mit
rokn/Count_Words_2015
testing/openjdk2/jdk/test/com/sun/security/auth/module/LdapLoginModule/CheckConfigs.java
9304
/*
 * Copyright (c) 2005, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/**
 * @test
 * @author Vincent Ryan
 * @bug 4814522
 * @summary Check that an LdapLoginModule can be initialized using various
 *          JAAS configurations.
 *          (LdapLoginModule replaces the JndiLoginModule for LDAP access)
 *
 * Run this test twice, once using the default security manager:
 *
 * @run main/othervm CheckConfigs
 * @run main/othervm/policy=CheckConfigs.policy CheckConfigs
 */

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.HashMap;
import javax.naming.CommunicationException;
import javax.security.auth.*;
import javax.security.auth.login.*;
import javax.security.auth.callback.*;

import com.sun.security.auth.module.LdapLoginModule;

public class CheckConfigs {

    public static void main(String[] args) throws Exception {
        SecurityManager securityManager = System.getSecurityManager();
        System.out.println(securityManager == null
            ? "[security manager is not running]"
            : "[security manager is running: " +
                securityManager.getClass().getName() + "]");

        init();
        checkConfigModes();
    }

    private static void init() throws Exception {
        // no per-test setup required
    }

    /**
     * Exercises all three LdapLoginModule modes. Each attempt is expected
     * to fail with a CommunicationException (no LDAP server is available).
     */
    private static void checkConfigModes() throws Exception {
        expectCommunicationFailure("Testing search-first mode...",
            new SearchFirstMode());
        expectCommunicationFailure("\nTesting authentication-first mode...",
            new AuthFirstMode());
        expectCommunicationFailure("\nTesting authentication-only mode...",
            new AuthOnlyMode());
    }

    /**
     * Attempts a login with the given JAAS configuration and verifies that
     * it fails with a LoginException caused by a CommunicationException
     * (no LDAP server is listening).
     *
     * @param banner message printed before the attempt
     * @param config the JAAS configuration to use for the login
     * @throws Exception if the login unexpectedly succeeds, or fails for a
     *         reason other than a CommunicationException
     */
    private static void expectCommunicationFailure(String banner,
            Configuration config) throws Exception {
        System.out.println(banner);

        try {
            LoginContext ldapLogin =
                new LoginContext(LdapConfiguration.LOGIN_CONFIG_NAME, null,
                    new TestCallbackHandler(), config);
            ldapLogin.login();
            throw new SecurityException("expected a LoginException");

        } catch (LoginException le) {
            // expected behaviour (because no LDAP server is available)
            if (!(le.getCause() instanceof CommunicationException)) {
                throw le;
            }
        }
    }

    /**
     * Supplies a fixed username and password to the login module.
     */
    private static class TestCallbackHandler implements CallbackHandler {

        public void handle(Callback[] callbacks)
                throws IOException, UnsupportedCallbackException {

            for (int i = 0; i < callbacks.length; i++) {
                if (callbacks[i] instanceof NameCallback) {
                    ((NameCallback)callbacks[i]).setName("myname");

                } else if (callbacks[i] instanceof PasswordCallback) {
                    ((PasswordCallback)callbacks[i])
                        .setPassword("mypassword".toCharArray());

                } else {
                    throw new UnsupportedCallbackException
                        (callbacks[i], "Unrecognized callback");
                }
            }
        }
    }
}

class LdapConfiguration extends Configuration {

    // The JAAS configuration name for ldap-based authentication
    public static final String LOGIN_CONFIG_NAME = "TestAuth";

    // The JAAS configuration for ldap-based authentication
    protected static AppConfigurationEntry[] entries;

    // The classname of the login module for ldap-based authentication
    protected static final String LDAP_LOGIN_MODULE =
        LdapLoginModule.class.getName();

    /**
     * Gets the JAAS configuration for ldap-based authentication
     */
    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
        return name.equals(LOGIN_CONFIG_NAME) ? entries : null;
    }

    /**
     * Refreshes the configuration.
     */
    public void refresh() {
        // the configuration is fixed
    }
}

/**
 * This class defines the JAAS configuration for ldap-based authentication.
 * It is equivalent to the following textual configuration entry:
 * <pre>
 *     TestAuth {
 *         com.sun.security.auth.module.LdapLoginModule REQUIRED
 *             userProvider="ldap://localhost:23456/dc=example,dc=com"
 *             userFilter="(&(uid={USERNAME})(objectClass=inetOrgPerson))"
 *             authzIdentity="{EMPLOYEENUMBER}"
 *             debug=true;
 *     };
 * </pre>
 */
class SearchFirstMode extends LdapConfiguration {

    public SearchFirstMode() {
        super();

        Map<String, String> options = new HashMap<>(4);
        options.put("userProvider", "ldap://localhost:23456/dc=example,dc=com");
        options.put("userFilter",
            "(&(uid={USERNAME})(objectClass=inetOrgPerson))");
        options.put("authzIdentity", "{EMPLOYEENUMBER}");
        options.put("debug", "true");

        entries = new AppConfigurationEntry[] {
            new AppConfigurationEntry(LDAP_LOGIN_MODULE,
                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                options)
        };
    }
}

/**
 * This class defines the JAAS configuration for ldap-based authentication.
 * It is equivalent to the following textual configuration entry:
 * <pre>
 *     TestAuth {
 *         com.sun.security.auth.module.LdapLoginModule REQUIRED
 *             userProvider="ldap://localhost:23456/dc=example,dc=com"
 *             authIdentity="{USERNAME}"
 *             userFilter="(&(|(samAccountName={USERNAME})(userPrincipalName={USERNAME})(cn={USERNAME}))(objectClass=user))"
 *             useSSL=false
 *             debug=true;
 *     };
 * </pre>
 */
class AuthFirstMode extends LdapConfiguration {

    public AuthFirstMode() {
        super();

        Map<String, String> options = new HashMap<>(5);
        options.put("userProvider", "ldap://localhost:23456/dc=example,dc=com");
        options.put("authIdentity", "{USERNAME}");
        options.put("userFilter",
            "(&(|(samAccountName={USERNAME})(userPrincipalName={USERNAME})" +
            "(cn={USERNAME}))(objectClass=user))");
        options.put("useSSL", "false");
        options.put("debug", "true");

        entries = new AppConfigurationEntry[] {
            new AppConfigurationEntry(LDAP_LOGIN_MODULE,
                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                options)
        };
    }
}

/**
 * This class defines the JAAS configuration for ldap-based authentication.
 * It is equivalent to the following textual configuration entry:
 * <pre>
 *     TestAuth {
 *         com.sun.security.auth.module.LdapLoginModule REQUIRED
 *             userProvider="ldap://localhost:23456 ldap://localhost:23457"
 *             authIdentity="cn={USERNAME},ou=people,dc=example,dc=com"
 *             authzIdentity="staff"
 *             debug=true;
 *     };
 * </pre>
 */
class AuthOnlyMode extends LdapConfiguration {

    public AuthOnlyMode() {
        super();

        Map<String, String> options = new HashMap<>(4);
        options.put("userProvider",
            "ldap://localhost:23456 ldap://localhost:23457");
        options.put("authIdentity",
            "cn={USERNAME},ou=people,dc=example,dc=com");
        options.put("authzIdentity", "staff");
        options.put("debug", "true");

        entries = new AppConfigurationEntry[] {
            new AppConfigurationEntry(LDAP_LOGIN_MODULE,
                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                options)
        };
    }
}
mit
asedunov/intellij-community
platform/xdebugger-api/src/com/intellij/xdebugger/settings/DebuggerSettingsCategory.java
197
package com.intellij.xdebugger.settings; public enum DebuggerSettingsCategory { ROOT /* will be placed under root "Debugger" node, use it with care */, GENERAL, DATA_VIEWS, STEPPING, HOTSWAP }
apache-2.0
rokn/Count_Words_2015
testing/openjdk2/jdk/test/java/lang/reflect/ReflectPermission/Exceptions.java
2852
/*
 * Copyright (c) 2004, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/**
 * @test
 * @bug 5002910
 */

import java.lang.reflect.ReflectPermission;

public class Exceptions {
    private static int fail = 0;
    private static int pass = 0;
    private static Throwable first;

    static void pass() {
        pass++;
    }

    static void fail(String fs, Throwable ex) {
        String s = "'" + fs + "': " + ex.getClass().getName() + " thrown";
        if (first == null)
            first = ex;
        System.err.println("FAILED: " + s);
        fail++;
    }

    /**
     * Runs the given constructor call and records a pass only when it
     * throws an instance of the expected exception type.
     */
    private static void expectThrows(String label,
                                     Class<? extends Exception> expected,
                                     Runnable action) {
        RuntimeException notThrown = new RuntimeException("no exception thrown");
        try {
            action.run();
            fail(label, notThrown);
        } catch (Exception x) {
            if (expected.isInstance(x))
                pass();
            else
                fail(label, x);
        }
    }

    public static void main(String [] args) {
        expectThrows("null", NullPointerException.class,
            () -> new ReflectPermission(null));
        expectThrows("\"\"", IllegalArgumentException.class,
            () -> new ReflectPermission(""));
        expectThrows("null, null", NullPointerException.class,
            () -> new ReflectPermission(null, null));
        expectThrows("\"\", null", IllegalArgumentException.class,
            () -> new ReflectPermission("", null));

        if (fail != 0)
            throw new RuntimeException((fail + pass) + " tests: "
                                       + fail + " failure(s), first", first);
        else
            System.out.println("all " + (fail + pass) + " tests passed");
    }
}
mit
liveqmock/platform-tools-idea
java/java-tests/testData/codeInsight/completion/normal/ChainedCallOnNextLine.java
68
class Foo { int a = new Foo(). t<caret>a().length(). .. }
apache-2.0
android-ia/platform_tools_idea
java/java-tests/testData/refactoring/inlineSuperClass/newArrayInitializerExpr/after/Usage.java
88
class Usage { void ba() { Test[][] s = new Test[][]{new Test[]{new Test()}}; } }
apache-2.0
martinstuga/elasticsearch
test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
2549
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test.rest.spec; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.rest.support.FileUtils; import java.io.IOException; import java.io.InputStream; import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collection; import java.util.HashMap; import java.util.Map; /** * Holds the elasticsearch REST spec */ public class RestSpec { Map<String, RestApi> restApiMap = new HashMap<>(); private RestSpec() { } void addApi(RestApi restApi) { restApiMap.put(restApi.getName(), restApi); } public RestApi getApi(String api) { return restApiMap.get(api); } public Collection<RestApi> getApis() { return restApiMap.values(); } /** * Parses the complete set of REST spec available under the provided directories */ public static RestSpec parseFrom(FileSystem fileSystem, String optionalPathPrefix, String... 
paths) throws IOException { RestSpec restSpec = new RestSpec(); for (String path : paths) { for (Path jsonFile : FileUtils.findJsonSpec(fileSystem, optionalPathPrefix, path)) { try (InputStream stream = Files.newInputStream(jsonFile)) { XContentParser parser = JsonXContent.jsonXContent.createParser(stream); RestApi restApi = new RestApiParser().parse(parser); restSpec.addApi(restApi); } catch (Throwable ex) { throw new IOException("Can't parse rest spec file: [" + jsonFile + "]", ex); } } } return restSpec; } }
apache-2.0
queirozfcom/elasticsearch
core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java
46500
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.LongHashSet; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.junit.Test; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.IsNull.notNullValue; /** * */ @ESIntegTestCase.SuiteScopeTestCase public class HistogramIT extends ESIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; private static final String MULTI_VALUED_FIELD_NAME = "l_values"; static int numDocs; static int interval; static int numValueBuckets, numValuesBuckets; static long[] valueCounts, valuesCounts; @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); interval = randomIntBetween(2, 5); numValueBuckets = numDocs / interval + 1; valueCounts = new long[numValueBuckets]; for (int i = 0; i < numDocs; i++) { final int bucket = (i + 1) / interval; valueCounts[bucket]++; } numValuesBuckets = (numDocs + 1) / interval + 1; valuesCounts = new long[numValuesBuckets]; for (int i = 0; i < numDocs; i++) { final int bucket1 = (i + 1) / interval; final int bucket2 = (i + 2) / interval; valuesCounts[bucket1]++; if (bucket1 != bucket2) { valuesCounts[bucket2]++; } } List<IndexRequestBuilder> builders = new 
ArrayList<>(); for (int i = 0; i < numDocs; i++) { builders.add(client().prepareIndex("idx", "type").setSource(jsonBuilder() .startObject() .field(SINGLE_VALUED_FIELD_NAME, i + 1) .startArray(MULTI_VALUED_FIELD_NAME).value(i + 1).value(i + 2).endArray() .field("tag", "tag" + i) .endObject())); } assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder() .startObject() .field(SINGLE_VALUED_FIELD_NAME, i * 2) .endObject())); } indexRandom(true, builders); ensureSearchable(); } @Test public void singleValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } public void singleValuedField_withOffset() throws Exception { int interval1 = 10; int offset = 5; SearchResponse response = client() .prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval1).offset(offset)) .execute().actionGet(); // from setup we have between 6 and 20 documents, each with value 1 in test field int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? 
numValueBuckets : numValueBuckets + 1; Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets)); // first bucket should start at -5, contain 4 documents Histogram.Bucket bucket = histo.getBuckets().get(0); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(-5L)); assertThat(bucket.getDocCount(), equalTo(4L)); // last bucket should have (numDocs % interval + 1) docs bucket = histo.getBuckets().get(0); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(numDocs%interval1 + 5L)); assertThat(bucket.getDocCount(), equalTo((numDocs % interval) + 1L)); } /** * Shift buckets by random offset between [2..interval]. From setup we have 1 doc per values from 1..numdocs. * Special care needs to be taken for expecations on counts in first and last bucket. */ @Test public void singleValuedField_withRandomOffset() throws Exception { int offset = randomIntBetween(2, interval); SearchResponse response = client() .prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).offset(offset)) .execute().actionGet(); assertSearchResponse(response); // shifting by offset>2 creates new extra bucket [0,offset-1] // if offset is >= number of values in original last bucket, that effect is canceled int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? 
numValueBuckets : numValueBuckets + 1; Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets)); int docsCounted = 0; for (int i = 0; i < expectedNumberOfBuckets; ++i) { Histogram.Bucket bucket = histo.getBuckets().get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) ((i-1) * interval + offset))); if (i==0) { // first bucket long expectedFirstBucketCount = offset-1; assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount)); docsCounted += expectedFirstBucketCount; } else if(i<expectedNumberOfBuckets-1) { assertThat(bucket.getDocCount(), equalTo((long) interval)); docsCounted += interval; } else { assertThat(bucket.getDocCount(), equalTo((long) numDocs - docsCounted)); } } } @Test public void singleValuedField_OrderedByKeyAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_ASC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void singleValuedField_OrderedByKeyDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") 
.addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(numValueBuckets - i - 1); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void singleValuedField_OrderedByCountAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_ASC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet buckets = new LongHashSet(); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); long previousCount = Long.MIN_VALUE; for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = histoBuckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertEquals(0, key % interval); assertTrue(buckets.add(key)); assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)])); assertThat(bucket.getDocCount(), greaterThanOrEqualTo(previousCount)); previousCount = bucket.getDocCount(); } } @Test public void 
singleValuedField_OrderedByCountDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_DESC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet buckets = new LongHashSet(); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); long previousCount = Long.MAX_VALUE; for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = histoBuckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertEquals(0, key % interval); assertTrue(buckets.add(key)); assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)])); assertThat(bucket.getDocCount(), lessThanOrEqualTo(previousCount)); previousCount = bucket.getDocCount(); } } @Test public void singleValuedField_WithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); Object[] propertiesKeys = (Object[]) histo.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) histo.getProperty("_count"); Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value"); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new 
ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); assertThat((long) propertiesKeys[i], equalTo((long) i * interval)); assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i])); assertThat((double) propertiesCounts[i], equalTo((double) s)); } } @Test public void singleValuedField_WithSubAggregation_Inherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .subAggregation(sum("sum"))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i) { s += j + 1; } } assertThat(sum.getValue(), 
equalTo((double) s)); } } @Test public void singleValuedField_OrderedBySubAggregationAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", true)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.NEGATIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); assertThat(sum.getValue(), greaterThanOrEqualTo(previousSum)); previousSum = s; } } @Test public void singleValuedField_OrderedBySubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", false)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); 
assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); assertThat(sum.getValue(), lessThanOrEqualTo(previousSum)); previousSum = s; } } @Test public void singleValuedField_OrderedByMultiValuedSubAggregationAsc_Inherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("stats.sum", true)) .subAggregation(stats("stats"))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.NEGATIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); 
assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Stats stats = bucket.getAggregations().get("stats"); assertThat(stats, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(stats.getSum(), equalTo((double) s)); assertThat(stats.getSum(), greaterThanOrEqualTo(previousSum)); previousSum = s; } } @Test public void singleValuedField_OrderedByMultiValuedSubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("stats.sum", false)) .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Stats stats = bucket.getAggregations().get("stats"); assertThat(stats, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(stats.getSum(), equalTo((double) s)); assertThat(stats.getSum(), lessThanOrEqualTo(previousSum)); 
previousSum = s; } } @Test public void singleValuedField_OrderedBySubAggregationDesc_DeepOrderPath() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("filter>max", asc)) .subAggregation(filter("filter").filter(matchAllQuery()) .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double prevMax = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Filter filter = bucket.getAggregations().get("filter"); assertThat(filter, notNullValue()); assertThat(bucket.getDocCount(), equalTo(filter.getDocCount())); Max max = filter.getAggregations().get("max"); assertThat(max, Matchers.notNullValue()); assertThat(max.getValue(), asc ? 
greaterThanOrEqualTo(prevMax) : lessThanOrEqualTo(prevMax)); prevMax = max.getValue(); } } @Test public void singleValuedField_WithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).script(new Script("_value + 1")).interval(interval)) .execute().actionGet(); assertSearchResponse(response); final int numBuckets = (numDocs + 1) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 1) / interval + 1]; for (int i = 0; i < numDocs; ++i) { ++counts[(i + 2) / interval]; } Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); assertThat(bucket.getDocCount(), equalTo(counts[key / interval])); } } @Test public void multiValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); } } @Test public void multiValuedField_OrderedByKeyDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(numValuesBuckets - i - 1); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); } } @Test public void multiValuedField_WithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).script(new Script("_value + 1")).interval(interval)) .execute().actionGet(); assertSearchResponse(response); final int numBuckets = (numDocs + 2) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 2) / interval + 1]; for (int i = 0; i < numDocs; ++i) { final int bucket1 = (i + 2) / interval; final int bucket2 = (i + 3) / interval; ++counts[bucket1]; if (bucket1 != bucket2) { ++counts[bucket2]; } } Histogram histo = response.getAggregations().get("histo"); assertThat(histo, 
notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); assertThat(bucket.getDocCount(), equalTo(counts[key / interval])); } } @Test public void multiValuedField_WithValueScript_WithInheritedSubAggregator() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation( histogram("histo") .field(MULTI_VALUED_FIELD_NAME) .script(new Script("_value + 1")) .interval(interval) .subAggregation( terms(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())).order( Terms.Order.term(true)))).execute().actionGet(); assertSearchResponse(response); final int numBuckets = (numDocs + 2) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 2) / interval + 1]; for (int i = 0; i < numDocs; ++i) { final int bucket1 = (i + 2) / interval; final int bucket2 = (i + 3) / interval; ++counts[bucket1]; if (bucket1 != bucket2) { ++counts[bucket2]; } } Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); assertThat(bucket.getDocCount(), equalTo(counts[key / interval])); Terms terms = bucket.getAggregations().get(MULTI_VALUED_FIELD_NAME); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo(MULTI_VALUED_FIELD_NAME)); int minTerm = Math.max(2, key - 1); int maxTerm = Math.min(numDocs + 2, (key / interval + 1) * interval); assertThat(terms.getBuckets().size(), equalTo(maxTerm - minTerm + 1)); Iterator<Terms.Bucket> iter = terms.getBuckets().iterator(); for (int j = minTerm; j <= maxTerm; ++j) { assertThat(iter.next().getKeyAsNumber().longValue(), equalTo((long) j)); } } } @Test public void script_SingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void script_SingleValue_WithSubAggregator_Inherited() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( histogram("histo").script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")).interval(interval) .subAggregation(sum("sum"))).execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); } } @Test public void script_MultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); 
assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); } } @Test public void script_MultiValued_WithAggregatorInherited() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( histogram("histo").script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")).interval(interval) .subAggregation(sum("sum"))).execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i || (j + 2) / interval == i) { s += j + 1; s += j + 2; } } assertThat(sum.getValue(), equalTo((double) s)); } } @Test public void unmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), 
equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(0)); } @Test public void partiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void emptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1l).minDocCount(0) .subAggregation(histogram("sub_histo").interval(1l))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); Histogram histo = searchResponse.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List<? 
extends Bucket> buckets = histo.getBuckets(); Histogram.Bucket bucket = buckets.get(1); assertThat(bucket, Matchers.notNullValue()); histo = bucket.getAggregations().get("sub_histo"); assertThat(histo, Matchers.notNullValue()); assertThat(histo.getName(), equalTo("sub_histo")); assertThat(histo.getBuckets().isEmpty(), is(true)); } @Test public void singleValuedField_WithExtendedBounds() throws Exception { int lastDataBucketKey = (numValueBuckets - 1) * interval; // randomizing the number of buckets on the min bound // (can sometimes fall within the data range, but more frequently will fall before the data range) int addedBucketsLeft = randomIntBetween(0, numValueBuckets); long boundsMinKey = addedBucketsLeft * interval; if (frequently()) { boundsMinKey = -boundsMinKey; } else { addedBucketsLeft = 0; } long boundsMin = boundsMinKey + randomIntBetween(0, interval - 1); // randomizing the number of buckets on the max bound // (can sometimes fall within the data range, but more frequently will fall after the data range) int addedBucketsRight = randomIntBetween(0, numValueBuckets); long boundsMaxKeyDelta = addedBucketsRight * interval; if (rarely()) { addedBucketsRight = 0; boundsMaxKeyDelta = -boundsMaxKeyDelta; } long boundsMaxKey = lastDataBucketKey + boundsMaxKeyDelta; long boundsMax = boundsMaxKey + randomIntBetween(0, interval - 1); // it could be that the random bounds.min we chose ended up greater than bounds.max - this should cause an // error boolean invalidBoundsError = boundsMin > boundsMax; // constructing the newly expected bucket list int bucketsCount = numValueBuckets + addedBucketsLeft + addedBucketsRight; long[] extendedValueCounts = new long[bucketsCount]; System.arraycopy(valueCounts, 0, extendedValueCounts, addedBucketsLeft, valueCounts.length); SearchResponse response = null; try { response = client().prepareSearch("idx") .addAggregation(histogram("histo") .field(SINGLE_VALUED_FIELD_NAME) .interval(interval) .minDocCount(0) 
.extendedBounds(boundsMin, boundsMax)) .execute().actionGet(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); return; } } catch (Exception e) { if (invalidBoundsError) { // expected return; } else { throw e; } } assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(bucketsCount)); long key = Math.min(boundsMinKey, 0); for (int i = 0; i < bucketsCount; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i])); key += interval; } } /** * see issue #9634, negative interval in histogram should raise exception */ public void testExeptionOnNegativerInterval() { try { client().prepareSearch("empty_bucket_idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(-1).minDocCount(0)).execute().actionGet(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("Missing required field [interval]")); } } }
apache-2.0
rokn/Count_Words_2015
testing/openjdk2/hotspot/agent/src/share/classes/sun/jvm/hotspot/debugger/win32/coff/TestDebugInfo.java
13919
/* * Copyright (c) 2001, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
* */ package sun.jvm.hotspot.debugger.win32.coff; public class TestDebugInfo implements DebugVC50SubsectionTypes, DebugVC50SymbolTypes, DebugVC50TypeLeafIndices { public static void main(String[] args) { if (args.length != 1) { System.err.println("usage: java TestParser [file name]"); System.err.println("File name may be an .exe, .dll or .obj"); System.exit(1); } try { COFFFile file = COFFFileParser.getParser().parse(args[0]); if (file.isImage()) { System.out.println("PE Image detected."); } else { System.out.println("PE Image NOT detected, assuming object file."); } DebugVC50 vc50 = getDebugVC50(file); if (vc50 == null) { System.out.println("No debug information found."); System.exit(1); } else { System.out.println("Debug information found!"); } DebugVC50SubsectionDirectory dir = vc50.getSubsectionDirectory(); for (int i = 0; i < dir.getNumEntries(); i++) { DebugVC50Subsection sec = dir.getSubsection(i); switch (sec.getSubsectionType()) { case SST_MODULE: System.out.println(" SST_MODULE"); break; case SST_TYPES: System.out.println(" SST_TYPES"); break; case SST_PUBLIC: System.out.println(" SST_PUBLIC"); break; case SST_PUBLIC_SYM: System.out.println(" SST_PUBLIC_SYM"); break; case SST_SYMBOLS: System.out.println(" SST_SYMBOLS"); break; case SST_ALIGN_SYM: System.out.println(" SST_ALIGN_SYM"); printSymbolTable(((DebugVC50SSAlignSym) sec).getSymbolIterator()); break; case SST_SRC_LN_SEG: System.out.println(" SST_SRC_LN_SEG"); break; case SST_SRC_MODULE: System.out.println(" SST_SRC_MODULE"); break; case SST_LIBRARIES: System.out.println(" SST_LIBRARIES"); break; case SST_GLOBAL_SYM: System.out.println(" SST_GLOBAL_SYM"); printSymbolTable(sec); break; case SST_GLOBAL_PUB: System.out.println(" SST_GLOBAL_PUB"); printSymbolTable(sec); break; case SST_GLOBAL_TYPES: System.out.println(" SST_GLOBAL_TYPES"); printTypeTable(sec); break; case SST_MPC: System.out.println(" SST_MPC"); break; case SST_SEG_MAP: System.out.println(" SST_SEG_MAP"); break; case SST_SEG_NAME: 
System.out.println(" SST_SEG_NAME"); break; case SST_PRE_COMP: System.out.println(" SST_PRE_COMP"); break; case SST_UNUSED: System.out.println(" SST_UNUSED"); break; case SST_OFFSET_MAP_16: System.out.println(" SST_OFFSET_MAP_16"); break; case SST_OFFSET_MAP_32: System.out.println(" SST_OFFSET_MAP_32"); break; case SST_FILE_INDEX: System.out.println(" SST_FILE_INDEX"); break; case SST_STATIC_SYM: System.out.println(" SST_STATIC_SYM"); printSymbolTable(sec); break; default: System.out.println(" (Unknown subsection type " + sec.getSubsectionType() + ")"); break; } } } catch (Exception e) { e.printStackTrace(); } } private static DebugVC50 getDebugVC50(COFFFile file) { COFFHeader header = file.getHeader(); OptionalHeader opt = header.getOptionalHeader(); if (opt == null) { System.out.println("Optional header not found."); return null; } OptionalHeaderDataDirectories dd = opt.getDataDirectories(); if (dd == null) { System.out.println("Optional header data directories not found."); return null; } DebugDirectory debug = dd.getDebugDirectory(); if (debug == null) { System.out.println("Debug directory not found."); return null; } for (int i = 0; i < debug.getNumEntries(); i++) { DebugDirectoryEntry entry = debug.getEntry(i); if (entry.getType() == DebugTypes.IMAGE_DEBUG_TYPE_CODEVIEW) { System.out.println("Debug Directory Entry " + i + " has debug type IMAGE_DEBUG_TYPE_CODEVIEW"); return entry.getDebugVC50(); } } return null; } private static void printSymbolTable(DebugVC50Subsection sec) { DebugVC50SSSymbolBase sym = (DebugVC50SSSymbolBase) sec; DebugVC50SymbolIterator iter = sym.getSymbolIterator(); printSymbolTable(iter); } private static void printSymbolTable(DebugVC50SymbolIterator iter) { while (!iter.done()) { int type = iter.getType() & 0xFFFF; switch (type) { case S_COMPILE: System.out.println(" S_COMPILE"); break; case S_SSEARCH: System.out.println(" S_SSEARCH"); break; case S_END: System.out.println(" S_END"); break; case S_SKIP: System.out.println(" S_SKIP"); 
break; case S_CVRESERVE: System.out.println(" S_CVRESERVE"); break; case S_OBJNAME: System.out.println(" S_OBJNAME"); break; case S_ENDARG: System.out.println(" S_ENDARG"); break; case S_COBOLUDT: System.out.println(" S_COBOLUDT"); break; case S_MANYREG: System.out.println(" S_MANYREG"); break; case S_RETURN: System.out.println(" S_RETURN"); break; case S_ENTRYTHIS: System.out.println(" S_ENTRYTHIS"); break; case S_REGISTER: System.out.println(" S_REGISTER"); break; case S_CONSTANT: System.out.println(" S_CONSTANT"); break; case S_UDT: System.out.println(" S_UDT"); break; case S_COBOLUDT2: System.out.println(" S_COBOLUDT2"); break; case S_MANYREG2: System.out.println(" S_MANYREG2"); break; case S_BPREL32: System.out.println(" S_BPREL32"); break; case S_LDATA32: System.out.println(" S_LDATA32"); break; case S_GDATA32: System.out.println(" S_GDATA32"); break; case S_PUB32: System.out.println(" S_PUB32"); break; case S_LPROC32: System.out.println(" S_LPROC32"); break; case S_GPROC32: System.out.println(" S_GPROC32"); break; case S_THUNK32: System.out.println(" S_THUNK32"); break; case S_BLOCK32: System.out.println(" S_BLOCK32"); break; case S_WITH32: System.out.println(" S_WITH32"); break; case S_LABEL32: System.out.println(" S_LABEL32"); break; case S_CEXMODEL32: System.out.println(" S_CEXMODEL32"); break; case S_VFTTABLE32: System.out.println(" S_VFTTABLE32"); break; case S_REGREL32: System.out.println(" S_REGREL32"); break; case S_LTHREAD32: System.out.println(" S_LTHREAD32"); break; case S_GTHREAD32: System.out.println(" S_GTHREAD32"); break; case S_LPROCMIPS: System.out.println(" S_LPROCMIPS"); break; case S_GPROCMIPS: System.out.println(" S_GPROCMIPS"); break; case S_PROCREF: System.out.println(" S_PROCREF"); break; case S_DATAREF: System.out.println(" S_DATAREF"); break; case S_ALIGN: System.out.println(" S_ALIGN"); break; default: System.out.println(" (Unknown symbol type " + type + ")"); break; } iter.next(); } } private static void 
printTypeTable(DebugVC50Subsection sec) { DebugVC50SSGlobalTypes types = (DebugVC50SSGlobalTypes) sec; DebugVC50TypeIterator iter = types.getTypeIterator(); while (!iter.done()) { System.out.print(" Type string: "); while (!iter.typeStringDone()) { int leaf = iter.typeStringLeaf() & 0xFFFF; switch (leaf) { case LF_MODIFIER: System.out.print("LF_MODIFIER "); break; case LF_POINTER: System.out.print("LF_POINTER "); break; case LF_ARRAY: System.out.print("LF_ARRAY "); break; case LF_CLASS: System.out.print("LF_CLASS "); break; case LF_STRUCTURE: System.out.print("LF_STRUCTURE "); break; case LF_UNION: System.out.print("LF_UNION "); break; case LF_ENUM: System.out.print("LF_ENUM "); break; case LF_PROCEDURE: System.out.print("LF_PROCEDURE "); break; case LF_MFUNCTION: System.out.print("LF_MFUNCTION "); break; case LF_VTSHAPE: System.out.print("LF_VTSHAPE "); break; case LF_COBOL0: System.out.print("LF_COBOL0 "); break; case LF_COBOL1: System.out.print("LF_COBOL1 "); break; case LF_BARRAY: System.out.print("LF_BARRAY "); break; case LF_LABEL: System.out.print("LF_LABEL "); break; case LF_NULL: System.out.print("LF_NULL "); break; case LF_NOTTRAN: System.out.print("LF_NOTTRAN "); break; case LF_DIMARRAY: System.out.print("LF_DIMARRAY "); break; case LF_VFTPATH: System.out.print("LF_VFTPATH "); break; case LF_PRECOMP: System.out.print("LF_PRECOMP "); break; case LF_ENDPRECOMP: System.out.print("LF_ENDPRECOMP "); break; case LF_OEM: System.out.print("LF_OEM "); break; case LF_TYPESERVER: System.out.print("LF_TYPESERVER "); break; case LF_SKIP: System.out.print("LF_SKIP "); break; case LF_ARGLIST: System.out.print("LF_ARGLIST "); break; case LF_DEFARG: System.out.print("LF_DEFARG "); break; case LF_FIELDLIST: System.out.print("LF_FIELDLIST "); break; case LF_DERIVED: System.out.print("LF_DERIVED "); break; case LF_BITFIELD: System.out.print("LF_BITFIELD "); break; case LF_METHODLIST: System.out.print("LF_METHODLIST "); break; case LF_DIMCONU: System.out.print("LF_DIMCONU 
"); break; case LF_DIMCONLU: System.out.print("LF_DIMCONLU "); break; case LF_DIMVARU: System.out.print("LF_DIMVARU "); break; case LF_DIMVARLU: System.out.print("LF_DIMVARLU "); break; case LF_REFSYM: System.out.print("LF_REFSYM "); break; case LF_BCLASS: System.out.print("LF_BCLASS "); break; case LF_VBCLASS: System.out.print("LF_VBCLASS "); break; case LF_IVBCLASS: System.out.print("LF_IVBCLASS "); break; case LF_ENUMERATE: System.out.print("LF_ENUMERATE "); break; case LF_FRIENDFCN: System.out.print("LF_FRIENDFCN "); break; case LF_INDEX: System.out.print("LF_INDEX "); break; case LF_MEMBER: System.out.print("LF_MEMBER "); System.out.print(iter.getMemberName() + " "); break; case LF_STMEMBER: System.out.print("LF_STMEMBER "); break; case LF_METHOD: System.out.print("LF_METHOD "); System.out.print(iter.getMethodName() + " "); break; case LF_NESTTYPE: System.out.print("LF_NESTTYPE "); break; case LF_VFUNCTAB: System.out.print("LF_VFUNCTAB "); break; case LF_FRIENDCLS: System.out.print("LF_FRIENDCLS "); break; case LF_ONEMETHOD: System.out.print("LF_ONEMETHOD "); System.out.print(iter.getOneMethodName() + " "); break; case LF_VFUNCOFF: System.out.print("LF_VFUNCOFF "); break; case LF_NESTTYPEEX: System.out.print("LF_NESTTYPEEX "); break; case LF_MEMBERMODIFY: System.out.print("LF_MEMBERMODIFY "); break; case LF_CHAR: System.out.print("LF_CHAR "); break; case LF_SHORT: System.out.print("LF_SHORT "); break; case LF_USHORT: System.out.print("LF_USHORT "); break; case LF_LONG: System.out.print("LF_LONG "); break; case LF_ULONG: System.out.print("LF_ULONG "); break; case LF_REAL32: System.out.print("LF_REAL32 "); break; case LF_REAL64: System.out.print("LF_REAL64 "); break; case LF_REAL80: System.out.print("LF_REAL80 "); break; case LF_REAL128: System.out.print("LF_REAL128 "); break; case LF_QUADWORD: System.out.print("LF_QUADWORD "); break; case LF_UQUADWORD: System.out.print("LF_UQUADWORD "); break; case LF_REAL48: System.out.print("LF_REAL48 "); break; case 
LF_COMPLEX32: System.out.print("LF_COMPLEX32 "); break; case LF_COMPLEX64: System.out.print("LF_COMPLEX64 "); break; case LF_COMPLEX80: System.out.print("LF_COMPLEX80 "); break; case LF_COMPLEX128: System.out.print("LF_COMPLEX128 "); break; case LF_VARSTRING: System.out.print("LF_VARSTRING "); break; case LF_PAD0: System.out.print("LF_PAD0 "); break; case LF_PAD1: System.out.print("LF_PAD1 "); break; case LF_PAD2: System.out.print("LF_PAD2 "); break; case LF_PAD3: System.out.print("LF_PAD3 "); break; case LF_PAD4: System.out.print("LF_PAD4 "); break; case LF_PAD5: System.out.print("LF_PAD5 "); break; case LF_PAD6: System.out.print("LF_PAD6 "); break; case LF_PAD7: System.out.print("LF_PAD7 "); break; case LF_PAD8: System.out.print("LF_PAD8 "); break; case LF_PAD9: System.out.print("LF_PAD9 "); break; case LF_PAD10: System.out.print("LF_PAD10 "); break; case LF_PAD11: System.out.print("LF_PAD11 "); break; case LF_PAD12: System.out.print("LF_PAD12 "); break; case LF_PAD13: System.out.print("LF_PAD13 "); break; case LF_PAD14: System.out.print("LF_PAD14 "); break; case LF_PAD15: System.out.print("LF_PAD15 "); break; default: System.out.print("(Unknown leaf " + leaf + ")"); } iter.typeStringNext(); } System.out.println(""); iter.next(); } } }
mit
thedrummeraki/Aki-SSL
src/org/bouncycastle/math/ec/ScaleXPointMap.java
299
package org.bouncycastle.math.ec;

/**
 * An {@link ECPointMap} that transforms each point by scaling its
 * X-coordinate with a fixed field element (via {@link ECPoint#scaleX}).
 */
public class ScaleXPointMap implements ECPointMap
{
    /** The fixed field element applied to every point's X-coordinate. */
    protected final ECFieldElement scale;

    /**
     * @param scale the fixed X-coordinate scaling factor
     */
    public ScaleXPointMap(ECFieldElement scale)
    {
        this.scale = scale;
    }

    /**
     * @param p the point to transform
     * @return {@code p} with its X-coordinate scaled by the configured factor
     */
    public ECPoint map(ECPoint p)
    {
        return p.scaleX(scale);
    }
}
apache-2.0
nknize/elasticsearch
server/src/test/java/org/elasticsearch/search/suggest/term/SuggestModeTests.java
2792
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest.term; import org.elasticsearch.common.io.stream.AbstractWriteableEnumTestCase; import java.io.IOException; import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import static org.hamcrest.Matchers.equalTo; /** * Test the {@link SuggestMode} enum. 
*/ public class SuggestModeTests extends AbstractWriteableEnumTestCase { public SuggestModeTests() { super(SuggestMode::readFromStream); } @Override public void testValidOrdinals() { assertThat(SuggestMode.MISSING.ordinal(), equalTo(0)); assertThat(SuggestMode.POPULAR.ordinal(), equalTo(1)); assertThat(SuggestMode.ALWAYS.ordinal(), equalTo(2)); } @Override public void testFromString() { assertThat(SuggestMode.resolve("missing"), equalTo(SuggestMode.MISSING)); assertThat(SuggestMode.resolve("popular"), equalTo(SuggestMode.POPULAR)); assertThat(SuggestMode.resolve("always"), equalTo(SuggestMode.ALWAYS)); final String doesntExist = "doesnt_exist"; try { SuggestMode.resolve(doesntExist); fail("SuggestMode should not have an element " + doesntExist); } catch (IllegalArgumentException e) { } try { SuggestMode.resolve(null); fail("SuggestMode.resolve on a null value should throw an exception."); } catch (NullPointerException e) { assertThat(e.getMessage(), equalTo("Input string is null")); } } @Override public void testWriteTo() throws IOException { assertWriteToStream(SuggestMode.MISSING, 0); assertWriteToStream(SuggestMode.POPULAR, 1); assertWriteToStream(SuggestMode.ALWAYS, 2); } @Override public void testReadFrom() throws IOException { assertReadFromStream(0, SuggestMode.MISSING); assertReadFromStream(1, SuggestMode.POPULAR); assertReadFromStream(2, SuggestMode.ALWAYS); } }
apache-2.0
gwq5210/litlib
thirdparty/sources/protobuf/java/core/src/main/java/com/google/protobuf/MessageLiteOrBuilder.java
2717
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package com.google.protobuf;

/**
 * Base interface for methods common to {@link MessageLite}
 * and {@link MessageLite.Builder} to provide type equivalency.
 *
 * @author jonp@google.com (Jon Perlow)
 */
public interface MessageLiteOrBuilder {
  /**
   * Get an instance of the type with no fields set. Because no fields are set,
   * all getters for singular fields will return default values and repeated
   * fields will appear empty.
   * This may or may not be a singleton.  This differs from the
   * {@code getDefaultInstance()} method of generated message classes in that
   * this method is an abstract method of the {@code MessageLite} interface
   * whereas {@code getDefaultInstance()} is a static method of a specific
   * class.  They return the same thing.
   *
   * @return the default (all-fields-unset) instance of this message type
   */
  MessageLite getDefaultInstanceForType();

  /**
   * Returns true if all required fields in the message and all embedded
   * messages are set, false otherwise.
   *
   * <p>See also: {@link MessageOrBuilder#getInitializationErrorString()}
   */
  boolean isInitialized();
}
gpl-3.0
huntergdavis/json_resume
JSONResumeViewer/app/src/main/java/com/hunterdavis/jsonresumeviewer/IconDownloadTask.java
2380
package com.hunterdavis.jsonresumeviewer; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.AsyncTask; import android.view.View; import android.widget.ImageView; import com.squareup.okhttp.Cache; import com.squareup.okhttp.OkHttpClient; import com.squareup.okhttp.Request; import com.squareup.okhttp.Response; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.ref.WeakReference; import java.net.HttpURLConnection; import java.net.URL; /** * We have to do this instead of using picasso.. why? * -- because picasso won't render favico.ico files. ugh */ public class IconDownloadTask extends AsyncTask<String, Void, Bitmap> { private final WeakReference imageViewReference; public IconDownloadTask(ImageView imageView) { imageViewReference = new WeakReference(imageView); } @Override // Actual download method, run in the task thread protected Bitmap doInBackground(String... params) { // params comes from the execute() call: params[0] is the url. return getBitmapFromURL(params[0]); } @Override // Once the image is downloaded, associates it to the imageView protected void onPostExecute(Bitmap bitmap) { if (isCancelled()) { bitmap = null; } if (imageViewReference != null) { ImageView imageView = (ImageView) imageViewReference.get(); if (imageView != null) { if (bitmap != null) { imageView.setImageBitmap(bitmap); imageView.setVisibility(View.VISIBLE); } } } } public Bitmap getBitmapFromURL(String address) { try { URL url = new URL(address); Request request = new Request.Builder() .url(url) .build(); Response response = JsonResumeActivity.client.newCall(request).execute(); InputStream input = response.body().byteStream(); BitmapFactory.Options options = new BitmapFactory.Options(); Bitmap myBitmap = BitmapFactory.decodeStream(input, null, options); return myBitmap; } catch (IOException e) { e.printStackTrace(); return null; } } }
isc
io7m/jsycamore
com.io7m.jsycamore.api/src/main/java/com/io7m/jsycamore/api/components/SyButtonReadableType.java
1796
/*
 * Copyright © 2016 <code@io7m.com> http://io7m.com
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
 * IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package com.io7m.jsycamore.api.components;

import com.io7m.jsycamore.api.themes.SyThemeButtonType;

import java.util.Objects;
import java.util.Optional;
import java.util.function.BiFunction;

/**
 * The read-only type of buttons.
 */

public interface SyButtonReadableType extends SyComponentReadableType
{
  /**
   * @param <T> The precise type of theme
   *
   * @return The current theme for the button, if any has been set
   */

  <T extends SyThemeButtonType> Optional<T> theme();

  /**
   * @return The current state of the button
   */

  SyButtonState buttonState();

  /**
   * Buttons always dispatch to the {@code on_button} case, passing this
   * component; the other case functions are ignored. {@code on_button} must
   * not be null.
   */

  @Override
  default <A, B> B matchComponentReadable(
    final A context,
    final BiFunction<A, SyButtonReadableType, B> on_button,
    final BiFunction<A, SyPanelReadableType, B> on_panel,
    final BiFunction<A, SyLabelReadableType, B> on_label,
    final BiFunction<A, SyImageReadableType, B> on_image,
    final BiFunction<A, SyMeterReadableType, B> on_meter)
  {
    return Objects.requireNonNull(on_button, "Button").apply(context, this);
  }
}
isc
atomicint/aj8
server/src/main/java/org/apollo/game/msg/decoder/ThirdInventoryItemActionMessageDecoder.java
1099
package org.apollo.game.msg.decoder; import org.apollo.game.model.inter.Interfaces.InventoryAmountOption; import org.apollo.game.msg.MessageDecoder; import org.apollo.game.msg.annotate.DecodesMessage; import org.apollo.game.msg.impl.ItemActionMessage; import org.apollo.net.codec.game.DataTransformation; import org.apollo.net.codec.game.DataType; import org.apollo.net.codec.game.GamePacket; import org.apollo.net.codec.game.GamePacketReader; /** * A {@link MessageDecoder} for the {@link ItemActionMessage}. * * @author Graham */ @DecodesMessage(87) public final class ThirdInventoryItemActionMessageDecoder implements MessageDecoder<ItemActionMessage> { @Override public ItemActionMessage decode(GamePacket packet) { GamePacketReader reader = new GamePacketReader(packet); int id = (int) reader.getUnsigned(DataType.SHORT, DataTransformation.ADD); int interfaceId = (int) reader.getUnsigned(DataType.SHORT); int slot = (int) reader.getUnsigned(DataType.SHORT, DataTransformation.ADD); return new ItemActionMessage(InventoryAmountOption.OPTION_TEN, interfaceId, id, slot); } }
isc
badlogic/avian
classpath/java/lang/Integer.java
4672
/* Copyright (c) 2008, Avian Contributors Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. There is NO WARRANTY for this software. See license.txt for details. */ package java.lang; public final class Integer extends Number implements Comparable<Integer> { public static final Class TYPE = Class.forCanonicalName("I"); public static final int MIN_VALUE = 0x80000000; public static final int MAX_VALUE = 0x7FFFFFFF; private final int value; public Integer(int value) { this.value = value; } public Integer(String s) { this.value = parseInt(s); } public static Integer valueOf(int value) { return new Integer(value); } public static Integer valueOf(String value) { return valueOf(parseInt(value)); } public boolean equals(Object o) { return o instanceof Integer && ((Integer) o).value == value; } public int hashCode() { return value; } public int compareTo(Integer other) { return value - other.value; } public String toString() { return toString(value); } public static String toString(int v, int radix) { return Long.toString(v, radix); } public static String toString(int v) { return toString(v, 10); } public static String toHexString(int v) { return Long.toString(((long) v) & 0xFFFFFFFFL, 16); } public static String toBinaryString(int v) { return Long.toString(((long) v) & 0xFFFFFFFFL, 2); } public byte byteValue() { return (byte) value; } public short shortValue() { return (short) value; } public int intValue() { return value; } public long longValue() { return value; } public float floatValue() { return (float) value; } public double doubleValue() { return (double) value; } public static int parseInt(String s) { return parseInt(s, 10); } public static int parseInt(String s, int radix) { return (int) Long.parseLong(s, radix); } /** * Reverses the order of the bytes of the specified integer. 
* * @param i * the integer value for which to reverse the byte order. * @return the reversed value. * @since 1.5 */ public static int reverseBytes(int i) { // Hacker's Delight 7-1, with minor tweak from Veldmeijer // http://graphics.stanford.edu/~seander/bithacks.html i = ((i >>> 8) & 0x00FF00FF) | ((i & 0x00FF00FF) << 8); return ( i >>> 16 ) | ( i << 16); } /** * Determines the number of leading zeros in the specified integer prior to * the {@link #highestOneBit(int) highest one bit}. * * @param i * the integer to examine. * @return the number of leading zeros in {@code i}. * @since 1.5 */ public static int numberOfLeadingZeros(int i) { // Hacker's Delight, Figure 5-6 if (i <= 0) { return (~i >> 26) & 32; } int n = 1; if (i >> 16 == 0) { n += 16; i <<= 16; } if (i >> 24 == 0) { n += 8; i <<= 8; } if (i >> 28 == 0) { n += 4; i <<= 4; } if (i >> 30 == 0) { n += 2; i <<= 2; } return n - (i >>> 31); } /** * Table for Seal's algorithm for Number of Trailing Zeros. Hacker's Delight * online, Figure 5-18 (http://www.hackersdelight.org/revisions.pdf) * The entries whose value is -1 are never referenced. */ private static final byte[] NTZ_TABLE = { 32, 0, 1, 12, 2, 6, -1, 13, 3, -1, 7, -1, -1, -1, -1, 14, 10, 4, -1, -1, 8, -1, -1, 25, -1, -1, -1, -1, -1, 21, 27, 15, 31, 11, 5, -1, -1, -1, -1, -1, 9, -1, -1, 24, -1, -1, 20, 26, 30, -1, -1, -1, -1, 23, -1, 19, 29, -1, 22, 18, 28, 17, 16, -1 }; /** * Determines the number of trailing zeros in the specified integer after * the {@link #lowestOneBit(int) lowest one bit}. * * @param i * the integer to examine. * @return the number of trailing zeros in {@code i}. 
* @since 1.5 */ public static int numberOfTrailingZeros(int i) { // Seal's algorithm - Hacker's Delight 5-18 // BEGIN android-changed - Harmony version should be one-liner in comment below i &= -i; i = (i << 4) + i; // x *= 17 i = (i << 6) + i; // x *= 65 i = (i << 16) - i; // x *= 65535 return NTZ_TABLE[i >>> 26]; // NTZ_TABLE[((i & -i) * 0x0450FBAF) >>> 26] // END android-changed } }
isc
pauldoo/scratch
Tuner/branches/0.2/src/AudioInput.java
1228
/*
    Tuner, a simple application to help you tune your musical instrument.
    Copyright (c) 2003, 2004, 2005, 2012 Paul Richards <paul.richards@gmail.com>

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
    SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/

import java.io.*;

/**
 * Reads 16-bit signed samples from a stream while applying a slowly drifting
 * correction offset: after every positive corrected sample the offset is
 * decremented, after every negative one it is incremented, nudging the
 * stream's long-term bias back towards zero.
 */
public class AudioInput
{
    // Source of big-endian 16-bit samples; final — the stream is fixed at
    // construction (the original left this field mutable for no reason).
    private final DataInputStream dis;

    // Running correction added to each raw sample; drifts by +/-1 per sample.
    private int offset;

    public AudioInput(DataInputStream dis)
    {
        this.dis = dis;
    }

    /**
     * Reads the next raw sample and returns it with the current offset applied.
     *
     * @return the offset-corrected sample
     * @throws IOException if the underlying stream fails or is exhausted
     */
    public int readSample() throws IOException
    {
        final int sample = offset + dis.readShort();
        if (sample > 0) {
            offset--;
        } else if (sample < 0) {
            offset++;
        }
        return sample;
    }
}
isc
MikeBull94/svg-stockpile
api/src/test/java/com/mikebull94/stockpile/svg/processor/XmlEventProcessorTester.java
2996
package com.mikebull94.stockpile.svg.processor; import com.google.common.base.Preconditions; import com.mikebull94.stockpile.xml.XmlEventProcessor; import org.mockito.Mock; import javax.xml.stream.events.EndElement; import javax.xml.stream.events.StartElement; import javax.xml.stream.events.XMLEvent; import java.util.List; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.Consumer; import java.util.function.Predicate; import static org.junit.Assert.assertTrue; /** * Provides a behaviour driven development framework for testing {@link XmlEventProcessor}s. */ public final class XmlEventProcessorTester { private static final String PREDICATE_FAILURE = "Predicate.test returned false"; public static XmlEventProcessorTester test(XmlEventProcessor testee) { return new XmlEventProcessorTester(testee); } @Mock private XMLEvent event; @Mock private StartElement startElement; @Mock private EndElement endElement; private boolean eventAccepted = true; private List<XMLEvent> processedResults; private final XmlEventProcessor testee; private XmlEventProcessorTester(XmlEventProcessor testee) { this.testee = Preconditions.checkNotNull(testee); } public XmlEventProcessorTester given(BiConsumer<XmlEventProcessorTester, XMLEvent> consumer) { consumer.accept(this, event); return this; } public XmlEventProcessorTester given(Consumer<XMLEvent> consumer) { consumer.accept(event); return this; } public XmlEventProcessorTester when(BiFunction<XmlEventProcessor, XMLEvent, ? 
extends List<XMLEvent>> function) { processedResults = function.apply(testee, event); return this; } public XmlEventProcessorTester when(BiPredicate<XmlEventProcessor, XMLEvent> predicate) { eventAccepted &= predicate.test(testee, event); return this; } public XmlEventProcessorTester then(BiPredicate<XmlEventProcessorTester, XMLEvent> predicate) { return then(PREDICATE_FAILURE, predicate); } public XmlEventProcessorTester then(String message, BiPredicate<XmlEventProcessorTester, XMLEvent> predicate) { assertTrue(message, predicate.test(this, event)); return this; } public XmlEventProcessorTester then(Predicate<XmlEventProcessorTester> predicate) { return then(PREDICATE_FAILURE, predicate); } public XmlEventProcessorTester then(String message, Predicate<XmlEventProcessorTester> predicate) { assertTrue(message, predicate.test(this)); return this; } public EndElement getEndElement() { return endElement; } public StartElement getStartElement() { return startElement; } public boolean isEventAccepted() { return eventAccepted; } public XMLEvent getProcessedResult(int index) { return processedResults.get(index); } public boolean processResultContains(XMLEvent event) { for (XMLEvent processed : processedResults) { if (processed.toString().equals(event.toString())) { return true; } } return false; } }
isc
PaulNoth/hackerrank
practice/algorithms/sorting/correctness_and_the_loop_invariant/Solution.java
1125
import java.io.*; import java.util.*; public class Solution { public static void insertionSort(int[] ar){ for(int i = 1; i < ar.length; i++) { int num = ar[i]; int swapPos = i - 1; int l = i - 1; boolean isSwap = false; while(l >= 0) { if(num < ar[l]) { isSwap = true; swapPos = l; l--; } else { break; } } if(isSwap) { for(int j = i - 1; j >= swapPos ; j--) { ar[j + 1] = ar[j]; } ar[swapPos] = num; } } printArray(ar); } static void printArray(int[] ar) { for(int n: ar){ System.out.print(n+" "); } } public static void main(String[] args) { Scanner in = new Scanner(System.in); int n = in.nextInt(); int[] ar = new int[n]; for(int i=0;i<n;i++){ ar[i]=in.nextInt(); } insertionSort(ar); } }
mit
tiffit/TaleCraft
src/main/java/talecraft/voxelator/shapes/VXShapeSphere.java
2552
package talecraft.voxelator.shapes; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.MathHelper; import talecraft.util.BlockRegion; import talecraft.util.MutableBlockPos; import talecraft.voxelator.BrushParameter; import talecraft.voxelator.CachedWorldDiff; import talecraft.voxelator.VXShape; import talecraft.voxelator.Voxelator.ShapeFactory; import talecraft.voxelator.params.BooleanBrushParameter; import talecraft.voxelator.params.FloatBrushParameter; public class VXShapeSphere extends VXShape { private static final BrushParameter[] PARAMS = new BrushParameter[]{ new FloatBrushParameter("radius", 1, 64, 5), new BooleanBrushParameter("hollow", false) }; public static ShapeFactory FACTORY = new ShapeFactory() { @Override public String getName() { return "sphere"; } @Override public VXShape newShape(NBTTagCompound shapeData, BlockPos origin) { int px = shapeData.getInteger("position.x") + origin.getX(); int py = shapeData.getInteger("position.y") + origin.getY(); int pz = shapeData.getInteger("position.z") + origin.getZ(); float r = shapeData.getFloat("radius"); boolean hollow = shapeData.getBoolean("hollow"); return new VXShapeSphere(new BlockPos(px, py, pz), r, hollow); } @Override public NBTTagCompound newShape(String[] parameters) { if(parameters.length == 1) { NBTTagCompound shapeData = new NBTTagCompound(); shapeData.setString("type", getName()); shapeData.setFloat("radius", Float.parseFloat(parameters[0])); shapeData.setBoolean("hollow", Boolean.parseBoolean(parameters[1])); return shapeData; } return null; } @Override public BrushParameter[] getParameters() { return PARAMS; } }; private final BlockPos position; private final float radius; private final float radiusSquared; private final boolean hollow; public VXShapeSphere(BlockPos position, float radius, boolean hollow) { this.position = position; this.radius = radius; this.radiusSquared = radius*radius; this.hollow = hollow; } @Override public 
BlockPos getCenter() { return position; } @Override public BlockRegion getRegion() { return new BlockRegion(position, MathHelper.ceil(radius)); } @Override public boolean test(BlockPos pos, BlockPos center, MutableBlockPos offset, CachedWorldDiff fworld) { return position.distanceSq(pos) < radiusSquared && (hollow ? !(new VXShapeSphere(position, radius-1, false)).test(pos, center, offset, fworld) : true); } }
mit
jkeeler16/portfolio-manager-java
src/com/es/manager/property/design/OpenFootageType.java
327
package com.es.manager.property.design;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JAXB-mapped root element "openFootage" — presumably the square footage of a
 * property's open space (TODO confirm against the XML schema). All data fields
 * are inherited from {@link OptionalFloorAreaType}; FIELD access tells JAXB to
 * bind those inherited fields directly rather than via getters.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlRootElement(name="openFootage")
public class OpenFootageType extends OptionalFloorAreaType {

}
mit
Prototik/HoloEverywhere-Addon-Facebook-SDK
src/com/facebook/widget/WebDialog.java
28527
/** * Copyright 2010-present Facebook. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.widget; import android.annotation.SuppressLint; import org.holoeverywhere.app.Dialog; import org.holoeverywhere.app.ProgressDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.Color; import android.graphics.drawable.Drawable; import android.net.Uri; import android.net.http.SslError; import android.os.Bundle; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.webkit.SslErrorHandler; import android.webkit.WebView; import android.webkit.WebViewClient; import org.holoeverywhere.widget.FrameLayout; import android.widget.ImageView; import org.holoeverywhere.widget.LinearLayout; import com.facebook.*; import com.facebook.android.*; import com.facebook.internal.Logger; import com.facebook.internal.ServerProtocol; import com.facebook.internal.Utility; import com.facebook.internal.Validate; /** * This class provides a mechanism for displaying Facebook Web dialogs inside a Dialog. Helper * methods are provided to construct commonly-used dialogs, or a caller can specify arbitrary * parameters to call other dialogs. 
*/ public class WebDialog extends Dialog { private static final String LOG_TAG = Logger.LOG_TAG_BASE + "WebDialog"; private static final String DISPLAY_TOUCH = "touch"; private static final String USER_AGENT = "user_agent"; static final String REDIRECT_URI = "fbconnect://success"; static final String CANCEL_URI = "fbconnect://cancel"; static final boolean DISABLE_SSL_CHECK_FOR_TESTING = false; public static final int DEFAULT_THEME = android.R.style.Theme_Translucent_NoTitleBar; private String url; private OnCompleteListener onCompleteListener; private WebView webView; private ProgressDialog spinner; private ImageView crossImageView; private FrameLayout contentFrameLayout; private boolean listenerCalled = false; private boolean isDetached = false; /** * Interface that implements a listener to be called when the user's interaction with the * dialog completes, whether because the dialog finished successfully, or it was cancelled, * or an error was encountered. */ public interface OnCompleteListener { /** * Called when the dialog completes. * * @param values on success, contains the values returned by the dialog * @param error on an error, contains an exception describing the error */ void onComplete(Bundle values, FacebookException error); } /** * Constructor which can be used to display a dialog with an already-constructed URL. * * @param context the context to use to display the dialog * @param url the URL of the Web Dialog to display; no validation is done on this URL, but it should * be a valid URL pointing to a Facebook Web Dialog */ public WebDialog(Context context, String url) { this(context, url, DEFAULT_THEME); } /** * Constructor which can be used to display a dialog with an already-constructed URL and a custom theme. 
 * @param context the context to use to display the dialog
 * @param url     the URL of the Web Dialog to display; no validation is done on this URL,
 *                but it should be a valid URL pointing to a Facebook Web Dialog
 * @param theme   identifier of a theme to pass to the Dialog class
 */
public WebDialog(Context context, String url, int theme) {
    super(context, theme);
    this.url = url;
}

/**
 * Constructor which will construct the URL of the Web dialog based on the specified parameters.
 *
 * @param context    the context to use to display the dialog
 * @param action     the portion of the dialog URL following "dialog/"
 * @param parameters parameters which will be included as part of the URL
 * @param theme      identifier of a theme to pass to the Dialog class
 * @param listener   the listener to notify, or null if no notification is desired
 */
public WebDialog(Context context, String action, Bundle parameters, int theme, OnCompleteListener listener) {
    super(context, theme);

    if (parameters == null) {
        parameters = new Bundle();
    }

    // Request the mobile ("touch") rendering of the dialog and flag the request
    // as a user-agent flow.
    parameters.putString(ServerProtocol.DIALOG_PARAM_DISPLAY, DISPLAY_TOUCH);
    parameters.putString(ServerProtocol.DIALOG_PARAM_TYPE, USER_AGENT);

    Uri uri = Utility.buildUri(ServerProtocol.DIALOG_AUTHORITY, ServerProtocol.DIALOG_PATH + action, parameters);
    this.url = uri.toString();
    onCompleteListener = listener;
}

/**
 * Sets the listener which will be notified when the dialog finishes.
 *
 * @param listener the listener to notify, or null if no notification is desired
 */
public void setOnCompleteListener(OnCompleteListener listener) {
    onCompleteListener = listener;
}

/**
 * Gets the listener which will be notified when the dialog finishes.
 *
 * @return the listener, or null if none has been specified
 */
public OnCompleteListener getOnCompleteListener() {
    return onCompleteListener;
}

@Override
public void dismiss() {
    // Stop any in-flight page load before tearing the dialog down.
    if (webView != null) {
        webView.stopLoading();
    }
    // Only dismiss the spinner and the dialog while still attached to a window;
    // dismissing after detach would throw.
    if (!isDetached) {
        if (spinner.isShowing()) {
            spinner.dismiss();
        }
        super.dismiss();
    }
}

@Override
public void onDetachedFromWindow() {
    isDetached = true;
    super.onDetachedFromWindow();
}

@Override
public void onAttachedToWindow() {
    isDetached = false;
    super.onAttachedToWindow();
}

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Cancelling the dialog itself (e.g. back button) is reported to the listener.
    setOnCancelListener(new OnCancelListener() {
        @Override
        public void onCancel(DialogInterface dialogInterface) {
            sendCancelToListener();
        }
    });

    spinner = new ProgressDialog(getContext());
    spinner.requestWindowFeature(Window.FEATURE_NO_TITLE);
    spinner.setMessage(getContext().getString(R.string.com_facebook_loading));
    // Cancelling the spinner cancels the whole dialog as well.
    spinner.setOnCancelListener(new OnCancelListener() {
        @Override
        public void onCancel(DialogInterface dialogInterface) {
            sendCancelToListener();
            WebDialog.this.dismiss();
        }
    });

    requestWindowFeature(Window.FEATURE_NO_TITLE);
    contentFrameLayout = new FrameLayout(getContext());

    /* Create the 'x' image, but don't add it to the contentFrameLayout layout yet;
     * at this point, we only need to know its drawable width and height
     * to place the webview.
     */
    createCrossImage();

    /* Now we know the 'x' drawable width and height,
     * lay out the webview and add it to the contentFrameLayout layout.
     */
    int crossWidth = crossImageView.getDrawable().getIntrinsicWidth();
    setUpWebView(crossWidth / 2);

    /* Finally add the 'x' image to the contentFrameLayout layout and
     * add contentFrameLayout to the Dialog view.
     */
    contentFrameLayout.addView(crossImageView, new ViewGroup.LayoutParams(
            ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
    addContentView(contentFrameLayout,
            new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
}

// Notifies the listener of success at most once per dialog lifetime.
private void sendSuccessToListener(Bundle values) {
    if (onCompleteListener != null && !listenerCalled) {
        listenerCalled = true;
        onCompleteListener.onComplete(values, null);
    }
}

// Notifies the listener of failure at most once, wrapping non-Facebook
// throwables in a FacebookException so the callback signature stays uniform.
private void sendErrorToListener(Throwable error) {
    if (onCompleteListener != null && !listenerCalled) {
        listenerCalled = true;
        FacebookException facebookException = null;
        if (error instanceof FacebookException) {
            facebookException = (FacebookException) error;
        } else {
            facebookException = new FacebookException(error);
        }
        onCompleteListener.onComplete(null, facebookException);
    }
}

// Cancellation is delivered as a FacebookOperationCanceledException error.
private void sendCancelToListener() {
    sendErrorToListener(new FacebookOperationCanceledException());
}

// Builds the 'x' close button; it stays invisible until the page has loaded.
private void createCrossImage() {
    crossImageView = new ImageView(getContext());
    // Dismiss the dialog when the user clicks on the 'x'.
    crossImageView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            sendCancelToListener();
            WebDialog.this.dismiss();
        }
    });
    Drawable crossDrawable = getContext().getResources().getDrawable(R.drawable.com_facebook_close);
    crossImageView.setImageDrawable(crossDrawable);
    /* The 'x' should not be visible while the webview is loading;
     * make it visible only after the webview has fully loaded.
     */
    crossImageView.setVisibility(View.INVISIBLE);
}

// Creates and configures the WebView that renders the dialog URL.
// The margin insets the container so the 'x' overlaps the webview corner.
@SuppressLint("SetJavaScriptEnabled")
private void setUpWebView(int margin) {
    LinearLayout webViewContainer = new LinearLayout(getContext());
    webView = new WebView(getContext());
    webView.setVerticalScrollBarEnabled(false);
    webView.setHorizontalScrollBarEnabled(false);
    webView.setWebViewClient(new DialogWebViewClient());
    webView.getSettings().setJavaScriptEnabled(true);
    webView.loadUrl(url);
    webView.setLayoutParams(new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
            ViewGroup.LayoutParams.MATCH_PARENT));
    webView.setVisibility(View.INVISIBLE);
    webView.getSettings().setSavePassword(false);

    webViewContainer.setPadding(margin, margin, margin, margin);
    webViewContainer.addView(webView);
    contentFrameLayout.addView(webViewContainer);
}

// Routes dialog navigation: intercepts the redirect/cancel URIs, forwards
// errors to the listener, and toggles the spinner / 'x' around page loads.
private class DialogWebViewClient extends WebViewClient {
    @Override
    @SuppressWarnings("deprecation")
    public boolean shouldOverrideUrlLoading(WebView view, String url) {
        Utility.logd(LOG_TAG, "Redirect URL: " + url);
        if (url.startsWith(WebDialog.REDIRECT_URI)) {
            // The dialog finished: the result (success or error) is encoded
            // in the redirect URL's query/fragment parameters.
            Bundle values = Util.parseUrl(url);

            String error = values.getString("error");
            if (error == null) {
                error = values.getString("error_type");
            }

            String errorMessage = values.getString("error_msg");
            if (errorMessage == null) {
                errorMessage = values.getString("error_description");
            }

            String errorCodeString = values.getString("error_code");
            int errorCode = FacebookRequestError.INVALID_ERROR_CODE;
            if (!Utility.isNullOrEmpty(errorCodeString)) {
                try {
                    errorCode = Integer.parseInt(errorCodeString);
                } catch (NumberFormatException ex) {
                    errorCode = FacebookRequestError.INVALID_ERROR_CODE;
                }
            }

            if (Utility.isNullOrEmpty(error) && Utility
                    .isNullOrEmpty(errorMessage) && errorCode == FacebookRequestError.INVALID_ERROR_CODE) {
                // No error markers at all: report success with the parsed values.
                sendSuccessToListener(values);
            } else if (error != null && (error.equals("access_denied") ||
                    error.equals("OAuthAccessDeniedException"))) {
                // User declined: treat as cancellation, not failure.
                sendCancelToListener();
            } else {
                FacebookRequestError requestError = new FacebookRequestError(errorCode, error, errorMessage);
                sendErrorToListener(new FacebookServiceException(requestError, errorMessage));
            }

            WebDialog.this.dismiss();
            return true;
        } else if (url.startsWith(WebDialog.CANCEL_URI)) {
            sendCancelToListener();
            WebDialog.this.dismiss();
            return true;
        } else if (url.contains(DISPLAY_TOUCH)) {
            // Still inside the dialog flow: let the webview load it.
            return false;
        }
        // launch non-dialog URLs in a full browser
        getContext().startActivity(
                new Intent(Intent.ACTION_VIEW, Uri.parse(url)));
        return true;
    }

    @Override
    public void onReceivedError(WebView view, int errorCode,
            String description, String failingUrl) {
        super.onReceivedError(view, errorCode, description, failingUrl);
        sendErrorToListener(new FacebookDialogException(description, errorCode, failingUrl));
        WebDialog.this.dismiss();
    }

    @Override
    public void onReceivedSslError(WebView view, SslErrorHandler handler, SslError error) {
        if (DISABLE_SSL_CHECK_FOR_TESTING) {
            handler.proceed();
        } else {
            // SSL failures abort the dialog; never proceed on an untrusted connection.
            super.onReceivedSslError(view, handler, error);
            sendErrorToListener(new FacebookDialogException(null, ERROR_FAILED_SSL_HANDSHAKE, null));
            handler.cancel();
            WebDialog.this.dismiss();
        }
    }

    @Override
    public void onPageStarted(WebView view, String url, Bitmap favicon) {
        Utility.logd(LOG_TAG, "Webview loading URL: " + url);
        super.onPageStarted(view, url, favicon);
        if (!isDetached) {
            spinner.show();
        }
    }

    @Override
    public void onPageFinished(WebView view, String url) {
        super.onPageFinished(view, url);
        if (!isDetached) {
            spinner.dismiss();
        }
        /*
         * Once the web view is fully loaded, set the contentFrameLayout background
         * to be transparent and make the 'x' image visible.
         */
        contentFrameLayout.setBackgroundColor(Color.TRANSPARENT);
        webView.setVisibility(View.VISIBLE);
        crossImageView.setVisibility(View.VISIBLE);
    }
}

// Shared state and logic for the type-safe builder hierarchy below.
// CONCRETE is the self-type so setters can return the concrete builder.
private static class BuilderBase<CONCRETE extends BuilderBase<?>> {
    private Context context;
    private Session session;
    private String applicationId;
    private String action;
    private int theme = DEFAULT_THEME;
    private OnCompleteListener listener;
    private Bundle parameters;

    // Builder for an authenticated user; the session must be open.
    protected BuilderBase(Context context, Session session, String action, Bundle parameters) {
        Validate.notNull(session, "session");
        if (!session.isOpened()) {
            throw new FacebookException("Attempted to use a Session that was not open.");
        }
        this.session = session;

        finishInit(context, action, parameters);
    }

    // Builder for an unauthenticated flow keyed only by application ID.
    protected BuilderBase(Context context, String applicationId, String action, Bundle parameters) {
        Validate.notNullOrEmpty(applicationId, "applicationId");
        this.applicationId = applicationId;

        finishInit(context, action, parameters);
    }

    /**
     * Sets a theme identifier which will be passed to the underlying Dialog.
     *
     * @param theme a theme identifier which will be passed to the Dialog class
     * @return the builder
     */
    public CONCRETE setTheme(int theme) {
        this.theme = theme;
        @SuppressWarnings("unchecked")
        CONCRETE result = (CONCRETE) this;
        return result;
    }

    /**
     * Sets the listener which will be notified when the dialog finishes.
     *
     * @param listener the listener to notify, or null if no notification is desired
     * @return the builder
     */
    public CONCRETE setOnCompleteListener(OnCompleteListener listener) {
        this.listener = listener;
        @SuppressWarnings("unchecked")
        CONCRETE result = (CONCRETE) this;
        return result;
    }

    /**
     * Constructs a WebDialog using the parameters provided. The dialog is not shown,
     * but is ready to be shown by calling Dialog.show().
     *
     * @return the WebDialog
     */
    public WebDialog build() {
        // Prefer session credentials when available; otherwise fall back to
        // the bare application ID.
        if (session != null && session.isOpened()) {
            parameters.putString(ServerProtocol.DIALOG_PARAM_APP_ID, session.getApplicationId());
            parameters.putString(ServerProtocol.DIALOG_PARAM_ACCESS_TOKEN, session.getAccessToken());
        } else {
            parameters.putString(ServerProtocol.DIALOG_PARAM_APP_ID, applicationId);
        }

        // Supply the default redirect URI unless the caller provided one.
        if (!parameters.containsKey(ServerProtocol.DIALOG_PARAM_REDIRECT_URI)) {
            parameters.putString(ServerProtocol.DIALOG_PARAM_REDIRECT_URI, REDIRECT_URI);
        }

        return new WebDialog(context, action, parameters, theme, listener);
    }

    protected String getApplicationId() {
        return applicationId;
    }

    protected Context getContext() {
        return context;
    }

    protected int getTheme() {
        return theme;
    }

    protected Bundle getParameters() {
        return parameters;
    }

    protected WebDialog.OnCompleteListener getListener() {
        return listener;
    }

    // Common constructor tail: stores context/action and guarantees a
    // non-null parameters Bundle.
    private void finishInit(Context context, String action, Bundle parameters) {
        this.context = context;
        this.action = action;
        if (parameters != null) {
            this.parameters = parameters;
        } else {
            this.parameters = new Bundle();
        }
    }
}

/**
 * Provides a builder that allows construction of an arbitary Facebook web dialog.
 */
public static class Builder extends BuilderBase<Builder> {
    /**
     * Constructor that builds a dialog for an authenticated user.
     *
     * @param context    the Context within which the dialog will be shown.
     * @param session    the Session representing an authenticating user to use for
     *                   showing the dialog; must not be null, and must be opened.
     * @param action     the portion of the dialog URL following www.facebook.com/dialog/.
     *                   See https://developers.facebook.com/docs/reference/dialogs/ for details.
     * @param parameters a Bundle containing parameters to pass as part of the URL.
     */
    public Builder(Context context, Session session, String action, Bundle parameters) {
        super(context, session, action, parameters);
    }

    /**
     * Constructor that builds a dialog without an authenticated user.
     *
     * @param context       the Context within which the dialog will be shown.
     * @param applicationId the application ID to be included in the dialog URL.
     * @param action        the portion of the dialog URL following www.facebook.com/dialog/.
     *                      See https://developers.facebook.com/docs/reference/dialogs/ for details.
     * @param parameters    a Bundle containing parameters to pass as part of the URL.
     */
    public Builder(Context context, String applicationId, String action, Bundle parameters) {
        super(context, applicationId, action, parameters);
    }
}

/**
 * Provides a builder that allows construction of the parameters for showing
 * the Feed Dialog (https://developers.facebook.com/docs/reference/dialogs/feed/).
 */
public static class FeedDialogBuilder extends BuilderBase<FeedDialogBuilder> {
    private static final String FEED_DIALOG = "feed";
    private static final String FROM_PARAM = "from";
    private static final String TO_PARAM = "to";
    private static final String LINK_PARAM = "link";
    private static final String PICTURE_PARAM = "picture";
    private static final String SOURCE_PARAM = "source";
    private static final String NAME_PARAM = "name";
    private static final String CAPTION_PARAM = "caption";
    private static final String DESCRIPTION_PARAM = "description";

    /**
     * Constructor.
     *
     * @param context the Context within which the dialog will be shown.
     * @param session the Session representing an authenticating user to use for
     *                showing the dialog; must not be null, and must be opened.
     */
    public FeedDialogBuilder(Context context, Session session) {
        super(context, session, FEED_DIALOG, null);
    }

    /**
     * Constructor.
     *
     * @param context    the Context within which the dialog will be shown.
     * @param parameters a Bundle containing parameters to pass as part of the
     *                   dialog URL. No validation is done on these parameters; it is
     *                   the caller's responsibility to ensure they are valid.
     * @param session    the Session representing an authenticating user to use for
     *                   showing the dialog; must not be null, and must be opened.
     */
    public FeedDialogBuilder(Context context, Session session, Bundle parameters) {
        super(context, session, FEED_DIALOG, parameters);
    }

    /**
     * Sets the ID of the profile that is posting to Facebook. If none is specified,
     * the default is "me". This profile must be either the authenticated user or a
     * Page that the user is an administrator of.
     *
     * @param id Facebook ID of the profile to post from
     * @return the builder
     */
    public FeedDialogBuilder setFrom(String id) {
        getParameters().putString(FROM_PARAM, id);
        return this;
    }

    /**
     * Sets the ID of the profile that the story will be published to. If not specified, it
     * will default to the same profile that the story is being published from.
     *
     * @param id Facebook ID of the profile to post to
     * @return the builder
     */
    public FeedDialogBuilder setTo(String id) {
        getParameters().putString(TO_PARAM, id);
        return this;
    }

    /**
     * Sets the URL of a link to be shared.
     *
     * @param link the URL
     * @return the builder
     */
    public FeedDialogBuilder setLink(String link) {
        getParameters().putString(LINK_PARAM, link);
        return this;
    }

    /**
     * Sets the URL of a picture to be shared.
     *
     * @param picture the URL of the picture
     * @return the builder
     */
    public FeedDialogBuilder setPicture(String picture) {
        getParameters().putString(PICTURE_PARAM, picture);
        return this;
    }

    /**
     * Sets the URL of a media file attached to this post. If this is set, any picture
     * set via setPicture will be ignored.
     *
     * @param source the URL of the media file
     * @return the builder
     */
    public FeedDialogBuilder setSource(String source) {
        getParameters().putString(SOURCE_PARAM, source);
        return this;
    }

    /**
     * Sets the name of the item being shared.
     *
     * @param name the name
     * @return the builder
     */
    public FeedDialogBuilder setName(String name) {
        getParameters().putString(NAME_PARAM, name);
        return this;
    }

    /**
     * Sets the caption to be displayed.
     *
     * @param caption the caption
     * @return the builder
     */
    public FeedDialogBuilder setCaption(String caption) {
        getParameters().putString(CAPTION_PARAM, caption);
        return this;
    }

    /**
     * Sets the description to be displayed.
     *
     * @param description the description
     * @return the builder
     */
    public FeedDialogBuilder setDescription(String description) {
        getParameters().putString(DESCRIPTION_PARAM, description);
        return this;
    }
}

/**
 * Provides a builder that allows construction of the parameters for showing
 * the Requests Dialog (https://developers.facebook.com/docs/reference/dialogs/requests/).
 */
public static class RequestsDialogBuilder extends BuilderBase<RequestsDialogBuilder> {
    private static final String APPREQUESTS_DIALOG = "apprequests";
    private static final String MESSAGE_PARAM = "message";
    private static final String TO_PARAM = "to";
    private static final String DATA_PARAM = "data";
    private static final String TITLE_PARAM = "title";

    /**
     * Constructor.
     *
     * @param context the Context within which the dialog will be shown.
     * @param session the Session representing an authenticating user to use for
     *                showing the dialog; must not be null, and must be opened.
     */
    public RequestsDialogBuilder(Context context, Session session) {
        super(context, session, APPREQUESTS_DIALOG, null);
    }

    /**
     * Constructor.
     *
     * @param context    the Context within which the dialog will be shown.
     * @param parameters a Bundle containing parameters to pass as part of the
     *                   dialog URL. No validation is done on these parameters; it is
     *                   the caller's responsibility to ensure they are valid.
     * @param session    the Session representing an authenticating user to use for
     *                   showing the dialog; must not be null, and must be opened.
     */
    public RequestsDialogBuilder(Context context, Session session, Bundle parameters) {
        super(context, session, APPREQUESTS_DIALOG, parameters);
    }

    /**
     * Sets the string users receiving the request will see. The maximum length
     * is 60 characters.
     *
     * @param message the message
     * @return the builder
     */
    public RequestsDialogBuilder setMessage(String message) {
        getParameters().putString(MESSAGE_PARAM, message);
        return this;
    }

    /**
     * Sets the user ID or user name the request will be sent to. If this is not
     * specified, a friend selector will be displayed and the user can select up
     * to 50 friends.
     *
     * @param id the id or user name to send the request to
     * @return the builder
     */
    public RequestsDialogBuilder setTo(String id) {
        getParameters().putString(TO_PARAM, id);
        return this;
    }

    /**
     * Sets optional data which can be used for tracking; maximum length is 255
     * characters.
     *
     * @param data the data
     * @return the builder
     */
    public RequestsDialogBuilder setData(String data) {
        getParameters().putString(DATA_PARAM, data);
        return this;
    }

    /**
     * Sets an optional title for the dialog; maximum length is 50 characters.
     *
     * @param title the title
     * @return the builder
     */
    public RequestsDialogBuilder setTitle(String title) {
        getParameters().putString(TITLE_PARAM, title);
        return this;
    }
}
}
mit
imjacobclark/SimplePing
src/main/java/com/uk/jacob/SimplePingApplication.java
312
package com.uk.jacob;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Entry point for the SimplePing service.
 *
 * <p>{@code @SpringBootApplication} enables component scanning, auto-configuration
 * and property support for this package and its sub-packages.</p>
 */
@SpringBootApplication
public class SimplePingApplication {

    /**
     * Boots the embedded Spring container and starts the application.
     *
     * @param args command-line arguments, forwarded to Spring
     */
    public static void main(String[] args) {
        SpringApplication.run(SimplePingApplication.class, args);
    }
}
mit
DirkyJerky/Study-J1
src/26/Main.java
473
import java.time.Duration;

/**
 * Command-line utility that converts an hours/minutes/seconds triple, given as
 * the three program arguments, into a total number of seconds.
 *
 * <p>Usage: {@code java Main <hours> <minutes> <seconds>}</p>
 */
public class Main {

    /**
     * Parses the three arguments and prints {@code "Seconds: <total>"}.
     * Exits with status 1 (via {@link #die()}) when arguments are missing
     * or non-numeric — previously a non-numeric argument crashed with an
     * uncaught {@link NumberFormatException} stack trace.
     *
     * @param args hours, minutes and seconds, in that order
     */
    public static void main(String[] args) throws Exception {
        if (args.length < 3) {
            die();
        }

        try {
            long totalSeconds = toSeconds(
                    Long.parseLong(args[0]),
                    Long.parseLong(args[1]),
                    Long.parseLong(args[2]));
            // Same output format as before: no trailing newline.
            System.out.printf("Seconds: %d", totalSeconds);
        } catch (NumberFormatException e) {
            // Report usage instead of dumping a stack trace on bad input.
            die();
        }
    }

    /**
     * Converts an hours/minutes/seconds triple into total seconds.
     * Components may be negative or exceed their usual range; they are summed as-is.
     *
     * @param hours   number of hours
     * @param minutes number of minutes
     * @param seconds number of seconds
     * @return the total duration expressed in seconds
     */
    static long toSeconds(long hours, long minutes, long seconds) {
        return Duration.ZERO
                .plusHours(hours)
                .plusMinutes(minutes)
                .plusSeconds(seconds)
                .getSeconds();
    }

    /** Prints usage to stderr and terminates the process with exit status 1. */
    public static void die() {
        System.err.println("Usage: % hours minutes seconds");
        System.exit(1);
    }
}
mit
Blackdread/filter-sort-jooq-api
src/main/java/org/blackdread/filtersortjooqapi/filter/FilteringJooq.java
9040
package org.blackdread.filtersortjooqapi.filter;

import com.google.common.collect.ImmutableList;
import org.apache.commons.lang3.StringUtils;
import org.blackdread.filtersortjooqapi.exception.FilteringApiException;
import org.jooq.Condition;
import org.jooq.impl.DSL;

import javax.validation.constraints.NotNull;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;

/**
 * <p>Interface to be implemented by repository that wants to provide filtering</p>
 * <p>It allows to be consistent through the app for filtering</p>
 * Created by Yoann CAPLAIN on 2017/8/25.
 */
public interface FilteringJooq {

    // TODO Idea for Filter chaining AND/OR -> so kind of strategy pattern -> default on is all are AND, if overridden then it follow the one given (done via default method and overrides), we pass the whole context -> all values that matched and are ready to be filtered
    // See Operator from jOOQ

    // Request keys that never map to a filter (pagination/sorting controls).
    static final List<String> DEFAULT_IGNORED_KEY_FOR_FILTERING = ImmutableList.of("sort", "page", "size");

    /**
     * @param requestParams Keys and values to filter on
     * @return Fully constructed condition chaining (with nested conditions if implementation has)
     * @throws FilteringApiException if any filter field given cannot be found (it should result in a 400 error message)
     */
    @NotNull
    default Condition buildConditions(final Map<String, String> requestParams) {
        final List<Condition> conditions = new ArrayList<>(requestParams.size());
        // final List<String> usedKeys = new ArrayList<>(requestParams.size());
        // Pre-size with the total number of keys declared across all filters.
        final List<String> usedKeys = new ArrayList<>(getFilterValues().stream()
            .mapToInt(FilterValue::size)
            .sum());
        for (Map.Entry<String, String> entry : requestParams.entrySet()) {
            // Skip pagination/sorting keys and keys already consumed by a
            // multi-key filter processed earlier in this loop.
            if (getIgnoredKeys().contains(entry.getKey()))
                continue;
            if (usedKeys.contains(entry.getKey()))
                continue;

            // Unknown filter key -> 400-style error for the caller.
            final FilterValue filterValue = getFilter(entry.getKey())
                .orElseThrow(() -> new FilteringApiException("No filter found with key (" +
                    entry.getKey() + ")"));

            // A filter may require several request keys to be present together.
            final List<String> filterValueKeys = filterValue.getKeyParser().keys();
            if (!requestParams.keySet().containsAll(filterValueKeys)) {
                if (filterValue.isThrowOnMissingKeysFound()) {
                    throw new FilteringApiException("Filter is expecting all keys to be present (" + String.join(",", filterValueKeys) + ")");
                } else {
                    continue;
                }
            }
            usedKeys.addAll(filterValueKeys);

            if (!filterValue.isConditionSupplier()) {
                // Collect the raw string value for each key of this filter.
                final ArrayList<String> valuesOfKeys = new ArrayList<>();
                for (String key : filterValueKeys) {
                    final Optional<String> value = getValue(requestParams, key);
                    if (!value.isPresent()) {
                        //TODO Deprecated
                        if (isSkipNotPresentValue()) {
                            valuesOfKeys.add("");
                            continue;
                        } else {
                            throw new FilteringApiException("Value is empty for filter key (" + entry.getKey() + ")");
                        }
                    }
                    valuesOfKeys.add(value.get());
                }

                // Apply each declared parser to its positional raw value.
                final Function<String, ?>[] parsers = filterValue.getKeyParser().getParsers();
                if (parsers.length != valuesOfKeys.size())
                    // cannot happen if API has no errors
                    throw new IllegalStateException("Values to parse and parser count should be identical");

                final List<Object> parsedValues = new ArrayList<>(valuesOfKeys.size());
                for (int i = 0; i < parsers.length; i++) {
                    parsedValues.add(parsers[i].apply(valuesOfKeys.get(i)));
                }
                conditions.add(filterValue.buildCondition(parsedValues));
                continue;
            }

            // Supplier-style filters build their condition without any value.
            if (filterValue.isConditionSupplier()) {
                conditions.add(filterValue.buildCondition());
                continue;
            }
            throw new IllegalStateException("Cannot reach");
        }
        // AND all conditions together; no filter at all means "match everything".
        return conditions.stream()
            .reduce(Condition::and)
            .orElse(DSL.trueCondition());
    }

    // not needed for now
    // default boolean hasAndNotEmpty(final Map<String, String> map, final String key) {
    //     return StringUtils.isNotBlank(map.get(key));
    // }

    /**
     * @param map Map to search in
     * @param key Key to look for
     * @return The value contained in the map if key is found
     */
    default Optional<String> getValue(final Map<String, String> map, final String key) {
        return Optional.ofNullable(map.get(key))
            .filter(StringUtils::isNotBlank);
    }

    /**
     * Get filter associated with the key
     *
     * @param key Key to search for
     * @return Filter associated with key or an empty optional if not found
     */
    default Optional<FilterValue> getFilter(final String key) {
        return getFilterValues().stream()
            .filter(filterValue -> filterValue.getKeyParser().keys().contains(key))
            .findFirst();
    }

    /*
     * Type safety version of getFilter
     *
     * @param key Key to search for
     * @param tClass Class of filter type
     * @param <T>
     * @return Filter associated with key or an empty optional if not found
     * @deprecated Not sure it is useful now as not used
     */
    /*
    default <T> Optional<FilterValue<T>> getFilter(final String key, final Class<T> tClass) {
        return Optional.ofNullable((FilterValue<T>) getFilterValues().get(key));
    }
    // */

    /**
     * @return List of FilterValue to create condition
     */
    @NotNull
    List<FilterValue> getFilterValues();

    /**
     * <p>This is not a replacement of good code, you should pass to buildConditions only a map of key and values that should be used for filtering and not always define ignored keys</p>
     * <p>This should almost never be overridden but as a precaution we include that possibility in this API</p>
     *
     * @return List of keys that filtering will skip if encountered
     */
    default List<String> getIgnoredKeys() {
        return DEFAULT_IGNORED_KEY_FOR_FILTERING;
    }

    /**
     * By default it is False.
     * <p>This is not a replacement of good code, you should pass to buildConditions only a map of key and values that should be used for filtering and that value is set</p>
     * <p>For boolean/isNull/isNotNull aliases, simply decide constant value you want to use like true,false,1,0,isnull,isnotnull,etc</p>
     * <p>If true then no exception are thrown on empty value for a key value from {@code Map< String, String>}</p>
     * <p>If False then exception are thrown on empty value for a key value from {@code Map< String, String>}</p>
     *
     * @return True if should skip value that are not present for a key
     * @deprecated Not sure to keep that as it makes unexpected result and does not respect API from client who sent a filter key with no value but that key expects to have a value
     */
    @Deprecated
    default boolean isSkipNotPresentValue() {
        return false;
    }

    /**
     * Helper method but not recommended to use as parsing has to be done inside conditionCreator
     * <p>Condition is called only if key is found and has a value</p>
     *
     * @param map Map containing key and values of request
     * @param key Key to search for
     * @param conditionCreator Condition creator called if key is found
     * @return Condition created or empty optional
     * @deprecated Only deprecated as not sure we should keep that, never used for projects (might be deleted)
     */
    default Optional<Condition> getCondition(final Map<String, String> map, final String key, final Function<String, Condition> conditionCreator) {
        return getValue(map, key)
            .map(conditionCreator);
    }

    /**
     * Helper method but not recommended to use as parsing has to be done inside conditionCreator
     * <p>Condition is called only if key is found and has a value</p>
     *
     * @param map Map containing key and values of request
     * @param key Key to search for
     * @param conditionCreator Condition creator called if key is found
     * @return Condition created or null
     * @deprecated Only deprecated as not sure we should keep that, never used for projects (might be deleted)
     */
    default Condition getConditionOrNull(final Map<String, String> map, final String key, final Function<String, Condition> conditionCreator) {
        return getValue(map, key)
            .map(conditionCreator)
            .orElse(null);
    }
}
mit
RezzedUp/skript-votifier-hook
src/main/java/com/rezzedup/skriptvotifierhook/expressions/VoterIpAddressExpression.java
778
package com.rezzedup.skriptvotifierhook.expressions;

import ch.njol.skript.expressions.base.SimplePropertyExpression;
import com.vexsoftware.votifier.model.Vote;

/**
 * Skript property expression exposing the IP address a Votifier vote
 * was submitted from.
 */
public class VoterIpAddressExpression extends SimplePropertyExpression<Vote, String>
{
    /** Skript syntax pattern matched by this expression. */
    public static final String PATTERN = "[(voter|sender)] [ip(-| )]address";

    static
    {
        // Register this property against the "vote" type so Skript can parse it.
        SimplePropertyExpression.register(VoterIpAddressExpression.class, String.class, PATTERN, "vote");
    }

    @Override
    public String convert(Vote vote)
    {
        String ipAddress = vote.getAddress();
        return ipAddress;
    }

    @Override
    public Class<? extends String> getReturnType()
    {
        return String.class;
    }

    @Override
    protected String getPropertyName()
    {
        return "ip address";
    }
}
mit
finky/Android-todolist
app/src/main/java/com/r_mades/todolist/fragments/TodoMainFragment.java
6890
package com.r_mades.todolist.fragments;

import android.app.DatePickerDialog;
import android.app.Fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.Editable;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.TextWatcher;
import android.text.style.BackgroundColorSpan;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.ImageButton;

import com.r_mades.todolist.NotifService;
import com.r_mades.todolist.R;
import com.r_mades.todolist.TodolistApp;
import com.r_mades.todolist.adapters.TasksAdapter;
import com.r_mades.todolist.data.TaskItem;
import com.r_mades.todolist.data.TaskItemRealm;

import java.util.Calendar;

import android.app.DatePickerDialog.OnDateSetListener;

import java.util.Date;
import java.util.GregorianCalendar;

import io.realm.Realm;

import static android.R.attr.delay;

/**
 * Main to-do screen: shows the task list, the "new task" input field and the
 * add / notification-date buttons. A task title containing "N сек" ("N sec")
 * schedules a notification N seconds later via NotifService.
 *
 * Project: ToDoList
 * Created: veloc1
 * Date: 8/8/16
 */
public class TodoMainFragment extends Fragment implements View.OnClickListener {

    // Input field for the new task title.
    EditText mNewTaskText;

    // Task currently being composed; replaced with a fresh instance after saving.
    private TaskItemRealm mTaskItem = new TaskItemRealm();

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the fragment layout from its XML definition.
        View root = inflater.inflate(R.layout.fragment_main, container, false);

        // Look up the RecyclerView by its id in the layout.
        RecyclerView recyclerView = (RecyclerView) root.findViewById(R.id.list);
        recyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        // Install the adapter so the list can render its items; pass a listener
        // that reacts to clicks on the "Done" button of a row.
        recyclerView.setAdapter(new TasksAdapter(getActivity(), new TasksAdapter.OnDoneClickListener() {
            @Override
            public void onDoneClick(TaskItemRealm item) {
                // Mark the item as done and persist the update.
                item.done = 1;
                ((TodolistApp) getActivity().getApplication()).getProvider().addObject(item);
            }
        }));

        mNewTaskText = (EditText) root.findViewById(R.id.new_task);
        // Highlight the "N сек" ("N sec") delay token as the user types.
        mNewTaskText.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                // "processed" means a highlight span was already applied,
                // which guards against re-entrancy from setText() below.
                boolean processed = true;
                if (s instanceof Spannable) {
                    BackgroundColorSpan[] spans = s.getSpans(0, s.length(), BackgroundColorSpan.class);
                    if (spans == null || spans.length == 0) {
                        processed = false;
                    }
                } else {
                    processed = false;
                }
                if (s.toString().matches("(.*) (\\d*) сек (.*)?") && !processed) {
                    String text = s.toString();
                    int seconds = s.toString().indexOf(" сек ");
                    int secondsStart = text.substring(0, seconds).lastIndexOf(' ');
                    int secondsVal = Integer.valueOf(text.substring(secondsStart + 1, seconds));
                    // Wrap the "<N> сек" token in a dark background span.
                    Spannable spannable = new SpannableString(s);
                    spannable.setSpan(new BackgroundColorSpan(0x80000000), secondsStart + 1, seconds + 4, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    mNewTaskText.setText(spannable);
                    mNewTaskText.setSelection(mNewTaskText.getText().length() - 1);
                }
            }
        });

        View addButton = root.findViewById(R.id.add_button);
        addButton.setOnClickListener(this);

        ImageButton notifyButton = (ImageButton) root.findViewById(R.id.notify_date);
        notifyButton.setOnClickListener(this);
        return root;
    }

    /**
     * Handles clicks on the add button (save the task, optionally schedule a
     * delayed notification) and the date button (open a DatePickerDialog).
     *
     * @param view the view that was clicked
     */
    @Override
    public void onClick(View view) {
        switch (view.getId()){
            case R.id.add_button:
                // Persist the composed task.
                mTaskItem.title = mNewTaskText.getText().toString();
                ((TodolistApp) getActivity().getApplication()).getProvider().addObject(mTaskItem);
                // If the title carries a "N сек" token, schedule a notification
                // N seconds from now via the service.
                int delay = findDelayInString(mTaskItem.title);
                if (delay != -1) {
                    Intent intent = new Intent(getActivity(), NotifService.class);
                    intent.putExtra(NotifService.ID, ((TodolistApp) getActivity().getApplicationContext()).getProvider().count());
                    intent.putExtra(NotifService.DELAY, delay);
                    getActivity().startService(intent);
                }
                // Reset the input and start composing a fresh task.
                mNewTaskText.setText("");
                mTaskItem = new TaskItemRealm();
                break;
            case R.id.notify_date:
                // Open the date picker, pre-set to the task's current
                // notification date when one was already chosen.
                int year, month , day;
                Calendar c = Calendar.getInstance();
                if (mTaskItem.notifTime != null) c.setTime(mTaskItem.notifTime);
                year = c.get(Calendar.YEAR);
                month = c.get(Calendar.MONTH);
                day = c.get(Calendar.DAY_OF_MONTH);
                new DatePickerDialog(getActivity(), myCallBack, year, month, day).show();
                break;
        }
    }

    // Stores the date chosen in the DatePickerDialog on the task being composed.
    OnDateSetListener myCallBack = new OnDateSetListener() {
        public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
            Calendar c = Calendar.getInstance();
            c.set(year, monthOfYear, dayOfMonth);
            mTaskItem.notifTime = c.getTime();
        }
    };

    /**
     * Extracts the notification delay from a task title of the form
     * "... N сек ..." ("... N sec ...").
     *
     * @param text the task title to scan
     * @return the delay in seconds, or -1 when the title carries no delay token
     */
    private int findDelayInString(String text) {
        if (text.matches("(.*) (\\d*) сек (.*)?")) {
            int seconds = text.toString().indexOf(" сек ");
            int secondsStart = text.substring(0, seconds).lastIndexOf(' ');
            int secondsVal = Integer.valueOf(text.substring(secondsStart + 1, seconds));
            return secondsVal;
        }
        return -1;
    }
}
mit