repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
jomarko/kie-wb-common | kie-wb-common-dmn/kie-wb-common-dmn-api/src/main/java/org/kie/workbench/common/dmn/api/definition/model/IsInformationItem.java | 1221 | /*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.api.definition.model;
import org.kie.workbench.common.dmn.api.definition.HasName;
import org.kie.workbench.common.dmn.api.definition.HasTypeRef;
import org.kie.workbench.common.dmn.api.property.dmn.Id;
import org.kie.workbench.common.stunner.core.domainobject.DomainObject;
/**
 * Common contract for DMN model elements that act as an information item:
 * a named, typed value holder that lives inside a parent DMN model element.
 * Combines identity ({@link #getId()}), naming ({@code HasName}), typing
 * ({@code HasTypeRef}) and domain-object behavior ({@code DomainObject}).
 */
public interface IsInformationItem extends DomainObject,
                                           HasName,
                                           HasTypeRef {

    /**
     * Returns the unique identifier of this information item.
     */
    Id getId();

    /**
     * Returns the DMN model element that owns this information item.
     */
    DMNModelInstrumentedBase getParent();

    /**
     * Sets the DMN model element that owns this information item.
     *
     * @param parent the owning model element
     */
    void setParent(final DMNModelInstrumentedBase parent);
}
| apache-2.0 |
nmcl/scratch | graalvm/transactions/fork/narayana/ArjunaJTS/jtax/classes/com/arjuna/ats/internal/jta/resources/jts/orbspecific/LastResourceRecord.java | 5436 | /*
* JBoss, Home of Professional Open Source
* Copyright 2006, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a full listing
* of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2005-2006,
* @author JBoss Inc.
*/
/*
* Copyright (C) 2005
*
* Arjuna Solutions Limited,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id$
*/
package com.arjuna.ats.internal.jta.resources.jts.orbspecific;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import org.omg.CORBA.SystemException;
import org.omg.CORBA.TRANSACTION_ROLLEDBACK;
import org.omg.CosTransactions.HeuristicHazard;
import org.omg.CosTransactions.HeuristicMixed;
import org.omg.CosTransactions.HeuristicRollback;
import org.omg.CosTransactions.NotPrepared;
import org.omg.CosTransactions.Vote;
import com.arjuna.ArjunaOTS.OTSAbstractRecord;
import com.arjuna.ats.arjuna.common.Uid;
import com.arjuna.ats.arjuna.common.arjPropertyManager;
import com.arjuna.ats.arjuna.coordinator.RecordType;
import com.arjuna.ats.internal.jta.transaction.jts.TransactionImple;
import com.arjuna.ats.internal.jta.utils.jtaxLogger;
/**
* XAResourceRecord implementing the Last Resource Commit Optimisation.
*
* @author Kevin Conner (Kevin.Conner@arjuna.com)
* @version $Id$
* @since ATS 4.1
*/
public class LastResourceRecord extends XAResourceRecord
{
    /**
     * The Uid for all last xa resource records. A single well-known Uid is
     * shared so the coordinator can recognise and order these records uniformly.
     */
    private static final String UID = Uid.lastResourceUid().stringForm() ;

    /**
     * Construct the record for last resource commit optimisation.
     * @param tx The current transaction.
     * @param xaResource The associated XA resource.
     * @param xid The X/Open transaction identifier.
     * @param params Additional parameters.
     */
    public LastResourceRecord(final TransactionImple tx, final XAResource xaResource, final Xid xid, final Object[] params)
    {
        super(tx, xaResource, xid, params) ;
    }

    /**
     * The type id for this record.
     *
     * @return {@code RecordType.LASTRESOURCE}.
     */
    public int type_id()
        throws SystemException
    {
        return RecordType.LASTRESOURCE ;
    }

    /**
     * The UID for this resource.
     *
     * @return the shared last-resource Uid string.
     */
    public String uid()
        throws SystemException
    {
        return UID ;
    }

    /**
     * Commit this resource. Intentionally a no-op: the real commit already
     * happened in {@link #prepare()} via the one-phase optimisation.
     */
    public void commit()
        throws SystemException, NotPrepared, HeuristicRollback, HeuristicMixed, HeuristicHazard
    {
    }

    /**
     * Prepare this resource by committing it in one phase; the vote reflects
     * the commit outcome (rollback exception maps to a rollback vote).
     */
    public Vote prepare()
        throws HeuristicMixed, HeuristicHazard, SystemException
    {
        try
        {
            commit_one_phase() ;
            return Vote.VoteCommit ;
        }
        catch (final TRANSACTION_ROLLEDBACK tr)
        {
            return Vote.VoteRollback ;
        }
    }

    /**
     * The type for saving state.
     */
    public String type()
    {
        return "/CosTransactions/LastXAResourceRecord" ;
    }

    /**
     * This record carries no recoverable state of its own.
     */
    public boolean saveRecord() throws SystemException
    {
        return false;
    }

    /**
     * Decide whether another record may coexist with this one in the same
     * transaction. Additional last-resource records are rejected unless
     * explicitly allowed by configuration (in which case a warning is logged,
     * at most once when warnings are disabled).
     */
    public boolean shouldAdd(OTSAbstractRecord record) throws SystemException
    {
        if( record.type_id() == type_id() )
        {
            if(ALLOW_MULTIPLE_LAST_RESOURCES)
            {
                // Simplified from the redundant original condition
                // "!_disableMLRWarning || (_disableMLRWarning && !_issuedWarning)":
                // warn every time unless warnings are disabled AND one was already issued.
                if (!_disableMLRWarning || !_issuedWarning)
                {
                    jtaxLogger.i18NLogger.warn_jtax_resources_jts_orbspecific_lastResource_multipleWarning(record.toString());

                    _issuedWarning = true;
                }

                return true;
            }
            else
            {
                jtaxLogger.i18NLogger.warn_jtax_resources_jts_orbspecific_lastResource_disallow(record.toString());

                return false;
            }
        }
        else
        {
            return true;
        }
    }

    /** Whether more than one last resource may be enlisted (configuration-driven). */
    private static final boolean ALLOW_MULTIPLE_LAST_RESOURCES = arjPropertyManager.getCoreEnvironmentBean()
            .isAllowMultipleLastResources();

    /** Set from configuration in the static block; suppresses repeated warnings. */
    private static boolean _disableMLRWarning = false;

    /** True once the multiple-last-resources warning has been issued. */
    private static boolean _issuedWarning = false;

    /**
     * Static block writes warning messages to the log if either multiple last resources are enabled
     * or multiple last resources warning is disabled.
     */
    static
    {
        if (ALLOW_MULTIPLE_LAST_RESOURCES)
        {
            jtaxLogger.i18NLogger.warn_jtax_resources_jts_orbspecific_lastResource_startupWarning();
        }

        if (arjPropertyManager.getCoreEnvironmentBean().isDisableMultipleLastResourcesWarning())
        {
            jtaxLogger.i18NLogger.warn_jtax_resources_jts_orbspecific_lastResource_disableWarning();

            _disableMLRWarning = true;
        }
    }
}
| apache-2.0 |
petmit/elasticsearch | src/main/java/org/elasticsearch/action/get/MultiGetRequest.java | 22360 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.get;
import com.google.common.collect.Iterators;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.*;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/**
 * Request for fetching multiple documents in one round trip. Holds a list of
 * {@link Item}s (one per document) plus request-wide options (preference,
 * realtime, refresh). Supports parsing the REST "docs"/"ids" body formats and
 * the internal binary wire format.
 */
public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements Iterable<MultiGetRequest.Item>, CompositeIndicesRequest {

    /**
     * A single get item: coordinates (index/type/id) plus per-item options
     * (routing, fields, version, _source filtering).
     */
    public static class Item implements Streamable, IndicesRequest {

        private String index;
        private String type;
        private String id;
        private String routing;
        private String[] fields;
        // Versions.MATCH_ANY means no version constraint.
        private long version = Versions.MATCH_ANY;
        private VersionType versionType = VersionType.INTERNAL;
        private FetchSourceContext fetchSourceContext;

        // No-arg constructor for deserialization via readItem/readFrom.
        public Item() {

        }

        /**
         * Constructs a single get item.
         *
         * @param index The index name
         * @param type  The type (can be null)
         * @param id    The id
         */
        public Item(String index, @Nullable String type, String id) {
            this.index = index;
            this.type = type;
            this.id = id;
        }

        public String index() {
            return this.index;
        }

        @Override
        public String[] indices() {
            return new String[]{index};
        }

        @Override
        public IndicesOptions indicesOptions() {
            return GetRequest.INDICES_OPTIONS;
        }

        public Item index(String index) {
            this.index = index;
            return this;
        }

        public String type() {
            return this.type;
        }

        public Item type(String type) {
            this.type = type;
            return this;
        }

        public String id() {
            return this.id;
        }

        /**
         * The routing associated with this document.
         */
        public Item routing(String routing) {
            this.routing = routing;
            return this;
        }

        public String routing() {
            return this.routing;
        }

        // Parent only acts as a routing fallback: an explicitly set routing wins.
        public Item parent(String parent) {
            if (routing == null) {
                this.routing = parent;
            }
            return this;
        }

        public Item fields(String... fields) {
            this.fields = fields;
            return this;
        }

        public String[] fields() {
            return this.fields;
        }

        public long version() {
            return version;
        }

        public Item version(long version) {
            this.version = version;
            return this;
        }

        public VersionType versionType() {
            return versionType;
        }

        public Item versionType(VersionType versionType) {
            this.versionType = versionType;
            return this;
        }

        public FetchSourceContext fetchSourceContext() {
            return this.fetchSourceContext;
        }

        /**
         * Allows setting the {@link FetchSourceContext} for this request, controlling if and how _source should be returned.
         */
        public Item fetchSourceContext(FetchSourceContext fetchSourceContext) {
            this.fetchSourceContext = fetchSourceContext;
            return this;
        }

        /** Deserialization factory: reads one Item from the stream. */
        public static Item readItem(StreamInput in) throws IOException {
            Item item = new Item();
            item.readFrom(in);
            return item;
        }

        // NOTE: field order here must mirror writeTo exactly - this is the wire format.
        @Override
        public void readFrom(StreamInput in) throws IOException {
            index = in.readSharedString();
            type = in.readOptionalSharedString();
            id = in.readString();
            routing = in.readOptionalString();
            int size = in.readVInt();
            // size == 0 is encoded as "no fields" (fields stays null).
            if (size > 0) {
                fields = new String[size];
                for (int i = 0; i < size; i++) {
                    fields[i] = in.readString();
                }
            }
            version = in.readLong();
            versionType = VersionType.fromValue(in.readByte());
            fetchSourceContext = FetchSourceContext.optionalReadFromStream(in);
        }

        // NOTE: field order here must mirror readFrom exactly - this is the wire format.
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeSharedString(index);
            out.writeOptionalSharedString(type);
            out.writeString(id);
            out.writeOptionalString(routing);
            if (fields == null) {
                out.writeVInt(0);
            } else {
                out.writeVInt(fields.length);
                for (String field : fields) {
                    out.writeString(field);
                }
            }
            out.writeLong(version);
            out.writeByte(versionType.getValue());
            FetchSourceContext.optionalWriteToStream(fetchSourceContext, out);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof Item)) return false;

            Item item = (Item) o;

            if (version != item.version) return false;
            if (fetchSourceContext != null ? !fetchSourceContext.equals(item.fetchSourceContext) : item.fetchSourceContext != null)
                return false;
            if (!Arrays.equals(fields, item.fields)) return false;
            if (!id.equals(item.id)) return false;
            if (!index.equals(item.index)) return false;
            if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false;
            if (type != null ? !type.equals(item.type) : item.type != null) return false;
            if (versionType != item.versionType) return false;

            return true;
        }

        @Override
        public int hashCode() {
            // Covers the same fields as equals(), keeping the equals/hashCode contract.
            int result = index.hashCode();
            result = 31 * result + (type != null ? type.hashCode() : 0);
            result = 31 * result + id.hashCode();
            result = 31 * result + (routing != null ? routing.hashCode() : 0);
            result = 31 * result + (fields != null ? Arrays.hashCode(fields) : 0);
            result = 31 * result + (int) (version ^ (version >>> 32));
            result = 31 * result + versionType.hashCode();
            result = 31 * result + (fetchSourceContext != null ? fetchSourceContext.hashCode() : 0);
            return result;
        }
    }

    String preference;
    // Tri-state: null means "use the default" (treated as true by realtime()).
    Boolean realtime;
    boolean refresh;
    public boolean ignoreErrorsOnGeneratedFields = false;

    List<Item> items = new ArrayList<>();

    public MultiGetRequest() {

    }

    /**
     * Creates a multi get request caused by some other request, which is provided as an
     * argument so that its headers and context can be copied to the new request
     */
    public MultiGetRequest(ActionRequest request) {
        super(request);
    }

    public List<Item> getItems() {
        return this.items;
    }

    public MultiGetRequest add(Item item) {
        items.add(item);
        return this;
    }

    public MultiGetRequest add(String index, @Nullable String type, String id) {
        items.add(new Item(index, type, id));
        return this;
    }

    /**
     * Validates that at least one item exists and that every item has an
     * index and an id; accumulates all errors into one exception.
     */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = null;
        if (items.isEmpty()) {
            validationException = ValidateActions.addValidationError("no documents to get", validationException);
        } else {
            for (int i = 0; i < items.size(); i++) {
                Item item = items.get(i);
                if (item.index() == null) {
                    validationException = ValidateActions.addValidationError("index is missing for doc " + i, validationException);
                }
                if (item.id() == null) {
                    validationException = ValidateActions.addValidationError("id is missing for doc " + i, validationException);
                }
            }
        }
        return validationException;
    }

    @Override
    public List<? extends IndicesRequest> subRequests() {
        return items;
    }

    /**
     * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
     * <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards, or
     * a custom value, which guarantees that the same order will be used across different requests.
     */
    public MultiGetRequest preference(String preference) {
        this.preference = preference;
        return this;
    }

    public String preference() {
        return this.preference;
    }

    // Unset (null) realtime defaults to true.
    public boolean realtime() {
        return this.realtime == null ? true : this.realtime;
    }

    public MultiGetRequest realtime(Boolean realtime) {
        this.realtime = realtime;
        return this;
    }

    public boolean refresh() {
        return this.refresh;
    }

    public MultiGetRequest refresh(boolean refresh) {
        this.refresh = refresh;
        return this;
    }

    public MultiGetRequest ignoreErrorsOnGeneratedFields(boolean ignoreErrorsOnGeneratedFields) {
        this.ignoreErrorsOnGeneratedFields = ignoreErrorsOnGeneratedFields;
        return this;
    }

    /** Parses a raw byte-array body; delegates to the BytesReference overload. */
    public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, byte[] data, int from, int length) throws Exception {
        return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, new BytesArray(data, from, length), true);
    }

    public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, BytesReference data) throws Exception {
        return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, data, true);
    }

    public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, BytesReference data, boolean allowExplicitIndex) throws Exception {
        return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, null, data, allowExplicitIndex);
    }

    /**
     * Parses a multi-get request body, which may contain a "docs" array of
     * per-document objects and/or an "ids" array of plain ids; parsed items
     * are appended to this request.
     */
    public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, BytesReference data, boolean allowExplicitIndex) throws Exception {
        try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
            XContentParser.Token token;
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if ("docs".equals(currentFieldName)) {
                        parseDocuments(parser, this.items, defaultIndex, defaultType, defaultFields, defaultFetchSource, defaultRouting, allowExplicitIndex);
                    } else if ("ids".equals(currentFieldName)) {
                        parseIds(parser, this.items, defaultIndex, defaultType, defaultFields, defaultFetchSource, defaultRouting);
                    }
                }
            }
        }
        return this;
    }

    /**
     * Parses the objects of a "docs" array into {@link Item}s. Each object may
     * override the provided defaults; "_source" accepts a boolean, a single
     * include string, an array of includes, or an object with includes/excludes.
     */
    public static void parseDocuments(XContentParser parser, List<Item> items, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, boolean allowExplicitIndex) throws IOException {
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            if (token != XContentParser.Token.START_OBJECT) {
                throw new ElasticsearchIllegalArgumentException("docs array element should include an object");
            }
            // Per-document state, seeded from the request-level defaults.
            String index = defaultIndex;
            String type = defaultType;
            String id = null;
            String routing = defaultRouting;
            String parent = null;
            List<String> fields = null;
            long version = Versions.MATCH_ANY;
            VersionType versionType = VersionType.INTERNAL;

            FetchSourceContext fetchSourceContext = null;

            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if ("_index".equals(currentFieldName)) {
                        if (!allowExplicitIndex) {
                            throw new ElasticsearchIllegalArgumentException("explicit index in multi get is not allowed");
                        }
                        index = parser.text();
                    } else if ("_type".equals(currentFieldName)) {
                        type = parser.text();
                    } else if ("_id".equals(currentFieldName)) {
                        id = parser.text();
                    } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
                        routing = parser.text();
                    } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
                        parent = parser.text();
                    } else if ("fields".equals(currentFieldName)) {
                        // Single scalar "fields" value - wrap it in a one-element list.
                        fields = new ArrayList<>();
                        fields.add(parser.text());
                    } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
                        version = parser.longValue();
                    } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
                        versionType = VersionType.fromString(parser.text());
                    } else if ("_source".equals(currentFieldName)) {
                        if (parser.isBooleanValue()) {
                            fetchSourceContext = new FetchSourceContext(parser.booleanValue());
                        } else if (token == XContentParser.Token.VALUE_STRING) {
                            fetchSourceContext = new FetchSourceContext(new String[]{parser.text()});
                        } else {
                            throw new ElasticsearchParseException("illegal type for _source: [" + token + "]");
                        }
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if ("fields".equals(currentFieldName)) {
                        fields = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            fields.add(parser.text());
                        }
                    } else if ("_source".equals(currentFieldName)) {
                        // Array form of _source: a list of include patterns only.
                        ArrayList<String> includes = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            includes.add(parser.text());
                        }
                        fetchSourceContext = new FetchSourceContext(includes.toArray(Strings.EMPTY_ARRAY));
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if ("_source".equals(currentFieldName)) {
                        // Object form of _source: {"includes": [...], "excludes": [...]}.
                        // currentList points at whichever list the last field name selected.
                        List<String> currentList = null, includes = null, excludes = null;

                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            if (token == XContentParser.Token.FIELD_NAME) {
                                currentFieldName = parser.currentName();
                                if ("includes".equals(currentFieldName) || "include".equals(currentFieldName)) {
                                    currentList = includes != null ? includes : (includes = new ArrayList<>(2));
                                } else if ("excludes".equals(currentFieldName) || "exclude".equals(currentFieldName)) {
                                    currentList = excludes != null ? excludes : (excludes = new ArrayList<>(2));
                                } else {
                                    throw new ElasticsearchParseException("Source definition may not contain " + parser.text());
                                }
                            } else if (token == XContentParser.Token.START_ARRAY) {
                                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                                    currentList.add(parser.text());
                                }
                            } else if (token.isValue()) {
                                currentList.add(parser.text());
                            } else {
                                throw new ElasticsearchParseException("unexpected token while parsing source settings");
                            }
                        }

                        fetchSourceContext = new FetchSourceContext(
                                includes == null ? Strings.EMPTY_ARRAY : includes.toArray(new String[includes.size()]),
                                excludes == null ? Strings.EMPTY_ARRAY : excludes.toArray(new String[excludes.size()]));
                    }
                }
            }
            String[] aFields;
            if (fields != null) {
                aFields = fields.toArray(new String[fields.size()]);
            } else {
                aFields = defaultFields;
            }
            // Fall back to the request-level _source defaults when the item defined none.
            items.add(new Item(index, type, id).routing(routing).fields(aFields).parent(parent).version(version).versionType(versionType)
                    .fetchSourceContext(fetchSourceContext == null ? defaultFetchSource : fetchSourceContext));
        }
    }

    public static void parseDocuments(XContentParser parser, List<Item> items) throws IOException {
        parseDocuments(parser, items, null, null, null, null, null, true);
    }

    /**
     * Parses an "ids" array of plain id values; every id becomes an item
     * built from the request-level defaults.
     */
    public static void parseIds(XContentParser parser, List<Item> items, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting) throws IOException {
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            if (!token.isValue()) {
                throw new ElasticsearchIllegalArgumentException("ids array element should only contain ids");
            }
            items.add(new Item(defaultIndex, defaultType, parser.text()).fields(defaultFields).fetchSourceContext(defaultFetchSource).routing(defaultRouting));
        }
    }

    public static void parseIds(XContentParser parser, List<Item> items) throws IOException {
        parseIds(parser, items, null, null, null, null, null);
    }

    @Override
    public Iterator<Item> iterator() {
        return Iterators.unmodifiableIterator(items.iterator());
    }

    // NOTE: field order must mirror writeTo exactly - this is the wire format.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        preference = in.readOptionalString();
        refresh = in.readBoolean();
        // realtime is a tri-state Boolean encoded as a byte: -1 = null, 0 = false, 1 = true.
        byte realtime = in.readByte();
        if (realtime == 0) {
            this.realtime = false;
        } else if (realtime == 1) {
            this.realtime = true;
        }
        ignoreErrorsOnGeneratedFields = in.readBoolean();

        int size = in.readVInt();
        items = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            items.add(Item.readItem(in));
        }
    }

    // NOTE: field order must mirror readFrom exactly - this is the wire format.
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeOptionalString(preference);
        out.writeBoolean(refresh);
        // Tri-state realtime Boolean: -1 = null, 0 = false, 1 = true.
        if (realtime == null) {
            out.writeByte((byte) -1);
        } else if (realtime == false) {
            out.writeByte((byte) 0);
        } else {
            out.writeByte((byte) 1);
        }
        out.writeBoolean(ignoreErrorsOnGeneratedFields);

        out.writeVInt(items.size());
        for (Item item : items) {
            item.writeTo(out);
        }
    }
}
| apache-2.0 |
alexzaitzev/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsMetaManager.java | 131246 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.igfs;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.CountDownLatch;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorResult;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteCompute;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteInterruptedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.configuration.FileSystemConfiguration;
import org.apache.ignite.events.EventType;
import org.apache.ignite.events.IgfsEvent;
import org.apache.ignite.igfs.IgfsConcurrentModificationException;
import org.apache.ignite.igfs.IgfsDirectoryNotEmptyException;
import org.apache.ignite.igfs.IgfsException;
import org.apache.ignite.igfs.IgfsFile;
import org.apache.ignite.igfs.IgfsParentNotDirectoryException;
import org.apache.ignite.igfs.IgfsPath;
import org.apache.ignite.igfs.IgfsPathAlreadyExistsException;
import org.apache.ignite.igfs.IgfsPathIsDirectoryException;
import org.apache.ignite.igfs.IgfsPathIsNotDirectoryException;
import org.apache.ignite.igfs.IgfsPathNotFoundException;
import org.apache.ignite.igfs.IgfsUserContext;
import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem;
import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystemPositionedReadable;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager;
import org.apache.ignite.internal.processors.cache.GridCacheInternal;
import org.apache.ignite.internal.processors.cache.IgniteInternalCache;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal;
import org.apache.ignite.internal.processors.igfs.client.IgfsClientAbstractCallable;
import org.apache.ignite.internal.processors.igfs.client.meta.IgfsClientMetaIdsForPathCallable;
import org.apache.ignite.internal.processors.igfs.client.meta.IgfsClientMetaInfoForPathCallable;
import org.apache.ignite.internal.processors.igfs.client.meta.IgfsClientMetaUnlockCallable;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaDirectoryCreateProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaDirectoryListingAddProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaDirectoryListingRemoveProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaDirectoryListingRenameProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaDirectoryListingReplaceProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaFileCreateProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaFileLockProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaFileReserveSpaceProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaFileUnlockProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaUpdatePropertiesProcessor;
import org.apache.ignite.internal.processors.igfs.meta.IgfsMetaUpdateTimesProcessor;
import org.apache.ignite.internal.util.GridLeanMap;
import org.apache.ignite.internal.util.GridSpinBusyLock;
import org.apache.ignite.internal.util.lang.GridClosureException;
import org.apache.ignite.internal.util.lang.IgniteOutClosureX;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.T1;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.events.EventType.EVT_IGFS_DIR_RENAMED;
import static org.apache.ignite.events.EventType.EVT_IGFS_FILE_RENAMED;
/**
* Cache based structure (meta data) manager.
*/
public class IgfsMetaManager extends IgfsManager {
/** Comparator for Id sorting. */
private static final Comparator<IgniteUuid> PATH_ID_SORTING_COMPARATOR = new Comparator<IgniteUuid>() {
@Override public int compare(IgniteUuid u1, IgniteUuid u2) {
if (u1 == u2)
return 0;
if (u1 == null)
return -1;
return u1.compareTo(u2);
}
};
    /** IGFS configuration. */
    private FileSystemConfiguration cfg;

    /** Metadata cache. */
    private IgniteInternalCache<Object, Object> metaCache;

    /** Latch released once the metadata cache preloader has completed (see onKernalStart0). */
    private CountDownLatch metaCacheStartLatch;

    /** File ID to file info projection. */
    private IgniteInternalCache<IgniteUuid, IgfsEntryInfo> id2InfoPrj;

    /** Predefined key for sampling mode value. */
    private GridCacheInternal sampling;

    /** Logger. */
    private IgniteLogger log;

    /** Delete worker. */
    private volatile IgfsDeleteWorker delWorker;

    /** Events manager. */
    private GridEventStorageManager evts;

    /** Local node. */
    private ClusterNode locNode;

    /** Busy lock. */
    private final GridSpinBusyLock busyLock = new GridSpinBusyLock();

    /** Relaxed flag. */
    private final boolean relaxed;

    /** Client flag. */
    private final boolean client;

    /** Compute facade for client tasks (lazily initialized in clientCompute()). */
    private IgniteCompute cliCompute;

    /** Metadata cache name. */
    private String metaCacheName;
    /**
     * Constructor. Only captures the mode flags; all heavy initialization is
     * deferred to {@code start0()} / {@code onKernalStart0()}.
     *
     * @param relaxed Relaxed mode flag.
     * @param client Client flag.
     */
    public IgfsMetaManager(boolean relaxed, boolean client) {
        this.relaxed = relaxed;
        this.client = client;
    }
    /**
     * Await initialization: blocks until the metadata cache preloader has
     * finished and released {@code metaCacheStartLatch}.
     *
     * @throws IgniteInterruptedException If the wait is interrupted.
     */
    void awaitInit() {
        try {
            metaCacheStartLatch.await();
        }
        catch (InterruptedException e) {
            // NOTE(review): the thread's interrupt status is not restored before
            // wrapping - confirm whether callers rely on the interrupt flag.
            throw new IgniteInterruptedException(e);
        }
    }
    /** {@inheritDoc} */
    @Override protected void start0() throws IgniteCheckedException {
        // Initialize configuration-derived state only; actual cache handles
        // are resolved later in onKernalStart0().
        metaCacheStartLatch = new CountDownLatch(1);

        cfg = igfsCtx.configuration();

        evts = igfsCtx.kernalContext().event();

        sampling = new IgfsSamplingKey(cfg.getName());

        log = igfsCtx.kernalContext().log(IgfsMetaManager.class);

        metaCacheName = cfg.getMetaCacheConfiguration().getName();
    }
    /** {@inheritDoc} */
    @SuppressWarnings("RedundantCast")
    @Override protected void onKernalStart0() throws IgniteCheckedException {
        metaCache = igfsCtx.kernalContext().cache().getOrStartCache(metaCacheName);

        assert metaCache != null;

        // Release the init latch only after the metadata cache finishes preloading,
        // so awaitInit() callers never observe a partially loaded cache.
        igfsCtx.kernalContext().cache().internalCache(metaCacheName).preloader().startFuture()
            .listen(new CI1<IgniteInternalFuture<Object>>() {
                @Override public void apply(IgniteInternalFuture<Object> f) {
                    metaCacheStartLatch.countDown();
                }
            });

        id2InfoPrj = (IgniteInternalCache<IgniteUuid, IgfsEntryInfo>)metaCache.<IgniteUuid, IgfsEntryInfo>cache();

        locNode = igfsCtx.kernalContext().discovery().localNode();

        // Start background delete worker. Clients delegate deletes to server nodes,
        // so no worker is started in client mode.
        if (!client) {
            delWorker = new IgfsDeleteWorker(igfsCtx);

            delWorker.start();
        }
    }
/** {@inheritDoc} */
@Override protected void onKernalStop0(boolean cancel) {
    // Read the field once; it is null on client nodes where the worker was never started.
    final IgfsDeleteWorker worker = delWorker;

    if (worker != null) {
        worker.cancel();

        try {
            U.join(worker);
        }
        catch (IgniteInterruptedCheckedException ignored) {
            // Proceed with the stop sequence even if the join was interrupted.
        }
    }

    // Block the busy lock so no further file system operations can enter.
    busyLock.block();
}
/**
 * Tells whether this manager runs on a client node.
 *
 * @return Client flag.
 */
boolean isClient() {
    return client;
}
/**
 * Runs a client task, routing it to a metadata node.
 *
 * @param task Task to execute.
 * @return Task result.
 */
<T> T runClientTask(IgfsClientAbstractCallable<T> task) {
    try {
        // With colocated metadata, route the call to the node owning the root entry.
        if (cfg.isColocateMetadata())
            return clientCompute().affinityCall(metaCacheName, IgfsUtils.ROOT_ID, task);

        return clientCompute().call(task);
    }
    catch (Exception e) {
        if (X.hasCause(e, ClusterTopologyException.class))
            throw new IgfsException("Failed to execute operation because there are no IGFS metadata nodes.", e);

        // Unwrap and rethrow an IGFS-specific cause if present.
        IgfsException cause = X.cause(e, IgfsException.class);

        if (cause != null)
            throw cause;

        throw e;
    }
}
/**
 * Lazily creates and caches the compute facade used for client tasks.
 * NOTE(review): the cached field is not volatile; this looks like a benign race
 * (facade creation appears idempotent) — confirm before relying on it concurrently.
 *
 * @return Compute facade targeting IGFS metadata data nodes.
 */
private IgniteCompute clientCompute() {
    assert client;

    IgniteCompute res = cliCompute;

    if (res == null) {
        IgniteEx ignite = igfsCtx.kernalContext().grid();

        ClusterGroup metaNodes = ignite.cluster().forIgfsMetadataDataNodes(cfg.getName(), metaCacheName);

        res = ignite.compute(metaNodes);

        cliCompute = res;
    }

    assert res != null;

    return res;
}
/**
 * Gets file ID for specified path.
 *
 * @param path Path.
 * @return File ID for specified path or {@code null} if such file doesn't exist.
 * @throws IgniteCheckedException If failed.
 */
@Nullable public IgniteUuid fileId(IgfsPath path) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to get file ID because Grid is stopping: " + path);

    try {
        validTxState(false);

        return fileId(path, false);
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Gets file ID for specified path possibly skipping existing transaction.
 *
 * @param path Path.
 * @param skipTx Whether to skip existing transaction.
 * @return File ID for specified path or {@code null} if such file doesn't exist.
 * @throws IgniteCheckedException If failed.
 */
@Nullable private IgniteUuid fileId(IgfsPath path, boolean skipTx) throws IgniteCheckedException {
    List<IgniteUuid> pathIds = fileIds(path, skipTx);

    assert pathIds != null && !pathIds.isEmpty() : "Invalid file IDs [path=" + path + ", ids=" + pathIds + ']';

    // The last element corresponds to the path's leaf component.
    return pathIds.get(pathIds.size() - 1);
}
/**
 * Gets file ID by its name from parent directory listing.
 *
 * @param parentId Parent directory ID to get child ID for.
 * @param fileName File name in parent listing to get file ID for.
 * @return File ID or {@code null} if no such child exists.
 * @throws IgniteCheckedException If failed.
 */
@Nullable public IgniteUuid fileId(IgniteUuid parentId, String fileName) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to get file ID because Grid is stopping [parentId=" + parentId +
            ", fileName=" + fileName + ']');

    try {
        return fileId(parentId, fileName, false);
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Gets file ID by its name from parent directory listing possibly skipping existing transaction.
 *
 * @param parentId Parent directory ID to get child ID for.
 * @param fileName File name in parent listing to get file ID for.
 * @param skipTx Whether to skip existing transaction.
 * @return File ID or {@code null} if no such child exists.
 * @throws IgniteCheckedException If failed.
 */
@Nullable private IgniteUuid fileId(IgniteUuid parentId, String fileName, boolean skipTx)
    throws IgniteCheckedException {
    IgfsListingEntry entry = directoryListing(parentId, skipTx).get(fileName);

    if (entry != null)
        return entry.fileId();

    if (log.isDebugEnabled())
        log.debug("Missing file ID [parentId=" + parentId + ", fileName=" + fileName + ']');

    return null;
}
/**
 * Gets all file IDs for components of specified path. Result cannot be empty - there is at least root element.
 * But each element (except the first) can be {@code null} if such files don't exist.
 *
 * @param path Path.
 * @return Collection of file IDs for components of specified path.
 * @throws IgniteCheckedException If failed.
 */
public List<IgniteUuid> fileIds(IgfsPath path) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to get file IDS because Grid is stopping: " + path);

    try {
        validTxState(false);

        return fileIds(path, false);
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Resolves IDs for all components of the given path and wraps them into an {@link IgfsPathIds} descriptor.
 * Resolution stops at the first missing component; IDs past that point remain {@code null}.
 *
 * @param path Path.
 * @return Path IDs descriptor for the specified path.
 * @throws IgniteCheckedException If failed.
 */
public IgfsPathIds pathIds(IgfsPath path) throws IgniteCheckedException {
    // Prepare parts. parts[0] corresponds to the root and intentionally stays null.
    String[] components = path.componentsArray();

    String[] parts = new String[components.length + 1];

    System.arraycopy(components, 0, parts, 1, components.length);

    // Get IDs.
    if (client) {
        // On client nodes, delegate ID resolution to a metadata server node.
        List<IgniteUuid> ids = runClientTask(new IgfsClientMetaIdsForPathCallable(cfg.getName(),
            IgfsUserContext.currentUser(), path));

        return new IgfsPathIds(path, parts, ids.toArray(new IgniteUuid[ids.size()]));
    }
    else {
        if (busyLock.enterBusy()) {
            try {
                validTxState(false);

                IgniteUuid[] ids = new IgniteUuid[parts.length];

                ids[0] = IgfsUtils.ROOT_ID;

                // Resolve each component against its parent's listing; stop at the first miss.
                for (int i = 1; i < ids.length; i++) {
                    IgniteUuid id = fileId(ids[i - 1], parts[i], false);

                    if (id != null)
                        ids[i] = id;
                    else
                        break;
                }

                // Return.
                return new IgfsPathIds(path, parts, ids);
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to get file IDS because Grid is stopping: " + path);
    }
}
/**
 * Gets all file IDs for components of specified path possibly skipping existing transaction. Result cannot
 * be empty - there is at least root element. But each element (except the first) can be {@code null} if such
 * files don't exist.
 *
 * @param path Path.
 * @param skipTx Whether to skip existing transaction.
 * @return Collection of file IDs for components of specified path.
 * @throws IgniteCheckedException If failed.
 */
private List<IgniteUuid> fileIds(IgfsPath path, boolean skipTx) throws IgniteCheckedException {
    assert path != null;

    Collection<String> components = path.components();

    List<IgniteUuid> res = new ArrayList<>(components.size() + 1);

    IgniteUuid curId = IgfsUtils.ROOT_ID;

    // Root ID is always present as the first element.
    res.add(curId);

    for (String name : components) {
        assert !name.isEmpty();

        // Once a component is missing, all deeper components resolve to null as well.
        if (curId != null)
            curId = fileId(curId, name, skipTx);

        res.add(curId);
    }

    return res;
}
/**
 * Ensure that entry with the given ID exists in meta cache.
 *
 * @param fileId File id.
 * @return {@code True} in case such entry exists.
 * @throws IgniteCheckedException IF failed.
 */
public boolean exists(IgniteUuid fileId) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to check file system entry existence because Grid is stopping: " +
            fileId);

    try {
        assert fileId != null;

        // containsKey() is not reliable here since the meta cache may be PARTITIONED,
        // so perform an actual read instead.
        return info(fileId) != null;
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Gets file info by its ID.
 * NB: this method is used both in Tx and out of Tx.
 *
 * @param fileId File ID to get details for.
 * @return File info or {@code null} if the entry does not exist.
 * @throws IgniteCheckedException If failed.
 */
@Nullable public IgfsEntryInfo info(@Nullable IgniteUuid fileId) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to get file info because Grid is stopping: " + fileId);

    try {
        if (fileId == null)
            return null;

        IgfsEntryInfo info = getInfo(fileId);

        // The root entry is created lazily, so materialize it on first access.
        if (info == null && IgfsUtils.ROOT_ID.equals(fileId))
            info = createSystemDirectoryIfAbsent(fileId);

        return info;
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Gets files details by their IDs.
 *
 * @param fileIds file IDs to get details for.
 * @return Files details (missing entries are absent from the map).
 * @throws IgniteCheckedException If failed.
 */
public Map<IgniteUuid, IgfsEntryInfo> infos(Collection<IgniteUuid> fileIds) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to get file infos because Grid is stopping: " + fileIds);

    try {
        validTxState(false);

        assert fileIds != null;

        if (F.isEmpty(fileIds))
            return Collections.emptyMap();

        Map<IgniteUuid, IgfsEntryInfo> res = getInfos(fileIds);

        // The root entry is created lazily; materialize it if requested but not yet stored.
        if (fileIds.contains(IgfsUtils.ROOT_ID) && !res.containsKey(IgfsUtils.ROOT_ID)) {
            res = new GridLeanMap<>(res);

            res.put(IgfsUtils.ROOT_ID, createSystemDirectoryIfAbsent(IgfsUtils.ROOT_ID));
        }

        return res;
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Lock the file explicitly outside of transaction.
 *
 * @param fileId File ID to lock.
 * @param del If file is being locked for delete.
 * @return Locked file info or {@code null} if file cannot be locked or doesn't exist.
 * @throws IgniteCheckedException If the file with such id does not exist, or on another failure.
 */
public @Nullable IgfsEntryInfo lock(IgniteUuid fileId, boolean del) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to obtain lock because Grid is stopping: " + fileId);

    try {
        validTxState(false);

        assert fileId != null;

        try (GridNearTxLocal tx = startTx()) {
            IgfsEntryInfo oldInfo = info(fileId);

            // Cannot lock a missing entry, or one that is already locked.
            if (oldInfo == null || oldInfo.lockId() != null)
                return null;

            IgfsEntryInfo newInfo = invokeLock(fileId, del);

            tx.commit();

            return newInfo;
        }
        catch (GridClosureException e) {
            throw U.cast(e);
        }
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Create file lock ID.
 *
 * @param del If lock ID is required for file deletion.
 * @return Delete-lock marker for deletions, otherwise a lock ID derived from the local node ID.
 */
private IgniteUuid createFileLockId(boolean del) {
    return del ? IgfsUtils.DELETE_LOCK_ID : IgniteUuid.fromUuid(locNode.id());
}
/**
 * Remove explicit lock on file held by the current stream, without space or affinity updates.
 *
 * @param fileId File ID.
 * @param lockId Lock ID.
 * @param modificationTime Modification time to write to file info.
 * @throws IgniteCheckedException If failed.
 */
public void unlock(final IgniteUuid fileId, final IgniteUuid lockId, final long modificationTime)
    throws IgniteCheckedException {
    // Delegate to the full overload: no space update, zero space delta, no affinity range.
    unlock(fileId, lockId, modificationTime, false, 0, null);
}
/**
 * Remove explicit lock on file held by the current stream.
 *
 * @param fileId File ID.
 * @param lockId Lock ID.
 * @param modificationTime Modification time to write to file info.
 * @param updateSpace Whether to update space.
 * @param space Space.
 * @param affRange Affinity range.
 * @throws IgniteCheckedException If failed.
 */
public void unlock(final IgniteUuid fileId, final IgniteUuid lockId, final long modificationTime,
    final boolean updateSpace, final long space, @Nullable final IgfsFileAffinityRange affRange)
    throws IgniteCheckedException {
    // On client nodes, delegate the whole operation to a metadata server node.
    if(client) {
        runClientTask(new IgfsClientMetaUnlockCallable(cfg.getName(), IgfsUserContext.currentUser(), fileId,
            lockId, modificationTime, updateSpace, space, affRange));

        return;
    }

    validTxState(false);

    if (busyLock.enterBusy()) {
        try {
            // Nothing to unlock.
            if (lockId == null)
                return;

            // Temporary clear interrupted state for unlocking; restored in the finally block below.
            final boolean interrupted = Thread.interrupted();

            try {
                IgfsUtils.doInTransactionWithRetries(id2InfoPrj, new IgniteOutClosureX<Void>() {
                    @Override public Void applyx() throws IgniteCheckedException {
                        validTxState(true);

                        // Lock file ID for this transaction.
                        IgfsEntryInfo oldInfo = info(fileId);

                        if (oldInfo == null)
                            throw fsException(new IgfsPathNotFoundException("Failed to unlock file (file not " +
                                "found): " + fileId));

                        // The caller must hold the very lock it is trying to release.
                        if (!F.eq(lockId, oldInfo.lockId()))
                            throw new IgniteCheckedException("Failed to unlock file (inconsistent file lock ID) " +
                                "[fileId=" + fileId + ", lockId=" + lockId + ", actualLockId=" +
                                oldInfo.lockId() + ']');

                        id2InfoPrj.invoke(fileId,
                            new IgfsMetaFileUnlockProcessor(modificationTime, updateSpace, space, affRange));

                        return null;
                    }
                });
            }
            finally {
                validTxState(false);

                // Restore the thread's interrupted status cleared above.
                if (interrupted)
                    Thread.currentThread().interrupt();
            }
        }
        finally {
            busyLock.leaveBusy();
        }
    }
    else
        throw new IllegalStateException("Failed to unlock file system entry because Grid is stopping: " + fileId);
}
/**
 * Lock file IDs participating in the transaction.
 *
 * @param fileIds file IDs to lock.
 * @return Locked file details. Resulting map doesn't contain details for not-existent files.
 * @throws IgniteCheckedException If failed.
 */
private Map<IgniteUuid, IgfsEntryInfo> lockIds(IgniteUuid... fileIds) throws IgniteCheckedException {
    validTxState(true);

    assert fileIds != null && fileIds.length > 0;

    // Sort to satisfy the ordered-input contract of lockIds(Collection) (consistent lock order).
    Arrays.sort(fileIds);

    return lockIds(Arrays.asList(fileIds));
}
/**
 * Answers if the collection is sorted in ascending natural order.
 *
 * @param col The collection to check; must not contain {@code null} elements.
 * @param <T> The type of the collection elements.
 * @return Whether the collection is sorted.
 */
private static <T extends Comparable<T>> boolean isSorted(Collection<T> col) {
    T prev = null;

    for (T cur : col) {
        if (cur == null)
            throw new NullPointerException("Collections should not contain nulls");

        // Any inversion relative to the previous element means the collection is unsorted.
        if (prev != null && prev.compareTo(cur) > 0)
            return false;

        prev = cur;
    }

    return true;
}
/**
 * Lock file IDs.
 *
 * @param fileIds File IDs (must be sorted to guarantee a consistent lock order).
 * @return Map with lock info.
 * @throws IgniteCheckedException If failed.
 */
private Map<IgniteUuid, IgfsEntryInfo> lockIds(Collection<IgniteUuid> fileIds) throws IgniteCheckedException {
    assert isSorted(fileIds);

    validTxState(true);

    if (log.isDebugEnabled())
        log.debug("Locking file ids: " + fileIds);

    // Lock files and get their infos.
    Map<IgniteUuid, IgfsEntryInfo> res = getInfos(fileIds);

    if (log.isDebugEnabled())
        log.debug("Locked file ids: " + fileIds);

    // System entries (root/trash) are created lazily; materialize any that were requested but absent.
    for (IgniteUuid id : fileIds) {
        if (IgfsUtils.isRootOrTrashId(id) && !res.containsKey(id))
            res.put(id, createSystemDirectoryIfAbsent(id));
    }

    // Returns detail's map for locked IDs.
    return res;
}
/**
 * Create system entry (root or trash directory) if it is absent.
 *
 * @param id System entry ID.
 * @return Value of created or existing system entry.
 * @throws IgniteCheckedException On error.
 */
private IgfsEntryInfo createSystemDirectoryIfAbsent(IgniteUuid id)
    throws IgniteCheckedException {
    assert IgfsUtils.isRootOrTrashId(id);

    IgfsEntryInfo created = IgfsUtils.createDirectory(id);

    // Atomic put-if-absent: if somebody beat us to it, use their entry.
    IgfsEntryInfo existing = id2InfoPrj.getAndPutIfAbsent(id, created);

    return existing == null ? created : existing;
}
/**
 * List child files for specified file ID.
 *
 * @param fileId File to list child files for.
 * @return Directory listing for the specified file.
 * @throws IgniteCheckedException If failed.
 */
public Map<String, IgfsListingEntry> directoryListing(IgniteUuid fileId) throws IgniteCheckedException {
    // Guard against concurrent grid stop.
    if (!busyLock.enterBusy())
        throw new IllegalStateException("Failed to get directory listing because Grid is stopping: " + fileId);

    try {
        return directoryListing(fileId, false);
    }
    finally {
        busyLock.leaveBusy();
    }
}
/**
 * Gets first available file info for fragmentizer.
 *
 * @param exclude File IDs to exclude from result.
 * @return First qualified file info or {@code null} if none found.
 * @throws IgniteCheckedException If failed to get file for fragmentizer.
 */
public IgfsEntryInfo fileForFragmentizer(Collection<IgniteUuid> exclude) throws IgniteCheckedException {
    if (busyLock.enterBusy()) {
        try {
            // Depth-first scan starting from the root directory.
            return fileForFragmentizer0(IgfsUtils.ROOT_ID, exclude);
        }
        finally {
            busyLock.leaveBusy();
        }
    }
    else
        // Fixed typo in the original message ("framentizer" -> "fragmentizer").
        throw new IllegalStateException("Failed to get file for fragmentizer because Grid is stopping.");
}
/**
 * Gets first available file info for fragmentizer by scanning the given directory recursively.
 *
 * @param parentId Parent ID to scan.
 * @param exclude File IDs to exclude from result.
 * @return First qualified file info or {@code null} if none found.
 * @throws IgniteCheckedException If failed to get file for fragmentizer.
 */
private IgfsEntryInfo fileForFragmentizer0(IgniteUuid parentId, Collection<IgniteUuid> exclude)
    throws IgniteCheckedException {
    IgfsEntryInfo dirInfo = info(parentId);

    // Directory could have been deleted concurrently.
    if (dirInfo == null)
        return null;

    assert dirInfo.isDirectory();

    for (IgfsListingEntry entry : dirInfo.listing().values()) {
        if (entry.isFile()) {
            IgfsEntryInfo fileInfo = info(entry.fileId());

            // Qualifies if not excluded and has at least one affinity range in its file map.
            boolean qualifies = fileInfo != null
                && !exclude.contains(fileInfo.id())
                && fileInfo.fileMap() != null
                && !fileInfo.fileMap().ranges().isEmpty();

            if (qualifies)
                return fileInfo;
        }
        else {
            // Recurse into sub-directory.
            IgfsEntryInfo res = fileForFragmentizer0(entry.fileId(), exclude);

            if (res != null)
                return res;
        }
    }

    return null;
}
/**
 * List child files for specified file ID possibly skipping existing transaction.
 *
 * @param fileId File to list child files for.
 * @param skipTx Whether to skip existing transaction.
 * @return Directory listing for the specified file (empty map if the entry is missing).
 * @throws IgniteCheckedException If failed.
 */
private Map<String, IgfsListingEntry> directoryListing(IgniteUuid fileId, boolean skipTx)
    throws IgniteCheckedException {
    assert fileId != null;

    IgfsEntryInfo info;

    if (skipTx)
        // Read outside of any ongoing transaction.
        info = id2InfoPrj.getAllOutTx(Collections.singleton(fileId)).get(fileId);
    else
        info = getInfo(fileId);

    if (info == null)
        return Collections.emptyMap();

    return info.listing();
}
/**
 * Add file into file system structure. Do not create new transaction expecting that the one already exists.
 *
 * @param parentId Parent file ID.
 * @param fileName File name in the parent's listing.
 * @param newFileInfo File info to store in the parent's listing.
 * @return File id already stored in meta cache or {@code null} if passed file info was stored.
 * @throws IgniteCheckedException If failed.
 */
private IgniteUuid putIfAbsentNonTx(IgniteUuid parentId, String fileName, IgfsEntryInfo newFileInfo)
    throws IgniteCheckedException {
    if (log.isDebugEnabled())
        log.debug("Locking parent id [parentId=" + parentId + ", fileName=" + fileName + ", newFileInfo=" +
            newFileInfo + ']');

    validTxState(true);

    // Lock only parent file ID.
    IgfsEntryInfo parentInfo = info(parentId);

    if (parentInfo == null)
        throw fsException(new IgfsPathNotFoundException("Failed to lock parent directory (not found): " +
            parentId));

    if (!parentInfo.isDirectory())
        throw fsException(new IgfsPathIsNotDirectoryException("Parent file is not a directory: " + parentInfo));

    IgfsListingEntry existing = parentInfo.listing().get(fileName);

    // A child with this name already exists -> report its ID instead of creating a new entry.
    if (existing != null)
        return existing.fileId();

    createNewEntry(newFileInfo, parentId, fileName);

    return null;
}
/**
 * Move routine: moves or renames the entry at the source path to the destination path.
 *
 * @param srcPath Source path.
 * @param dstPath Destination path.
 * @throws IgniteCheckedException In case of exception.
 */
public void move(IgfsPath srcPath, IgfsPath dstPath) throws IgniteCheckedException {
    if (busyLock.enterBusy()) {
        try {
            validTxState(false);

            // Prepare path IDs.
            IgfsPathIds srcPathIds = pathIds(srcPath);
            IgfsPathIds dstPathIds = pathIds(dstPath);

            // Source path must exist.
            if (!srcPathIds.allExists())
                throw new IgfsPathNotFoundException("Failed to perform move because source path is not " +
                    "found: " + srcPath);

            // At this point we need to understand name of resulting entry. It will be either destination leaf
            // or source leaf depending on existence.
            String dstName;

            if (dstPathIds.lastExists())
                // Full destination path exists -> moving INTO it, so keep the source name.
                dstName = srcPathIds.lastPart();
            else {
                if (dstPathIds.lastParentExists()) {
                    // Destination leaf doesn't exist -> this is a rename; use destination name.
                    dstName = dstPathIds.lastPart();

                    dstPathIds = dstPathIds.parent();
                }
                else
                    // Destination parent is not found either -> exception.
                    throw new IgfsPathNotFoundException("Failed to perform move because destination path is not " +
                        "found: " + dstPath.parent());
            }

            // Lock participating IDs (sorted set gives a consistent lock order).
            final Set<IgniteUuid> lockIds = new TreeSet<>(PATH_ID_SORTING_COMPARATOR);

            srcPathIds.addExistingIds(lockIds, relaxed);
            dstPathIds.addExistingIds(lockIds, relaxed);

            try (GridNearTxLocal tx = startTx()) {
                // Obtain the locks.
                final Map<IgniteUuid, IgfsEntryInfo> lockInfos = lockIds(lockIds);

                // Verify integrity of source and destination paths.
                if (!srcPathIds.verifyIntegrity(lockInfos, relaxed))
                    throw new IgfsPathNotFoundException("Failed to perform move because source directory " +
                        "structure changed concurrently [src=" + srcPath + ", dst=" + dstPath + ']');

                if (!dstPathIds.verifyIntegrity(lockInfos, relaxed))
                    throw new IgfsPathNotFoundException("Failed to perform move because destination directory " +
                        "structure changed concurrently [src=" + srcPath + ", dst=" + dstPath + ']');

                // Additional check: is the destination a directory?
                IgfsEntryInfo dstParentInfo = lockInfos.get(dstPathIds.lastId());

                if (dstParentInfo.isFile())
                    throw new IgfsPathAlreadyExistsException("Failed to perform move because destination points " +
                        "to existing file [src=" + srcPath + ", dst=" + dstPath + ']');

                // Additional check: does the destination already have a child with the same name?
                if (dstParentInfo.hasChild(dstName))
                    throw new IgfsPathAlreadyExistsException("Failed to perform move because destination already " +
                        "contains entry with the same name existing file [src=" + srcPath +
                        ", dst=" + dstPath + ']');

                // Actual move: remove from source parent and add to destination target.
                IgfsEntryInfo srcParentInfo = lockInfos.get(srcPathIds.lastParentId());

                IgfsEntryInfo srcInfo = lockInfos.get(srcPathIds.lastId());
                String srcName = srcPathIds.lastPart();
                IgfsListingEntry srcEntry = srcParentInfo.listing().get(srcName);

                transferEntry(srcEntry, srcParentInfo.id(), srcName, dstParentInfo.id(), dstName);

                tx.commit();

                // Fire rename events only after successful commit.
                IgfsPath newPath = new IgfsPath(dstPathIds.path(), dstName);

                IgfsUtils.sendEvents(igfsCtx.kernalContext(), srcPath, newPath,
                    srcInfo.isFile() ? EVT_IGFS_FILE_RENAMED : EVT_IGFS_DIR_RENAMED);
            }
        }
        finally {
            busyLock.leaveBusy();
        }
    }
    else
        throw new IllegalStateException("Failed to perform move because Grid is stopping [srcPath=" +
            srcPath + ", dstPath=" + dstPath + ']');
}
/**
 * Move or rename file in existing transaction.
 *
 * @param fileId File ID to move or rename.
 * @param srcFileName Original file name in the parent's listing.
 * @param srcParentId Parent directory ID.
 * @param destFileName New file name in the parent's listing after moving.
 * @param destParentId New parent directory ID.
 * @throws IgniteCheckedException If failed.
 */
private void moveNonTx(IgniteUuid fileId, String srcFileName, IgniteUuid srcParentId, String destFileName,
    IgniteUuid destParentId) throws IgniteCheckedException {
    validTxState(true);

    assert fileId != null;
    assert srcFileName != null;
    assert srcParentId != null;
    assert destFileName != null;
    assert destParentId != null;

    // Moving an entry onto itself is a no-op.
    if (srcParentId.equals(destParentId) && srcFileName.equals(destFileName)) {
        if (log.isDebugEnabled())
            log.debug("File is moved to itself [fileId=" + fileId +
                ", fileName=" + srcFileName + ", parentId=" + srcParentId + ']');

        return; // File is moved to itself.
    }

    // Lock file ID and parent IDs for this transaction.
    Map<IgniteUuid, IgfsEntryInfo> infoMap = lockIds(srcParentId, fileId, destParentId);

    IgfsEntryInfo srcInfo = infoMap.get(srcParentId);

    if (srcInfo == null)
        throw fsException(new IgfsPathNotFoundException("Failed to lock source directory (not found?)" +
            " [srcParentId=" + srcParentId + ']'));

    if (!srcInfo.isDirectory())
        throw fsException(new IgfsPathIsNotDirectoryException("Source is not a directory: " + srcInfo));

    IgfsEntryInfo destInfo = infoMap.get(destParentId);

    if (destInfo == null)
        throw fsException(new IgfsPathNotFoundException("Failed to lock destination directory (not found?)" +
            " [destParentId=" + destParentId + ']'));

    if (!destInfo.isDirectory())
        throw fsException(new IgfsPathIsNotDirectoryException("Destination is not a directory: " + destInfo));

    IgfsEntryInfo fileInfo = infoMap.get(fileId);

    if (fileInfo == null)
        throw fsException(new IgfsPathNotFoundException("Failed to lock target file (not found?) [fileId=" +
            fileId + ']'));

    IgfsListingEntry srcEntry = srcInfo.listing().get(srcFileName);

    // If source file does not exist or was re-created under the same name with a different ID.
    if (srcEntry == null || !srcEntry.fileId().equals(fileId))
        throw fsException(new IgfsPathNotFoundException("Failed to remove file name from the source directory" +
            " (file not found) [fileId=" + fileId + ", srcFileName=" + srcFileName +
            ", srcParentId=" + srcParentId + ", srcEntry=" + srcEntry + ']'));

    // If an entry with the destination name already exists.
    if (destInfo.hasChild(destFileName))
        throw fsException(new IgfsPathAlreadyExistsException("Failed to add file name into the destination " +
            " directory (file already exists) [fileId=" + fileId + ", destFileName=" + destFileName +
            ", destParentId=" + destParentId + ']'));

    transferEntry(srcEntry, srcParentId, srcFileName, destParentId, destFileName);
}
/**
 * Deletes (moves to TRASH) all elements under the root folder.
 *
 * @return The new ID of the artificially created folder containing all former root
 * elements moved to the TRASH folder, or {@code null} if root was already empty.
 * @throws IgniteCheckedException On error.
 */
@SuppressWarnings("RedundantCast")
IgniteUuid format() throws IgniteCheckedException {
    if (busyLock.enterBusy()) {
        try {
            validTxState(false);

            IgniteUuid trashId = IgfsUtils.randomTrashId();

            try (GridNearTxLocal tx = startTx()) {
                // NB: We may lock root because its id is less than any other id:
                final IgfsEntryInfo rootInfo = lockIds(IgfsUtils.ROOT_ID, trashId).get(IgfsUtils.ROOT_ID);

                assert rootInfo != null;

                Map<String, IgfsListingEntry> rootListingMap = rootInfo.listing();

                assert rootListingMap != null;

                if (rootListingMap.isEmpty())
                    return null; // Root is empty, nothing to do.

                // Construct new info and move locked entries from root to it.
                Map<String, IgfsListingEntry> transferListing = new HashMap<>(rootListingMap);

                IgfsEntryInfo newInfo = IgfsUtils.createDirectory(
                    IgniteUuid.randomUuid(),
                    transferListing,
                    (Map<String, String>) null
                );

                // Place the container directory directly under trash, named by its own ID.
                createNewEntry(newInfo, trashId, newInfo.id().toString());

                // Remove listing entries from root.
                // Note that root directory properties and other attributes are preserved:
                id2InfoPrj.put(IgfsUtils.ROOT_ID, rootInfo.listing(null));

                tx.commit();

                // Wake the delete worker so it purges the trashed entries asynchronously.
                signalDeleteWorker();

                return newInfo.id();
            }
        }
        finally {
            busyLock.leaveBusy();
        }
    }
    else
        throw new IllegalStateException("Failed to perform format because Grid is stopping.");
}
/**
 * Whether operation must be re-tried because we have suspicious links which may break secondary file system
 * consistency.
 *
 * Example: we queried path /A/B/C and got IDs [ROOT_ID, A_ID, B_ID, null], but B's locked info still lists C
 * as a child — a concurrent file system update happened and the caller must re-try.
 *
 * @param pathIds Path IDs.
 * @param lockInfos Lock infos.
 * @return Whether to re-try.
 */
private static boolean isRetryForSecondary(IgfsPathIds pathIds, Map<IgniteUuid, IgfsEntryInfo> lockInfos) {
    // All components resolved -> nothing suspicious.
    if (pathIds.allExists())
        return false;

    // Walk down the chain of locked entries to find the deepest one.
    IgfsEntryInfo lastLockedInfo = null;
    int lastLockedIdx = -1;

    while (lastLockedIdx < pathIds.lastExistingIndex()) {
        IgfsEntryInfo next = lockInfos.get(pathIds.id(lastLockedIdx + 1));

        if (next == null)
            break;

        lastLockedInfo = next;

        lastLockedIdx++;
    }

    assert lastLockedIdx < pathIds.count();

    // If the deepest locked entry still references the next (supposedly missing) component
    // by name, the structure changed concurrently -> re-try.
    return lastLockedInfo != null && lastLockedInfo.listing().containsKey(pathIds.part(lastLockedIdx + 1));
}
/**
 * Move path to the trash directory (actual data removal is performed asynchronously by the delete worker).
 *
 * @param path Path.
 * @param recursive Recursive flag.
 * @param secondaryFs Secondary file system (optional).
 * @return Delete result holding the success flag and the info of the entry moved under trash.
 * @throws IgniteCheckedException If failed.
 */
IgfsDeleteResult softDelete(final IgfsPath path, final boolean recursive,
    @Nullable IgfsSecondaryFileSystem secondaryFs) throws IgniteCheckedException {
    // Retry loop: restarts when a concurrent update is detected in DUAL mode (see isRetryForSecondary).
    while (true) {
        if (busyLock.enterBusy()) {
            try {
                validTxState(false);

                IgfsPathIds pathIds = pathIds(path);

                if (!pathIds.allExists() && secondaryFs == null)
                    return new IgfsDeleteResult(false, null);

                IgniteUuid victimId = pathIds.lastId();
                String victimName = pathIds.lastPart();

                if (IgfsUtils.isRootId(victimId))
                    throw new IgfsException("Cannot remove root directory");

                // Prepare IDs to lock (sorted set gives a consistent lock order).
                SortedSet<IgniteUuid> allIds = new TreeSet<>(PATH_ID_SORTING_COMPARATOR);

                pathIds.addExistingIds(allIds, relaxed);

                IgniteUuid trashId = IgfsUtils.randomTrashId();

                allIds.add(trashId);

                try (GridNearTxLocal tx = startTx()) {
                    // Lock participants.
                    Map<IgniteUuid, IgfsEntryInfo> lockInfos = lockIds(allIds);

                    // Concurrent update detected in DUAL mode -> restart the whole operation.
                    if (secondaryFs != null && isRetryForSecondary(pathIds, lockInfos))
                        continue;

                    // Ensure that all participants are still in place.
                    if (!pathIds.allExists() || !pathIds.verifyIntegrity(lockInfos, relaxed)) {
                        // For DUAL mode we will try to update the underlying FS still. Note we do that inside TX.
                        if (secondaryFs != null) {
                            boolean res = secondaryFs.delete(path, recursive);

                            return new IgfsDeleteResult(res, null);
                        }
                        else
                            return new IgfsDeleteResult(false, null);
                    }

                    IgfsEntryInfo victimInfo = lockInfos.get(victimId);

                    // Cannot delete non-empty directory if recursive flag is not set.
                    if (!recursive && victimInfo.hasChildren())
                        throw new IgfsDirectoryNotEmptyException("Failed to remove directory (directory is not " +
                            "empty and recursive flag is not set).");

                    // Prepare trash data.
                    IgfsEntryInfo trashInfo = lockInfos.get(trashId);

                    // Trash name encodes the original path and ID, so it is unique within trash.
                    final String trashName = IgfsUtils.composeNameForTrash(path, victimId);

                    assert !trashInfo.hasChild(trashName) : "Failed to add file name into the " +
                        "destination directory (file already exists) [destName=" + trashName + ']';

                    IgniteUuid parentId = pathIds.lastParentId();

                    IgfsEntryInfo parentInfo = lockInfos.get(parentId);

                    // Propagate call to the secondary file system.
                    if (secondaryFs != null && !secondaryFs.delete(path, recursive))
                        return new IgfsDeleteResult(false, null);

                    transferEntry(parentInfo.listing().get(victimName), parentId, victimName, trashId, trashName);

                    tx.commit();

                    // Wake the delete worker so it purges the trashed entry asynchronously.
                    signalDeleteWorker();

                    return new IgfsDeleteResult(true, victimInfo);
                }
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to perform soft delete because Grid is " +
                "stopping [path=" + path + ']');
    }
}
/**
 * Remove listing entries of the given parent.
 * This operation actually deletes directories from TRASH, is used solely by IgfsDeleteWorker.
 *
 * @param parentId Parent ID.
 * @param listing Listing entries.
 * @return Collection of really deleted entries.
 * @throws IgniteCheckedException If failed.
 */
Collection<IgniteUuid> delete(IgniteUuid parentId, Map<String, IgfsListingEntry> listing)
    throws IgniteCheckedException {
    if (busyLock.enterBusy()) {
        try {
            assert parentId != null;
            assert listing != null;

            validTxState(false);

            try (GridNearTxLocal tx = startTx()) {
                Collection<IgniteUuid> res = new HashSet<>();

                // Obtain all necessary locks in one hop (parent + every child).
                IgniteUuid[] allIds = new IgniteUuid[listing.size() + 1];

                allIds[0] = parentId;

                int i = 1;

                for (IgfsListingEntry childEntry : listing.values())
                    allIds[i++] = childEntry.fileId();

                Map<IgniteUuid, IgfsEntryInfo> locks = lockIds(allIds);

                IgfsEntryInfo parentInfo = locks.get(parentId);

                // Ensure parent is still in place.
                if (parentInfo != null) {
                    Map<String, IgfsListingEntry> parentListing = parentInfo.listing();

                    Map<String, IgfsListingEntry> newListing = new HashMap<>(parentListing.size(), 1.0f);

                    newListing.putAll(parentListing);

                    // Remove child entries if possible.
                    for (Map.Entry<String, IgfsListingEntry> entry : listing.entrySet()) {
                        String childName = entry.getKey();
                        IgniteUuid childId = entry.getValue().fileId();

                        IgfsEntryInfo entryInfo = locks.get(childId);

                        if (entryInfo != null) {
                            // File must be locked for deletion:
                            assert entryInfo.isDirectory() || IgfsUtils.DELETE_LOCK_ID.equals(entryInfo.lockId());

                            // Delete only files or empty folders (non-empty folders are handled on later passes).
                            if (!entryInfo.hasChildren()) {
                                id2InfoPrj.remove(childId);

                                newListing.remove(childName);

                                res.add(childId);
                            }
                        }
                        else {
                            // Entry was deleted concurrently; just drop it from the listing.
                            newListing.remove(childName);

                            res.add(childId);
                        }
                    }

                    // Update parent listing.
                    id2InfoPrj.put(parentId, parentInfo.listing(newListing));
                }

                tx.commit();

                return res;
            }
        }
        finally {
            busyLock.leaveBusy();
        }
    }
    else
        throw new IllegalStateException("Failed to perform delete because Grid is stopping [parentId=" +
            parentId + ", listing=" + listing + ']');
}
    /**
     * Remove a single entry from the metadata listing.
     * Used solely by IgfsDeleteWorker.
     *
     * @param parentId Parent ID.
     * @param name Entry name within the parent listing.
     * @param id Entry ID.
     * @return {@code True} in case the entry really was removed from the cache by this call.
     * @throws IgniteCheckedException If failed.
     */
    boolean delete(IgniteUuid parentId, String name, IgniteUuid id) throws IgniteCheckedException {
        if (busyLock.enterBusy()) {
            try {
                // Must NOT be called from inside an existing transaction.
                validTxState(false);
                try (GridNearTxLocal tx = startTx()) {
                    // Lock both parent and victim in one hop.
                    Map<IgniteUuid, IgfsEntryInfo> infos = lockIds(parentId, id);
                    IgfsEntryInfo victim = infos.get(id);
                    if (victim == null)
                        return false;
                    // A file may only be removed here when it carries the delete lock.
                    assert victim.isDirectory() || IgfsUtils.DELETE_LOCK_ID.equals(victim.lockId()) :
                        " isDir: " + victim.isDirectory() + ", lockId: " + victim.lockId();
                    // Proceed only in case both parent and child exist.
                    if (infos.containsKey(parentId) && infos.containsKey(id)) {
                        IgfsEntryInfo parentInfo = infos.get(parentId);
                        assert parentInfo != null;
                        IgfsListingEntry childEntry = parentInfo.listing().get(name);
                        if (childEntry != null)
                            id2InfoPrj.invoke(parentId, new IgfsMetaDirectoryListingRemoveProcessor(name, id));
                        id2InfoPrj.remove(id);
                        tx.commit();
                        return true;
                    }
                    // Parent vanished concurrently: nothing removed, tx rolls back on close.
                    return false;
                }
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to perform delete because Grid is stopping [parentId=" +
                parentId + ", name=" + name + ", id=" + id + ']');
    }
    /**
     * Update file info (file properties) in cache within an existing transaction.
     *
     * @param fileId File ID to update information for.
     * @param props Properties to set for the file (must be non-empty).
     * @return Updated file info or {@code null} if such file ID not found.
     * @throws IgniteCheckedException If operation failed.
     */
    @Nullable private IgfsEntryInfo updatePropertiesNonTx(final IgniteUuid fileId, Map<String, String> props)
        throws IgniteCheckedException {
        assert fileId != null;
        assert !F.isEmpty(props) : "Expects not-empty file's properties";
        // Caller must already hold an active transaction.
        validTxState(true);
        if (log.isDebugEnabled())
            log.debug("Update file properties [fileId=" + fileId + ", props=" + props + ']');
        try {
            final IgfsEntryInfo oldInfo = info(fileId);
            if (oldInfo == null)
                return null;
            return invokeAndGet(fileId, new IgfsMetaUpdatePropertiesProcessor(props));
        }
        catch (GridClosureException e) {
            // Unwrap the closure exception into its checked cause.
            throw U.cast(e);
        }
    }
/**
* Update file info (file properties) in cache.
*
* @param fileId File ID to update information for.
* @param props Properties to set for the file.
* @return Updated file info or {@code null} if such file ID not found.
* @throws IgniteCheckedException If operation failed.
*/
@Nullable public IgfsEntryInfo updateProperties(IgniteUuid fileId, Map<String, String> props)
throws IgniteCheckedException {
if (busyLock.enterBusy()) {
try {
validTxState(false);
try (GridNearTxLocal tx = startTx()) {
IgfsEntryInfo info = updatePropertiesNonTx(fileId, props);
tx.commit();
return info;
}
}
finally {
busyLock.leaveBusy();
}
}
else
throw new IllegalStateException("Failed to update properties because Grid is stopping [fileId=" + fileId +
", props=" + props + ']');
}
    /**
     * Reserve space for file.
     *
     * @param fileId File ID.
     * @param space Space to reserve, in bytes.
     * @param affRange Affinity range.
     * @return New file info.
     * @throws IgniteCheckedException If the file was deleted concurrently or the update failed.
     */
    public IgfsEntryInfo reserveSpace(IgniteUuid fileId, long space, IgfsFileAffinityRange affRange)
        throws IgniteCheckedException {
        // Must NOT be called from inside an existing transaction.
        validTxState(false);
        if (busyLock.enterBusy()) {
            try {
                if (log.isDebugEnabled())
                    log.debug("Reserve file space: " + fileId);
                try (GridNearTxLocal tx = startTx()) {
                    // Lock file ID for this transaction.
                    IgfsEntryInfo oldInfo = info(fileId);
                    if (oldInfo == null)
                        throw fsException("File has been deleted concurrently: " + fileId);
                    IgfsEntryInfo newInfo =
                        invokeAndGet(fileId, new IgfsMetaFileReserveSpaceProcessor(space, affRange));
                    tx.commit();
                    return newInfo;
                }
                catch (GridClosureException e) {
                    // Unwrap the closure exception into its checked cause.
                    throw U.cast(e);
                }
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to reserve file space because Grid is stopping:" + fileId);
    }
    /**
     * Update file info in cache by applying the given entry processor in its own transaction.
     *
     * @param fileId File ID to update information for.
     * @param proc Entry processor to invoke.
     * @return Updated file info or {@code null} if such file ID not found.
     * @throws IgniteCheckedException If the processor returns {@code null}, changes the entry's
     *      identity (ID or file/directory type), or the update fails.
     */
    @Nullable public IgfsEntryInfo updateInfo(IgniteUuid fileId,
        EntryProcessor<IgniteUuid, IgfsEntryInfo, IgfsEntryInfo> proc) throws IgniteCheckedException {
        // Must NOT be called from inside an existing transaction.
        validTxState(false);
        assert fileId != null;
        assert proc != null;
        if (busyLock.enterBusy()) {
            try {
                if (log.isDebugEnabled())
                    log.debug("Update file info [fileId=" + fileId + ", proc=" + proc + ']');
                try (GridNearTxLocal tx = startTx()) {
                    // Lock file ID for this transaction.
                    IgfsEntryInfo oldInfo = info(fileId);
                    if (oldInfo == null)
                        return null; // File not found.
                    IgfsEntryInfo newInfo = invokeAndGet(fileId, proc);
                    // Sanity checks: the processor must transform the entry, not replace its identity.
                    if (newInfo == null)
                        throw fsException("Failed to update file info with null value" +
                            " [oldInfo=" + oldInfo + ", newInfo=null, proc=" + proc + ']');
                    if (!oldInfo.id().equals(newInfo.id()))
                        throw fsException("Failed to update file info (file IDs differ)" +
                            " [oldInfo=" + oldInfo + ", newInfo=" + newInfo + ", proc=" + proc + ']');
                    if (oldInfo.isDirectory() != newInfo.isDirectory())
                        throw fsException("Failed to update file info (file types differ)" +
                            " [oldInfo=" + oldInfo + ", newInfo=" + newInfo + ", proc=" + proc + ']');
                    tx.commit();
                    return newInfo;
                }
                catch (GridClosureException e) {
                    // Unwrap the closure exception into its checked cause.
                    throw U.cast(e);
                }
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to update file system entry info because Grid is stopping: " +
                fileId);
    }
    /**
     * Mkdirs implementation. Retries in a loop when the directory structure changes concurrently.
     *
     * @param path The path to create.
     * @param props The properties to use for created directories.
     * @return True if a directory was created during the operation.
     * @throws IgniteCheckedException If a non-directory file exists on the requested path, and in case of other errors.
     */
    boolean mkdirs(final IgfsPath path, final Map<String, String> props) throws IgniteCheckedException {
        // Must NOT be called from inside an existing transaction.
        validTxState(false);
        while (true) {
            if (busyLock.enterBusy()) {
                try {
                    // Prepare path IDs.
                    IgfsPathIds pathIds = pathIds(path);
                    // Prepare lock IDs. Essentially, they consist of two parts: existing IDs and potential new IDs.
                    // Sorted to give a deterministic lock order and avoid deadlocks.
                    Set<IgniteUuid> lockIds = new TreeSet<>(PATH_ID_SORTING_COMPARATOR);
                    pathIds.addExistingIds(lockIds, relaxed);
                    pathIds.addSurrogateIds(lockIds);
                    // Start TX.
                    try (GridNearTxLocal tx = startTx()) {
                        final Map<IgniteUuid, IgfsEntryInfo> lockInfos = lockIds(lockIds);
                        if (!pathIds.verifyIntegrity(lockInfos, relaxed))
                            // Directory structure changed concurrently. So we simply re-try.
                            continue;
                        // Check if the whole structure is already in place.
                        if (pathIds.allExists()) {
                            if (lockInfos.get(pathIds.lastExistingId()).isDirectory())
                                return false;
                            else
                                throw new IgfsParentNotDirectoryException("Failed to create directory (parent " +
                                    "element is not a directory)");
                        }
                        IgfsPathsCreateResult res = createDirectory(pathIds, lockInfos, props);
                        // Null result means a concurrent modification was detected: re-try.
                        if (res == null)
                            continue;
                        // Commit TX.
                        tx.commit();
                        // Events are fired only after the transaction committed successfully.
                        generateCreateEvents(res.createdPaths(), false);
                        // We are done.
                        return true;
                    }
                }
                finally {
                    busyLock.leaveBusy();
                }
            }
            else
                throw new IllegalStateException("Failed to mkdir because Grid is stopping. [path=" + path + ']');
        }
    }
/**
* Set sampling flag.
*
* @param val Sampling flag state or {@code null} to clear sampling state and mark it as "not set".
* @return {@code True} if sampling mode was actually changed by this call.
* @throws IgniteCheckedException If failed.
*/
public boolean sampling(Boolean val) throws IgniteCheckedException {
if (busyLock.enterBusy()) {
try {
validTxState(false);
try (GridNearTxLocal tx = startTx()) {
Object prev = val != null ? metaCache.getAndPut(sampling, val) : metaCache.getAndRemove(sampling);
tx.commit();
return !F.eq(prev, val);
}
}
finally {
busyLock.leaveBusy();
}
}
else
throw new IllegalStateException("Failed to set sampling flag because Grid is stopping.");
}
/**
* Get sampling flag state.
*
* @return {@code True} in case sampling is enabled, {@code false} otherwise or {@code null} in case sampling
* is not set.
* @throws IgniteCheckedException If failed.
*/
public Boolean sampling() throws IgniteCheckedException {
if (busyLock.enterBusy()) {
try {
validTxState(false);
Object val = metaCache.get(sampling);
return (val == null || !(val instanceof Boolean)) ? null : (Boolean)val;
}
finally {
busyLock.leaveBusy();
}
}
else
throw new IllegalStateException("Failed to get sampling flag because Grid is stopping.");
}
/**
* Put new entry to meta cache immediately linking it to parent.
*
* @param info Info to put.
* @param parentId Parent ID.
* @param name Name in parent.
* @throws IgniteCheckedException If failed.
*/
private void createNewEntry(IgfsEntryInfo info, IgniteUuid parentId, String name) throws IgniteCheckedException {
validTxState(true);
if (!id2InfoPrj.putIfAbsent(info.id(), info))
throw fsException("Failed to create new metadata entry due to ID conflict: " + info.id());
if (parentId != null)
id2InfoPrj.invoke(parentId, new IgfsMetaDirectoryListingAddProcessor(name, new IgfsListingEntry(info)));
}
/**
* Transfer entry from one directory to another.
*
* @param entry Entry to be transferred.
* @param srcId Source ID.
* @param srcName Source name.
* @param destId Destination ID.
* @param destName Destination name.
* @throws IgniteCheckedException If failed.
*/
private void transferEntry(IgfsListingEntry entry, IgniteUuid srcId, String srcName,
IgniteUuid destId, String destName) throws IgniteCheckedException {
validTxState(true);
if (F.eq(srcId, destId))
id2InfoPrj.invoke(srcId, new IgfsMetaDirectoryListingRenameProcessor(srcName, destName));
else {
Map<IgniteUuid, EntryProcessor<IgniteUuid, IgfsEntryInfo, Void>> procMap = new HashMap<>();
procMap.put(srcId, new IgfsMetaDirectoryListingRemoveProcessor(srcName, entry.fileId()));
procMap.put(destId, new IgfsMetaDirectoryListingAddProcessor(destName, entry));
id2InfoPrj.invokeAll(procMap);
}
}
/**
* Invoke lock processor.
*
* @param id File ID.
* @param del Whether lock is taken for delete.
* @return Resulting file info.
* @throws IgniteCheckedException If failed.
*/
private IgfsEntryInfo invokeLock(IgniteUuid id, boolean del) throws IgniteCheckedException {
return invokeAndGet(id, new IgfsMetaFileLockProcessor(createFileLockId(del)));
}
/**
* Invoke some processor and return new value.
*
* @param id ID.
* @param proc Processor.
* @return New file info.
* @throws IgniteCheckedException If failed.
*/
private IgfsEntryInfo invokeAndGet(IgniteUuid id, EntryProcessor<IgniteUuid, IgfsEntryInfo, IgfsEntryInfo> proc)
throws IgniteCheckedException {
validTxState(true);
EntryProcessorResult<IgfsEntryInfo> res = id2InfoPrj.invoke(id, proc);
assert res != null;
return res.get();
}
/**
* Get info.
*
* @param id ID.
* @return Info.
* @throws IgniteCheckedException If failed.
*/
@Nullable private IgfsEntryInfo getInfo(IgniteUuid id) throws IgniteCheckedException {
return id2InfoPrj.get(id);
}
/**
* Get several infos.
*
* @param ids IDs.
* @return Infos map.
* @throws IgniteCheckedException If failed.
*/
private Map<IgniteUuid, IgfsEntryInfo> getInfos(Collection<IgniteUuid> ids) throws IgniteCheckedException {
return id2InfoPrj.getAll(ids);
}
    /**
     * A delegate method that performs file creation in the synchronization task.
     *
     * @param fs File system.
     * @param path Path.
     * @param simpleCreate "Simple create" flag.
     * @param props Properties.
     * @param overwrite Overwrite flag.
     * @param bufSize Buffer size.
     * @param replication Replication factor.
     * @param blockSize Block size.
     * @param affKey Affinity key.
     * @param infos Map from paths to corresponding infos.
     * @param pendingEvts A non-null collection the events are to be accumulated in.
     * @param t1 A single-object tuple to hold the created output stream.
     * @return Output stream descriptor.
     * @throws Exception On error.
     */
    IgfsCreateResult onSuccessCreate(IgfsSecondaryFileSystem fs, IgfsPath path,
        boolean simpleCreate, @Nullable final Map<String, String> props, boolean overwrite,
        int bufSize, short replication, long blockSize, IgniteUuid affKey, Map<IgfsPath, IgfsEntryInfo> infos,
        final Deque<IgfsEvent> pendingEvts, final T1<OutputStream> t1) throws Exception {
        // Runs within the synchronization transaction started by the caller.
        validTxState(true);
        assert !infos.isEmpty();
        // Determine the first existing parent (the deepest path present in 'infos').
        IgfsPath parentPath = null;
        for (IgfsPath curPath : infos.keySet()) {
            if (parentPath == null || curPath.isSubDirectoryOf(parentPath))
                parentPath = curPath;
        }
        assert parentPath != null;
        IgfsEntryInfo parentInfo = infos.get(parentPath);
        // Delegate to the secondary file system.
        OutputStream out = simpleCreate ? fs.create(path, overwrite) :
            fs.create(path, bufSize, overwrite, replication, blockSize, props);
        t1.set(out);
        IgfsPath parent0 = path.parent();
        assert parent0 != null : "path.parent() is null (are we creating ROOT?): " + path;
        // If some of the parent directories were missing, synchronize again.
        if (!parentPath.equals(parent0)) {
            parentInfo = synchronize(fs, parentPath, parentInfo, parent0, true, null);
            // Fire notification about missing directories creation.
            if (evts.isRecordable(EventType.EVT_IGFS_DIR_CREATED)) {
                IgfsPath evtPath = parent0;
                // Walk up from the immediate parent, prepending so events go root-first.
                while (!parentPath.equals(evtPath)) {
                    pendingEvts.addFirst(new IgfsEvent(evtPath, locNode,
                        EventType.EVT_IGFS_DIR_CREATED));
                    evtPath = evtPath.parent();
                    assert evtPath != null; // If this fails, then ROOT does not exist.
                }
            }
        }
        // Get created file info.
        IgfsFile status = fs.info(path);
        if (status == null)
            throw fsException("Failed to open output stream to the file created in " +
                "the secondary file system because it no longer exists: " + path);
        else if (status.isDirectory())
            throw fsException("Failed to open output stream to the file created in " +
                "the secondary file system because the path points to a directory: " + path);
        // Mirror the secondary file's metadata locally, locked for writing from the start.
        IgfsEntryInfo newInfo = IgfsUtils.createFile(
            IgniteUuid.randomUuid(),
            igfsCtx.configuration().getBlockSize(),
            status.length(),
            affKey,
            createFileLockId(false),
            igfsCtx.igfs().evictExclude(path, false),
            status.properties(),
            status.accessTime(),
            status.modificationTime()
        );
        // Add new file info to the listing optionally removing the previous one.
        assert parentInfo != null;
        IgniteUuid oldId = putIfAbsentNonTx(parentInfo.id(), path.name(), newInfo);
        if (oldId != null) {
            IgfsEntryInfo oldInfo = info(oldId);
            assert oldInfo != null; // Otherwise cache is in inconsistent state.
            // The contract is that we cannot overwrite a file locked for writing:
            if (oldInfo.lockId() != null)
                throw fsException("Failed to overwrite file (file is opened for writing) [path=" +
                    path + ", fileId=" + oldId + ", lockId=" + oldInfo.lockId() + ']');
            id2InfoPrj.remove(oldId); // Remove the old one.
            id2InfoPrj.invoke(parentInfo.id(), new IgfsMetaDirectoryListingRemoveProcessor(
                path.name(), parentInfo.listing().get(path.name()).fileId()));
            createNewEntry(newInfo, parentInfo.id(), path.name()); // Put new one.
            // Schedule removal of the old file's data blocks.
            igfsCtx.data().delete(oldInfo);
        }
        // Record CREATE event if needed.
        if (oldId == null && evts.isRecordable(EventType.EVT_IGFS_FILE_CREATED))
            pendingEvts.add(new IgfsEvent(path, locNode, EventType.EVT_IGFS_FILE_CREATED));
        return new IgfsCreateResult(newInfo, out);
    }
    /**
     * Append to a file in DUAL mode.
     *
     * @param fs Secondary file system.
     * @param path Path.
     * @param bufSize Buffer size.
     * @param create Create flag: when set, a missing file is created instead of failing.
     * @return Output stream descriptor.
     * @throws IgniteCheckedException If output stream open for append has failed.
     */
    public IgfsCreateResult appendDual(final IgfsSecondaryFileSystem fs, final IgfsPath path,
        final int bufSize, final boolean create) throws IgniteCheckedException {
        if (busyLock.enterBusy()) {
            try {
                assert fs != null;
                assert path != null;
                // Events to fire (can be done outside of a transaction).
                final Deque<IgfsEvent> pendingEvts = new LinkedList<>();
                SynchronizationTask<IgfsCreateResult> task =
                    new SynchronizationTask<IgfsCreateResult>() {
                        /** Container for the secondary file system output stream. */
                        private final T1<OutputStream> outT1 = new T1<>(null);
                        @Override public IgfsCreateResult onSuccess(Map<IgfsPath,
                            IgfsEntryInfo> infos) throws Exception {
                            validTxState(true);
                            final IgfsEntryInfo info = infos.get(path);
                            final IgfsEntryInfo lockedInfo;
                            if (info == null)
                                // File does not exist locally: fall back to creation.
                                return onSuccessCreate(fs, path, true/*simpleCreate*/, null,
                                    false/*overwrite*/, bufSize, (short)0, 0, null, infos, pendingEvts, outT1);
                            else {
                                if (info.isDirectory())
                                    throw fsException("Failed to open output stream to the file in the " +
                                        "secondary file system because the path points to a directory: " + path);
                                outT1.set(fs.append(path, bufSize, false, null));
                                // Synchronize file ending: pull the trailing partial block from the
                                // secondary file system so local data matches its current length.
                                long len = info.length();
                                int blockSize = info.blockSize();
                                int remainder = (int) (len % blockSize);
                                if (remainder > 0) {
                                    int blockIdx = (int) (len / blockSize);
                                    try (IgfsSecondaryFileSystemPositionedReadable reader = fs.open(path, bufSize)) {
                                        IgniteInternalFuture<byte[]> fut =
                                            igfsCtx.data().dataBlock(info, path, blockIdx, reader);
                                        assert fut != null;
                                        fut.get();
                                    }
                                }
                                if (info.lockId() != null) {
                                    throw fsException("Failed to open file (file is opened for writing) [path=" +
                                        path + ", fileId=" + info.id() + ", lockId=" + info.lockId() + ']');
                                }
                                // Set lock and return.
                                lockedInfo = invokeLock(info.id(), false);
                            }
                            if (evts.isRecordable(EventType.EVT_IGFS_FILE_OPENED_WRITE))
                                pendingEvts.add(new IgfsEvent(path, locNode, EventType.EVT_IGFS_FILE_OPENED_WRITE));
                            return new IgfsCreateResult(lockedInfo, outT1.get());
                        }
                        @Override public IgfsCreateResult onFailure(@Nullable Exception err)
                            throws IgniteCheckedException {
                            // Close the secondary stream if it was already opened before the failure.
                            U.closeQuiet(outT1.get());
                            U.error(log, "File append in DUAL mode failed [path=" + path + ", bufferSize=" + bufSize +
                                ']', err);
                            throw new IgniteCheckedException("Failed to append to the file due to secondary file " +
                                "system exception: " + path, err);
                        }
                    };
                try {
                    return synchronizeAndExecute(task, fs, !create/*strict*/, path);
                }
                finally {
                    // Fire accumulated events after the transaction has completed.
                    for (IgfsEvent evt : pendingEvts)
                        evts.record(evt);
                }
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to append to file in DUAL mode because Grid is stopping: " + path);
    }
/**
* Get info for the given path.
*
* @param path Path.
* @return Info.
* @throws IgniteCheckedException If failed.
*/
@Nullable public IgfsEntryInfo infoForPath(IgfsPath path) throws IgniteCheckedException {
return client ? runClientTask(new IgfsClientMetaInfoForPathCallable(cfg.getName(),
IgfsUserContext.currentUser(), path)) : info(fileId(path));
}
/**
* Get IDs for the given path.
*
* @param path Path.
* @return IDs.
* @throws IgniteCheckedException If failed.
*/
public List<IgniteUuid> idsForPath(IgfsPath path) throws IgniteCheckedException {
return client ? runClientTask(new IgfsClientMetaIdsForPathCallable(cfg.getName(),
IgfsUserContext.currentUser(), path)) : fileIds(path);
}
    /**
     * Open file in DUAL mode.
     *
     * @param fs Secondary file system.
     * @param path Path to open.
     * @param bufSize Buffer size.
     * @return Input stream descriptor.
     * @throws IgniteCheckedException If input stream open has failed.
     */
    public IgfsSecondaryInputStreamDescriptor openDual(final IgfsSecondaryFileSystem fs, final IgfsPath path,
        final int bufSize) throws IgniteCheckedException {
        if (busyLock.enterBusy()) {
            try {
                assert fs != null;
                assert path != null;
                // First, try getting file info without any transactions and synchronization.
                IgfsEntryInfo info = infoForPath(path);
                if (info != null) {
                    if (!info.isFile())
                        throw fsException(new IgfsPathIsDirectoryException("Failed to open file (not a file): " +
                            path));
                    return new IgfsSecondaryInputStreamDescriptor(info, lazySecondaryReader(fs, path, bufSize));
                }
                // If failed, try synchronize.
                SynchronizationTask<IgfsSecondaryInputStreamDescriptor> task =
                    new SynchronizationTask<IgfsSecondaryInputStreamDescriptor>() {
                        @Override public IgfsSecondaryInputStreamDescriptor onSuccess(
                            Map<IgfsPath, IgfsEntryInfo> infos) throws Exception {
                            IgfsEntryInfo info = infos.get(path);
                            if (info == null)
                                throw fsException(new IgfsPathNotFoundException("File not found: " + path));
                            if (!info.isFile())
                                throw fsException(new IgfsPathIsDirectoryException("Failed to open file " +
                                    "(not a file): " + path));
                            return new IgfsSecondaryInputStreamDescriptor(infos.get(path),
                                lazySecondaryReader(fs, path, bufSize));
                        }
                        @Override public IgfsSecondaryInputStreamDescriptor onFailure(@Nullable Exception err)
                            throws IgniteCheckedException {
                            U.error(log, "File open in DUAL mode failed [path=" + path + ", bufferSize=" + bufSize +
                                ']', err);
                            throw new IgniteCheckedException("Failed to open the path due to secondary file system " +
                                "exception: " + path, err);
                        }
                    };
                return synchronizeAndExecute(task, fs, false, path);
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to open file in DUAL mode because Grid is stopping: " + path);
    }
/**
* Create lazy secondary file system reader.
*
* @param fs File system.
* @param path Path.
* @param bufSize Buffer size.
* @return Lazy reader.
*/
private static IgfsLazySecondaryFileSystemPositionedReadable lazySecondaryReader(IgfsSecondaryFileSystem fs,
IgfsPath path, int bufSize) {
return new IgfsLazySecondaryFileSystemPositionedReadable(fs, path, bufSize);
}
    /**
     * Synchronizes a single path with the secondary file system.
     *
     * @param fs File system.
     * @param path Path.
     * @return File info or {@code null} if file not found.
     * @throws IgniteCheckedException If sync task failed.
     */
    @Nullable public IgfsEntryInfo synchronizeFileDual(final IgfsSecondaryFileSystem fs, final IgfsPath path)
        throws IgniteCheckedException {
        assert fs != null;
        assert path != null;
        if (busyLock.enterBusy()) {
            try {
                // First, try getting file info without any transactions and synchronization.
                IgfsEntryInfo info = infoForPath(path);
                if (info != null)
                    return info;
                // If failed, try synchronize.
                SynchronizationTask<IgfsEntryInfo> task =
                    new SynchronizationTask<IgfsEntryInfo>() {
                        @Override public IgfsEntryInfo onSuccess(Map<IgfsPath, IgfsEntryInfo> infos)
                            throws Exception {
                            // Synchronization already recreated the path locally; just return its info.
                            return infos.get(path);
                        }
                        @Override public IgfsEntryInfo onFailure(@Nullable Exception err) throws IgniteCheckedException {
                            throw new IgniteCheckedException("Failed to synchronize path due to secondary file " +
                                "system exception: " + path, err);
                        }
                    };
                return synchronizeAndExecute(task, fs, false, path);
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to synchronize file because Grid is stopping: " + path);
    }
    /**
     * Create directory in DUAL mode.
     *
     * @param fs Secondary file system.
     * @param path Path to create.
     * @param props Properties to be applied.
     * @return {@code True} in case the directory was created successfully.
     * @throws IgniteCheckedException If directory creation failed.
     */
    public boolean mkdirsDual(final IgfsSecondaryFileSystem fs, final IgfsPath path, final Map<String, String> props)
        throws IgniteCheckedException {
        if (busyLock.enterBusy()) {
            try {
                assert fs != null;
                assert path != null;
                if (path.parent() == null)
                    return true; // No additional handling for root directory is needed.
                // Events to fire (can be done outside of a transaction).
                final Deque<IgfsEvent> pendingEvts = new LinkedList<>();
                SynchronizationTask<Boolean> task = new SynchronizationTask<Boolean>() {
                    @Override public Boolean onSuccess(Map<IgfsPath, IgfsEntryInfo> infos) throws Exception {
                        // Create in the secondary file system first, then mirror locally.
                        fs.mkdirs(path, props);
                        assert !infos.isEmpty();
                        // Now perform synchronization again starting with the last created parent.
                        IgfsPath parentPath = null;
                        for (IgfsPath curPath : infos.keySet()) {
                            if (parentPath == null || curPath.isSubDirectoryOf(parentPath))
                                parentPath = curPath;
                        }
                        assert parentPath != null;
                        IgfsEntryInfo parentPathInfo = infos.get(parentPath);
                        synchronize(fs, parentPath, parentPathInfo, path, true, null);
                        if (evts.isRecordable(EventType.EVT_IGFS_DIR_CREATED)) {
                            IgfsPath evtPath = path;
                            // Walk up from the created path, prepending so events go root-first.
                            while (!parentPath.equals(evtPath)) {
                                pendingEvts.addFirst(new IgfsEvent(evtPath, locNode, EventType.EVT_IGFS_DIR_CREATED));
                                evtPath = evtPath.parent();
                                assert evtPath != null; // If this fails, then ROOT does not exist.
                            }
                        }
                        return true;
                    }
                    @Override public Boolean onFailure(@Nullable Exception err) throws IgniteCheckedException {
                        U.error(log, "Directory creation in DUAL mode failed [path=" + path + ", properties=" + props +
                            ']', err);
                        throw new IgniteCheckedException("Failed to create the path due to secondary file system " +
                            "exception: " + path, err);
                    }
                };
                try {
                    return synchronizeAndExecute(task, fs, false, path.parent());
                }
                finally {
                    // Fire accumulated events after the transaction has completed.
                    for (IgfsEvent evt : pendingEvts)
                        evts.record(evt);
                }
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to create directory in DUAL mode because Grid is stopping: " +
                path);
    }
    /**
     * Rename path in DUAL mode.
     *
     * @param fs Secondary file system.
     * @param src Source path.
     * @param dest Destination path.
     * @return Operation result ({@code false} only when renaming the root, which is not allowed).
     * @throws IgniteCheckedException If failed.
     */
    public boolean renameDual(final IgfsSecondaryFileSystem fs, final IgfsPath src, final IgfsPath dest) throws
        IgniteCheckedException {
        if (busyLock.enterBusy()) {
            try {
                assert fs != null;
                assert src != null;
                assert dest != null;
                if (src.parent() == null)
                    return false; // Root directory cannot be renamed.
                // Events to fire (can be done outside of a transaction).
                final Collection<IgfsEvent> pendingEvts = new LinkedList<>();
                SynchronizationTask<Boolean> task = new SynchronizationTask<Boolean>() {
                    @Override public Boolean onSuccess(Map<IgfsPath, IgfsEntryInfo> infos) throws Exception {
                        IgfsEntryInfo srcInfo = infos.get(src);
                        IgfsEntryInfo srcParentInfo = infos.get(src.parent());
                        IgfsEntryInfo destInfo = infos.get(dest);
                        IgfsEntryInfo destParentInfo = dest.parent() != null ? infos.get(dest.parent()) : null;
                        // Source path and destination (or destination parent) must exist.
                        if (srcInfo == null)
                            throw fsException(new IgfsPathNotFoundException("Failed to rename " +
                                "(source path not found): " + src));
                        if (destInfo == null && destParentInfo == null)
                            throw fsException(new IgfsPathNotFoundException("Failed to rename " +
                                "(destination path not found): " + dest));
                        // Delegate to the secondary file system.
                        fs.rename(src, dest);
                        // Rename was successful, perform compensation in the local file system.
                        if (destInfo == null)
                            // Plain rename: destination does not exist yet.
                            moveNonTx(srcInfo.id(), src.name(), srcParentInfo.id(), dest.name(), destParentInfo.id());
                        else {
                            // Move: destination exists, so the source is moved INTO it (must be a directory).
                            if (destInfo.isFile())
                                throw fsException("Failed to rename the path in the local file system " +
                                    "because destination path already exists and it is a file: " + dest);
                            else
                                moveNonTx(srcInfo.id(), src.name(), srcParentInfo.id(), src.name(), destInfo.id());
                        }
                        // Record event if needed.
                        if (srcInfo.isFile()) {
                            if (evts.isRecordable(EventType.EVT_IGFS_FILE_RENAMED))
                                pendingEvts.add(new IgfsEvent(
                                    src,
                                    destInfo == null ? dest : new IgfsPath(dest, src.name()),
                                    locNode,
                                    EventType.EVT_IGFS_FILE_RENAMED));
                        }
                        else if (evts.isRecordable(EventType.EVT_IGFS_DIR_RENAMED))
                            pendingEvts.add(new IgfsEvent(src, dest, locNode, EventType.EVT_IGFS_DIR_RENAMED));
                        return true;
                    }
                    @Override public Boolean onFailure(@Nullable Exception err) throws IgniteCheckedException {
                        U.error(log, "Path rename in DUAL mode failed [source=" + src + ", destination=" + dest + ']',
                            err);
                        throw new IgniteCheckedException("Failed to rename the path due to secondary file system " +
                            "exception: " + src, err);
                    }
                };
                try {
                    return synchronizeAndExecute(task, fs, false, src, dest);
                }
                finally {
                    // Fire accumulated events after the transaction has completed.
                    for (IgfsEvent evt : pendingEvts)
                        evts.record(evt);
                }
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to rename in DUAL mode because Grid is stopping [src=" + src +
                ", dest=" + dest + ']');
    }
    /**
     * Update path properties in DUAL mode.
     *
     * @param fs Secondary file system.
     * @param path Path to update.
     * @param props Properties to be applied (must be non-empty).
     * @return Updated file info, or {@code null} if the path does not exist.
     * @throws IgniteCheckedException If update failed.
     */
    public IgfsEntryInfo updateDual(final IgfsSecondaryFileSystem fs, final IgfsPath path,
        final Map<String, String> props) throws IgniteCheckedException {
        assert fs != null;
        assert path != null;
        assert props != null && !props.isEmpty();
        if (busyLock.enterBusy()) {
            try {
                SynchronizationTask<IgfsEntryInfo> task = new SynchronizationTask<IgfsEntryInfo>() {
                    @Override public IgfsEntryInfo onSuccess(Map<IgfsPath, IgfsEntryInfo> infos) throws Exception {
                        if (infos.get(path) == null)
                            return null;
                        // Update the secondary file system first, then mirror the change locally.
                        fs.update(path, props);
                        return updatePropertiesNonTx(infos.get(path).id(), props);
                    }
                    @Override public IgfsEntryInfo onFailure(@Nullable Exception err) throws IgniteCheckedException {
                        U.error(log, "Path update in DUAL mode failed [path=" + path + ", properties=" + props + ']',
                            err);
                        throw new IgniteCheckedException("Failed to update the path due to secondary file system " +
                            "exception: " + path, err);
                    }
                };
                return synchronizeAndExecute(task, fs, false, path);
            }
            finally {
                busyLock.leaveBusy();
            }
        }
        else
            throw new IllegalStateException("Failed to update in DUAL mode because Grid is stopping: " + path);
    }
/**
* Synchronize directory structure with the secondary file system.
*
* @param fs Secondary file system.
* @param startPath Start path.
* @param startPathInfo Start path info.
* @param endPath End path.
* @param strict Whether all paths must exist in the secondary file system.
* @param created Optional map where data about all created values is put.
* @return File info of the end path.
* @throws IgniteCheckedException If failed.
*/
private IgfsEntryInfo synchronize(IgfsSecondaryFileSystem fs,
IgfsPath startPath,
IgfsEntryInfo startPathInfo,
IgfsPath endPath,
boolean strict,
@Nullable Map<IgfsPath, IgfsEntryInfo> created)
throws IgniteCheckedException
{
assert fs != null;
assert startPath != null && startPathInfo != null && endPath != null;
validTxState(true);
IgfsEntryInfo parentInfo = startPathInfo;
List<String> components = endPath.components();
IgfsPath curPath = startPath;
for (int i = startPath.components().size(); i < components.size(); i++) {
curPath = new IgfsPath(curPath, components.get(i));
if (created != null && created.containsKey(curPath))
// Re-use already created info.
parentInfo = created.get(curPath);
else {
// Get file status from the secondary file system.
IgfsFile status;
try {
status = fs.info(curPath);
}
catch (IgniteException e) {
throw new IgniteCheckedException("Failed to get path information: " + e, e);
}
if (status != null) {
if (!status.isDirectory() && !curPath.equals(endPath))
throw new IgniteCheckedException("Failed to create path the locally because secondary file " +
"system directory structure was modified concurrently and the path is not a directory as " +
"expected: " + curPath);
}
else {
if (strict) {
throw new IgniteCheckedException("Failed to create path locally due to secondary file system " +
"exception: " + curPath);
}
else if (created != null)
created.put(curPath.parent(), parentInfo);
return null;
}
// Recreate the path locally.
IgfsEntryInfo curInfo = status.isDirectory() ?
IgfsUtils.createDirectory(
IgniteUuid.randomUuid(),
null,
status.properties(),
status.accessTime(),
status.modificationTime()
) :
IgfsUtils.createFile(
IgniteUuid.randomUuid(),
igfsCtx.configuration().getBlockSize(),
status.length(),
null,
null,
igfsCtx.igfs().evictExclude(curPath, false),
status.properties(),
status.accessTime(),
status.modificationTime()
);
assert parentInfo != null;
IgniteUuid oldId = putIfAbsentNonTx(parentInfo.id(), components.get(i), curInfo);
if (oldId != null)
curInfo = info(oldId);
if (created != null)
created.put(curPath, curInfo);
parentInfo = curInfo;
}
}
return parentInfo;
}
    /**
     * Synchronize file system structure and then execute provided task. All these actions are performed within
     * the transaction. Convenience overload without extra lock IDs.
     *
     * @param task Task to execute.
     * @param fs File system.
     * @param strict Whether paths must be re-created strictly.
     * @param paths Paths to synchronize.
     * @return Result of task execution.
     * @throws IgniteCheckedException If failed.
     */
    private <T> T synchronizeAndExecute(SynchronizationTask<T> task,
        IgfsSecondaryFileSystem fs,
        boolean strict,
        IgfsPath... paths)
        throws IgniteCheckedException
    {
        return synchronizeAndExecute(task, fs, strict, null, paths);
    }
/**
* Synchronize file system structure and then execute provided task. All these actions are performed withing
* the transaction.
*
* @param task Task to execute.
* @param fs File system.
* @param strict Whether paths must be re-created strictly.
* @param extraLockIds Additional IDs to lock (optional).
* @param paths Paths to synchronize.
* @return Result of task execution.
* @throws IgniteCheckedException If failed.
*/
@SuppressWarnings({"Contract", "ConstantConditions"})
private <T> T synchronizeAndExecute(SynchronizationTask<T> task, IgfsSecondaryFileSystem fs, boolean strict,
@Nullable Collection<IgniteUuid> extraLockIds, IgfsPath... paths) throws IgniteCheckedException {
assert task != null;
assert fs != null;
assert paths != null && paths.length > 0;
// Sort paths so that we know in which order to synchronize them.
if (paths.length > 1)
Arrays.sort(paths);
boolean finished = false;
T res = null;
while (!finished) {
// Obtain existing IDs outside the transaction.
List<List<IgniteUuid>> pathIds = new ArrayList<>(paths.length);
for (IgfsPath path : paths)
pathIds.add(idsForPath(path));
// Start pessimistic.
try (GridNearTxLocal tx = startTx()) {
// Lock the very first existing parents and possibly the leaf as well.
Map<IgfsPath, IgfsPath> pathToParent = new HashMap<>();
Map<IgfsPath, IgniteUuid> pathToId = new HashMap<>();
for (int i = 0; i < paths.length; i++) {
IgfsPath path = paths[i];
// Determine the very first existing parent
List<IgniteUuid> ids = pathIds.get(i);
if (ids.size() > 1) {
// The path is not root.
IgfsPath parentPath = path.parent();
IgniteUuid parentId = ids.get(ids.size() - 2);
for (int j = ids.size() - 3; j >= 0; j--) {
if (parentId != null)
break;
else {
parentPath = parentPath.parent();
parentId = ids.get(j);
}
}
assert parentPath != null && parentId != null;
pathToParent.put(path, parentPath);
pathToId.put(parentPath, parentId);
}
IgniteUuid pathId = ids.get(ids.size() - 1);
if (pathId != null)
pathToId.put(path, pathId);
}
IgniteUuid[] lockArr = new IgniteUuid[extraLockIds == null ? pathToId.size() : pathToId.size() +
extraLockIds.size()];
int idx = 0;
for (IgniteUuid id : pathToId.values())
lockArr[idx++] = id;
if (extraLockIds != null) {
for (IgniteUuid id : extraLockIds)
lockArr[idx++] = id;
}
Map<IgniteUuid, IgfsEntryInfo> idToInfo = lockIds(lockArr);
if (extraLockIds != null) {
for (IgniteUuid id : extraLockIds)
idToInfo.remove(id);
}
// Ensure that locked IDs still point to expected paths.
IgfsPath changed = null;
for (Map.Entry<IgfsPath, IgniteUuid> entry : pathToId.entrySet()) {
if (!idToInfo.containsKey(entry.getValue()) ||
!F.eq(entry.getValue(), fileId(entry.getKey(), true))) {
changed = entry.getKey();
break;
}
}
if (changed != null) {
finished = true;
throw fsException(new IgfsConcurrentModificationException("File system entry has been " +
"modified concurrently: " + changed));
}
else {
boolean newParents = false;
// Check whether any new parents appeared before we have obtained the locks.
for (int i = 0; i < paths.length; i++) {
List<IgniteUuid> newIds = fileIds(paths[i], true);
if (!pathIds.get(i).equals(newIds)) {
newParents = true;
break;
}
}
if (newParents)
continue; // Release all locks and try again.
else {
// Perform synchronization.
Map<IgfsPath, IgfsEntryInfo> infos = new HashMap<>();
TreeMap<IgfsPath, IgfsEntryInfo> created = new TreeMap<>();
for (IgfsPath path : paths) {
IgfsPath parentPath = path.parent();
if (pathToId.containsKey(path)) {
infos.put(path, info(pathToId.get(path)));
if (parentPath != null)
infos.put(parentPath, info(pathToId.get(parentPath)));
}
else {
IgfsPath firstParentPath = pathToParent.get(path);
assert firstParentPath != null;
assert pathToId.get(firstParentPath) != null;
IgfsEntryInfo info = synchronize(fs,
firstParentPath,
idToInfo.get(pathToId.get(firstParentPath)),
path,
strict,
created);
assert strict && info != null || !strict;
if (info != null)
infos.put(path, info);
if (parentPath != null) {
if (parentPath.equals(firstParentPath))
infos.put(firstParentPath, idToInfo.get(pathToId.get(firstParentPath)));
else {
assert strict && created.get(parentPath) != null || !strict;
if (created.get(parentPath) != null)
infos.put(parentPath, created.get(parentPath));
else {
// Put the last created path.
infos.put(created.lastKey(), created.get(created.lastKey()));
}
}
}
}
}
// Finally, execute the task.
finished = true;
try {
res = task.onSuccess(infos);
}
catch (Exception e) {
res = task.onFailure(e);
}
}
}
tx.commit();
}
catch (IgniteCheckedException e) {
if (!finished) {
finished = true;
res = task.onFailure(e);
}
else
throw e;
}
}
return res;
}
/**
* Check transaction is (not) started.
*
* @param inTx Expected transaction state.
*/
private void validTxState(boolean inTx) {
assert (inTx && id2InfoPrj.tx() != null) || (!inTx && id2InfoPrj.tx() == null) :
"Invalid TX state [expected=" + inTx + ", actual=" + (id2InfoPrj.tx() != null) + ']';
}
/**
* Start transaction on meta cache.
*
* @return Transaction.
*/
private GridNearTxLocal startTx() {
return metaCache.txStartEx(TransactionConcurrency.PESSIMISTIC, TransactionIsolation.REPEATABLE_READ);
}
/**
* Update times.
*
* @param path Path.
* @param accessTime Access time.
* @param modificationTime Modification time.
* @param secondaryFs Secondary file system.
* @throws IgniteCheckedException If failed.
*/
public void updateTimes(IgfsPath path, long modificationTime, long accessTime,
IgfsSecondaryFileSystem secondaryFs) throws IgniteCheckedException {
while (true) {
if (busyLock.enterBusy()) {
try {
validTxState(false);
// Prepare path IDs.
IgfsPathIds pathIds = pathIds(path);
// Prepare lock IDs.
Set<IgniteUuid> lockIds = new TreeSet<>(PATH_ID_SORTING_COMPARATOR);
pathIds.addExistingIds(lockIds, relaxed);
// Start TX.
try (GridNearTxLocal tx = startTx()) {
Map<IgniteUuid, IgfsEntryInfo> lockInfos = lockIds(lockIds);
if (secondaryFs != null && isRetryForSecondary(pathIds, lockInfos))
continue;
if (!pathIds.verifyIntegrity(lockInfos, relaxed))
// Directory structure changed concurrently. So we re-try.
continue;
if (pathIds.allExists()) {
// All files are in place. Update both primary and secondary file systems.
if (secondaryFs != null)
secondaryFs.setTimes(path, modificationTime, accessTime);
IgniteUuid targetId = pathIds.lastExistingId();
IgfsEntryInfo targetInfo = lockInfos.get(targetId);
id2InfoPrj.invoke(targetId, new IgfsMetaUpdateTimesProcessor(
accessTime == -1 ? targetInfo.accessTime() : accessTime,
modificationTime == -1 ? targetInfo.modificationTime() : modificationTime)
);
tx.commit();
return;
}
else {
// Propagate call to the secondary FS, as we might haven't cache this part yet.
if (secondaryFs != null) {
secondaryFs.setTimes(path, modificationTime, accessTime);
return;
}
else
throw new IgfsPathNotFoundException("Failed to update times (path not found): " + path);
}
}
}
catch (IgniteException | IgniteCheckedException e) {
throw e;
}
catch (Exception e) {
throw new IgniteCheckedException("setTimes failed due to unexpected exception: " + path, e);
}
finally {
busyLock.leaveBusy();
}
}
else
throw new IllegalStateException("Failed to update times because Grid is stopping: " + path);
}
}
/**
* @param msg Error message.
* @return Checked exception.
*/
private static IgniteCheckedException fsException(String msg) {
return new IgniteCheckedException(new IgfsException(msg));
}
/**
* @param err Unchecked exception.
* @return Checked exception.
*/
private static IgniteCheckedException fsException(IgfsException err) {
return new IgniteCheckedException(err);
}
/**
* Append routine.
*
* @param path Path.
* @param dirProps Directory properties.
* @param create Create flag.
* @param blockSize Block size.
* @param affKey Affinity key.
* @param evictExclude Evict exclude flag.
* @param fileProps File properties.
* @return Resulting info.
* @throws IgniteCheckedException If failed.
*/
IgfsEntryInfo append(
final IgfsPath path,
Map<String, String> dirProps,
final boolean create,
final int blockSize,
final @Nullable IgniteUuid affKey,
final boolean evictExclude,
@Nullable Map<String, String> fileProps) throws IgniteCheckedException {
validTxState(false);
while (true) {
if (busyLock.enterBusy()) {
try {
// Prepare path IDs.
IgfsPathIds pathIds = pathIds(path);
// Fail-fast: create flag is not specified and some paths are missing.
if (!pathIds.allExists() && !create)
throw new IgfsPathNotFoundException("Failed to append because file is not found: " + path);
// Prepare lock IDs.
Set<IgniteUuid> lockIds = new TreeSet<>(PATH_ID_SORTING_COMPARATOR);
pathIds.addExistingIds(lockIds, relaxed);
pathIds.addSurrogateIds(lockIds);
// Start TX.
try (GridNearTxLocal tx = startTx()) {
Map<IgniteUuid, IgfsEntryInfo> lockInfos = lockIds(lockIds);
if (!pathIds.verifyIntegrity(lockInfos, relaxed))
// Directory structure changed concurrently. So we simply re-try.
continue;
if (pathIds.allExists()) {
// All participants are found. Simply open the stream.
IgfsEntryInfo info = lockInfos.get(pathIds.lastId());
// Check: is it a file?
if (!info.isFile())
throw new IgfsPathIsDirectoryException("Failed to open file for write." + path);
// Check if file already opened for write.
if (info.lockId() != null)
throw new IgfsException("File is already opened for write: " + path);
// At this point we can open the stream safely.
info = invokeLock(info.id(), false);
tx.commit();
IgfsUtils.sendEvents(igfsCtx.kernalContext(), path, EventType.EVT_IGFS_FILE_OPENED_WRITE);
return info;
}
else {
// Create file and parent folders.
IgfsPathsCreateResult res = createFile(pathIds, lockInfos, dirProps, fileProps, blockSize,
affKey, evictExclude, null, null);
if (res == null)
continue;
// Commit.
tx.commit();
// Generate events.
generateCreateEvents(res.createdPaths(), true);
return res.info();
}
}
}
finally {
busyLock.leaveBusy();
}
}
else
throw new IllegalStateException("Failed to append for file because Grid is stopping:" + path);
}
}
/**
* Create a file.
*
* @param path Path.
* @param dirProps Directory properties.
* @param overwrite Overwrite flag.
* @param blockSize Block size.
* @param affKey Affinity key.
* @param evictExclude Evict exclude flag.
* @param fileProps File properties.
* @param secondaryCtx Secondary file system create context.
* @return @return Operation result.
* @throws IgniteCheckedException If failed.
*/
IgfsCreateResult create(
final IgfsPath path,
Map<String, String> dirProps,
final boolean overwrite,
final int blockSize,
final @Nullable IgniteUuid affKey,
final boolean evictExclude,
@Nullable Map<String, String> fileProps,
@Nullable IgfsSecondaryFileSystemCreateContext secondaryCtx) throws IgniteCheckedException {
validTxState(false);
while (true) {
if (busyLock.enterBusy()) {
OutputStream secondaryOut = null;
try {
// Prepare path IDs.
IgfsPathIds pathIds = pathIds(path);
// Prepare lock IDs.
Set<IgniteUuid> lockIds = new TreeSet<>(PATH_ID_SORTING_COMPARATOR);
pathIds.addExistingIds(lockIds, relaxed);
pathIds.addSurrogateIds(lockIds);
// In overwrite mode we also lock ID of potential replacement as well as trash ID.
IgniteUuid overwriteId = IgniteUuid.randomUuid();
IgniteUuid trashId = IgfsUtils.randomTrashId();
if (overwrite) {
lockIds.add(overwriteId);
// Trash ID is only added if we suspect conflict.
if (pathIds.allExists())
lockIds.add(trashId);
}
// Start TX.
try (GridNearTxLocal tx = startTx()) {
Map<IgniteUuid, IgfsEntryInfo> lockInfos = lockIds(lockIds);
if (secondaryCtx != null && isRetryForSecondary(pathIds, lockInfos))
continue;
if (!pathIds.verifyIntegrity(lockInfos, relaxed))
// Directory structure changed concurrently. So we simply re-try.
continue;
if (pathIds.allExists()) {
// All participants found.
IgfsEntryInfo oldInfo = lockInfos.get(pathIds.lastId());
// Check: is it a file?
if (!oldInfo.isFile())
throw new IgfsPathIsDirectoryException("Failed to create a file: " + path);
// Check: can we overwrite it?
if (!overwrite)
throw new IgfsPathAlreadyExistsException("Failed to create a file: " + path);
// Check if file already opened for write.
if (oldInfo.lockId() != null)
throw new IgfsException("File is already opened for write: " + path);
// At this point file can be re-created safely.
// Add existing to trash listing.
IgniteUuid oldId = pathIds.lastId();
id2InfoPrj.invoke(trashId, new IgfsMetaDirectoryListingAddProcessor(
IgfsUtils.composeNameForTrash(path, oldId), new IgfsListingEntry(oldInfo)));
// Replace ID in parent directory.
String name = pathIds.lastPart();
IgniteUuid parentId = pathIds.lastParentId();
id2InfoPrj.invoke(parentId, new IgfsMetaDirectoryListingReplaceProcessor(name, overwriteId));
// Create the file.
IgniteUuid newLockId = createFileLockId(false);
long newAccessTime;
long newModificationTime;
Map<String, String> newProps;
long newLen;
int newBlockSize;
if (secondaryCtx != null) {
secondaryOut = secondaryCtx.create();
newAccessTime = 0L;
newModificationTime = 0L;
newProps = null;
}
else {
newAccessTime = System.currentTimeMillis();
newModificationTime = newAccessTime;
newProps = fileProps;
}
newLen = 0L;
newBlockSize = blockSize;
IgfsEntryInfo newInfo = invokeAndGet(overwriteId,
new IgfsMetaFileCreateProcessor(newAccessTime, newModificationTime, newProps,
newBlockSize, affKey, newLockId, evictExclude, newLen));
// Prepare result and commit.
tx.commit();
IgfsUtils.sendEvents(igfsCtx.kernalContext(), path, EventType.EVT_IGFS_FILE_OPENED_WRITE);
return new IgfsCreateResult(newInfo, secondaryOut);
}
else {
// Create file and parent folders.
T1<OutputStream> secondaryOutHolder = null;
if (secondaryCtx != null)
secondaryOutHolder = new T1<>();
IgfsPathsCreateResult res;
try {
res = createFile(pathIds, lockInfos, dirProps, fileProps, blockSize,
affKey, evictExclude, secondaryCtx, secondaryOutHolder);
}
finally {
if (secondaryOutHolder != null)
secondaryOut = secondaryOutHolder.get();
}
if (res == null)
continue;
// Commit.
tx.commit();
// Generate events.
generateCreateEvents(res.createdPaths(), true);
return new IgfsCreateResult(res.info(), secondaryOut);
}
}
}
catch (IgniteException | IgniteCheckedException e) {
U.closeQuiet(secondaryOut);
throw e;
}
catch (Exception e) {
U.closeQuiet(secondaryOut);
throw new IgniteCheckedException("Create failed due to unexpected exception: " + path, e);
}
finally {
busyLock.leaveBusy();
}
}
else
throw new IllegalStateException("Failed to mkdir because Grid is stopping. [path=" + path + ']');
}
}
/**
* Create directory and it's parents.
*
* @param pathIds Path IDs.
* @param lockInfos Lock infos.
* @param dirProps Directory properties.
* @return Result or {@code} if the first parent already contained child with the same name.
* @throws IgniteCheckedException If failed.
*/
@Nullable IgfsPathsCreateResult createDirectory(IgfsPathIds pathIds, Map<IgniteUuid, IgfsEntryInfo> lockInfos,
Map<String, String> dirProps) throws IgniteCheckedException {
// Check if entry we are going to write to is directory.
if (lockInfos.get(pathIds.lastExistingId()).isFile())
throw new IgfsParentNotDirectoryException("Failed to create directory (parent " +
"element is not a directory)");
return createFileOrDirectory(true, pathIds, lockInfos, dirProps, null, 0, null, false, null, null);
}
/**
* Create file and all it's parents.
*
* @param pathIds Paths IDs.
* @param lockInfos Lock infos.
* @param dirProps Directory properties.
* @param fileProps File propertris.
* @param blockSize Block size.
* @param affKey Affinity key (optional)
* @param evictExclude Evict exclude flag.
* @param secondaryCtx Secondary file system create context.
* @param secondaryOutHolder Holder for the secondary output stream.
* @return Result or {@code} if the first parent already contained child with the same name.
* @throws IgniteCheckedException If failed.
*/
@Nullable private IgfsPathsCreateResult createFile(IgfsPathIds pathIds, Map<IgniteUuid, IgfsEntryInfo> lockInfos,
Map<String, String> dirProps, Map<String, String> fileProps, int blockSize, @Nullable IgniteUuid affKey,
boolean evictExclude, @Nullable IgfsSecondaryFileSystemCreateContext secondaryCtx,
@Nullable T1<OutputStream> secondaryOutHolder)
throws IgniteCheckedException{
// Check if entry we are going to write to is directory.
if (lockInfos.get(pathIds.lastExistingId()).isFile())
throw new IgfsParentNotDirectoryException("Failed to open file for write " +
"(parent element is not a directory): " + pathIds.path());
return createFileOrDirectory(false, pathIds, lockInfos, dirProps, fileProps, blockSize, affKey, evictExclude,
secondaryCtx, secondaryOutHolder);
}
/**
* Create file or directory.
*
* @param dir Directory flag.
* @param pathIds Path IDs.
* @param lockInfos Lock infos.
* @param dirProps Directory properties.
* @param fileProps File properties.
* @param blockSize Block size.
* @param affKey Affinity key.
* @param evictExclude Evict exclude flag.
* @param secondaryCtx Secondary file system create context.
* @param secondaryOutHolder Secondary output stream holder.
* @return Result.
* @throws IgniteCheckedException If failed.
*/
@SuppressWarnings("unchecked")
private IgfsPathsCreateResult createFileOrDirectory(boolean dir, IgfsPathIds pathIds,
Map<IgniteUuid, IgfsEntryInfo> lockInfos, Map<String, String> dirProps, Map<String, String> fileProps,
int blockSize, @Nullable IgniteUuid affKey, boolean evictExclude,
@Nullable IgfsSecondaryFileSystemCreateContext secondaryCtx, @Nullable T1<OutputStream> secondaryOutHolder)
throws IgniteCheckedException {
// This is our starting point.
int lastExistingIdx = pathIds.lastExistingIndex();
IgfsEntryInfo lastExistingInfo = lockInfos.get(pathIds.lastExistingId());
// If current info already contains entry with the same name as it's child, then something
// has changed concurrently. We must re-try because we cannot get info of this unexpected
// element due to possible deadlocks.
int curIdx = lastExistingIdx + 1;
String curPart = pathIds.part(curIdx);
IgniteUuid curId = pathIds.surrogateId(curIdx);
if (lastExistingInfo.hasChild(curPart))
return null;
// Create entry in the secondary file system if needed.
if (secondaryCtx != null) {
assert secondaryOutHolder != null;
secondaryOutHolder.set(secondaryCtx.create());
}
Map<IgniteUuid, EntryProcessor> procMap = new HashMap<>();
// First step: add new entry to the last existing element.
procMap.put(lastExistingInfo.id(), new IgfsMetaDirectoryListingAddProcessor(curPart,
new IgfsListingEntry(curId, dir || !pathIds.isLastIndex(curIdx))));
// Events support.
IgfsPath lastCreatedPath = pathIds.lastExistingPath();
List<IgfsPath> createdPaths = new ArrayList<>(pathIds.count() - curIdx);
// Second step: create middle directories.
long curTime = System.currentTimeMillis();
while (curIdx < pathIds.count() - 1) {
lastCreatedPath = new IgfsPath(lastCreatedPath, curPart);
int nextIdx = curIdx + 1;
String nextPart = pathIds.part(nextIdx);
IgniteUuid nextId = pathIds.surrogateId(nextIdx);
long accessTime;
long modificationTime;
Map<String, String> props;
if (secondaryCtx != null) {
accessTime = 0L;
modificationTime = 0L;
props = null;
}
else {
accessTime = curTime;
modificationTime = curTime;
props = dirProps;
}
procMap.put(curId, new IgfsMetaDirectoryCreateProcessor(accessTime, modificationTime, props,
nextPart, new IgfsListingEntry(nextId, dir || !pathIds.isLastIndex(nextIdx))));
// Save event.
createdPaths.add(lastCreatedPath);
// Advance things further.
curIdx++;
curPart = nextPart;
curId = nextId;
}
// Third step: create leaf.
if (dir) {
long accessTime;
long modificationTime;
Map<String, String> props;
if (secondaryCtx != null) {
accessTime = 0L;
modificationTime = 0L;
props = null;
}
else {
accessTime = curTime;
modificationTime = curTime;
props = dirProps;
}
procMap.put(curId, new IgfsMetaDirectoryCreateProcessor(accessTime, modificationTime, props));
}
else {
long newAccessTime;
long newModificationTime;
Map<String, String> newProps;
long newLen;
int newBlockSize;
if (secondaryCtx != null) {
newAccessTime = 0L;
newModificationTime = 0L;
newProps = null;
}
else {
newAccessTime = curTime;
newModificationTime = curTime;
newProps = fileProps;
}
newLen = 0L;
newBlockSize = blockSize;
procMap.put(curId, new IgfsMetaFileCreateProcessor(newAccessTime, newModificationTime, newProps,
newBlockSize, affKey, createFileLockId(false), evictExclude, newLen));
}
createdPaths.add(pathIds.path());
// Execute cache operations.
Map<Object, EntryProcessorResult> invokeRes = ((IgniteInternalCache)id2InfoPrj).invokeAll(procMap);
IgfsEntryInfo info = (IgfsEntryInfo)invokeRes.get(curId).get();
return new IgfsPathsCreateResult(createdPaths, info);
}
/**
* Generate events for created file or directory.
*
* @param createdPaths Created paths.
* @param file Whether file was created.
*/
private void generateCreateEvents(List<IgfsPath> createdPaths, boolean file) {
if (evts.isRecordable(EventType.EVT_IGFS_DIR_CREATED)) {
for (int i = 0; i < createdPaths.size() - 1; i++)
IgfsUtils.sendEvents(igfsCtx.kernalContext(), createdPaths.get(i),
EventType.EVT_IGFS_DIR_CREATED);
}
IgfsPath leafPath = createdPaths.get(createdPaths.size() - 1);
if (file) {
IgfsUtils.sendEvents(igfsCtx.kernalContext(), leafPath, EventType.EVT_IGFS_FILE_CREATED);
IgfsUtils.sendEvents(igfsCtx.kernalContext(), leafPath, EventType.EVT_IGFS_FILE_OPENED_WRITE);
}
else
IgfsUtils.sendEvents(igfsCtx.kernalContext(), leafPath, EventType.EVT_IGFS_DIR_CREATED);
}
/**
* Signal delete worker thread.
*/
private void signalDeleteWorker() {
IgfsDeleteWorker delWorker0 = delWorker;
if (delWorker0 != null)
delWorker0.signal();
}
/**
* Synchronization task interface.
*/
private static interface SynchronizationTask<T> {
/**
* Callback handler in case synchronization was successful.
*
* @param infos Map from paths to corresponding infos.
* @return Task result.
* @throws Exception If failed.
*/
public T onSuccess(Map<IgfsPath, IgfsEntryInfo> infos) throws Exception;
/**
* Callback handler in case synchronization failed.
*
* @param err Optional exception.
* @return Task result.
* @throws IgniteCheckedException In case exception is to be thrown in that case.
*/
public T onFailure(Exception err) throws IgniteCheckedException;
}
} | apache-2.0 |
winklerm/droolsjbpm-integration | kie-server-parent/kie-server-controller/kie-server-controller-websocket-common/src/main/java/org/kie/server/controller/websocket/common/config/WebSocketClientConfigurationImpl.java | 3361 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.controller.websocket.common.config;
import java.util.List;
import javax.websocket.Decoder;
import javax.websocket.Encoder;
public class WebSocketClientConfigurationImpl implements WebSocketClientConfiguration {
private String controllerUrl;
private String userName;
private String token;
private String password;
private Long maxSessionIdleTimeout = 0L;
private Long asyncSendTimeout = 120 * 1000L;
private List<Class<? extends Encoder>> encoders;
private List<Class<? extends Decoder>> decoders;
protected WebSocketClientConfigurationImpl() {
}
@Override
public String getControllerUrl() {
return controllerUrl;
}
public void setControllerUrl(String controllerUrl) {
this.controllerUrl = controllerUrl;
}
@Override
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
@Override
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
@Override
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
@Override
public Long getMaxSessionIdleTimeout() {
return maxSessionIdleTimeout;
}
public void setMaxSessionIdleTimeout(Long maxSessionIdleTimeout) {
this.maxSessionIdleTimeout = maxSessionIdleTimeout;
}
@Override
public Long getAsyncSendTimeout() {
return asyncSendTimeout;
}
public void setAsyncSendTimeout(Long asyncSendTimeout) {
this.asyncSendTimeout = asyncSendTimeout;
}
@Override
public List<Class<? extends Encoder>> getEncoders() {
return encoders;
}
public void setEncoders(List<Class<? extends Encoder>> encoders) {
this.encoders = encoders;
}
@Override
public List<Class<? extends Decoder>> getDecoders() {
return decoders;
}
public void setDecoders(List<Class<? extends Decoder>> decoders) {
this.decoders = decoders;
}
@Override
public String toString() {
return "WebSocketClientConfigurationImpl{" +
"controllerUrl='" + controllerUrl + '\'' +
", userName='" + userName + '\'' +
", token='" + token + '\'' +
", password='" + password + '\'' +
", maxSessionIdleTimeout=" + maxSessionIdleTimeout +
", asyncSendTimeout=" + asyncSendTimeout +
", encoders=" + encoders +
", decoders=" + decoders +
'}';
}
}
| apache-2.0 |
nmcl/scratch | graalvm/transactions/fork/narayana/XTS/WS-C/dev/src/com/arjuna/wsc/CannotRegisterException.java | 1242 | /*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc and individual contributors as identified
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*
* @authors Andrew Dinn
*/
package com.arjuna.wsc;
public class CannotRegisterException extends Exception
{
public CannotRegisterException()
{
}
public CannotRegisterException(String message)
{
super(message);
}
}
| apache-2.0 |
DIVERSIFY-project/SMART-GH | core/src/test/java/com/graphhopper/storage/NativeFSLockFactoryTest.java | 2373 | /*
* Licensed to Peter Karich under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* Peter Karich licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the
* License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.storage;
import java.nio.channels.OverlappingFileLockException;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Peter Karich
*/
public class NativeFSLockFactoryTest extends AbstractLockFactoryTester
{
@Override
protected LockFactory createLockFactory()
{
return new NativeFSLockFactory(lockDir);
}
@Test
public void testMultiReadObtain()
{
LockFactory instance = createLockFactory();
instance.setLockDir(lockDir);
Lock writeLock1 = instance.create("test", true);
assertTrue(writeLock1.tryLock());
// BUT disallow more than one write lock!
Lock lock2 = instance.create("test", false);
assertFalse(lock2.tryLock());
writeLock1.release();
assertTrue(lock2.tryLock());
// http://stackoverflow.com/q/24367887/194609
// we cannot test 'allow multiple read locks' as multiple reads are only allowed for different processes
// Lock lock3 = instance.create("test", false);
// assertFalse(lock3.tryLock());
// lock3.release();
// still the lock should be valid
assertTrue(lock2.isLocked());
// disallow write lock if currently reading
Lock writeLock4 = instance.create("test", true);
assertFalse(writeLock4.tryLock());
assertEquals(OverlappingFileLockException.class, writeLock4.getObtainFailedReason().getClass());
writeLock4.release();
assertTrue(lock2.isLocked());
lock2.release();
}
}
| apache-2.0 |
medicayun/medicayundicom | dcm4chee-usr/trunk/dcm4chee-usr-ui/src/main/java/org/dcm4chee/usr/ui/usermanagement/role/CreateOrEditRolePage.java | 12474 | /* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), hosted at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* Agfa-Gevaert AG.
* Portions created by the Initial Developer are Copyright (C) 2002-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* See listed authors below.
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chee.usr.ui.usermanagement.role;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.wicket.ResourceReference;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.form.AjaxFallbackButton;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.html.CSSPackageResource;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.resources.CompressedResourceReference;
import org.apache.wicket.markup.repeater.RepeatingView;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.util.ListModel;
import org.dcm4chee.usr.dao.UserAccess;
import org.dcm4chee.usr.model.Group;
import org.dcm4chee.usr.model.Role;
import org.dcm4chee.usr.ui.validator.RoleValidator;
import org.dcm4chee.usr.util.JNDIUtils;
import org.dcm4chee.web.common.base.BaseWicketApplication;
import org.dcm4chee.web.common.base.BaseWicketPage;
import org.dcm4chee.web.common.markup.BaseForm;
import org.dcm4chee.web.common.secure.SecureSessionCheckPage;
import org.dcm4chee.web.common.secure.SecurityBehavior;
import org.dcm4chee.web.common.util.Auditlog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Modal page used both for creating a new role and for editing an existing one.
 * The mode is selected by the {@code role} constructor argument: {@code null}
 * means "create", non-null means "edit" (fields are pre-populated).
 *
 * @author Robert David <robert.david@agfa.com>
 * @version $Revision$ $Date$
 * @since Jul. 21, 2010
 */
public class CreateOrEditRolePage extends SecureSessionCheckPage {

    private static final long serialVersionUID = 1L;

    private static Logger log = LoggerFactory.getLogger(CreateOrEditRolePage.class);

    /** Modal window hosting this page; closed again after a successful submit. */
    protected ModalWindow window;

    /**
     * @param window       modal window that displays this page
     * @param allRolenames model holding all existing roles; refreshed after add/update
     * @param role         role to edit, or {@code null} to create a new role
     * @param types        group-uuid to group map -- NOTE(review): passed through to the
     *                     form but never read there; confirm it is still needed
     */
    public CreateOrEditRolePage(final ModalWindow window, ListModel<Role> allRolenames, Role role, Map<String,Group> types) {
        super();
        this.window = window;
        add(new CreateOrEditRoleForm("add-role-form", allRolenames, role, types));
        // Exactly one of the two titles is rendered, depending on create vs. edit mode.
        add(new WebMarkupContainer("create-role-title").setVisible(role == null));
        add(new WebMarkupContainer("edit-role-title").setVisible(role != null));
    }

    /** Form holding the role name, description, superuser flag and group assignment rows. */
    private final class CreateOrEditRoleForm extends BaseForm {

        private static final long serialVersionUID = 1L;

        // Backing models for the editable fields.
        private Model<String> rolename = new Model<String>();
        private Model<String> description = new Model<String>();
        private Model<Boolean> hiddenRole = new Model<Boolean>();

        private TextField<String> rolenameTextField= new TextField<String>("rolelist.add-role-form.rolename.input", rolename);
        private TextField<String> descriptionTextField= new TextField<String>("rolelist.add-role-form.description.input", description);

        // One checkbox per available group; each checkbox label carries the group uuid.
        private List<CheckBox> groupCheckboxList;

        public CreateOrEditRoleForm(String id, final ListModel<Role> allRolenames, final Role role, final Map<String,Group> types) {
            super(id);
            setOutputMarkupId(true);
            // NOTE(review): the return value of getInitParameter(...) is discarded here --
            // this looks like dead code or a leftover; verify whether the lookup is needed.
            ((BaseWicketApplication) getApplication()).getInitParameter("UserAccessServiceName");
            final UserAccess userAccess = ((UserAccess) JNDIUtils.lookup(UserAccess.JNDI_NAME));
            final List<Group> groups = userAccess.getAllGroups();
            add(rolenameTextField
                    .setRequired(true)
                    // Rejects duplicate role names; the current name stays valid when editing.
                    .add(new RoleValidator(allRolenames, (role == null ? null : role.getRolename())))
            );
            add(descriptionTextField);
            WebMarkupContainer hiddenRoleContainer = new WebMarkupContainer("hiddenRoleContainer");
            add(hiddenRoleContainer);
            final CheckBox hiddenRoleCheckbox = new CheckBox("hiddenRole-checkbox", hiddenRole);
            hiddenRole.setObject(role != null && role.isSuperuser());
            if (role != null) {
                // Edit mode: pre-populate the fields from the existing role.
                rolenameTextField.setModelObject(role.getRolename());
                descriptionTextField.setModelObject(role.getDescription());
                hiddenRoleCheckbox.setModelObject(role.isSuperuser());
            }
            hiddenRoleContainer
                    .add(hiddenRoleCheckbox)
                    // The superuser checkbox is only shown to users holding this permission.
                    .add(new SecurityBehavior(getModuleName() + ":superuserCheckbox"));
            // Uuids of the three well-known groups, captured while iterating below; each
            // of them is mirrored by a dedicated flag on the Role (web/dicom/aet).
            final StringBuffer webRoleUuid = new StringBuffer();
            final StringBuffer dicomRoleUuid = new StringBuffer();
            final StringBuffer aetRoleUuid = new StringBuffer();
            groupCheckboxList = new ArrayList<CheckBox>(groups.size());
            RepeatingView groupRows = new RepeatingView("group-rows");
            add(groupRows);
            for (final Group group : groups) {
                WebMarkupContainer rowParent;
                groupRows.add((rowParent = new WebMarkupContainer(groupRows.newChildId())));
                CheckBox groupCheckbox;
                // Pre-check the box if the role already belongs to this group; for the
                // "Web"/"Dicom"/"AET" groups the role's dedicated flags are used instead.
                rowParent.add((groupCheckbox = new CheckBox("group-checkbox",
                        new Model<Boolean>(role != null ?
                                group.getGroupname().equals("Web") ? role.isWebRole() :
                                group.getGroupname().equals("Dicom") ? role.isDicomRole() :
                                group.getGroupname().equals("AET") ? role.isAETRole() :
                                role.getRoleGroups().contains(group.getUuid()) : false)))
                        .setLabel(new Model<String>(group.getUuid()))
                        .add(new SecurityBehavior(getModuleName() + ":changeGroupAssignmentCheckbox")));
                rowParent.add(new Label("groupname", new Model<String>(group.getGroupname())));
                groupCheckboxList.add(groupCheckbox);
                // NOTE(review): rowParent was already added to groupRows above; this second
                // add looks redundant -- confirm it is harmless for the repeater.
                groupRows.add(rowParent);
                if (group.getGroupname().equals("Web"))
                    webRoleUuid.append(group.getUuid());
                else if (group.getGroupname().equals("Dicom"))
                    dicomRoleUuid.append(group.getUuid());
                else if (group.getGroupname().equals("AET"))
                    aetRoleUuid.append(group.getUuid());
            }
            add(new AjaxFallbackButton("add-role-submit", CreateOrEditRoleForm.this) {

                private static final long serialVersionUID = 1L;

                @Override
                protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                    try {
                        // Collect the uuids of all checked group boxes.
                        List<String> groupList = new ArrayList<String>();
                        for (CheckBox checkbox : groupCheckboxList)
                            if (checkbox.getModelObject())
                                groupList.add(checkbox.getLabel().getObject());
                        // Default AET group assignment ("*" presumably means all AETs --
                        // TODO confirm against UserAccess semantics).
                        Set<String> aetGroups = new HashSet<String>();
                        aetGroups.add("*");
                        if (role == null) {
                            // Create mode: build a brand-new role from the form values.
                            Role newRole = new Role(rolename.getObject());
                            newRole.setDescription(description.getObject());
                            newRole.setSuperuser(hiddenRoleCheckbox.getModelObject());
                            newRole.setRoleGroups(groupList);
                            newRole.setWebRole(groupList.contains(webRoleUuid.toString()));
                            newRole.setDicomRole(groupList.contains(dicomRoleUuid.toString()));
                            newRole.setAETRole(groupList.contains(aetRoleUuid.toString()));
                            if (newRole.isAETRole())
                                newRole.setAETGroups(aetGroups);
                            userAccess.addRole(newRole);
                            Auditlog.logSoftwareConfiguration(true, "Role "+newRole+" created.");
                        } else {
                            // Edit mode: mutate the existing role in place.
                            // NOTE(review): the change summary returned by getUpdateInfo()
                            // is discarded -- it looks like it was meant to be logged below
                            // instead of the generic message; confirm intended behavior.
                            getUpdateInfo(role, groupList, aetGroups);
                            role.setRolename(rolename.getObject());
                            role.setDescription(description.getObject());
                            role.setSuperuser(hiddenRoleCheckbox.getModelObject());
                            role.setRoleGroups(groupList);
                            role.setWebRole(groupList.contains(webRoleUuid.toString()));
                            role.setDicomRole(groupList.contains(dicomRoleUuid.toString()));
                            role.setAETRole(groupList.contains(aetRoleUuid.toString()));
                            // Only seed the default AET groups if none are assigned yet.
                            if (role.isAETRole() && (role.getAETGroups() == null || role.getAETGroups().size() == 0))
                                role.setAETGroups(aetGroups);
                            userAccess.updateRole(role);
                            Auditlog.logSoftwareConfiguration(true, "Role "+role+" updated.");
                        }
                        // Refresh the caller's role list and dismiss the modal window.
                        allRolenames.setObject(userAccess.getAllRoles());
                        window.close(target);
                    } catch (final Exception e) {
                        log.error(this.getClass().toString() + ": " + "onSubmit: " + e.getMessage());
                        log.debug("Exception: ", e);
                    }
                }

                @Override
                protected void onError(AjaxRequestTarget target, Form<?> form) {
                    // Re-render the form so validation feedback becomes visible.
                    target.addComponent(form);
                }

                /**
                 * Builds a human-readable summary of the pending changes for auditing.
                 * NOTE(review): only the first addChange() result is captured in
                 * {@code changed}; the later calls all reuse that initial flag -- verify
                 * this is intentional. The {@code aetGroups} parameter is never read.
                 */
                private String getUpdateInfo(Role role, List<String> groupList, Set<String> aetGroups) {
                    StringBuilder sb = new StringBuilder("Role ").append(role).append(" updated.");
                    boolean changed = Auditlog.addChange(sb, false, "rolename", role.getRolename(), rolename.getObject());
                    Auditlog.addChange(sb, changed, "description", role.getDescription(), description.getObject());
                    Auditlog.addChange(sb, changed, "superuser", role.isSuperuser(), hiddenRoleCheckbox.getModelObject());
                    Auditlog.addChange(sb, changed, "WEB role", role.isWebRole(), groupList.contains(webRoleUuid.toString()));
                    Auditlog.addChange(sb, changed, "DICOM role", role.isDicomRole(), groupList.contains(dicomRoleUuid.toString()));
                    Auditlog.addChange(sb, changed, "AET role", role.isAETRole(), groupList.contains(aetRoleUuid.toString()));
                    return sb.toString();
                }
            });
        }
    };

    /** @return module name used to scope the security behaviors of this page. */
    public static String getModuleName() {
        return "roletypes";
    }
}
| apache-2.0 |
timopulkkinen/BubbleFish | android_webview/javatests/src/org/chromium/android_webview/test/AndroidWebViewTestRunnerActivity.java | 2043 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.android_webview.test;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.LinearLayout;
/**
 * This is a lightweight activity for tests that only require WebView functionality.
 * It hosts a single vertical {@link LinearLayout} to which test code can attach views.
 */
public class AndroidWebViewTestRunnerActivity extends Activity {

    /** Root container for all views added via {@link #addView(View)}. */
    private LinearLayout mLinearLayout;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // TODO(joth): When SW-renderer is available, we'll want to enable this on a per-test
        // basis.
        boolean hardwareAccelerated = true;
        Log.i("AndroidWebViewTestRunnerActivity", "Is " + (hardwareAccelerated ? "" : "NOT ")
                + "hardware accelerated");
        if (hardwareAccelerated) {
            // Request a hardware-accelerated window before any views are attached.
            getWindow().setFlags(
                    WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED,
                    WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }
        mLinearLayout = new LinearLayout(this);
        mLinearLayout.setOrientation(LinearLayout.VERTICAL);
        mLinearLayout.setShowDividers(LinearLayout.SHOW_DIVIDER_MIDDLE);
        mLinearLayout.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT,
                LayoutParams.WRAP_CONTENT));
        setContentView(mLinearLayout);
    }

    /**
     * Adds a view to the main linear layout.
     * Each added view gets equal weight so multiple views share the space evenly.
     */
    public void addView(View view) {
        view.setLayoutParams(new LinearLayout.LayoutParams(
                LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, 1f));
        mLinearLayout.addView(view);
    }

    /**
     * Clears the main linear layout.
     */
    public void removeAllViews() {
        mLinearLayout.removeAllViews();
    }
}
| bsd-3-clause |
scheib/chromium | components/browser_ui/bottomsheet/android/java/src/org/chromium/components/browser_ui/bottomsheet/BottomSheetContent.java | 8552 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.browser_ui.bottomsheet;
import android.view.View;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.chromium.base.Callback;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * An interface defining content that can be displayed inside of the bottom sheet for Chrome
 * Home.
 */
public interface BottomSheetContent {
    /** The different possible height modes for a given state. */
    @IntDef({HeightMode.DEFAULT, HeightMode.WRAP_CONTENT, HeightMode.DISABLED})
    @Retention(RetentionPolicy.SOURCE)
    @interface HeightMode {
        /**
         * The sheet will use the stock behavior for the {@link BottomSheetController.SheetState}
         * this is used for. Typically this means a pre-defined height ratio, peek being the
         * exception that uses the feature's toolbar height.
         */
        int DEFAULT = 0;

        /**
         * The sheet will set its height so the content is completely visible. This mode cannot
         * be used for the peek state.
         */
        int WRAP_CONTENT = -1;

        /**
         * The state this mode is used for will be disabled. For example, disabling the peek state
         * would cause the sheet to automatically expand when triggered.
         */
        int DISABLED = -2;
    }

    /** The different priorities that the sheet's content can have. */
    @IntDef({ContentPriority.HIGH, ContentPriority.LOW})
    @Retention(RetentionPolicy.SOURCE)
    @interface ContentPriority {
        int HIGH = 0;
        int LOW = 1;
    }

    /** Interface to listen when the size of a BottomSheetContent changes. */
    interface ContentSizeListener {
        /** Called when the size of the view has changed. */
        void onSizeChanged(int width, int height, int oldWidth, int oldHeight);
    }

    /**
     * Gets the {@link View} that holds the content to be displayed in the Chrome Home bottom
     * sheet.
     * @return The content view.
     */
    View getContentView();

    /**
     * Get the {@link View} that contains the toolbar specific to the content being
     * displayed. If null is returned, the omnibox is used.
     *
     * @return The toolbar view.
     */
    @Nullable
    View getToolbarView();

    /**
     * @return The vertical scroll offset of the content view.
     */
    int getVerticalScrollOffset();

    /**
     * Called to destroy the {@link BottomSheetContent} when it is dismissed. The means the
     * sheet is in the {@link BottomSheetController.SheetState#HIDDEN} state without being
     * suppressed. This method does not necessarily need to be used but exists for convenience.
     * Cleanup can be done manually via the owning component (likely watching for the sheet hidden
     * event using an observer).
     */
    void destroy();

    /**
     * @return The priority of this content.
     */
    @ContentPriority
    int getPriority();

    /**
     * @return Whether swiping the sheet down hard enough will cause the sheet to be dismissed.
     */
    boolean swipeToDismissEnabled();

    /**
     * @return Whether the sheet will always skip the half state once it was fully extended.
     */
    default boolean skipHalfStateOnScrollingDown() {
        return true;
    };

    /**
     * @return Whether this content owns its lifecycle. If false, the content will be hidden
     *         when the user navigates away from the page or switches tab.
     */
    default boolean hasCustomLifecycle() {
        return false;
    }

    /**
     * @return Whether this content owns the scrim lifecycle. If false, a default scrim will
     *         be displayed behind the sheet when this content is shown.
     */
    default boolean hasCustomScrimLifecycle() {
        return false;
    }

    /**
     * @return The height of the peeking state for the content in px or one of the values in
     *         {@link HeightMode}. If {@link HeightMode#DEFAULT}, the system expects
     *         {@link #getToolbarView} to be non-null, where it will then use its height as the
     *         peeking height. This method cannot return {@link HeightMode#WRAP_CONTENT}.
     */
    default int getPeekHeight() {
        return HeightMode.DEFAULT;
    }

    /**
     * @return The height of the half state for the content as a ratio of the height of the
     *         content area (ex. 1.f would be full-screen, 0.5f would be half-screen). The
     *         returned value can also be one of {@link HeightMode}. If
     *         {@link HeightMode#DEFAULT} is returned, the ratio will be a predefined value. If
     *         {@link HeightMode#WRAP_CONTENT} is returned by {@link #getFullHeightRatio()}, the
     *         half height will be disabled. Half height will also be disabled on small screens.
     *         This method cannot return {@link HeightMode#WRAP_CONTENT}.
     */
    default float getHalfHeightRatio() {
        return HeightMode.DEFAULT;
    }

    /**
     * @return The height of the full state for the content as a ratio of the height of the
     *         content area (ex. 1.f would be full-screen, 0.5f would be half-screen). The
     *         returned value can also be one of {@link HeightMode}. If
     *         {@link HeightMode#DEFAULT}, the ratio will be a predefined value. This height
     *         cannot be disabled. This method cannot return {@link HeightMode#DISABLED}.
     */
    default float getFullHeightRatio() {
        return HeightMode.DEFAULT;
    }

    /**
     * Set a {@link ContentSizeListener} that should be notified when the size of the content
     * has changed. This will be called only if {@link #getFullHeightRatio()} returns {@link
     * HeightMode#WRAP_CONTENT}. Note that you need to implement this method only if the content
     * view height changes are animated.
     *
     * @return Whether the listener was correctly set.
     */
    default boolean setContentSizeListener(@Nullable ContentSizeListener listener) {
        return false;
    }

    /**
     * @return Whether the sheet should be hidden when it is in the PEEK state and the user
     *         scrolls down the page.
     */
    default boolean hideOnScroll() {
        return false;
    }

    /**
     * A means for the content to intercept and handle the back press event. This will be called
     * even if the sheet is in the peeking state. If left {@code false}, the sheet will collapse to
     * its minimum state on back press or do nothing if in the minimum / peeking state.
     * @return Whether the bottom sheet handled the back press.
     */
    default boolean handleBackPress() {
        return false;
    }

    /**
     * @return The resource id of the content description for the bottom sheet. This is
     *         generally the name of the feature/content that is showing. 'Swipe down to close.'
     *         will be automatically appended after the content description.
     */
    int getSheetContentDescriptionStringId();

    /**
     * @return The resource id of the string announced when the sheet is opened at half height.
     *         This is typically the name of your feature followed by 'opened at half height'.
     */
    int getSheetHalfHeightAccessibilityStringId();

    /**
     * @return The resource id of the string announced when the sheet is opened at full height.
     *         This is typically the name of your feature followed by 'opened at full height'.
     */
    int getSheetFullHeightAccessibilityStringId();

    /**
     * @return The resource id of the string announced when the sheet is closed. This is
     *         typically the name of your feature followed by 'closed'.
     */
    int getSheetClosedAccessibilityStringId();

    /**
     * Return {@code true} if the content expects {@link #setOffsetController} to be called.
     *
     * This is an experimental feature. Use it at your own risks. TODO(b/177037825): Remove or
     * cleanup.
     */
    default boolean contentControlsOffset() {
        return false;
    }

    /**
     * Set or reset the set offset callback.
     *
     * The active content can use this callback to move the sheet to the given offset.
     *
     * Only called if {@link #contentControlsOffset} returns {@code true}.
     */
    default void setOffsetController(@Nullable Callback<Integer> setOffset) {}
}
| bsd-3-clause |
ric2b/Vivaldi-browser | chromium/chrome/android/java/src/org/chromium/chrome/browser/gcore/ChromeGoogleApiClientImpl.java | 2904 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.gcore;
import android.content.Context;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import org.chromium.base.Log;
import org.chromium.base.TraceEvent;
import org.chromium.components.externalauth.ExternalAuthUtils;
import java.util.concurrent.TimeUnit;
/**
 * Default implementation for {@link ChromeGoogleApiClient}.
 * Wraps a {@link GoogleApiClient} and adds tracing plus an optional first-party
 * signature check at construction time.
 */
public class ChromeGoogleApiClientImpl implements ChromeGoogleApiClient {
    private static final String TAG = "Icing";

    private final Context mApplicationContext;
    private final GoogleApiClient mClient;
    private final ExternalAuthUtils mExternalAuthUtils;

    /**
     * @param context its application context will be exposed through
     *        {@link #getApplicationContext()}.
     * @param client will be exposed through {@link #getApiClient()}.
     * @param requireFirstPartyBuild true if the given client can only be used in a first-party
     *        build.
     * @throws IllegalStateException if a first-party build is required but Chrome is not
     *         Google-signed.
     */
    public ChromeGoogleApiClientImpl(Context context, GoogleApiClient client,
            boolean requireFirstPartyBuild) {
        mApplicationContext = context.getApplicationContext();
        mClient = client;
        mExternalAuthUtils = ExternalAuthUtils.getInstance();
        if (requireFirstPartyBuild && !mExternalAuthUtils.isChromeGoogleSigned()) {
            throw new IllegalStateException("GoogleApiClient requires first-party build");
        }
    }

    @Override
    public void disconnect() {
        mClient.disconnect();
    }

    @Override
    public boolean isGooglePlayServicesAvailable() {
        // Trace the availability check so it shows up in performance traces.
        TraceEvent.begin("ChromeGoogleApiClientImpl:isGooglePlayServicesAvailable");
        try {
            return mExternalAuthUtils.canUseGooglePlayServices();
        } finally {
            TraceEvent.end("ChromeGoogleApiClientImpl:isGooglePlayServicesAvailable");
        }
    }

    @Override
    public boolean connectWithTimeout(long timeout) {
        // Blocking connect; the timeout is in milliseconds.
        TraceEvent.begin("ChromeGoogleApiClientImpl:connectWithTimeout");
        try {
            ConnectionResult result = mClient.blockingConnect(timeout, TimeUnit.MILLISECONDS);
            if (!result.isSuccess()) {
                Log.e(TAG, "Connection to GmsCore unsuccessful. Error %d", result.getErrorCode());
            } else {
                Log.d(TAG, "Connection to GmsCore successful.");
            }
            return result.isSuccess();
        } finally {
            TraceEvent.end("ChromeGoogleApiClientImpl:connectWithTimeout");
        }
    }

    /** @return the application context supplied at construction time. */
    public Context getApplicationContext() {
        return mApplicationContext;
    }

    /** @return the wrapped {@link GoogleApiClient}. */
    public GoogleApiClient getApiClient() {
        return mClient;
    }
}
| bsd-3-clause |
stachon/XChange | xchange-examples/src/main/java/org/knowm/xchange/examples/bitstamp/account/BitstampFiatWithdrawal.java | 2031 | package org.knowm.xchange.examples.bitstamp.account;
import java.io.IOException;
import java.math.BigDecimal;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.bitstamp.BitstampAuthenticatedV2.BankCurrency;
import org.knowm.xchange.bitstamp.BitstampAuthenticatedV2.Country;
import org.knowm.xchange.bitstamp.dto.account.BitstampBalance;
import org.knowm.xchange.bitstamp.dto.account.BitstampDepositAddress;
import org.knowm.xchange.bitstamp.service.BitstampAccountServiceRaw;
import org.knowm.xchange.examples.bitstamp.BitstampDemoUtils;
import org.knowm.xchange.service.account.AccountService;
/**
 * Demo showing how to perform fiat (SEPA and international) withdrawals through the
 * raw Bitstamp account service. All bank details below are obviously placeholder
 * demo values -- do not run this against a funded account.
 */
public class BitstampFiatWithdrawal {

    public static void main(String[] args) throws IOException {
        Exchange bitstamp = BitstampDemoUtils.createExchange();
        AccountService accountService = bitstamp.getAccountService();
        // Demonstrates the raw (exchange-specific) API rather than the generic one.
        raw((BitstampAccountServiceRaw) accountService);
    }

    /**
     * Prints balance and deposit address, then issues one SEPA and one international
     * withdrawal request with hard-coded demo bank details.
     *
     * @param accountService raw Bitstamp account service
     * @throws IOException on communication errors with the exchange
     */
    private static void raw(BitstampAccountServiceRaw accountService) throws IOException {
        // Get the account information
        BitstampBalance bitstampBalance = accountService.getBitstampBalance();
        System.out.println("BitstampBalance: " + bitstampBalance);
        BitstampDepositAddress depositAddress = accountService.getBitstampBitcoinDepositAddress();
        System.out.println("BitstampDepositAddress address: " + depositAddress);
        // SEPA withdrawal: amount, account holder, IBAN, BIC, address, postal code,
        // city, ISO alpha-2 country code.
        accountService.withdrawSepa(
                new BigDecimal("150"),
                "Test User",
                "BY13NBRB3600900000002Z00AB00",
                "DABAIE2D",
                "Minsk, Belarus, Main street 2",
                "197372",
                "Minsk",
                Country.Belarus.alpha2);
        // International withdrawal: same beneficiary fields plus the receiving bank's
        // details and the payout currency.
        accountService.withdrawInternational(
                new BigDecimal("150"),
                "Test User",
                "BY13NBRB3600900000002Z00AB00",
                "DABAIE2D",
                "Minsk, Belarus, Main street 2",
                "197372",
                "Minsk",
                Country.Belarus.alpha2,
                "Great Bank",
                "Great Bank Address",
                "Great Bank Postal Code",
                "Great Bank City",
                "Bank Country Alpha 2 code",
                BankCurrency.AUD);
    }
}
| mit |
yaqiyang/autorest | src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodyformdata/models/Error.java | 1347 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
package fixtures.bodyformdata.models;
/**
 * Error response model: carries an optional numeric status code and an optional
 * human-readable message. Setters follow the fluent {@code withXxx} convention and
 * return {@code this} so calls can be chained.
 */
public class Error {
    /** Numeric status code of the error; {@code null} until set. */
    private Integer status;

    /** Human-readable error message; {@code null} until set. */
    private String message;

    /**
     * Returns the status code.
     *
     * @return the current status value, or {@code null} if none has been set
     */
    public Integer status() {
        return status;
    }

    /**
     * Stores the given status code.
     *
     * @param status the status value to set
     * @return this {@code Error}, for fluent chaining
     */
    public Error withStatus(Integer status) {
        this.status = status;
        return this;
    }

    /**
     * Returns the error message.
     *
     * @return the current message value, or {@code null} if none has been set
     */
    public String message() {
        return message;
    }

    /**
     * Stores the given error message.
     *
     * @param message the message value to set
     * @return this {@code Error}, for fluent chaining
     */
    public Error withMessage(String message) {
        this.message = message;
        return this;
    }
}
| mit |
julkarnain/Jersey-RS | src/main/java/org/codingpedia/demo/rest/resource/podcast/PodcastLegacyResource.java | 1551 | package org.codingpedia.demo.rest.resource.podcast;
import java.util.List;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.codingpedia.demo.rest.errorhandling.AppException;
import org.codingpedia.demo.rest.service.PodcastService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * Service class that handles REST requests for the legacy podcast endpoints,
 * exposed under {@code /legacy/podcasts} and producing JSON or XML.
 *
 * @author amacoder
 */
@Component
@Path("/legacy/podcasts")
public class PodcastLegacyResource {

    @Autowired
    private PodcastService podcastService;

    /************************************ READ ************************************/
    /**
     * Returns all resources (podcasts) from the database
     *
     * @return list of all legacy podcasts
     * @throws AppException
     */
    @GET
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public List<Podcast> getPodcasts() throws AppException {
        return podcastService.getLegacyPodcasts();
    }

    /**
     * Looks up a single legacy podcast by its id.
     *
     * @param id database id of the podcast
     * @return 200 with the podcast entity when found
     * @throws AppException with HTTP status 404 when no podcast has the given id
     */
    @GET
    @Path("{id}")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public Response findById(@PathParam("id") Long id) throws AppException {
        Podcast podcastById = podcastService.getLegacyPodcastById(id);
        if (podcastById != null) {
            return Response.status(200).entity(podcastById).build();
        } else {
            String message = "The podcast with the id " + id + " does not exist";
            throw new AppException(404, 4004, message, message, "link");
        }
    }
}
| mit |
JetBrains/jdk8u_jdk | test/java/net/Socks/SocksIPv6Test.java | 6287 | /*
* Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/* @test
* @bug 7100957
* @summary Java doesn't correctly handle the SOCKS protocol when used over IPv6.
* @run testng SocksIPv6Test
*/
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.Authenticator;
import java.net.InetSocketAddress;
import java.net.URL;
import java.net.Proxy;
import java.lang.Override;
import java.net.InetAddress;
import java.net.Inet6Address;
import java.net.ServerSocket;
import java.net.SocketException;
import java.net.NetworkInterface;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.List;
import com.sun.net.httpserver.*;
import java.io.BufferedWriter;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/**
 * Verifies that SOCKS proxying works over IPv6: an HTTP server and a SOCKS server
 * are both started on the IPv6 loopback, and URL connections are made through the
 * proxy. The whole suite is skipped (via {@code shouldRun}) when the host has no
 * usable IPv6 loopback.
 */
public class SocksIPv6Test {

    private HttpServer server;
    private SocksServer socks;
    private String response = "Hello.";
    // Gate for the test methods; set once in setUp() based on IPv6 availability.
    private static boolean shouldRun = false;

    @BeforeClass
    public void setUp() throws Exception {
        shouldRun = ensureInet6AddressFamily() && ensureIPv6OnLoopback();
        // HTTP server on an ephemeral port that answers every request with `response`.
        server = HttpServer.create(new InetSocketAddress(0), 0);
        server.createContext("/", ex -> {
            ex.sendResponseHeaders(200, response.length());
            try (BufferedWriter writer = new BufferedWriter(
                    new OutputStreamWriter(ex.getResponseBody(), "UTF-8"))) {
                writer.write(response);
            }
            ex.close();
        });
        server.start();
        // SOCKS server on an ephemeral port, with username/password authentication.
        socks = new SocksServer(0, false);
        socks.addUser("user", "pass");
        socks.start();
        // Default authenticator supplies the SOCKS credentials for all connections.
        Authenticator.setDefault(new Authenticator() {
            @Override
            protected java.net.PasswordAuthentication getPasswordAuthentication() {
                return new java.net.PasswordAuthentication(
                        "user", "pass".toCharArray());
            }
        });
    }

    /**
     * @return true if any loopback interface carries an IPv6 address; logs and
     *         returns false otherwise so the suite can be skipped.
     */
    private boolean ensureIPv6OnLoopback() throws Exception {
        boolean ipv6 = false;
        List<NetworkInterface> nics = Collections.list(NetworkInterface.getNetworkInterfaces());
        for (NetworkInterface nic : nics) {
            if (!nic.isLoopback()) {
                continue;
            }
            List<InetAddress> addrs = Collections.list(nic.getInetAddresses());
            for (InetAddress addr : addrs) {
                if (addr instanceof Inet6Address) {
                    ipv6 = true;
                    break;
                }
            }
        }
        if (!ipv6)
            System.out.println("IPv6 is not enabled on loopback. Skipping test suite.");
        return ipv6;
    }

    /**
     * @return true if a socket can actually be bound to [::1], i.e. the Inet6
     *         address family is usable on this host.
     */
    private boolean ensureInet6AddressFamily() throws IOException {
        try (ServerSocket s = new ServerSocket()) {
            s.bind(new InetSocketAddress("::1", 0));
            return true;
        } catch (SocketException e) {
            System.out.println("Inet 6 address family is not available. Skipping test suite.");
        }
        return false;
    }

    /** Fetches the response through the SOCKS proxy using the literal [::1] address. */
    @Test(groups = "unit")
    public void testSocksOverIPv6() throws Exception {
        if (!shouldRun) return;
        Proxy proxy = new Proxy(Proxy.Type.SOCKS, new InetSocketAddress("::1",
                socks.getPort()));
        URL url = new URL("http://[::1]:" + server.getAddress().getPort());
        java.net.URLConnection conn = url.openConnection(proxy);
        String actual = "";
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            actual = reader.readLine();
        }
        assertEquals(actual, response);
    }

    /**
     * Same as {@link #testSocksOverIPv6()} but addresses the proxy and server by the
     * hostname that [::1] reverse-resolves to; skipped when no distinct IPv6
     * hostname is available.
     */
    @Test(groups = "unit")
    public void testSocksOverIPv6Hostname() throws Exception {
        if (!shouldRun) return;
        String ipv6Hostname = InetAddress.getByName("::1").getHostName();
        String ipv4Hostname = InetAddress.getByName("127.0.0.1").getHostName();
        if (ipv6Hostname.equals(InetAddress.getByName("::1").getHostAddress())) {
            System.out.println("Unable to get the hostname of the IPv6 loopback "
                    + "address. Skipping test case.");
            return;
        }
        if (ipv6Hostname.equals(ipv4Hostname)) {
            System.out.println("IPv6 and IPv4 loopback addresses map to the"
                    + " same hostname. Skipping test case.");
            return;
        }
        Proxy proxy = new Proxy(Proxy.Type.SOCKS, new InetSocketAddress(ipv6Hostname,
                socks.getPort()));
        URL url = new URL("http://" + ipv6Hostname + ":" + server.getAddress().getPort());
        java.net.URLConnection conn = url.openConnection(proxy);
        String actual = "";
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            actual = reader.readLine();
        }
        assertEquals(actual, response);
    }

    /** Stops both servers; null-checks guard against a partially failed setUp(). */
    @AfterClass
    public void tearDown() {
        if (server != null) {
            server.stop(1);
        }
        if (socks != null) {
            socks.terminate();
        }
    }
}
| gpl-2.0 |
RodrigoRubino/DC-UFSCar-ES2-201601-Grupo-Brainstorm | src/test/java/net/sf/jabref/logic/formatter/casechanger/ProtectTermsFormatterTest.java | 777 | package net.sf.jabref.logic.formatter.casechanger;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Tests in addition to the general tests from {@link net.sf.jabref.logic.formatter.FormatterTest}
 */
public class ProtectTermsFormatterTest {

    private final ProtectTermsFormatter formatter = new ProtectTermsFormatter();

    /** Known terms get brace-protected; already-protected or non-matching terms are untouched. */
    @Test
    public void test() {
        assert
Equals("{VLSI}", formatter.format("VLSI"));
        assertEquals("{VLSI}", formatter.format("{VLSI}"));
        assertEquals("VLsI", formatter.format("VLsI"));
        assertEquals("{VLSI} {VLSI}", formatter.format("VLSI {VLSI}"));
    }

    /** The formatter's own example input must round-trip to the documented output. */
    @Test
    public void formatExample() {
        assertEquals("In {CDMA}", formatter.format(formatter.getExampleInput()));
    }
}
| gpl-2.0 |
Jackkal/jpexs-decompiler | libsrc/jpproxy/src/com/jpexs/proxy/ReplacedListener.java | 148 | package com.jpexs.proxy;
public interface ReplacedListener {
public void replaced(Replacement replacement, String url, String contentType);
}
| gpl-3.0 |
sonbt56/aacplayer-android | src/com/spoledge/aacplayer/MMSInputStream.java | 4266 | /*
** AACPlayer - Freeware Advanced Audio (AAC) Player for Android
** Copyright (C) 2011 Spolecne s.r.o., http://www.spoledge.com
**
** This program is free software; you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation; either version 3 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program. If not, see <http://www.gnu.org/licenses/>.
**/
package com.spoledge.aacplayer;
import android.util.Log;
import java.io.InputStream;
import java.io.IOException;
/**
 * This is a MMS input stream - reads data from a MMS stream.
 * <pre>
 * MMSInputStream mmsis = new MMSInputStream( "mms://..." );
 * ...
 * mmsis.read( buffer );
 * ...
 * mmsis.close();
 * </pre>
 *
 * All actual protocol work is delegated to the native "mms" library via JNI;
 * the {@code mms} field is an opaque native handle.
 */
public class MMSInputStream extends InputStream {

    private static final String LOG = "MMSInputStream";

    // Guarded by class lock in ensureLibLoaded(); the native library is loaded once.
    private static boolean libLoaded = false;

    ////////////////////////////////////////////////////////////////////////////
    // Attributes
    ////////////////////////////////////////////////////////////////////////////

    /**
     * The native handler.
     */
    private int mms;

    ////////////////////////////////////////////////////////////////////////////
    // Constructors
    ////////////////////////////////////////////////////////////////////////////

    /**
     * Opens a connection to the given MMS url.
     *
     * @param url the "mms://..." url to connect to
     * @throws IOException if the native connect fails
     */
    public MMSInputStream( String url ) throws IOException {
        ensureLibLoaded();
        mms = nativeConnect( url );
    }

    ////////////////////////////////////////////////////////////////////////////
    // Public - InputStream
    ////////////////////////////////////////////////////////////////////////////

    /**
     * Please do not use this method - this is not efficient !
     */
    public int read() throws IOException {
        byte[] buf = new byte[1];
        int n;
        // NOTE(review): spins until the native read delivers a byte (or EOF/-1);
        // a zero return from the bulk read triggers an immediate retry with no
        // back-off -- confirm the native layer blocks rather than returning 0 in
        // a tight loop.
        while ((n = read( buf, 0, 1)) == 0) {
        }
        // Mask to an unsigned value, as required by the InputStream.read() contract.
        if (n == 1) return ((int)buf[0]) & 0xff;
        return n;
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        return nativeRead( mms, b, off, len );
    }

    @Override
    public void close() throws IOException {
        nativeClose( mms );
    }

    ////////////////////////////////////////////////////////////////////////////
    // Private
    ////////////////////////////////////////////////////////////////////////////

    /**
     * Loads the native "mms" library exactly once (thread-safe via the class lock).
     */
    private static synchronized void ensureLibLoaded() {
        if (!libLoaded) {
            System.loadLibrary( "mms" );
            libLoaded = true;
        }
    }

    // Little-endian UTF-16, the wire encoding expected by libmms for strings.
    private static final String CHARSET_NAME_UNICODE = "UTF-16LE";

    /**
     * This is a conversion method used by libmms (called back from native code):
     * encodes a Java string as UTF-16LE bytes.
     */
    private static byte[] string2unicode( String s ) {
        Log.d( LOG, "string2unicode(): '" + s + "'" );
        // return s.getBytes( CHARSET_UNICODE );
        try {
            return s.getBytes( CHARSET_NAME_UNICODE );
        }
        catch (java.io.UnsupportedEncodingException e) {
            // UTF-16LE is guaranteed on Android, so this should be unreachable.
            Log.e( LOG, "Cannot convert string --> unicode", e );
            throw new RuntimeException( e );
        }
    }

    /**
     * This is a conversion method used by libmms (called back from native code):
     * decodes UTF-16LE bytes into a Java string.
     */
    private static String unicode2string( byte[] bytes ) {
        try {
            String ret = new String( bytes, CHARSET_NAME_UNICODE );
            Log.d( LOG, "unicode2string(): '" + ret + "'" );
            return ret;
        }
        catch (java.io.UnsupportedEncodingException e) {
            Log.e( LOG, "Cannot convert unicode --> string", e );
            throw new RuntimeException( e );
        }
    }

    // Native entry points implemented by libmms.
    private native int nativeConnect( String url ) throws IOException;

    private native int nativeRead( int mms, byte[] b, int off, int len ) throws IOException;

    private native void nativeClose( int mms ) throws IOException;
}
| gpl-3.0 |
dennishendriksen/molgenis | molgenis-data/src/test/java/org/molgenis/data/UnknownEnumValueExceptionTest.java | 1465 | package org.molgenis.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.molgenis.util.exception.ExceptionMessageTest;
/**
 * Verifies the raw and localized (en/nl) messages produced by
 * {@link UnknownEnumValueException}.
 */
class UnknownEnumValueExceptionTest extends ExceptionMessageTest {
    @BeforeEach
    void setUp() {
        // Register the "data" namespace so localized messages can be resolved.
        messageSource.addMolgenisNamespaces("data");
    }

    /**
     * Checks the localized message for each language/message pair supplied by
     * {@link #languageMessageProvider()}.
     */
    @ParameterizedTest
    @MethodSource("languageMessageProvider")
    @Override
    protected void testGetLocalizedMessage(String lang, String message) {
        ExceptionMessageTest.assertExceptionMessageEquals(
            new UnknownEnumValueException("MyEntityType", "myAttributeName", mock(Throwable.class)),
            lang,
            message);
    }

    /** Checks the raw (non-localized) diagnostic message. */
    @Test
    void testGetMessage() {
        UnknownEnumValueException ex =
            new UnknownEnumValueException("MyEntityType", "myAttributeName", mock(Throwable.class));
        assertEquals("entityTypeId:MyEntityType attributeName:myAttributeName", ex.getMessage());
    }

    // Referenced by name from @MethodSource above — do not rename.
    static Object[][] languageMessageProvider() {
        return new Object[][] {
            new Object[] {
                "en", "Unknown enum value for attribute 'myAttributeName' of entity 'MyEntityType'."
            },
            new Object[] {
                "nl", "Onbekende enum waarde voor attribuut 'myAttributeName' van entiteit 'MyEntityType'."
            }
        };
    }
}
| lgpl-3.0 |
jcarvalho/fenixedu-academic | src/main/java/org/fenixedu/academic/domain/Professorship.java | 10354 | /**
* Copyright © 2002 Instituto Superior Técnico
*
* This file is part of FenixEdu Academic.
*
* FenixEdu Academic is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FenixEdu Academic is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
*/
package org.fenixedu.academic.domain;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.commons.beanutils.BeanComparator;
import org.apache.commons.lang.StringUtils;
import org.fenixedu.academic.domain.exceptions.DomainException;
import org.fenixedu.academic.util.Bundle;
import org.fenixedu.bennu.core.domain.Bennu;
import org.fenixedu.bennu.core.i18n.BundleUtil;
import org.fenixedu.bennu.core.security.Authenticate;
import pt.ist.fenixframework.Atomic;
/**
* @author João Mota
*/
/**
 * Links a {@link Person} (teacher) to an {@link ExecutionCourse}, optionally
 * marking the person as responsible for the course. Persistence relations are
 * inherited from the generated {@code Professorship_Base}.
 *
 * @author João Mota
 */
public class Professorship extends Professorship_Base {

    /** Orders professorships alphabetically by the person's name (locale-aware collation). */
    public static final Comparator<Professorship> COMPARATOR_BY_PERSON_NAME = new BeanComparator("person.name",
            Collator.getInstance());

    public Professorship() {
        super();
        setRootDomainObject(Bennu.getInstance());
        // Every professorship owns a permissions object from the moment it is created.
        new ProfessorshipPermissions(this);
    }

    /** @return true if this professorship's course runs in the given semester */
    public boolean belongsToExecutionPeriod(ExecutionSemester executionSemester) {
        return this.getExecutionCourse().getExecutionPeriod().equals(executionSemester);
    }

    /**
     * Atomically creates a professorship linking {@code person} to {@code executionCourse},
     * moving any pre-existing summaries of the teacher under the new professorship
     * and writing a management log entry.
     *
     * @param responsibleFor whether the person is responsible for the course (non-null)
     * @param executionCourse the course being taught (non-null)
     * @param person the teacher's person record (non-null)
     * @return the newly created professorship
     * @throws DomainException if the person is already associated with the course
     */
    @Atomic
    public static Professorship create(Boolean responsibleFor, ExecutionCourse executionCourse, Person person) {
        Objects.requireNonNull(responsibleFor);
        Objects.requireNonNull(executionCourse);
        Objects.requireNonNull(person);
        if (executionCourse.getProfessorshipsSet().stream().anyMatch(p -> person.equals(p.getPerson()))) {
            throw new DomainException("error.teacher.already.associated.to.professorship");
        }
        Professorship professorShip = new Professorship();
        professorShip.setExecutionCourse(executionCourse);
        professorShip.setPerson(person);
        professorShip.setCreator(Authenticate.getUser().getPerson());
        professorShip.setResponsibleFor(responsibleFor);
        if (person.getTeacher() != null) {
            // Summaries authored by this teacher are re-attached to the new professorship.
            executionCourse.moveSummariesFromTeacherToProfessorship(person.getTeacher(), professorShip);
        }
        ProfessorshipManagementLog.createLog(professorShip.getExecutionCourse(), Bundle.MESSAGING,
                "log.executionCourse.professorship.added", professorShip.getPerson().getPresentationName(), professorShip
                        .getExecutionCourse().getNome(), professorShip.getExecutionCourse().getDegreePresentationString());
        return professorShip;
    }

    /**
     * Deletes this professorship: checks deletion blockers, logs the removal,
     * severs all relations (including permissions) and removes the object.
     *
     * @throws DomainException if deletion is blocked (see {@link #checkForDeletionBlockers})
     */
    public void delete() {
        DomainException.throwWhenDeleteBlocked(getDeletionBlockers());
        ProfessorshipManagementLog.createLog(getExecutionCourse(), Bundle.MESSAGING, "log.executionCourse.professorship.removed",
                getPerson().getPresentationName(), getExecutionCourse().getNome(), getExecutionCourse()
                        .getDegreePresentationString());
        setExecutionCourse(null);
        setPerson(null);
        if (super.getPermissions() != null) {
            getPermissions().delete();
        }
        setRootDomainObject(null);
        setCreator(null);
        deleteDomainObject();
    }

    /**
     * Adds a blocker for each condition that prevents deletion: associated
     * summaries or shift professorships.
     */
    @Override
    protected void checkForDeletionBlockers(Collection<String> blockers) {
        super.checkForDeletionBlockers(blockers);
        if (!getAssociatedSummariesSet().isEmpty()) {
            blockers.add(BundleUtil.getString(Bundle.APPLICATION, "error.remove.professorship.hasAnyAssociatedSummaries"));
        }
        if (!getAssociatedShiftProfessorshipSet().isEmpty()) {
            blockers.add(BundleUtil
                    .getString(Bundle.APPLICATION, "error.remove.professorship.hasAnyAssociatedShiftProfessorship"));
        }
    }

    /** @return true if no deletion blockers exist */
    public boolean isDeletable() {
        return getDeletionBlockers().isEmpty();
    }

    /** @return true if this person is responsible for the course */
    public boolean isResponsibleFor() {
        return getResponsibleFor().booleanValue();
    }

    /** Collects professorships of all execution courses of the plan's curricular courses in the given year. */
    public static List<Professorship> readByDegreeCurricularPlanAndExecutionYear(DegreeCurricularPlan degreeCurricularPlan,
            ExecutionYear executionYear) {
        Set<Professorship> professorships = new HashSet<Professorship>();
        for (CurricularCourse curricularCourse : degreeCurricularPlan.getCurricularCoursesSet()) {
            for (ExecutionCourse executionCourse : curricularCourse.getExecutionCoursesByExecutionYear(executionYear)) {
                professorships.addAll(executionCourse.getProfessorshipsSet());
            }
        }
        return new ArrayList<Professorship>(professorships);
    }

    /** Same as above, restricted to curricular courses whose "basic" flag equals {@code basic}. */
    public static List<Professorship> readByDegreeCurricularPlanAndExecutionYearAndBasic(
            DegreeCurricularPlan degreeCurricularPlan, ExecutionYear executionYear, Boolean basic) {
        Set<Professorship> professorships = new HashSet<Professorship>();
        for (CurricularCourse curricularCourse : degreeCurricularPlan.getCurricularCoursesSet()) {
            if (curricularCourse.getBasic().equals(basic)) {
                for (ExecutionCourse executionCourse : curricularCourse.getExecutionCoursesByExecutionYear(executionYear)) {
                    professorships.addAll(executionCourse.getProfessorshipsSet());
                }
            }
        }
        return new ArrayList<Professorship>(professorships);
    }

    /** Collects professorships of the plan's courses running in the given semester. */
    public static List<Professorship> readByDegreeCurricularPlanAndExecutionPeriod(DegreeCurricularPlan degreeCurricularPlan,
            ExecutionSemester executionSemester) {
        Set<Professorship> professorships = new HashSet<Professorship>();
        for (CurricularCourse curricularCourse : degreeCurricularPlan.getCurricularCoursesSet()) {
            for (ExecutionCourse executionCourse : curricularCourse.getExecutionCoursesByExecutionPeriod(executionSemester)) {
                professorships.addAll(executionCourse.getProfessorshipsSet());
            }
        }
        return new ArrayList<Professorship>(professorships);
    }

    /**
     * Multi-plan variant; a null "basic" flag on a course is treated as matching,
     * and a null {@code executionYear} means "all execution courses".
     */
    public static List<Professorship> readByDegreeCurricularPlansAndExecutionYearAndBasic(
            List<DegreeCurricularPlan> degreeCurricularPlans, ExecutionYear executionYear, Boolean basic) {
        Set<Professorship> professorships = new HashSet<Professorship>();
        for (DegreeCurricularPlan degreeCurricularPlan : degreeCurricularPlans) {
            for (CurricularCourse curricularCourse : degreeCurricularPlan.getCurricularCoursesSet()) {
                if (curricularCourse.getBasic() == null || curricularCourse.getBasic().equals(basic)) {
                    if (executionYear != null) {
                        for (ExecutionCourse executionCourse : curricularCourse.getExecutionCoursesByExecutionYear(executionYear)) {
                            professorships.addAll(executionCourse.getProfessorshipsSet());
                        }
                    } else {
                        for (ExecutionCourse executionCourse : curricularCourse.getAssociatedExecutionCoursesSet()) {
                            professorships.addAll(executionCourse.getProfessorshipsSet());
                        }
                    }
                }
            }
        }
        return new ArrayList<Professorship>(professorships);
    }

    /** Multi-plan variant without the "basic" filter; null {@code executionYear} means "all execution courses". */
    public static List<Professorship> readByDegreeCurricularPlansAndExecutionYear(
            List<DegreeCurricularPlan> degreeCurricularPlans, ExecutionYear executionYear) {
        Set<Professorship> professorships = new HashSet<Professorship>();
        for (DegreeCurricularPlan degreeCurricularPlan : degreeCurricularPlans) {
            for (CurricularCourse curricularCourse : degreeCurricularPlan.getCurricularCoursesSet()) {
                if (executionYear != null) {
                    for (ExecutionCourse executionCourse : curricularCourse.getExecutionCoursesByExecutionYear(executionYear)) {
                        professorships.addAll(executionCourse.getProfessorshipsSet());
                    }
                } else {
                    for (ExecutionCourse executionCourse : curricularCourse.getAssociatedExecutionCoursesSet()) {
                        professorships.addAll(executionCourse.getProfessorshipsSet());
                    }
                }
            }
        }
        return new ArrayList<Professorship>(professorships);
    }

    /** Convenience accessor: the teacher behind the associated person. */
    public Teacher getTeacher() {
        return getPerson().getTeacher();
    }

    /** Convenience mutator: associates the teacher's person record. */
    public void setTeacher(Teacher teacher) {
        setPerson(teacher.getPerson());
    }

    /** Normalizes null to FALSE so {@link #isResponsibleFor()} never NPEs on unboxing. */
    @Override
    public void setResponsibleFor(Boolean responsibleFor) {
        if (responsibleFor == null) {
            responsibleFor = Boolean.FALSE;
        }
        super.setResponsibleFor(responsibleFor);
    }

    /** @return true if a person with a teacher record is associated */
    public boolean hasTeacher() {
        return getPerson() != null && getPerson().getTeacher() != null;
    }

    /** Detaches the person (and thus the teacher) from this professorship. */
    public void removeTeacher() {
        setPerson(null);
    }

    /** @return comma-separated, de-duplicated acronyms of the degrees this course belongs to */
    public String getDegreeSiglas() {
        Set<String> degreeSiglas = new HashSet<String>();
        for (CurricularCourse curricularCourse : getExecutionCourse().getAssociatedCurricularCoursesSet()) {
            degreeSiglas.add(curricularCourse.getDegreeCurricularPlan().getDegree().getSigla());
        }
        return StringUtils.join(degreeSiglas, ", ");
    }

    /** @return comma-separated, de-duplicated names of the degree curricular plans this course belongs to */
    public String getDegreePlanNames() {
        Set<String> degreeSiglas = new HashSet<String>();
        for (CurricularCourse curricularCourse : getExecutionCourse().getAssociatedCurricularCoursesSet()) {
            degreeSiglas.add(curricularCourse.getDegreeCurricularPlan().getName());
        }
        return StringUtils.join(degreeSiglas, ", ");
    }
}
| lgpl-3.0 |
sajithar/product-es | modules/integration/tests-ui-integration/tests-ui/src/test/java/org/wso2/es/ui/integration/test/notifications/ESPublisherNotificationTestCase.java | 6218 | /*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.es.ui.integration.test.notifications;
import org.openqa.selenium.By;
import org.testng.annotations.*;
import org.wso2.carbon.automation.engine.context.AutomationContext;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.automation.extensions.selenium.BrowserManager;
import org.wso2.es.integration.common.clients.ResourceAdminServiceClient;
import org.wso2.es.ui.integration.util.AssetUtil;
import org.wso2.es.ui.integration.util.BaseUITestCase;
import org.wso2.es.ui.integration.util.ESUtil;
import org.wso2.es.ui.integration.util.ESWebDriver;
import java.io.File;
import static org.testng.Assert.assertTrue;
/**
* Notification framework test for Super Tenant: Super admin & Super user
* Adds a new asset
* Update it & check for notifications
* Do an LC transition on it & check for notifications
*/
/**
 * UI notification test, run (via the TestNG factory) once for the super-tenant
 * admin and once for a super-tenant user: publishes a gadget, then verifies by
 * reading a real mailbox over SMTP that lifecycle-change and asset-update
 * notification e-mails are delivered.
 */
public class ESPublisherNotificationTestCase extends BaseUITestCase {
    private TestUserMode userMode;
    private static final String EMAIL = "test.automation.dummy@gmail.com";
    private static final String EMAIL_PWD = "automation.test1";
    private static final String FIRST_NAME = "name 1";
    private static final String LAST_NAME = "name 2";
    private static final String VERSION = "1.0.0";
    private static final String CREATED_TIME = "12";
    private static final String ASSET_TYPE = "gadget";
    private static final String ASSET_DESCRIPTION = "Test Description";
    private static final String SMTP_PROPERTY_FILE = File.separator + "notifications" + File.separator + "smtp.properties";
    private static final int MAX_POLL_COUNT = 30;
    // Subjects are completed with the asset's registry path in setUp().
    private String LCNotificationSubject = "[StoreLifecycleStateChange] at path: ";
    private String updateNotificationSubject = "[StoreAssetUpdate] at path: ";
    private String assetName;

    /** TestNG factory constructor; parameters come from {@link #userModeProvider()}. */
    @Factory(dataProvider = "userMode")
    public ESPublisherNotificationTestCase(TestUserMode testUserMode, String assetName) {
        this.userMode = testUserMode;
        this.assetName = assetName;
    }

    /**
     * Prepares the run: resolves credentials/URLs for the current user mode,
     * sets the user's e-mail profile through the admin console, logs into the
     * publisher and creates the gadget under test.
     */
    @BeforeClass(alwaysRun = true)
    public void setUp() throws Exception {
        super.init(userMode);
        driver = new ESWebDriver(BrowserManager.getWebDriver());
        currentUserName = userInfo.getUserName().split("@")[0];
        currentUserPwd = userInfo.getPassword();
        baseUrl = getWebAppURL();
        AutomationContext automationContext = new AutomationContext(PRODUCT_GROUP_NAME, TestUserMode.SUPER_TENANT_ADMIN);
        adminUserName = automationContext.getSuperTenant().getTenantAdmin().getUserName().split("@")[0];
        adminUserPwd = automationContext.getSuperTenant().getTenantAdmin().getPassword();
        String resourceLocation = getResourceLocation();
        backendURL = automationContext.getContextUrls().getBackEndUrl();
        resourceAdminServiceClient = new ResourceAdminServiceClient(backendURL, adminUserName, adminUserPwd);
        resourcePath = GADGET_REGISTRY_BASE_PATH + currentUserName + "/" + assetName + "/" + VERSION;
        LCNotificationSubject += resourcePath;
        updateNotificationSubject += resourcePath;
        smtpPropertyLocation = resourceLocation + SMTP_PROPERTY_FILE;
        //Update user profiles through Admin console
        ESUtil.loginToAdminConsole(driver, baseUrl, adminUserName, adminUserPwd);
        ESUtil.setupUserProfile(driver, baseUrl, currentUserName, FIRST_NAME, LAST_NAME, EMAIL);
        //login to publisher & add a new gadget
        ESUtil.login(driver, baseUrl, PUBLISHER_APP, currentUserName, currentUserPwd);
        AssetUtil.addNewAsset(driver, baseUrl, ASSET_TYPE, assetName, VERSION, "", "", "");
    }

    /** Waits for the asset to appear, then asserts the lifecycle-change mail arrived. */
    @Test(groups = "wso2.es.notification", description = "Testing mails for LC state change event")
    public void testLCNotification() throws Exception {
        //check notification for initial LC state change
        driver.findElementPoll(By.linkText(assetName), MAX_POLL_COUNT);
        //read email using smtp
        boolean hasMail = (ESUtil.readEmail(smtpPropertyLocation, EMAIL_PWD, EMAIL, LCNotificationSubject) != null);
        assertTrue(hasMail, "LC Notification failed for user:" + currentUserName);
    }

    /** Updates the gadget's description, then asserts the asset-update mail arrived. */
    @Test(groups = "wso2.es.notification", description = "Testing mails for asset update event",
            dependsOnMethods = "testLCNotification")
    public void testUpdateNotification() throws Exception {
        //Update gadget and check lC state change notification
        driver.get(baseUrl + PUBLISHER_GADGET_LIST_PAGE);
        AssetUtil.updateAsset(driver, baseUrl, ASSET_TYPE, assetName, ASSET_DESCRIPTION);
        driver.get(baseUrl + PUBLISHER_GADGET_LIST_PAGE);
        //read email using smtp
        boolean hasMail = (ESUtil.readEmail(smtpPropertyLocation, EMAIL_PWD, EMAIL, updateNotificationSubject) != null);
        assertTrue(hasMail, "Asset Update Notification failed for user:" + currentUserName);
    }

    /** Cleans up: deletes the registry resource and logs out of both consoles. */
    @AfterClass(alwaysRun = true)
    public void tearDown() throws Exception {
        //delete gadget and email, logout from admin console and publisher
        resourceAdminServiceClient.deleteResource(resourcePath);
        ESUtil.logoutFromAdminConsole(driver, baseUrl);
        driver.get(baseUrl + PUBLISHER_LOGOUT_URL);
        driver.quit();
    }

    // Referenced by name from @Factory(dataProvider = "userMode") — do not rename.
    @DataProvider(name = "userMode")
    private static Object[][] userModeProvider() {
        return new Object[][]{{TestUserMode.SUPER_TENANT_ADMIN, "Notification asset - SuperAdmin"},
                {TestUserMode.SUPER_TENANT_USER, "Notification asset - SuperUser"}};
    }
} | apache-2.0 |
bhupeshchawda/incubator-samoa | samoa-flink/src/main/java/org/apache/samoa/flink/topology/impl/FlinkEntranceProcessingItem.java | 2714 | package org.apache.samoa.flink.topology.impl;
/*
* #%L
* SAMOA
* %%
* Copyright (C) 2014 - 2015 Apache Software Foundation
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.samoa.core.EntranceProcessor;
import org.apache.samoa.flink.helpers.Utils;
import org.apache.samoa.topology.AbstractEntranceProcessingItem;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import java.io.Serializable;
/**
 * SAMOA entrance processing item backed by a Flink source: wraps an
 * {@link EntranceProcessor} as a Flink {@code RichSourceFunction} that pulls
 * events from the processor until it is exhausted or the job is cancelled.
 */
public class FlinkEntranceProcessingItem extends AbstractEntranceProcessingItem
        implements FlinkComponent, Serializable {

    // transient: Flink environment and stream handles are not serializable state.
    private transient StreamExecutionEnvironment env;
    private transient DataStream outStream;

    public FlinkEntranceProcessingItem(StreamExecutionEnvironment env, EntranceProcessor proc) {
        super(proc);
        this.env = env;
    }

    /**
     * Builds the Flink source for this entrance PI and initialises the output
     * stream. Must be called before the topology is executed.
     */
    @Override
    public void initialise() {
        // Copied into effectively-final locals so the anonymous source function
        // captures serializable values rather than this (non-serializable) item.
        final EntranceProcessor proc = getProcessor();
        final String streamId = getOutputStream().getStreamId();
        final int compID = getComponentId();
        outStream = env.addSource(new RichSourceFunction() {

            // volatile so a cancel() from another thread is seen by run().
            private volatile boolean isCancelled;

            @Override
            public void run(SourceContext sourceContext) throws Exception {
                // Emit events until the processor runs dry or the job is cancelled.
                while(!isCancelled && entrProc.hasNext())
                {
                    sourceContext.collect(SamoaType.of(entrProc.nextEvent(), id));
                }
            }

            @Override
            public void cancel() {
                isCancelled = true;
            }

            // Instance fields of the anonymous class, initialised from the locals
            // above so they are serialized with the function.
            EntranceProcessor entrProc = proc;
            String id = streamId;

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                // Let the processor initialise itself with this component's id.
                entrProc.onCreate(compID);
            }
        }).returns(Utils.tempTypeInfo);
        ((FlinkStream) getOutputStream()).initialise();
    }

    /** Entrance items have no upstream dependencies, so they can always be initialised. */
    @Override
    public boolean canBeInitialised() {
        return true;
    }

    @Override
    public boolean isInitialised() {
        return outStream != null;
    }

    /** @return -1, a sentinel indicating this component is an entrance PI */
    @Override
    public int getComponentId() {
        return -1; // dummy number shows that it comes from an Entrance PI
    }

    @Override
    public DataStream getOutStream() {
        return outStream;
    }
}
| apache-2.0 |
danielmitterdorfer/elasticsearch | core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java | 11999 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import static java.util.Collections.unmodifiableMap;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
import org.apache.lucene.queryparser.classic.QueryParserSettings;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
/**
* Context object used to create lucene queries on the shard level.
*/
/**
 * Context object used to create lucene queries on the shard level.
 * <p>
 * Holds per-parse mutable state (named queries, nested scope, filter flag,
 * unmapped-field policy); {@code reset()} is invoked before and after each
 * {@code toQuery}/{@code toFilter} call, so instances are not safe for
 * concurrent parsing.
 */
public class QueryShardContext extends QueryRewriteContext {

    private final MapperService mapperService;
    private final SimilarityService similarityService;
    private final BitsetFilterCache bitsetFilterCache;
    private final IndexFieldDataService indexFieldDataService;
    private final IndexSettings indexSettings;
    // Explicit document types to query; empty array means "all types".
    private String[] types = Strings.EMPTY_ARRAY;

    public void setTypes(String... types) {
        this.types = types;
    }

    public String[] getTypes() {
        return types;
    }

    // Queries registered under a name during parsing (cleared by reset()).
    private final Map<String, Query> namedQueries = new HashMap<>();
    private final MapperQueryParser queryParser = new MapperQueryParser(this);
    private final IndicesQueriesRegistry indicesQueriesRegistry;
    private boolean allowUnmappedFields;
    private boolean mapUnmappedFieldAsString;
    private NestedScope nestedScope;
    private boolean isFilter;

    public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService,
            MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
            final IndicesQueriesRegistry indicesQueriesRegistry, Client client,
            IndexReader reader, ClusterState clusterState) {
        super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState);
        this.indexSettings = indexSettings;
        this.similarityService = similarityService;
        this.mapperService = mapperService;
        this.bitsetFilterCache = bitsetFilterCache;
        this.indexFieldDataService = indexFieldDataService;
        this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
        this.indicesQueriesRegistry = indicesQueriesRegistry;
        this.nestedScope = new NestedScope();
    }

    /** Copy constructor: shares services with {@code source} and copies its explicit types. */
    public QueryShardContext(QueryShardContext source) {
        this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService,
                source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client,
                source.reader, source.clusterState);
        this.types = source.getTypes();
    }

    // Clears all per-parse state; called before and after each parse.
    private void reset() {
        allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
        this.lookup = null;
        this.namedQueries.clear();
        this.nestedScope = new NestedScope();
        this.isFilter = false;
    }

    public AnalysisService getAnalysisService() {
        return mapperService.analysisService();
    }

    /** @return the similarity to use for searching, or null if no similarity service is configured */
    public Similarity getSearchSimilarity() {
        return similarityService != null ? similarityService.similarity(mapperService) : null;
    }

    public String defaultField() {
        return indexSettings.getDefaultField();
    }

    public boolean queryStringLenient() {
        return indexSettings.isQueryStringLenient();
    }

    public boolean queryStringAnalyzeWildcard() {
        return indexSettings.isQueryStringAnalyzeWildcard();
    }

    public boolean queryStringAllowLeadingWildcard() {
        return indexSettings.isQueryStringAllowLeadingWildcard();
    }

    /** Re-configures and returns the shared query-string parser (single instance per context). */
    public MapperQueryParser queryParser(QueryParserSettings settings) {
        queryParser.reset(settings);
        return queryParser;
    }

    public BitSetProducer bitsetFilter(Query filter) {
        return bitsetFilterCache.getBitSetProducer(filter);
    }

    public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
        return indexFieldDataService.getForField(mapper);
    }

    /** Registers a named query during parsing; null queries are silently ignored. */
    public void addNamedQuery(String name, Query query) {
        if (query != null) {
            namedQueries.put(name, query);
        }
    }

    /** @return an unmodifiable snapshot of the named queries collected so far */
    public Map<String, Query> copyNamedQueries() {
        // This might be a good use case for CopyOnWriteHashMap
        return unmodifiableMap(new HashMap<>(namedQueries));
    }

    /**
     * Return whether we are currently parsing a filter or a query.
     */
    public boolean isFilter() {
        return isFilter;
    }

    /**
     * Public for testing only!
     *
     * Sets whether we are currently parsing a filter or a query
     */
    public void setIsFilter(boolean isFilter) {
        this.isFilter = isFilter;
    }

    /** Expands a field-name pattern (possibly with wildcards) to concrete index field names. */
    public Collection<String> simpleMatchToIndexNames(String pattern) {
        return mapperService.simpleMatchToIndexNames(pattern);
    }

    /**
     * Resolves the field type for {@code name}, applying the unmapped-field
     * policy (see {@link #failIfFieldMappingNotFound}).
     */
    public MappedFieldType fieldMapper(String name) {
        return failIfFieldMappingNotFound(name, mapperService.fullName(name));
    }

    public ObjectMapper getObjectMapper(String name) {
        return mapperService.getObjectMapper(name);
    }

    /**
     * Gets the search analyzer for the given field, or the default if there is none present for the field
     * TODO: remove this by moving defaults into mappers themselves
     */
    public Analyzer getSearchAnalyzer(MappedFieldType fieldType) {
        if (fieldType.searchAnalyzer() != null) {
            return fieldType.searchAnalyzer();
        }
        return getMapperService().searchAnalyzer();
    }

    /**
     * Gets the search quote analyzer for the given field, or the default if there is none present for the field
     * TODO: remove this by moving defaults into mappers themselves
     */
    public Analyzer getSearchQuoteAnalyzer(MappedFieldType fieldType) {
        if (fieldType.searchQuoteAnalyzer() != null) {
            return fieldType.searchQuoteAnalyzer();
        }
        return getMapperService().searchQuoteAnalyzer();
    }

    public void setAllowUnmappedFields(boolean allowUnmappedFields) {
        this.allowUnmappedFields = allowUnmappedFields;
    }

    public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) {
        this.mapUnmappedFieldAsString = mapUnmappedFieldAsString;
    }

    /**
     * Applies the unmapped-field policy: returns the mapping when found (or when
     * unmapped fields are allowed), synthesizes a text field when
     * {@code mapUnmappedFieldAsString} is set, and fails otherwise.
     *
     * @throws QueryShardException if the field is unmapped and neither fallback applies
     */
    MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
        if (fieldMapping != null || allowUnmappedFields) {
            return fieldMapping;
        } else if (mapUnmappedFieldAsString) {
            TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
            return builder.build(new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath(1))).fieldType();
        } else {
            throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name);
        }
    }

    /**
     * Returns the narrowed down explicit types, or, if not set, all types.
     */
    public Collection<String> queryTypes() {
        String[] types = getTypes();
        if (types == null || types.length == 0) {
            return getMapperService().types();
        }
        if (types.length == 1 && types[0].equals("_all")) {
            return getMapperService().types();
        }
        return Arrays.asList(types);
    }

    // Lazily-built lookup; cleared by reset(). The active SearchContext wins when present.
    private SearchLookup lookup = null;

    public SearchLookup lookup() {
        SearchContext current = SearchContext.current();
        if (current != null) {
            return current.lookup();
        }
        if (lookup == null) {
            lookup = new SearchLookup(getMapperService(), indexFieldDataService, null);
        }
        return lookup;
    }

    /** @return the active search context's notion of "now", or the wall clock when none is active */
    public long nowInMillis() {
        SearchContext current = SearchContext.current();
        if (current != null) {
            return current.nowInMillis();
        }
        return System.currentTimeMillis();
    }

    public NestedScope nestedScope() {
        return nestedScope;
    }

    public Version indexVersionCreated() {
        return indexSettings.getIndexVersionCreated();
    }

    /** @return true if any of the given index names/patterns matches this shard's index */
    public boolean matchesIndices(String... indices) {
        for (String index : indices) {
            if (indexSettings.matchesIndexName(index)) {
                return true;
            }
        }
        return false;
    }

    /** Parses the builder in filter context; a null filter result stays null (no match-none substitution). */
    public ParsedQuery toFilter(QueryBuilder queryBuilder) {
        return toQuery(queryBuilder, q -> {
            Query filter = q.toFilter(this);
            if (filter == null) {
                return null;
            }
            return filter;
        });
    }

    /** Parses the builder in query context; a fully-rewritten-away query becomes match-none. */
    public ParsedQuery toQuery(QueryBuilder queryBuilder) {
        return toQuery(queryBuilder, q -> {
            Query query = q.toQuery(this);
            if (query == null) {
                query = Queries.newMatchNoDocsQuery("No query left after rewrite.");
            }
            return query;
        });
    }

    // Local checked-exception-friendly Function variant for the lambdas above.
    @FunctionalInterface
    private interface CheckedFunction<T, R> {
        R apply(T t) throws IOException;
    }

    /**
     * Shared parse pipeline: resets state, rewrites the builder, converts it via
     * {@code filterOrQuery}, and packages the result with the collected named
     * queries. State is reset again in the finally block regardless of outcome.
     *
     * @throws QueryShardException wrapping any unexpected failure (known parse
     *         exceptions are rethrown as-is)
     */
    private ParsedQuery toQuery(QueryBuilder queryBuilder, CheckedFunction<QueryBuilder, Query> filterOrQuery) {
        reset();
        try {
            QueryBuilder rewriteQuery = QueryBuilder.rewriteQuery(queryBuilder, this);
            return new ParsedQuery(filterOrQuery.apply(rewriteQuery), copyNamedQueries());
        } catch(QueryShardException | ParsingException e ) {
            throw e;
        } catch(Exception e) {
            throw new QueryShardException(this, "failed to create query: {}", e, queryBuilder);
        } finally {
            reset();
        }
    }

    public final Index index() {
        return indexSettings.getIndex();
    }
}
| apache-2.0 |
honestrock/FragmentTransactionExtended | fragmentTransactionExample/src/main/java/com/desarrollodroide/fragmenttrasitionextendedexample/MainActivity.java | 2760 | package com.desarrollodroide.fragmenttrasitionextendedexample;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.Spinner;
import com.desarrollodroide.libraryfragmenttransactionextended.FragmentTransactionExtended;
/**
 * Demo activity: a spinner selects one of the library's fragment transitions;
 * a button toggles between pushing the second fragment (with the selected
 * transition) and popping back to the first.
 */
public class MainActivity extends Activity implements AdapterView.OnItemSelectedListener{

    // Index of the transition chosen in the spinner; passed to addTransition().
    private int optionSelected = 0;
    private SlidingListFragmentLeft mFirstFragment;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Populate the transition-selection spinner from resources.
        Spinner spinner = (Spinner) findViewById(R.id.spinner);
        ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this, R.array.array_spinner, android.R.layout.simple_spinner_item);
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        spinner.setAdapter(adapter);
        spinner.setOnItemSelectedListener(this);
        //Add first fragment
        mFirstFragment = new SlidingListFragmentLeft();
        FragmentManager fm = getFragmentManager();
        FragmentTransaction fragmentTransaction = fm.beginTransaction();
        fragmentTransaction.add(R.id.fragment_place, mFirstFragment);
        fragmentTransaction.commit();
    }

    /**
     * Button click handler (wired from the layout XML): pushes the second
     * fragment with the currently selected transition, or pops it if one is
     * already on the back stack.
     */
    public void addTransition(View view) {
        Button button = (Button) findViewById(R.id.button);
        if (getFragmentManager().getBackStackEntryCount()==0) {
            Fragment secondFragment = new SlidingListFragmentRight();
            FragmentManager fm = getFragmentManager();
            FragmentTransaction fragmentTransaction = fm.beginTransaction();
            FragmentTransactionExtended fragmentTransactionExtended = new FragmentTransactionExtended(this, fragmentTransaction, mFirstFragment, secondFragment, R.id.fragment_place);
            fragmentTransactionExtended.addTransition(optionSelected);
            fragmentTransactionExtended.commit();
            button.setText("Back");
        }else{
            getFragmentManager().popBackStack();
            button.setText("Push");
        }
    }

    @Override
    public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
        // Remember the chosen transition for the next push.
        optionSelected = i;
    }

    @Override
    public void onNothingSelected(AdapterView<?> adapterView) {
    }

    /** Keeps the button label in sync when the user navigates back via the system button. */
    @Override
    public void onBackPressed()
    {
        Button button = (Button) findViewById(R.id.button);
        button.setText("Push");
        super.onBackPressed();
    }
}
| apache-2.0 |
kjniemi/activemq-artemis | examples/features/federation/federated-address-downstream-upstream/src/main/java/org/apache/activemq/artemis/jms/example/FederatedAddressDownstreamUpstreamExample.java | 1254 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jms.example;
/**
* A simple example that demonstrates multicast address replication between remote servers,
* using Address Federation downstream and upstream feature combined.
*/
public class FederatedAddressDownstreamUpstreamExample {
   /**
    * Delegates to {@link FederatedAddressExample#main(String[])}: the scenario
    * exercised there doubles as the combined downstream/upstream run.
    *
    * @param args command line arguments, passed through unchanged
    * @throws Exception if the delegated example fails
    */
   public static void main(final String[] args) throws Exception {
      //Re-use the same Federated address test for upstream
      FederatedAddressExample.main(args);
   }
}
| apache-2.0 |
miniway/presto | presto-main/src/main/java/io/prestosql/cost/LimitStatsRule.java | 1845 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.cost;
import io.prestosql.Session;
import io.prestosql.matching.Pattern;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.iterative.Lookup;
import io.prestosql.sql.planner.plan.LimitNode;
import java.util.Optional;
import static io.prestosql.sql.planner.plan.Patterns.limit;
/**
 * Stats rule for {@code LIMIT}: caps the source's estimated output row count
 * at the limit's row count.
 */
public class LimitStatsRule
        extends SimpleStatsRule<LimitNode>
{
    private static final Pattern<LimitNode> PATTERN = limit();

    public LimitStatsRule(StatsNormalizer normalizer)
    {
        super(normalizer);
    }

    @Override
    public Pattern<LimitNode> getPattern()
    {
        return PATTERN;
    }

    @Override
    protected Optional<PlanNodeStatsEstimate> doCalculate(LimitNode node, StatsProvider statsProvider, Lookup lookup, Session session, TypeProvider types)
    {
        PlanNodeStatsEstimate sourceStats = statsProvider.getStats(node.getSource());
        boolean sourceWithinLimit = sourceStats.getOutputRowCount() <= node.getCount();
        if (sourceWithinLimit) {
            // The limit never kicks in; the source estimate carries over as is.
            return Optional.of(sourceStats);
        }
        // LIMIT actually caps the rows here. This branch is also taken when the
        // source row count is unknown (NaN), in which case the limit becomes the estimate.
        return Optional.of(
                PlanNodeStatsEstimate.buildFrom(sourceStats)
                        .setOutputRowCount(node.getCount())
                        .build());
    }
}
| apache-2.0 |
ollie314/spring-android | test/spring-android-rest-template-test/src/main/java/org/springframework/http/client/support/HttpAccessorTests.java | 1431 | /*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.client.support;
import junit.framework.TestCase;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;
import android.test.suitebuilder.annotation.SmallTest;
/**
 * JUnit&nbsp;3 style test verifying that a freshly constructed
 * {@link RestTemplate} defaults to the HttpComponents based request factory
 * on Android.
 */
public class HttpAccessorTests extends TestCase {

    private RestTemplate restTemplate;

    @Override
    protected void setUp() throws Exception {
        restTemplate = new RestTemplate();
    }

    @Override
    protected void tearDown() throws Exception {
        // Release the template between tests.
        restTemplate = null;
    }

    @SmallTest
    public void testConstructor() {
        final ClientHttpRequestFactory requestFactory = restTemplate.getRequestFactory();
        assertTrue(requestFactory instanceof HttpComponentsClientHttpRequestFactory);
    }
}
| apache-2.0 |
robin13/elasticsearch | server/src/test/java/org/apache/lucene/index/LazySoftDeletesDirectoryReaderWrapperTests.java | 7789 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.apache.lucene.index;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.core.internal.io.IOUtils;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Tests for the lazy soft-deletes reader wrapper: hiding soft-deleted
 * documents, mixing soft and hard deletes, and reader cache-key behavior.
 */
public class LazySoftDeletesDirectoryReaderWrapperTests extends LuceneTestCase {
  // Soft-deleting the only live doc of a segment must make the wrapper drop
  // that segment entirely, while a plain DirectoryReader still sees all docs.
  public void testDropFullyDeletedSegments() throws IOException {
    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
    String softDeletesField = "soft_delete";
    indexWriterConfig.setSoftDeletesField(softDeletesField);
    indexWriterConfig.setMergePolicy(
        new SoftDeletesRetentionMergePolicy(
            softDeletesField, MatchAllDocsQuery::new, NoMergePolicy.INSTANCE));
    try (Directory dir = newDirectory();
        IndexWriter writer = new IndexWriter(dir, indexWriterConfig)) {
      // Two commits -> two single-doc segments.
      Document doc = new Document();
      doc.add(new StringField("id", "1", Field.Store.YES));
      doc.add(new StringField("version", "1", Field.Store.YES));
      writer.addDocument(doc);
      writer.commit();
      doc = new Document();
      doc.add(new StringField("id", "2", Field.Store.YES));
      doc.add(new StringField("version", "1", Field.Store.YES));
      writer.addDocument(doc);
      writer.commit();
      // Before any deletes, the wrapper shows both segments and both docs.
      try (DirectoryReader reader =
          new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField)) {
        assertEquals(2, reader.leaves().size());
        assertEquals(2, reader.numDocs());
        assertEquals(2, reader.maxDoc());
        assertEquals(0, reader.numDeletedDocs());
      }
      // Soft-delete doc "1" via a doc-values update.
      writer.updateDocValues(new Term("id", "1"), new NumericDocValuesField(softDeletesField, 1));
      writer.commit();
      // Wrapper drops the now fully-deleted segment.
      try (DirectoryReader reader =
          new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField)) {
        assertEquals(1, reader.numDocs());
        assertEquals(1, reader.maxDoc());
        assertEquals(0, reader.numDeletedDocs());
        assertEquals(1, reader.leaves().size());
      }
      // An unwrapped reader is unaffected by soft deletes.
      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        assertEquals(2, reader.numDocs());
        assertEquals(2, reader.maxDoc());
        assertEquals(0, reader.numDeletedDocs());
        assertEquals(2, reader.leaves().size());
      }
    }
  }
  // Randomly mixes hard updates (even ids) and soft updates (odd ids) and
  // checks that exactly one live copy per unique id remains visible.
  public void testMixSoftAndHardDeletes() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
    String softDeletesField = "soft_delete";
    indexWriterConfig.setSoftDeletesField(softDeletesField);
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    Set<Integer> uniqueDocs = new HashSet<>();
    for (int i = 0; i < 100; i++) {
      int docId = random().nextInt(5);
      uniqueDocs.add(docId);
      Document doc = new Document();
      doc.add(new StringField("id", String.valueOf(docId), Field.Store.YES));
      if (docId % 2 == 0) {
        // Hard delete + re-add.
        writer.updateDocument(new Term("id", String.valueOf(docId)), doc);
      } else {
        // Soft delete + re-add.
        writer.softUpdateDocument(
            new Term("id", String.valueOf(docId)),
            doc,
            new NumericDocValuesField(softDeletesField, 0));
      }
    }
    writer.commit();
    writer.close();
    DirectoryReader reader =
        new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
    assertEquals(uniqueDocs.size(), reader.numDocs());
    IndexSearcher searcher = new IndexSearcher(reader);
    for (Integer docId : uniqueDocs) {
      assertEquals(1, searcher.count(new TermQuery(new Term("id", docId.toString()))));
    }
    IOUtils.close(reader, dir);
  }
  // Verifies cache-key semantics of the wrapper: keys are stable per reader,
  // differ across reopened readers, and closed-listeners fire exactly once.
  public void testReaderCacheKey() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
    String softDeletesField = "soft_delete";
    indexWriterConfig.setSoftDeletesField(softDeletesField);
    indexWriterConfig.setMergePolicy(NoMergePolicy.INSTANCE);
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    Document doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new StringField("version", "1", Field.Store.YES));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(new StringField("id", "2", Field.Store.YES));
    doc.add(new StringField("version", "1", Field.Store.YES));
    writer.addDocument(doc);
    writer.commit();
    DirectoryReader reader =
        new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
    IndexReader.CacheHelper readerCacheHelper =
        reader.leaves().get(0).reader().getReaderCacheHelper();
    // Counters track how often each closed-listener fires.
    AtomicInteger leafCalled = new AtomicInteger(0);
    AtomicInteger dirCalled = new AtomicInteger(0);
    readerCacheHelper.addClosedListener(
        key -> {
          leafCalled.incrementAndGet();
          assertSame(key, readerCacheHelper.getKey());
        });
    IndexReader.CacheHelper dirReaderCacheHelper = reader.getReaderCacheHelper();
    dirReaderCacheHelper.addClosedListener(
        key -> {
          dirCalled.incrementAndGet();
          assertSame(key, dirReaderCacheHelper.getKey());
        });
    assertEquals(2, reader.numDocs());
    assertEquals(2, reader.maxDoc());
    assertEquals(0, reader.numDeletedDocs());
    // Mutate the index after the reader was opened: soft-update doc "1",
    // add doc "3".
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new StringField("version", "2", Field.Store.YES));
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    doc = new Document();
    doc.add(new StringField("id", "3", Field.Store.YES));
    doc.add(new StringField("version", "1", Field.Store.YES));
    writer.addDocument(doc);
    writer.commit();
    // Listeners must not fire before the reader is closed.
    assertEquals(0, leafCalled.get());
    assertEquals(0, dirCalled.get());
    DirectoryReader newReader =
        new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
    assertEquals(0, leafCalled.get());
    assertEquals(0, dirCalled.get());
    // A reopened reader gets a fresh cache key.
    assertNotSame(
        newReader.getReaderCacheHelper().getKey(), reader.getReaderCacheHelper().getKey());
    assertNotSame(newReader, reader);
    reader.close();
    reader = newReader;
    // Closing the old reader fires each listener exactly once.
    assertEquals(1, dirCalled.get());
    assertEquals(1, leafCalled.get());
    IOUtils.close(reader, writer, dir);
  }
}
| apache-2.0 |
agura/incubator-ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheTwoStepQuery.java | 3981 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
/**
* Two step map-reduce style query.
*/
/**
 * Two step map-reduce style query: a collection of map queries executed on
 * data nodes plus a single reduce query that combines their partial results.
 */
public class GridCacheTwoStepQuery {
    /** Default page size. */
    public static final int DFLT_PAGE_SIZE = 1000;

    /** Map queries executed on data nodes. Never reassigned, hence final. */
    @GridToStringInclude
    private final List<GridCacheSqlQuery> mapQrys = new ArrayList<>();

    /** Reduce query combining the map query results. Never reassigned, hence final. */
    @GridToStringInclude
    private final GridCacheSqlQuery rdc;

    /** Query result page size. */
    private int pageSize = DFLT_PAGE_SIZE;

    /** Whether this is an {@code EXPLAIN} query. */
    private boolean explain;

    /** All spaces (cache names) accessed by the query. */
    private Set<String> spaces;

    /** Whether the reduce phase may read directly from the merge index. */
    private final boolean skipMergeTbl;

    /**
     * @param spaces All spaces accessed in query.
     * @param rdc Reduce query.
     * @param skipMergeTbl {@code True} if reduce query can skip merge table creation and
     * get data directly from merge index.
     */
    public GridCacheTwoStepQuery(Set<String> spaces, GridCacheSqlQuery rdc, boolean skipMergeTbl) {
        assert rdc != null;

        this.spaces = spaces;
        this.rdc = rdc;
        this.skipMergeTbl = skipMergeTbl;
    }

    /**
     * @return {@code True} if reduce query can skip merge table creation and get data directly from merge index.
     */
    public boolean skipMergeTable() {
        return skipMergeTbl;
    }

    /**
     * @return If this is explain query.
     */
    public boolean explain() {
        return explain;
    }

    /**
     * @param explain If this is explain query.
     */
    public void explain(boolean explain) {
        this.explain = explain;
    }

    /**
     * @param pageSize Page size.
     */
    public void pageSize(int pageSize) {
        this.pageSize = pageSize;
    }

    /**
     * @return Page size.
     */
    public int pageSize() {
        return pageSize;
    }

    /**
     * @param qry SQL Query.
     * @return {@code this} for chaining.
     */
    public GridCacheTwoStepQuery addMapQuery(GridCacheSqlQuery qry) {
        mapQrys.add(qry);

        return this;
    }

    /**
     * @return Reduce query.
     */
    public GridCacheSqlQuery reduceQuery() {
        return rdc;
    }

    /**
     * @return Map queries.
     */
    public List<GridCacheSqlQuery> mapQueries() {
        return mapQrys;
    }

    /**
     * @return Spaces.
     */
    public Set<String> spaces() {
        return spaces;
    }

    /**
     * @param spaces Spaces.
     */
    public void spaces(Set<String> spaces) {
        this.spaces = spaces;
    }

    /**
     * Creates a copy of this query with new arguments bound into the reduce
     * query and every map query. Explain queries are never copied.
     *
     * @param args New arguments to copy with.
     * @return Copy.
     */
    public GridCacheTwoStepQuery copy(Object[] args) {
        assert !explain;

        GridCacheTwoStepQuery cp = new GridCacheTwoStepQuery(spaces, rdc.copy(args), skipMergeTbl);

        cp.pageSize = pageSize;

        // Re-bind arguments into each map query.
        for (GridCacheSqlQuery mapQry : mapQrys)
            cp.mapQrys.add(mapQry.copy(args));

        return cp;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridCacheTwoStepQuery.class, this);
    }
} | apache-2.0 |
alanfgates/hive | ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java | 13369 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.junit.Assert;
import org.junit.Test;
public class TestGenericUDFOPDivide extends AbstractTestGenericUDFOPNumeric {
private static final double EPSILON = 1E-6;
@Test
public void testByteDivideShort() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
ByteWritable left = new ByteWritable((byte) 4);
ShortWritable right = new ShortWritable((short) 6);
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableByteObjectInspector,
PrimitiveObjectInspectorFactory.writableShortObjectInspector
};
DeferredObject[] args = {
new DeferredJavaObject(left),
new DeferredJavaObject(right),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.getDecimalTypeInfo(9, 6));
HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
Assert.assertEquals(HiveDecimal.create("0.666667"), res.getHiveDecimal());
}
@Test
public void testVarcharDivideInt() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
HiveVarcharWritable left = new HiveVarcharWritable();
left.set("123");
IntWritable right = new IntWritable(456);
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
PrimitiveObjectInspectorFactory.writableIntObjectInspector
};
DeferredObject[] args = {
new DeferredJavaObject(left),
new DeferredJavaObject(right),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo);
DoubleWritable res = (DoubleWritable) udf.evaluate(args);
Assert.assertEquals(123.0 / 456.0, res.get(), EPSILON);
}
@Test
public void testDoubleDivideLong() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
DoubleWritable left = new DoubleWritable(4.5);
LongWritable right = new LongWritable(10);
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
PrimitiveObjectInspectorFactory.writableLongObjectInspector
};
DeferredObject[] args = {
new DeferredJavaObject(left),
new DeferredJavaObject(right),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
DoubleWritable res = (DoubleWritable) udf.evaluate(args);
Assert.assertEquals(0.45, res.get(), EPSILON);
}
@Test
public void testLongDivideDecimal() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
LongWritable left = new LongWritable(104);
HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableLongObjectInspector,
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9, 4))
};
DeferredObject[] args = {
new DeferredJavaObject(left),
new DeferredJavaObject(right),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(33, 10), oi.getTypeInfo());
HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
Assert.assertEquals(HiveDecimal.create("0.4426096949"), res.getHiveDecimal());
}
@Test
public void testFloatDivideFloat() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
FloatWritable f1 = new FloatWritable(4.5f);
FloatWritable f2 = new FloatWritable(1.5f);
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
PrimitiveObjectInspectorFactory.writableFloatObjectInspector
};
DeferredObject[] args = {
new DeferredJavaObject(f1),
new DeferredJavaObject(f2),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo);
DoubleWritable res = (DoubleWritable) udf.evaluate(args);
Assert.assertEquals(3.0, res.get(), EPSILON);
}
@Test
public void testDouleDivideDecimal() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
DoubleWritable left = new DoubleWritable(74.52);
HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
};
DeferredObject[] args = {
new DeferredJavaObject(left),
new DeferredJavaObject(right),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
DoubleWritable res = (DoubleWritable) udf.evaluate(args);
Assert.assertEquals(74.52 / 234.97, res.get(), EPSILON);
}
@Test
public void testDecimalDivideDecimal() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
HiveDecimalWritable left = new HiveDecimalWritable(HiveDecimal.create("14.5"));
HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3, 1)),
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
};
DeferredObject[] args = {
new DeferredJavaObject(left),
new DeferredJavaObject(right),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(11, 7), oi.getTypeInfo());
HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
Assert.assertEquals(HiveDecimal.create("0.06171"), res.getHiveDecimal());
}
@Test
public void testDecimalDivideDecimal2() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
HiveDecimalWritable left = new HiveDecimalWritable(HiveDecimal.create("5"));
HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("25"));
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(1, 0)),
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(2, 0))
};
DeferredObject[] args = {
new DeferredJavaObject(left),
new DeferredJavaObject(right),
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(7, 6), oi.getTypeInfo());
HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
Assert.assertEquals(HiveDecimal.create("0.2"), res.getHiveDecimal());
}
@Test
public void testDecimalDivideDecimalSameParams() throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2)),
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(13, 8), oi.getTypeInfo());
}
@Test
public void testDecimalDivisionResultType() throws HiveException {
testDecimalDivisionResultType(5, 2, 3, 2, 11, 6);
testDecimalDivisionResultType(38, 18, 38, 18, 38, 6);
testDecimalDivisionResultType(38, 18, 20, 0, 38, 18);
testDecimalDivisionResultType(20, 0, 8, 5, 34, 9);
testDecimalDivisionResultType(10, 0, 10, 0, 21, 11);
testDecimalDivisionResultType(5, 2, 5, 5, 16, 8);
testDecimalDivisionResultType(10, 10, 5, 0, 16, 16);
testDecimalDivisionResultType(10, 10, 5, 5, 21, 16);
testDecimalDivisionResultType(38, 38, 38, 38, 38, 6);
testDecimalDivisionResultType(38, 0, 38, 0, 38, 6);
}
private void testDecimalDivisionResultType(int prec1, int scale1, int prec2, int scale2, int prec3, int scale3)
throws HiveException {
GenericUDFOPDivide udf = new GenericUDFOPDivide();
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(prec1, scale1)),
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(prec2, scale2))
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(prec3, scale3), oi.getTypeInfo());
}
@Test
public void testReturnTypeBackwardCompat() throws Exception {
// Disable ansi sql arithmetic changes
SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "0.12");
verifyReturnType(new GenericUDFOPDivide(), "int", "int", "double"); // different from sql compat mode
verifyReturnType(new GenericUDFOPDivide(), "int", "float", "double");
verifyReturnType(new GenericUDFOPDivide(), "int", "double", "double");
verifyReturnType(new GenericUDFOPDivide(), "int", "decimal(10,2)", "decimal(23,11)");
verifyReturnType(new GenericUDFOPDivide(), "float", "float", "double");
verifyReturnType(new GenericUDFOPDivide(), "float", "double", "double");
verifyReturnType(new GenericUDFOPDivide(), "float", "decimal(10,2)", "double");
verifyReturnType(new GenericUDFOPDivide(), "double", "double", "double");
verifyReturnType(new GenericUDFOPDivide(), "double", "decimal(10,2)", "double");
verifyReturnType(new GenericUDFOPDivide(), "decimal(10,2)", "decimal(10,2)", "decimal(23,13)");
// Most tests are done with ANSI SQL mode enabled, set it back to true
SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
}
@Test
public void testReturnTypeAnsiSql() throws Exception {
SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
verifyReturnType(new GenericUDFOPDivide(), "int", "int", "decimal(21,11)");
verifyReturnType(new GenericUDFOPDivide(), "int", "float", "double");
verifyReturnType(new GenericUDFOPDivide(), "int", "double", "double");
verifyReturnType(new GenericUDFOPDivide(), "int", "decimal(10,2)", "decimal(23,11)");
verifyReturnType(new GenericUDFOPDivide(), "float", "float", "double");
verifyReturnType(new GenericUDFOPDivide(), "float", "double", "double");
verifyReturnType(new GenericUDFOPDivide(), "float", "decimal(10,2)", "double");
verifyReturnType(new GenericUDFOPDivide(), "double", "double", "double");
verifyReturnType(new GenericUDFOPDivide(), "double", "decimal(10,2)", "double");
verifyReturnType(new GenericUDFOPDivide(), "decimal(10,2)", "decimal(10,2)", "decimal(23,13)");
}
}
| apache-2.0 |
azaytsev/ios-driver | server/src/main/java/org/uiautomation/ios/utils/JSONToXMLConverter.java | 1872 | /*
* Copyright 2012-2013 eBay Software Foundation and ios-driver committers
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.uiautomation.ios.utils;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.json.JSONArray;
import org.json.JSONObject;
/**
 * Converts a JSON tree of UI elements into its XML representation. The
 * conversion happens eagerly at construction time; {@link #asXML()} returns
 * the cached result.
 */
public class JSONToXMLConverter {

  private final String xml;

  public JSONToXMLConverter(JSONObject tree) {
    Document document = DocumentHelper.createDocument();
    document.setXMLEncoding("UTF-8");
    Element root = document.addElement("root");
    appendNode(tree, root);
    xml = document.asXML();
  }

  /** @return the XML produced from the JSON tree given at construction time. */
  public String asXML() {
    return xml;
  }

  /**
   * Recursively appends {@code from} (and its children) as an element under
   * {@code parent}. A {@code null} node contributes nothing.
   */
  private void appendNode(JSONObject from, Element parent) {
    if (from == null) {
      return;
    }
    Element element = parent.addElement(from.optString("type"));
    element.addAttribute("name", from.optString("name"));
    element.addAttribute("label", from.optString("label"));
    element.addAttribute("value", from.optString("value"));
    element.addAttribute("ref", from.optString("ref"));
    JSONArray children = from.optJSONArray("children");
    if (children == null) {
      return;
    }
    for (int i = 0; i < children.length(); i++) {
      appendNode(children.optJSONObject(i), element);
    }
  }
}
| apache-2.0 |
gfyoung/elasticsearch | server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java | 1633 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.elasticsearch.action.Action;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.transport.TransportRequestOptions;
/**
 * Action definition for the bulk write endpoint
 * ({@code indices:data/write/bulk}).
 */
public class BulkAction extends Action<BulkResponse> {

    public static final BulkAction INSTANCE = new BulkAction();
    public static final String NAME = "indices:data/write/bulk";

    private BulkAction() {
        super(NAME);
    }

    @Override
    public BulkResponse newResponse() {
        return new BulkResponse();
    }

    @Override
    public TransportRequestOptions transportOptions(Settings settings) {
        // Bulk requests travel over the dedicated BULK transport channel and
        // are compressed unless explicitly disabled in the settings.
        boolean compress = settings.getAsBoolean("action.bulk.compress", true);
        return TransportRequestOptions.builder()
            .withType(TransportRequestOptions.Type.BULK)
            .withCompress(compress)
            .build();
    }
}
| apache-2.0 |
Prakhash/security-tools | external/dependency-check-core-3.1.1/src/main/java/org/owasp/dependencycheck/xml/hints/package-info.java | 135 | /**
* Contains classes used to parse the hints file to add evidence to dependencies.
*/
package org.owasp.dependencycheck.xml.hints;
| apache-2.0 |
awhitford/Resteasy | testsuite/integration-tests-spring/deployment/src/test/java/org/jboss/resteasy/test/spring/deployment/resource/RequestScopedBeanInnerBean.java | 109 | package org.jboss.resteasy.test.spring.deployment.resource;
/**
 * Marker interface for the request-scoped bean used in the Spring deployment
 * integration tests. Declares no methods; implementations are referenced
 * purely by this type.
 */
public interface RequestScopedBeanInnerBean {
}
| apache-2.0 |
awhitford/Resteasy | testsuite/integration-tests/src/test/java/org/jboss/resteasy/test/cdi/validation/resource/AsyncRootResourceImpl.java | 372 | package org.jboss.resteasy.test.cdi.validation.resource;
import javax.enterprise.context.RequestScoped;
import javax.inject.Inject;
/**
 * Request-scoped CDI implementation of the async root resource: the only
 * behavior it adds over {@code AbstractAsyncRootResource} is handing out the
 * injected sub-resource.
 */
@RequestScoped
public class AsyncRootResourceImpl extends AbstractAsyncRootResource
{
   // Populated by CDI for each request; returned as-is from getSubResource().
   @Inject
   private AsyncSubResourceImpl subResource;

   /** @return the CDI-injected sub-resource instance for the current request */
   @Override
   public AsyncSubResource getSubResource()
   {
      return subResource;
   }
}
| apache-2.0 |
nwnpallewela/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/providers/assistants/EsbModelingAssistantProviderOfInboundEndpointOnErrorSequenceInputConnectorEditPart.java | 9415 | package org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.assistants;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointOnErrorSequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbModelingAssistantProvider;
/**
* @generated
*/
// GMF-generated modeling assistant for the InboundEndpoint on-error-sequence
// input connector. Do not hand-edit logic: the @generated methods are
// regenerated from the graphical model definition.
public class EsbModelingAssistantProviderOfInboundEndpointOnErrorSequenceInputConnectorEditPart extends
        EsbModelingAssistantProvider {

    /**
     * @generated
     */
    @Override
    public List<IElementType> getRelTypesOnTarget(IAdaptable target) {
        // Unwrap the edit part and delegate to the type-specific helper below.
        IGraphicalEditPart targetEditPart = (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class);
        return doGetRelTypesOnTarget((InboundEndpointOnErrorSequenceInputConnectorEditPart) targetEditPart);
    }

    /**
     * @generated
     */
    public List<IElementType> doGetRelTypesOnTarget(InboundEndpointOnErrorSequenceInputConnectorEditPart target) {
        // Only one relationship kind may end on this connector: an ESB link.
        List<IElementType> types = new ArrayList<IElementType>(1);
        types.add(EsbElementTypes.EsbLink_4001);
        return types;
    }

    /**
     * @generated
     */
    @Override
    public List<IElementType> getTypesForSource(IAdaptable target, IElementType relationshipType) {
        IGraphicalEditPart targetEditPart = (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class);
        return doGetTypesForSource((InboundEndpointOnErrorSequenceInputConnectorEditPart) targetEditPart,
                relationshipType);
    }

    /**
     * @generated
     */
    public List<IElementType> doGetTypesForSource(InboundEndpointOnErrorSequenceInputConnectorEditPart target,
            IElementType relationshipType) {
        List<IElementType> types = new ArrayList<IElementType>();
        if (relationshipType == EsbElementTypes.EsbLink_4001) {
            // Generated exhaustive list: every output-connector type that may act
            // as the source end of an EsbLink targeting this input connector.
            types.add(EsbElementTypes.ProxyOutputConnector_3002);
            types.add(EsbElementTypes.ProxyOutSequenceOutputConnector_3729);
            types.add(EsbElementTypes.PropertyMediatorOutputConnector_3034);
            types.add(EsbElementTypes.ThrottleMediatorOutputConnector_3122);
            types.add(EsbElementTypes.ThrottleMediatorOnAcceptOutputConnector_3581);
            types.add(EsbElementTypes.ThrottleMediatorOnRejectOutputConnector_3582);
            types.add(EsbElementTypes.FilterMediatorOutputConnector_3534);
            types.add(EsbElementTypes.FilterMediatorPassOutputConnector_3011);
            types.add(EsbElementTypes.FilterMediatorFailOutputConnector_3012);
            types.add(EsbElementTypes.LogMediatorOutputConnector_3019);
            types.add(EsbElementTypes.EnrichMediatorOutputConnector_3037);
            types.add(EsbElementTypes.XSLTMediatorOutputConnector_3040);
            types.add(EsbElementTypes.SwitchMediatorOutputConnector_3499);
            types.add(EsbElementTypes.SwitchCaseBranchOutputConnector_3043);
            types.add(EsbElementTypes.SwitchDefaultBranchOutputConnector_3044);
            types.add(EsbElementTypes.SequenceOutputConnector_3050);
            types.add(EsbElementTypes.EventMediatorOutputConnector_3053);
            types.add(EsbElementTypes.EntitlementMediatorOutputConnector_3056);
            types.add(EsbElementTypes.EntitlementMediatorOnRejectOutputConnector_3748);
            types.add(EsbElementTypes.EntitlementMediatorOnAcceptOutputConnector_3749);
            types.add(EsbElementTypes.EntitlementMediatorAdviceOutputConnector_3750);
            types.add(EsbElementTypes.EntitlementMediatorObligationsOutputConnector_3751);
            types.add(EsbElementTypes.ClassMediatorOutputConnector_3059);
            types.add(EsbElementTypes.SpringMediatorOutputConnector_3062);
            types.add(EsbElementTypes.ScriptMediatorOutputConnector_3065);
            types.add(EsbElementTypes.FaultMediatorOutputConnector_3068);
            types.add(EsbElementTypes.XQueryMediatorOutputConnector_3071);
            types.add(EsbElementTypes.CommandMediatorOutputConnector_3074);
            types.add(EsbElementTypes.DBLookupMediatorOutputConnector_3077);
            types.add(EsbElementTypes.DBReportMediatorOutputConnector_3080);
            types.add(EsbElementTypes.SmooksMediatorOutputConnector_3083);
            types.add(EsbElementTypes.SendMediatorOutputConnector_3086);
            types.add(EsbElementTypes.SendMediatorEndpointOutputConnector_3539);
            types.add(EsbElementTypes.HeaderMediatorOutputConnector_3101);
            types.add(EsbElementTypes.CloneMediatorOutputConnector_3104);
            types.add(EsbElementTypes.CloneMediatorTargetOutputConnector_3133);
            types.add(EsbElementTypes.CacheMediatorOutputConnector_3107);
            types.add(EsbElementTypes.CacheMediatorOnHitOutputConnector_3618);
            types.add(EsbElementTypes.IterateMediatorOutputConnector_3110);
            types.add(EsbElementTypes.IterateMediatorTargetOutputConnector_3606);
            types.add(EsbElementTypes.CalloutMediatorOutputConnector_3116);
            types.add(EsbElementTypes.TransactionMediatorOutputConnector_3119);
            types.add(EsbElementTypes.RMSequenceMediatorOutputConnector_3125);
            types.add(EsbElementTypes.RuleMediatorOutputConnector_3128);
            types.add(EsbElementTypes.RuleMediatorChildMediatorsOutputConnector_3640);
            types.add(EsbElementTypes.OAuthMediatorOutputConnector_3131);
            types.add(EsbElementTypes.AggregateMediatorOutputConnector_3113);
            types.add(EsbElementTypes.AggregateMediatorOnCompleteOutputConnector_3132);
            types.add(EsbElementTypes.StoreMediatorOutputConnector_3590);
            types.add(EsbElementTypes.BuilderMediatorOutputConector_3593);
            types.add(EsbElementTypes.CallTemplateMediatorOutputConnector_3596);
            types.add(EsbElementTypes.PayloadFactoryMediatorOutputConnector_3599);
            types.add(EsbElementTypes.EnqueueMediatorOutputConnector_3602);
            types.add(EsbElementTypes.URLRewriteMediatorOutputConnector_3622);
            types.add(EsbElementTypes.ValidateMediatorOutputConnector_3625);
            types.add(EsbElementTypes.ValidateMediatorOnFailOutputConnector_3626);
            types.add(EsbElementTypes.RouterMediatorOutputConnector_3630);
            types.add(EsbElementTypes.RouterMediatorTargetOutputConnector_3631);
            types.add(EsbElementTypes.ConditionalRouterMediatorOutputConnector_3637);
            types.add(EsbElementTypes.ConditionalRouterMediatorAdditionalOutputConnector_3638);
            types.add(EsbElementTypes.BAMMediatorOutputConnector_3682);
            types.add(EsbElementTypes.BeanMediatorOutputConnector_3685);
            types.add(EsbElementTypes.EJBMediatorOutputConnector_3688);
            types.add(EsbElementTypes.DefaultEndPointOutputConnector_3022);
            types.add(EsbElementTypes.AddressEndPointOutputConnector_3031);
            types.add(EsbElementTypes.FailoverEndPointOutputConnector_3090);
            types.add(EsbElementTypes.FailoverEndPointWestOutputConnector_3097);
            types.add(EsbElementTypes.RecipientListEndPointOutputConnector_3694);
            types.add(EsbElementTypes.RecipientListEndPointWestOutputConnector_3695);
            types.add(EsbElementTypes.WSDLEndPointOutputConnector_3093);
            types.add(EsbElementTypes.NamedEndpointOutputConnector_3662);
            types.add(EsbElementTypes.LoadBalanceEndPointOutputConnector_3096);
            types.add(EsbElementTypes.LoadBalanceEndPointWestOutputConnector_3098);
            types.add(EsbElementTypes.APIResourceEndpointOutputConnector_3676);
            types.add(EsbElementTypes.AddressingEndpointOutputConnector_3691);
            types.add(EsbElementTypes.HTTPEndPointOutputConnector_3711);
            types.add(EsbElementTypes.TemplateEndpointOutputConnector_3718);
            types.add(EsbElementTypes.CloudConnectorOutputConnector_3721);
            types.add(EsbElementTypes.CloudConnectorOperationOutputConnector_3724);
            types.add(EsbElementTypes.LoopBackMediatorOutputConnector_3738);
            types.add(EsbElementTypes.RespondMediatorOutputConnector_3741);
            types.add(EsbElementTypes.CallMediatorOutputConnector_3744);
            types.add(EsbElementTypes.CallMediatorEndpointOutputConnector_3745);
            types.add(EsbElementTypes.DataMapperMediatorOutputConnector_3763);
            types.add(EsbElementTypes.FastXSLTMediatorOutputConnector_3766);
            types.add(EsbElementTypes.ForEachMediatorOutputConnector_3782);
            types.add(EsbElementTypes.ForEachMediatorTargetOutputConnector_3783);
            types.add(EsbElementTypes.PublishEventMediatorOutputConnector_3787);
            types.add(EsbElementTypes.MessageOutputConnector_3047);
            types.add(EsbElementTypes.MergeNodeOutputConnector_3016);
            types.add(EsbElementTypes.SequencesOutputConnector_3617);
            types.add(EsbElementTypes.DefaultEndPointOutputConnector_3645);
            types.add(EsbElementTypes.AddressEndPointOutputConnector_3648);
            types.add(EsbElementTypes.FailoverEndPointOutputConnector_3651);
            types.add(EsbElementTypes.FailoverEndPointWestOutputConnector_3652);
            types.add(EsbElementTypes.RecipientListEndPointOutputConnector_3698);
            types.add(EsbElementTypes.RecipientListEndPointWestOutputConnector_3699);
            types.add(EsbElementTypes.WSDLEndPointOutputConnector_3655);
            types.add(EsbElementTypes.LoadBalanceEndPointOutputConnector_3658);
            types.add(EsbElementTypes.LoadBalanceEndPointWestOutputConnector_3659);
            types.add(EsbElementTypes.HTTPEndPointOutputConnector_3714);
            types.add(EsbElementTypes.TemplateEndpointOutputConnector_3727);
            types.add(EsbElementTypes.APIResourceOutputConnector_3671);
            types.add(EsbElementTypes.APIResourceOutSequenceOutputConnector_3730);
            types.add(EsbElementTypes.ComplexEndpointsOutputConnector_3679);
            types.add(EsbElementTypes.InboundEndpointSequenceOutputConnector_3769);
            types.add(EsbElementTypes.InboundEndpointOnErrorSequenceOutputConnector_3771);
        }
        return types;
    }
}
| apache-2.0 |
siosio/intellij-community | python/src/com/jetbrains/python/PyParameterInfoHandler.java | 5239 | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.parameterInfo.ParameterFlag;
import com.intellij.lang.parameterInfo.*;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiFile;
import com.intellij.util.ArrayUtilRt;
import com.jetbrains.python.codeInsight.parameterInfo.ParameterHints;
import com.jetbrains.python.codeInsight.parameterInfo.PyParameterInfoUtils;
import com.jetbrains.python.psi.PyArgumentList;
import com.jetbrains.python.psi.PyCallExpression;
import com.jetbrains.python.psi.types.PyCallableType;
import one.util.streamex.MoreCollectors;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
/**
 * Drives the parameter-info popup for Python call expressions: locates the
 * argument list under the caret, collects candidate callables, and renders the
 * hint text with highlight/disable/strikeout flags.
 */
public class PyParameterInfoHandler implements ParameterInfoHandler<PyArgumentList, Pair<PyCallExpression, PyCallableType>> {
    // Maps this plugin's parameter flags onto the platform's UI presentation flags.
    private static final EnumMap<ParameterFlag, ParameterInfoUIContextEx.Flag> PARAM_FLAG_TO_UI_FLAG = new EnumMap<>(Map.of(
            ParameterFlag.HIGHLIGHT, ParameterInfoUIContextEx.Flag.HIGHLIGHT,
            ParameterFlag.DISABLE, ParameterInfoUIContextEx.Flag.DISABLE,
            ParameterFlag.STRIKEOUT, ParameterInfoUIContextEx.Flag.STRIKEOUT
    ));

    /**
     * Finds the argument list at the caret and stashes the resolved call
     * candidates into the context; returns null (no popup) when there are none.
     */
    @Override
    @Nullable
    public PyArgumentList findElementForParameterInfo(@NotNull CreateParameterInfoContext context) {
        PsiFile file = context.getFile();
        int offset = context.getOffset();
        final PyArgumentList argumentList = PyParameterInfoUtils.findArgumentList(file, offset, -1);
        List<Pair<PyCallExpression, PyCallableType>> parameterInfos = PyParameterInfoUtils.findCallCandidates(argumentList);
        if (parameterInfos != null) {
            Object[] infoArr = parameterInfos.toArray();
            context.setItemsToShow(infoArr);
            return argumentList;
        }
        return null;
    }

    /** Pops the hint anchored at the argument list's text offset. */
    @Override
    public void showParameterInfo(@NotNull PyArgumentList element, @NotNull CreateParameterInfoContext context) {
        context.showHint(element, element.getTextOffset(), this);
    }

    @Override
    @Nullable
    public PyArgumentList findElementForUpdatingParameterInfo(@NotNull UpdateParameterInfoContext context) {
        return PyParameterInfoUtils.findArgumentList(context.getFile(), context.getOffset(), context.getParameterListStart());
    }

    /*
     <b>Note: instead of parameter index, we directly store parameter's offset for later use.</b><br/>
     We cannot store an index since we cannot determine what is an argument until we actually map arguments to parameters.
     This is because a tuple in arguments may be a whole argument or map to a tuple parameter.
     */
    @Override
    public void updateParameterInfo(@NotNull PyArgumentList argumentList, @NotNull UpdateParameterInfoContext context) {
        final int allegedCursorOffset = context.getOffset(); // this is already shifted backwards to skip spaces
        // Caret has left a closed argument list -> dismiss the hint.
        if (!argumentList.getTextRange().contains(allegedCursorOffset) && argumentList.getText().endsWith(")")) {
            context.removeHint();
            return;
        }
        final PsiFile file = context.getFile();
        int offset = PyParameterInfoUtils.findCurrentParameter(argumentList, allegedCursorOffset, file);
        context.setCurrentParameter(offset);
    }

    /**
     * Renders the hint for one candidate. When the rich context is available,
     * per-parameter flags are applied; otherwise the hints are concatenated
     * into a plain, unhighlighted string.
     */
    @Override
    public void updateUI(@NotNull Pair<PyCallExpression, PyCallableType> callAndCallee, @NotNull ParameterInfoUIContext context) {
        final int currentParamOffset = context.getCurrentParameterIndex(); // in Python mode, we get an offset here, not an index!
        ParameterHints parameterHints = PyParameterInfoUtils.buildParameterHints(callAndCallee, currentParamOffset);
        if (parameterHints == null) return;
        String[] hints = ArrayUtilRt.toStringArray(parameterHints.getHints());
        if (context instanceof ParameterInfoUIContextEx) {
            // Translate each parameter's flag set into the platform's flag enum.
            //noinspection unchecked
            EnumSet<ParameterInfoUIContextEx.Flag>[] flags = new EnumSet[parameterHints.getFlags().size()];
            for (int i = 0; i < flags.length; i++) {
                flags[i] = StreamEx.of(parameterHints.getFlags().get(i))
                        .map(PARAM_FLAG_TO_UI_FLAG::get)
                        .collect(MoreCollectors.toEnumSet(ParameterInfoUIContextEx.Flag.class));
            }
            if (hints.length == 0) {
                // Zero-parameter callable: show a disabled "no parameters" stub.
                hints = new String[]{getNoParamsMsg()};
                //noinspection unchecked
                flags = new EnumSet[]{EnumSet.of(ParameterInfoUIContextEx.Flag.DISABLE)};
            }
            ((ParameterInfoUIContextEx)context).setupUIComponentPresentation(hints, flags, context.getDefaultParameterColor());
        }
        else { // fallback, no highlight
            final StringBuilder signatureBuilder = new StringBuilder();
            if (hints.length == 0) {
                signatureBuilder.append(getNoParamsMsg());
            }
            else {
                for (String s : hints) signatureBuilder.append(s);
            }
            context.setupUIComponentPresentation(
                    signatureBuilder.toString(), -1, 0, false, false, false, context.getDefaultParameterColor()
            );
        }
    }

    private static String getNoParamsMsg() {
        return CodeInsightBundle.message("parameter.info.no.parameters");
    }
}
| apache-2.0 |
waahoo/protostuff | protostuff-json/src/test/java/com/dyuproject/protostuff/JsonXRepeatedMessagesTest.java | 1580 | //========================================================================
//Copyright 2007-2010 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package com.dyuproject.protostuff;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
/**
* Testcase for ser/deser of multiple messages using json via {@link JsonXOutput}.
*
* @author David Yu
* @created Oct 11, 2010
*/
public class JsonXRepeatedMessagesTest extends RepeatedMessagesTest
{
    // Deserialization hook for the shared harness: reads a list of messages
    // from JSON (non-numeric field names, hence the 'false' flag).
    // NOTE(review): presumably overrides an abstract method of
    // RepeatedMessagesTest — parent not visible here, confirm before adding @Override.
    protected <T> List<T> parseListFrom(InputStream in, Schema<T> schema) throws IOException
    {
        return JsonIOUtil.parseListFrom(in, schema, false);
    }

    // Serialization hook: writes the messages via the buffered JsonX writer,
    // using the buffer supplied by the harness's buf().
    protected <T> void writeListTo(OutputStream out, List<T> messages, Schema<T> schema)
            throws IOException
    {
        JsonXIOUtil.writeListTo(out, messages, schema, false, buf());
    }
}
| apache-2.0 |
arturog8m/ocs | bundle/edu.gemini.auxfile.workflow/src/main/java/edu/gemini/auxfile/workflow/CopyTaskState.java | 3120 | //
// $Id: CopyTaskState.java 855 2007-05-22 02:52:46Z rnorris $
//
package edu.gemini.auxfile.workflow;
import edu.gemini.auxfile.copier.AuxFileCopier;
import java.io.*;
import java.util.Collection;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.text.ParseException;
/**
* Manages a file containing CopyTasks. Provides for reading, modifying the
* state file.
*/
public final class CopyTaskState {

    private static final Logger LOG = Logger.getLogger(CopyTaskState.class.getName());

    // Copier shared by all tasks parsed from this state file.
    private final AuxFileCopier _copier;

    // File where the task list is persisted, one formatted task per line.
    private final File _stateFile;

    public CopyTaskState(AuxFileCopier copier, File stateFile) {
        _copier = copier;
        _stateFile = stateFile;
    }

    public AuxFileCopier getCopier() {
        return _copier;
    }

    /**
     * Reads all tasks from the state file. Returns an empty collection when the
     * file does not exist yet; I/O and parse errors are logged and whatever was
     * read so far is returned.
     *
     * NOTE(review): a ParseException aborts the read loop, so tasks on lines
     * after a corrupt line are silently dropped — confirm this is intended.
     */
    public synchronized Collection<CopyTask> getTasks() {
        Collection<CopyTask> res = new ArrayList<CopyTask>();
        BufferedReader br = null;
        try {
            final FileReader fr = new FileReader(_stateFile);
            br = new BufferedReader(fr);
            String line;
            while ((line = br.readLine()) != null) {
                res.add(CopyTask.parse(this, line));
            }
        } catch (FileNotFoundException ex) {
            // no state
        } catch (ParseException ex) {
            LOG.log(Level.SEVERE, ex.getMessage(), ex);
        } catch (IOException ex) {
            LOG.log(Level.SEVERE, ex.getMessage(), ex);
        } finally {
            // Best-effort close; failures here are deliberately ignored.
            try { if (br != null) br.close(); } catch (Exception ex) {/*empty*/}
        }
        return res;
    }

    /** Rewrites the whole state file from the given task collection. */
    synchronized void setTasks(Collection<CopyTask> state) {
        BufferedWriter bw = null;
        try {
            final FileWriter fw = new FileWriter(_stateFile);
            bw = new BufferedWriter(fw);
            for (CopyTask task : state) {
                bw.write(task.format());
                bw.newLine();
            }
            bw.flush();
        } catch (IOException ex) {
            LOG.log(Level.SEVERE, ex.getMessage(), ex);
        } finally {
            try { if (bw != null) bw.close(); } catch (Exception ex) {/*empty*/}
        }
    }

    /**
     * Adds a task, replacing any existing task for the same file (only the
     * first match is removed), then persists the new list.
     */
    public synchronized void addTask(CopyTask task) {
        Collection<CopyTask> state = getTasks();
        for (Iterator<CopyTask> it = state.iterator(); it.hasNext(); ) {
            AuxFileTask curTask = it.next();
            if (curTask.getFile().equals(task.getFile())) {
                it.remove();
                break;
            }
        }
        state.add(task);
        setTasks(state);
    }

    /**
     * Removes the first task equal to the given one; the file is rewritten
     * only when something was actually removed.
     */
    public synchronized void removeTask(AuxFileTask task) {
        Collection<CopyTask> state = getTasks();
        boolean modified = false;
        for (Iterator<CopyTask> it = state.iterator(); it.hasNext(); ) {
            AuxFileTask curTask = it.next();
            if (curTask.equals(task)) {
                it.remove();
                modified = true;
                break;
            }
        }
        if (modified) setTasks(state);
    }
}
| bsd-3-clause |
ownclo/jpeg-on-steroids | vendor/C/OpenJPEG/src/bin/jpip/opj_viewer/src/ImageWindow.java | 3666 | /*
* $Id$
*
* Copyright (c) 2002-2011, Communications and Remote Sensing Laboratory, Universite catholique de Louvain (UCL), Belgium
* Copyright (c) 2002-2011, Professor Benoit Macq
* Copyright (c) 2010-2011, Kaori Hagihara
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS'
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
import javax.swing.*;
import java.awt.event.*;
import java.awt.*;
/**
 * Top-level Swing frame of the JPIP viewer: hosts an {@code ImageViewer} for
 * one JPEG 2000 image and closes the JPIP channel when the window is closed.
 */
public class ImageWindow extends JFrame
{
    private ImageViewer imgviewer;
    private ImageManager imgmanager;

    /**
     * @param uri         URI of the JPIP (HTTP) server
     * @param j2kfilename requested image file; also used as the window title
     * @param host        host name passed to the {@code ImageManager}
     * @param port        port passed to the {@code ImageManager}
     * @param session     true for a stateful JPIP session, false for stateless requests
     * @param jppstream   true for JPP-stream, false for JPT-stream
     * @param aux         auxiliary transport: 0 none, 1 tcp, 2 udp
     */
    public ImageWindow( String uri, String j2kfilename, String host, int port, boolean session, boolean jppstream, int aux)
    {
        super( j2kfilename);
        imgmanager = new ImageManager( uri, host, port);
        imgviewer = new ImageViewer( j2kfilename, imgmanager, session, jppstream, aux);
        imgviewer.setOpaque(true); //content panes must be opaque
        JPanel panel = new JPanel();
        panel.setLayout(new BorderLayout());
        panel.add( imgviewer, BorderLayout.CENTER);
        setContentPane( panel);
        addWindowListener(new WindowMyAdapter());
    }

    /** Shuts down the JPIP channel before terminating the JVM on window close. */
    class WindowMyAdapter extends WindowAdapter
    {
        public void windowClosing(WindowEvent arg)
        {
            imgmanager.closeChannel();
            System.exit(0);
        }
    }

    /**
     * Entry point. Positional arguments (first two required):
     * server URI, image file, [host=localhost], [port=50000],
     * [stateless|session], [JPT|JPP], [tcp|udp].
     */
    public static void main(String[] s)
    {
        String j2kfilename, uri, host;
        boolean session, jppstream;
        int port, aux; // 0: none, 1: tcp, 2: udp

        if (s.length >= 2) {
            uri = s[0];
            j2kfilename = s[1];
            // Optional arguments fall back to their defaults when absent.
            host = (s.length > 2) ? s[2] : "localhost";
            // Integer.parseInt replaces the old Integer.valueOf(...).intValue()
            // round-trip; it throws the same NumberFormatException on bad input.
            port = (s.length > 3) ? Integer.parseInt(s[3]) : 50000;
            session = (s.length > 4) ? !s[4].equalsIgnoreCase("stateless") : true;
            jppstream = (s.length > 5) ? !s[5].equalsIgnoreCase("JPT") : true;
            if (s.length > 6) {
                aux = s[6].equalsIgnoreCase("udp") ? 2 : 1;
            } else {
                aux = 0;
            }
        }
        else {
            System.out.println("Usage: java -jar opj_viewer.jar HTTP_server_URI imagefile.jp2 [hostname] [portnumber] [stateless/session] [JPT/JPP] [tcp/udp]");
            return;
        }

        ImageWindow frame = new ImageWindow( uri, j2kfilename, host, port, session, jppstream, aux);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

        //Display the window.
        frame.pack();
        frame.setSize(new Dimension(400,200));
        frame.setLocation( 0, 50);
        frame.setVisible(true);
    }
}
| bsd-3-clause |
dirkrombauts/gherkin3 | java/src/main/java/gherkin/TokenScanner.java | 1264 | package gherkin;
import gherkin.ast.Location;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
/**
* <p>
* The scanner reads a gherkin doc (typically read from a .feature file) and creates a token
* for each line. The tokens are passed to the parser, which outputs an AST (Abstract Syntax Tree).</p>
*
* <p>
* If the scanner sees a # language header, it will reconfigure itself dynamically to look for
* Gherkin keywords for the associated language. The keywords are defined in gherkin-languages.json.</p>
*/
public class TokenScanner implements Parser.ITokenScanner {
    private final BufferedReader reader;
    private int lineNumber;

    public TokenScanner(String source) {
        this(new StringReader(source));
    }

    public TokenScanner(Reader source) {
        this.reader = new BufferedReader(source);
    }

    /**
     * Reads the next line and wraps it in a {@link Token}. At end of input the
     * token carries a null line; the line counter still advances, matching the
     * location the parser expects for the EOF token.
     */
    @Override
    public Token read() {
        String line;
        try {
            line = reader.readLine();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        Location location = new Location(++lineNumber, 0);
        if (line == null) {
            return new Token(null, location);
        }
        return new Token(new GherkinLine(line), location);
    }
}
| mit |
johannrichard/openhab2-addons | addons/binding/org.openhab.binding.innogysmarthome/src/main/java/org/openhab/binding/innogysmarthome/internal/client/entity/Location.java | 2004 | /**
* Copyright (c) 2010-2018 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.innogysmarthome.internal.client.entity;
import java.util.List;
import com.google.api.client.util.Key;
/**
* Defines a {@link Location} structure.
*
* @author Oliver Kuhl - Initial contribution
*/
public class Location extends ConfigPropertyList {

    // Config-property key whose value is returned by getType().
    private static final String CONFIG_PROPERTY_TYPE = "Type";

    /**
     * Identifier of the location – must be unique.
     */
    @Key("id")
    private String id;

    /**
     * Reference to the description of the message.
     * Optional.
     */
    @Key("desc")
    private String desc;

    /**
     * Container for tagging the location, e.g. if the location is on a certain floor in the house.
     * Optional.
     */
    @Key("Tags")
    private List<Property> tagsList;

    /**
     * @return the id
     */
    public String getId() {
        return id;
    }

    /**
     * @param id the id to set
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * @return the desc
     */
    public String getDesc() {
        return desc;
    }

    /**
     * @param desc the desc to set
     */
    public void setDesc(String desc) {
        this.desc = desc;
    }

    /**
     * @return the tagsList
     */
    public List<Property> getTagsList() {
        return tagsList;
    }

    /**
     * @param tagsList the tagsList to set
     */
    public void setTagsList(List<Property> tagsList) {
        this.tagsList = tagsList;
    }

    /**
     * Returns the location's display name read from the config-property list.
     * NOTE(review): CONFIG_PROPERTY_NAME is inherited from ConfigPropertyList
     * (not visible here) — presumably the "Name" property; confirm.
     */
    @Override
    public String getName() {
        return getPropertyValueAsString(CONFIG_PROPERTY_NAME);
    }

    /** Returns the location's "Type" config property (e.g. its room/floor kind). */
    public String getType() {
        return getPropertyValueAsString(CONFIG_PROPERTY_TYPE);
    }
}
| epl-1.0 |
knabar/openmicroscopy | components/blitz/src/ome/services/blitz/repo/RequestObjectFactoryRegistry.java | 3641 | /*
* Copyright (C) 2012 Glencoe Software, Inc. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package ome.services.blitz.repo;
import java.util.HashMap;
import java.util.Map;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import ome.io.nio.PixelsService;
import ome.io.nio.TileSizes;
import ome.services.blitz.fire.Registry;
import ome.services.blitz.fire.Ring;
import ome.system.OmeroContext;
import ome.formats.importer.ImportConfig;
import ome.formats.importer.OMEROWrapper;
import omero.util.Resources;
/**
* Requests which are handled by the repository servants.
*/
public class RequestObjectFactoryRegistry extends
        omero.util.ObjectFactoryRegistry implements ApplicationContextAware {

    private final Registry reg;

    private final TileSizes sizes;

    private final RepositoryDao dao;

    private final Ring ring;

    private final PixelsService pixels;

    // May be null (see the 5-arg constructor); forwarded to each
    // ManagedImportRequestI via setResources.
    private final Resources resources;

    // Injected via setApplicationContext; not final because Spring sets it
    // after construction.
    private/* final */OmeroContext ctx;

    /** Convenience constructor without a {@link Resources} instance. */
    public RequestObjectFactoryRegistry(Registry reg, TileSizes sizes,
            RepositoryDao repositoryDao, Ring ring,
            PixelsService pixels) {
        this(reg, sizes, repositoryDao, ring, pixels, null);
    }

    public RequestObjectFactoryRegistry(Registry reg, TileSizes sizes,
            RepositoryDao repositoryDao, Ring ring,
            PixelsService pixels, Resources resources) {
        this.reg = reg;
        this.sizes = sizes;
        this.dao = repositoryDao;
        this.ring = ring;
        this.pixels = pixels;
        this.resources = resources;
    }

    public void setApplicationContext(ApplicationContext ctx)
            throws BeansException {
        this.ctx = (OmeroContext) ctx;
    }

    /**
     * Registers one Ice object factory per request type handled by the
     * repository servants: managed imports and raw repository access.
     * Each factory builds a fresh servant per create() call.
     */
    public Map<String, ObjectFactory> createFactories(Ice.Communicator ic) {
        Map<String, ObjectFactory> factories = new HashMap<String, ObjectFactory>();
        factories.put(ManagedImportRequestI.ice_staticId(), new ObjectFactory(
                ManagedImportRequestI.ice_staticId()) {
            @Override
            public Ice.Object create(String name) {
                // Each import request gets its own OMEROWrapper configured
                // from the pixels service's memoizer settings.
                ManagedImportRequestI mir = new ManagedImportRequestI(reg, sizes, dao,
                        new OMEROWrapper(
                                new ImportConfig(),
                                pixels.getMemoizerWait(),
                                pixels.getMemoizerDirectory()),
                        ring.uuid);
                mir.setResources(resources);
                return mir;
            }
        });
        factories.put(RawAccessRequestI.ice_staticId(), new ObjectFactory(
                RawAccessRequestI.ice_staticId()) {
            @Override
            public Ice.Object create(String name) {
                return new RawAccessRequestI(reg);
            }
        });
        return factories;
    }
}
| gpl-2.0 |
jballanc/openmicroscopy | components/server/src/ome/services/sessions/stats/CounterFactory.java | 2012 | /*
* $Id$
*
* Copyright 2008 Glencoe Software, Inc. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.services.sessions.stats;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.ApplicationEventPublisherAware;
/**
* Factory for creating counter objects. Passed to other Spring beans to prevent
* constant context lookups.
*
* @author Josh Moore, josh at glencoesoftware.com
* @since Beta4
*/
/**
 * Builds {@link SessionStats} instances wired to the application event
 * publisher, so other beans need not perform context lookups themselves.
 * Hard limits default to {@link Integer#MAX_VALUE} (effectively unlimited)
 * and can be lowered via the setters.
 */
public class CounterFactory implements ApplicationEventPublisherAware {

    protected ApplicationEventPublisher publisher;

    protected int objectsReadHardLimit = Integer.MAX_VALUE;

    protected int objectsWrittenHardLimit = Integer.MAX_VALUE;

    protected int methodHardLimit = Integer.MAX_VALUE;

    public void setApplicationEventPublisher(
            ApplicationEventPublisher applicationEventPublisher) {
        this.publisher = applicationEventPublisher;
    }

    public void setObjectsReadHardLimit(int objectsReadHardLimit) {
        this.objectsReadHardLimit = objectsReadHardLimit;
    }

    public void setObjectsWrittenHardLimit(int objectsWrittenHardLimit) {
        this.objectsWrittenHardLimit = objectsWrittenHardLimit;
    }

    public void setMethodHardLimit(int methodHardLimit) {
        this.methodHardLimit = methodHardLimit;
    }

    /**
     * Creates a fresh stats object whose three counters (reads, writes,
     * method calls) all publish through this factory's event publisher.
     */
    public SessionStats createStats() {
        ObjectsReadCounter reads = new ObjectsReadCounter(objectsReadHardLimit);
        reads.setApplicationEventPublisher(publisher);
        ObjectsWrittenCounter writes = new ObjectsWrittenCounter(objectsWrittenHardLimit);
        writes.setApplicationEventPublisher(publisher);
        MethodCounter calls = new MethodCounter(methodHardLimit);
        calls.setApplicationEventPublisher(publisher);
        return new SimpleSessionStats(reads, writes, calls);
    }
}
| gpl-2.0 |
barun-saha/one-simulator | one_1.5.1-RC2/core/ArithmeticCondition.java | 2460 | /*
* Copyright 2010 Aalto University, ComNet
* Released under GPLv3. See LICENSE.txt for details.
*/
package core;
/**
* This class presents a simple arithmetic condition: is value smaller than,
* bigger than, or equal to another value. The condition is given in text
* form, e.g., "< 42", and then different values can be matched against that
* condition.
* @author Ari
*/
public class ArithmeticCondition {
	/** Operators accepted as the first character of a condition string. */
	private static final String VALID_OPERATORS = "><=";

	private final char operator;
	private final double number;

	/**
	 * Creates a new condition based on the given string.
	 * @param cond The condition string. Must consist of one operator
	 * ("<", ">", or "=") and one double-precision floating point number,
	 * optionally suffixed with "k" (kilo) or "M" (mega).
	 * @throws SettingsError if the given string is not a valid condition
	 */
	public ArithmeticCondition(String cond) {
		if (cond.length() < 2) {
			throw new SettingsError("Invalid condition \"" + cond + "\"");
		}

		this.operator = cond.charAt(0);
		String value = cond.substring(1);

		/* strip an optional kilo/Mega suffix and remember its factor */
		int multiplier = 1;
		if (value.endsWith("k")) {
			multiplier = 1000;
			value = value.substring(0, value.length() - 1);
		} else if (value.endsWith("M")) {
			multiplier = 1000000;
			value = value.substring(0, value.length() - 1);
		}

		if (VALID_OPERATORS.indexOf(operator) < 0) {
			throw new SettingsError("Invalid operator in condition \"" + cond +
					"\" valid operators: " + VALID_OPERATORS);
		}

		double parsed;
		try {
			parsed = Double.parseDouble(value);
		} catch (NumberFormatException nfe) {
			throw new SettingsError("Invalid numeric value in condition \"" +
					cond + "\"");
		}

		this.number = parsed * multiplier;
	}

	/**
	 * Returns true if the given value satisfies "V X N" where V is the given
	 * value, X is the operator (from the settings), and N is the numeric value
	 * given after the operator in the settings.
	 * @param value The value to check
	 * @return true if the condition holds for the given value, false otherwise
	 */
	public boolean isTrueFor(double value) {
		if (operator == '<') {
			return value < this.number;
		} else if (operator == '>') {
			return value > this.number;
		} else if (operator == '=') {
			return value == this.number;
		}
		/* unreachable: the constructor rejects any other operator */
		throw new SettingsError("Invalid operator");
	}

	@Override
	public String toString() {
		return "Condition \"" + operator + " " + number + "\"";
	}
}
| gpl-3.0 |
xasx/wildfly | legacy/messaging/src/test/java/org/jboss/as/messaging/test/MessagingSubsystem15TestCase.java | 1945 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.messaging.test;
import java.io.IOException;
import org.jboss.as.messaging.MessagingExtension;
import org.junit.Test;
/**
* * @author <a href="http://jmesnil.net/">Jeff Mesnil</a> (c) 2013 Red Hat inc
*/
public class MessagingSubsystem15TestCase extends AbstractLegacySubsystemBaseTest {

    public MessagingSubsystem15TestCase() {
        super(MessagingExtension.SUBSYSTEM_NAME, new MessagingExtension());
    }

    /** Supplies the plain 1.5 subsystem configuration used by the base test. */
    @Override
    protected String getSubsystemXml() throws IOException {
        return readResource("subsystem_1_5.xml");
    }

    /** Verifies that expressions are accepted where the 1.5 schema allows them. */
    @Test
    public void testExpressions() throws Exception {
        standardSubsystemTest("subsystem_1_5_expressions.xml");
    }

    /**
     * Intentionally a no-op: XML parsed from the messaging 1.5 schema is not
     * marshalled back in the same form as the 2.0 output, so a comparison of
     * the two documents would always fail.
     */
    @Override
    protected void compareXml(String configId, String original, String marshalled) throws Exception {
        // see javadoc above for why no comparison is performed
    }
}
| lgpl-2.1 |
Yaliang/presto | presto-orc/src/main/java/com/facebook/presto/orc/OrcEncoding.java | 1674 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc;
import com.facebook.presto.orc.metadata.DwrfMetadataReader;
import com.facebook.presto.orc.metadata.DwrfMetadataWriter;
import com.facebook.presto.orc.metadata.MetadataReader;
import com.facebook.presto.orc.metadata.MetadataWriter;
import com.facebook.presto.orc.metadata.OrcMetadataReader;
import com.facebook.presto.orc.metadata.OrcMetadataWriter;
public enum OrcEncoding
{
    ORC,
    DWRF;

    /**
     * @return a metadata reader for this encoding's file format
     */
    public MetadataReader createMetadataReader()
    {
        return this == ORC ? new OrcMetadataReader() : new DwrfMetadataReader();
    }

    /**
     * @return a metadata writer for this encoding's file format
     */
    public MetadataWriter createMetadataWriter()
    {
        return this == ORC ? new OrcMetadataWriter() : new DwrfMetadataWriter();
    }
}
| apache-2.0 |
brat000012001/keycloak | testsuite/integration-arquillian/tests/base/src/main/java/org/keycloak/testsuite/console/page/events/Config.java | 4279 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.console.page.events;
import org.keycloak.testsuite.console.page.fragment.OnOffSwitch;
import org.keycloak.testsuite.page.Form;
import org.keycloak.testsuite.util.UIUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.ui.Select;
import static org.keycloak.testsuite.util.WaitUtils.waitUntilElement;
/**
* @author tkyjovsk
* @author mhajas
*/
public class Config extends Events {

    @Override
    public String getUriFragment() {
        return super.getUriFragment() + "/events-settings";
    }

    @FindBy(xpath = "//form")
    private ConfigForm form;

    /** @return the page object wrapping the events settings form */
    public ConfigForm form() {
        return form;
    }

    /** Form fragment exposing all controls of the realm events settings page. */
    public class ConfigForm extends Form {

        @FindBy(id = "s2id_autogen1")
        private WebElement eventListenersInput;

        @FindBy(xpath = "//div[@id='s2id_autogen1']/..//select")
        private Select eventListenersSelect;

        // renamed from "SaveEvents" to follow lowerCamelCase, consistent with
        // the sibling fields saveAdminEvents and includeRepresentation
        @FindBy(xpath = ".//div[@class='onoffswitch' and ./input[@id='enabled']]")
        private OnOffSwitch saveEvents;

        @FindBy(xpath = "//div[@id='s2id_enabledEventTypes']//input")
        private WebElement savedTypesInput;

        @FindBy(xpath = "//div[@id='select2-drop']/ul")
        private WebElement savedTypesOptions;

        @FindBy(id = "expiration")
        private WebElement expirationInput;

        @FindBy(name = "expirationUnit")
        private Select expirationUnitSelect;

        @FindBy(xpath = ".//div[@class='onoffswitch' and ./input[@id='adminEventsEnabled']]")
        private OnOffSwitch saveAdminEvents;

        @FindBy(xpath = ".//div[@class='onoffswitch' and ./input[@id='adminEventsDetailsEnabled']]")
        private OnOffSwitch includeRepresentation;

        @FindBy(xpath = "//button[@data-ng-click='clearEvents()']")
        private WebElement clearLoginEventsButton;

        @FindBy(xpath = "//button[@data-ng-click='clearAdminEvents()']")
        private WebElement clearAdminEventsButton;

        /** Selects the given listener in the "Event Listeners" select2 widget. */
        public void addEventListener(String listener) {
            eventListenersInput.click();
            eventListenersSelect.selectByVisibleText(listener);
        }

        /** Removes the given listener chip from the "Event Listeners" widget. */
        public void removeEventListener(String listener) {
            eventListenersInput.findElement(By.xpath("//div[text()='" + listener + "']/../a")).click();
        }

        /** Toggles the "Save Events" switch for login events. */
        public void setSaveEvents(boolean value) {
            saveEvents.setOn(value);
        }

        /** Adds an event type to the set of saved login event types. */
        public void addSaveType(String type) {
            savedTypesInput.click();
            savedTypesOptions.findElement(By.xpath("//div[text()='" + type + "']")).click();
        }

        /** Removes an event type from the set of saved login event types. */
        public void removeSaveType(String type) {
            savedTypesInput.findElement(By.xpath("//div[text()='" + type + "']/../a")).click();
        }

        /** Clicks the button that clears all stored login events. */
        public void clearLoginEvents() {
            clearLoginEventsButton.click();
        }

        /**
         * Sets the event expiration.
         *
         * @param value numeric expiration value (entered as text)
         * @param unit  visible text of the expiration unit option
         */
        public void setExpiration(String value, String unit) {
            expirationUnitSelect.selectByVisibleText(unit);
            UIUtils.setTextInputValue(expirationInput, value);
        }

        /** Toggles the "Save Events" switch for admin events. */
        public void setSaveAdminEvents(boolean value) {
            saveAdminEvents.setOn(value);
        }

        /** Toggles inclusion of the resource representation in admin events. */
        public void setIncludeRepresentation(boolean value) {
            includeRepresentation.setOn(value);
        }

        /** Clicks the button that clears all stored admin events. */
        public void clearAdminEvents() {
            clearAdminEventsButton.click();
        }

        /** Waits until the "clear events" button is present in the DOM. */
        public void waitForClearEventsButtonPresent() {
            waitUntilElement(clearLoginEventsButton).is().present();
        }
    }
}
| apache-2.0 |
lincoln-lil/flink | flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/ExecutionVertexDeploymentTest.java | 10970 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.utils.SimpleAckingTaskManagerGateway;
import org.apache.flink.runtime.jobmaster.LogicalSlot;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlot;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlotBuilder;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.util.TestLogger;
import org.junit.Test;
import java.util.concurrent.CompletableFuture;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.getExecutionVertex;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Tests for deploying an {@link ExecutionVertex} to a slot: the happy path,
 * rejection of repeated deployment, synchronous and asynchronous submission
 * failures, and an external failure arriving while deployment is in flight.
 */
public class ExecutionVertexDeploymentTest extends TestLogger {

    // marker message produced by SubmitFailingSimpleAckingTaskManagerGateway,
    // asserted on by the failure tests below
    private static final String ERROR_MESSAGE = "test_failure_error_message";

    /** Deploying a SCHEDULED vertex moves it to DEPLOYING; a second deploy is rejected. */
    @Test
    public void testDeployCall() {
        try {
            final ExecutionVertex vertex = getExecutionVertex();
            final LogicalSlot slot = new TestingLogicalSlotBuilder().createTestingLogicalSlot();

            assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
            vertex.getCurrentExecutionAttempt().transitionState(ExecutionState.SCHEDULED);
            vertex.deployToSlot(slot);
            assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

            // no repeated scheduling
            try {
                vertex.deployToSlot(slot);
                fail("Scheduled from wrong state");
            } catch (IllegalStateException e) {
                // as expected
            }

            assertFalse(vertex.getFailureInfo().isPresent());

            // timestamps must have been recorded for the states passed through
            assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Same as {@link #testDeployCall()}, but additionally checks that the
     * vertex has not (yet) recorded a RUNNING timestamp.
     */
    @Test
    public void testDeployWithSynchronousAnswer() {
        try {
            final ExecutionVertex vertex = getExecutionVertex();
            final LogicalSlot slot = new TestingLogicalSlotBuilder().createTestingLogicalSlot();

            assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
            vertex.getCurrentExecutionAttempt().transitionState(ExecutionState.SCHEDULED);
            vertex.deployToSlot(slot);
            assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

            // no repeated scheduling
            try {
                vertex.deployToSlot(slot);
                fail("Scheduled from wrong state");
            } catch (IllegalStateException e) {
                // as expected
            }

            assertFalse(vertex.getFailureInfo().isPresent());

            assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
            // not running yet, so no RUNNING timestamp may be set
            assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) == 0);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /** Repeated deployment is rejected both before and after the state check. */
    @Test
    public void testDeployWithAsynchronousAnswer() {
        try {
            final ExecutionVertex vertex = getExecutionVertex();
            final LogicalSlot slot = new TestingLogicalSlotBuilder().createTestingLogicalSlot();

            assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
            vertex.getCurrentExecutionAttempt().transitionState(ExecutionState.SCHEDULED);
            vertex.deployToSlot(slot);

            // no repeated scheduling
            try {
                vertex.deployToSlot(slot);
                fail("Scheduled from wrong state");
            } catch (IllegalStateException e) {
                // as expected
            }

            assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

            // no repeated scheduling
            try {
                vertex.deployToSlot(slot);
                fail("Scheduled from wrong state");
            } catch (IllegalStateException e) {
                // as expected
            }

            assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) == 0);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * A gateway that fails task submission with an already-completed future
     * must put the vertex into FAILED synchronously, with the failure cause
     * carrying {@link #ERROR_MESSAGE}.
     */
    @Test
    public void testDeployFailedSynchronous() {
        try {
            final ExecutionVertex vertex = getExecutionVertex();
            final LogicalSlot slot =
                    new TestingLogicalSlotBuilder()
                            .setTaskManagerGateway(
                                    new SubmitFailingSimpleAckingTaskManagerGateway())
                            .createTestingLogicalSlot();

            assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
            vertex.getCurrentExecutionAttempt().transitionState(ExecutionState.SCHEDULED);
            vertex.deployToSlot(slot);

            assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
            assertTrue(vertex.getFailureInfo().isPresent());
            assertThat(
                    vertex.getFailureInfo().map(ErrorInfo::getExceptionAsString).get(),
                    containsString(ERROR_MESSAGE));

            assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Same as {@link #testDeployFailedSynchronous()}, but the FAILED state is
     * reached asynchronously, so the test polls for it (up to ~1 second).
     */
    @Test
    public void testDeployFailedAsynchronously() {
        try {
            final ExecutionVertex vertex = getExecutionVertex();
            final LogicalSlot slot =
                    new TestingLogicalSlotBuilder()
                            .setTaskManagerGateway(
                                    new SubmitFailingSimpleAckingTaskManagerGateway())
                            .createTestingLogicalSlot();

            assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
            vertex.getCurrentExecutionAttempt().transitionState(ExecutionState.SCHEDULED);
            vertex.deployToSlot(slot);

            // wait until the state transition must be done
            for (int i = 0; i < 100; i++) {
                if (vertex.getExecutionState() == ExecutionState.FAILED
                        && vertex.getFailureInfo().isPresent()) {
                    break;
                } else {
                    Thread.sleep(10);
                }
            }

            assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
            assertTrue(vertex.getFailureInfo().isPresent());
            assertThat(
                    vertex.getFailureInfo().map(ErrorInfo::getExceptionAsString).get(),
                    containsString(ERROR_MESSAGE));

            assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * While a deployment is blocked in the gateway (future never completes),
     * an external {@code fail(...)} must move the vertex to FAILED and
     * preserve the given exception as the failure cause.
     */
    @Test
    public void testFailExternallyDuringDeploy() {
        try {
            final ExecutionVertex vertex = getExecutionVertex();

            TestingLogicalSlot testingLogicalSlot =
                    new TestingLogicalSlotBuilder()
                            .setTaskManagerGateway(
                                    new SubmitBlockingSimpleAckingTaskManagerGateway())
                            .createTestingLogicalSlot();

            assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
            vertex.getCurrentExecutionAttempt().transitionState(ExecutionState.SCHEDULED);
            vertex.deployToSlot(testingLogicalSlot);
            assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

            Exception testError = new Exception("test error");
            vertex.fail(testError);

            assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
            assertThat(
                    vertex.getFailureInfo()
                            .map(ErrorInfo::getException)
                            .get()
                            .deserializeError(ClassLoader.getSystemClassLoader()),
                    is(testError));

            assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
            assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /** Gateway whose task submission fails immediately with {@link #ERROR_MESSAGE}. */
    public static class SubmitFailingSimpleAckingTaskManagerGateway
            extends SimpleAckingTaskManagerGateway {
        @Override
        public CompletableFuture<Acknowledge> submitTask(
                TaskDeploymentDescriptor tdd, Time timeout) {
            CompletableFuture<Acknowledge> future = new CompletableFuture<>();
            future.completeExceptionally(new Exception(ERROR_MESSAGE));
            return future;
        }
    }

    /** Gateway whose task submission never completes, keeping the vertex DEPLOYING. */
    private static class SubmitBlockingSimpleAckingTaskManagerGateway
            extends SimpleAckingTaskManagerGateway {
        @Override
        public CompletableFuture<Acknowledge> submitTask(
                TaskDeploymentDescriptor tdd, Time timeout) {
            return new CompletableFuture<>();
        }
    }
}
| apache-2.0 |
bclozel/spring-boot | spring-boot-project/spring-boot-cli/src/main/java/org/springframework/boot/cli/compiler/dependencies/package-info.java | 743 | /*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Classes for dependencies used during compilation.
*/
package org.springframework.boot.cli.compiler.dependencies;
| apache-2.0 |
nicoben/pentaho-kettle | engine/test-src/org/pentaho/di/job/entries/http/JobEntryHTTP_PDI208_Test.java | 6116 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.http;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.job.Job;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
/**
 * Integration test for PDI-208: the HTTP job entry must take its URL, upload
 * file and destination file from fields of the incoming rows, both with the
 * default field names and with user-configured ones.
 */
public class JobEntryHTTP_PDI208_Test {
  public static final String HTTP_HOST = "localhost";
  public static final int HTTP_PORT = 9998;
  public static final String HTTP_SERVER_BASEURL = "http://localhost:9998";

  /** Tiny embedded HTTP server that echoes any uploaded content back to the client. */
  private static HttpServer httpServer;

  @BeforeClass
  public static void setupBeforeClass() throws KettleException, IOException {
    KettleClientEnvironment.init();
    JobEntryHTTP_PDI208_Test.startHTTPServer();
  }

  @AfterClass
  public static void tearDown() {
    JobEntryHTTP_PDI208_Test.stopHTTPServer();
  }

  /** Uploads/downloads using the default row field names (URL/UPLOAD/DESTINATION). */
  @Test
  public void testHTTPResultDefaultRows() throws IOException {
    File localFileForUpload = getInputFile( "existingFile1", ".tmp" );
    File tempFileForDownload = File.createTempFile( "downloadedFile1", ".tmp" );
    localFileForUpload.deleteOnExit();
    tempFileForDownload.deleteOnExit();

    Object[] r = new Object[] { HTTP_SERVER_BASEURL + "/uploadFile",
      localFileForUpload.getCanonicalPath(), tempFileForDownload.getCanonicalPath() };
    RowMeta rowMetaDefault = new RowMeta();
    rowMetaDefault.addValueMeta( new ValueMetaString( "URL" ) );
    rowMetaDefault.addValueMeta( new ValueMetaString( "UPLOAD" ) );
    rowMetaDefault.addValueMeta( new ValueMetaString( "DESTINATION" ) );
    List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
    rows.add( new RowMetaAndData( rowMetaDefault, r ) );
    Result previousResult = new Result();
    previousResult.setRows( rows );

    JobEntryHTTP http = new JobEntryHTTP();
    http.setParentJob( new Job() );
    http.setRunForEveryRow( true );
    http.setAddFilenameToResult( false );
    http.execute( previousResult, 0 );

    // the echo server returns the upload verbatim, so both files must match
    assertTrue( FileUtils.contentEquals( localFileForUpload, tempFileForDownload ) );
  }

  /** Uploads/downloads using custom row field names configured on the job entry. */
  @Test
  public void testHTTPResultCustomRows() throws IOException {
    File localFileForUpload = getInputFile( "existingFile2", ".tmp" );
    File tempFileForDownload = File.createTempFile( "downloadedFile2", ".tmp" );
    localFileForUpload.deleteOnExit();
    tempFileForDownload.deleteOnExit();

    Object[] r = new Object[] { HTTP_SERVER_BASEURL + "/uploadFile",
      localFileForUpload.getCanonicalPath(), tempFileForDownload.getCanonicalPath() };
    RowMeta rowMetaDefault = new RowMeta();
    rowMetaDefault.addValueMeta( new ValueMetaString( "MyURL" ) );
    rowMetaDefault.addValueMeta( new ValueMetaString( "MyUpload" ) );
    rowMetaDefault.addValueMeta( new ValueMetaString( "MyDestination" ) );
    List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
    rows.add( new RowMetaAndData( rowMetaDefault, r ) );
    Result previousResult = new Result();
    previousResult.setRows( rows );

    JobEntryHTTP http = new JobEntryHTTP();
    http.setParentJob( new Job() );
    http.setRunForEveryRow( true );
    http.setAddFilenameToResult( false );
    http.setUrlFieldname( "MyURL" );
    http.setUploadFieldname( "MyUpload" );
    http.setDestinationFieldname( "MyDestination" );
    http.execute( previousResult, 0 );

    assertTrue( FileUtils.contentEquals( localFileForUpload, tempFileForDownload ) );
  }

  /** Creates a temp file with random (UUID) content to upload. */
  private File getInputFile( String prefix, String suffix ) throws IOException {
    File inputFile = File.createTempFile( prefix, suffix );
    FileUtils.writeStringToFile( inputFile, UUID.randomUUID().toString(), "UTF-8" );
    return inputFile;
  }

  /** Starts the echo server; the "/uploadFile" context mirrors the request body. */
  private static void startHTTPServer() throws IOException {
    httpServer = HttpServer.create( new InetSocketAddress( JobEntryHTTP_PDI208_Test.HTTP_HOST, JobEntryHTTP_PDI208_Test.HTTP_PORT ), 10 );
    httpServer.createContext( "/uploadFile", new HttpHandler() {
      @Override
      public void handle( HttpExchange httpExchange ) throws IOException {
        // Fix: close the exchange even when reading/writing throws, so a
        // failed request cannot leak the request/response streams.
        // HttpExchange.close() also closes both streams.
        try {
          Headers h = httpExchange.getResponseHeaders();
          h.add( "Content-Type", "application/octet-stream" );
          httpExchange.sendResponseHeaders( 200, 0 );
          InputStream is = httpExchange.getRequestBody();
          OutputStream os = httpExchange.getResponseBody();
          int inputChar = -1;
          while ( ( inputChar = is.read() ) >= 0 ) {
            os.write( inputChar );
          }
          os.flush();
        } finally {
          httpExchange.close();
        }
      }
    } );
    httpServer.start();
  }

  /** Stops the echo server, waiting at most 2 seconds for active exchanges. */
  private static void stopHTTPServer() {
    httpServer.stop( 2 );
  }
}
| apache-2.0 |
abstractj/keycloak | services/src/main/java/org/keycloak/forms/login/freemarker/Templates.java | 3271 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.forms.login.freemarker;
import org.keycloak.forms.login.LoginFormsPages;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class Templates {

    /**
     * Maps a login-forms page to the FreeMarker template file that renders it.
     *
     * @param page the page to render
     * @return the template file name (e.g. {@code "login.ftl"})
     * @throws IllegalArgumentException if no template is mapped for the page
     */
    public static String getTemplate(LoginFormsPages page) {
        switch (page) {
            case LOGIN:
                return "login.ftl";
            case LOGIN_USERNAME:
                return "login-username.ftl";
            case LOGIN_PASSWORD:
                return "login-password.ftl";
            case LOGIN_TOTP:
                return "login-otp.ftl";
            case LOGIN_CONFIG_TOTP:
                return "login-config-totp.ftl";
            case LOGIN_WEBAUTHN:
                return "webauthn-authenticate.ftl";
            case LOGIN_VERIFY_EMAIL:
                return "login-verify-email.ftl";
            case LOGIN_IDP_LINK_CONFIRM:
                return "login-idp-link-confirm.ftl";
            case LOGIN_IDP_LINK_EMAIL:
                return "login-idp-link-email.ftl";
            case OAUTH_GRANT:
                return "login-oauth-grant.ftl";
            case LOGIN_RESET_PASSWORD:
                return "login-reset-password.ftl";
            case LOGIN_UPDATE_PASSWORD:
                return "login-update-password.ftl";
            case LOGIN_OAUTH2_DEVICE_VERIFY_USER_CODE:
                return "login-oauth2-device-verify-user-code.ftl";
            case LOGIN_SELECT_AUTHENTICATOR:
                return "select-authenticator.ftl";
            case REGISTER:
                return "register.ftl";
            case REGISTER_USER_PROFILE:
                return "register-user-profile.ftl";
            case INFO:
                return "info.ftl";
            case ERROR:
                return "error.ftl";
            case ERROR_WEBAUTHN:
                return "webauthn-error.ftl";
            case LOGIN_UPDATE_PROFILE:
                return "login-update-profile.ftl";
            case CODE:
                return "code.ftl";
            case LOGIN_PAGE_EXPIRED:
                return "login-page-expired.ftl";
            case X509_CONFIRM:
                return "login-x509-info.ftl";
            case SAML_POST_FORM:
                return "saml-post-form.ftl";
            case UPDATE_USER_PROFILE:
                return "update-user-profile.ftl";
            case IDP_REVIEW_USER_PROFILE:
                return "idp-review-user-profile.ftl";
            case FRONTCHANNEL_LOGOUT:
                return "frontchannel-logout.ftl";
            default:
                // previously thrown without a message; include the page to ease debugging
                throw new IllegalArgumentException("No template defined for page: " + page);
        }
    }
}
| apache-2.0 |
brettwooldridge/btm | btm/src/main/java/bitronix/tm/gui/TransactionLogHeaderPanel.java | 3195 | /*
* Copyright (C) 2006-2013 Bitronix Software (http://www.bitronix.be)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bitronix.tm.gui;
import bitronix.tm.journal.TransactionLogHeader;
import bitronix.tm.utils.Decoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Date;
/**
* @author Ludovic Orban
*/
public class TransactionLogHeaderPanel extends JPanel {

    private final static Logger log = LoggerFactory.getLogger(TransactionLogHeaderPanel.class);

    // read-only display fields for the journal header, laid out left to right
    private final JTextField logFileField = new JTextField();
    private final JTextField timestampField = new JTextField();
    private final JTextField stateField = new JTextField();
    private final JTextField positionField = new JTextField();

    /** Builds a horizontal row of read-only, borderless text fields. */
    public TransactionLogHeaderPanel() {
        logFileField.setEditable(false);
        timestampField.setEditable(false);
        stateField.setEditable(false);
        positionField.setEditable(false);

        logFileField.setBorder(null);
        timestampField.setBorder(null);
        stateField.setBorder(null);
        positionField.setBorder(null);

        setLayout(new BoxLayout(this, BoxLayout.X_AXIS));
        add(logFileField);
        add(timestampField);
        add(stateField);
        add(positionField);
    }

    /** Displays the journal file's name. */
    public void setLogFile(File logFile) {
        logFileField.setText(logFile.getName());
    }

    /** Displays the header timestamp, formatted with the console date format. */
    public void setTimestamp(long timestamp) {
        timestampField.setText(Console.dateFormatter.format(new Date(timestamp)));
    }

    /** Displays the decoded header state. */
    public void setState(byte state) {
        stateField.setText(Decoder.decodeHeaderState(state));
    }

    /** Displays the header position. */
    public void setPosition(long position) {
        positionField.setText(String.valueOf(position));
    }

    /**
     * Reads the transaction log header of the given journal file and shows it.
     * The active journal file is rendered in bold.
     *
     * @param logFile the journal file to read
     * @param active whether this file is the currently active journal
     * @throws IOException if the file or its header cannot be read
     */
    public void read(File logFile, boolean active) throws IOException {
        RandomAccessFile raf = new RandomAccessFile(logFile, "r");
        TransactionLogHeader header;
        try {
            header = new TransactionLogHeader(raf.getChannel(), 0L);
        } finally {
            // fix: previously the file was leaked when reading the header threw
            raf.close();
        }

        if (log.isDebugEnabled()) { log.debug("read header: " + header); }

        setLogFile(logFile);
        setTimestamp(header.getTimestamp());
        setState(header.getState());
        setPosition(header.getPosition());

        Font font = active
                ? logFileField.getFont().deriveFont(Font.BOLD)
                : logFileField.getFont().deriveFont(Font.PLAIN);
        logFileField.setFont(font);
        timestampField.setFont(font);
        stateField.setFont(font);
        positionField.setFont(font);
    }
}
| apache-2.0 |
nicoben/pentaho-kettle | ui/src/org/pentaho/di/ui/job/entries/sendnagiospassivecheck/JobEntrySendNagiosPassiveCheckDialog.java | 24337 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.job.entries.sendnagiospassivecheck;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.util.SocketUtil;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entries.sendnagiospassivecheck.JobEntrySendNagiosPassiveCheck;
import org.pentaho.di.job.entry.JobEntryDialogInterface;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.core.widget.LabelText;
import org.pentaho.di.ui.core.widget.LabelTextVar;
import org.pentaho.di.ui.core.widget.StyledTextComp;
import org.pentaho.di.ui.job.dialog.JobDialog;
import org.pentaho.di.ui.job.entry.JobEntryDialog;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
* This dialog allows you to edit the SendNagiosPassiveCheck job entry settings.
*
* @author Samatar
* @since 01-10-2011
*/
public class JobEntrySendNagiosPassiveCheckDialog extends JobEntryDialog implements JobEntryDialogInterface {
  private static Class<?> PKG = JobEntrySendNagiosPassiveCheck.class; // for i18n purposes, needed by Translator2!!

  // Entry name widget
  private LabelText wName;
  private FormData fdName;

  // NSCA server connection settings
  private LabelTextVar wServerName;
  private FormData fdServerName;
  private LabelTextVar wResponseTimeOut;
  private FormData fdResponseTimeOut;
  private LabelTextVar wPassword;
  private FormData fdPassword;

  // Sender identification (reported host/service)
  private LabelTextVar wSenderServerName;
  private FormData fdSenderServerName;
  private LabelTextVar wSenderServiceName;
  private FormData fdSenderServiceName;

  private Button wOK, wCancel;
  private Listener lsOK, lsCancel;
  private JobEntrySendNagiosPassiveCheck jobEntry;
  private Shell shell;
  private SelectionAdapter lsDef;
  private boolean changed;

  private Group wServerSettings;
  private FormData fdServerSettings;

  private CTabFolder wTabFolder;
  private Composite wGeneralComp;
  private CTabItem wGeneralTab;
  private FormData fdGeneralComp;
  private FormData fdTabFolder;

  private FormData fdPort;
  private LabelTextVar wPort;

  private FormData fdwConnectionTimeOut;
  private LabelTextVar wConnectionTimeOut;

  // "Test connection" button
  private Button wTest;
  private FormData fdTest;
  private Listener lsTest;

  private Group wSenderSettings;
  private FormData fdSenderSettings;

  // Message to send as the passive check result
  private Group wMessageGroup;
  private FormData fdMessageGroup;
  private Label wlMessage;
  private StyledTextComp wMessage;
  private FormData fdlMessage, fdMessage;

  private Label wlEncryptionMode;
  private CCombo wEncryptionMode;
  private FormData fdlEncryptionMode, fdEncryptionMode;

  private Label wlLevelMode;
  private CCombo wLevelMode;
  private FormData fdlLevelMode, fdLevelMode;

  /**
   * Creates the dialog for the given job entry, assigning a default entry name when none is set yet.
   */
  public JobEntrySendNagiosPassiveCheckDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep,
    JobMeta jobMeta ) {
    super( parent, jobEntryInt, rep, jobMeta );
    jobEntry = (JobEntrySendNagiosPassiveCheck) jobEntryInt;
    if ( this.jobEntry.getName() == null ) {
      this.jobEntry.setName( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Name.Default" ) );
    }
  }

  /**
   * Builds and opens the dialog, then runs the SWT event loop until it is closed.
   *
   * @return the edited job entry, or null when the user cancelled
   */
  public JobEntryInterface open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();

    shell = new Shell( parent, props.getJobsDialogStyle() );
    props.setLook( shell );
    JobDialog.setShellImage( shell, jobEntry );

    // Any edit in a field marks the job entry as changed.
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        jobEntry.setChanged();
      }
    };
    changed = jobEntry.hasChanged();

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;

    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Title" ) );

    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;

    // Job entry name line
    wName =
      new LabelText( shell, BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Name.Label" ), BaseMessages
        .getString( PKG, "JobSendNagiosPassiveCheck.Name.Tooltip" ) );
    wName.addModifyListener( lsMod );
    fdName = new FormData();
    fdName.top = new FormAttachment( 0, 0 );
    fdName.left = new FormAttachment( 0, 0 );
    fdName.right = new FormAttachment( 100, 0 );
    wName.setLayoutData( fdName );

    wTabFolder = new CTabFolder( shell, SWT.BORDER );
    props.setLook( wTabFolder, PropsUI.WIDGET_STYLE_TAB );

    // ////////////////////////
    // START OF GENERAL TAB ///
    // ////////////////////////

    wGeneralTab = new CTabItem( wTabFolder, SWT.NONE );
    wGeneralTab.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.ServerSettings.General" ) );

    wGeneralComp = new Composite( wTabFolder, SWT.NONE );
    props.setLook( wGeneralComp );

    FormLayout generalLayout = new FormLayout();
    generalLayout.marginWidth = 3;
    generalLayout.marginHeight = 3;
    wGeneralComp.setLayout( generalLayout );

    // ////////////////////////
    // START OF SERVER SETTINGS GROUP///
    // /
    wServerSettings = new Group( wGeneralComp, SWT.SHADOW_NONE );
    props.setLook( wServerSettings );
    wServerSettings
      .setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.ServerSettings.Group.Label" ) );

    FormLayout ServerSettingsgroupLayout = new FormLayout();
    ServerSettingsgroupLayout.marginWidth = 10;
    ServerSettingsgroupLayout.marginHeight = 10;
    wServerSettings.setLayout( ServerSettingsgroupLayout );

    // ServerName line
    wServerName = new LabelTextVar( jobMeta, wServerSettings,
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Server.Label" ),
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Server.Tooltip" ) );
    props.setLook( wServerName );
    wServerName.addModifyListener( lsMod );
    fdServerName = new FormData();
    fdServerName.left = new FormAttachment( 0, 0 );
    fdServerName.top = new FormAttachment( wName, margin );
    fdServerName.right = new FormAttachment( 100, 0 );
    wServerName.setLayoutData( fdServerName );

    // Server port line
    wPort = new LabelTextVar( jobMeta, wServerSettings,
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Port.Label" ),
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Port.Tooltip" ) );
    props.setLook( wPort );
    wPort.addModifyListener( lsMod );
    fdPort = new FormData();
    fdPort.left = new FormAttachment( 0, 0 );
    fdPort.top = new FormAttachment( wServerName, margin );
    fdPort.right = new FormAttachment( 100, 0 );
    wPort.setLayoutData( fdPort );

    // Password String line
    // FIX: the tooltip lookup previously omitted the PKG argument, so the key itself was
    // treated as the package name and the tooltip could never resolve.
    wPassword =
      new LabelTextVar( jobMeta, wServerSettings, BaseMessages.getString(
        PKG, "JobSendNagiosPassiveCheck.Password.Label" ), BaseMessages
        .getString( PKG, "JobSendNagiosPassiveCheck.Password.Tooltip" ), true );
    props.setLook( wPassword );
    wPassword.addModifyListener( lsMod );
    fdPassword = new FormData();
    fdPassword.left = new FormAttachment( 0, 0 );
    fdPassword.top = new FormAttachment( wPort, margin );
    fdPassword.right = new FormAttachment( 100, 0 );
    wPassword.setLayoutData( fdPassword );

    // Server wConnectionTimeOut line
    wConnectionTimeOut =
      new LabelTextVar( jobMeta, wServerSettings,
        BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.ConnectionTimeOut.Label" ),
        BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.ConnectionTimeOut.Tooltip" ) );
    props.setLook( wConnectionTimeOut );
    wConnectionTimeOut.addModifyListener( lsMod );
    fdwConnectionTimeOut = new FormData();
    fdwConnectionTimeOut.left = new FormAttachment( 0, 0 );
    fdwConnectionTimeOut.top = new FormAttachment( wPassword, margin );
    fdwConnectionTimeOut.right = new FormAttachment( 100, 0 );
    wConnectionTimeOut.setLayoutData( fdwConnectionTimeOut );

    // ResponseTimeOut line
    wResponseTimeOut = new LabelTextVar( jobMeta, wServerSettings,
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.ResponseTimeOut.Label" ),
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.ResponseTimeOut.Tooltip" ) );
    props.setLook( wResponseTimeOut );
    wResponseTimeOut.addModifyListener( lsMod );
    fdResponseTimeOut = new FormData();
    fdResponseTimeOut.left = new FormAttachment( 0, 0 );
    fdResponseTimeOut.top = new FormAttachment( wConnectionTimeOut, margin );
    fdResponseTimeOut.right = new FormAttachment( 100, 0 );
    wResponseTimeOut.setLayoutData( fdResponseTimeOut );

    // Test connection button
    wTest = new Button( wServerSettings, SWT.PUSH );
    wTest.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.TestConnection.Label" ) );
    props.setLook( wTest );
    fdTest = new FormData();
    wTest.setToolTipText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.TestConnection.Tooltip" ) );
    fdTest.top = new FormAttachment( wResponseTimeOut, margin );
    fdTest.right = new FormAttachment( 100, 0 );
    wTest.setLayoutData( fdTest );

    fdServerSettings = new FormData();
    fdServerSettings.left = new FormAttachment( 0, margin );
    fdServerSettings.top = new FormAttachment( wName, margin );
    fdServerSettings.right = new FormAttachment( 100, -margin );
    wServerSettings.setLayoutData( fdServerSettings );
    // ///////////////////////////////////////////////////////////
    // / END OF SERVER SETTINGS GROUP
    // ///////////////////////////////////////////////////////////

    // ////////////////////////
    // START OF Advanced SETTINGS GROUP///
    // /
    wSenderSettings = new Group( wGeneralComp, SWT.SHADOW_NONE );
    props.setLook( wSenderSettings );
    wSenderSettings
      .setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.SenderSettings.Group.Label" ) );

    FormLayout SenderSettingsgroupLayout = new FormLayout();
    SenderSettingsgroupLayout.marginWidth = 10;
    SenderSettingsgroupLayout.marginHeight = 10;
    wSenderSettings.setLayout( SenderSettingsgroupLayout );

    // SenderServerName line
    wSenderServerName = new LabelTextVar( jobMeta, wSenderSettings,
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.SenderServerName.Label" ),
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.SenderServerName.Tooltip" ) );
    props.setLook( wSenderServerName );
    wSenderServerName.addModifyListener( lsMod );
    fdSenderServerName = new FormData();
    fdSenderServerName.left = new FormAttachment( 0, 0 );
    fdSenderServerName.top = new FormAttachment( wServerSettings, margin );
    fdSenderServerName.right = new FormAttachment( 100, 0 );
    wSenderServerName.setLayoutData( fdSenderServerName );

    // SenderServiceName line
    wSenderServiceName = new LabelTextVar( jobMeta, wSenderSettings,
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.SenderServiceName.Label" ),
      BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.SenderServiceName.Tooltip" ) );
    props.setLook( wSenderServiceName );
    wSenderServiceName.addModifyListener( lsMod );
    fdSenderServiceName = new FormData();
    fdSenderServiceName.left = new FormAttachment( 0, 0 );
    fdSenderServiceName.top = new FormAttachment( wSenderServerName, margin );
    fdSenderServiceName.right = new FormAttachment( 100, 0 );
    wSenderServiceName.setLayoutData( fdSenderServiceName );

    // Encryption mode
    wlEncryptionMode = new Label( wSenderSettings, SWT.RIGHT );
    wlEncryptionMode.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.EncryptionMode.Label" ) );
    props.setLook( wlEncryptionMode );
    fdlEncryptionMode = new FormData();
    fdlEncryptionMode.left = new FormAttachment( 0, margin );
    fdlEncryptionMode.right = new FormAttachment( middle, -margin );
    fdlEncryptionMode.top = new FormAttachment( wSenderServiceName, margin );
    wlEncryptionMode.setLayoutData( fdlEncryptionMode );
    wEncryptionMode = new CCombo( wSenderSettings, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER );
    wEncryptionMode.setItems( JobEntrySendNagiosPassiveCheck.encryption_mode_Desc );
    props.setLook( wEncryptionMode );
    fdEncryptionMode = new FormData();
    fdEncryptionMode.left = new FormAttachment( middle, margin );
    fdEncryptionMode.top = new FormAttachment( wSenderServiceName, margin );
    fdEncryptionMode.right = new FormAttachment( 100, 0 );
    wEncryptionMode.setLayoutData( fdEncryptionMode );
    wEncryptionMode.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        // selection alone needs no extra handling; value is read back in ok()
      }
    } );

    // Level mode
    wlLevelMode = new Label( wSenderSettings, SWT.RIGHT );
    wlLevelMode.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.LevelMode.Label" ) );
    props.setLook( wlLevelMode );
    fdlLevelMode = new FormData();
    fdlLevelMode.left = new FormAttachment( 0, margin );
    fdlLevelMode.right = new FormAttachment( middle, -margin );
    fdlLevelMode.top = new FormAttachment( wEncryptionMode, margin );
    wlLevelMode.setLayoutData( fdlLevelMode );
    wLevelMode = new CCombo( wSenderSettings, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER );
    wLevelMode.setItems( JobEntrySendNagiosPassiveCheck.level_type_Desc );
    props.setLook( wLevelMode );
    fdLevelMode = new FormData();
    fdLevelMode.left = new FormAttachment( middle, margin );
    fdLevelMode.top = new FormAttachment( wEncryptionMode, margin );
    fdLevelMode.right = new FormAttachment( 100, 0 );
    wLevelMode.setLayoutData( fdLevelMode );
    wLevelMode.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        // selection alone needs no extra handling; value is read back in ok()
      }
    } );

    fdSenderSettings = new FormData();
    fdSenderSettings.left = new FormAttachment( 0, margin );
    fdSenderSettings.top = new FormAttachment( wServerSettings, margin );
    fdSenderSettings.right = new FormAttachment( 100, -margin );
    wSenderSettings.setLayoutData( fdSenderSettings );
    // ///////////////////////////////////////////////////////////
    // / END OF Advanced SETTINGS GROUP
    // ///////////////////////////////////////////////////////////

    // ////////////////////////
    // START OF MESSAGE GROUP///
    // /
    wMessageGroup = new Group( wGeneralComp, SWT.SHADOW_NONE );
    props.setLook( wMessageGroup );
    wMessageGroup.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.MessageGroup.Group.Label" ) );
    FormLayout MessageGroupgroupLayout = new FormLayout();
    MessageGroupgroupLayout.marginWidth = 10;
    MessageGroupgroupLayout.marginHeight = 10;
    wMessageGroup.setLayout( MessageGroupgroupLayout );

    // Message line
    wlMessage = new Label( wMessageGroup, SWT.RIGHT );
    wlMessage.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Message.Label" ) );
    props.setLook( wlMessage );
    fdlMessage = new FormData();
    fdlMessage.left = new FormAttachment( 0, 0 );
    fdlMessage.top = new FormAttachment( wSenderSettings, margin );
    fdlMessage.right = new FormAttachment( middle, -margin );
    wlMessage.setLayoutData( fdlMessage );

    wMessage =
      new StyledTextComp( jobMeta, wMessageGroup, SWT.MULTI
        | SWT.LEFT | SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL, "" );
    props.setLook( wMessage );
    wMessage.addModifyListener( lsMod );
    fdMessage = new FormData();
    fdMessage.left = new FormAttachment( middle, 0 );
    fdMessage.top = new FormAttachment( wSenderSettings, margin );
    fdMessage.right = new FormAttachment( 100, -2 * margin );
    fdMessage.bottom = new FormAttachment( 100, -margin );
    wMessage.setLayoutData( fdMessage );

    fdMessageGroup = new FormData();
    fdMessageGroup.left = new FormAttachment( 0, margin );
    fdMessageGroup.top = new FormAttachment( wSenderSettings, margin );
    fdMessageGroup.right = new FormAttachment( 100, -margin );
    fdMessageGroup.bottom = new FormAttachment( 100, -margin );
    wMessageGroup.setLayoutData( fdMessageGroup );
    // ///////////////////////////////////////////////////////////
    // / END OF MESSAGE GROUP
    // ///////////////////////////////////////////////////////////

    fdGeneralComp = new FormData();
    fdGeneralComp.left = new FormAttachment( 0, 0 );
    fdGeneralComp.top = new FormAttachment( 0, 0 );
    fdGeneralComp.right = new FormAttachment( 100, 0 );
    fdGeneralComp.bottom = new FormAttachment( 100, 0 );
    wGeneralComp.setLayoutData( fdGeneralComp );

    wGeneralComp.layout();
    wGeneralTab.setControl( wGeneralComp );
    props.setLook( wGeneralComp );
    // ///////////////////////////////////////////////////////////
    // / END OF GENERAL TAB
    // ///////////////////////////////////////////////////////////

    fdTabFolder = new FormData();
    fdTabFolder.left = new FormAttachment( 0, 0 );
    fdTabFolder.top = new FormAttachment( wName, margin );
    fdTabFolder.right = new FormAttachment( 100, 0 );
    fdTabFolder.bottom = new FormAttachment( 100, -50 );
    wTabFolder.setLayoutData( fdTabFolder );

    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );

    BaseStepDialog.positionBottomButtons( shell, new Button[] { wOK, wCancel }, margin, wTabFolder );

    // Add listeners
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };

    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };

    lsTest = new Listener() {
      public void handleEvent( Event e ) {
        test();
      }
    };

    wCancel.addListener( SWT.Selection, lsCancel );
    wOK.addListener( SWT.Selection, lsOK );
    wTest.addListener( SWT.Selection, lsTest );

    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };

    wName.addSelectionListener( lsDef );
    wServerName.addSelectionListener( lsDef );
    wResponseTimeOut.addSelectionListener( lsDef );

    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );

    getData();

    wTabFolder.setSelection( 0 );
    BaseStepDialog.setSize( shell );
    shell.open();
    props.setDialogSize( shell, "JobSendNagiosPassiveCheckDialogSize" );
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return jobEntry;
  }

  /**
   * Tries to open a socket to the configured host/port (after variable substitution)
   * and shows a message box with the result.
   */
  private void test() {
    boolean testOK = false;
    String errMsg = null;
    String hostname = jobMeta.environmentSubstitute( wServerName.getText() );
    int nrPort =
      Const.toInt(
        jobMeta.environmentSubstitute( "" + wPort.getText() ), JobEntrySendNagiosPassiveCheck.DEFAULT_PORT );
    int realConnectionTimeOut = Const.toInt( jobMeta.environmentSubstitute( wConnectionTimeOut.getText() ), -1 );

    try {
      SocketUtil.connectToHost( hostname, nrPort, realConnectionTimeOut );
      testOK = true;
    } catch ( Exception e ) {
      errMsg = e.getMessage();
    }
    if ( testOK ) {
      MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
      mb.setMessage( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Connected.OK", hostname ) + Const.CR );
      mb.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Connected.Title.Ok" ) );
      mb.open();
    } else {
      MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
      mb.setMessage( BaseMessages.getString(
        PKG, "JobSendNagiosPassiveCheck.Connected.NOK.ConnectionBad", hostname )
        + Const.CR + errMsg + Const.CR );
      mb.setText( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Connected.Title.Bad" ) );
      mb.open();
    }
  }

  /**
   * Persists the window position/size and disposes the shell.
   */
  public void dispose() {
    WindowProperty winprop = new WindowProperty( shell );
    props.setScreen( winprop );
    shell.dispose();
  }

  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    // Const.nullToEmpty used consistently (previously mixed with the equivalent Const.NVL( x, "" )).
    wName.setText( Const.nullToEmpty( jobEntry.getName() ) );
    wServerName.setText( Const.nullToEmpty( jobEntry.getServerName() ) );
    wPort.setText( Const.nullToEmpty( jobEntry.getPort() ) );
    wConnectionTimeOut.setText( Const.nullToEmpty( jobEntry.getConnectionTimeOut() ) );
    wResponseTimeOut.setText( Const.nullToEmpty( jobEntry.getResponseTimeOut() ) );
    wPassword.setText( Const.nullToEmpty( jobEntry.getPassword() ) );
    wSenderServerName.setText( Const.nullToEmpty( jobEntry.getSenderServerName() ) );
    wSenderServiceName.setText( Const.nullToEmpty( jobEntry.getSenderServiceName() ) );
    wMessage.setText( Const.nullToEmpty( jobEntry.getMessage() ) );
    wEncryptionMode.setText( JobEntrySendNagiosPassiveCheck.getEncryptionModeDesc( jobEntry.getEncryptionMode() ) );
    wLevelMode.setText( JobEntrySendNagiosPassiveCheck.getLevelDesc( jobEntry.getLevel() ) );

    wName.selectAll();
    wName.setFocus();
  }

  /**
   * Discards all edits (restores the original "changed" flag) and closes the dialog.
   */
  private void cancel() {
    jobEntry.setChanged( changed );
    jobEntry = null;
    dispose();
  }

  /**
   * Validates the entry name, copies all dialog fields back into the job entry and closes the dialog.
   */
  private void ok() {
    if ( Utils.isEmpty( wName.getText() ) ) {
      MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
      mb.setMessage( BaseMessages.getString( PKG, "System.Error.StepNameMissing.Message" ) );
      mb.setText( BaseMessages.getString( PKG, "System.Error.StepNameMissing.Title" ) );
      mb.open();
      return;
    }
    jobEntry.setName( wName.getText() );
    jobEntry.setPort( wPort.getText() );
    jobEntry.setServerName( wServerName.getText() );
    jobEntry.setConnectionTimeOut( wConnectionTimeOut.getText() );
    jobEntry.setResponseTimeOut( wResponseTimeOut.getText() );
    jobEntry.setSenderServerName( wSenderServerName.getText() );
    jobEntry.setSenderServiceName( wSenderServiceName.getText() );
    jobEntry.setMessage( wMessage.getText() );
    jobEntry
      .setEncryptionMode( JobEntrySendNagiosPassiveCheck.getEncryptionModeByDesc( wEncryptionMode.getText() ) );
    jobEntry.setLevel( JobEntrySendNagiosPassiveCheck.getLevelByDesc( wLevelMode.getText() ) );
    jobEntry.setPassword( wPassword.getText() );
    dispose();
  }

  public boolean evaluates() {
    return true;
  }

  public boolean isUnconditional() {
    return false;
  }
}
| apache-2.0 |
ingokegel/intellij-community | python/python-psi-api/src/com/jetbrains/python/psi/PyFunction.java | 3806 | // Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.psi;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiNameIdentifierOwner;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.util.ArrayFactory;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.psi.stubs.PyFunctionStub;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* Function declaration in source (the {@code def} and everything within).
*
* @author yole
*/
public interface PyFunction extends PsiNamedElement, StubBasedPsiElement<PyFunctionStub>, PsiNameIdentifierOwner, PyStatement, PyCallable,
                                    PyDocStringOwner, ScopeOwner, PyDecoratable, PyTypedElement, PyStatementListContainer,
                                    PyPossibleClassMember, PyTypeCommentOwner, PyAnnotationOwner {

  // Shared empty array to avoid per-call allocations.
  PyFunction[] EMPTY_ARRAY = new PyFunction[0];
  // Factory used by stub/array APIs; reuses EMPTY_ARRAY for zero-length requests.
  ArrayFactory<PyFunction> ARRAY_FACTORY = count -> count == 0 ? EMPTY_ARRAY : new PyFunction[count];

  /**
   * Returns the AST node for the function name identifier.
   *
   * @return the node, or null if the function is incomplete (only the "def"
   * keyword was typed)
   */
  @Nullable
  ASTNode getNameNode();

  /**
   * Returns the type inferred from the return statements in the function body,
   * or null if it cannot be determined.
   */
  @Nullable
  PyType getReturnStatementType(@NotNull TypeEvalContext context);

  /**
   * If the function raises a DeprecationWarning or a PendingDeprecationWarning, returns the explanation text provided for the warning..
   *
   * @return the deprecation message or null if the function is not deprecated.
   */
  @Nullable
  String getDeprecationMessage();

  /**
   * Looks for two standard decorators to a function, or a wrapping assignment that closely follows it.
   *
   * @return a flag describing what was detected, or null if neither classmethod nor staticmethod applies.
   */
  @Nullable
  Modifier getModifier();

  /**
   * Checks whether the function contains a yield expression in its body.
   */
  boolean isGenerator();

  /**
   * Checks whether the function is declared with the {@code async} keyword.
   */
  boolean isAsync();

  /**
   * Checks whether the {@code async} keyword is allowed for this function
   * (e.g. depending on the language level).
   */
  boolean isAsyncAllowed();

  /**
   * Checks whether the body does nothing but raise NotImplementedError; default is false.
   */
  default boolean onlyRaisesNotImplementedError() {
    return false;
  }

  /**
   * Flags that mark common alterations of a function: decoration by and wrapping in classmethod() and staticmethod().
   */
  enum Modifier {
    /**
     * Function is decorated with @classmethod, its first param is the class.
     */
    CLASSMETHOD,
    /**
     * Function is decorated with {@code @staticmethod}, its first param is as in a regular function.
     */
    STATICMETHOD,
  }

  /**
   * Returns a property for which this function is a getter, setter or deleter.
   *
   * @return the corresponding property, or null if there isn't any.
   */
  @Nullable
  Property getProperty();

  /**
   * Searches for function attributes.
   * See <a href="http://legacy.python.org/dev/peps/pep-0232/">PEP-0232</a>
   * @return assignment statements for function attributes
   */
  @NotNull
  List<PyAssignmentStatement> findAttributes();

  /**
   * @return function protection level (underscore based)
   */
  @NotNull
  ProtectionLevel getProtectionLevel();

  /**
   * Python-convention visibility of the function, derived from leading underscores in the name.
   */
  enum ProtectionLevel {
    /**
     * public members
     */
    PUBLIC(0),
    /**
     * _protected_members (single leading underscore)
     */
    PROTECTED(1),
    /**
     * __private_members (double leading underscore)
     */
    PRIVATE(2);

    // Number of leading underscores that signals this level.
    private final int myUnderscoreLevel;

    ProtectionLevel(final int underscoreLevel) {
      myUnderscoreLevel = underscoreLevel;
    }

    /**
     * @return number of underscores
     */
    public int getUnderscoreLevel() {
      return myUnderscoreLevel;
    }
  }
}
| apache-2.0 |
deki/spring-boot | spring-boot/src/main/java/org/springframework/boot/diagnostics/analyzer/InvalidConfigurationPropertyNameFailureAnalyzer.java | 2629 | /*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.diagnostics.analyzer;
import java.util.stream.Collectors;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.boot.context.properties.source.InvalidConfigurationPropertyNameException;
import org.springframework.boot.diagnostics.AbstractFailureAnalyzer;
import org.springframework.boot.diagnostics.FailureAnalysis;
/**
* An {@link AbstractFailureAnalyzer} that performs analysis of failures caused by
* {@link InvalidConfigurationPropertyNameException}.
*
* @author Madhura Bhave
* @since 2.0.0
*/
public class InvalidConfigurationPropertyNameFailureAnalyzer
		extends AbstractFailureAnalyzer<InvalidConfigurationPropertyNameException> {

	@Override
	protected FailureAnalysis analyze(Throwable rootFailure,
			InvalidConfigurationPropertyNameException cause) {
		// Locate the bean-creation failure (if any) so the offending bean can be named.
		BeanCreationException beanCreationFailure = findCause(rootFailure,
				BeanCreationException.class);
		String action = String.format(
				"Modify '%s' so that it conforms to the canonical names requirements.",
				cause.getName());
		String description = buildDescription(cause, beanCreationFailure);
		return new FailureAnalysis(description, action, cause);
	}

	/**
	 * Builds the human-readable description: the invalid property name, the offending
	 * characters, the bean involved (when known) and the canonical-name rules.
	 */
	private String buildDescription(InvalidConfigurationPropertyNameException cause,
			BeanCreationException exception) {
		String invalidCharacters = cause.getInvalidCharacters().stream()
				.map((character) -> "'" + character + "'")
				.collect(Collectors.joining(", "));
		StringBuilder description = new StringBuilder();
		description.append(String.format(
				"Configuration property name '%s' is not valid:%n", cause.getName()));
		description.append(String.format("%n    Invalid characters: %s", invalidCharacters));
		if (exception != null) {
			description.append(String.format("%n    Bean: %s", exception.getBeanName()));
		}
		description.append(String.format("%n    Reason: Canonical names should be "
				+ "kebab-case ('-' separated), lowercase alpha-numeric characters"
				+ " and must start with a letter"));
		return description.toString();
	}

}
| apache-2.0 |
signed/intellij-community | plugins/ant/src/com/intellij/lang/ant/dom/AntBooleanConverter.java | 1735 | /*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.lang.ant.dom;
import com.intellij.util.xml.ConvertContext;
import com.intellij.util.xml.Converter;
import com.intellij.util.xml.GenericAttributeValue;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
/**
* @author Eugene Zhuravlev
* Date: Aug 3, 2010
*/
public class AntBooleanConverter extends Converter<Boolean> {
public final Boolean DEFAULT_VALUE;
public AntBooleanConverter() {
DEFAULT_VALUE = null;
}
public AntBooleanConverter(boolean defaultValue) {
DEFAULT_VALUE = Boolean.valueOf(defaultValue);
}
public Boolean fromString(@Nullable @NonNls String s, ConvertContext context) {
if (s == null || s.length() == 0) {
return DEFAULT_VALUE;
}
return "true".equalsIgnoreCase(s) || "yes".equalsIgnoreCase(s);
}
public String toString(@Nullable Boolean aBoolean, ConvertContext context) {
final GenericAttributeValue attribValue = context.getInvocationElement().getParentOfType(GenericAttributeValue.class, false);
if (attribValue == null) {
return null;
}
return attribValue.getRawText();
}
}
| apache-2.0 |
GlenRSmith/elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyResponseTests.java | 3869 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.security.action;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import org.elasticsearch.xcontent.XContentParser;
import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import static org.hamcrest.Matchers.equalTo;
public class CreateApiKeyResponseTests extends AbstractXContentTestCase<CreateApiKeyResponse> {

    @Override
    protected CreateApiKeyResponse doParseInstance(XContentParser parser) throws IOException {
        return CreateApiKeyResponse.fromXContent(parser);
    }

    @Override
    protected CreateApiKeyResponse createTestInstance() {
        // Random name/id/key, with an optional expiration a week from now.
        String apiKeyName = randomAlphaOfLengthBetween(1, 256);
        SecureString apiKey = new SecureString(UUIDs.randomBase64UUID().toCharArray());
        Instant expiration = randomBoolean() ? Instant.now().plus(7L, ChronoUnit.DAYS) : null;
        String apiKeyId = randomAlphaOfLength(100);
        return new CreateApiKeyResponse(apiKeyName, apiKeyId, apiKey, expiration);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }

    /** Round-trips a response through the transport wire format and checks equality. */
    public void testSerialization() throws IOException {
        CreateApiKeyResponse original = createTestInstance();
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            original.writeTo(output);
            try (StreamInput input = output.bytes().streamInput()) {
                assertThat(new CreateApiKeyResponse(input), equalTo(original));
            }
        }
    }

    public void testEqualsHashCode() {
        CreateApiKeyResponse response = createTestInstance();
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(response, CreateApiKeyResponseTests::copy);
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(response, CreateApiKeyResponseTests::copy,
            CreateApiKeyResponseTests::mutateTestItem);
    }

    /** Field-for-field copy, used to verify the equals/hashCode contract. */
    private static CreateApiKeyResponse copy(CreateApiKeyResponse original) {
        return new CreateApiKeyResponse(original.getName(), original.getId(), original.getKey(), original.getExpiration());
    }

    /** Returns a copy of {@code original} with exactly one randomly chosen field changed. */
    private static CreateApiKeyResponse mutateTestItem(CreateApiKeyResponse original) {
        switch (randomIntBetween(0, 3)) {
            case 1:
                return new CreateApiKeyResponse(original.getName(), randomAlphaOfLength(5), original.getKey(),
                    original.getExpiration());
            case 2:
                return new CreateApiKeyResponse(original.getName(), original.getId(),
                    new SecureString(UUIDs.randomBase64UUID().toCharArray()), original.getExpiration());
            case 3:
                return new CreateApiKeyResponse(original.getName(), original.getId(), original.getKey(), Instant.now());
            default:
                // Case 0 (the original had an identical branch for 0 and for default): change the name.
                return new CreateApiKeyResponse(randomAlphaOfLength(5), original.getId(), original.getKey(),
                    original.getExpiration());
        }
    }
}
| apache-2.0 |
jirmauritz/perun | perun-web-gui/src/main/java/cz/metacentrum/perun/webgui/json/propagationStatsReader/GetFacilityState.java | 11161 | package cz.metacentrum.perun.webgui.json.propagationStatsReader;
import com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.json.client.JSONNumber;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.cellview.client.ColumnSortEvent.ListHandler;
import com.google.gwt.user.cellview.client.RowStyles;
import com.google.gwt.view.client.DefaultSelectionEventManager;
import com.google.gwt.view.client.ListDataProvider;
import com.google.gwt.view.client.MultiSelectionModel;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.resources.TableSorter;
import cz.metacentrum.perun.webgui.json.*;
import cz.metacentrum.perun.webgui.json.keyproviders.FacilityStateKeyProvider;
import cz.metacentrum.perun.webgui.model.FacilityState;
import cz.metacentrum.perun.webgui.model.PerunError;
import cz.metacentrum.perun.webgui.tabs.facilitiestabs.FacilityDetailTabItem;
import cz.metacentrum.perun.webgui.widgets.AjaxLoaderImage;
import cz.metacentrum.perun.webgui.widgets.PerunTable;
import cz.metacentrum.perun.webgui.widgets.UnaccentMultiWordSuggestOracle;
import java.util.ArrayList;
import java.util.Comparator;
/**
* Ajax query to get facility propagation state for 1 or All facilities or Facilities related to some VO
*
* @author Pavel Zlamal <256627@mail.muni.cz>
*/
public class GetFacilityState implements JsonCallback, JsonCallbackTable<FacilityState>, JsonCallbackOracle<FacilityState> {

	// Session
	private PerunWebSession session = PerunWebSession.getInstance();
	// JSON URL
	static private final String JSON_URL = "propagationStatsReader/getFacilityState";
	// External events
	private JsonCallbackEvents events = new JsonCallbackEvents();
	// data providers
	private ListDataProvider<FacilityState> dataProvider = new ListDataProvider<FacilityState>();
	private ArrayList<FacilityState> list = new ArrayList<FacilityState>();
	private PerunTable<FacilityState> table;
	// Selection model
	final MultiSelectionModel<FacilityState> selectionModel = new MultiSelectionModel<FacilityState>(new FacilityStateKeyProvider());
	// loader image
	private AjaxLoaderImage loaderImage = new AjaxLoaderImage();
	private int facilityId = 0;
	private int voId = 0;
	// oracle support; fullBackup keeps the unfiltered list while a filter is applied
	private ArrayList<FacilityState> fullBackup = new ArrayList<FacilityState>();
	private UnaccentMultiWordSuggestOracle oracle = new UnaccentMultiWordSuggestOracle();

	/**
	 * New instance of get facility state
	 *
	 * @param facilityId - can be 0 if we want all facilities
	 * @param voId - if NOT ZERO, get all facilities related to this VO
	 */
	public GetFacilityState(int facilityId, int voId) {
		this.facilityId = facilityId;
		this.voId = voId;
	}

	/**
	 * New instance of get facility state with external events
	 *
	 * @param facilityId - can be 0 if we want all facilities
	 * @param voId - if NOT ZERO, get all facilities related to this VO
	 * @param events external events
	 */
	public GetFacilityState(int facilityId, int voId, JsonCallbackEvents events) {
		this.facilityId = facilityId;
		this.voId = voId;
		this.events = events;
	}

	/**
	 * Returns table of facilities with their propagation states - starts RPC call
	 *
	 * @return table widget
	 */
	public CellTable<FacilityState> getTable() {

		retrieveData();

		// Table data provider.
		dataProvider = new ListDataProvider<FacilityState>(list);

		// Cell table
		table = new PerunTable<FacilityState>(list);

		// Connect the table to the data provider.
		dataProvider.addDataDisplay(table);

		// Sorting
		ListHandler<FacilityState> columnSortHandler = new ListHandler<FacilityState>(dataProvider.getList());
		table.addColumnSortHandler(columnSortHandler);

		// table selection
		table.setSelectionModel(selectionModel, DefaultSelectionEventManager.<FacilityState> createCheckboxManager());

		// set empty content & loader
		table.setEmptyTableWidget(loaderImage);

		// facility column; clicking a row opens the facility detail tab
		// (only for Perun admins or admins of that facility)
		Column<FacilityState, String> facilityColumn = JsonUtils.addColumn(
				new JsonUtils.GetValue<FacilityState, String>() {
					public String getValue(FacilityState object) {
						return String.valueOf(object.getFacility().getName());
					}
				}, new FieldUpdater<FacilityState, String>(){
					public void update(int index, FacilityState object, String value) {
						if (session.isPerunAdmin() || session.isFacilityAdmin(object.getFacility().getId())) {
							session.getTabManager().addTab(new FacilityDetailTabItem(object.getFacility(), 2));
						}
					}
				});

		facilityColumn.setSortable(true);
		columnSortHandler.setComparator(facilityColumn, new Comparator<FacilityState>(){
			public int compare(FacilityState o1, FacilityState o2) {
				return o1.getFacility().getName().compareToIgnoreCase((o2.getFacility().getName()));
			}
		});

		// status column (propagation state as plain text)
		Column<FacilityState, String> statusColumn = JsonUtils.addColumn(
				new JsonUtils.GetValue<FacilityState, String>() {
					public String getValue(FacilityState object) {
						return String.valueOf(object.getState());
					}
				}, null);

		statusColumn.setSortable(true);
		columnSortHandler.setComparator(statusColumn, new Comparator<FacilityState>(){
			public int compare(FacilityState o1, FacilityState o2) {
				return o1.getState().compareToIgnoreCase(o2.getState());
			}
		});

		table.addColumn(facilityColumn, "Facility");
		table.addColumn(statusColumn, "Propagation state");

		// set row styles based on task state
		table.setRowStyles(new RowStyles<FacilityState>(){
			public String getStyleNames(FacilityState row, int rowIndex) {

				if (row.getState().equalsIgnoreCase("NOT_DETERMINED")) {
					return "";
				}
				else if (row.getState().equalsIgnoreCase("OK")){
					return "rowgreen";
				}
				else if (row.getState().equalsIgnoreCase("PROCESSING")){
					return "rowyellow";
				}
				else if (row.getState().equalsIgnoreCase("OPEN")){
					return "roworange";
				}
				else if (row.getState().equalsIgnoreCase("ERROR")){
					return "rowred";
				}
				return "";

			}
		});

		return table;
	}

	/**
	 * Sorts table by facility name
	 */
	public void sortTable() {
		list = new TableSorter<FacilityState>().sortByFacilityName(getList());
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Add object as new row to table
	 *
	 * @param object FacilityState to be added as new row
	 */
	public void addToTable(FacilityState object) {
		list.add(object);
		oracle.add(object.getFacility().getName());
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Removes object as row from table
	 *
	 * @param object FacilityState to be removed as row
	 */
	public void removeFromTable(FacilityState object) {
		list.remove(object);
		selectionModel.getSelectedSet().remove(object);
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Clear all table content (including the filter backup and selection)
	 */
	public void clearTable(){
		loaderImage.loadingStart();
		list.clear();
		fullBackup.clear();
		selectionModel.clear();
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Clears list of selected items
	 */
	public void clearTableSelectedSet(){
		selectionModel.clear();
	}

	/**
	 * Return selected items from list
	 *
	 * @return return list of checked items
	 */
	public ArrayList<FacilityState> getTableSelectedList(){
		return JsonUtils.setToList(selectionModel.getSelectedSet());
	}

	/**
	 * Called, when an error occurs
	 */
	public void onError(PerunError error) {
		session.getUiElements().setLogErrorText("Error while loading FacilityState");
		loaderImage.loadingError(error);
		events.onError(error);
	}

	/**
	 * Called, when loading starts
	 */
	public void onLoadingStart() {
		session.getUiElements().setLogText("Loading FacilityState started.");
		events.onLoadingStart();
	}

	/**
	 * Called, when operation finishes successfully.
	 */
	public void onFinished(JavaScriptObject jso) {
		setList(JsonUtils.<FacilityState>jsoAsList(jso));
		sortTable();
		session.getUiElements().setLogText("FacilityState loaded: " + list.size());
		events.onFinished(jso);
		loaderImage.loadingFinished();
	}

	/**
	 * Inserts object at a specific position in the table
	 *
	 * @param index position to insert at
	 * @param object FacilityState to be inserted
	 */
	public void insertToTable(int index, FacilityState object) {
		list.add(index, object);
		oracle.add(object.getFacility().getName());
		dataProvider.flush();
		dataProvider.refresh();
	}

	public void setEditable(boolean editable) {
		// not supported for this table
	}

	public void setCheckable(boolean checkable) {
		// not supported for this table
	}

	/**
	 * Replaces table content with provided list
	 *
	 * @param list new table content
	 */
	public void setList(ArrayList<FacilityState> list) {
		clearTable();
		this.list.addAll(list);
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Returns the current (possibly filtered) table content
	 *
	 * @return list backing the table
	 */
	public ArrayList<FacilityState> getList() {
		return this.list;
	}

	/**
	 * Retrieve data from RPC. The endpoint depends on the constructor params:
	 * a single facility, all facilities of the user, or all facilities of a VO.
	 */
	public void retrieveData() {

		final JsonCallback passToCallback = this;

		JsonPostClient jsp = new JsonPostClient(new JsonCallbackEvents(){
			@Override
			public void onFinished(JavaScriptObject jso) {
				passToCallback.onFinished(jso);
			}
			@Override
			public void onError(PerunError error) {
				passToCallback.onError(error);
			}
			@Override
			public void onLoadingStart() {
				passToCallback.onLoadingStart();
			}
		});

		if (facilityId != 0 ){
			// get specific facility
			jsp.put("facility", new JSONNumber(facilityId));
			jsp.sendData(JSON_URL);
		} else if (voId == 0) {
			// get all facilities where user is admin
			jsp.sendNativeData("propagationStatsReader/getAllFacilitiesStates", "{}");
		} else {
			// get facilities related to VO
			jsp.put("vo", new JSONNumber(voId));
			jsp.sendData("propagationStatsReader/getAllFacilitiesStates");
		}

	}

	public UnaccentMultiWordSuggestOracle getOracle(){
		return this.oracle;
	}

	/**
	 * Filters table content by facility name prefix (case-insensitive).
	 * An empty filter string restores the full, unfiltered content.
	 *
	 * @param text facility name prefix
	 */
	public void filterTable(String text){

		// store list only for first time
		// FIX: original tested "fullBackup.isEmpty() || fullBackup == null" - the null
		// check came after the dereference (NPE before it could ever apply) and is dead
		// anyway, since the field is initialized at declaration.
		if (fullBackup.isEmpty()) {
			fullBackup.addAll(list);
		}

		// always clear selected items
		selectionModel.clear();
		list.clear();

		if (text.equalsIgnoreCase("")) {
			list.addAll(fullBackup);
		} else {
			for (FacilityState fac : fullBackup){
				// store facility by filter
				if (fac.getFacility().getName().toLowerCase().startsWith(text.toLowerCase())) {
					list.add(fac);
				}
			}
		}

		dataProvider.flush();
		dataProvider.refresh();
		loaderImage.loadingFinished();

	}

	public void setOracle(UnaccentMultiWordSuggestOracle oracle) {
		this.oracle = oracle;
	}

}
| bsd-2-clause |
exponent/exponent | android/ReactAndroid/src/main/java/com/facebook/react/views/textinput/ReactTextInputEvent.java | 1864 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.react.views.textinput;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
/**
* Event emitted by EditText native view when text changes. VisibleForTesting from {@link
* TextInputEventsTestCase}.
*/
public class ReactTextInputEvent extends Event<ReactTextInputEvent> {

  public static final String EVENT_NAME = "topTextInput";

  // Event payload; never reassigned after construction, so declared final.
  private final String mText;
  private final String mPreviousText;
  private final int mRangeStart;
  private final int mRangeEnd;

  /**
   * @param viewId tag of the text input view that emitted the event
   * @param text the new text content
   * @param previousText the text content before the change
   * @param rangeStart start offset of the changed range within the previous text
   * @param rangeEnd end offset of the changed range within the previous text
   */
  public ReactTextInputEvent(
      int viewId, String text, String previousText, int rangeStart, int rangeEnd) {
    super(viewId);
    mText = text;
    mPreviousText = previousText;
    mRangeStart = rangeStart;
    mRangeEnd = rangeEnd;
  }

  @Override
  public String getEventName() {
    return EVENT_NAME;
  }

  @Override
  public boolean canCoalesce() {
    // We don't want to miss any textinput event, as event data is incremental.
    return false;
  }

  @Override
  public void dispatch(RCTEventEmitter rctEventEmitter) {
    rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
  }

  /** Builds the JS-side event payload: text, previousText, range {start,end}, target. */
  private WritableMap serializeEventData() {
    WritableMap eventData = Arguments.createMap();
    WritableMap range = Arguments.createMap();
    range.putDouble("start", mRangeStart);
    range.putDouble("end", mRangeEnd);

    eventData.putString("text", mText);
    eventData.putString("previousText", mPreviousText);
    eventData.putMap("range", range);

    eventData.putInt("target", getViewTag());
    return eventData;
  }
}
| bsd-3-clause |
tinkerpop/frames | src/main/java/com/tinkerpop/frames/core/FramedGraphQueryImpl.java | 2137 | package com.tinkerpop.frames.core;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Predicate;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.frames.FramedGraph;
import com.tinkerpop.frames.FramedGraphQuery;
import com.tinkerpop.frames.structures.FramedEdgeIterable;
import com.tinkerpop.frames.structures.FramedVertexIterable;
/**
 * A {@link FramedGraphQuery} that delegates every query-building call to an
 * underlying Blueprints {@link GraphQuery} and wraps the resulting elements
 * in frames created by the owning {@link FramedGraph}.
 */
public class FramedGraphQueryImpl implements FramedGraphQuery {

	private GraphQuery delegate;
	private FramedGraph<?> framedGraph;

	public FramedGraphQueryImpl(FramedGraph<?> graph, GraphQuery graphQuery) {
		this.framedGraph = graph;
		this.delegate = graphQuery;
	}

	public FramedGraphQuery has(String key) {
		this.delegate = this.delegate.has(key);
		return this;
	}

	public FramedGraphQuery hasNot(String key) {
		this.delegate = this.delegate.hasNot(key);
		return this;
	}

	public FramedGraphQuery has(String key, Object value) {
		this.delegate = this.delegate.has(key, value);
		return this;
	}

	public FramedGraphQuery hasNot(String key, Object value) {
		this.delegate = this.delegate.hasNot(key, value);
		return this;
	}

	public FramedGraphQuery has(String key, Predicate predicate, Object value) {
		this.delegate = this.delegate.has(key, predicate, value);
		return this;
	}

	public <T extends Comparable<T>> FramedGraphQuery has(String key, T value,
			Compare compare) {
		this.delegate = this.delegate.has(key, value, compare);
		return this;
	}

	public <T extends Comparable<?>> FramedGraphQuery interval(String key,
			T startValue, T endValue) {
		this.delegate = this.delegate.interval(key, startValue, endValue);
		return this;
	}

	public FramedGraphQuery limit(int limit) {
		this.delegate = this.delegate.limit(limit);
		return this;
	}

	@Override
	public <T> Iterable<T> edges(Class<T> kind) {
		return new FramedEdgeIterable<T>(framedGraph, edges(), kind);
	}

	@Override
	public <T> Iterable<T> vertices(Class<T> kind) {
		return new FramedVertexIterable<T>(framedGraph, vertices(), kind);
	}

	@Override
	public Iterable<Edge> edges() {
		return delegate.edges();
	}

	@Override
	public Iterable<Vertex> vertices() {
		return delegate.vertices();
	}
}
| bsd-3-clause |
steand/openhab2-addons | addons/binding/org.openhab.binding.smaenergymeter/src/main/java/org/openhab/binding/smaenergymeter/handler/SMAEnergyMeterHandler.java | 4249 | /**
* Copyright (c) 2014-2016 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.smaenergymeter.handler;
import static org.openhab.binding.smaenergymeter.SMAEnergyMeterBindingConstants.*;
import java.io.IOException;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.eclipse.smarthome.core.thing.ChannelUID;
import org.eclipse.smarthome.core.thing.Thing;
import org.eclipse.smarthome.core.thing.ThingStatus;
import org.eclipse.smarthome.core.thing.ThingStatusDetail;
import org.eclipse.smarthome.core.thing.binding.BaseThingHandler;
import org.eclipse.smarthome.core.types.Command;
import org.eclipse.smarthome.core.types.RefreshType;
import org.openhab.binding.smaenergymeter.configuration.EnergyMeterConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The {@link SMAEnergyMeterHandler} is responsible for handling commands, which are
* sent to one of the channels.
*
* @author Osman Basha - Initial contribution
*/
public class SMAEnergyMeterHandler extends BaseThingHandler {

    private Logger logger = LoggerFactory.getLogger(SMAEnergyMeterHandler.class);
    // Multicast reader for the meter's datagrams; created in initialize(), released in dispose().
    private EnergyMeter energyMeter;
    // Periodic refresh task; cancelled in dispose().
    private ScheduledFuture<?> pollingJob;

    public SMAEnergyMeterHandler(Thing thing) {
        super(thing);
    }

    @Override
    public void handleCommand(ChannelUID channelUID, Command command) {
        // The binding is read-only: only REFRESH is honoured, any other command is rejected.
        if (command == RefreshType.REFRESH) {
            logger.debug("Refreshing {}", channelUID);
            updateData();
        } else {
            logger.warn("This binding is a read-only binding and cannot handle commands");
        }
    }

    @Override
    public void initialize() {
        logger.debug("Initializing SMAEnergyMeter handler '{}'", getThing().getUID());

        EnergyMeterConfig config = getConfigAs(EnergyMeterConfig.class);
        // Fall back to the meter's default multicast port when none is configured.
        int port = (config.getPort() == null) ? EnergyMeter.DEFAULT_MCAST_PORT : config.getPort();
        energyMeter = new EnergyMeter(config.getMcastGroup(), port);
        try {
            // First update doubles as a connectivity probe; it also yields the serial number.
            energyMeter.update();

            updateProperty(Thing.PROPERTY_VENDOR, "SMA");
            updateProperty(Thing.PROPERTY_SERIAL_NUMBER, energyMeter.getSerialNumber());
            logger.debug("Found a SMA Energy Meter with S/N '{}'", energyMeter.getSerialNumber());
        } catch (IOException e) {
            // Cannot reach the meter: mark the thing offline and skip scheduling the poll job.
            updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.OFFLINE.CONFIGURATION_ERROR, e.getMessage());
            return;
        }

        // Default polling period is 30 seconds when not configured.
        int pollingPeriod = (config.getPollingPeriod() == null) ? 30 : config.getPollingPeriod();
        pollingJob = scheduler.scheduleWithFixedDelay(new Runnable() {
            @Override
            public void run() {
                updateData();
            }
        }, 0, pollingPeriod, TimeUnit.SECONDS);
        logger.debug("Polling job scheduled to run every {} sec. for '{}'", pollingPeriod, getThing().getUID());

        updateStatus(ThingStatus.ONLINE);
    }

    @Override
    public void dispose() {
        logger.debug("Disposing SMAEnergyMeter handler '{}'", getThing().getUID());
        if (pollingJob != null) {
            // Cancel even if currently running; a half-finished poll is harmless here.
            pollingJob.cancel(true);
            pollingJob = null;
        }
        energyMeter = null;
    }

    // Reads fresh values from the meter and pushes them to all channels.
    // Synchronized so an explicit REFRESH cannot race the scheduled poll.
    private synchronized void updateData() {
        logger.debug("Update SMAEnergyMeter data '{}'", getThing().getUID());

        try {
            energyMeter.update();

            updateState(CHANNEL_POWER_IN, energyMeter.getPowerIn());
            updateState(CHANNEL_POWER_OUT, energyMeter.getPowerOut());
            updateState(CHANNEL_ENERGY_IN, energyMeter.getEnergyIn());
            updateState(CHANNEL_ENERGY_OUT, energyMeter.getEnergyOut());

            // Recover automatically once communication works again.
            if (getThing().getStatus().equals(ThingStatus.OFFLINE)) {
                updateStatus(ThingStatus.ONLINE);
            }
        } catch (IOException e) {
            updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.OFFLINE.COMMUNICATION_ERROR, e.getMessage());
        }
    }
}
| epl-1.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jvmti/RedefineClasses/redefclass020/newclass/redefclass020a.java | 1319 | /*
* Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package nsk.jvmti.RedefineClasses;
/**
 * Helper class used as the redefinition target of the test; it simply
 * stores a single int value behind accessor methods.
 */
class redefclass020a {
    /** The stored value; zero until {@link #setValue} is called. */
    int intValue;

    public redefclass020a() {
        this.intValue = 0;
    }

    /** Stores a new value. */
    public void setValue(int i) {
        this.intValue = i;
    }

    /** Returns the currently stored value. */
    public int getValue() {
        return this.intValue;
    }
}
| gpl-2.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jvmti/MethodExit/mexit001.java | 1909 | /*
* Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package nsk.jvmti.MethodExit;
import java.io.PrintStream;
public class mexit001 {

    // Offset added to the test result to produce a JCK-style process exit status.
    final static int JCK_STATUS_BASE = 95;

    static {
        // Load the JVMTI agent library that implements the native methods below;
        // failing to load it is fatal for the test, so the error is rethrown.
        try {
            System.loadLibrary("mexit001");
        } catch (UnsatisfiedLinkError ule) {
            System.err.println("Could not load mexit001 library");
            System.err.println("java.library.path:"
                + System.getProperty("java.library.path"));
            throw ule;
        }
    }

    // Implemented by the native agent: verifies the MethodExit events received.
    native static int check();
    // Implemented by the native agent: prepares the agent-side state before checking.
    native static int init0();

    public static void main(String args[]) {
        args = nsk.share.jvmti.JVMTITest.commonInit(args);

        // produce JCK-like exit status.
        System.exit(run(args, System.out) + JCK_STATUS_BASE);
    }

    public static int run(String args[], PrintStream out) {
        init0();
        return check();
    }
}
| gpl-2.0 |
rex-xxx/mt6572_x201 | frameworks/opt/telephony/src/java/com/android/internal/telephony/cdma/SignalToneUtil.java | 15465 | /*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.internal.telephony.cdma;
import java.util.HashMap;
import java.util.HashSet;
import android.util.Log;
import android.media.ToneGenerator;
public class SignalToneUtil {
/** A marker that isn't a valid TONE */
public static final int CDMA_INVALID_TONE = -1;
// public final int int IS95_CONST_IR_SIGNAL_TYPE_TYPE;
static public final int IS95_CONST_IR_SIGNAL_TONE = 0;
static public final int IS95_CONST_IR_SIGNAL_ISDN = 1;
static public final int IS95_CONST_IR_SIGNAL_IS54B = 2;
static public final int IS95_CONST_IR_SIGNAL_USR_DEFD_ALERT = 4;
// public final int int IS95_CONST_IR_ALERT_PITCH_TYPE;
static public final int IS95_CONST_IR_ALERT_MED = 0;
static public final int IS95_CONST_IR_ALERT_HIGH = 1;
static public final int IS95_CONST_IR_ALERT_LOW = 2;
// Based on 3GPP2 C.S0005-E, seciton 3.7.5.5 Signal,
// set TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN to 0 to avoid
// the alert pitch to be involved in hash calculation for
// signal type other than IS54B.
static public final int TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN = 0;
// public final int int IS95_CONST_IR_SIGNAL_TYPE;
static public final int IS95_CONST_IR_SIG_ISDN_NORMAL = 0;
static public final int IS95_CONST_IR_SIG_ISDN_INTGRP = 1;
static public final int IS95_CONST_IR_SIG_ISDN_SP_PRI = 2;
static public final int IS95_CONST_IR_SIG_ISDN_PAT_3 = 3;
static public final int IS95_CONST_IR_SIG_ISDN_PING = 4;
static public final int IS95_CONST_IR_SIG_ISDN_PAT_5 = 5;
static public final int IS95_CONST_IR_SIG_ISDN_PAT_6 = 6;
static public final int IS95_CONST_IR_SIG_ISDN_PAT_7 = 7;
static public final int IS95_CONST_IR_SIG_ISDN_OFF = 15;
static public final int IS95_CONST_IR_SIG_TONE_DIAL = 0;
static public final int IS95_CONST_IR_SIG_TONE_RING = 1;
static public final int IS95_CONST_IR_SIG_TONE_INT = 2;
static public final int IS95_CONST_IR_SIG_TONE_ABB_INT = 3;
static public final int IS95_CONST_IR_SIG_TONE_REORDER = 4;
static public final int IS95_CONST_IR_SIG_TONE_ABB_RE = 5;
static public final int IS95_CONST_IR_SIG_TONE_BUSY = 6;
static public final int IS95_CONST_IR_SIG_TONE_CONFIRM = 7;
static public final int IS95_CONST_IR_SIG_TONE_ANSWER = 8;
static public final int IS95_CONST_IR_SIG_TONE_CALL_W = 9;
static public final int IS95_CONST_IR_SIG_TONE_PIP = 10;
static public final int IS95_CONST_IR_SIG_TONE_NO_TONE = 63;
static public final int IS95_CONST_IR_SIG_IS54B_NO_TONE = 0;
static public final int IS95_CONST_IR_SIG_IS54B_L = 1;
static public final int IS95_CONST_IR_SIG_IS54B_SS = 2;
static public final int IS95_CONST_IR_SIG_IS54B_SSL = 3;
static public final int IS95_CONST_IR_SIG_IS54B_SS_2 = 4;
static public final int IS95_CONST_IR_SIG_IS54B_SLS = 5;
static public final int IS95_CONST_IR_SIG_IS54B_S_X4 = 6;
static public final int IS95_CONST_IR_SIG_IS54B_PBX_L = 7;
static public final int IS95_CONST_IR_SIG_IS54B_PBX_SS = 8;
static public final int IS95_CONST_IR_SIG_IS54B_PBX_SSL = 9;
static public final int IS95_CONST_IR_SIG_IS54B_PBX_SLS = 10;
static public final int IS95_CONST_IR_SIG_IS54B_PBX_S_X4 = 11;
static public final int IS95_CONST_IR_SIG_TONE_ABBR_ALRT = 0;
// Hashmap to map signalInfo To AudioTone
static private HashMap<Integer, Integer> hm = new HashMap<Integer, Integer>();
private static Integer signalParamHash(int signalType, int alertPitch, int signal) {
if ((signalType < 0) || (signalType > 256) || (alertPitch > 256) ||
(alertPitch < 0) || (signal > 256) || (signal < 0)) {
return new Integer(CDMA_INVALID_TONE);
}
// Based on 3GPP2 C.S0005-E, seciton 3.7.5.5 Signal,
// the alert pitch field is ignored by the mobile station unless
// SIGNAL_TYPE is '10',IS-54B Alerting.
// Set alert pitch to TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN
// so the alert pitch is not involved in hash calculation
// when signal type is not IS-54B.
if (signalType != IS95_CONST_IR_SIGNAL_IS54B) {
alertPitch = TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN;
}
return new Integer(signalType * 256 * 256 + alertPitch * 256 + signal);
}
public static int getAudioToneFromSignalInfo(int signalType, int alertPitch, int signal) {
Integer result = hm.get(signalParamHash(signalType, alertPitch, signal));
if (result == null) {
return CDMA_INVALID_TONE;
}
return result;
}
static {
/* SIGNAL_TYPE_ISDN */
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_NORMAL), ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_NORMAL);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_INTGRP),
ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_INTERGROUP);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_SP_PRI), ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_SP_PRI);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_PAT_3), ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_PAT3);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_PING), ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_PING_RING);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_PAT_5), ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_PAT5);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_PAT_6), ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_PAT6);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_PAT_7), ToneGenerator.TONE_CDMA_CALL_SIGNAL_ISDN_PAT7);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_ISDN, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_ISDN_OFF), ToneGenerator.TONE_CDMA_SIGNAL_OFF);
/* SIGNAL_TYPE_TONE */
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_DIAL), ToneGenerator.TONE_CDMA_DIAL_TONE_LITE);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_RING), ToneGenerator.TONE_CDMA_NETWORK_USA_RINGBACK);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_INT), ToneGenerator.TONE_SUP_INTERCEPT);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_ABB_INT), ToneGenerator.TONE_SUP_INTERCEPT_ABBREV);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_REORDER), ToneGenerator.TONE_CDMA_REORDER);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_ABB_RE), ToneGenerator.TONE_CDMA_ABBR_REORDER);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_BUSY), ToneGenerator.TONE_CDMA_NETWORK_BUSY);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_CONFIRM), ToneGenerator.TONE_SUP_CONFIRM);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_ANSWER), ToneGenerator.TONE_CDMA_ANSWER);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_CALL_W), ToneGenerator.TONE_CDMA_NETWORK_CALLWAITING);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_PIP), ToneGenerator.TONE_CDMA_PIP);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_TONE, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_TONE_NO_TONE), ToneGenerator.TONE_CDMA_SIGNAL_OFF);
/* SIGNAL_TYPE_IS54B */
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_L), ToneGenerator.TONE_CDMA_HIGH_L);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_L), ToneGenerator.TONE_CDMA_MED_L);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_L), ToneGenerator.TONE_CDMA_LOW_L);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_SS), ToneGenerator.TONE_CDMA_HIGH_SS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_SS), ToneGenerator.TONE_CDMA_MED_SS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_SS), ToneGenerator.TONE_CDMA_LOW_SS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_SSL), ToneGenerator.TONE_CDMA_HIGH_SSL);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_SSL), ToneGenerator.TONE_CDMA_MED_SSL);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_SSL), ToneGenerator.TONE_CDMA_LOW_SSL);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_SS_2), ToneGenerator.TONE_CDMA_HIGH_SS_2);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_SS_2), ToneGenerator.TONE_CDMA_MED_SS_2);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_SS_2), ToneGenerator.TONE_CDMA_LOW_SS_2);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_SLS), ToneGenerator.TONE_CDMA_HIGH_SLS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_SLS), ToneGenerator.TONE_CDMA_MED_SLS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_SLS), ToneGenerator.TONE_CDMA_LOW_SLS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_S_X4), ToneGenerator.TONE_CDMA_HIGH_S_X4);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_S_X4), ToneGenerator.TONE_CDMA_MED_S_X4);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_S_X4), ToneGenerator.TONE_CDMA_LOW_S_X4);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_PBX_L), ToneGenerator.TONE_CDMA_HIGH_PBX_L);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_PBX_L), ToneGenerator.TONE_CDMA_MED_PBX_L);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_PBX_L), ToneGenerator.TONE_CDMA_LOW_PBX_L);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_PBX_SS), ToneGenerator.TONE_CDMA_HIGH_PBX_SS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_PBX_SS), ToneGenerator.TONE_CDMA_MED_PBX_SS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_PBX_SS), ToneGenerator.TONE_CDMA_LOW_PBX_SS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_PBX_SSL), ToneGenerator.TONE_CDMA_HIGH_PBX_SSL);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_PBX_SSL), ToneGenerator.TONE_CDMA_MED_PBX_SSL);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_PBX_SSL), ToneGenerator.TONE_CDMA_LOW_PBX_SSL);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_PBX_SLS), ToneGenerator.TONE_CDMA_HIGH_PBX_SLS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_PBX_SLS), ToneGenerator.TONE_CDMA_MED_PBX_SLS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_PBX_SLS), ToneGenerator.TONE_CDMA_LOW_PBX_SLS);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_HIGH,
IS95_CONST_IR_SIG_IS54B_PBX_S_X4), ToneGenerator.TONE_CDMA_HIGH_PBX_S_X4);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_MED,
IS95_CONST_IR_SIG_IS54B_PBX_S_X4), ToneGenerator.TONE_CDMA_MED_PBX_S_X4);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, IS95_CONST_IR_ALERT_LOW,
IS95_CONST_IR_SIG_IS54B_PBX_S_X4), ToneGenerator.TONE_CDMA_LOW_PBX_S_X4);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_IS54B, TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN,
IS95_CONST_IR_SIG_IS54B_NO_TONE), ToneGenerator.TONE_CDMA_SIGNAL_OFF);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_USR_DEFD_ALERT,
TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN, IS95_CONST_IR_SIG_TONE_ABBR_ALRT),
ToneGenerator.TONE_CDMA_ABBR_ALERT);
hm.put(signalParamHash(IS95_CONST_IR_SIGNAL_USR_DEFD_ALERT,
TAPIAMSSCDMA_SIGNAL_PITCH_UNKNOWN, IS95_CONST_IR_SIG_TONE_NO_TONE),
ToneGenerator.TONE_CDMA_ABBR_ALERT);
}
    // Suppress the default constructor for noninstantiability: this class is a
    // static lookup-table holder and must never be instantiated.
    private SignalToneUtil() {
    }
}
| gpl-2.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jdi/Scenarios/invokeMethod/redefineclasses001b.java | 2974 | /*
* Copyright (c) 2002, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package nsk.jdi.Scenarios.invokeMethod;
import nsk.share.*;
import nsk.share.jpda.*;
import nsk.share.jdi.*;
import java.io.*;
/**
 * <code>redefineclasses001b</code> is the debuggee's part of the redefineclasses001 test.
 */
public class redefineclasses001b extends Thread{

    // States reported through the static "flag" field; the debugger inspects it
    // to determine which version of runIt() executed.
    public final static int INITIAL_VALUE = 0;
    public final static int BEFORE_REDEFINITION = 1;
    public final static int AFTER_REDEFINITION = 2;

    public static boolean loadClass = false;

    // Current state; runIt() overwrites it (that assignment is the redefinition
    // target - see the marker comment inside runIt).
    public static int flag = INITIAL_VALUE;

    // Names presumably used by the debugger side to look the method and field up
    // via JDI - TODO confirm against the debugger part of the test.
    public final static String methodName = "runIt";
    public final static String flagName = "flag";

    // Monitors used to hand control back and forth between debuggee and debugger.
    public static Object waitStarting = new Object();
    public static Object waitFinishing = new Object();

    // Set to false just before notifying waitStarting and to true right after,
    // so the other side can detect whether the notification already happened.
    public volatile static boolean notified = false;

    public void run() {
        runIt(true);
    }

    // Sets the flag, wakes any thread blocked on waitStarting, and then
    // (when doWait is true) parks on waitFinishing until released.
    public static void runIt(boolean doWait) {
        flag = BEFORE_REDEFINITION;
        // ^^^^^^^^^^^^^^^^^^^ it will be redefined
        // System.out.println("runIt::notify...");
        notified = false;
        synchronized(waitStarting) {
            waitStarting.notify();
        }
        notified = true;
        System.out.println("runIt::notified...");
        if (!doWait) {
            return;
        }
        synchronized(waitFinishing) {
            try {
                waitFinishing.wait();
            } catch(InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    // Maps a flag value to a readable name for log messages.
    public static String flag2String(int value) {
        switch (value) {
            case INITIAL_VALUE:
                return "INITIAL_VALUE(" + INITIAL_VALUE + ")";
            case BEFORE_REDEFINITION:
                return "BEFORE_REDEFINITION(" + BEFORE_REDEFINITION + ")";
            case AFTER_REDEFINITION:
                return "AFTER_REDEFINITION(" + AFTER_REDEFINITION + ")";
            default:
                return "UNKNOWN_VALUE";
        }
    }
}
| gpl-2.0 |
kumarrus/voltdb | third_party/java/src/au/com/bytecode/opencsv_voltpatches/CSVParser.java | 13793 | package au.com.bytecode.opencsv_voltpatches;
/**
Copyright 2005 Bytecode Pty Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* A very simple CSV parser released under a commercial-friendly license.
* This just implements splitting a single line into fields.
*
* @author Glen Smith
* @author Rainer Pruy
*/
/**
 * A very simple CSV parser released under a commercial-friendly license.
 * This just implements splitting a single line into fields.
 * <p>
 * This copy carries a VoltDB patch: a field consisting of {@code \N} (the
 * MySQL-style SQL NULL marker), optionally quoted, is passed through verbatim
 * instead of being unescaped - see {@link #isNullcaseForEscape}.
 *
 * @author Glen Smith
 * @author Rainer Pruy
 */
public class CSVParser {

    /** Field delimiter. */
    private final char separator;

    /** Character that quotes fields containing separators or line breaks. */
    private final char quotechar;

    /** Character that escapes a quote or separator inside a field. */
    private final char escape;

    /** If true, characters outside quotes are discarded. */
    private final boolean strictQuotes;

    /**
     * Partial field carried over from the previous call when a quoted field
     * spans multiple lines; null when no continuation is outstanding.
     */
    private String pending;

    /** True while characters are being accumulated into the current field. */
    private boolean inField = false;

    /** If true, white space in front of an embedded quote is discarded. */
    private final boolean ignoreLeadingWhiteSpace;

    /**
     * The default separator to use if none is supplied to the constructor.
     */
    public static final char DEFAULT_SEPARATOR = ',';

    /** Initial capacity of the builder used to accumulate a field. */
    public static final int INITIAL_READ_SIZE = 128;

    /**
     * The default quote character to use if none is supplied to the
     * constructor.
     */
    public static final char DEFAULT_QUOTE_CHARACTER = '"';

    /**
     * The default escape character to use if none is supplied to the
     * constructor.
     */
    public static final char DEFAULT_ESCAPE_CHARACTER = '\\';

    /**
     * The default strict quote behavior to use if none is supplied to the
     * constructor.
     */
    public static final boolean DEFAULT_STRICT_QUOTES = false;

    /**
     * The default leading whitespace behavior to use if none is supplied to the
     * constructor.
     */
    public static final boolean DEFAULT_IGNORE_LEADING_WHITESPACE = true;

    /**
     * This is the "null" character - if a value is set to this then it is ignored.
     * I.E. if the quote character is set to null then there is no quote character.
     */
    public static final char NULL_CHARACTER = '\0';

    /**
     * Constructs CSVParser using a comma for the separator.
     */
    public CSVParser() {
        this(DEFAULT_SEPARATOR, DEFAULT_QUOTE_CHARACTER, DEFAULT_ESCAPE_CHARACTER);
    }

    /**
     * Constructs CSVParser with supplied separator.
     *
     * @param separator the delimiter to use for separating entries.
     */
    public CSVParser(char separator) {
        this(separator, DEFAULT_QUOTE_CHARACTER, DEFAULT_ESCAPE_CHARACTER);
    }

    /**
     * Constructs CSVParser with supplied separator and quote char.
     *
     * @param separator the delimiter to use for separating entries
     * @param quotechar the character to use for quoted elements
     */
    public CSVParser(char separator, char quotechar) {
        this(separator, quotechar, DEFAULT_ESCAPE_CHARACTER);
    }

    /**
     * Constructs CSVParser with supplied separator, quote char and escape char.
     *
     * @param separator the delimiter to use for separating entries
     * @param quotechar the character to use for quoted elements
     * @param escape the character to use for escaping a separator or quote
     */
    public CSVParser(char separator, char quotechar, char escape) {
        this(separator, quotechar, escape, DEFAULT_STRICT_QUOTES);
    }

    /**
     * Constructs CSVParser with supplied separator, quote and escape chars.
     * Allows setting the "strict quotes" flag.
     *
     * @param separator the delimiter to use for separating entries
     * @param quotechar the character to use for quoted elements
     * @param escape the character to use for escaping a separator or quote
     * @param strictQuotes if true, characters outside the quotes are ignored
     */
    public CSVParser(char separator, char quotechar, char escape, boolean strictQuotes) {
        this(separator, quotechar, escape, strictQuotes, DEFAULT_IGNORE_LEADING_WHITESPACE);
    }

    /**
     * Constructs CSVParser with supplied separator, quote and escape chars.
     * Allows setting the "strict quotes" and "ignore leading whitespace" flags.
     *
     * @param separator the delimiter to use for separating entries
     * @param quotechar the character to use for quoted elements
     * @param escape the character to use for escaping a separator or quote
     * @param strictQuotes if true, characters outside the quotes are ignored
     * @param ignoreLeadingWhiteSpace if true, white space in front of a quote in a field is ignored
     * @throws UnsupportedOperationException if the control characters are not distinct
     *         or the separator is undefined
     */
    public CSVParser(char separator, char quotechar, char escape, boolean strictQuotes, boolean ignoreLeadingWhiteSpace) {
        if (anyCharactersAreTheSame(separator, quotechar, escape)) {
            throw new UnsupportedOperationException("The separator, quote, and escape characters must be different!");
        }
        if (separator == NULL_CHARACTER) {
            throw new UnsupportedOperationException("The separator character must be defined!");
        }
        this.separator = separator;
        this.quotechar = quotechar;
        this.escape = escape;
        this.strictQuotes = strictQuotes;
        this.ignoreLeadingWhiteSpace = ignoreLeadingWhiteSpace;
    }

    // Two control characters collide only when both are defined (non-NULL).
    private boolean anyCharactersAreTheSame(char separator, char quotechar, char escape) {
        return isSameCharacter(separator, quotechar) || isSameCharacter(separator, escape) || isSameCharacter(quotechar, escape);
    }

    private boolean isSameCharacter(char c1, char c2) {
        return c1 != NULL_CHARACTER && c1 == c2;
    }

    /**
     * @return true if something was left over from last call(s)
     */
    public boolean isPending() {
        return pending != null;
    }

    /**
     * Parses a line that may be the continuation of (or be continued by) a
     * multi-line quoted field.
     *
     * @param nextLine the line to parse; null flushes any pending field
     * @return the parsed tokens, or null when there is nothing to return
     * @throws IOException if bad things happen during the read
     */
    public String[] parseLineMulti(String nextLine) throws IOException {
        List<Object> list = parseLine(nextLine, true);
        if (list != null) {
            return list.toArray(new String[list.size()]);
        }
        return null;
    }

    /**
     * Parses a single, self-contained CSV line.
     *
     * @param nextLine the line to parse
     * @return the parsed tokens, or null if nextLine is null
     * @throws IOException if an unterminated quoted field is found
     */
    public String[] parseLine(String nextLine) throws IOException {
        List<Object> list = parseLine(nextLine, false);
        if (list != null) {
            return list.toArray(new String[list.size()]);
        }
        return null;
    }

    /**
     * Same as {@link #parseLine(String)} but returns the tokens as a list.
     */
    public List<Object> parseLineList(String nextLine) throws IOException {
        return parseLine(nextLine, false);
    }

    /**
     * Parses an incoming String and returns the list of elements.
     *
     * @param nextLine the string to parse; null signals end of input and flushes
     *                 any pending multi-line field (in multi mode)
     * @param multi    true when a quoted field may continue onto the next line
     * @return the tokenized elements, or null if nextLine is null and nothing
     *         is pending
     * @throws IOException if an unterminated quoted field is found in
     *         single-line mode
     */
    private List<Object> parseLine(String nextLine, boolean multi) throws IOException {

        // A single-line parse never continues a previous line.
        if (!multi && pending != null) {
            pending = null;
        }

        if (nextLine == null) {
            if (pending != null) {
                // Flush the carried-over field as the final token. (A previous
                // version discarded it and returned an empty list.)
                String s = pending;
                pending = null;
                List<Object> lastToken = new ArrayList<Object>();
                lastToken.add(s);
                return lastToken;
            } else {
                return null;
            }
        }

        List<Object> tokensOnThisLine = new ArrayList<Object>();
        StringBuilder sb = new StringBuilder(INITIAL_READ_SIZE);
        boolean inQuotes = false;
        if (pending != null) {
            // Resume the quoted field started on an earlier line.
            sb.append(pending);
            pending = null;
            inQuotes = true;
        }
        for (int i = 0; i < nextLine.length(); i++) {

            char c = nextLine.charAt(i);
            if (c == this.escape) {
                if (isNullcaseForEscape(nextLine, inQuotes, i, sb.toString())) {
                    // VoltDB patch: keep the backslash of a \N NULL marker.
                    sb.append(c);
                    inField = true;
                } else if (isNextCharacterEscapable(nextLine, inQuotes || inField, i)) {
                    sb.append(nextLine.charAt(i + 1));
                    i++;
                }
            } else if (c == quotechar) {
                if (isNextCharacterEscapedQuote(nextLine, inQuotes || inField, i)) {
                    sb.append(nextLine.charAt(i + 1));
                    i++;
                } else {
                    // the tricky case of an embedded quote in the middle: a,bc"d"ef,g
                    if (!strictQuotes) {
                        if (i > 2 //not on the beginning of the line
                                && nextLine.charAt(i - 1) != this.separator //not at the beginning of an escape sequence
                                && nextLine.length() > (i + 1) &&
                                nextLine.charAt(i + 1) != this.separator //not at the end of an escape sequence
                        ) {
                            if (ignoreLeadingWhiteSpace && sb.length() > 0 && isAllWhiteSpace(sb)) {
                                sb.setLength(0); //discard white space leading up to quote
                            } else {
                                sb.append(c);
                            }
                        }
                    }
                    inQuotes = !inQuotes;
                }
                inField = !inField;
            } else if (c == separator && !inQuotes) {
                tokensOnThisLine.add(sb.toString());
                sb.setLength(0); // start work on next token
                inField = false;
            } else {
                if (!strictQuotes || inQuotes) {
                    sb.append(c);
                    inField = true;
                }
            }
        }
        // line is done - check status
        if (inQuotes) {
            if (multi) {
                // continuing a quoted section, re-append newline
                sb.append("\n");
                pending = sb.toString();
                sb = null; // this partial content is not to be added to field list yet
            } else {
                throw new IOException("Un-terminated quoted field at end of CSV line");
            }
        }
        if (sb != null) {
            tokensOnThisLine.add(sb.toString());
        }
        return tokensOnThisLine;
    }

    /**
     * precondition: the current character is a quote or an escape
     *
     * @param nextLine the current line
     * @param inQuotes true if the current context is quoted
     * @param i current index in line
     * @return true if the following character is a quote
     */
    private boolean isNextCharacterEscapedQuote(String nextLine, boolean inQuotes, int i) {
        return inQuotes // we are in quotes, therefore there can be escaped quotes in here.
                && nextLine.length() > (i + 1) // there is indeed another character to check.
                && nextLine.charAt(i + 1) == quotechar;
    }

    /**
     * precondition: the current character is an escape
     *
     * @param nextLine the current line
     * @param inQuotes true if the current context is quoted
     * @param i current index in line
     * @return true if the following character is a quote or an escape
     */
    protected boolean isNextCharacterEscapable(String nextLine, boolean inQuotes, int i) {
        return inQuotes // we are in quotes, therefore there can be escaped quotes in here.
                && nextLine.length() > (i + 1) // there is indeed another character to check.
                && (nextLine.charAt(i + 1) == quotechar || nextLine.charAt(i + 1) == this.escape);
    }

    /**
     * Determines whether the escape character at position {@code i} begins a
     * {@code \N} SQL NULL marker (VoltDB patch). This is the case when the
     * field content accumulated so far is only whitespace plus at most a
     * single opening quote (which requires a quoted context), the next
     * character is 'N', and everything after the 'N' up to the next separator
     * (or end of line) is only whitespace plus at most a single closing quote.
     *
     * @param nextLine the current line
     * @param inQuotes true if the current context is quoted
     * @param i        index of the escape character
     * @param sb       the field content accumulated so far
     * @return true when the escape starts a {@code \N} NULL marker
     */
    protected boolean isNullcaseForEscape(String nextLine, boolean inQuotes, int i, String sb) {
        boolean result = false, hasmet = false;
        // The field so far may hold only whitespace and one quote (if quoted).
        for (int k = 0; k < sb.length(); k++) {
            char c = sb.charAt(k);
            if (Character.isWhitespace(c)) continue;
            else if (c == quotechar) {
                if (!inQuotes || hasmet) return false;
                hasmet = true;
                continue;
            } else
                return false;
        }
        hasmet = false;
        if (nextLine.length() > (i + 1) && (nextLine.charAt(i + 1) == 'N' )) {
            // After the 'N': only whitespace and one closing quote may appear
            // before the separator / end of line.
            for (int j=i+2;j < nextLine.length(); j++) {
                char c = nextLine.charAt(j);
                if (Character.isWhitespace(c)) continue;
                else if (c == quotechar) {
                    if (!inQuotes || hasmet) return false;
                    hasmet = true;
                    continue;
                } else if (c == separator) break;
                else return false;
            }
            result = true;
        }
        return result;
    }

    /**
     * precondition: sb.length() > 0
     *
     * @param sb A sequence of characters to examine
     * @return true if every character in the sequence is whitespace
     */
    protected boolean isAllWhiteSpace(CharSequence sb) {
        for (int i = 0; i < sb.length(); i++) {
            char c = sb.charAt(i);
            if (!Character.isWhitespace(c)) {
                return false;
            }
        }
        return true;
    }
}
| agpl-3.0 |
jflnet/XADES4J | src/main/java/xades4j/properties/data/SignaturePolicyData.java | 1800 | /*
* XAdES4j - A Java library for generation and verification of XAdES signatures.
* Copyright (C) 2010 Luis Goncalves.
*
* XAdES4j is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 3 of the License, or any later version.
*
* XAdES4j is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
* You should have received a copy of the GNU Lesser General Public License along
* with XAdES4j. If not, see <http://www.gnu.org/licenses/>.
*/
package xades4j.properties.data;
import xades4j.properties.ObjectIdentifier;
/**
 * Property data object for the signature policy qualifying property: the
 * policy's object identifier together with the digest algorithm and digest
 * value computed over the policy document. An instance created through the
 * no-argument constructor represents an <i>implied</i> policy, where every
 * component is {@code null}.
 *
 * @author Luís
 */
public final class SignaturePolicyData implements PropertyDataObject
{
    private final ObjectIdentifier identifier;
    private final String digestAlgorithm;
    private final byte[] digestValue;

    public SignaturePolicyData(
            ObjectIdentifier identifier,
            String digestAlgorithm,
            byte[] digestValue)
    {
        this.identifier = identifier;
        this.digestAlgorithm = digestAlgorithm;
        this.digestValue = digestValue;
    }

    public SignaturePolicyData()
    {
        // Implied policy: no identifier and no digest information.
        this(null, null, null);
    }

    public ObjectIdentifier getIdentifier()
    {
        return identifier;
    }

    public String getDigestAlgorithm()
    {
        return digestAlgorithm;
    }

    public byte[] getDigestValue()
    {
        return digestValue;
    }
}
| lgpl-3.0 |
robin13/elasticsearch | distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java | 4103 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.test.rest;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.Request;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.containsString;
public class RequestsWithoutContentIT extends ESRestTestCase {

    /** Error fragment returned by endpoints that require a request body. */
    private static final String BODY_REQUIRED = "request body is required";

    /** Error fragment returned by endpoints that accept a body or a source parameter. */
    private static final String BODY_OR_SOURCE_REQUIRED = "request body or source parameter is required";

    public void testIndexMissingBody() throws IOException {
        expectBadRequest(randomBoolean() ? "POST" : "PUT", "/idx/_doc/123", BODY_REQUIRED);
    }

    public void testBulkMissingBody() throws IOException {
        expectBadRequest(randomBoolean() ? "POST" : "PUT", "/_bulk", BODY_REQUIRED);
    }

    public void testPutSettingsMissingBody() throws IOException {
        expectBadRequest("PUT", "/_settings", BODY_REQUIRED);
    }

    public void testPutMappingsMissingBody() throws IOException {
        expectBadRequest(randomBoolean() ? "POST" : "PUT", "/test_index/_mapping", BODY_REQUIRED);
    }

    public void testPutIndexTemplateMissingBody() throws IOException {
        expectBadRequest(randomBoolean() ? "PUT" : "POST", "/_template/my_template", BODY_REQUIRED);
    }

    public void testMultiSearchMissingBody() throws IOException {
        expectBadRequest(randomBoolean() ? "POST" : "GET", "/_msearch", BODY_OR_SOURCE_REQUIRED);
    }

    public void testPutPipelineMissingBody() throws IOException {
        expectBadRequest("PUT", "/_ingest/pipeline/my_pipeline", BODY_OR_SOURCE_REQUIRED);
    }

    public void testSimulatePipelineMissingBody() throws IOException {
        expectBadRequest(randomBoolean() ? "POST" : "GET", "/_ingest/pipeline/my_pipeline/_simulate", BODY_OR_SOURCE_REQUIRED);
    }

    public void testPutScriptMissingBody() throws IOException {
        expectBadRequest(randomBoolean() ? "POST" : "PUT", "/_scripts/lang", BODY_REQUIRED);
    }

    /**
     * Issues a body-less request and asserts the node rejects it with a 400
     * response whose message contains the expected fragment.
     */
    private static void expectBadRequest(String method, String endpoint, String message) throws IOException {
        ResponseException exception = expectThrows(ResponseException.class,
                () -> client().performRequest(new Request(method, endpoint)));
        assertEquals(400, exception.getResponse().getStatusLine().getStatusCode());
        assertThat(exception.getMessage(), containsString(message));
    }
}
| apache-2.0 |
robin13/elasticsearch | server/src/main/java/org/elasticsearch/search/slice/SliceQuery.java | 1843 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.slice;
import org.apache.lucene.search.Query;
import java.util.Objects;
/**
 * Base class for {@link Query} implementations that deterministically assign
 * each document to one of {@code max} slices by hashing a field value.
 */
public abstract class SliceQuery extends Query {

    private final String field;
    private final int id;
    private final int max;

    /**
     * @param field The name of the field
     * @param id The id of the slice
     * @param max The maximum number of slices
     */
    public SliceQuery(String field, int id, int max) {
        this.field = field;
        this.id = id;
        this.max = max;
    }

    /** Returns true when the hashed value falls into this slice. */
    protected final boolean contains(long value) {
        // floorMod keeps the result non-negative for negative hash values.
        return Math.floorMod(value, max) == id;
    }

    public String getField() {
        return field;
    }

    public int getId() {
        return id;
    }

    public int getMax() {
        return max;
    }

    @Override
    public boolean equals(Object o) {
        if (sameClassAs(o) == false) {
            return false;
        }
        SliceQuery other = (SliceQuery) o;
        return id == other.id && max == other.max && field.equals(other.field);
    }

    @Override
    public int hashCode() {
        return Objects.hash(classHash(), field, id, max);
    }

    @Override
    public String toString(String f) {
        return getClass().getSimpleName() + "[field=" + field + ", id=" + id + ", max=" + max + "]";
    }
}
| apache-2.0 |
vitalif/gitblit | src/main/java/com/gitblit/transport/ssh/UsernamePasswordAuthenticator.java | 1963 | /*
* Copyright 2014 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.gitblit.transport.ssh;
import java.util.Locale;
import org.apache.sshd.server.auth.password.PasswordAuthenticator;
import org.apache.sshd.server.session.ServerSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gitblit.manager.IAuthenticationManager;
import com.gitblit.models.UserModel;
/**
 * Authenticates an SSH session with username/password credentials, delegating
 * the credential check to the application's authentication manager.
 *
 * @author James Moger
 */
public class UsernamePasswordAuthenticator implements PasswordAuthenticator {

    protected final Logger log = LoggerFactory.getLogger(getClass());

    protected final IAuthenticationManager authManager;

    public UsernamePasswordAuthenticator(IAuthenticationManager authManager) {
        this.authManager = authManager;
    }

    @Override
    public boolean authenticate(String username, String password, ServerSession session) {
        SshDaemonClient client = session.getAttribute(SshDaemonClient.KEY);
        if (client.getUser() != null) {
            // Another authenticator already identified this session.
            log.info("{} has already authenticated!", username);
            return true;
        }

        username = username.toLowerCase(Locale.US);
        UserModel user = authManager.authenticate(username, password.toCharArray(), null);
        if (user == null) {
            log.warn("could not authenticate {} ({}) for SSH using the supplied password", username, client.getRemoteAddress());
            return false;
        }
        client.setUser(user);
        return true;
    }
}
| apache-2.0 |
droolsjbpm/drools | kie-pmml/src/main/java/org/kie/pmml/pmml_4_2/model/mining/MiningSegmentTransfer.java | 6257 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.pmml_4_2.model.mining;
import java.util.HashMap;
import java.util.Map;
import org.kie.api.pmml.PMML4Result;
import org.kie.api.pmml.PMMLRequestData;
/**
 * Carries result values produced by one segment of a segmented PMML mining
 * model into the request for another segment. Mappings from request field
 * names to result field names drive the lazy construction of the outbound
 * {@code PMMLRequestData}.
 */
public class MiningSegmentTransfer {
    private String correlationId;
    private String segmentationId;
    private String fromSegmentId;
    private String toSegmentId;
    private Map<String, String> requestFromResultMap;
    private PMMLRequestData outboundRequest;
    private PMML4Result inboundResult;

    public MiningSegmentTransfer(String segmentationId, String fromSegmentId, String toSegmentId) {
        this.segmentationId = segmentationId;
        this.fromSegmentId = fromSegmentId;
        this.toSegmentId = toSegmentId;
        this.requestFromResultMap = new HashMap<>();
    }

    public MiningSegmentTransfer(PMML4Result inboundResult, String toSegmentId) {
        // Derive correlation/segmentation/source-segment ids from the result.
        this(inboundResult.getSegmentationId(), inboundResult.getSegmentId(), toSegmentId);
        this.inboundResult = inboundResult;
        this.correlationId = inboundResult.getCorrelationId();
    }

    public String getCorrelationId() {
        return correlationId;
    }

    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }

    public String getSegmentationId() {
        return segmentationId;
    }

    public void setSegmentationId(String segmentationId) {
        this.segmentationId = segmentationId;
    }

    public String getFromSegmentId() {
        return fromSegmentId;
    }

    public void setFromSegmentId(String fromSegmentId) {
        this.fromSegmentId = fromSegmentId;
    }

    public String getToSegmentId() {
        return toSegmentId;
    }

    public void setToSegmentId(String toSegmentId) {
        this.toSegmentId = toSegmentId;
    }

    /**
     * Lazily builds the outbound request, copying each mapped result value
     * (skipping nulls) into a request parameter.
     */
    public PMMLRequestData getOutboundRequest() {
        if (outboundRequest == null) {
            outboundRequest = new PMMLRequestData(this.correlationId);
            outboundRequest.setSource("MiningSegmentTransfer:"+this.fromSegmentId+"-"+this.toSegmentId);
            for (Map.Entry<String, String> mapping : requestFromResultMap.entrySet()) {
                Object resultFieldValue = getValueFromResult(mapping.getValue());
                if (resultFieldValue != null) {
                    outboundRequest.addRequestParam(mapping.getKey(), resultFieldValue);
                }
            }
        }
        return outboundRequest;
    }

    /** Returns a defensive copy of the request-field to result-field mappings. */
    public Map<String,String> getRequestFromResultMap() {
        return new HashMap<>(this.requestFromResultMap);
    }

    // Resolves a result field; "object.field" addresses a field nested in a
    // named result object, a bare name addresses a top-level result value.
    private Object getValueFromResult(String fieldName) {
        if (!fieldName.contains(".")) {
            return inboundResult.getResultValue(fieldName, null);
        }
        String[] fieldParts = fieldName.split("\\.");
        if (fieldParts == null || fieldParts.length != 2) {
            throw new IllegalStateException("MiningSegmentTransfer: Result field name is invalid - "+fieldName);
        }
        return inboundResult.getResultValue(fieldParts[0], fieldParts[1]);
    }

    public void setOutboundRequest(PMMLRequestData outboundRequest) {
        this.outboundRequest = outboundRequest;
    }

    public PMML4Result getInboundResult() {
        return inboundResult;
    }

    public void setInboundResult(PMML4Result inboundResult) {
        this.inboundResult = inboundResult;
    }

    /** Returns the live (mutable) mapping of request field to result field names. */
    public Map<String, String> getResultFieldNameToRequestFieldName() {
        return requestFromResultMap;
    }

    public String addResultToRequestMapping(String resultFieldName, String requestFieldName) {
        return this.requestFromResultMap.put(requestFieldName, resultFieldName);
    }

    public void addResultToRequestMappings(Map<String,String> collection) {
        this.requestFromResultMap.putAll(collection);
    }

    @Override
    public int hashCode() {
        // Equivalent to the canonical 31-based accumulation over
        // (correlationId, fromSegmentId, segmentationId, toSegmentId).
        int result = 31 + (correlationId == null ? 0 : correlationId.hashCode());
        result = 31 * result + (fromSegmentId == null ? 0 : fromSegmentId.hashCode());
        result = 31 * result + (segmentationId == null ? 0 : segmentationId.hashCode());
        result = 31 * result + (toSegmentId == null ? 0 : toSegmentId.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        MiningSegmentTransfer other = (MiningSegmentTransfer) obj;
        return sameField(correlationId, other.correlationId)
                && sameField(fromSegmentId, other.fromSegmentId)
                && sameField(segmentationId, other.segmentationId)
                && sameField(toSegmentId, other.toSegmentId);
    }

    // Null-safe string equality used by equals().
    private static boolean sameField(String a, String b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public String toString() {
        // NOTE: intentionally triggers lazy creation of the outbound request.
        return "MiningSegmentTransfer ["
                + "correlationId=" + correlationId + ", "
                + "segmentationId=" + segmentationId + ", "
                + "fromSegmentId=" + fromSegmentId + ", "
                + "toSegmentId=" + toSegmentId + ", "
                + "resultFieldNameToRequestFieldName=" + requestFromResultMap + ", "
                + "inboundResult=" + inboundResult + ", "
                + "outboundRequest=" + getOutboundRequest()
                + "]";
    }
}
| apache-2.0 |
haikuowuya/android_system_code | src/android/test/ClassPathPackageInfoSource.java | 12482 | /*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.test;
import android.util.Log;
import com.google.android.collect.Maps;
import com.google.android.collect.Sets;
import dalvik.system.DexFile;
import java.io.File;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* Generate {@link ClassPathPackageInfo}s by scanning apk paths.
*
* {@hide} Not needed for 1.0 SDK.
*/
public class ClassPathPackageInfoSource {
private static final String CLASS_EXTENSION = ".class";
private static final ClassLoader CLASS_LOADER
= ClassPathPackageInfoSource.class.getClassLoader();
private final SimpleCache<String, ClassPathPackageInfo> cache =
new SimpleCache<String, ClassPathPackageInfo>() {
@Override
protected ClassPathPackageInfo load(String pkgName) {
return createPackageInfo(pkgName);
}
};
// The class path of the running application
private final String[] classPath;
private static String[] apkPaths;
// A cache of jar file contents
private final Map<File, Set<String>> jarFiles = Maps.newHashMap();
private ClassLoader classLoader;
    // Captures the application's class path once at construction time; later
    // package lookups walk these entries.
    ClassPathPackageInfoSource() {
        classPath = getClassPath();
    }
    /**
     * Sets the apk directories/files scanned when a class-path entry is not
     * itself an apk. Shared by all instances of this class.
     */
    public static void setApkPaths(String[] apkPaths) {
        ClassPathPackageInfoSource.apkPaths = apkPaths;
    }
    /**
     * Returns the {@link ClassPathPackageInfo} for the named package, building
     * and caching it on first use.
     */
    public ClassPathPackageInfo getPackageInfo(String pkgName) {
        return cache.get(pkgName);
    }
    /**
     * Builds the {@link ClassPathPackageInfo} for a package: collects the class
     * and sub-package names found on the class path, then loads each top-level
     * class (without initializing it) so it can be handed to the test runner.
     */
    private ClassPathPackageInfo createPackageInfo(String packageName) {
        Set<String> subpackageNames = new TreeSet<String>();
        Set<String> classNames = new TreeSet<String>();
        Set<Class<?>> topLevelClasses = Sets.newHashSet();
        findClasses(packageName, classNames, subpackageNames);
        for (String className : classNames) {
            if (className.endsWith(".R") || className.endsWith(".Manifest")) {
                // Don't try to load classes that are generated. They usually aren't in test apks.
                continue;
            }
            try {
                // We get errors in the emulator if we don't use the caller's class loader.
                topLevelClasses.add(Class.forName(className, false,
                        (classLoader != null) ? classLoader : CLASS_LOADER));
            } catch (ClassNotFoundException e) {
                // Should not happen unless there is a generated class that is not included in
                // the .apk.
                Log.w("ClassPathPackageInfoSource", "Cannot load class. "
                        + "Make sure it is in your apk. Class name: '" + className
                        + "'. Message: " + e.getMessage(), e);
            }
        }
        return new ClassPathPackageInfo(this, packageName, subpackageNames,
                topLevelClasses);
    }
/**
 * Finds all classes and sub packages that are below {@code packageName} and
 * adds them to the respective sets, searching every entry on the application
 * class path.
 *
 * <p>Entries ending in ".apk" are scanned directly; for any other existing
 * entry the configured {@code apkPaths} directories are scanned for apk files
 * instead. NOTE(review): the jar/directory helpers in this class are not
 * invoked from here — confirm whether that is intentional.
 *
 * @param packageName     package to search for
 * @param classNames      receives fully qualified names of top-level classes
 * @param subpackageNames receives names of sub packages
 */
private void findClasses(String packageName, Set<String> classNames,
        Set<String> subpackageNames) {
    // (removed locals 'packagePrefix'/'pathPrefix' — they were computed here
    // but never used by this method)
    for (String entryName : classPath) {
        File classPathEntry = new File(entryName);
        // Forge may not have brought over every item in the classpath. Be
        // polite and ignore missing entries.
        if (classPathEntry.exists()) {
            try {
                if (entryName.endsWith(".apk")) {
                    findClassesInApk(entryName, packageName, classNames, subpackageNames);
                } else {
                    // scan the directories that contain apk files.
                    for (String apkPath : apkPaths) {
                        File file = new File(apkPath);
                        scanForApkFiles(file, packageName, classNames, subpackageNames);
                    }
                }
            } catch (IOException e) {
                throw new AssertionError("Can't read classpath entry " +
                        entryName + ": " + e.getMessage());
            }
        }
    }
}
/**
 * Recursively walks {@code source}; every *.apk file found is handed to
 * {@link #findClassesInApk} with the given package and result sets.
 */
private void scanForApkFiles(File source, String packageName,
        Set<String> classNames, Set<String> subpackageNames) throws IOException {
    String path = source.getPath();
    if (path.endsWith(".apk")) {
        findClassesInApk(path, packageName, classNames, subpackageNames);
        return;
    }
    File[] children = source.listFiles();
    if (children == null) {
        return; // not a directory, or it could not be read
    }
    for (File child : children) {
        scanForApkFiles(child, packageName, classNames, subpackageNames);
    }
}
/**
 * Finds all classes and sub packages that are below the packageName and
 * add them to the respective sets. Searches the package in a class directory.
 *
 * @param classDir        class-path root directory
 * @param packagePrefix   package name including a trailing '.'
 * @param pathPrefix      the same prefix in path form ('/'-separated)
 * @param classNames      receives names of top-level classes found
 * @param subpackageNames receives names of direct sub packages
 */
private void findClassesInDirectory(File classDir,
        String packagePrefix, String pathPrefix, Set<String> classNames,
        Set<String> subpackageNames)
        throws IOException {
    File directory = new File(classDir, pathPrefix);
    if (!directory.exists()) {
        return;
    }
    // BUG FIX: listFiles() may return null even for an existing path (e.g. a
    // plain file, or an I/O error) — guard against the NPE the previous
    // for-each would throw, matching the null handling in scanForApkFiles().
    File[] files = directory.listFiles();
    if (files == null) {
        return;
    }
    for (File f : files) {
        String name = f.getName();
        if (name.endsWith(CLASS_EXTENSION) && isToplevelClass(name)) {
            classNames.add(packagePrefix + getClassName(name));
        } else if (f.isDirectory()) {
            subpackageNames.add(packagePrefix + name);
        }
    }
}
/**
 * Finds all classes and sub packages that are below the packageName and
 * add them to the respective sets. Searches the package in a single jar file.
 *
 * @param jarFile         jar archive to inspect (entries come from getJarEntries)
 * @param pathPrefix      package in path form with a trailing '/', e.g. "com/foo/"
 * @param classNames      receives fully qualified names of top-level classes
 * @param subpackageNames receives names of direct sub packages
 */
private void findClassesInJar(File jarFile, String pathPrefix,
        Set<String> classNames, Set<String> subpackageNames)
        throws IOException {
    Set<String> entryNames = getJarEntries(jarFile);
    // check if the Jar contains the package.
    if (!entryNames.contains(pathPrefix)) {
        return;
    }
    int prefixLength = pathPrefix.length();
    for (String entryName : entryNames) {
        if (entryName.startsWith(pathPrefix)) {
            if (entryName.endsWith(CLASS_EXTENSION)) {
                // check if the class is in the package itself or in one of its
                // subpackages.
                int index = entryName.indexOf('/', prefixLength);
                if (index >= 0) {
                    // a '/' after the prefix means the class sits in a sub package
                    String p = entryName.substring(0, index).replace('/', '.');
                    subpackageNames.add(p);
                } else if (isToplevelClass(entryName)) {
                    // direct member of the package: record the dotted class name
                    classNames.add(getClassName(entryName).replace('/', '.'));
                }
            }
        }
    }
}
/**
 * Finds all classes and sub packages that are below the packageName and
 * add them to the respective sets. Searches the package in a single apk file.
 *
 * @param apkPath         path of the .apk file whose dex entries are scanned
 * @param packageName     package to search for
 * @param classNames      receives names of top-level classes in the package
 * @param subpackageNames receives names of sub packages of the package
 */
private void findClassesInApk(String apkPath, String packageName,
        Set<String> classNames, Set<String> subpackageNames)
        throws IOException {
    DexFile dexFile = null;
    try {
        dexFile = new DexFile(apkPath);
        Enumeration<String> apkClassNames = dexFile.entries();
        while (apkClassNames.hasMoreElements()) {
            String className = apkClassNames.nextElement();
            if (className.startsWith(packageName)) {
                String subPackageName = packageName;
                int lastPackageSeparator = className.lastIndexOf('.');
                if (lastPackageSeparator > 0) {
                    subPackageName = className.substring(0, lastPackageSeparator);
                }
                if (subPackageName.length() > packageName.length()) {
                    subpackageNames.add(subPackageName);
                } else if (isToplevelClass(className)) {
                    classNames.add(className);
                }
            }
        }
    } catch (IOException e) {
        // BUG FIX: this warning was previously wrapped in "if (false)", so a
        // failure to read the apk was swallowed silently. Surface it so
        // unreadable apks are at least visible in the log.
        Log.w("ClassPathPackageInfoSource",
                "Error finding classes at apk path: " + apkPath, e);
    } finally {
        if (dexFile != null) {
            // Todo: figure out why closing causes a dalvik error resulting in vm shutdown.
            // dexFile.close();
        }
    }
}
/**
 * Gets the class and package entries from a Jar.
 *
 * <p>Results are cached per file in {@code jarFiles}, so each jar on the
 * class path is opened at most once.
 */
private Set<String> getJarEntries(File jarFile)
        throws IOException {
    Set<String> entryNames = jarFiles.get(jarFile);
    if (entryNames != null) {
        return entryNames;
    }
    entryNames = Sets.newHashSet();
    ZipFile zipFile = new ZipFile(jarFile);
    try {
        Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            String entryName = entries.nextElement().getName();
            if (entryName.endsWith(CLASS_EXTENSION)) {
                // add the entry name of the class
                entryNames.add(entryName);
                // add the entry name of the classes package, i.e. the entry name of
                // the directory that the class is in. Used to quickly skip jar files
                // if they do not contain a certain package.
                //
                // Also add parent packages so that a JAR that contains
                // pkg1/pkg2/Foo.class will be marked as containing pkg1/ in addition
                // to pkg1/pkg2/ and pkg1/pkg2/Foo.class. We're still interested in
                // JAR files that contains subpackages of a given package, even if
                // an intermediate package contains no direct classes.
                //
                // Classes in the default package will cause a single package named
                // "" to be added instead.
                int lastIndex = entryName.lastIndexOf('/');
                do {
                    String packageName = entryName.substring(0, lastIndex + 1);
                    entryNames.add(packageName);
                    lastIndex = entryName.lastIndexOf('/', lastIndex - 1);
                } while (lastIndex > 0);
            }
        }
    } finally {
        // BUG FIX: the ZipFile was previously never closed, leaking one open
        // file handle per distinct jar on the class path.
        zipFile.close();
    }
    jarFiles.put(jarFile, entryNames);
    return entryNames;
}
/**
 * Checks if a given file name represents a toplevel class, i.e. it contains
 * no '$' (the separator javac inserts into nested/anonymous class names).
 */
private static boolean isToplevelClass(String fileName) {
    return !fileName.contains("$");
}
/**
 * Given the file name of a class (e.g. "Foo.class"), returns the name with
 * the ".class" suffix stripped.
 */
private static String getClassName(String className) {
    return className.substring(0, className.length() - CLASS_EXTENSION.length());
}
/**
 * Gets the class path from the System Property "java.class.path" and splits
 * it up into the individual elements using the platform path separator.
 */
private static String[] getClassPath() {
    // Pattern.quote: the separator (':' or ';') must be treated literally, not as regex.
    String separator = System.getProperty("path.separator", ":");
    return System.getProperty("java.class.path").split(Pattern.quote(separator));
}
/** Injects the class loader that createPackageInfo() uses to resolve classes. */
public void setClassLoader(ClassLoader classLoader) {
this.classLoader = classLoader;
}
}
| apache-2.0 |
zzottel/graphhopper | core/src/main/java/com/graphhopper/geohash/SpatialKeyAlgo.java | 7912 | /*
* Licensed to GraphHopper and Peter Karich under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.geohash;
import com.graphhopper.util.shapes.BBox;
import com.graphhopper.util.shapes.GHPoint;
/**
* This class implements the idea of a geohash but in 'binary form' - to avoid confusion this is
* called 'spatial key'. The idea of mixing the latitude and longitude is also taken to allow
* removing the insignificant (right side) bits to make a geo-query or the coordinate less precise.
* E.g. for a 3 bit precision the spatial key would need 6 bits and look like:
* <p>
* lat0 lon0 | lat1 lon1 | lat2 lon2
* <p>
* This works similar to how BIG endianess works for bytes to int packing. Detailed information is
* available in this blog post:
* http://karussell.wordpress.com/2012/05/23/spatial-keys-memory-efficient-geohashes/
* <p>
* The bits are usable as key for hash tables like our SpatialKeyHashtable or for a spatial tree
* like QuadTreeSimple. Also the binary form makes it relative simple for implementations using this
* encoding scheme to expand to arbitrary dimension (e.g. shifting n-times if n would be the
* dimension).
* <p>
* A 32 bit representation has a precision of approx 600 meters = 40000/2^16
* <p>
* There are different possibilities how to handle different precision and order of bits. Either:
* <p>
* lat0 lon0 | lat1 lon1 | lat2 lon2
* <p>
* 0 0 | lat0 lon0 | lat1 lon1
* <p>
* as it is done now. Advantage: A single shift is only necessary to make it less precise. Or:
* <p>
* lat2 lon2 | lat1 lon1 | lat0 lon0
* <p>
* 0 0 | lat1 lon1 | lat0 lon0
* <p>
* Advantage: the bit mask to get lat0 lon0 is simple: 000..0011 and independent of the precision!
* But when stored e.g. as int one would need to (left) shift several times if precision is only
* 3bits.
* <p>
* @author Peter Karich
*/
// A 2 bit precision spatial key could look like
//
// |----|----|----|----|
// |1010|1011|1110|1111|
// |----|----|----|----| lat0 == 1
// |1000|1001|1100|1101|
// -|----|----|----|----|------
// |0010|0011|0110|0111|
// |----|----|----|----| lat0 == 0
// |0000|0001|0100|0101|
// |----|----|----|----|
// |
// lon0 == 0 | lon0 == 1
public class SpatialKeyAlgo implements KeyAlgo
{
    // Bounding box that the key space is mapped onto (world bounds by default).
    private BBox bbox;
    // Total number of bits of the interleaved key (lat and lon bits together).
    private int allBits;
    // Mask with only the highest used bit set; decode() starts from this bit.
    private long initialBits;

    /**
     * @param allBits how many bits should be used for the spatial key when encoding/decoding
     */
    public SpatialKeyAlgo( int allBits )
    {
        myinit(allBits);
    }

    // Validates the bit count, precomputes initialBits and applies world bounds.
    private void myinit( int allBits )
    {
        if (allBits > 64)
            throw new IllegalStateException("allBits is too big and does not fit into 8 bytes");

        if (allBits <= 0)
            throw new IllegalStateException("allBits must be positive");

        // if ((allBits & 0x1) == 1)
        //     throw new IllegalStateException("allBits needs to be even to use the same amount for lat and lon");
        this.allBits = allBits;
        initialBits = 1L << (allBits - 1);
        setWorldBounds();
    }

    /**
     * @return the number of involved bits
     */
    public int getBits()
    {
        return allBits;
    }

    public int getExactPrecision()
    {
        // 360 / 2^(allBits/2) = 1/precision
        // NOTE(review): the code below uses 2^allBits rather than the
        // 2^(allBits/2) stated in the comment above — confirm which is intended.
        int p = (int) (Math.pow(2, allBits) / 360);
        // no rounding error
        p++;
        return (int) Math.log10(p);
    }

    // Replaces the bounding box with a defensive copy of the given box.
    public SpatialKeyAlgo bounds( BBox box )
    {
        bbox = box.clone();
        return this;
    }

    @Override
    public SpatialKeyAlgo setBounds( double minLonInit, double maxLonInit, double minLatInit, double maxLatInit )
    {
        bounds(new BBox(minLonInit, maxLonInit, minLatInit, maxLatInit));
        return this;
    }

    // Default bounds: the whole globe in degrees.
    protected void setWorldBounds()
    {
        setBounds(-180, 180, -90, 90);
    }

    @Override
    public long encode( GHPoint coord )
    {
        return encode(coord.lat, coord.lon);
    }

    /**
     * Take latitude and longitude as input.
     * <p>
     * Performs a binary subdivision of the bounding box, emitting one bit per
     * halving and alternating between the latitude and longitude axes
     * (latitude first), until allBits bits have been produced.
     * <p>
     * @return the spatial key
     */
    @Override
    public final long encode( double lat, double lon )
    {
        // PERFORMANCE: int operations would be faster than double (for further comparison etc)
        // but we would need 'long' because 'int factorForPrecision' is not enough (problem: coord!=decode(encode(coord)) see testBijection)
        // and 'long'-ops are more expensive than double (at least on 32bit systems)
        long hash = 0;
        double minLatTmp = bbox.minLat;
        double maxLatTmp = bbox.maxLat;
        double minLonTmp = bbox.minLon;
        double maxLonTmp = bbox.maxLon;
        int i = 0;
        while (true)
        {
            // latitude bit: 1 if the point lies in the upper half of the interval
            if (minLatTmp < maxLatTmp)
            {
                double midLat = (minLatTmp + maxLatTmp) / 2;
                if (lat < midLat)
                {
                    maxLatTmp = midLat;
                } else
                {
                    hash |= 1;
                    minLatTmp = midLat;
                }
            }
            i++;
            if (i < allBits)
                hash <<= 1;
            else
                // if allBits is an odd number
                break;

            // longitude bit: 1 if the point lies in the right half of the interval
            if (minLonTmp < maxLonTmp)
            {
                double midLon = (minLonTmp + maxLonTmp) / 2;
                if (lon < midLon)
                {
                    maxLonTmp = midLon;
                } else
                {
                    hash |= 1;
                    minLonTmp = midLon;
                }
            }
            i++;
            if (i < allBits)
                hash <<= 1;
            else
                break;
        }
        return hash;
    }

    /**
     * This method returns latitude and longitude via latLon - calculated from specified spatialKey
     * <p>
     * Walks the key from the most significant used bit downwards, adding the
     * current half-interval to lat/lon whenever the corresponding bit is set.
     * <p>
     * @param spatialKey is the input
     */
    @Override
    public final void decode( long spatialKey, GHPoint latLon )
    {
        // Performance: calculating 'midLon' and 'midLat' on the fly is not slower than using
        // precalculated values from arrays and for 'bits' a precalculated array is even slightly slower!

        // Use the value in the middle => start from "min" use "max" as initial step-size
        double midLat = (bbox.maxLat - bbox.minLat) / 2;
        double midLon = (bbox.maxLon - bbox.minLon) / 2;
        double lat = bbox.minLat;
        double lon = bbox.minLon;
        long bits = initialBits;
        while (true)
        {
            if ((spatialKey & bits) != 0)
            {
                lat += midLat;
            }

            midLat /= 2;
            bits >>>= 1;
            if ((spatialKey & bits) != 0)
            {
                lon += midLon;
            }

            midLon /= 2;
            if (bits > 1)
            {
                bits >>>= 1;
            } else
            {
                break;
            }
        }

        // stable rounding - see testBijection
        lat += midLat;
        lon += midLon;
        latLon.lat = lat;
        latLon.lon = lon;
    }

    @Override
    public String toString()
    {
        return "bits:" + allBits + ", bounds:" + bbox;
    }
}
| apache-2.0 |
bclozel/spring-boot | spring-boot-project/spring-boot-tools/spring-boot-antlib/src/main/java/org/springframework/boot/ant/ShareAntlibLoader.java | 1369 | /*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.ant;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.springframework.util.StringUtils;
/**
 * Quiet Ant task that publishes this task's class loader as a project
 * reference under the configured {@code refid}.
 *
 * @author Matt Benson
 * @since 1.3.0
 */
public class ShareAntlibLoader extends Task {

    private String refid;

    public ShareAntlibLoader(Project project) {
        setProject(project);
    }

    @Override
    public void execute() throws BuildException {
        boolean hasRefid = StringUtils.hasText(this.refid);
        if (!hasRefid) {
            throw new BuildException("@refid has no text");
        }
        ClassLoader antlibLoader = getClass().getClassLoader();
        getProject().addReference(this.refid, antlibLoader);
    }

    public void setRefid(String refid) {
        this.refid = refid;
    }

}
| apache-2.0 |
abstractj/keycloak | adapters/saml/as7-eap6/subsystem/src/main/java/org/keycloak/subsystem/saml/as7/KeycloakSamlExtension.java | 4420 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.subsystem.saml.as7;
import org.jboss.as.controller.Extension;
import org.jboss.as.controller.ExtensionContext;
import org.jboss.as.controller.ModelVersion;
import org.jboss.as.controller.PathElement;
import org.jboss.as.controller.SubsystemRegistration;
import org.jboss.as.controller.descriptions.StandardResourceDescriptionResolver;
import org.jboss.as.controller.parsing.ExtensionParsingContext;
import org.jboss.as.controller.registry.ManagementResourceRegistration;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.SUBSYSTEM;
/**
 * Main Extension class for the keycloak-saml subsystem.
 *
 * @author Stan Silvert ssilvert@redhat.com (C) 2013 Red Hat Inc.
 */
public class KeycloakSamlExtension implements Extension {

    static final String SUBSYSTEM_NAME = "keycloak-saml";

    private static final String NAMESPACE_1_1 = "urn:jboss:domain:keycloak-saml:1.1";
    private static final String NAMESPACE_1_2 = "urn:jboss:domain:keycloak-saml:1.2";
    private static final String NAMESPACE_1_3 = "urn:jboss:domain:keycloak-saml:1.3";
    private static final String NAMESPACE_1_4 = "urn:jboss:domain:keycloak-saml:1.4";

    // Newest supported schema version.
    static final String CURRENT_NAMESPACE = NAMESPACE_1_4;

    // One stateless parser instance handles every supported schema version.
    private static final KeycloakSubsystemParser PARSER = new KeycloakSubsystemParser();

    static final PathElement PATH_SUBSYSTEM = PathElement.pathElement(SUBSYSTEM, SUBSYSTEM_NAME);

    private static final String RESOURCE_NAME = KeycloakSamlExtension.class.getPackage().getName() + ".LocalDescriptions";

    private static final ModelVersion MGMT_API_VERSION = ModelVersion.create(1, 1, 0);

    static final PathElement SUBSYSTEM_PATH = PathElement.pathElement(SUBSYSTEM, SUBSYSTEM_NAME);

    /**
     * Builds a resolver for localized resource descriptions using a key of the
     * form "keycloak-saml[.part]*" against the LocalDescriptions bundle.
     */
    static StandardResourceDescriptionResolver getResourceDescriptionResolver(final String... keyPrefix) {
        String prefix = SUBSYSTEM_NAME;
        for (String part : keyPrefix) {
            prefix = prefix + '.' + part;
        }
        return new StandardResourceDescriptionResolver(prefix, RESOURCE_NAME, KeycloakSamlExtension.class.getClassLoader(), true, false);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void initializeParsers(final ExtensionParsingContext context) {
        // All schema versions map to the same parser instance.
        for (String namespace : new String[] {NAMESPACE_1_1, NAMESPACE_1_2, NAMESPACE_1_3, NAMESPACE_1_4}) {
            context.setSubsystemXmlMapping(SUBSYSTEM_NAME, namespace, PARSER);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void initialize(final ExtensionContext context) {
        final SubsystemRegistration subsystem = context.registerSubsystem(SUBSYSTEM_NAME,
                MGMT_API_VERSION.getMajor(), MGMT_API_VERSION.getMinor(), MGMT_API_VERSION.getMicro());

        // Model tree: subsystem -> secure-deployment -> service-provider -> {key, identity-provider -> key}
        ManagementResourceRegistration subsystemModel = subsystem.registerSubsystemModel(KeycloakSubsystemDefinition.INSTANCE);
        ManagementResourceRegistration deployment = subsystemModel.registerSubModel(SecureDeploymentDefinition.INSTANCE);
        ManagementResourceRegistration serviceProvider = deployment.registerSubModel(ServiceProviderDefinition.INSTANCE);
        serviceProvider.registerSubModel(KeyDefinition.INSTANCE);
        ManagementResourceRegistration identityProvider = serviceProvider.registerSubModel(IdentityProviderDefinition.INSTANCE);
        identityProvider.registerSubModel(KeyDefinition.INSTANCE);

        subsystem.registerXMLElementWriter(PARSER);
    }
}
| apache-2.0 |
akuznetsov-gridgain/ignite | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheUtilsSelfTest.java | 7340 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import org.apache.ignite.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.testframework.*;
import org.apache.ignite.testframework.junits.common.*;
import java.io.*;
import java.util.concurrent.*;
/**
 * Grid cache utils test.
 *
 * Verifies CU.validateCacheKey/validateCacheValue behavior against fixture
 * classes that deliberately violate (or satisfy) the equals/hashCode and
 * Externalizable expectations.
 */
public class GridCacheUtilsSelfTest extends GridCommonAbstractTest {
    /** Warning text that CU emits for non-Externalizable keys/values. */
    private static final String EXTERNALIZABLE_WARNING = "For best performance you should implement " +
        "java.io.Externalizable";

    /**
     * Does not override equals and hashCode.
     */
    private static class NoEqualsAndHashCode {
    }

    /**
     * Does not override equals.
     */
    private static class NoEquals {
        /** {@inheritDoc} */
        @Override public int hashCode() {
            return 1;
        }
    }

    /**
     * Does not override hashCode.
     */
    private static class NoHashCode {
        /** {@inheritDoc} */
        @Override public boolean equals(Object obj) {
            return super.equals(obj);
        }
    }

    /**
     * Defines equals with different signature.
     */
    private static class WrongEquals {
        /**
         * @param obj Object.
         * @return {@code False}.
         */
        @SuppressWarnings("CovariantEquals")
        public boolean equals(String obj) {
            return false;
        }
    }

    /**
     * Overrides equals and hashCode.
     */
    private static class EqualsAndHashCode {
        /** {@inheritDoc} */
        @Override public int hashCode() {
            return super.hashCode();
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object obj) {
            return super.equals(obj);
        }
    }

    /**
     * Overrides equals and hashCode and implements {@link Externalizable}.
     */
    private static class ExternalizableEqualsAndHashCode implements Externalizable {
        /**
         * Constructor required by {@link Externalizable}.
         */
        public ExternalizableEqualsAndHashCode() {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return super.hashCode();
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object obj) {
            return super.equals(obj);
        }

        /** {@inheritDoc} */
        @Override public void writeExternal(ObjectOutput out) throws IOException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            // No-op.
        }
    }

    /**
     * Extends class which overrides equals and hashCode.
     */
    private static class ExtendsClassWithEqualsAndHashCode extends EqualsAndHashCode {
    }

    /**
     * Extends class which overrides equals and hashCode, overrides equals and hashCode.
     */
    private static class ExtendsClassWithEqualsAndHashCode2 extends EqualsAndHashCode {
        /** {@inheritDoc} */
        @Override public int hashCode() {
            return super.hashCode();
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object obj) {
            return super.equals(obj);
        }
    }

    /**
     * Does not implement {@link Externalizable}.
     */
    private static class NoImplExternalizable {
    }

    /**
     * Implements {@link Externalizable}.
     */
    private static class ImplExternalizable implements Externalizable {
        /**
         * Constructor required by {@link Externalizable}.
         */
        public ImplExternalizable() {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void writeExternal(ObjectOutput out) throws IOException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            // No-op.
        }
    }

    /**
     * Extends class which implements {@link Externalizable}.
     */
    private static class ExtendsImplExternalizable extends ImplExternalizable {
        /**
         * Constructor required by {@link Externalizable}.
         */
        public ExtendsImplExternalizable() {
            // No-op.
        }
    }

    /**
     * Valid keys (equals+hashCode present, directly or via a superclass) must
     * pass validation; classes missing either method must be rejected.
     */
    public void testCacheKeyValidation() {
        CU.validateCacheKey(log, "key");
        CU.validateCacheKey(log, 1);
        CU.validateCacheKey(log, 1L);
        CU.validateCacheKey(log, 1.0);
        CU.validateCacheKey(log, new ExtendsClassWithEqualsAndHashCode());
        CU.validateCacheKey(log, new ExtendsClassWithEqualsAndHashCode2());
        assertThrowsForInvalidKey(new NoEqualsAndHashCode());
        assertThrowsForInvalidKey(new NoEquals());
        assertThrowsForInvalidKey(new NoHashCode());
        assertThrowsForInvalidKey(new WrongEquals());
        // From here the local string-capturing logger shadows the inherited
        // 'log' field, so the emitted warning text can be asserted below.
        IgniteLogger log = new GridStringLogger(false);
        CU.validateCacheKey(log, new ExternalizableEqualsAndHashCode());
        assertFalse(log.toString().contains(EXTERNALIZABLE_WARNING));
        CU.validateCacheKey(log, "key");
        assertFalse(log.toString().contains(EXTERNALIZABLE_WARNING));
        // Non-Externalizable key: valid, but should produce the performance warning.
        CU.validateCacheKey(log, new EqualsAndHashCode());
        assertTrue(log.toString().contains(EXTERNALIZABLE_WARNING));
    }

    /**
     * Externalizable values, Strings and byte arrays must not trigger the
     * performance warning; other non-Externalizable values must.
     */
    public void testCacheValueValidation() {
        IgniteLogger log = new GridStringLogger(false);
        CU.validateCacheValue(log, new ImplExternalizable());
        assertFalse(log.toString().contains(EXTERNALIZABLE_WARNING));
        CU.validateCacheValue(log, new ExtendsImplExternalizable());
        assertFalse(log.toString().contains(EXTERNALIZABLE_WARNING));
        CU.validateCacheValue(log, "val");
        assertFalse(log.toString().contains(EXTERNALIZABLE_WARNING));
        CU.validateCacheValue(log, new byte[10]);
        assertFalse(log.toString().contains(EXTERNALIZABLE_WARNING));
        CU.validateCacheValue(log, new NoImplExternalizable());
        assertTrue(log.toString().contains(EXTERNALIZABLE_WARNING));
    }

    /**
     * Asserts that validating the given key throws IllegalArgumentException.
     *
     * @param key Cache key.
     */
    private void assertThrowsForInvalidKey(final Object key) {
        GridTestUtils.assertThrows(log, new Callable<Void>() {
            @Override public Void call() throws Exception {
                CU.validateCacheKey(log, key);

                return null;
            }
        }, IllegalArgumentException.class, null);
    }
}
| apache-2.0 |
Jasig/SSP-Platform | uportal-war/src/test/java/org/jasig/portal/portlet/dao/jpa/JpaPortletDaoTest.java | 12691 | /**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.portlet.dao.jpa;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import org.jasig.portal.concurrency.CallableWithoutResult;
import org.jasig.portal.portlet.dao.IPortletDefinitionDao;
import org.jasig.portal.portlet.dao.IPortletEntityDao;
import org.jasig.portal.portlet.dao.IPortletTypeDao;
import org.jasig.portal.portlet.om.IPortletDefinition;
import org.jasig.portal.portlet.om.IPortletDefinitionId;
import org.jasig.portal.portlet.om.IPortletEntity;
import org.jasig.portal.portlet.om.IPortletEntityId;
import org.jasig.portal.portlet.om.IPortletPreference;
import org.jasig.portal.portlet.om.IPortletType;
import org.jasig.portal.test.BasePortalJpaDaoTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
* @author Eric Dalquist <a href="mailto:eric.dalquist@doit.wisc.edu">eric.dalquist@doit.wisc.edu</a>
* @version $Revision: 337 $
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = "classpath:jpaPortalTestApplicationContext.xml")
public class JpaPortletDaoTest extends BasePortalJpaDaoTest {
@Autowired
private IPortletTypeDao jpaChannelTypeDao;
@Autowired
private IPortletDefinitionDao jpaPortletDefinitionDao;
@Autowired
private IPortletEntityDao jpaPortletEntityDao;
@Before
public void onSetUp() throws Exception {
    // Purge all portlet definitions and portlet types so each test starts
    // from an empty store; definitions are removed first since they
    // reference a portlet type.
    this.execute(new Callable<Object>() {
        @Override
        public Object call() throws Exception {
            for (final IPortletDefinition portletDefinition : jpaPortletDefinitionDao.getPortletDefinitions()) {
                jpaPortletDefinitionDao.deletePortletDefinition(portletDefinition);
            }

            for (final IPortletType portletType : jpaChannelTypeDao.getPortletTypes()) {
                jpaChannelTypeDao.deletePortletType(portletType);
            }

            return null;
        }
    });
}
@Test
public void testNoopOperations() throws Exception {
    // Look-ups against an empty store must return null / empty results
    // rather than throw.
    execute(new CallableWithoutResult() {
        @Override
        protected void callWithoutResult() {
            final IPortletDefinitionId portletDefinitionId = PortletDefinitionIdImpl.create(1);
            final IPortletDefinition nullPortDef1 = jpaPortletDefinitionDao.getPortletDefinition(portletDefinitionId);
            assertNull(nullPortDef1);

            final IPortletEntity nullPortEnt1 = jpaPortletEntityDao.getPortletEntity("chanSub1", 1);
            assertNull(nullPortEnt1);

            final Set<IPortletEntity> portEnts = jpaPortletEntityDao.getPortletEntities(PortletDefinitionIdImpl.create(1));
            assertEquals(Collections.emptySet(), portEnts);
        }
    });
}
@Test
public void testAllDefinitionDaoMethods() throws Exception {
    // Each execute(...) call appears to run in its own unit of work
    // (see BasePortalJpaDaoTest — TODO confirm), so changes made in one
    // step must be visible to the next via a fresh DAO look-up.
    final IPortletDefinitionId portletDefinitionId = execute(new Callable<IPortletDefinitionId>() {
        @Override
        public IPortletDefinitionId call() {
            final IPortletType channelType = jpaChannelTypeDao.createPortletType("BaseType", "foobar");

            //Create a definition
            final IPortletDefinition chanDef1 = jpaPortletDefinitionDao.createPortletDefinition(channelType, "fname1", "Test Portlet 1", "Test Portlet 1 Title", "/context1", "portletName1", false);

            //Try all of the retrieval options
            final IPortletDefinition portDef1a = jpaPortletDefinitionDao.getPortletDefinition(chanDef1.getPortletDefinitionId());
            assertEquals(chanDef1, portDef1a);

            //Create a second definition with the same app/portlet
            final IPortletDefinition chanDef2 = jpaPortletDefinitionDao.createPortletDefinition(channelType, "fname2", "Test Portlet 2", "Test Portlet 2 Title", "/uPortal", "portletName2", true);

            return chanDef2.getPortletDefinitionId();
        }
    });

    execute(new CallableWithoutResult() {
        @Override
        protected void callWithoutResult() {
            final IPortletDefinition chanDef2 = jpaPortletDefinitionDao.getPortletDefinitionByFname("fname2");

            // Add some preferences
            final List<IPortletPreference> prefsList2 = chanDef2.getPortletPreferences();
            prefsList2.add(new PortletPreferenceImpl("prefName1", false, "val1", "val2"));
            prefsList2.add(new PortletPreferenceImpl("prefName2", true, "val3", "val4"));

            jpaPortletDefinitionDao.updatePortletDefinition(chanDef2);
        }
    });

    execute(new CallableWithoutResult() {
        @Override
        protected void callWithoutResult() {
            final IPortletDefinition chanDef2 = jpaPortletDefinitionDao.getPortletDefinitionByFname("fname2");

            // verify preferences
            final List<IPortletPreference> prefsList2 = chanDef2.getPortletPreferences();
            assertEquals(2, prefsList2.size());
        }
    });

    execute(new CallableWithoutResult() {
        @Override
        protected void callWithoutResult() {
            // Check prefs, remove one and another
            // (retrieval by name exercises getPortletDefinitionByName)
            final IPortletDefinition portDef3 = jpaPortletDefinitionDao.getPortletDefinitionByName("Test Portlet 2");
            final List<IPortletPreference> prefsList3 = portDef3.getPortletPreferences();

            final List<IPortletPreference> expectedPrefsList3 = new ArrayList<IPortletPreference>();
            expectedPrefsList3.add(new PortletPreferenceImpl("prefName1", false, "val1", "val2"));
            expectedPrefsList3.add(new PortletPreferenceImpl("prefName2", true, "val3", "val4"));

            assertEquals(expectedPrefsList3, prefsList3);

            prefsList3.remove(1);
            prefsList3.add(new PortletPreferenceImpl("prefName3", false, "val5", "val6"));

            jpaPortletDefinitionDao.updatePortletDefinition(portDef3);
        }
    });

    execute(new CallableWithoutResult() {
        @Override
        protected void callWithoutResult() {
            // Check prefs
            final IPortletDefinition portDef4 = jpaPortletDefinitionDao.getPortletDefinition(portletDefinitionId);
            final List<IPortletPreference> prefsList4 = portDef4.getPortletPreferences();

            final List<IPortletPreference> expectedPrefsList4 = new ArrayList<IPortletPreference>();
            expectedPrefsList4.add(new PortletPreferenceImpl("prefName1", false, "val1", "val2"));
            expectedPrefsList4.add(new PortletPreferenceImpl("prefName3", false, "val5", "val6"));

            assertEquals(expectedPrefsList4, prefsList4);
        }
    });
}
    @Test
    public void testAllEntityDaoMethods() throws Exception {
        // Each execute(...) call runs its callable as a separate unit of work,
        // so every step below sees the committed results of the previous one.
        // Step 1: create a portlet type and a definition; capture the definition id.
        final IPortletDefinitionId portletDefinitionId = execute(new Callable<IPortletDefinitionId>() {
            @Override
            public IPortletDefinitionId call() throws Exception {
                final IPortletType channelType = jpaChannelTypeDao.createPortletType("BaseType", "foobar");
                //Create a definition
                final IPortletDefinition chanDef1 = jpaPortletDefinitionDao.createPortletDefinition(channelType, "fname1", "Test Portlet 1", "Test Portlet 1 Title", "/context1", "portletName1", false);
                return chanDef1.getPortletDefinitionId();
            }
        });
        // Step 2: create an entity (subscription "chanSub1" for user 1) against the definition.
        final IPortletEntityId portletEntityId = execute(new Callable<IPortletEntityId>() {
            @Override
            public IPortletEntityId call() throws Exception {
                IPortletEntity portEnt1 = jpaPortletEntityDao.createPortletEntity(portletDefinitionId, "chanSub1", 1);
                return portEnt1.getPortletEntityId();
            }
        });
        // Step 3: the entity must be retrievable by id, by (subscription, user),
        // by definition id and by user id, and all lookups must agree.
        execute(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                final IPortletEntity portEnt1a = jpaPortletEntityDao.getPortletEntity(portletEntityId);
                assertNotNull(portEnt1a);
                final IPortletEntity portEnt1b = jpaPortletEntityDao.getPortletEntity("chanSub1", 1);
                assertEquals(portEnt1a, portEnt1b);
                final IPortletEntity portEnt1c = jpaPortletEntityDao.getPortletEntity("chanSub1", 1);
                assertEquals(portEnt1b, portEnt1c);
                final Set<IPortletEntity> portletEntities1 = jpaPortletEntityDao.getPortletEntities(portletDefinitionId);
                assertEquals(Collections.singleton(portEnt1a), portletEntities1);
                final Set<IPortletEntity> portletEntitiesByUser = jpaPortletEntityDao.getPortletEntitiesForUser(1);
                assertEquals(Collections.singleton(portEnt1a), portletEntitiesByUser);
                return null;
            }
        });
        // Step 4: attach preferences to both the definition and the entity and persist them.
        execute(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                //Add entity and preferences
                final IPortletDefinition portDef1 = jpaPortletDefinitionDao.getPortletDefinition(portletDefinitionId);
                portDef1.getPortletPreferences().add(new PortletPreferenceImpl("defpref1", false, "dpv1", "dpv2"));
                jpaPortletDefinitionDao.updatePortletDefinition(portDef1);
                final IPortletEntity portEnt1 = jpaPortletEntityDao.getPortletEntity(portletEntityId);
                portEnt1.getPortletPreferences().add(new PortletPreferenceImpl("entpref1", false, "epv1", "epv2"));
                // portEnt1.setWindowState(WindowState.MINIMIZED);
                jpaPortletEntityDao.updatePortletEntity(portEnt1);
                return null;
            }
        });
        // Step 5: delete the definition (the "whole tree", i.e. its entities too).
        execute(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                //Delete whole tree
                final IPortletDefinition portDef2 = jpaPortletDefinitionDao.getPortletDefinition(portletDefinitionId);
                jpaPortletDefinitionDao.deletePortletDefinition(portDef2);
                return null;
            }
        });
        // Step 6: no entities may remain for the deleted definition.
        execute(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                //Verify it is gone
                final Set<IPortletEntity> portletEntities2 = jpaPortletEntityDao.getPortletEntities(portletDefinitionId);
                assertEquals(Collections.emptySet(), portletEntities2);
                return null;
            }
        });
    }
public static class Util {
public static <T> Set<T> unmodifiableSet(T... o) {
return Collections.unmodifiableSet(new HashSet<T>(Arrays.asList(o)));
}
}
}
| apache-2.0 |
robin13/elasticsearch | server/src/main/java/org/elasticsearch/common/FieldMemoryStats.java | 4026 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Iterator;
import java.util.Objects;
/**
 * A reusable class to encode {@code field -> memory size} mappings
 */
public final class FieldMemoryStats implements Writeable, Iterable<ObjectLongCursor<String>>{
    // Per-field memory usage in bytes.
    private final ObjectLongHashMap<String> stats;
    /**
     * Creates a new FieldMemoryStats instance
     *
     * @param stats per-field memory usage in bytes; must be non-null and must not
     *              contain a null key
     */
    public FieldMemoryStats(ObjectLongHashMap<String> stats) {
        // Fixed message typo: this validates the "stats" argument, not "status".
        this.stats = Objects.requireNonNull(stats, "stats must be non-null");
        assert stats.containsKey(null) == false;
    }
    /**
     * Creates a new FieldMemoryStats instance from a stream
     */
    public FieldMemoryStats(StreamInput input) throws IOException {
        int size = input.readVInt();
        stats = new ObjectLongHashMap<>(size);
        for (int i = 0; i < size; i++) {
            stats.put(input.readString(), input.readVLong());
        }
    }
    /**
     * Adds / merges the given field memory stats into this stats instance
     */
    public void add(FieldMemoryStats fieldMemoryStats) {
        for (ObjectLongCursor<String> entry : fieldMemoryStats.stats) {
            stats.addTo(entry.key, entry.value);
        }
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Serialized form mirrors the stream constructor: size, then (name, bytes) pairs.
        out.writeVInt(stats.size());
        for (ObjectLongCursor<String> entry : stats) {
            out.writeString(entry.key);
            out.writeVLong(entry.value);
        }
    }
    /**
     * Generates x-content into the given builder for each of the fields in this stats instance
     * @param builder the builder to generate on
     * @param key the top level key for this stats object
     * @param rawKey the raw byte key for each of the fields byte sizes
     * @param readableKey the readable key for each of the fields byte sizes
     */
    public void toXContent(XContentBuilder builder, String key, String rawKey, String readableKey) throws IOException {
        builder.startObject(key);
        for (ObjectLongCursor<String> entry : stats) {
            builder.startObject(entry.key);
            builder.humanReadableField(rawKey, readableKey, new ByteSizeValue(entry.value));
            builder.endObject();
        }
        builder.endObject();
    }
    /**
     * Creates a deep copy of this stats instance
     */
    public FieldMemoryStats copy() {
        return new FieldMemoryStats(stats.clone());
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        FieldMemoryStats that = (FieldMemoryStats) o;
        return Objects.equals(stats, that.stats);
    }
    @Override
    public int hashCode() {
        return Objects.hash(stats);
    }
    @Override
    public Iterator<ObjectLongCursor<String>> iterator() {
        return stats.iterator();
    }
    /**
     * Returns the fields value in bytes or <code>0</code> if it's not present in the stats
     */
    public long get(String field) {
        return stats.get(field);
    }
    /**
     * Returns <code>true</code> iff the given field is in the stats
     */
    public boolean containsField(String field) {
        return stats.containsKey(field);
    }
}
| apache-2.0 |
openweave/openweave-core | third_party/android/platform-libcore/android-platform-libcore/luni/src/main/java/java/net/HttpCookie.java | 27906 | /* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.net;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import libcore.base.Objects;
/**
* An opaque key-value value pair held by an HTTP client to permit a stateful
* session with an HTTP server. This class parses cookie headers for all three
* commonly used HTTP cookie specifications:
*
* <ul>
* <li>The Netscape cookie spec is officially obsolete but widely used in
* practice. Each cookie contains one key-value pair and the following
* attributes: {@code Domain}, {@code Expires}, {@code Path}, and
* {@code Secure}. The {@link #getVersion() version} of cookies in this
* format is {@code 0}.
* <p>There are no accessors for the {@code Expires} attribute. When
* parsed, expires attributes are assigned to the {@link #getMaxAge()
* Max-Age} attribute as an offset from {@link System#currentTimeMillis()
* now}.
* <li><a href="http://www.ietf.org/rfc/rfc2109.txt">RFC 2109</a> formalizes
* the Netscape cookie spec. It replaces the {@code Expires} timestamp
* with a {@code Max-Age} duration and adds {@code Comment} and {@code
* Version} attributes. The {@link #getVersion() version} of cookies in
* this format is {@code 1}.
* <li><a href="http://www.ietf.org/rfc/rfc2965.txt">RFC 2965</a> refines
* RFC 2109. It adds {@code Discard}, {@code Port}, and {@code
* CommentURL} attributes and renames the header from {@code Set-Cookie}
* to {@code Set-Cookie2}. The {@link #getVersion() version} of cookies
* in this format is {@code 1}.
* </ul>
*
* <p>This implementation silently discards unrecognized attributes. In
* particular, the {@code HttpOnly} attribute is widely served but isn't in any
* of the above specs. It was introduced by Internet Explorer to prevent server
* cookies from being exposed in the DOM to JavaScript, etc.
*
* @since 1.6
*/
public final class HttpCookie implements Cloneable {
    /**
     * Most websites serve cookies in the blessed format. Eagerly create the parser to ensure such
     * cookies are on the fast path.
     */
    private static final ThreadLocal<DateFormat> STANDARD_DATE_FORMAT
            = new ThreadLocal<DateFormat>() {
        @Override protected DateFormat initialValue() {
            // SimpleDateFormat is not thread-safe, hence one instance per thread.
            return new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US); // RFC 1123
        }
    };
    /**
     * If we fail to parse a date in a non-standard format, try each of these formats in sequence.
     */
    private static final String[] BROWSER_COMPATIBLE_DATE_FORMATS = new String[] {
            /* This list comes from {@code org.apache.http.impl.cookie.BrowserCompatSpec}. */
            "EEEE, dd-MMM-yy HH:mm:ss zzz", // RFC 1036
            "EEE MMM d HH:mm:ss yyyy", // ANSI C asctime()
            "EEE, dd-MMM-yyyy HH:mm:ss z",
            "EEE, dd-MMM-yyyy HH-mm-ss z",
            "EEE, dd MMM yy HH:mm:ss z",
            "EEE dd-MMM-yyyy HH:mm:ss z",
            "EEE dd MMM yyyy HH:mm:ss z",
            "EEE dd-MMM-yyyy HH-mm-ss z",
            "EEE dd-MMM-yy HH:mm:ss z",
            "EEE dd MMM yy HH:mm:ss z",
            "EEE,dd-MMM-yy HH:mm:ss z",
            "EEE,dd-MMM-yyyy HH:mm:ss z",
            "EEE, dd-MM-yyyy HH:mm:ss z",
            /* RI bug 6641315 claims a cookie of this format was once served by www.yahoo.com */
            "EEE MMM d yyyy HH:mm:ss z",
    };
    // Attribute names that may not be used as cookie names; compared case-insensitively
    // (lower-cased) in isValidName().
    private static final Set<String> RESERVED_NAMES = new HashSet<String>();
    static {
        RESERVED_NAMES.add("comment"); // RFC 2109 RFC 2965
        RESERVED_NAMES.add("commenturl"); // RFC 2965
        RESERVED_NAMES.add("discard"); // RFC 2965
        RESERVED_NAMES.add("domain"); // Netscape RFC 2109 RFC 2965
        RESERVED_NAMES.add("expires"); // Netscape
        RESERVED_NAMES.add("max-age"); // RFC 2109 RFC 2965
        RESERVED_NAMES.add("path"); // Netscape RFC 2109 RFC 2965
        RESERVED_NAMES.add("port"); // RFC 2965
        RESERVED_NAMES.add("secure"); // Netscape RFC 2109 RFC 2965
        RESERVED_NAMES.add("version"); // RFC 2109 RFC 2965
    }
/**
* Returns true if {@code host} matches the domain pattern {@code domain}.
*
* @param domainPattern a host name (like {@code android.com} or {@code
* localhost}), or a pattern to match subdomains of a domain name (like
* {@code .android.com}). A special case pattern is {@code .local},
* which matches all hosts without a TLD (like {@code localhost}).
* @param host the host name or IP address from an HTTP request.
*/
public static boolean domainMatches(String domainPattern, String host) {
if (domainPattern == null || host == null) {
return false;
}
String a = host.toLowerCase();
String b = domainPattern.toLowerCase();
/*
* From the spec: "both host names are IP addresses and their host name strings match
* exactly; or both host names are FQDN strings and their host name strings match exactly"
*/
if (a.equals(b) && (isFullyQualifiedDomainName(a, 0) || !InetAddress.isHostName(a))) {
return true;
}
if (!isFullyQualifiedDomainName(a, 0)) {
return b.equals(".local");
}
/*
* Not in the spec! If prefixing a hostname with "." causes it to equal the domain pattern,
* then it should match. This is necessary so that the pattern ".google.com" will match the
* host "google.com".
*/
if (b.length() == 1 + a.length()
&& b.startsWith(".")
&& b.endsWith(a)
&& isFullyQualifiedDomainName(b, 1)) {
return true;
}
/*
* From the spec: "A is a HDN string and has the form NB, where N is a
* non-empty name string, B has the form .B', and B' is a HDN string.
* (So, x.y.com domain-matches .Y.com but not Y.com.)
*/
return a.length() > b.length()
&& a.endsWith(b)
&& ((b.startsWith(".") && isFullyQualifiedDomainName(b, 1)) || b.equals(".local"));
}
/**
* Returns true if {@code cookie} should be sent to or accepted from {@code uri} with respect
* to the cookie's path. Cookies match by directory prefix: URI "/foo" matches cookies "/foo",
* "/foo/" and "/foo/bar", but not "/" or "/foobar".
*/
static boolean pathMatches(HttpCookie cookie, URI uri) {
String uriPath = matchablePath(uri.getPath());
String cookiePath = matchablePath(cookie.getPath());
return uriPath.startsWith(cookiePath);
}
/**
* Returns true if {@code cookie} should be sent to {@code uri} with respect to the cookie's
* secure attribute. Secure cookies should not be sent in insecure (ie. non-HTTPS) requests.
*/
static boolean secureMatches(HttpCookie cookie, URI uri) {
return !cookie.getSecure() || "https".equalsIgnoreCase(uri.getScheme());
}
/**
* Returns true if {@code cookie} should be sent to {@code uri} with respect to the cookie's
* port list.
*/
static boolean portMatches(HttpCookie cookie, URI uri) {
if (cookie.getPortlist() == null) {
return true;
}
return Arrays.asList(cookie.getPortlist().split(","))
.contains(Integer.toString(uri.getEffectivePort()));
}
/**
* Returns a non-null path ending in "/".
*/
private static String matchablePath(String path) {
if (path == null) {
return "/";
} else if (path.endsWith("/")) {
return path;
} else {
return path + "/";
}
}
/**
* Returns true if {@code s.substring(firstCharacter)} contains a dot
* between its first and last characters, exclusive. This considers both
* {@code android.com} and {@code co.uk} to be fully qualified domain names,
* but not {@code android.com.}, {@code .com}. or {@code android}.
*
* <p>Although this implements the cookie spec's definition of FQDN, it is
* not general purpose. For example, this returns true for IPv4 addresses.
*/
private static boolean isFullyQualifiedDomainName(String s, int firstCharacter) {
int dotPosition = s.indexOf('.', firstCharacter + 1);
return dotPosition != -1 && dotPosition < s.length() - 1;
}
    /**
     * Constructs a cookie from a string. The string should comply with
     * set-cookie or set-cookie2 header format as specified in RFC 2965. Since
     * set-cookies2 syntax allows more than one cookie definitions in one
     * header, the returned object is a list.
     *
     * @param header
     *            a set-cookie or set-cookie2 header.
     * @return a list of constructed cookies
     * @throws IllegalArgumentException
     *             if the string does not comply with cookie specification, or
     *             the cookie name contains illegal characters, or reserved
     *             tokens of cookie specification appears
     * @throws NullPointerException
     *             if header is null
     */
    public static List<HttpCookie> parse(String header) {
        // All the work happens in the single-use CookieParser below.
        return new CookieParser(header).parse();
    }
    /**
     * Single-use cursor parser for one {@code Set-Cookie}/{@code Set-Cookie2}
     * header line: {@code pos} indexes into {@code input}, and each read*
     * helper consumes characters and advances it.
     */
    static class CookieParser {
        private static final String ATTRIBUTE_NAME_TERMINATORS = ",;= \t";
        private static final String WHITESPACE = " \t";
        private final String input;
        // Lower-cased copy for case-insensitive matching of header and attribute names.
        private final String inputLowerCase;
        private int pos = 0;
        /*
         * The cookie's version is set based on an overly complex heuristic:
         * If it has an expires attribute, the version is 0.
         * Otherwise, if it has a max-age attribute, the version is 1.
         * Otherwise, if the cookie started with "Set-Cookie2", the version is 1.
         * Otherwise, if it has any explicit version attributes, use the first one.
         * Otherwise, the version is 0.
         */
        boolean hasExpires = false;
        boolean hasMaxAge = false;
        boolean hasVersion = false;
        CookieParser(String input) {
            this.input = input;
            this.inputLowerCase = input.toLowerCase(Locale.US);
        }
        public List<HttpCookie> parse() {
            List<HttpCookie> cookies = new ArrayList<HttpCookie>(2);
            // The RI permits input without either the "Set-Cookie:" or "Set-Cookie2" headers.
            boolean pre2965 = true;
            if (inputLowerCase.startsWith("set-cookie2:")) {
                pos += "set-cookie2:".length();
                pre2965 = false;
                hasVersion = true;
            } else if (inputLowerCase.startsWith("set-cookie:")) {
                pos += "set-cookie:".length();
            }
            /*
             * Read a comma-separated list of cookies. Note that the values may contain commas!
             * <NAME> "=" <VALUE> ( ";" <ATTR NAME> ( "=" <ATTR VALUE> )? )*
             */
            while (true) {
                String name = readAttributeName(false);
                if (name == null) {
                    // End of input; at least one cookie must have been parsed.
                    if (cookies.isEmpty()) {
                        throw new IllegalArgumentException("No cookies in " + input);
                    }
                    return cookies;
                }
                if (!readEqualsSign()) {
                    throw new IllegalArgumentException(
                            "Expected '=' after " + name + " in " + input);
                }
                String value = readAttributeValue(pre2965 ? ";" : ",;");
                HttpCookie cookie = new HttpCookie(name, value);
                cookie.version = pre2965 ? 0 : 1;
                cookies.add(cookie);
                /*
                 * Read the attributes of the current cookie. Each iteration of this loop should
                 * enter with input either exhausted or prefixed with ';' or ',' as in ";path=/"
                 * and ",COOKIE2=value2".
                 */
                while (true) {
                    skipWhitespace();
                    if (pos == input.length()) {
                        break;
                    }
                    if (input.charAt(pos) == ',') {
                        pos++;
                        break; // a true comma delimiter; the current cookie is complete.
                    } else if (input.charAt(pos) == ';') {
                        pos++;
                    }
                    String attributeName = readAttributeName(true);
                    if (attributeName == null) {
                        continue; // for empty attribute as in "Set-Cookie: foo=Foo;;path=/"
                    }
                    /*
                     * Since expires and port attributes commonly include comma delimiters, always
                     * scan until a semicolon when parsing these attributes.
                     */
                    String terminators = pre2965
                            || "expires".equals(attributeName) || "port".equals(attributeName)
                            ? ";"
                            : ";,";
                    String attributeValue = null;
                    if (readEqualsSign()) {
                        attributeValue = readAttributeValue(terminators);
                    }
                    setAttribute(cookie, attributeName, attributeValue);
                }
                // Apply the version heuristic documented on the fields above.
                if (hasExpires) {
                    cookie.version = 0;
                } else if (hasMaxAge) {
                    cookie.version = 1;
                }
            }
        }
        private void setAttribute(HttpCookie cookie, String name, String value) {
            // For attributes guarded by a null/-1 check, only the first occurrence wins.
            if (name.equals("comment") && cookie.comment == null) {
                cookie.comment = value;
            } else if (name.equals("commenturl") && cookie.commentURL == null) {
                cookie.commentURL = value;
            } else if (name.equals("discard")) {
                cookie.discard = true;
            } else if (name.equals("expires")) {
                hasExpires = true;
                if (cookie.maxAge == -1L) {
                    Date date = parseHttpDate(value);
                    if (date != null) {
                        cookie.setExpires(date);
                    } else {
                        // Unparseable expires date: treat the cookie as already expired.
                        cookie.maxAge = 0;
                    }
                }
            } else if (name.equals("max-age") && cookie.maxAge == -1L) {
                hasMaxAge = true;
                cookie.maxAge = Long.parseLong(value);
            } else if (name.equals("path") && cookie.path == null) {
                cookie.path = value;
            } else if (name.equals("port") && cookie.portList == null) {
                cookie.portList = value != null ? value : "";
            } else if (name.equals("secure")) {
                cookie.secure = true;
            } else if (name.equals("version") && !hasVersion) {
                cookie.version = Integer.parseInt(value);
            }
        }
        /**
         * Parses {@code value} first with the standard RFC 1123 format, then with
         * each browser-compatible fallback format; returns null if none match.
         */
        private Date parseHttpDate(String value) {
            try {
                return STANDARD_DATE_FORMAT.get().parse(value);
            } catch (ParseException ignore) {
                // fall through to the browser-compatible formats below
            }
            for (String formatString : BROWSER_COMPATIBLE_DATE_FORMATS) {
                try {
                    return new SimpleDateFormat(formatString, Locale.US).parse(value);
                } catch (ParseException ignore) {
                    // try the next format
                }
            }
            return null;
        }
        /**
         * Returns the next attribute name, or null if the input has been
         * exhausted. Returns wth the cursor on the delimiter that follows.
         */
        private String readAttributeName(boolean returnLowerCase) {
            skipWhitespace();
            int c = find(ATTRIBUTE_NAME_TERMINATORS);
            String forSubstring = returnLowerCase ? inputLowerCase : input;
            String result = pos < c ? forSubstring.substring(pos, c) : null;
            pos = c;
            return result;
        }
        /**
         * Returns true if an equals sign was read and consumed.
         */
        private boolean readEqualsSign() {
            skipWhitespace();
            if (pos < input.length() && input.charAt(pos) == '=') {
                pos++;
                return true;
            }
            return false;
        }
        /**
         * Reads an attribute value, by parsing either a quoted string or until
         * the next character in {@code terminators}. The terminator character
         * is not consumed.
         */
        private String readAttributeValue(String terminators) {
            skipWhitespace();
            /*
             * Quoted string: read 'til the close quote. The spec mentions only "double quotes"
             * but RI bug 6901170 claims that 'single quotes' are also used.
             */
            if (pos < input.length() && (input.charAt(pos) == '"' || input.charAt(pos) == '\'')) {
                char quoteCharacter = input.charAt(pos++);
                int closeQuote = input.indexOf(quoteCharacter, pos);
                if (closeQuote == -1) {
                    throw new IllegalArgumentException("Unterminated string literal in " + input);
                }
                String result = input.substring(pos, closeQuote);
                pos = closeQuote + 1;
                return result;
            }
            int c = find(terminators);
            String result = input.substring(pos, c);
            pos = c;
            return result;
        }
        /**
         * Returns the index of the next character in {@code chars}, or the end
         * of the string.
         */
        private int find(String chars) {
            for (int c = pos; c < input.length(); c++) {
                if (chars.indexOf(input.charAt(c)) != -1) {
                    return c;
                }
            }
            return input.length();
        }
        /**
         * Advances the cursor past any spaces and tabs.
         */
        private void skipWhitespace() {
            for (; pos < input.length(); pos++) {
                if (WHITESPACE.indexOf(input.charAt(pos)) == -1) {
                    break;
                }
            }
        }
    }
    private String comment;
    private String commentURL;
    private boolean discard;
    private String domain;
    // Max-Age in delta-seconds; -1 means unset (cookie persists until browser shutdown).
    private long maxAge = -1l;
    private final String name;
    private String path;
    // Comma-separated port list; null means any port, "" means the originating port only.
    private String portList;
    private boolean secure;
    private String value;
    private int version = 1;
/**
* Creates a new cookie.
*
* @param name a non-empty string that contains only printable ASCII, no
* commas or semicolons, and is not prefixed with {@code $}. May not be
* an HTTP attribute name.
* @param value an opaque value from the HTTP server.
* @throws IllegalArgumentException if {@code name} is invalid.
*/
public HttpCookie(String name, String value) {
String ntrim = name.trim(); // erase leading and trailing whitespace
if (!isValidName(ntrim)) {
throw new IllegalArgumentException();
}
this.name = ntrim;
this.value = value;
}
private boolean isValidName(String n) {
// name cannot be empty or begin with '$' or equals the reserved
// attributes (case-insensitive)
boolean isValid = !(n.length() == 0 || n.startsWith("$") || RESERVED_NAMES.contains(n.toLowerCase()));
if (isValid) {
for (int i = 0; i < n.length(); i++) {
char nameChar = n.charAt(i);
// name must be ASCII characters and cannot contain ';', ',' and
// whitespace
if (nameChar < 0
|| nameChar >= 127
|| nameChar == ';'
|| nameChar == ','
|| (Character.isWhitespace(nameChar) && nameChar != ' ')) {
isValid = false;
break;
}
}
}
return isValid;
}
    /**
     * Returns the {@code Comment} attribute.
     */
    public String getComment() {
        return comment;
    }
    /**
     * Returns the value of {@code CommentURL} attribute.
     */
    public String getCommentURL() {
        return commentURL;
    }
    /**
     * Returns the {@code Discard} attribute.
     */
    public boolean getDiscard() {
        return discard;
    }
    /**
     * Returns the {@code Domain} attribute.
     */
    public String getDomain() {
        return domain;
    }
    /**
     * Returns the {@code Max-Age} attribute, in delta-seconds. The default of
     * -1 means the attribute is unset and the cookie persists until browser
     * shutdown.
     */
    public long getMaxAge() {
        return maxAge;
    }
    /**
     * Returns the name of this cookie.
     */
    public String getName() {
        return name;
    }
    /**
     * Returns the {@code Path} attribute. This cookie is visible to all
     * subpaths.
     */
    public String getPath() {
        return path;
    }
    /**
     * Returns the {@code Port} attribute, usually containing comma-separated
     * port numbers. A null port indicates that the cookie may be sent to any
     * port. The empty string indicates that the cookie should only be sent to
     * the port of the originating request.
     */
    public String getPortlist() {
        return portList;
    }
    /**
     * Returns the {@code Secure} attribute.
     */
    public boolean getSecure() {
        return secure;
    }
    /**
     * Returns the value of this cookie.
     */
    public String getValue() {
        return value;
    }
    /**
     * Returns the version of this cookie. Either 0 (Netscape) or 1 (RFC 2109/2965).
     */
    public int getVersion() {
        return version;
    }
/**
* Returns true if this cookie's Max-Age is 0.
*/
public boolean hasExpired() {
// -1 indicates the cookie will persist until browser shutdown
// so the cookie is not expired.
if (maxAge == -1l) {
return false;
}
boolean expired = false;
if (maxAge <= 0l) {
expired = true;
}
return expired;
}
    /**
     * Set the {@code Comment} attribute of this cookie.
     */
    public void setComment(String comment) {
        this.comment = comment;
    }
    /**
     * Set the {@code CommentURL} attribute of this cookie.
     */
    public void setCommentURL(String commentURL) {
        this.commentURL = commentURL;
    }
    /**
     * Set the {@code Discard} attribute of this cookie.
     */
    public void setDiscard(boolean discard) {
        this.discard = discard;
    }
/**
* Set the {@code Domain} attribute of this cookie. HTTP clients send
* cookies only to matching domains.
*/
public void setDomain(String pattern) {
domain = pattern == null ? null : pattern.toLowerCase();
}
    /**
     * Sets the {@code Max-Age} attribute of this cookie, in delta-seconds.
     */
    public void setMaxAge(long deltaSeconds) {
        maxAge = deltaSeconds;
    }
    // Converts an absolute expiry instant into a Max-Age offset from now.
    private void setExpires(Date expires) {
        maxAge = (expires.getTime() - System.currentTimeMillis()) / 1000;
    }
    /**
     * Set the {@code Path} attribute of this cookie. HTTP clients send cookies
     * to this path and its subpaths.
     */
    public void setPath(String path) {
        this.path = path;
    }
    /**
     * Set the {@code Port} attribute of this cookie.
     */
    public void setPortlist(String portList) {
        this.portList = portList;
    }
    /**
     * Sets the {@code Secure} attribute of this cookie.
     */
    public void setSecure(boolean secure) {
        this.secure = secure;
    }
    /**
     * Sets the opaque value of this cookie.
     */
    public void setValue(String value) {
        // FIXME: According to spec, version 0 cookie value does not allow many
        // symbols. But RI does not implement it. Follow RI temporarily.
        this.value = value;
    }
    /**
     * Sets the {@code Version} attribute of the cookie.
     *
     * @throws IllegalArgumentException if v is neither 0 nor 1
     */
    public void setVersion(int v) {
        if (v != 0 && v != 1) {
            throw new IllegalArgumentException();
        }
        version = v;
    }
    /** Returns a shallow copy of this cookie. */
    @Override public Object clone() {
        try {
            return super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new AssertionError();
        }
    }
    /**
     * Returns true if {@code object} is a cookie with the same domain, name and
     * path. Domain and name use case-insensitive comparison; path uses a
     * case-sensitive comparison.
     */
    @Override public boolean equals(Object object) {
        if (object == this) {
            return true;
        }
        if (object instanceof HttpCookie) {
            HttpCookie that = (HttpCookie) object;
            // Identity is the (name, domain, path) triple; value and the other
            // attributes are excluded from the comparison.
            return name.equalsIgnoreCase(that.getName())
                    && (domain != null ? domain.equalsIgnoreCase(that.domain) : that.domain == null)
                    && Objects.equal(path, that.path);
        }
        return false;
    }
/**
* Returns the hash code of this HTTP cookie: <pre> {@code
* name.toLowerCase().hashCode()
* + (domain == null ? 0 : domain.toLowerCase().hashCode())
* + (path == null ? 0 : path.hashCode())
* }</pre>
*/
@Override public int hashCode() {
return name.toLowerCase().hashCode()
+ (domain == null ? 0 : domain.toLowerCase().hashCode())
+ (path == null ? 0 : path.hashCode());
}
    /**
     * Returns a string representing this cookie in the format used by the
     * {@code Cookie} header line in an HTTP request.
     */
    @Override public String toString() {
        // Version 0 (Netscape) cookies serialize as a bare name=value pair.
        if (version == 0) {
            return name + "=" + value;
        }
        // Version 1 cookies quote the value and append $-prefixed attributes.
        StringBuilder result = new StringBuilder()
                .append(name)
                .append("=")
                .append("\"")
                .append(value)
                .append("\"");
        appendAttribute(result, "Path", path);
        appendAttribute(result, "Domain", domain);
        appendAttribute(result, "Port", portList);
        return result.toString();
    }
private void appendAttribute(StringBuilder builder, String name, String value) {
if (value != null && builder != null) {
builder.append(";$");
builder.append(name);
builder.append("=\"");
builder.append(value);
builder.append("\"");
}
}
}
| apache-2.0 |
android-ia/platform_tools_idea | java/java-psi-impl/src/com/intellij/psi/impl/source/PsiReceiverParameterImpl.java | 1274 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiReceiverParameter;
import com.intellij.psi.impl.java.stubs.JavaStubElementTypes;
import com.intellij.psi.impl.java.stubs.PsiParameterStub;
import org.jetbrains.annotations.NotNull;
/**
 * PSI node for a receiver parameter, backed either by a stub (indexed code)
 * or by an AST node (parsed source). Both constructors delegate to
 * {@link PsiParameterImpl}; the stub variant tags itself with the
 * {@code RECEIVER_PARAMETER} stub element type.
 */
public class PsiReceiverParameterImpl extends PsiParameterImpl implements PsiReceiverParameter {
  public PsiReceiverParameterImpl(@NotNull PsiParameterStub stub) {
    super(stub, JavaStubElementTypes.RECEIVER_PARAMETER);
  }
  public PsiReceiverParameterImpl(@NotNull ASTNode node) {
    super(node);
  }
  @Override
  public String toString() {
    // Debug representation used in PSI tree dumps.
    return "PsiReceiverParameter";
  }
}
| apache-2.0 |
porcelli-forks/kie-wb-common | kie-wb-common-services/kie-wb-common-data-modeller-core/src/main/java/org/kie/workbench/common/services/datamodeller/driver/impl/ModuleDataModelOracleUtils.java | 10918 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.services.datamodeller.driver.impl;
import java.lang.reflect.Modifier;
import java.util.Map;
import org.kie.soup.project.datamodel.oracle.ModelField;
import org.kie.soup.project.datamodel.oracle.ModuleDataModelOracle;
import org.kie.soup.project.datamodel.oracle.TypeSource;
import org.kie.workbench.common.services.datamodel.backend.server.DataModelOracleUtilities;
import org.kie.workbench.common.services.datamodeller.core.DataModel;
import org.kie.workbench.common.services.datamodeller.core.DataObject;
import org.kie.workbench.common.services.datamodeller.core.JavaEnum;
import org.kie.workbench.common.services.datamodeller.core.ObjectSource;
import org.kie.workbench.common.services.datamodeller.core.Visibility;
import org.kie.workbench.common.services.datamodeller.core.impl.JavaEnumImpl;
import org.kie.workbench.common.services.datamodeller.driver.ModelDriverException;
import org.kie.workbench.common.services.datamodeller.util.DriverUtils;
import org.kie.workbench.common.services.datamodeller.util.NamingUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ModuleDataModelOracleUtils {
private static final Logger logger = LoggerFactory.getLogger(ModuleDataModelOracleUtils.class);
public static void loadExternalDependencies(DataModel dataModel,
ModuleDataModelOracle moduleDataModelOracle,
ClassLoader classLoader) throws ModelDriverException {
String[] factTypes = DataModelOracleUtilities.getFactTypes(moduleDataModelOracle);
ObjectSource source;
if (factTypes != null) {
for (int i = 0; i < factTypes.length; i++) {
source = factSource(moduleDataModelOracle,
factTypes[i]);
if (source != null && ObjectSource.DEPENDENCY.equals(source)) {
addType(dataModel,
moduleDataModelOracle,
factTypes[i],
classLoader);
}
}
}
}
private static void addType(DataModel dataModel,
ModuleDataModelOracle oracleDataModel,
String factType,
ClassLoader classLoader) throws ModelDriverException {
ClassMetadata classMetadata = readClassMetadata(factType,
classLoader);
if (classMetadata != null && !classMetadata.isMemberClass() && !classMetadata.isAnonymousClass()
&& !classMetadata.isLocalClass()) {
if (classMetadata.isEnumClass()) {
addEnumType(dataModel,
factType,
classMetadata);
} else {
addDataObjectType(dataModel,
oracleDataModel,
factType,
classMetadata);
}
}
}
private static void addDataObjectType(DataModel dataModel,
ModuleDataModelOracle oracleDataModel,
String factType,
ClassMetadata classMetadata) throws ModelDriverException {
String superClass = DataModelOracleUtilities.getSuperType(oracleDataModel,
factType);
Visibility visibility = DriverUtils.buildVisibility(classMetadata.getModifiers());
DataObject dataObject;
logger.debug("Adding dataObjectType: " + factType + ", to dataModel: " + dataModel +
", from oracleDataModel: " + oracleDataModel);
dataObject = dataModel.addDataObject(factType,
visibility,
Modifier.isAbstract(classMetadata.getModifiers()),
Modifier.isFinal(classMetadata.getModifiers()),
ObjectSource.DEPENDENCY);
dataObject.setSuperClassName(superClass);
Map<String, ModelField[]> fields = oracleDataModel.getModuleModelFields();
if (fields != null) {
ModelField[] factFields = fields.get(factType);
ModelField field;
if (factFields != null && factFields.length > 0) {
for (int j = 0; j < factFields.length; j++) {
field = factFields[j];
if (isLoadableField(field)) {
if (field.getType().equals("Collection")) {
//read the correct bag and item classes.
String bag = DataModelOracleUtilities.getFieldClassName(oracleDataModel,
factType,
field.getName());
String itemsClass = DataModelOracleUtilities.getParametricFieldType(oracleDataModel,
factType,
field.getName());
if (itemsClass == null) {
//if we don't know the items class, the property will be managed as a simple property.
dataObject.addProperty(field.getName(),
bag);
} else {
dataObject.addProperty(field.getName(),
itemsClass,
true,
bag);
}
} else {
dataObject.addProperty(field.getName(),
field.getClassName());
}
}
}
}
} else {
logger.debug("No fields found for factTye: " + factType);
}
}
private static void addEnumType(DataModel dataModel,
String factType,
ClassMetadata classMetadata) {
String packageName = NamingUtils.extractPackageName(factType);
String className = NamingUtils.extractClassName(factType);
Visibility visibility = DriverUtils.buildVisibility(classMetadata.getModifiers());
JavaEnum javaEnum = new JavaEnumImpl(packageName,
className,
visibility);
dataModel.addJavaEnum(javaEnum,
ObjectSource.DEPENDENCY);
}
private static ClassMetadata readClassMetadata(String factType,
ClassLoader classLoader) {
try {
Class _class = classLoader.loadClass(factType);
return new ClassMetadata(_class.getModifiers(),
_class.isMemberClass(),
_class.isLocalClass(),
_class.isAnonymousClass(),
_class.isEnum());
} catch (ClassNotFoundException e) {
logger.error("It was not possible to read class metadata for class: " + factType);
}
return null;
}
private static ObjectSource factSource(ModuleDataModelOracle oracleDataModel,
String factType) {
TypeSource oracleType = DataModelOracleUtilities.getTypeSource(oracleDataModel,
factType);
if (TypeSource.JAVA_PROJECT.equals(oracleType)) {
return ObjectSource.INTERNAL;
} else if (TypeSource.JAVA_DEPENDENCY.equals(oracleType)) {
return ObjectSource.DEPENDENCY;
}
return null;
}
/**
* Indicates if this field should be loaded or not.
* Some fields like a filed with name "this" shouldn't be loaded.
*/
private static boolean isLoadableField(ModelField field) {
return (field.getOrigin().equals(ModelField.FIELD_ORIGIN.DECLARED));
}
static class ClassMetadata {
int modifiers;
boolean memberClass;
boolean localClass;
boolean anonymousClass;
boolean enumClass;
public ClassMetadata(int modifiers,
boolean memberClass,
boolean localClass,
boolean anonymousClass,
boolean enumClass) {
this.modifiers = modifiers;
this.memberClass = memberClass;
this.localClass = localClass;
this.anonymousClass = anonymousClass;
this.enumClass = enumClass;
}
public int getModifiers() {
return modifiers;
}
public void setModifiers(int modifiers) {
this.modifiers = modifiers;
}
public boolean isMemberClass() {
return memberClass;
}
public void setMemberClass(boolean memberClass) {
this.memberClass = memberClass;
}
public boolean isLocalClass() {
return localClass;
}
public void setLocalClass(boolean localClass) {
this.localClass = localClass;
}
public boolean isAnonymousClass() {
return anonymousClass;
}
public void setAnonymousClass(boolean anonymousClass) {
this.anonymousClass = anonymousClass;
}
public boolean isEnumClass() {
return enumClass;
}
public void setEnumClass(boolean enumClass) {
this.enumClass = enumClass;
}
}
} | apache-2.0 |
spockframework/spock | spock-core/src/main/java/spock/util/matcher/HamcrestSupport.java | 3220 | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spock.util.matcher;
import org.hamcrest.Matcher;
import org.spockframework.runtime.InvalidSpecException;
public class HamcrestSupport {

  /**
   * Matches a value against a (Hamcrest) {@link Matcher}. May only appear where Spock
   * expects a condition: in an expect-block, in a then-block, or after the
   * {@code assert} keyword.
   *
   * <p>Basic example:
   *
   * <pre>
   * import static spock.util.matcher.HamcrestSupport.that
   * import static org.hamcrest.CoreMatchers.equalTo // ships with JUnit
   *
   * def foo = 42
   *
   * expect:
   * that(foo, equalTo(42))
   * </pre>
   *
   * <p>Spock also understands the shorter form {@code foo equalTo(42)}, but that
   * syntax cannot follow an explicit {@code assert} and may be less IDE-friendly,
   * which is why this method is offered as an alternative.
   *
   * <h3>When would I use matchers?</h3>
   *
   * <p>Thanks to Spock's diagnostic messages and Groovy's expressivity, matchers are
   * needed less often than in, say, JUnit tests written in Java. They shine for more
   * complex conditions (possibly reused across a project), combining Spock's
   * condition diagnostics with the custom failure messages of Hamcrest matchers.
   *
   * <h3>Third-party matchers</h3>
   *
   * <p>The matchers bundled with JUnit are of limited use on their own; matchers from
   * Hamcrest (http://code.google.com/p/hamcrest/) or other libraries are the usual
   * choice. Both Hamcrest 1.1 and 1.2 are supported, and custom matcher libraries
   * tailored to a project's needs work as well.
   *
   * @param value the actual value
   * @param matcher a matcher describing the expected value
   * @param <T> the actual value's type
   */
  @SuppressWarnings("UnusedDeclaration")
  public static <T> void that(T value, Matcher<? super T> matcher) {
    // A direct (non-condition) invocation is always an error.
    throw new InvalidSpecException("that() can only be used where a condition is expected");
  }

  /**
   * Alias for {@link #that(Object, org.hamcrest.Matcher)} intended for use in then-blocks.
   *
   * @param value the actual value
   * @param matcher a matcher describing the expected value
   * @param <T> the actual value's type
   */
  @SuppressWarnings("UnusedDeclaration")
  public static <T> void expect(T value, Matcher<? super T> matcher) {
    // Delegates so both names share the same (non-)behavior.
    that(value, matcher);
  }
}
| apache-2.0 |
siosio/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/previewfeature/errors/AccessInnerClassInsidePreviewFeatureClass.java | 2935 |
import <error descr="com.mycom.FirstPreviewFeature is a preview API and may be removed in a future release">com.mycom.FirstPreviewFeature</error>;
import <warning descr="com.mycom.FirstPreviewFeatureReflective is a preview API and may be removed in a future release">com.mycom.FirstPreviewFeatureReflective</warning>;
// IntelliJ inspection test fixture (not compilable Java as-is): the inline
// <error>/<warning> tags encode the "preview API" highlighting the inspection is
// expected to produce, and are stripped by the test harness before analysis.
// NOTE(review): assumes the harness matches highlights via these inline markers
// rather than absolute file offsets — confirm before adding further lines here.
class Main {
  public Main(String value) { }
  {
    var a = new <error descr="com.mycom.FirstPreviewFeature is a preview API and may be removed in a future release">FirstPreviewFeature</error>.Outer.Inner();
    var b = new <warning descr="com.mycom.FirstPreviewFeatureReflective.Outer.Inner is a preview API and may be removed in a future release"><warning descr="com.mycom.FirstPreviewFeatureReflective.Outer is a preview API and may be removed in a future release"><warning descr="com.mycom.FirstPreviewFeatureReflective is a preview API and may be removed in a future release">FirstPreviewFeatureReflective</warning>.Outer</warning>.Inner</warning>();
    <error descr="com.mycom.FirstPreviewFeature.Outer.Inner#z is a preview API and may be removed in a future release">a.z</error>();
    <warning descr="com.mycom.FirstPreviewFeatureReflective.Outer.Inner#z is a preview API and may be removed in a future release">b.z</warning>();
    Runnable r1 = <error descr="com.mycom.FirstPreviewFeature.Outer.Inner#z is a preview API and may be removed in a future release">a::z</error>;
    Runnable r2 = <warning descr="com.mycom.FirstPreviewFeatureReflective.Outer.Inner#z is a preview API and may be removed in a future release">b::z</warning>;
    new Main(<error descr="com.mycom.FirstPreviewFeature is a preview API and may be removed in a future release">FirstPreviewFeature</error>.KEY);
    new Main(<warning descr="com.mycom.FirstPreviewFeatureReflective#KEY is a preview API and may be removed in a future release"><warning descr="com.mycom.FirstPreviewFeatureReflective is a preview API and may be removed in a future release">FirstPreviewFeatureReflective</warning>.KEY</warning>);
    new Main(<error descr="com.mycom.FirstPreviewFeature is a preview API and may be removed in a future release">FirstPreviewFeature</error>.KEY + "");
    new Main(<warning descr="com.mycom.FirstPreviewFeatureReflective#KEY is a preview API and may be removed in a future release"><warning descr="com.mycom.FirstPreviewFeatureReflective is a preview API and may be removed in a future release">FirstPreviewFeatureReflective</warning>.KEY</warning> + "");
    new Main("" + <error descr="com.mycom.FirstPreviewFeature is a preview API and may be removed in a future release">FirstPreviewFeature</error>.KEY);
    new Main("" + <warning descr="com.mycom.FirstPreviewFeatureReflective#KEY is a preview API and may be removed in a future release"><warning descr="com.mycom.FirstPreviewFeatureReflective is a preview API and may be removed in a future release">FirstPreviewFeatureReflective</warning>.KEY</warning>);
  }
}
romartin/kie-wb-common | kie-wb-common-services/kie-wb-common-services-backend/src/main/java/org/kie/workbench/common/services/backend/builder/core/ObservableProjectImportsFile.java | 1163 | /*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.services.backend.builder.core;
import org.guvnor.common.services.builder.ResourceChangeObservableFile;
import org.uberfire.backend.vfs.Path;
/**
 * Marks {@code project.imports} as an observable file: changes to it invalidate
 * the DMO cache.
 */
public class ObservableProjectImportsFile implements ResourceChangeObservableFile {

    static final String FILENAME = "project.imports";

    /**
     * @param path the changed resource.
     * @return {@code true} when the resource's file name is {@code project.imports}.
     */
    @Override
    public boolean accept(final Path path) {
        // Constant-first comparison: no NPE if the path reports a null file name.
        return FILENAME.equals(path.getFileName());
    }
}
| apache-2.0 |
metamx/druid | processing/src/test/java/io/druid/collections/spatial/RTreeTest.java | 3497 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.collections.spatial;
import io.druid.collections.bitmap.BitmapFactory;
import io.druid.collections.bitmap.ConciseBitmapFactory;
import io.druid.collections.bitmap.RoaringBitmapFactory;
import io.druid.collections.spatial.split.LinearGutmanSplitStrategy;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.Random;
/**
 * Insertion behaviour of {@link RTree} over both bitmap implementations
 * (CONCISE and Roaring): bounding-box maintenance, duplicates, and node splits.
 */
public class RTreeTest
{
  private RTree conciseTree;
  private RTree roaringTree;

  @Before
  public void setUp() throws Exception
  {
    // Two-dimensional trees, split between 0 and 50 entries per node.
    BitmapFactory conciseFactory = new ConciseBitmapFactory();
    conciseTree = new RTree(2, new LinearGutmanSplitStrategy(0, 50, conciseFactory), conciseFactory);

    BitmapFactory roaringFactory = new RoaringBitmapFactory();
    roaringTree = new RTree(2, new LinearGutmanSplitStrategy(0, 50, roaringFactory), roaringFactory);
  }

  @Test
  public void testInsertNoSplit()
  {
    float[] firstPoint = new float[]{5, 5};
    conciseTree.insert(firstPoint, 1);
    // With a single entry, the root's bounding box collapses to that point.
    Assert.assertTrue(Arrays.equals(firstPoint, conciseTree.getRoot().getMinCoordinates()));
    Assert.assertTrue(Arrays.equals(firstPoint, conciseTree.getRoot().getMaxCoordinates()));

    conciseTree.insert(new float[]{6, 7}, 2);
    conciseTree.insert(new float[]{1, 3}, 3);
    conciseTree.insert(new float[]{10, 4}, 4);
    conciseTree.insert(new float[]{8, 2}, 5);

    Assert.assertEquals(conciseTree.getRoot().getChildren().size(), 5);

    // The root box must now span all five points: [1,2]..[10,7], area 9 * 5 = 45.
    Assert.assertTrue(Arrays.equals(new float[]{1, 2}, conciseTree.getRoot().getMinCoordinates()));
    Assert.assertTrue(Arrays.equals(new float[]{10, 7}, conciseTree.getRoot().getMaxCoordinates()));
    Assert.assertEquals(conciseTree.getRoot().getArea(), 45.0d);
  }

  @Test
  public void testInsertDuplicatesNoSplit()
  {
    // Identical points must all be kept as separate children.
    for (int count = 0; count < 3; count++) {
      conciseTree.insert(new float[]{1, 1}, 1);
    }
    Assert.assertEquals(conciseTree.getRoot().getChildren().size(), 3);
  }

  @Test
  public void testInsertDuplicatesNoSplitRoaring()
  {
    for (int count = 0; count < 3; count++) {
      roaringTree.insert(new float[]{1, 1}, 1);
    }
    Assert.assertEquals(roaringTree.getRoot().getChildren().size(), 3);
  }

  @Test
  public void testSplitOccurs()
  {
    // 100 random points exceed the 50-entry node limit, forcing at least one split.
    Random random = new Random();
    for (int id = 0; id < 100; id++) {
      conciseTree.insert(new float[]{random.nextFloat(), random.nextFloat()}, id);
    }
    Assert.assertTrue(conciseTree.getRoot().getChildren().size() > 1);
  }

  @Test
  public void testSplitOccursRoaring()
  {
    Random random = new Random();
    for (int id = 0; id < 100; id++) {
      roaringTree.insert(new float[]{random.nextFloat(), random.nextFloat()}, id);
    }
    Assert.assertTrue(roaringTree.getRoot().getChildren().size() > 1);
  }
}
| apache-2.0 |
siosio/intellij-community | platform/lang-impl/src/com/intellij/packageDependencies/actions/AnalyzeDependenciesHandler.java | 2158 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.packageDependencies.actions;
import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.openapi.project.Project;
import com.intellij.packageDependencies.DependenciesBuilder;
import com.intellij.packageDependencies.ForwardDependenciesBuilder;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Handler for the "Analyze Dependencies" action: runs a forward-dependency
 * analysis over one or more analysis scopes and shows the result in the
 * dependencies tool window.
 */
public class AnalyzeDependenciesHandler extends DependenciesHandlerBase {
  // Passed through to ForwardDependenciesBuilder; presumably bounds how deep
  // transitive dependencies are followed — TODO confirm against that builder.
  private final int myTransitiveBorder;

  public AnalyzeDependenciesHandler(@NotNull Project project, List<? extends AnalysisScope> scopes, int transitiveBorder, Set<PsiFile> excluded) {
    super(project, scopes, excluded);
    myTransitiveBorder = transitiveBorder;
  }

  // Convenience overload: a single scope and no excluded files.
  public AnalyzeDependenciesHandler(final Project project, final AnalysisScope scope, final int transitiveBorder) {
    this(project, Collections.singletonList(scope), transitiveBorder, new HashSet<>());
  }

  @Override
  protected DependenciesBuilder createDependenciesBuilder(AnalysisScope scope) {
    return new ForwardDependenciesBuilder(myProject, scope, myTransitiveBorder);
  }

  @Override
  protected String getPanelDisplayName(final AnalysisScope scope) {
    return CodeInsightBundle.message("package.dependencies.toolwindow.title", scope.getDisplayName());
  }

  @Override
  protected String getProgressTitle() {
    return CodeInsightBundle.message("package.dependencies.progress.title");
  }
}
dushmis/closure-compiler | test/com/google/javascript/jscomp/RemoveUnusedPrototypePropertiesTest.java | 18250 | /*
* Copyright 2006 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
/**
* Tests for {@link RemoveUnusedPrototypeProperties}.
*
* @author nicksantos@google.com (Nick Santos)
*/
public final class RemoveUnusedPrototypePropertiesTest extends CompilerTestCase {
private static final String EXTERNS =
"IFoo.prototype.bar; var mExtern; mExtern.bExtern; mExtern['cExtern'];";
private boolean canRemoveExterns = false;
private boolean anchorUnusedVars = false;
public RemoveUnusedPrototypePropertiesTest() {
super(EXTERNS);
}
@Override
protected CompilerPass getProcessor(Compiler compiler) {
return new RemoveUnusedPrototypeProperties(compiler,
canRemoveExterns, anchorUnusedVars);
}
@Override
public void setUp() {
anchorUnusedVars = false;
canRemoveExterns = false;
}
public void testAnalyzePrototypeProperties() {
// Basic removal for prototype properties
test("function e(){}" +
"e.prototype.a = function(){};" +
"e.prototype.b = function(){};" +
"var x = new e; x.a()",
"function e(){}" +
"e.prototype.a = function(){};" +
"var x = new e; x.a()");
// Basic removal for prototype replacement
test("function e(){}" +
"e.prototype = {a: function(){}, b: function(){}};" +
"var x=new e; x.a()",
"function e(){}" +
"e.prototype = {a: function(){}};" +
"var x = new e; x.a()");
// Unused properties that were referenced in the externs file should not be
// removed
test("function e(){}" +
"e.prototype.a = function(){};" +
"e.prototype.bExtern = function(){};" +
"var x = new e;x.a()",
"function e(){}" +
"e.prototype.a = function(){};" +
"e.prototype.bExtern = function(){};" +
"var x = new e; x.a()");
test("function e(){}" +
"e.prototype = {a: function(){}, bExtern: function(){}};" +
"var x = new e; x.a()",
"function e(){}" +
"e.prototype = {a: function(){}, bExtern: function(){}};" +
"var x = new e; x.a()");
}
public void testAliasing1() {
// Aliasing a property is not enough for it to count as used
test("function e(){}" +
"e.prototype.method1 = function(){};" +
"e.prototype.method2 = function(){};" +
// aliases
"e.prototype.alias1 = e.prototype.method1;" +
"e.prototype.alias2 = e.prototype.method2;" +
"var x = new e; x.method1()",
"function e(){}" +
"e.prototype.method1 = function(){};" +
"var x = new e; x.method1()");
// Using an alias should keep it
test("function e(){}" +
"e.prototype.method1 = function(){};" +
"e.prototype.method2 = function(){};" +
// aliases
"e.prototype.alias1 = e.prototype.method1;" +
"e.prototype.alias2 = e.prototype.method2;" +
"var x=new e; x.alias1()",
"function e(){}" +
"e.prototype.method1 = function(){};" +
"e.prototype.alias1 = e.prototype.method1;" +
"var x = new e; x.alias1()");
}
public void testAliasing2() {
// Aliasing a property is not enough for it to count as used
test("function e(){}" +
"e.prototype.method1 = function(){};" +
// aliases
"e.prototype.alias1 = e.prototype.method1;" +
"(new e).method1()",
"function e(){}" +
"e.prototype.method1 = function(){};" +
"(new e).method1()");
// Using an alias should keep it
test("function e(){}" +
"e.prototype.method1 = function(){};" +
// aliases
"e.prototype.alias1 = e.prototype.method1;" +
"(new e).alias1()",
"function e(){}" +
"e.prototype.method1 = function(){};" +
"e.prototype.alias1 = e.prototype.method1;" +
"(new e).alias1()");
}
public void testAliasing3() {
// Aliasing a property is not enough for it to count as used
test("function e(){}" +
"e.prototype.method1 = function(){};" +
"e.prototype.method2 = function(){};" +
// aliases
"e.prototype['alias1'] = e.prototype.method1;" +
"e.prototype['alias2'] = e.prototype.method2;",
"function e(){}" +
"e.prototype.method1=function(){};" +
"e.prototype.method2=function(){};" +
"e.prototype[\"alias1\"]=e.prototype.method1;" +
"e.prototype[\"alias2\"]=e.prototype.method2;");
}
public void testAliasing4() {
// Aliasing a property is not enough for it to count as used
test("function e(){}" +
"e.prototype['alias1'] = e.prototype.method1 = function(){};" +
"e.prototype['alias2'] = e.prototype.method2 = function(){};",
"function e(){}" +
"e.prototype[\"alias1\"]=e.prototype.method1=function(){};" +
"e.prototype[\"alias2\"]=e.prototype.method2=function(){};");
}
public void testAliasing5() {
// An exported alias must preserved any referenced values in the
// referenced function.
test("function e(){}" +
"e.prototype.method1 = function(){this.method2()};" +
"e.prototype.method2 = function(){};" +
// aliases
"e.prototype['alias1'] = e.prototype.method1;",
"function e(){}" +
"e.prototype.method1=function(){this.method2()};" +
"e.prototype.method2=function(){};" +
"e.prototype[\"alias1\"]=e.prototype.method1;");
}
public void testAliasing6() {
// An exported alias must preserved any referenced values in the
// referenced function.
test("function e(){}" +
"e.prototype.method1 = function(){this.method2()};" +
"e.prototype.method2 = function(){};" +
// aliases
"window['alias1'] = e.prototype.method1;",
"function e(){}" +
"e.prototype.method1=function(){this.method2()};" +
"e.prototype.method2=function(){};" +
"window['alias1']=e.prototype.method1;");
}
public void testAliasing7() {
// An exported alias must preserved any referenced values in the
// referenced function.
testSame("function e(){}" +
"e.prototype['alias1'] = e.prototype.method1 = " +
"function(){this.method2()};" +
"e.prototype.method2 = function(){};");
}
public void testStatementRestriction() {
test("function e(){}" +
"var x = e.prototype.method1 = function(){};" +
"var y = new e; x()",
"function e(){}" +
"var x = e.prototype.method1 = function(){};" +
"var y = new e; x()");
}
public void testExportedMethodsByNamingConvention() {
String classAndItsMethodAliasedAsExtern =
"function Foo() {}" +
"Foo.prototype.method = function() {};" + // not removed
"Foo.prototype.unused = function() {};" + // removed
"var _externInstance = new Foo();" +
"Foo.prototype._externMethod = Foo.prototype.method"; // aliased here
String compiled =
"function Foo(){}" +
"Foo.prototype.method = function(){};" +
"var _externInstance = new Foo;" +
"Foo.prototype._externMethod = Foo.prototype.method";
test(classAndItsMethodAliasedAsExtern, compiled);
}
public void testMethodsFromExternsFileNotExported() {
canRemoveExterns = true;
String classAndItsMethodAliasedAsExtern =
"function Foo() {}" +
"Foo.prototype.bar_ = function() {};" +
"Foo.prototype.unused = function() {};" +
"var instance = new Foo;" +
"Foo.prototype.bar = Foo.prototype.bar_";
String compiled =
"function Foo(){}" +
"var instance = new Foo;";
test(classAndItsMethodAliasedAsExtern, compiled);
}
public void testExportedMethodsByNamingConventionAlwaysExported() {
canRemoveExterns = true;
String classAndItsMethodAliasedAsExtern =
"function Foo() {}" +
"Foo.prototype.method = function() {};" + // not removed
"Foo.prototype.unused = function() {};" + // removed
"var _externInstance = new Foo();" +
"Foo.prototype._externMethod = Foo.prototype.method"; // aliased here
String compiled =
"function Foo(){}" +
"Foo.prototype.method = function(){};" +
"var _externInstance = new Foo;" +
"Foo.prototype._externMethod = Foo.prototype.method";
test(classAndItsMethodAliasedAsExtern, compiled);
}
public void testExternMethodsFromExternsFile() {
String classAndItsMethodAliasedAsExtern =
"function Foo() {}" +
"Foo.prototype.bar_ = function() {};" + // not removed
"Foo.prototype.unused = function() {};" + // removed
"var instance = new Foo;" +
"Foo.prototype.bar = Foo.prototype.bar_"; // aliased here
String compiled =
"function Foo(){}" +
"Foo.prototype.bar_ = function(){};" +
"var instance = new Foo;" +
"Foo.prototype.bar = Foo.prototype.bar_";
test(classAndItsMethodAliasedAsExtern, compiled);
}
public void testPropertyReferenceGraph() {
// test a prototype property graph that looks like so:
// b -> a, c -> b, c -> a, d -> c, e -> a, e -> f
String constructor = "function Foo() {}";
String defA =
"Foo.prototype.a = function() { Foo.superClass_.a.call(this); };";
String defB = "Foo.prototype.b = function() { this.a(); };";
String defC = "Foo.prototype.c = function() { " +
"Foo.superClass_.c.call(this); this.b(); this.a(); };";
String defD = "Foo.prototype.d = function() { this.c(); };";
String defE = "Foo.prototype.e = function() { this.a(); this.f(); };";
String defF = "Foo.prototype.f = function() { };";
String fullClassDef = constructor + defA + defB + defC + defD + defE + defF;
// ensure that all prototypes are compiled out if none are used
test(fullClassDef, "");
// make sure that the right prototypes are called for each use
String callA = "(new Foo()).a();";
String callB = "(new Foo()).b();";
String callC = "(new Foo()).c();";
String callD = "(new Foo()).d();";
String callE = "(new Foo()).e();";
String callF = "(new Foo()).f();";
test(fullClassDef + callA, constructor + defA + callA);
test(fullClassDef + callB, constructor + defA + defB + callB);
test(fullClassDef + callC, constructor + defA + defB + defC + callC);
test(fullClassDef + callD, constructor + defA + defB + defC + defD + callD);
test(fullClassDef + callE, constructor + defA + defE + defF + callE);
test(fullClassDef + callF, constructor + defF + callF);
test(fullClassDef + callA + callC,
constructor + defA + defB + defC + callA + callC);
test(fullClassDef + callB + callC,
constructor + defA + defB + defC + callB + callC);
test(fullClassDef + callA + callB + callC,
constructor + defA + defB + defC + callA + callB + callC);
}
public void testPropertiesDefinedWithGetElem() {
testSame("function Foo() {} Foo.prototype['elem'] = function() {};");
testSame("function Foo() {} Foo.prototype[1 + 1] = function() {};");
}
public void testQuotedProperties() {
// Basic removal for prototype replacement
testSame("function e(){}" +
"e.prototype = {'a': function(){}, 'b': function(){}};");
}
public void testNeverRemoveImplicitlyUsedProperties() {
testSame("function Foo() {} " +
"Foo.prototype.length = 3; " +
"Foo.prototype.toString = function() { return 'Foo'; }; " +
"Foo.prototype.valueOf = function() { return 'Foo'; }; ");
}
public void testPropertyDefinedInBranch() {
test("function Foo() {} if (true) Foo.prototype.baz = function() {};",
"if (true);");
test("function Foo() {} while (true) Foo.prototype.baz = function() {};",
"while (true);");
test("function Foo() {} for (;;) Foo.prototype.baz = function() {};",
"for (;;);");
test("function Foo() {} do Foo.prototype.baz = function() {}; while(true);",
"do; while(true);");
}
public void testUsingAnonymousObjectsToDefeatRemoval() {
String constructor = "function Foo() {}";
String declaration = constructor + "Foo.prototype.baz = 3;";
test(declaration, "");
testSame(declaration + "var x = {}; x.baz = 5;");
testSame(declaration + "var x = {baz: 5};");
test(declaration + "var x = {'baz': 5};",
"var x = {'baz': 5};");
}
public void testGlobalFunctionsInGraph() {
test(
"var x = function() { (new Foo).baz(); };" +
"var y = function() { x(); };" +
"function Foo() {}" +
"Foo.prototype.baz = function() { y(); };",
"");
}
public void testGlobalFunctionsInGraph2() {
// In this example, Foo.prototype.baz is a global reference to
// Foo, and Foo has a reference to baz. So everything stays in.
// TODO(nicksantos): We should be able to make the graph more fine-grained
// here. Instead of Foo.prototype.bar creating a global reference to Foo,
// it should create a reference from .bar to Foo. That will let us
// compile this away to nothing.
testSame(
"var x = function() { (new Foo).baz(); };" +
"var y = function() { x(); };" +
"function Foo() { this.baz(); }" +
"Foo.prototype.baz = function() { y(); };");
}
public void testGlobalFunctionsInGraph3() {
test(
"var x = function() { (new Foo).baz(); };" +
"var y = function() { x(); };" +
"function Foo() { this.baz(); }" +
"Foo.prototype.baz = function() { x(); };",
"var x = function() { (new Foo).baz(); };" +
"function Foo() { this.baz(); }" +
"Foo.prototype.baz = function() { x(); };");
}
public void testGlobalFunctionsInGraph4() {
test(
"var x = function() { (new Foo).baz(); };" +
"var y = function() { x(); };" +
"function Foo() { Foo.prototype.baz = function() { y(); }; }",
"");
}
public void testGlobalFunctionsInGraph5() {
test(
"function Foo() {}" +
"Foo.prototype.methodA = function() {};" +
"function x() { (new Foo).methodA(); }" +
"Foo.prototype.methodB = function() { x(); };",
"");
anchorUnusedVars = true;
test(
"function Foo() {}" +
"Foo.prototype.methodA = function() {};" +
"function x() { (new Foo).methodA(); }" +
"Foo.prototype.methodB = function() { x(); };",
"function Foo() {}" +
"Foo.prototype.methodA = function() {};" +
"function x() { (new Foo).methodA(); }");
}
public void testGlobalFunctionsInGraph6() {
testSame(
"function Foo() {}" +
"Foo.prototype.methodA = function() {};" +
"function x() { (new Foo).methodA(); }" +
"Foo.prototype.methodB = function() { x(); };" +
"(new Foo).methodB();");
}
public void testGlobalFunctionsInGraph7() {
testSame(
"function Foo() {}" +
"Foo.prototype.methodA = function() {};" +
"this.methodA();");
}
public void testGetterBaseline() {
anchorUnusedVars = true;
test(
"function Foo() {}" +
"Foo.prototype = { " +
" methodA: function() {}," +
" methodB: function() { x(); }" +
"};" +
"function x() { (new Foo).methodA(); }",
"function Foo() {}" +
"Foo.prototype = { " +
" methodA: function() {}" +
"};" +
"function x() { (new Foo).methodA(); }");
}
// Getters in an object-literal prototype: without anchoring, both getters
// and x are removed entirely; with anchorUnusedVars, only the unreferenced
// methodB getter is dropped and the methodA getter (read via
// (new Foo).methodA) is kept.
public void testGetter1() {
  test(
      "function Foo() {}" +
      "Foo.prototype = { " +
      "  get methodA() {}," +
      "  get methodB() { x(); }" +
      "};" +
      "function x() { (new Foo).methodA; }",
      "function Foo() {}" +
      "Foo.prototype = {};");
  anchorUnusedVars = true;
  test(
      "function Foo() {}" +
      "Foo.prototype = { " +
      "  get methodA() {}," +
      "  get methodB() { x(); }" +
      "};" +
      "function x() { (new Foo).methodA; }",
      "function Foo() {}" +
      "Foo.prototype = { " +
      "  get methodA() {}" +
      "};" +
      "function x() { (new Foo).methodA; }");
}
// Getter/setter pairs: the methodA accessor pair is referenced through
// (new Foo).methodA and is kept as a pair; the unreferenced methodB
// getter/setter pair is removed.
public void testGetter2() {
  anchorUnusedVars = true;
  test(
      "function Foo() {}" +
      "Foo.prototype = { " +
      "  get methodA() {}," +
      "  set methodA(a) {}," +
      "  get methodB() { x(); }," +
      "  set methodB(a) { x(); }" +
      "};" +
      "function x() { (new Foo).methodA; }",
      "function Foo() {}" +
      "Foo.prototype = { " +
      "  get methodA() {}," +
      "  set methodA(a) {}" +
      "};" +
      "function x() { (new Foo).methodA; }");
}
// A method assigned through a hook (?:) whose branches call method2/method3
// is still fully removable when method1 itself is never invoked.
public void testHook1() throws Exception {
  test(
      "/** @constructor */ function Foo() {}" +
      "Foo.prototype.method1 = Math.random() ?" +
      "  function() { this.method2(); } : function() { this.method3(); };" +
      "Foo.prototype.method2 = function() {};" +
      "Foo.prototype.method3 = function() {};",
      "");
}
// Same hook assignment as testHook1, but the top-level
// (new Foo()).method1() call keeps all three methods alive (testSame).
public void testHook2() throws Exception {
  testSame(
      "/** @constructor */ function Foo() {}" +
      "Foo.prototype.method1 = Math.random() ?" +
      "  function() { this.method2(); } : function() { this.method3(); };" +
      "Foo.prototype.method2 = function() {};" +
      "Foo.prototype.method3 = function() {};" +
      "(new Foo()).method1();");
}
}
| apache-2.0 |
chromium/chromium | net/android/java/src/org/chromium/net/ThreadStatsUid.java | 2192 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.net;
import android.net.TrafficStats;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* Class to wrap TrafficStats.setThreadStatsUid(int uid) and TrafficStats.clearThreadStatsUid()
* which are hidden and so must be accessed via reflection.
*/
public class ThreadStatsUid {
    // Reference to the hidden TrafficStats.setThreadStatsUid(int uid).
    private static final Method sSetThreadStatsUid;
    // Reference to the hidden TrafficStats.clearThreadStatsUid().
    private static final Method sClearThreadStatsUid;

    // Look up both hidden methods once via reflection at class-load time;
    // fail fast with a RuntimeException if the platform does not expose them.
    static {
        try {
            sSetThreadStatsUid = TrafficStats.class.getMethod("setThreadStatsUid", Integer.TYPE);
            sClearThreadStatsUid = TrafficStats.class.getMethod("clearThreadStatsUid");
        } catch (NoSuchMethodException | SecurityException e) {
            throw new RuntimeException("Unable to get TrafficStats methods", e);
        }
    }

    /** Calls TrafficStats.setThreadStatsUid(uid) */
    public static void set(int uid) {
        try {
            // Pass null for "this" as it's a static method.
            sSetThreadStatsUid.invoke(null, uid);
        } catch (IllegalAccessException | InvocationTargetException e) {
            // Multi-catch: both failure modes are handled identically.
            throw new RuntimeException("TrafficStats.setThreadStatsUid failed", e);
        }
    }

    /** Calls TrafficStats.clearThreadStatsUid() */
    public static void clear() {
        try {
            // Pass null for "this" as it's a static method.
            sClearThreadStatsUid.invoke(null);
        } catch (IllegalAccessException | InvocationTargetException e) {
            throw new RuntimeException("TrafficStats.clearThreadStatsUid failed", e);
        }
    }
}
| bsd-3-clause |
WeRockStar/java-design-patterns | fluentinterface/src/test/java/com/iluwatar/fluentinterface/app/AppTest.java | 1312 | /**
* The MIT License
* Copyright (c) 2014 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.fluentinterface.app;
import org.junit.Test;
public class AppTest {
  // Smoke test: running the fluent-interface demo application end to end
  // must complete without throwing.
  @Test
  public void test() {
    String[] args = {};
    App.main(args);
  }
}
| mit |
simleo/openmicroscopy | components/insight/SRC/org/openmicroscopy/shoola/agents/metadata/util/TreeCellRenderer.java | 5726 | /*
*------------------------------------------------------------------------------
* Copyright (C) 2006-2008 University of Dundee. All rights reserved.
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
*------------------------------------------------------------------------------
*/
package org.openmicroscopy.shoola.agents.metadata.util;
import java.awt.Component;
import java.awt.Font;
import javax.swing.Icon;
import javax.swing.JTree;
import javax.swing.tree.DefaultTreeCellRenderer;
import org.openmicroscopy.shoola.agents.metadata.IconManager;
import org.openmicroscopy.shoola.agents.metadata.MetadataViewerAgent;
import org.openmicroscopy.shoola.agents.metadata.browser.TreeBrowserDisplay;
import org.openmicroscopy.shoola.env.LookupNames;
import omero.gateway.model.DatasetData;
import omero.gateway.model.ExperimenterData;
import omero.gateway.model.ImageData;
import omero.gateway.model.PlateData;
import omero.gateway.model.ProjectData;
import omero.gateway.model.ScreenData;
import omero.gateway.model.TagAnnotationData;
/**
* Renderer of Browser's tree.
*
* @author Jean-Marie Burel
* <a href="mailto:j.burel@dundee.ac.uk">j.burel@dundee.ac.uk</a>
* @author Donald MacDonald
* <a href="mailto:donald@lifesci.dundee.ac.uk">donald@lifesci.dundee.ac.uk</a>
* @version 3.0
* @since OME3.0
*/
public class TreeCellRenderer
    extends DefaultTreeCellRenderer
{

    /** Reference to the <code>Image</code> icon. */
    private static final Icon IMAGE_ICON;

    /** Reference to the <code>Dataset</code> icon. */
    private static final Icon DATASET_ICON;

    /** Reference to the <code>Project</code> icon. */
    private static final Icon PROJECT_ICON;

    /** Reference to the <code>Screen</code> icon. */
    private static final Icon SCREEN_ICON;

    /** Reference to the <code>Plate</code> icon. */
    private static final Icon PLATE_ICON;

    /** Reference to the <code>Tag</code> icon. */
    private static final Icon TAG_ICON;

    /** Reference to the <code>Tag Set</code> icon. */
    private static final Icon TAG_SET_ICON;

    // Resolve all icons once at class-load time via the agent's IconManager.
    static {
        IconManager icons = IconManager.getInstance();
        IMAGE_ICON = icons.getIcon(IconManager.IMAGE);
        DATASET_ICON = icons.getIcon(IconManager.DATASET);
        PROJECT_ICON = icons.getIcon(IconManager.PROJECT);
        SCREEN_ICON = icons.getIcon(IconManager.SCREEN);
        PLATE_ICON = icons.getIcon(IconManager.PLATE);
        TAG_ICON = icons.getIcon(IconManager.TAG);
        TAG_SET_ICON = icons.getIcon(IconManager.TAG_SET);
    }

    /** The ID of the current user. */
    // NOTE(review): assigned in the constructor but not read anywhere in this
    // class — possibly kept for subclasses or future permission checks;
    // confirm before removing.
    private long currentUserID;

    /** The default font. */
    // Captured lazily on the first rendering call so the tree's own default
    // font is used as the baseline.
    private Font defaultFont;

    /**
     * Sets the icon and the text corresponding to the user's object.
     *
     * Maps each supported model type to its static icon; tag annotations are
     * distinguished into tag vs. tag-set by namespace. String nodes and any
     * unrecognized type end up with a <code>null</code> icon (no icon shown).
     *
     * @param usrObject The user object to handle.
     */
    private void setIcon(Object usrObject)
    {
        Icon icon = null;
        if (usrObject instanceof ProjectData)
            icon = PROJECT_ICON;
        else if (usrObject instanceof DatasetData)
            icon = DATASET_ICON;
        else if (usrObject instanceof ImageData)
            icon = IMAGE_ICON;
        else if (usrObject instanceof ScreenData)
            icon = SCREEN_ICON;
        else if (usrObject instanceof PlateData)
            icon = PLATE_ICON;
        else if (usrObject instanceof TagAnnotationData) {
            TagAnnotationData tag = (TagAnnotationData) usrObject;
            // A tag carrying the tag-set namespace is rendered as a tag set.
            if (TagAnnotationData.INSIGHT_TAGSET_NS.equals(tag.getNameSpace()))
                icon = TAG_SET_ICON;
            else
                icon = TAG_ICON;
        }
        else if (usrObject instanceof String)
            icon = null; // redundant with the initial value, kept for clarity
        setIcon(icon);
    }

    /** Creates a new instance. */
    public TreeCellRenderer()
    {
        // Cache the logged-in experimenter's id from the agent registry.
        ExperimenterData exp =
            (ExperimenterData) MetadataViewerAgent.getRegistry().lookup(
                    LookupNames.CURRENT_USER_DETAILS);
        currentUserID = exp.getId();
    }

    /**
     * Overridden to set the icon and the text.
     *
     * Non-{@link TreeBrowserDisplay} values are rendered with the default
     * behavior only. When the node supplies its own default icon it wins;
     * String-valued nodes with such an icon are shown in small italics.
     * Otherwise the icon is derived from the node's user object.
     *
     * @see DefaultTreeCellRenderer#getTreeCellRendererComponent(JTree, Object,
     *      boolean, boolean, boolean, int, boolean)
     */
    public Component getTreeCellRendererComponent(JTree tree, Object value,
                            boolean sel, boolean expanded, boolean leaf,
                            int row, boolean hasFocus)
    {
        super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf,
                                        row, hasFocus);
        if (!(value instanceof TreeBrowserDisplay)) return this;
        // Remember the tree's default font the first time we render.
        if (defaultFont == null)
            defaultFont = getFont();
        TreeBrowserDisplay node = (TreeBrowserDisplay) value;
        Object object = node.getUserObject();
        Icon icon = node.getDefaultIcon();
        if (object instanceof String)
            setIcon(null);
        setFont(defaultFont);
        if (icon != null) {
            setIcon(icon);
            if (object instanceof String)
                setFont(defaultFont.deriveFont(Font.ITALIC, 10));
        } else {
            // No node-level icon: pick one based on the model object type.
            setIcon(object);
        }
        return this;
    }

}
| gpl-2.0 |
zhiqinghuang/core | src/com/dotmarketing/business/PermissionCache.java | 497 | package com.dotmarketing.business;
import java.util.List;
import com.dotmarketing.beans.Permission;
//This interface should have default package access
public abstract class PermissionCache implements Cachable{

    // Stores the given permission list under key and returns the cached list.
    abstract protected List<Permission> addToPermissionCache(String key,
            List<Permission> permissions);

    // Returns the permissions cached under key; presumably null when the key
    // is absent — TODO confirm against concrete implementations.
    abstract protected List<Permission> getPermissionsFromCache(String key);

    // Drops every cached entry (Cachable contract).
    abstract public void clearCache();

    // Evicts the single entry stored under key.
    abstract protected void remove(String key);
}
gnodet/camel | components/camel-aws2-athena/src/test/java/org/apache/camel/component/aws2/athena/AmazonAthenaClientMock.java | 6632 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.aws2.athena;
import java.util.LinkedList;
import java.util.Queue;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.services.athena.AthenaClient;
import software.amazon.awssdk.services.athena.model.ColumnInfo;
import software.amazon.awssdk.services.athena.model.Datum;
import software.amazon.awssdk.services.athena.model.GetQueryExecutionRequest;
import software.amazon.awssdk.services.athena.model.GetQueryExecutionResponse;
import software.amazon.awssdk.services.athena.model.GetQueryResultsRequest;
import software.amazon.awssdk.services.athena.model.GetQueryResultsResponse;
import software.amazon.awssdk.services.athena.model.ListQueryExecutionsRequest;
import software.amazon.awssdk.services.athena.model.ListQueryExecutionsResponse;
import software.amazon.awssdk.services.athena.model.QueryExecution;
import software.amazon.awssdk.services.athena.model.QueryExecutionState;
import software.amazon.awssdk.services.athena.model.QueryExecutionStatus;
import software.amazon.awssdk.services.athena.model.ResultConfiguration;
import software.amazon.awssdk.services.athena.model.ResultSet;
import software.amazon.awssdk.services.athena.model.ResultSetMetadata;
import software.amazon.awssdk.services.athena.model.Row;
import software.amazon.awssdk.services.athena.model.StartQueryExecutionRequest;
import software.amazon.awssdk.services.athena.model.StartQueryExecutionResponse;
import software.amazon.awssdk.services.athena.paginators.GetQueryResultsIterable;
public class AmazonAthenaClientMock implements AthenaClient {

    // FIFO results returned by successive startQueryExecution calls.
    private Queue<String> startQueryExecutionResults = new LinkedList<>();
    // FIFO results returned by successive getQueryExecution calls.
    private Queue<QueryExecution> getQueryExecutionResults = new LinkedList<>();

    /**
     * Optionally provide a FIFO queue of results in the order they should be returned for each call to
     * {@link #startQueryExecution(StartQueryExecutionRequest)}.
     *
     * @param startQueryExecutionResults FIFO ordered queue of results in the order they will be returned
     */
    public void setStartQueryExecutionResults(LinkedList<String> startQueryExecutionResults) {
        this.startQueryExecutionResults = startQueryExecutionResults;
    }

    /**
     * Optionally provide a FIFO queue of results in the order they should be returned for each call to
     * {@link #getQueryExecution(GetQueryExecutionRequest)}.
     *
     * @param getQueryExecutionResults FIFO ordered queue of results in the order they will be returned
     */
    public void setGetQueryExecutionResults(LinkedList<QueryExecution> getQueryExecutionResults) {
        this.getQueryExecutionResults = getQueryExecutionResults;
    }

    /**
     * Returns the next canned query execution (or a successful default when
     * the queue is empty). Execution id 3333... simulates a long-running
     * query by sleeping for 500 ms.
     */
    @Override
    public GetQueryExecutionResponse getQueryExecution(GetQueryExecutionRequest getQueryExecutionRequest)
            throws SdkException {

        QueryExecution defaultResult = QueryExecution.builder()
                .queryExecutionId("11111111-1111-1111-1111-111111111111")
                .status(QueryExecutionStatus.builder().state(QueryExecutionState.SUCCEEDED).build())
                .resultConfiguration(ResultConfiguration.builder().outputLocation("s3://bucket/file.csv").build())
                .build();

        QueryExecution result = getQueryExecutionResults.isEmpty() ? defaultResult : getQueryExecutionResults.poll();

        // if query execution id is 3333..., sleep for 500 ms to imitate a long running query
        if ("33333333-3333-3333-3333-333333333333".equals(result.queryExecutionId())) {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can still observe the
                // interruption instead of it being silently swallowed.
                Thread.currentThread().interrupt();
            }
        }

        return GetQueryExecutionResponse.builder()
                .queryExecution(result)
                .build();
    }

    /**
     * Returns two fixed execution ids and echoes the request's pagination
     * token back as the next token.
     */
    @Override
    public ListQueryExecutionsResponse listQueryExecutions(ListQueryExecutionsRequest listQueryExecutionsRequest)
            throws SdkException {
        return ListQueryExecutionsResponse.builder()
                .queryExecutionIds(
                        "11111111-1111-1111-1111-111111111111",
                        "22222222-2222-2222-2222-222222222222")
                .nextToken(listQueryExecutionsRequest.nextToken())
                .build();
    }

    /**
     * Returns the next canned execution id, or the default 1111... id when
     * the queue is empty.
     */
    @Override
    public StartQueryExecutionResponse startQueryExecution(StartQueryExecutionRequest startQueryExecutionRequest)
            throws SdkException {

        String defaultResult = "11111111-1111-1111-1111-111111111111";
        String result = startQueryExecutionResults.isEmpty() ? defaultResult : startQueryExecutionResults.poll();

        return StartQueryExecutionResponse.builder()
                .queryExecutionId(result)
                .build();
    }

    /** Returns a single-page result set with one "id" column and one row ("42"). */
    @Override
    public GetQueryResultsResponse getQueryResults(GetQueryResultsRequest getQueryResultsRequest) throws SdkException {
        return GetQueryResultsResponse.builder()
                .nextToken(null)
                .resultSet(ResultSet.builder()
                        .resultSetMetadata(ResultSetMetadata.builder()
                                .columnInfo(ColumnInfo.builder().name("id").build())
                                .build())
                        .rows(Row.builder()
                                .data(Datum.builder().varCharValue("42").build())
                                .build())
                        .build())
                .build();
    }

    /** Wraps this mock in the SDK's paginating iterable. */
    @Override
    public GetQueryResultsIterable getQueryResultsPaginator(GetQueryResultsRequest getQueryResultsRequest)
            throws SdkException {
        return new GetQueryResultsIterable(this, getQueryResultsRequest);
    }

    @Override
    public String serviceName() {
        return null;
    }

    @Override
    public void close() {
        // noop
    }
}
| apache-2.0 |
allotria/intellij-community | platform/analysis-api/src/com/intellij/codeInsight/signatureHelp/package-info.java | 253 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
@ApiStatus.Internal
package com.intellij.codeInsight.signatureHelp;
import org.jetbrains.annotations.ApiStatus; | apache-2.0 |
dahlstrom-g/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/replaceTypeInCast/beforeIntersection.java | 230 | // "Replace 'Integer' with 'Number' in cast" "true"
import java.util.*;
class X {
void test(Object x) {
if (x instanceof Integer || x instanceof Long) {
System.out.println(((<caret>Integer)x).longValue());
}
}
} | apache-2.0 |
adessaigne/camel | components/camel-spring/src/test/java/org/apache/camel/spring/bind/ProcessorAsEndpointTest.java | 2399 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spring.bind;
import java.util.List;
import org.apache.camel.Exchange;
import org.apache.camel.NoSuchEndpointException;
import org.apache.camel.spring.SpringTestSupport;
import org.junit.jupiter.api.Test;
import org.springframework.context.support.AbstractXmlApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
public class ProcessorAsEndpointTest extends SpringTestSupport {
    protected Object body = "<hello>world!</hello>";

    /**
     * Sends a body to the processor bean endpoint and verifies exactly one
     * exchange was received by the stub.
     */
    @Test
    public void testSendingToProcessorEndpoint() throws Exception {
        ProcessorStub processor = getMandatoryBean(ProcessorStub.class, "myProcessor");

        template.sendBody("bean:myProcessor", body);

        List<Exchange> list = processor.getExchanges();
        assertEquals(1, list.size(), "Received exchange list: " + list);

        // Parameterized logging: the argument is only rendered when DEBUG is
        // enabled, avoiding eager string concatenation.
        log.debug("Found exchanges: {}", list);
    }

    /** Sending to a URI with no registered endpoint must fail fast. */
    @Test
    public void testSendingToNonExistentEndpoint() throws Exception {
        String uri = "unknownEndpoint";
        try {
            template.sendBody(uri, body);
            fail("We should have failed as this is a bad endpoint URI");
        } catch (NoSuchEndpointException e) {
            log.debug("Caught expected exception: {}", e.getMessage(), e);
        }
    }

    @Override
    protected AbstractXmlApplicationContext createApplicationContext() {
        return new ClassPathXmlApplicationContext("org/apache/camel/spring/bind/processorAsEndpoint.xml");
    }
}
| apache-2.0 |
pedroigor/keycloak | testsuite/integration-arquillian/servers/auth-server/services/testsuite-providers/src/main/java/org/keycloak/testsuite/services/clientpolicy/executor/TestRaiseExeptionExecutorFactory.java | 2002 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.services.clientpolicy.executor;
import java.util.Collections;
import java.util.List;
import org.keycloak.Config.Scope;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.provider.ProviderConfigProperty;
import org.keycloak.services.clientpolicy.executor.ClientPolicyExecutorProvider;
import org.keycloak.services.clientpolicy.executor.ClientPolicyExecutorProviderFactory;
// Factory for the test-only executor that deliberately raises an exception.
// NOTE(review): "Exeption" in the class name is a typo, but renaming would
// change the public provider class (and its matching executor), so it is
// documented rather than fixed here.
public class TestRaiseExeptionExecutorFactory implements ClientPolicyExecutorProviderFactory {

    // Provider id used to register/look up this executor.
    public static final String PROVIDER_ID = "test-raise-exception";

    @Override
    public ClientPolicyExecutorProvider create(KeycloakSession session) {
        return new TestRaiseExeptionExecutor(session);
    }

    // No configuration is needed for this test factory.
    @Override
    public void init(Scope config) {
    }

    @Override
    public void postInit(KeycloakSessionFactory factory) {
    }

    @Override
    public void close() {
    }

    @Override
    public String getId() {
        return PROVIDER_ID;
    }

    @Override
    public String getHelpText() {
        return "NA";
    }

    // This executor exposes no configurable properties.
    @Override
    public List<ProviderConfigProperty> getConfigProperties() {
        return Collections.emptyList();
    }

    @Override
    public boolean isSupported() {
        return true;
    }
}
hongyuhong/flink | flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/PlanFinalizer.java | 8841 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.optimizer.traversals;
import org.apache.flink.api.common.Plan;
import org.apache.flink.optimizer.CompilerException;
import org.apache.flink.optimizer.Optimizer;
import org.apache.flink.optimizer.dag.TempMode;
import org.apache.flink.optimizer.plan.BinaryUnionPlanNode;
import org.apache.flink.optimizer.plan.BulkIterationPlanNode;
import org.apache.flink.optimizer.plan.BulkPartialSolutionPlanNode;
import org.apache.flink.optimizer.plan.Channel;
import org.apache.flink.optimizer.plan.IterationPlanNode;
import org.apache.flink.optimizer.plan.OptimizedPlan;
import org.apache.flink.optimizer.plan.PlanNode;
import org.apache.flink.optimizer.plan.SinkPlanNode;
import org.apache.flink.optimizer.plan.SolutionSetPlanNode;
import org.apache.flink.optimizer.plan.SourcePlanNode;
import org.apache.flink.optimizer.plan.WorksetIterationPlanNode;
import org.apache.flink.optimizer.plan.WorksetPlanNode;
import org.apache.flink.runtime.operators.DriverStrategy;
import org.apache.flink.util.Visitor;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This visitor traverses the selected execution plan and finalizes it:
*
* <ul>
* <li>The graph of nodes is double-linked (links from child to parent are inserted).</li>
* <li>If unions join static and dynamic paths, the cache is marked as a memory consumer.</li>
* <li>Relative memory fractions are assigned to all nodes.</li>
* <li>All nodes are collected into a set.</li>
* </ul>
*/
public class PlanFinalizer implements Visitor<PlanNode> {

    private final Set<PlanNode> allNodes; // a set of all nodes in the optimizer plan

    private final List<SourcePlanNode> sources; // all data source nodes in the optimizer plan

    private final List<SinkPlanNode> sinks; // all data sink nodes in the optimizer plan

    // Iteration nodes currently being descended into; the innermost one is at
    // the tail, so partial-solution/workset/solution-set nodes can be linked
    // to their containing iteration.
    private final Deque<IterationPlanNode> stackOfIterationNodes;

    private int memoryConsumerWeights; // a counter of all memory consumers

    /**
     * Creates a new plan finalizer.
     */
    public PlanFinalizer() {
        this.allNodes = new HashSet<PlanNode>();
        this.sources = new ArrayList<SourcePlanNode>();
        this.sinks = new ArrayList<SinkPlanNode>();
        this.stackOfIterationNodes = new ArrayDeque<IterationPlanNode>();
    }

    /**
     * Traverses the plan from the given sinks, then distributes memory:
     * every consumer (driver strategy, damming local strategy, temp channel)
     * receives the relative fraction weight / totalConsumerWeights of a
     * subtask's memory.
     */
    public OptimizedPlan createFinalPlan(List<SinkPlanNode> sinks, String jobName, Plan originalPlan) {
        this.memoryConsumerWeights = 0;

        // traverse the graph
        for (SinkPlanNode node : sinks) {
            node.accept(this);
        }

        // assign the memory to each node
        if (this.memoryConsumerWeights > 0) {
            for (PlanNode node : this.allNodes) {
                // assign memory to the driver strategy of the node
                final int consumerWeight = node.getMemoryConsumerWeight();
                if (consumerWeight > 0) {
                    final double relativeMem = (double)consumerWeight / this.memoryConsumerWeights;
                    node.setRelativeMemoryPerSubtask(relativeMem);
                    if (Optimizer.LOG.isDebugEnabled()) {
                        Optimizer.LOG.debug("Assigned " + relativeMem + " of total memory to each subtask of " +
                            node.getProgramOperator().getName() + ".");
                    }
                }

                // assign memory to the local and global strategies of the channels
                for (Channel c : node.getInputs()) {
                    // A damming local strategy (e.g. sort) holds data and
                    // therefore counts as one memory consumer.
                    if (c.getLocalStrategy().dams()) {
                        final double relativeMem = 1.0 / this.memoryConsumerWeights;
                        c.setRelativeMemoryLocalStrategy(relativeMem);
                        if (Optimizer.LOG.isDebugEnabled()) {
                            Optimizer.LOG.debug("Assigned " + relativeMem + " of total memory to each local strategy " +
                                    "instance of " + c + ".");
                        }
                    }
                    if (c.getTempMode() != TempMode.NONE) {
                        final double relativeMem = 1.0/ this.memoryConsumerWeights;
                        c.setRelativeTempMemory(relativeMem);
                        if (Optimizer.LOG.isDebugEnabled()) {
                            Optimizer.LOG.debug("Assigned " + relativeMem + " of total memory to each instance of the temp " +
                                    "table for " + c + ".");
                        }
                    }
                }
            }
        }
        return new OptimizedPlan(this.sources, this.sinks, this.allNodes, jobName, originalPlan);
    }

    @Override
    public boolean preVisit(PlanNode visitable) {
        // if we come here again, prevent a further descend
        // (Set.add returns false for nodes already visited, so shared
        // sub-plans are processed exactly once)
        if (!this.allNodes.add(visitable)) {
            return false;
        }

        if (visitable instanceof SinkPlanNode) {
            this.sinks.add((SinkPlanNode) visitable);
        }
        else if (visitable instanceof SourcePlanNode) {
            this.sources.add((SourcePlanNode) visitable);
        }
        else if (visitable instanceof BinaryUnionPlanNode) {
            BinaryUnionPlanNode unionNode = (BinaryUnionPlanNode) visitable;
            // A union joining a static and a dynamic path must cache the
            // static side, which makes it a memory consumer.
            if (unionNode.unionsStaticAndDynamicPath()) {
                unionNode.setDriverStrategy(DriverStrategy.UNION_WITH_CACHED);
            }
        }
        else if (visitable instanceof BulkPartialSolutionPlanNode) {
            // tell the partial solution about the iteration node that contains it
            final BulkPartialSolutionPlanNode pspn = (BulkPartialSolutionPlanNode) visitable;
            final IterationPlanNode iteration = this.stackOfIterationNodes.peekLast();

            // sanity check!
            // (the null check is redundant with instanceof, which is false
            // for null, but is kept for clarity)
            if (iteration == null || !(iteration instanceof BulkIterationPlanNode)) {
                throw new CompilerException("Bug: Error finalizing the plan. " +
                        "Cannot associate the node for a partial solutions with its containing iteration.");
            }
            pspn.setContainingIterationNode((BulkIterationPlanNode) iteration);
        }
        else if (visitable instanceof WorksetPlanNode) {
            // tell the partial solution about the iteration node that contains it
            final WorksetPlanNode wspn = (WorksetPlanNode) visitable;
            final IterationPlanNode iteration = this.stackOfIterationNodes.peekLast();

            // sanity check!
            if (iteration == null || !(iteration instanceof WorksetIterationPlanNode)) {
                throw new CompilerException("Bug: Error finalizing the plan. " +
                        "Cannot associate the node for a partial solutions with its containing iteration.");
            }
            wspn.setContainingIterationNode((WorksetIterationPlanNode) iteration);
        }
        else if (visitable instanceof SolutionSetPlanNode) {
            // tell the partial solution about the iteration node that contains it
            final SolutionSetPlanNode sspn = (SolutionSetPlanNode) visitable;
            final IterationPlanNode iteration = this.stackOfIterationNodes.peekLast();

            // sanity check!
            if (iteration == null || !(iteration instanceof WorksetIterationPlanNode)) {
                throw new CompilerException("Bug: Error finalizing the plan. " +
                        "Cannot associate the node for a partial solutions with its containing iteration.");
            }
            sspn.setContainingIterationNode((WorksetIterationPlanNode) iteration);
        }

        // double-connect the connections. previously, only parents knew their children, because
        // one child candidate could have been referenced by multiple parents.
        for (Channel conn : visitable.getInputs()) {
            conn.setTarget(visitable);
            conn.getSource().addOutgoingChannel(conn);
        }
        for (Channel c : visitable.getBroadcastInputs()) {
            c.setTarget(visitable);
            c.getSource().addOutgoingChannel(c);
        }

        // count the memory consumption
        // (one unit per damming local strategy and per temping channel, plus
        // the node's own driver weight)
        this.memoryConsumerWeights += visitable.getMemoryConsumerWeight();
        for (Channel c : visitable.getInputs()) {
            if (c.getLocalStrategy().dams()) {
                this.memoryConsumerWeights++;
            }
            if (c.getTempMode() != TempMode.NONE) {
                this.memoryConsumerWeights++;
            }
        }
        for (Channel c : visitable.getBroadcastInputs()) {
            if (c.getLocalStrategy().dams()) {
                this.memoryConsumerWeights++;
            }
            if (c.getTempMode() != TempMode.NONE) {
                this.memoryConsumerWeights++;
            }
        }

        // pass the visitor to the iteraton's step function
        if (visitable instanceof IterationPlanNode) {
            // push the iteration node onto the stack
            final IterationPlanNode iterNode = (IterationPlanNode) visitable;
            this.stackOfIterationNodes.addLast(iterNode);

            // recurse
            ((IterationPlanNode) visitable).acceptForStepFunction(this);

            // pop the iteration node from the stack
            this.stackOfIterationNodes.removeLast();
        }
        return true;
    }

    @Override
    public void postVisit(PlanNode visitable) {}
}
| apache-2.0 |
DadanielZ/incubator-eagle | eagle-core/eagle-alert-parent/eagle-alert/alert-engine/src/main/java/org/apache/eagle/alert/engine/publisher/AlertPublishSpecListener.java | 1317 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.alert.engine.publisher;
import org.apache.eagle.alert.coordination.model.PublishSpec;
import org.apache.eagle.alert.engine.coordinator.PolicyDefinition;
import org.apache.eagle.alert.engine.coordinator.StreamDefinition;
import java.util.Map;
public interface AlertPublishSpecListener {

    /**
     * Invoked when the alert publish specification changes.
     *
     * @param spec the new publish specification
     * @param sds  stream definitions keyed by stream id
     */
    void onAlertPublishSpecChange(PublishSpec spec, Map<String, StreamDefinition> sds);

    /**
     * Invoked when alert policy definitions change.
     *
     * @param pds policy definitions keyed by policy id
     * @param sds stream definitions keyed by stream id
     */
    void onAlertPolicyChange(Map<String, PolicyDefinition> pds, Map<String, StreamDefinition> sds);
}
| apache-2.0 |
ruks/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.admin.v1/src/gen/java/org/wso2/carbon/apimgt/rest/api/admin/v1/KeyManagersApi.java | 8217 | package org.wso2.carbon.apimgt.rest.api.admin.v1;
import org.wso2.carbon.apimgt.rest.api.admin.v1.dto.ErrorDTO;
import org.wso2.carbon.apimgt.rest.api.admin.v1.dto.KeyManagerDTO;
import org.wso2.carbon.apimgt.rest.api.admin.v1.dto.KeyManagerListDTO;
import org.wso2.carbon.apimgt.rest.api.admin.v1.dto.KeyManagerWellKnownResponseDTO;
import org.wso2.carbon.apimgt.rest.api.admin.v1.KeyManagersApiService;
import org.wso2.carbon.apimgt.rest.api.admin.v1.impl.KeyManagersApiServiceImpl;
import org.wso2.carbon.apimgt.api.APIManagementException;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.inject.Inject;
import io.swagger.annotations.*;
import java.io.InputStream;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.apache.cxf.jaxrs.ext.multipart.Attachment;
import org.apache.cxf.jaxrs.ext.multipart.Multipart;
import java.util.Map;
import java.util.List;
import javax.validation.constraints.*;
@Path("/key-managers")
@Api(description = "the key-managers API")
/**
 * Generated JAX-RS (Apache CXF) resource class exposing the admin
 * {@code /key-managers} REST API. Every operation delegates to
 * {@link KeyManagersApiService}; this class only carries the routing,
 * Swagger, and OAuth2-scope annotations.
 *
 * <p>NOTE(review): this file looks Swagger-codegen generated — manual edits
 * here are typically overwritten on regeneration; confirm before changing.
 */
public class KeyManagersApi {

    // CXF-injected per-request message context, forwarded to the delegate so it
    // can resolve the authenticated user/tenant for each call.
    @Context MessageContext securityContext;

    // Concrete implementation all endpoint methods delegate to.
    KeyManagersApiService delegate = new KeyManagersApiServiceImpl();

    /**
     * {@code POST /key-managers/discover} — retrieves well-known information
     * from a key manager's well-known endpoint.
     *
     * @param url  well-known endpoint URL (optional multipart form part)
     * @param type key manager type (optional multipart form part)
     * @return 200 with a {@link KeyManagerWellKnownResponseDTO} body
     * @throws APIManagementException if discovery fails
     */
    @POST
    @Path("/discover")
    @Consumes({ "multipart/form-data" })
    @Produces({ "application/json" })
    @ApiOperation(value = "Retrieve Well-known information from Key Manager Well-known Endpoint", notes = "Retrieve well-known information from key manager's well-known endpoint ", response = KeyManagerWellKnownResponseDTO.class, authorizations = {
        @Authorization(value = "OAuth2Security", scopes = {
            @AuthorizationScope(scope = "apim:admin", description = "Manage all admin operations"),
            @AuthorizationScope(scope = "apim:admin_operations", description = "Manage API categories and Key Managers related operations")
        })
    }, tags={ "Key Manager (Collection)", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "OK. KeyManagers returned ", response = KeyManagerWellKnownResponseDTO.class) })
    public Response keyManagersDiscoverPost(@Multipart(value = "url", required = false) String url, @Multipart(value = "type", required = false) String type) throws APIManagementException{
        return delegate.keyManagersDiscoverPost(url, type, securityContext);
    }

    /**
     * {@code GET /key-managers} — lists all configured key managers.
     *
     * @return 200 with a {@link KeyManagerListDTO} body
     * @throws APIManagementException if retrieval fails
     */
    @GET
    @Produces({ "application/json" })
    @ApiOperation(value = "Get all Key managers", notes = "Get all Key managers ", response = KeyManagerListDTO.class, authorizations = {
        @Authorization(value = "OAuth2Security", scopes = {
            @AuthorizationScope(scope = "apim:admin", description = "Manage all admin operations"),
            @AuthorizationScope(scope = "apim:admin_operations", description = "Manage API categories and Key Managers related operations")
        })
    }, tags={ "Key Manager (Collection)", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "OK. KeyManagers returned ", response = KeyManagerListDTO.class) })
    public Response keyManagersGet() throws APIManagementException{
        return delegate.keyManagersGet(securityContext);
    }

    /**
     * {@code DELETE /key-managers/{keyManagerId}} — deletes a key manager by id.
     *
     * @param keyManagerId UUID of the key manager to delete
     * @return 200 on success, 404 if the key manager does not exist
     * @throws APIManagementException if deletion fails
     */
    @DELETE
    @Path("/{keyManagerId}")
    @Produces({ "application/json" })
    @ApiOperation(value = "Delete a Key Manager", notes = "Delete a Key Manager by keyManager id ", response = Void.class, authorizations = {
        @Authorization(value = "OAuth2Security", scopes = {
            @AuthorizationScope(scope = "apim:admin", description = "Manage all admin operations"),
            @AuthorizationScope(scope = "apim:admin_operations", description = "Manage API categories and Key Managers related operations")
        })
    }, tags={ "Key Manager (Individual)", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "OK. Key Manager successfully deleted. ", response = Void.class),
        @ApiResponse(code = 404, message = "Not Found. The specified resource does not exist.", response = ErrorDTO.class) })
    public Response keyManagersKeyManagerIdDelete(@ApiParam(value = "Key Manager UUID ",required=true) @PathParam("keyManagerId") String keyManagerId) throws APIManagementException{
        return delegate.keyManagersKeyManagerIdDelete(keyManagerId, securityContext);
    }

    /**
     * {@code GET /key-managers/{keyManagerId}} — fetches a single key manager
     * configuration by id.
     *
     * @param keyManagerId UUID of the key manager to fetch
     * @return 200 with a {@link KeyManagerDTO} body, 404 if not found
     * @throws APIManagementException if retrieval fails
     */
    @GET
    @Path("/{keyManagerId}")
    @Produces({ "application/json" })
    @ApiOperation(value = "Get a Key Manager Configuration", notes = "Retrieve a single Key Manager Configuration. We should provide the Id of the KeyManager as a path parameter. ", response = KeyManagerDTO.class, authorizations = {
        @Authorization(value = "OAuth2Security", scopes = {
            @AuthorizationScope(scope = "apim:admin", description = "Manage all admin operations"),
            @AuthorizationScope(scope = "apim:admin_operations", description = "Manage API categories and Key Managers related operations")
        })
    }, tags={ "Key Manager (Individual)", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "OK. KeyManager Configuration returned ", response = KeyManagerDTO.class),
        @ApiResponse(code = 404, message = "Not Found. The specified resource does not exist.", response = ErrorDTO.class),
        @ApiResponse(code = 406, message = "Not Acceptable. The requested media type is not supported.", response = ErrorDTO.class) })
    public Response keyManagersKeyManagerIdGet(@ApiParam(value = "Key Manager UUID ",required=true) @PathParam("keyManagerId") String keyManagerId) throws APIManagementException{
        return delegate.keyManagersKeyManagerIdGet(keyManagerId, securityContext);
    }

    /**
     * {@code PUT /key-managers/{keyManagerId}} — updates an existing key
     * manager with the supplied configuration.
     *
     * @param keyManagerId  UUID of the key manager to update
     * @param keyManagerDTO updated key manager configuration
     * @return 200 with the updated {@link KeyManagerDTO}, 400 on validation
     *         error, 404 if not found
     * @throws APIManagementException if the update fails
     */
    @PUT
    @Path("/{keyManagerId}")
    @Consumes({ "application/json" })
    @Produces({ "application/json" })
    @ApiOperation(value = "Update a Key Manager", notes = "Update a Key Manager by keyManager id ", response = KeyManagerDTO.class, authorizations = {
        @Authorization(value = "OAuth2Security", scopes = {
            @AuthorizationScope(scope = "apim:admin", description = "Manage all admin operations"),
            @AuthorizationScope(scope = "apim:admin_operations", description = "Manage API categories and Key Managers related operations")
        })
    }, tags={ "Key Manager (Individual)", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "OK. Label updated. ", response = KeyManagerDTO.class),
        @ApiResponse(code = 400, message = "Bad Request. Invalid request or validation error.", response = ErrorDTO.class),
        @ApiResponse(code = 404, message = "Not Found. The specified resource does not exist.", response = ErrorDTO.class) })
    public Response keyManagersKeyManagerIdPut(@ApiParam(value = "Key Manager UUID ",required=true) @PathParam("keyManagerId") String keyManagerId, @ApiParam(value = "Key Manager object with updated information " ,required=true) KeyManagerDTO keyManagerDTO) throws APIManagementException{
        return delegate.keyManagersKeyManagerIdPut(keyManagerId, keyManagerDTO, securityContext);
    }

    /**
     * {@code POST /key-managers} — registers a new key manager.
     *
     * @param keyManagerDTO key manager configuration to create
     * @return 201 with the created {@link KeyManagerDTO}, 400 on validation
     *         error
     * @throws APIManagementException if creation fails
     */
    @POST
    @Consumes({ "application/json" })
    @Produces({ "application/json" })
    @ApiOperation(value = "Add a new API Key Manager", notes = "Add a new API Key Manager ", response = KeyManagerDTO.class, authorizations = {
        @Authorization(value = "OAuth2Security", scopes = {
            @AuthorizationScope(scope = "apim:admin", description = "Manage all admin operations"),
            @AuthorizationScope(scope = "apim:admin_operations", description = "Manage API categories and Key Managers related operations")
        })
    }, tags={ "Key Manager (Collection)" })
    @ApiResponses(value = {
        @ApiResponse(code = 201, message = "Created. Successful response with the newly created object as entity in the body. ", response = KeyManagerDTO.class),
        @ApiResponse(code = 400, message = "Bad Request. Invalid request or validation error.", response = ErrorDTO.class) })
    public Response keyManagersPost(@ApiParam(value = "Key Manager object that should to be added " ,required=true) KeyManagerDTO keyManagerDTO) throws APIManagementException{
        return delegate.keyManagersPost(keyManagerDTO, securityContext);
    }
}
| apache-2.0 |