repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
OpenHFT/Chronicle-Wire
src/main/java/net/openhft/chronicle/wire/WireType.java
21099
/* * Copyright 2016-2020 chronicle.software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.chronicle.wire; import net.openhft.chronicle.bytes.Bytes; import net.openhft.chronicle.bytes.BytesStore; import net.openhft.chronicle.bytes.BytesUtil; import net.openhft.chronicle.bytes.StopCharTesters; import net.openhft.chronicle.bytes.ref.*; import net.openhft.chronicle.core.Jvm; import net.openhft.chronicle.core.LicenceCheck; import net.openhft.chronicle.core.io.IOTools; import net.openhft.chronicle.core.values.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.Serializable; import java.lang.reflect.Constructor; import java.net.URL; import java.util.LinkedHashMap; import java.util.Map; import java.util.Spliterator; import java.util.Spliterators; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Stream; import java.util.stream.StreamSupport; import static net.openhft.chronicle.core.io.IOTools.*; /** * A selection of prebuilt wire types. 
*/ @SuppressWarnings({"rawtypes", "unchecked"}) public enum WireType implements Function<Bytes, Wire>, LicenceCheck { TEXT { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new TextWire(bytes).useBinaryDocuments(); } @Override public Supplier<LongValue> newLongReference() { return TextLongReference::new; } @Override public Supplier<LongArrayValues> newLongArrayReference() { return TextLongArrayReference::new; } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { Bytes bytes = Bytes.allocateElasticDirect(cs.length()); try { bytes.appendUtf8(cs); if (bytes.startsWith(PREABLE)) { truncatePreable(bytes); } @NotNull Wire wire = apply(bytes); //noinspection unchecked return (T) wire.getValueIn().object(); } finally { bytes.releaseLast(); } } public void truncatePreable(@NotNull Bytes bytes) { bytes.readSkip(4); long pos = bytes.readPosition(); @NotNull String word = bytes.parseUtf8(StopCharTesters.SPACE_STOP); switch (word) { case "!!data": case "!!data-not-ready": case "!!meta-data": case "!!meta-data-not-ready": break; default: bytes.readPosition(pos); } } @Override public boolean isText() { return true; } }, /** * Use this ONLY if intend to use Delta and Binary. 
Otherwise, use {@link #BINARY_LIGHT} */ BINARY { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new BinaryWire(bytes); } @NotNull @Override public String asString(Object marshallable) { return asHexString(marshallable); } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { return fromHexString(cs); } }, /** * Use this when only need to use Binary (does not support DeltaWire) */ BINARY_LIGHT { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return BinaryWire.binaryOnly(bytes); } @NotNull @Override public String asString(Object marshallable) { return asHexString(marshallable); } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { return fromHexString(cs); } }, DEFAULT_ZERO_BINARY { @NotNull @Override public Wire apply(Bytes bytes) { try { return (Wire) Class.forName("software.chronicle.wire.DefaultZeroWire") .getDeclaredConstructor(Bytes.class) .newInstance(bytes); } catch (Exception e) { @NotNull IllegalStateException licence = new IllegalStateException( "A Chronicle Wire Enterprise licence is required to run this code " + "because you are using DefaultZeroWire which is a licence product. " + "Please contact sales@chronicle.software"); Jvm.warn().on(getClass(), licence); throw licence; } } @Override public void licenceCheck() { if (isAvailable()) return; @NotNull final IllegalStateException licence = new IllegalStateException("A Chronicle Wire " + "Enterprise licence is required to run this code because you are using " + "DEFAULT_ZERO_BINARY which is a licence product. 
" + "Please contact sales@chronicle.software"); Jvm.warn().on(getClass(), licence); throw licence; } @Override public boolean isAvailable() { return IS_DEFAULT_ZERO_AVAILABLE; } @NotNull @Override public String asString(Object marshallable) { return asHexString(marshallable); } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { return fromHexString(cs); } }, DELTA_BINARY { @NotNull @Override public Wire apply(Bytes bytes) { try { @NotNull Class<Wire> aClass = (Class) Class.forName("software.chronicle.wire.DeltaWire"); final Constructor<Wire> declaredConstructor = aClass.getDeclaredConstructor(Bytes.class); return declaredConstructor.newInstance(bytes); } catch (Exception e) { licenceCheck(); // this should never happen throw new AssertionError(e); } } @Override public void licenceCheck() { if (isAvailable()) return; @NotNull final IllegalStateException licence = new IllegalStateException("A Chronicle-Wire-" + "Enterprise licence is required to run this code because you are using " + "DELTA_BINARY which is a licence product. 
" + "Please contact sales@chronicle.software"); Jvm.error().on(WireType.class, licence); throw licence; } @Override public boolean isAvailable() { return IS_DELTA_AVAILABLE; } @NotNull @Override public String asString(Object marshallable) { return asHexString(marshallable); } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { return fromHexString(cs); } }, FIELDLESS_BINARY { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new BinaryWire(bytes, false, false, true, Integer.MAX_VALUE, "binary", false); } @NotNull @Override public String asString(Object marshallable) { return asHexString(marshallable); } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { return fromHexString(cs); } }, COMPRESSED_BINARY { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new BinaryWire(bytes, false, false, false, COMPRESSED_SIZE, "lzw", true); } @NotNull @Override public String asString(Object marshallable) { return asHexString(marshallable); } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { return fromHexString(cs); } }, JSON { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new JSONWire(bytes).useBinaryDocuments(); } @Override public boolean isText() { return true; } }, YAML { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new YamlWire(bytes).useBinaryDocuments(); } @Override public boolean isText() { return true; } }, RAW { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new RawWire(bytes); } @NotNull @Override public String asString(Object marshallable) { return asHexString(marshallable); } @Nullable @Override public <T> T fromString(@NotNull CharSequence cs) { return fromHexString(cs); } }, CSV { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new CSVWire(bytes); } @Override public boolean isText() { return true; } }, READ_ANY { @NotNull @Override public Wire apply(@NotNull Bytes bytes) { return new 
ReadAnyWire(bytes); } }; static final BytesStore PREABLE = BytesStore.from("--- "); private static final int COMPRESSED_SIZE = Integer.getInteger("WireType.compressedSize", 128); private static final boolean IS_DELTA_AVAILABLE = isDeltaAvailable(); private static final boolean IS_DEFAULT_ZERO_AVAILABLE = isDefaultZeroAvailable(); private static boolean isDeltaAvailable() { try { Class.forName("software.chronicle.wire.DeltaWire").getDeclaredConstructor(Bytes.class); return true; } catch (Exception fallback) { return false; } } private static boolean isDefaultZeroAvailable() { try { Class.forName("software.chronicle.wire.DefaultZeroWire").getDeclaredConstructor(Bytes.class); return true; } catch (Exception var4) { return false; } } @NotNull static Bytes getBytesForToString() { return Wires.acquireBytesForToString(); } @NotNull static Bytes getBytes2() { // when in debug, the output becomes confused if you reuse the buffer. if (Jvm.isDebug()) return Bytes.allocateElasticOnHeap(); return Wires.acquireAnotherBytes(); } @Nullable public static WireType valueOf(@Nullable Wire wire) { if (wire == null) return null; if (wire instanceof AbstractAnyWire) wire = ((AbstractAnyWire) wire).underlyingWire(); if (wire instanceof YamlWire) return WireType.YAML; if (wire instanceof JSONWire) return WireType.JSON; if (wire instanceof TextWire) return WireType.TEXT; if ("DeltaWire".equals(wire.getClass().getSimpleName())) { return DELTA_BINARY; } // this must be above BinaryWire if ("DefaultZeroWire".equals(wire.getClass().getSimpleName())) { return DEFAULT_ZERO_BINARY; } if (wire instanceof BinaryWire) { @NotNull BinaryWire binaryWire = (BinaryWire) wire; return binaryWire.fieldLess() ? 
FIELDLESS_BINARY : WireType.BINARY; } if (wire instanceof RawWire) { return WireType.RAW; } throw new IllegalStateException("unknown type"); } public Supplier<IntValue> newIntReference() { return BinaryIntReference::new; } public Supplier<BooleanValue> newBooleanReference() { return BinaryBooleanReference::new; } public Supplier<LongValue> newLongReference() { return BinaryLongReference::new; } public Supplier<TwoLongValue> newTwoLongReference() { return BinaryTwoLongReference::new; } public Supplier<LongArrayValues> newLongArrayReference() { return BinaryLongArrayReference::new; } public String asString(Object marshallable) { Bytes bytes = asBytes(marshallable); return bytes.toString(); } @NotNull private Bytes asBytes(Object marshallable) { Bytes bytes = getBytesForToString(); Wire wire = apply(bytes); wire.usePadding(AbstractWire.DEFAULT_USE_PADDING); @NotNull final ValueOut valueOut = wire.getValueOut(); if (marshallable instanceof WriteMarshallable) valueOut.typedMarshallable((WriteMarshallable) marshallable); else if (marshallable instanceof Map) wire.getValueOut().marshallable((Map) marshallable, Object.class, Object.class, false); else if (marshallable instanceof Iterable) wire.getValueOut().sequence((Iterable) marshallable); else if (marshallable instanceof Serializable) valueOut.typedMarshallable((Serializable) marshallable); else { valueOut.typedMarshallable(Wires.typeNameFor(marshallable), w -> Wires.writeMarshallable(marshallable, w)); } return bytes; } /** * deserializes with an optimistic cast * * @param cs text to deserialize * @param <T> the type to expect * @return the object deserialized * @throws ClassCastException if the object is not a T */ @Nullable public <T> T fromString(@NotNull CharSequence cs) { return (T) fromString(Object.class, cs); } /** * deserializes as a given class * * @param tClass to serialize as * @param cs text to deserialize * @return the object deserialized */ public <T> T fromString(Class<T> tClass, @NotNull CharSequence 
cs) { if (cs.length() == 0) throw new IllegalArgumentException("cannot deserialize an empty string"); Bytes bytes = getBytes2(); bytes.appendUtf8(cs); Wire wire = apply(bytes); return wire.getValueIn().object(tClass); } @NotNull public <T> T fromFile(String filename) throws IOException { return (T) fromFile(Marshallable.class, filename); } @Nullable public <T> T fromFile(@NotNull Class<T> expectedType, String filename) throws IOException { File file = new File(filename); URL url = null; if (!file.exists()) { url = urlFor(expectedType, filename); file = new File(url.getFile()); } //: MappedFile.readOnly(file).acquireBytesForRead(0); Bytes bytes = Bytes.wrapForRead(readAsBytes(url == null ? new FileInputStream(file) : open(url))); if (bytes.readRemaining() == 0) throw new IOException("File " + file + " was empty"); try { return apply(bytes).getValueIn().object(expectedType); } finally { bytes.releaseLast(); } } @NotNull public <T> Stream<T> streamFromFile(String filename) throws IOException { return streamFromFile((Class) Marshallable.class, filename); } @NotNull public <T> Stream<T> streamFromFile(@NotNull Class<T> expectedType, String filename) throws IOException { Bytes b = BytesUtil.readFile(filename); return streamFromBytes(expectedType, b); } @NotNull public <T> Stream<T> streamFromBytes(@NotNull Class<T> expectedType, Bytes b) { Wire wire = apply(b); ValueIn valueIn = wire.getValueIn(); return StreamSupport.stream( new Spliterators.AbstractSpliterator<T>(Long.MAX_VALUE, Spliterator.ORDERED | Spliterator.IMMUTABLE) { @Override public boolean tryAdvance(@NotNull Consumer<? 
super T> action) { Bytes<?> bytes = wire.bytes(); if (valueIn.hasNext()) { action.accept(valueIn.object(expectedType)); if (wire instanceof TextWire) { wire.consumePadding(); if (bytes.peekUnsignedByte() == '-' && bytes.peekUnsignedByte(bytes.readPosition() + 1) == '-' && bytes.peekUnsignedByte(bytes.readPosition() + 2) == '-') { bytes.readSkip(3); while (bytes.peekUnsignedByte() == '-') bytes.readSkip(1); } } return true; } if (bytes.refCount() > 0) bytes.releaseLast(); return false; } }, false); } @NotNull public <T> Map<String, T> fromFileAsMap(String filename, @NotNull Class<T> tClass) throws IOException { @NotNull Map<String, T> map = new LinkedHashMap<>(); Wire wire = apply(BytesUtil.readFile(filename)); @NotNull StringBuilder sb = new StringBuilder(); while (wire.hasMore()) { wire.readEventName(sb) .object(tClass, map, (m, o) -> m.put(sb.toString(), o)); } return map; } public <T extends Marshallable> void toFileAsMap(@NotNull String filename, @NotNull Map<String, T> map) throws IOException { toFileAsMap(filename, map, false); } public <T extends Marshallable> void toFileAsMap(@NotNull String filename, @NotNull Map<String, T> map, boolean compact) throws IOException { Bytes bytes = WireInternal.acquireInternalBytes(); Wire wire = apply(bytes); for (@NotNull Map.Entry<String, T> entry : map.entrySet()) { @NotNull ValueOut valueOut = wire.writeEventName(entry::getKey); boolean wasLeaf = valueOut.swapLeaf(compact); valueOut.marshallable(entry.getValue()); valueOut.swapLeaf(wasLeaf); } String tempFilename = IOTools.tempName(filename); IOTools.writeFile(tempFilename, bytes.toByteArray()); @NotNull File file2 = new File(tempFilename); @NotNull File dest = new File(filename); if (!file2.renameTo(dest)) { if (dest.delete() && file2.renameTo(dest)) return; file2.delete(); throw new IOException("Failed to rename " + tempFilename + " to " + filename); } } public void toFile(@NotNull String filename, WriteMarshallable marshallable) throws IOException { Bytes bytes = 
WireInternal.acquireInternalBytes(); Wire wire = apply(bytes); wire.getValueOut().typedMarshallable(marshallable); String tempFilename = IOTools.tempName(filename); IOTools.writeFile(tempFilename, bytes.toByteArray()); @NotNull File file2 = new File(tempFilename); if (!file2.renameTo(new File(filename))) { file2.delete(); throw new IOException("Failed to rename " + tempFilename + " to " + filename); } } @NotNull String asHexString(Object marshallable) { Bytes bytes = asBytes(marshallable); return bytes.toHexString(); } @Nullable <T> T fromHexString(@NotNull CharSequence s) { Bytes bytes = Bytes.fromHexString(s.toString()); try { Wire wire = apply(bytes); return wire.getValueIn().typedMarshallable(); } finally { bytes.releaseLast(); } } @Nullable public Map<String, Object> asMap(@NotNull CharSequence cs) { Bytes bytes = getBytes2(); bytes.appendUtf8(cs); Wire wire = apply(bytes); return wire.getValueIn().marshallableAsMap(String.class, Object.class); } @Override public void licenceCheck() { } @Override public boolean isAvailable() { return true; } public boolean isText() { return false; } }
apache-2.0
buchandersenn/SimpleContentProvider
app/src/main/java/dk/simplecontentprovider/demo/OwnersActivity.java
3937
package dk.simplecontentprovider.demo; import android.app.ListActivity; import android.app.LoaderManager; import android.content.ContentUris; import android.content.CursorLoader; import android.content.Loader; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.view.ContextMenu; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ListView; import android.widget.SimpleCursorAdapter; import android.widget.TextView; import dk.simplecontentprovider.demo.dialogs.AddOwnerDialog; import dk.simplecontentprovider.demo.provider.DemoContract; public class OwnersActivity extends ListActivity implements LoaderManager.LoaderCallbacks<Cursor> { private SimpleCursorAdapter mAdapter; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_list); mAdapter = new SimpleCursorAdapter(this, R.layout.item_owner, null, new String[]{DemoContract.Owners.NAME, DemoContract.Owners.ADDRESS}, new int[]{R.id.owner_name, R.id.owner_address}, 0); setListAdapter(mAdapter); registerForContextMenu(getListView()); View emptyView = getListView().getEmptyView(); if (emptyView instanceof TextView) { ((TextView) emptyView).setText("No data"); } getLoaderManager().initLoader(0, null, this); } @Override protected void onListItemClick(ListView listView, View view, int position, long id) { PetsActivity.startActivity(this, id); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_actions, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
int id = item.getItemId(); if (id == R.id.action_add_item) { AddOwnerDialog dialog = new AddOwnerDialog(); dialog.show(getFragmentManager(), "ADD_OWNER"); return true; } else if (id == R.id.action_open_overview) { OverviewActivity.startActivity(this); } return super.onOptionsItemSelected(item); } @Override public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) { MenuInflater inflater = new MenuInflater(v.getContext()); inflater.inflate(R.menu.context_menu, menu); } @Override public boolean onContextItemSelected(MenuItem item) { AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo(); Uri uri = ContentUris.withAppendedId(DemoContract.Owners.CONTENT_URI, info.id); switch (item.getItemId()) { case R.id.delete: getContentResolver().delete(uri, null, null); return true; default: return super.onContextItemSelected(item); } } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { return new CursorLoader(this, DemoContract.Owners.CONTENT_URI, new String[]{DemoContract.Owners._ID, DemoContract.Owners.NAME, DemoContract.Owners.ADDRESS}, null, null, DemoContract.Owners._ID); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor data) { mAdapter.swapCursor(data); } @Override public void onLoaderReset(Loader<Cursor> loader) { mAdapter.swapCursor(null); } }
apache-2.0
lucperkins/heron
eco/src/java/com/twitter/heron/eco/builder/ComponentBuilder.java
1443
// Copyright 2017 Twitter. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.twitter.heron.eco.builder; import java.lang.reflect.InvocationTargetException; import java.util.List; import com.twitter.heron.eco.definition.BeanDefinition; import com.twitter.heron.eco.definition.EcoExecutionContext; public class ComponentBuilder { protected void buildComponents(EcoExecutionContext context, ObjectBuilder objectBuilder) throws ClassNotFoundException, IllegalAccessException, InstantiationException, NoSuchFieldException, InvocationTargetException { List<BeanDefinition> componentDefinitions = context.getTopologyDefinition().getComponents(); if (componentDefinitions != null) { for (BeanDefinition bean : componentDefinitions) { Object obj = objectBuilder.buildObject(bean, context); context.addComponent(bean.getId(), obj); } } } }
apache-2.0
lhfei/hbase-in-action
zookeeper-api/src/main/java/cn/lhfei/zookeeper/MyZkConfig.java
940
/* * Copyright 2010-2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cn.lhfei.zookeeper; /** * @version 0.1 * * @author Hefei Li * * @since May 3, 2015 */ public final class MyZkConfig { public static final String ZK_SERVER_MASTER = "114.80.177.136"; public static final String ZK_NODE_GROUP_NAME = "zoo"; public static final String ZK_NODE_MEMBER_NAME = "lhfei"; }
apache-2.0
AtScaleInc/Impala
fe/src/main/java/com/cloudera/impala/planner/Planner.java
98502
// Copyright 2012 Cloudera Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.cloudera.impala.planner; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.cloudera.impala.analysis.AggregateInfo; import com.cloudera.impala.analysis.AnalysisContext; import com.cloudera.impala.analysis.AnalyticInfo; import com.cloudera.impala.analysis.Analyzer; import com.cloudera.impala.analysis.BaseTableRef; import com.cloudera.impala.analysis.BinaryPredicate; import com.cloudera.impala.analysis.EquivalenceClassId; import com.cloudera.impala.analysis.Expr; import com.cloudera.impala.analysis.ExprSubstitutionMap; import com.cloudera.impala.analysis.InlineViewRef; import com.cloudera.impala.analysis.InsertStmt; import com.cloudera.impala.analysis.JoinOperator; import com.cloudera.impala.analysis.QueryStmt; import com.cloudera.impala.analysis.SelectStmt; import com.cloudera.impala.analysis.SlotDescriptor; import com.cloudera.impala.analysis.SlotId; import com.cloudera.impala.analysis.SlotRef; import com.cloudera.impala.analysis.TableRef; import com.cloudera.impala.analysis.TupleDescriptor; import com.cloudera.impala.analysis.TupleId; import com.cloudera.impala.analysis.UnionStmt; import com.cloudera.impala.analysis.UnionStmt.UnionOperand; import 
com.cloudera.impala.catalog.ColumnStats; import com.cloudera.impala.catalog.DataSourceTable; import com.cloudera.impala.catalog.HBaseTable; import com.cloudera.impala.catalog.HdfsTable; import com.cloudera.impala.catalog.Type; import com.cloudera.impala.common.IdGenerator; import com.cloudera.impala.common.ImpalaException; import com.cloudera.impala.common.InternalException; import com.cloudera.impala.common.NotImplementedException; import com.cloudera.impala.common.Pair; import com.cloudera.impala.common.PrintUtils; import com.cloudera.impala.common.RuntimeEnv; import com.cloudera.impala.thrift.TExplainLevel; import com.cloudera.impala.thrift.TPartitionType; import com.cloudera.impala.thrift.TQueryExecRequest; import com.cloudera.impala.thrift.TQueryOptions; import com.cloudera.impala.thrift.TTableName; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; /** * The planner is responsible for turning parse trees into plan fragments that * can be shipped off to backends for execution. * */ public class Planner { private final static Logger LOG = LoggerFactory.getLogger(Planner.class); // Estimate of the overhead imposed by storing data in a hash tbl; // used for determining whether a broadcast join is feasible. public final static double HASH_TBL_SPACE_OVERHEAD = 1.1; // The maximum fraction of remaining memory that a sort node can use during execution. public final static double SORT_MEM_MAX_FRACTION = 0.80; private final IdGenerator<PlanNodeId> nodeIdGenerator_ = PlanNodeId.createGenerator(); private final IdGenerator<PlanFragmentId> fragmentIdGenerator_ = PlanFragmentId.createGenerator(); /** * Create plan fragments for an analyzed statement, given a set of execution options. * The fragments are returned in a list such that element i of that list can * only consume output of the following fragments j > i. 
*/ public ArrayList<PlanFragment> createPlanFragments( AnalysisContext.AnalysisResult analysisResult, TQueryOptions queryOptions) throws ImpalaException { // Set queryStmt from analyzed SELECT or INSERT query. QueryStmt queryStmt = null; if (analysisResult.isInsertStmt() || analysisResult.isCreateTableAsSelectStmt()) { queryStmt = analysisResult.getInsertStmt().getQueryStmt(); } else { queryStmt = analysisResult.getQueryStmt(); } Analyzer analyzer = analysisResult.getAnalyzer(); analyzer.computeEquivClasses(); // Mark slots referenced by output exprs as materialized, prior to generating the // plan tree. // We need to mark the result exprs of the topmost select block as materialized, so // that PlanNode.init() can compute the final mem layout of materialized tuples // (the byte size of tuples is needed for cost computations). // TODO: instead of materializing everything produced by the plan root, derive // referenced slots from destination fragment and add a materialization node // if not all output is needed by destination fragment // TODO 2: should the materialization decision be cost-based? 
if (queryStmt.getBaseTblResultExprs() != null) { analyzer.materializeSlots(queryStmt.getBaseTblResultExprs()); } LOG.trace("desctbl: " + analyzer.getDescTbl().debugString()); PlanNode singleNodePlan = createQueryPlan(queryStmt, analyzer, queryOptions.isDisable_outermost_topn()); Preconditions.checkNotNull(singleNodePlan); ArrayList<PlanFragment> fragments = Lists.newArrayList(); if (queryOptions.num_nodes == 1) { // single-node execution; we're almost done singleNodePlan = addUnassignedConjuncts(analyzer, singleNodePlan.getTupleIds(), singleNodePlan); fragments.add(new PlanFragment( fragmentIdGenerator_.getNextId(), singleNodePlan, DataPartition.UNPARTITIONED)); } else { // For inserts or CTAS, unless there is a limit or offset clause, leave the root // fragment partitioned, otherwise merge everything into a single coordinator // fragment, so we can pass it back to the client. boolean isPartitioned = false; if ((analysisResult.isInsertStmt() || analysisResult.isCreateTableAsSelectStmt()) && !queryStmt.hasLimit() && !queryStmt.hasOffset()) { isPartitioned = true; } LOG.debug("create plan fragments"); long perNodeMemLimit = queryOptions.mem_limit; LOG.debug("memlimit=" + Long.toString(perNodeMemLimit)); createPlanFragments( singleNodePlan, analyzer, isPartitioned, perNodeMemLimit, fragments); } PlanFragment rootFragment = fragments.get(fragments.size() - 1); if (analysisResult.isInsertStmt() || analysisResult.isCreateTableAsSelectStmt()) { InsertStmt insertStmt = analysisResult.getInsertStmt(); if (queryOptions.num_nodes != 1) { // repartition on partition keys rootFragment = createInsertFragment( rootFragment, insertStmt, analyzer, fragments); } // set up table sink for root fragment rootFragment.setSink(insertStmt.createDataSink()); } if (analysisResult.isInsertStmt()) { rootFragment.setOutputExprs(analysisResult.getInsertStmt().getResultExprs()); } else { List<Expr> resultExprs = Expr.substituteList(queryStmt.getBaseTblResultExprs(), 
rootFragment.getPlanRoot().getOutputSmap(), analyzer); rootFragment.setOutputExprs(resultExprs); } LOG.debug("desctbl: " + analyzer.getDescTbl().debugString()); LOG.debug("resultexprs: " + Expr.debugString(rootFragment.getOutputExprs())); LOG.debug("finalize plan fragments"); for (PlanFragment fragment: fragments) { fragment.finalize(analyzer); } Collections.reverse(fragments); return fragments; } /** * Return combined explain string for all plan fragments. * Includes the estimated resource requirements from the request if set. */ public String getExplainString(ArrayList<PlanFragment> fragments, TQueryExecRequest request, TExplainLevel explainLevel) { StringBuilder str = new StringBuilder(); boolean hasHeader = false; if (request.isSetPer_host_mem_req() && request.isSetPer_host_vcores()) { str.append( String.format("Estimated Per-Host Requirements: Memory=%s VCores=%s\n", PrintUtils.printBytes(request.getPer_host_mem_req()), request.per_host_vcores)); hasHeader = true; } // Append warning about tables missing stats. if (request.query_ctx.isSetTables_missing_stats() && !request.query_ctx.getTables_missing_stats().isEmpty()) { List<String> tableNames = Lists.newArrayList(); for (TTableName tableName: request.query_ctx.getTables_missing_stats()) { tableNames.add(tableName.db_name + "." + tableName.table_name); } str.append("WARNING: The following tables are missing relevant table " + "and/or column statistics.\n" + Joiner.on(", ").join(tableNames) + "\n"); hasHeader = true; } if (hasHeader) str.append("\n"); if (explainLevel.ordinal() < TExplainLevel.VERBOSE.ordinal()) { // Print the non-fragmented parallel plan. str.append(fragments.get(0).getExplainString(explainLevel)); } else { // Print the fragmented parallel plan. 
for (int i = 0; i < fragments.size(); ++i) { PlanFragment fragment = fragments.get(i); str.append(fragment.getExplainString(explainLevel)); if (explainLevel == TExplainLevel.VERBOSE && i + 1 != fragments.size()) { str.append("\n"); } } } return str.toString(); } /** * Return plan fragment that produces result of 'root'; recursively creates * all input fragments to the returned fragment. * If a new fragment is created, it is appended to 'fragments', so that * each fragment is preceded by those from which it consumes the output. * If 'isPartitioned' is false, the returned fragment is unpartitioned; * otherwise it may be partitioned, depending on whether its inputs are * partitioned; the partition function is derived from the inputs. */ private PlanFragment createPlanFragments( PlanNode root, Analyzer analyzer, boolean isPartitioned, long perNodeMemLimit, ArrayList<PlanFragment> fragments) throws InternalException, NotImplementedException { ArrayList<PlanFragment> childFragments = Lists.newArrayList(); for (PlanNode child: root.getChildren()) { // allow child fragments to be partitioned, unless they contain a limit clause // (the result set with the limit constraint needs to be computed centrally); // merge later if needed boolean childIsPartitioned = !child.hasLimit(); childFragments.add( createPlanFragments( child, analyzer, childIsPartitioned, perNodeMemLimit, fragments)); } PlanFragment result = null; if (root instanceof ScanNode) { result = createScanFragment(root); fragments.add(result); } else if (root instanceof HashJoinNode) { Preconditions.checkState(childFragments.size() == 2); result = createHashJoinFragment( (HashJoinNode) root, childFragments.get(1), childFragments.get(0), perNodeMemLimit, fragments, analyzer); } else if (root instanceof CrossJoinNode) { Preconditions.checkState(childFragments.size() == 2); result = createCrossJoinFragment( (CrossJoinNode) root, childFragments.get(1), childFragments.get(0), perNodeMemLimit, fragments, analyzer); } else 
if (root instanceof SelectNode) { result = createSelectNodeFragment((SelectNode) root, childFragments, analyzer); } else if (root instanceof UnionNode) { result = createUnionNodeFragment((UnionNode) root, childFragments, fragments, analyzer); } else if (root instanceof AggregationNode) { result = createAggregationFragment( (AggregationNode) root, childFragments.get(0), fragments, analyzer); } else if (root instanceof SortNode) { if (((SortNode) root).isAnalyticSort()) { // don't parallelize this like a regular SortNode result = createAnalyticFragment( (SortNode) root, childFragments.get(0), fragments, analyzer); } else { result = createOrderByFragment( (SortNode) root, childFragments.get(0), fragments, analyzer); } } else if (root instanceof AnalyticEvalNode) { result = createAnalyticFragment(root, childFragments.get(0), fragments, analyzer); } else if (root instanceof EmptySetNode) { result = new PlanFragment( fragmentIdGenerator_.getNextId(), root, DataPartition.UNPARTITIONED); } else { throw new InternalException( "Cannot create plan fragment for this node type: " + root.getExplainString()); } // move 'result' to end, it depends on all of its children fragments.remove(result); fragments.add(result); if (!isPartitioned && result.isPartitioned()) { result = createMergeFragment(result, analyzer); fragments.add(result); } return result; } /** * Returns the product of the distinct value estimates of the individual exprs * or -1 if any of them doesn't have a distinct value estimate. */ private long getNumDistinctValues(List<Expr> exprs) { long result = 1; for (Expr expr: exprs) { result *= expr.getNumDistinctValues(); if (result < 0) return -1; } return result; } /** * Makes a cost-based decision on whether to repartition the output of 'inputFragment' * before feeding its data into the table sink of the given 'insertStmt'. Considers * user-supplied plan hints to determine whether to repartition or not. 
* Returns a plan fragment that partitions the output of 'inputFragment' on the * partition exprs of 'insertStmt', unless the expected number of partitions is less * than the number of nodes on which inputFragment runs. * If it ends up creating a new fragment, appends that to 'fragments'. */ private PlanFragment createInsertFragment( PlanFragment inputFragment, InsertStmt insertStmt, Analyzer analyzer, ArrayList<PlanFragment> fragments) throws InternalException { List<Expr> partitionExprs = insertStmt.getPartitionKeyExprs(); Boolean partitionHint = insertStmt.isRepartition(); if (partitionExprs.isEmpty()) return inputFragment; if (partitionHint != null && !partitionHint) return inputFragment; // we ignore constants for the sake of partitioning List<Expr> nonConstPartitionExprs = Lists.newArrayList(partitionExprs); Expr.removeConstants(nonConstPartitionExprs); DataPartition inputPartition = inputFragment.getDataPartition(); // do nothing if the input fragment is already appropriately partitioned if (analyzer.isEquivSlots(inputPartition.getPartitionExprs(), nonConstPartitionExprs)) { return inputFragment; } // if the existing partition exprs are a subset of the table partition exprs, check // if it is distributed across all nodes; if so, don't repartition if (Expr.isSubset(inputPartition.getPartitionExprs(), nonConstPartitionExprs)) { long numPartitions = getNumDistinctValues(inputPartition.getPartitionExprs()); if (numPartitions >= inputFragment.getNumNodes()) return inputFragment; } // don't repartition if the resulting number of partitions is too low to get good // parallelism long numPartitions = getNumDistinctValues(nonConstPartitionExprs); // don't repartition if we know we have fewer partitions than nodes // (ie, default to repartitioning if col stats are missing) // TODO: we want to repartition if the resulting files would otherwise // be very small (less than some reasonable multiple of the recommended block size); // in order to do that, we need to come up 
with an estimate of the avg row size // in the particular file format of the output table/partition. // We should always know on how many nodes our input is running. Preconditions.checkState(inputFragment.getNumNodes() != -1); if (partitionHint == null && numPartitions > 0 && numPartitions <= inputFragment.getNumNodes()) { return inputFragment; } Preconditions.checkState(partitionHint == null || partitionHint); ExchangeNode exchNode = new ExchangeNode(nodeIdGenerator_.getNextId()); exchNode.addChild(inputFragment.getPlanRoot(), false, analyzer); exchNode.init(analyzer); Preconditions.checkState(exchNode.hasValidStats()); DataPartition partition = new DataPartition(TPartitionType.HASH_PARTITIONED, nonConstPartitionExprs); PlanFragment fragment = new PlanFragment(fragmentIdGenerator_.getNextId(), exchNode, partition); inputFragment.setDestination(exchNode); inputFragment.setOutputPartition(partition); fragments.add(fragment); return fragment; } /** * Return unpartitioned fragment that merges the input fragment's output via * an ExchangeNode. * Requires that input fragment be partitioned. */ private PlanFragment createMergeFragment( PlanFragment inputFragment, Analyzer analyzer) throws InternalException { Preconditions.checkState(inputFragment.isPartitioned()); ExchangeNode mergePlan = new ExchangeNode(nodeIdGenerator_.getNextId()); mergePlan.addChild(inputFragment.getPlanRoot(), false, analyzer); mergePlan.init(analyzer); Preconditions.checkState(mergePlan.hasValidStats()); PlanFragment fragment = new PlanFragment(fragmentIdGenerator_.getNextId(), mergePlan, DataPartition.UNPARTITIONED); inputFragment.setDestination(mergePlan); return fragment; } /** * Create new randomly-partitioned fragment containing a single scan node. 
* TODO: take bucketing into account to produce a naturally hash-partitioned * fragment * TODO: hbase scans are range-partitioned on the row key */ private PlanFragment createScanFragment(PlanNode node) { return new PlanFragment( fragmentIdGenerator_.getNextId(), node, DataPartition.RANDOM); } /** * Modifies the leftChildFragment to execute a cross join. The right child input is * provided by an ExchangeNode, which is the destination of the rightChildFragment's * output. */ private PlanFragment createCrossJoinFragment(CrossJoinNode node, PlanFragment rightChildFragment, PlanFragment leftChildFragment, long perNodeMemLimit, ArrayList<PlanFragment> fragments, Analyzer analyzer) throws InternalException { // The rhs tree is going to send data through an exchange node which effectively // compacts the data. No reason to do it again at the rhs root node. rightChildFragment.getPlanRoot().setCompactData(false); node.setChild(0, leftChildFragment.getPlanRoot()); connectChildFragment(analyzer, node, 1, rightChildFragment); leftChildFragment.setPlanRoot(node); return leftChildFragment; } /** * Creates either a broadcast join or a repartitioning join, depending on the * expected cost. * If any of the inputs to the cost computation is unknown, it assumes the cost * will be 0. Costs being equal, it'll favor partitioned over broadcast joins. * If perNodeMemLimit > 0 and the size of the hash table for a broadcast join is * expected to exceed that mem limit, switches to partitioned join instead. * TODO: revisit the choice of broadcast as the default * TODO: don't create a broadcast join if we already anticipate that this will * exceed the query's memory budget. 
*/ private PlanFragment createHashJoinFragment( HashJoinNode node, PlanFragment rightChildFragment, PlanFragment leftChildFragment, long perNodeMemLimit, ArrayList<PlanFragment> fragments, Analyzer analyzer) throws InternalException { // broadcast: send the rightChildFragment's output to each node executing // the leftChildFragment; the cost across all nodes is proportional to the // total amount of data sent PlanNode rhsTree = rightChildFragment.getPlanRoot(); long rhsDataSize = 0; long broadcastCost = Long.MAX_VALUE; if (rhsTree.getCardinality() != -1 && leftChildFragment.getNumNodes() != -1) { rhsDataSize = Math.round( (double) rhsTree.getCardinality() * rhsTree.getAvgRowSize()); broadcastCost = rhsDataSize * leftChildFragment.getNumNodes(); } LOG.debug("broadcast: cost=" + Long.toString(broadcastCost)); LOG.debug("card=" + Long.toString(rhsTree.getCardinality()) + " row_size=" + Float.toString(rhsTree.getAvgRowSize()) + " #nodes=" + Integer.toString(leftChildFragment.getNumNodes())); // repartition: both left- and rightChildFragment are partitioned on the // join exprs PlanNode lhsTree = leftChildFragment.getPlanRoot(); long partitionCost = Long.MAX_VALUE; List<Expr> lhsJoinExprs = Lists.newArrayList(); List<Expr> rhsJoinExprs = Lists.newArrayList(); for (Pair<Expr, Expr> pair: node.getEqJoinConjuncts()) { // no remapping necessary lhsJoinExprs.add(pair.first.clone()); rhsJoinExprs.add(pair.second.clone()); } boolean lhsHasCompatPartition = false; boolean rhsHasCompatPartition = false; if (lhsTree.getCardinality() != -1 && rhsTree.getCardinality() != -1) { lhsHasCompatPartition = analyzer.isEquivSlots(lhsJoinExprs, leftChildFragment.getDataPartition().getPartitionExprs()); rhsHasCompatPartition = analyzer.isEquivSlots(rhsJoinExprs, rightChildFragment.getDataPartition().getPartitionExprs()); double lhsCost = (lhsHasCompatPartition) ? 0.0 : Math.round((double) lhsTree.getCardinality() * lhsTree.getAvgRowSize()); double rhsCost = (rhsHasCompatPartition) ? 
0.0 : Math.round((double) rhsTree.getCardinality() * rhsTree.getAvgRowSize()); partitionCost = Math.round(lhsCost + rhsCost); } LOG.debug("partition: cost=" + Long.toString(partitionCost)); LOG.debug("lhs card=" + Long.toString(lhsTree.getCardinality()) + " row_size=" + Float.toString(lhsTree.getAvgRowSize())); LOG.debug("rhs card=" + Long.toString(rhsTree.getCardinality()) + " row_size=" + Float.toString(rhsTree.getAvgRowSize())); LOG.debug(rhsTree.getExplainString()); boolean doBroadcast; // we do a broadcast join if // - we're explicitly told to do so // - or if it's cheaper and we weren't explicitly told to do a partitioned join // - and we're not doing a full outer or right outer/semi join (those require the // left-hand side to be partitioned for correctness) // - and the expected size of the hash tbl doesn't exceed perNodeMemLimit // we do a "<=" comparison of the costs so that we default to broadcast joins if // we're unable to estimate the cost if (node.getJoinOp() != JoinOperator.RIGHT_OUTER_JOIN && node.getJoinOp() != JoinOperator.FULL_OUTER_JOIN && node.getJoinOp() != JoinOperator.RIGHT_SEMI_JOIN && node.getJoinOp() != JoinOperator.RIGHT_ANTI_JOIN && (perNodeMemLimit == 0 || Math.round((double) rhsDataSize * HASH_TBL_SPACE_OVERHEAD) <= perNodeMemLimit) && (node.getTableRef().isBroadcastJoin() || (!node.getTableRef().isPartitionedJoin() && broadcastCost <= partitionCost))) { doBroadcast = true; } else { doBroadcast = false; } // The rhs tree is going to send data through an exchange node which effectively // compacts the data. No reason to do it again at the rhs root node. 
rhsTree.setCompactData(false); if (doBroadcast) { node.setDistributionMode(HashJoinNode.DistributionMode.BROADCAST); // Doesn't create a new fragment, but modifies leftChildFragment to execute // the join; the build input is provided by an ExchangeNode, which is the // destination of the rightChildFragment's output node.setChild(0, leftChildFragment.getPlanRoot()); connectChildFragment(analyzer, node, 1, rightChildFragment); leftChildFragment.setPlanRoot(node); return leftChildFragment; } else { node.setDistributionMode(HashJoinNode.DistributionMode.PARTITIONED); // The lhs and rhs input fragments are already partitioned on the join exprs. // Combine the lhs/rhs input fragments into leftChildFragment by placing the join // node into leftChildFragment and setting its lhs/rhs children to the plan root of // the lhs/rhs child fragment, respectively. No new child fragments or exchanges // are created, and the rhs fragment is removed. if (lhsHasCompatPartition && rhsHasCompatPartition) { node.setChild(0, leftChildFragment.getPlanRoot()); node.setChild(1, rightChildFragment.getPlanRoot()); // Redirect fragments sending to rightFragment to leftFragment. for (PlanFragment fragment: fragments) { if (fragment.getDestFragment() == rightChildFragment) { fragment.setDestination(fragment.getDestNode()); } } // Remove right fragment because its plan tree has been merged into leftFragment. fragments.remove(rightChildFragment); leftChildFragment.setPlanRoot(node); return leftChildFragment; } // The lhs input fragment is already partitioned on the join exprs. // Make the HashJoin the new root of leftChildFragment and set the join's // first child to the lhs plan root. The second child of the join is an // ExchangeNode that is fed by the rhsInputFragment whose sink repartitions // its data by the rhs join exprs. 
DataPartition rhsJoinPartition = new DataPartition( TPartitionType.HASH_PARTITIONED, Expr.cloneList(rhsJoinExprs)); if (lhsHasCompatPartition) { node.setChild(0, leftChildFragment.getPlanRoot()); connectChildFragment(analyzer, node, 1, rightChildFragment); rightChildFragment.setOutputPartition(rhsJoinPartition); leftChildFragment.setPlanRoot(node); return leftChildFragment; } // Same as above but with rhs and lhs reversed. DataPartition lhsJoinPartition = new DataPartition( TPartitionType.HASH_PARTITIONED, Expr.cloneList(lhsJoinExprs)); if (rhsHasCompatPartition) { node.setChild(1, rightChildFragment.getPlanRoot()); connectChildFragment(analyzer, node, 0, leftChildFragment); leftChildFragment.setOutputPartition(lhsJoinPartition); rightChildFragment.setPlanRoot(node); return rightChildFragment; } // Neither lhs nor rhs are already partitioned on the join exprs. // Create a new parent fragment containing a HashJoin node with two // ExchangeNodes as inputs; the latter are the destinations of the // left- and rightChildFragments, which now partition their output // on their respective join exprs. // The new fragment is hash-partitioned on the lhs input join exprs. ExchangeNode lhsExchange = new ExchangeNode(nodeIdGenerator_.getNextId()); lhsExchange.addChild(leftChildFragment.getPlanRoot(), false, analyzer); lhsExchange.computeStats(null); node.setChild(0, lhsExchange); ExchangeNode rhsExchange = new ExchangeNode(nodeIdGenerator_.getNextId()); rhsExchange.addChild(rightChildFragment.getPlanRoot(), false, analyzer); rhsExchange.computeStats(null); node.setChild(1, rhsExchange); // Connect the child fragments in a new fragment, and set the data partition // of the new fragment and its child fragments. 
PlanFragment joinFragment = new PlanFragment(fragmentIdGenerator_.getNextId(), node, lhsJoinPartition); leftChildFragment.setDestination(lhsExchange); leftChildFragment.setOutputPartition(lhsJoinPartition); rightChildFragment.setDestination(rhsExchange); rightChildFragment.setOutputPartition(rhsJoinPartition); return joinFragment; } } /** * Returns a new fragment with a UnionNode as its root. The data partition of the * returned fragment and how the data of the child fragments is consumed depends on the * data partitions of the child fragments: * - All child fragments are unpartitioned or partitioned: The returned fragment has an * UNPARTITIONED or RANDOM data partition, respectively. The UnionNode absorbs the * plan trees of all child fragments. * - Mixed partitioned/unpartitioned child fragments: The returned fragment is * RANDOM partitioned. The plan trees of all partitioned child fragments are absorbed * into the UnionNode. All unpartitioned child fragments are connected to the * UnionNode via a RANDOM exchange, and remain unchanged otherwise. */ private PlanFragment createUnionNodeFragment(UnionNode unionNode, ArrayList<PlanFragment> childFragments, ArrayList<PlanFragment> fragments, Analyzer analyzer) throws InternalException { Preconditions.checkState(unionNode.getChildren().size() == childFragments.size()); // A UnionNode could have no children or constant selects if all of its operands // were dropped because of constant predicates that evaluated to false. if (unionNode.getChildren().isEmpty()) { return new PlanFragment( fragmentIdGenerator_.getNextId(), unionNode, DataPartition.UNPARTITIONED); } Preconditions.checkState(!childFragments.isEmpty()); int numUnpartitionedChildFragments = 0; for (int i = 0; i < childFragments.size(); ++i) { if (!childFragments.get(i).isPartitioned()) ++numUnpartitionedChildFragments; } // If all child fragments are unpartitioned, return a single unpartitioned fragment // with a UnionNode that merges all child fragments. 
if (numUnpartitionedChildFragments == childFragments.size()) { // Absorb the plan trees of all childFragments into unionNode. for (int i = 0; i < childFragments.size(); ++i) { unionNode.setChild(i, childFragments.get(i).getPlanRoot()); } PlanFragment unionFragment = new PlanFragment(fragmentIdGenerator_.getNextId(), unionNode, DataPartition.UNPARTITIONED); unionNode.init(analyzer); // All child fragments have been absorbed into unionFragment. fragments.removeAll(childFragments); return unionFragment; } // There is at least one partitioned child fragment. for (int i = 0; i < childFragments.size(); ++i) { PlanFragment childFragment = childFragments.get(i); if (childFragment.isPartitioned()) { // Absorb the plan trees of all partitioned child fragments into unionNode. unionNode.setChild(i, childFragment.getPlanRoot()); fragments.remove(childFragment); } else { // Connect the unpartitioned child fragments to unionNode via a random exchange. connectChildFragment(analyzer, unionNode, i, childFragment); childFragment.setOutputPartition(DataPartition.RANDOM); } } // Fragment contains the UnionNode that consumes the data of all child fragments. PlanFragment unionFragment = new PlanFragment(fragmentIdGenerator_.getNextId(), unionNode, DataPartition.RANDOM); unionNode.reorderOperands(analyzer); unionNode.init(analyzer); return unionFragment; } /** * Adds the SelectNode as the new plan root to the child fragment and returns * the child fragment. 
*/ private PlanFragment createSelectNodeFragment(SelectNode selectNode, ArrayList<PlanFragment> childFragments, Analyzer analyzer) { Preconditions.checkState(selectNode.getChildren().size() == childFragments.size()); PlanFragment childFragment = childFragments.get(0); // set the child explicitly, an ExchangeNode might have been inserted // (whereas selectNode.child[0] would point to the original child) selectNode.setChild(0, childFragment.getPlanRoot()); childFragment.setPlanRoot(selectNode); return childFragment; } /** * Replace node's child at index childIdx with an ExchangeNode that receives its * input from childFragment. */ private void connectChildFragment(Analyzer analyzer, PlanNode node, int childIdx, PlanFragment childFragment) throws InternalException { ExchangeNode exchangeNode = new ExchangeNode(nodeIdGenerator_.getNextId()); exchangeNode.addChild(childFragment.getPlanRoot(), false, analyzer); exchangeNode.init(analyzer); node.setChild(childIdx, exchangeNode); childFragment.setDestination(exchangeNode); } /** * Create a new fragment containing a single ExchangeNode that consumes the output * of childFragment, set the destination of childFragment to the new parent * and the output partition of childFragment to that of the new parent. * TODO: the output partition of a child isn't necessarily the same as the data * partition of the receiving parent (if there is more materialization happening * in the parent, such as during distinct aggregation). Do we care about the data * partition of the parent being applicable to the *output* of the parent (it's * correct for the input). 
*/ private PlanFragment createParentFragment( Analyzer analyzer, PlanFragment childFragment, DataPartition parentPartition) throws InternalException { ExchangeNode exchangeNode = new ExchangeNode(nodeIdGenerator_.getNextId()); exchangeNode.addChild(childFragment.getPlanRoot(), false, analyzer); exchangeNode.init(analyzer); PlanFragment parentFragment = new PlanFragment(fragmentIdGenerator_.getNextId(), exchangeNode, parentPartition); childFragment.setDestination(exchangeNode); childFragment.setOutputPartition(parentPartition); return parentFragment; } /** * Returns a fragment that materializes the aggregation result of 'node'. * If the child fragment is partitioned, the result fragment will be partitioned on * the grouping exprs of 'node'. * If 'node' is phase 1 of a 2-phase DISTINCT aggregation, this will simply * add 'node' to the child fragment and return the child fragment; the new * fragment will be created by the subsequent call of createAggregationFragment() * for the phase 2 AggregationNode. 
   */
  private PlanFragment createAggregationFragment(AggregationNode node,
      PlanFragment childFragment, ArrayList<PlanFragment> fragments,
      Analyzer analyzer) throws InternalException {
    if (!childFragment.isPartitioned()) {
      // nothing to distribute; do full aggregation directly within childFragment
      childFragment.addPlanRoot(node);
      return childFragment;
    }

    if (node.getAggInfo().isDistinctAgg()) {
      // 'node' is phase 1 of a DISTINCT aggregation; the actual agg fragment
      // will get created in the next createAggregationFragment() call
      // for the parent AggregationNode
      childFragment.addPlanRoot(node);
      node.setIntermediateTuple();
      return childFragment;
    }

    ArrayList<Expr> groupingExprs = node.getAggInfo().getGroupingExprs();
    boolean hasGrouping = !groupingExprs.isEmpty();

    // true iff 'node' is the 2nd phase of a DISTINCT aggregation (its child is the
    // phase-1 agg node that was left in the child fragment above)
    boolean isDistinct =
        node.getChild(0) instanceof AggregationNode
          && ((AggregationNode)(node.getChild(0))).getAggInfo().isDistinctAgg();

    if (!isDistinct) {
      // the original aggregation materializes the intermediate agg tuple and goes
      // into the child fragment; merge aggregation materializes the output agg tuple
      // and goes into a parent fragment
      childFragment.addPlanRoot(node);
      node.setIntermediateTuple();
      // if there is a limit, we need to transfer it from the pre-aggregation
      // node in the child fragment to the merge aggregation node in the parent
      long limit = node.getLimit();
      node.unsetLimit();
      node.unsetNeedsFinalize();

      DataPartition parentPartition = null;
      if (hasGrouping) {
        // the parent fragment is partitioned on the grouping exprs;
        // substitute grouping exprs to reference the *output* of the agg, not the input
        // TODO: add infrastructure so that all PlanNodes have smaps to make this
        // process of turning exprs into executable exprs less ad-hoc; might even want to
        // introduce another mechanism that simply records a mapping of slots
        List<Expr> partitionExprs = Expr.substituteList(
            groupingExprs, node.getAggInfo().getIntermediateSmap(), analyzer);
        parentPartition =
            new DataPartition(TPartitionType.HASH_PARTITIONED, partitionExprs);
      } else {
        // the parent fragment is unpartitioned
        parentPartition = DataPartition.UNPARTITIONED;
      }

      // place a merge aggregation step in a new fragment
      PlanFragment mergeFragment =
          createParentFragment(analyzer, childFragment, parentPartition);
      AggregationNode mergeAggNode = new AggregationNode(
          nodeIdGenerator_.getNextId(), mergeFragment.getPlanRoot(),
          node.getAggInfo().getMergeAggInfo());
      mergeAggNode.init(analyzer);
      mergeAggNode.setLimit(limit);

      // HAVING predicates can only be evaluated after the merge agg step
      node.transferConjuncts(mergeAggNode);
      // Recompute stats after transferring the conjuncts_ (order is important).
      node.computeStats(analyzer);
      mergeFragment.getPlanRoot().computeStats(analyzer);
      mergeAggNode.computeStats(analyzer);
      // Set new plan root after updating stats.
      mergeFragment.addPlanRoot(mergeAggNode);

      return mergeFragment;
    }

    Preconditions.checkState(isDistinct);
    // The first-phase aggregation node is already in the child fragment.
    Preconditions.checkState(node.getChild(0) == childFragment.getPlanRoot());

    AggregateInfo firstPhaseAggInfo = ((AggregationNode) node.getChild(0)).getAggInfo();
    List<Expr> partitionExprs = null;
    if (hasGrouping) {
      // We need to do
      // - child fragment:
      //   * phase-1 aggregation
      // - merge fragment, hash-partitioned on grouping exprs:
      //   * merge agg of phase 1
      //   * phase 2 agg
      // The output partition exprs of the child are the (input) grouping exprs of the
      // parent. The grouping exprs reference the output tuple of the 1st phase, but the
      // partitioning happens on the intermediate tuple of the 1st phase.
      partitionExprs = Expr.substituteList(groupingExprs,
          firstPhaseAggInfo.getOutputToIntermediateSmap(), analyzer);
    } else {
      // We need to do
      // - child fragment:
      //   * phase-1 aggregation
      // - merge fragment 1, hash-partitioned on distinct exprs:
      //   * merge agg of phase 1
      //   * phase 2 agg
      // - merge fragment 2, unpartitioned:
      //   * merge agg of phase 2
      partitionExprs = Expr.substituteList(firstPhaseAggInfo.getGroupingExprs(),
          firstPhaseAggInfo.getIntermediateSmap(), analyzer);
    }
    DataPartition mergePartition =
        new DataPartition(TPartitionType.HASH_PARTITIONED, partitionExprs);

    // place a merge aggregation step for the 1st phase in a new fragment
    PlanFragment mergeFragment =
        createParentFragment(analyzer, childFragment, mergePartition);
    AggregateInfo mergeAggInfo = firstPhaseAggInfo.getMergeAggInfo();
    AggregationNode mergeAggNode = new AggregationNode(
        nodeIdGenerator_.getNextId(), node.getChild(0), mergeAggInfo);
    mergeAggNode.init(analyzer);
    mergeAggNode.unsetNeedsFinalize();
    // The output of the 1st phase agg is the 1st phase intermediate.
    mergeAggNode.setIntermediateTuple();
    mergeFragment.addPlanRoot(mergeAggNode);
    // the 2nd-phase aggregation consumes the output of the merge agg;
    // if there is a limit, it had already been placed with the 2nd aggregation
    // step (which is where it should be)
    mergeFragment.addPlanRoot(node);

    if (!hasGrouping) {
      // place the merge aggregation of the 2nd phase in an unpartitioned fragment;
      // add preceding merge fragment at end
      fragments.add(mergeFragment);

      node.unsetNeedsFinalize();
      node.setIntermediateTuple();
      mergeFragment =
          createParentFragment(analyzer, mergeFragment, DataPartition.UNPARTITIONED);
      mergeAggInfo = node.getAggInfo().getMergeAggInfo();
      mergeAggNode = new AggregationNode(
          nodeIdGenerator_.getNextId(), node.getChild(0), mergeAggInfo);
      mergeAggNode.init(analyzer);
      // Transfer having predicates. If hasGrouping == true, the predicates should
      // instead be evaluated by the 2nd phase agg (the predicates are already there).
      node.transferConjuncts(mergeAggNode);
      mergeFragment.addPlanRoot(mergeAggNode);
    }
    return mergeFragment;
  }

  /**
   * Returns a fragment that produces the output of either an AnalyticEvalNode
   * or of the SortNode that provides the input to an AnalyticEvalNode.
   * ('node' can be either an AnalyticEvalNode or a SortNode).
   * The returned fragment is either partitioned on the Partition By exprs or
   * unpartitioned in the absence of such exprs.
   */
  private PlanFragment createAnalyticFragment(PlanNode node,
      PlanFragment childFragment, ArrayList<PlanFragment> fragments,
      Analyzer analyzer) throws InternalException {
    Preconditions.checkState(
        node instanceof SortNode || node instanceof AnalyticEvalNode);
    if (node instanceof AnalyticEvalNode) {
      AnalyticEvalNode analyticNode = (AnalyticEvalNode) node;
      if (analyticNode.getPartitionExprs().isEmpty()
          && analyticNode.getOrderByElements().isEmpty()) {
        // no Partition-By/Order-By exprs: compute analytic exprs in single
        // unpartitioned fragment
        PlanFragment fragment = childFragment;
        if (childFragment.isPartitioned()) {
          fragment = createParentFragment(
              analyzer, childFragment, DataPartition.UNPARTITIONED);
        }
        fragment.addPlanRoot(analyticNode);
        return fragment;
      } else {
        // Partition-By and/or Order-By exprs exist: childFragment's partition
        // already satisfies the analytic node's requirements
        childFragment.addPlanRoot(analyticNode);
        return childFragment;
      }
    }

    SortNode sortNode = (SortNode) node;
    Preconditions.checkState(sortNode.isAnalyticSort());
    PlanFragment analyticFragment = childFragment;
    if (sortNode.getInputPartition() != null) {
      // make sure the childFragment's output is partitioned as required by the sortNode
      sortNode.getInputPartition().substitute(
          childFragment.getPlanRoot().getOutputSmap(), analyzer);
      if (!childFragment.getDataPartition().equals(sortNode.getInputPartition())) {
        analyticFragment = createParentFragment(
            analyzer, childFragment, sortNode.getInputPartition());
      }
    }
    analyticFragment.addPlanRoot(sortNode);
    return analyticFragment;
  }

  /**
   * Returns a new unpartitioned fragment that materializes the result of the given
   * SortNode.
If the child fragment is partitioned, returns a new fragment with a * sort-merging exchange that merges the results of the partitioned sorts. * The offset and limit are adjusted in the child and parent plan nodes to produce * the correct result. */ private PlanFragment createOrderByFragment(SortNode node, PlanFragment childFragment, ArrayList<PlanFragment> fragments, Analyzer analyzer) throws InternalException { node.setChild(0, childFragment.getPlanRoot()); childFragment.addPlanRoot(node); if (!childFragment.isPartitioned()) return childFragment; // Remember original offset and limit. boolean hasLimit = node.hasLimit(); long limit = node.getLimit(); long offset = node.getOffset(); // Create a new fragment for a sort-merging exchange. PlanFragment mergeFragment = createParentFragment(analyzer, childFragment, DataPartition.UNPARTITIONED); ExchangeNode exchNode = (ExchangeNode) mergeFragment.getPlanRoot(); // Set limit, offset and merge parameters in the exchange node. exchNode.unsetLimit(); if (hasLimit) exchNode.setLimit(limit); exchNode.setMergeInfo(node.getSortInfo(), offset); // Child nodes should not process the offset. If there is a limit, // the child nodes need only return (offset + limit) rows. SortNode childSortNode = (SortNode) childFragment.getPlanRoot(); Preconditions.checkState(node == childSortNode); if (hasLimit) { childSortNode.unsetLimit(); childSortNode.setLimit(limit + offset); } childSortNode.setOffset(0); childSortNode.computeStats(analyzer); exchNode.computeStats(analyzer); return mergeFragment; } /** * Create plan tree for single-node execution. Generates PlanNodes for the * Select/Project/Join/Union [All]/Group by/Having/Order by clauses of the query stmt. 
   */
  private PlanNode createQueryPlan(QueryStmt stmt, Analyzer analyzer,
      boolean disableTopN) throws ImpalaException {
    if (analyzer.hasEmptyResultSet()) {
      // short-circuit: the analysis proved the result set is empty
      ArrayList<TupleId> tupleIds = Lists.newArrayList();
      stmt.getMaterializedTupleIds(tupleIds);
      EmptySetNode node = new EmptySetNode(nodeIdGenerator_.getNextId(), tupleIds);
      node.init(analyzer);
      return node;
    }

    PlanNode root;
    if (stmt instanceof SelectStmt) {
      root = createSelectPlan((SelectStmt) stmt, analyzer);

      // insert possible AnalyticEvalNode before SortNode
      if (((SelectStmt) stmt).getAnalyticInfo() != null) {
        AnalyticInfo analyticInfo = ((SelectStmt) stmt).getAnalyticInfo();
        ArrayList<TupleId> stmtTupleIds = Lists.newArrayList();
        stmt.getMaterializedTupleIds(stmtTupleIds);
        AnalyticPlanner analyticPlanner =
            new AnalyticPlanner(stmtTupleIds, analyticInfo, analyzer,
                nodeIdGenerator_);
        root = analyticPlanner.createSingleNodePlan(root);
      }
    } else {
      Preconditions.checkState(stmt instanceof UnionStmt);
      root = createUnionPlan((UnionStmt) stmt, analyzer);
    }

    // Avoid adding a sort node if the sort tuple has no materialized slots.
    boolean sortHasMaterializedSlots = false;
    if (stmt.evaluateOrderBy()) {
      for (SlotDescriptor sortSlotDesc:
        stmt.getSortInfo().getSortTupleDescriptor().getSlots()) {
        if (sortSlotDesc.isMaterialized()) {
          sortHasMaterializedSlots = true;
          break;
        }
      }
    }

    if (stmt.evaluateOrderBy() && sortHasMaterializedSlots) {
      long limit = stmt.getLimit();
      // TODO: External sort could be used for very large limits
      // not just unlimited order-by
      boolean useTopN = stmt.hasLimit() && !disableTopN;
      root = new SortNode(nodeIdGenerator_.getNextId(), root, stmt.getSortInfo(),
          useTopN, stmt.getOffset());
      Preconditions.checkState(root.hasValidStats());
      root.setLimit(limit);
      root.init(analyzer);
    } else {
      // no (materialized) ordering: just propagate the limit
      root.setLimit(stmt.getLimit());
      root.computeStats(analyzer);
    }

    return root;
  }

  /**
   * If there are unassigned conjuncts_ that are bound by tupleIds_, returns a SelectNode
   * on top of root that evaluates those conjuncts_; otherwise returns root unchanged.
   * TODO: change this to assign the unassigned conjuncts to root itself, if that is
   * semantically correct
   */
  private PlanNode addUnassignedConjuncts(
      Analyzer analyzer, List<TupleId> tupleIds, PlanNode root)
      throws InternalException {
    // No point in adding SelectNode on top of an EmptyNode.
    if (root instanceof EmptySetNode) return root;
    Preconditions.checkNotNull(root);
    // TODO: standardize on logical tuple ids?
    List<Expr> conjuncts = analyzer.getUnassignedConjuncts(root);
    //List<Expr> conjuncts_ = analyzer.getUnassignedConjuncts(tupleIds_);
    if (conjuncts.isEmpty()) return root;
    // evaluate conjuncts_ in SelectNode
    SelectNode selectNode = new SelectNode(nodeIdGenerator_.getNextId(), root);
    // init() picks up the unassigned conjuncts_
    selectNode.init(analyzer);
    Preconditions.checkState(selectNode.hasValidStats());
    return selectNode;
  }

  /**
   * Return the cheapest plan that materializes the joins of all TblRefs in refPlans.
   * Assumes that refPlans are in the order as they originally appeared in the query.
* For this plan: * - the plan is executable, ie, all non-cross joins have equi-join predicates * - the leftmost scan is over the largest of the inputs for which we can still * construct an executable plan * - all rhs's are in decreasing order of selectiveness (percentage of rows they * eliminate) * - outer/cross/semi joins: rhs serialized size is < lhs serialized size; * enforced via join inversion, if necessary * Returns null if we can't create an executable plan. */ private PlanNode createCheapestJoinPlan( Analyzer analyzer, List<Pair<TableRef, PlanNode>> refPlans) throws ImpalaException { LOG.trace("createCheapestJoinPlan"); if (refPlans.size() == 1) return refPlans.get(0).second; // collect eligible candidates for the leftmost input; list contains // (plan, materialized size) ArrayList<Pair<TableRef, Long>> candidates = Lists.newArrayList(); for (Pair<TableRef, PlanNode> entry: refPlans) { TableRef ref = entry.first; JoinOperator joinOp = ref.getJoinOp(); // The rhs table of an outer/semi join can appear as the left-most input if we // invert the lhs/rhs and the join op. However, we may only consider this inversion // for the very first join in refPlans, otherwise we could reorder tables/joins // across outer/semi joins which is generally incorrect. if ((joinOp.isOuterJoin() || joinOp.isSemiJoin()) && ref != refPlans.get(1).first) { // ref cannot appear as the leftmost input continue; } PlanNode plan = entry.second; if (plan.getCardinality() == -1) { // use 0 for the size to avoid it becoming the leftmost input // TODO: Consider raw size of scanned partitions in the absence of stats. 
candidates.add(new Pair(ref, new Long(0))); LOG.trace("candidate " + ref.getAlias() + ": 0"); continue; } Preconditions.checkNotNull(ref.getDesc()); long materializedSize = (long) Math.ceil(plan.getAvgRowSize() * (double) plan.getCardinality()); candidates.add(new Pair(ref, new Long(materializedSize))); LOG.trace("candidate " + ref.getAlias() + ": " + Long.toString(materializedSize)); } if (candidates.isEmpty()) return null; // order candidates by descending materialized size; we want to minimize the memory // consumption of the materialized hash tables required for the join sequence Collections.sort(candidates, new Comparator<Pair<TableRef, Long>>() { public int compare(Pair<TableRef, Long> a, Pair<TableRef, Long> b) { long diff = b.second - a.second; return (diff < 0 ? -1 : (diff > 0 ? 1 : 0)); } }); for (Pair<TableRef, Long> candidate: candidates) { PlanNode result = createJoinPlan(analyzer, candidate.first, refPlans); if (result != null) return result; } return null; } /** * Returns a plan with leftmostRef's plan as its leftmost input; the joins * are in decreasing order of selectiveness (percentage of rows they eliminate). * The leftmostRef's join will be inverted if it is an outer/semi/cross join. */ private PlanNode createJoinPlan( Analyzer analyzer, TableRef leftmostRef, List<Pair<TableRef, PlanNode>> refPlans) throws ImpalaException { LOG.trace("createJoinPlan: " + leftmostRef.getAlias()); // the refs that have yet to be joined List<Pair<TableRef, PlanNode>> remainingRefs = Lists.newArrayList(); PlanNode root = null; // root of accumulated join plan for (Pair<TableRef, PlanNode> entry: refPlans) { if (entry.first == leftmostRef) { root = entry.second; } else { remainingRefs.add(entry); } } Preconditions.checkNotNull(root); // refs that have been joined. The union of joinedRefs and the refs in remainingRefs // are the set of all table refs. 
    Set<TableRef> joinedRefs = Sets.newHashSet();
    joinedRefs.add(leftmostRef);

    // If the leftmostTblRef is an outer/semi/cross join, we must invert it.
    if (leftmostRef.getJoinOp().isOuterJoin()
        || leftmostRef.getJoinOp().isSemiJoin()
        || leftmostRef.getJoinOp().isCrossJoin()) {
      leftmostRef.invertJoin();
    }

    long numOps = 0;  // running total of hash-table build + probe operations (for logging)
    int i = 0;
    while (!remainingRefs.isEmpty()) {
      // we minimize the resulting cardinality at each step in the join chain,
      // which minimizes the total number of hash table lookups
      PlanNode newRoot = null;
      Pair<TableRef, PlanNode> minEntry = null;
      for (Pair<TableRef, PlanNode> entry: remainingRefs) {
        TableRef ref = entry.first;
        LOG.trace(Integer.toString(i) + " considering ref " + ref.getAlias());

        // Determine whether we can or must consider this join at this point in the plan.
        // Place outer/semi joins at a fixed position in the plan tree (IMPALA-860),
        // s.t. all the tables appearing to the left/right of an outer/semi join in
        // the original query still remain to the left/right after join ordering. This
        // prevents join re-ordering across outer/semi joins which is generally wrong.
        // The checks below rely on remainingRefs being in the order as they originally
        // appeared in the query.
        JoinOperator joinOp = ref.getJoinOp();
        if (joinOp.isOuterJoin() || joinOp.isSemiJoin()) {
          List<TupleId> currentTids = Lists.newArrayList(root.getTblRefIds());
          currentTids.add(ref.getId());
          // Place outer/semi joins at a fixed position in the plan tree. We know that
          // the join resulting from 'ref' must become the new root if the current
          // root materializes exactly those tuple ids corresponding to TableRefs
          // appearing to the left of 'ref' in the original query.
          List<TupleId> tableRefTupleIds = ref.getAllTupleIds();
          if (!currentTids.containsAll(tableRefTupleIds) ||
              !tableRefTupleIds.containsAll(currentTids)) {
            // Do not consider the remaining table refs to prevent incorrect re-ordering
            // of tables across outer/semi/anti joins.
            break;
          }
        } else if (ref.getJoinOp().isCrossJoin()) {
          // a cross join may only be placed once its explicit lhs is in the plan
          if (!joinedRefs.contains(ref.getLeftTblRef())) continue;
        }

        PlanNode rhsPlan = entry.second;
        analyzer.setAssignedConjuncts(root.getAssignedConjuncts());
        PlanNode candidate = createJoinNode(analyzer, root, rhsPlan, null, ref, false);
        if (candidate == null) continue;
        LOG.trace("cardinality=" + Long.toString(candidate.getCardinality()));

        if (joinOp.isOuterJoin() || joinOp.isSemiJoin() || joinOp.isCrossJoin()) {
          // Invert the join if doing so reduces the size of build-side hash table
          // (may also reduce network costs depending on the join strategy).
          // Only consider this optimization if both the lhs/rhs cardinalities are known.
          long lhsCard = root.getCardinality();
          long rhsCard = rhsPlan.getCardinality();
          if (lhsCard != -1 && rhsCard != -1 &&
              lhsCard * root.getAvgRowSize() < rhsCard * rhsPlan.getAvgRowSize()) {
            ref.setJoinOp(ref.getJoinOp().invert());
            candidate = createJoinNode(analyzer, rhsPlan, root, ref, null, false);
            Preconditions.checkNotNull(candidate);
          }
          // Use 'candidate' as the new root; don't consider any other table refs at this
          // position in the plan.
          if (joinOp.isOuterJoin() || joinOp.isSemiJoin()) {
            newRoot = candidate;
            minEntry = entry;
            break;
          }
        }
        // keep the candidate with the smallest resulting cardinality
        if (newRoot == null || candidate.getCardinality() < newRoot.getCardinality()) {
          newRoot = candidate;
          minEntry = entry;
        }
      }
      // no executable join could be formed with any remaining ref
      if (newRoot == null) return null;

      // we need to insert every rhs row into the hash table and then look up
      // every lhs row
      long lhsCardinality = root.getCardinality();
      long rhsCardinality = minEntry.second.getCardinality();
      numOps += lhsCardinality + rhsCardinality;
      LOG.debug(Integer.toString(i) + " chose " + minEntry.first.getAlias()
          + " #lhs=" + Long.toString(lhsCardinality)
          + " #rhs=" + Long.toString(rhsCardinality)
          + " #ops=" + Long.toString(numOps));
      remainingRefs.remove(minEntry);
      joinedRefs.add(minEntry.first);
      root = newRoot;
      // assign id_ after running through the possible choices in order to end up
      // with a dense sequence of node ids
      root.setId(nodeIdGenerator_.getNextId());
      analyzer.setAssignedConjuncts(root.getAssignedConjuncts());
      // build side copies data to a compact representation in the tuple buffer.
      root.getChildren().get(1).setCompactData(true);
      ++i;
    }

    return root;
  }

  /**
   * Return a plan with joins in the order of refPlans (= FROM clause order).
   */
  private PlanNode createFromClauseJoinPlan(
      Analyzer analyzer, List<Pair<TableRef, PlanNode>> refPlans)
      throws ImpalaException {
    // create left-deep sequence of binary hash joins; assign node ids as we go along
    Preconditions.checkState(!refPlans.isEmpty());
    PlanNode root = refPlans.get(0).second;
    for (int i = 1; i < refPlans.size(); ++i) {
      TableRef innerRef = refPlans.get(i).first;
      PlanNode innerPlan = refPlans.get(i).second;
      root = createJoinNode(analyzer, root, innerPlan, null, innerRef, true);
      root.setId(nodeIdGenerator_.getNextId());
      // build side copies data to a compact representation in the tuple buffer.
      root.getChildren().get(1).setCompactData(true);
    }
    return root;
  }

  /**
   * Create tree of PlanNodes that implements the Select/Project/Join/Group by/Having
   * of the selectStmt query block.
   */
  private PlanNode createSelectPlan(SelectStmt selectStmt, Analyzer analyzer)
      throws ImpalaException {
    // no from clause -> materialize the select's exprs with a UnionNode
    if (selectStmt.getTableRefs().isEmpty()) {
      return createConstantSelectPlan(selectStmt, analyzer);
    }

    // collect output tuples of subtrees
    ArrayList<TupleId> rowTuples = Lists.newArrayList();
    for (TableRef tblRef: selectStmt.getTableRefs()) {
      rowTuples.addAll(tblRef.getMaterializedTupleIds());
    }

    // Slot materialization:
    // We need to mark all slots as materialized that are needed during the execution
    // of selectStmt, and we need to do that prior to creating plans for the TableRefs
    // (because createTableRefNode() might end up calling computeMemLayout() on one or
    // more TupleDescriptors, at which point all referenced slots need to be marked).
    //
    // For non-join predicates, slots are marked as follows:
    // - for base table scan predicates, this is done directly by ScanNode.init(), which
    //   can do a better job because it doesn't need to materialize slots that are only
    //   referenced for partition pruning, for instance
    // - for inline views, non-join predicates are pushed down, at which point the
    //   process repeats itself.
    selectStmt.materializeRequiredSlots(analyzer);

    // return a plan that feeds the aggregation of selectStmt with an empty set,
    // if the selectStmt's select-project-join portion returns an empty result set
    if (analyzer.hasEmptySpjResultSet()) {
      PlanNode emptySetNode = new EmptySetNode(nodeIdGenerator_.getNextId(), rowTuples);
      emptySetNode.init(analyzer);
      return createAggregationPlan(selectStmt, analyzer, emptySetNode);
    }

    // create plans for our table refs; use a list here instead of a map to
    // maintain a deterministic order of traversing the TableRefs during join
    // plan generation (helps with tests)
    List<Pair<TableRef, PlanNode>> refPlans = Lists.newArrayList();
    for (TableRef ref: selectStmt.getTableRefs()) {
      PlanNode plan = createTableRefNode(analyzer, ref);
      Preconditions.checkState(plan != null);
      refPlans.add(new Pair(ref, plan));
    }
    // save state of conjunct assignment; needed for join plan generation
    for (Pair<TableRef, PlanNode> entry: refPlans) {
      entry.second.setAssignedConjuncts(analyzer.getAssignedConjuncts());
    }

    PlanNode root = null;
    if (!selectStmt.getSelectList().isStraightJoin()) {
      // cost-based join ordering; may fail (return null) without stats
      root = createCheapestJoinPlan(analyzer, refPlans);
    }
    if (selectStmt.getSelectList().isStraightJoin() || root == null) {
      // we didn't have enough stats to do a cost-based join plan, or the STRAIGHT_JOIN
      // keyword was in the select list: use the FROM clause order instead
      root = createFromClauseJoinPlan(analyzer, refPlans);
      Preconditions.checkNotNull(root);
    }

    if (root != null) {
      // add unassigned conjuncts_ before aggregation
      // (scenario: agg input comes from an inline view which wasn't able to
      // evaluate all Where clause conjuncts_ from this scope)
      root = addUnassignedConjuncts(analyzer, root.getTupleIds(), root);
    }

    // add aggregation, if any
    if (selectStmt.getAggInfo() != null) {
      root = createAggregationPlan(selectStmt, analyzer, root);
    }

    // All the conjuncts_ should be assigned at this point.
    // TODO: Re-enable this check here and/or elsewhere.
    //Preconditions.checkState(!analyzer.hasUnassignedConjuncts());
    return root;
  }

  /**
   * Returns a new AggregationNode that materializes the aggregation of the given stmt.
   * Assigns conjuncts from the Having clause to the returned node.
   */
  private PlanNode createAggregationPlan(SelectStmt selectStmt, Analyzer analyzer,
      PlanNode root) throws InternalException {
    Preconditions.checkState(selectStmt.getAggInfo() != null);
    // add aggregation, if required
    AggregateInfo aggInfo = selectStmt.getAggInfo();
    root = new AggregationNode(nodeIdGenerator_.getNextId(), root, aggInfo);
    root.init(analyzer);
    Preconditions.checkState(root.hasValidStats());
    // if we're computing DISTINCT agg fns, the analyzer already created the
    // 2nd phase agginfo
    if (aggInfo.isDistinctAgg()) {
      ((AggregationNode)root).unsetNeedsFinalize();
      root = new AggregationNode(
          nodeIdGenerator_.getNextId(), root,
          aggInfo.getSecondPhaseDistinctAggInfo());
      root.init(analyzer);
      Preconditions.checkState(root.hasValidStats());
    }
    // add Having clause
    root.assignConjuncts(analyzer);
    return root;
  }

  /**
   * Returns a UnionNode that materializes the exprs of the constant selectStmt.
   * Replaces the resultExprs of the selectStmt with SlotRefs into the materialized
   * tuple.
   */
  private PlanNode createConstantSelectPlan(SelectStmt selectStmt, Analyzer analyzer)
      throws InternalException {
    Preconditions.checkState(selectStmt.getTableRefs().isEmpty());
    ArrayList<Expr> resultExprs = selectStmt.getBaseTblResultExprs();
    ArrayList<String> colLabels = selectStmt.getColLabels();
    // Create tuple descriptor for materialized tuple.
    TupleDescriptor tupleDesc = analyzer.getDescTbl().createTupleDescriptor();
    tupleDesc.setIsMaterialized(true);

    UnionNode unionNode = new UnionNode(nodeIdGenerator_.getNextId(), tupleDesc.getId());
    // Analysis guarantees that selects without a FROM clause only have constant exprs.
    unionNode.addConstExprList(Lists.newArrayList(resultExprs));

    // Replace the select stmt's resultExprs with SlotRefs into tupleDesc.
    for (int i = 0; i < resultExprs.size(); ++i) {
      // one slot per constant result expr, carrying its label, type and stats
      SlotDescriptor slotDesc = analyzer.addSlotDescriptor(tupleDesc);
      slotDesc.setLabel(colLabels.get(i));
      slotDesc.setType(resultExprs.get(i).getType());
      slotDesc.setStats(ColumnStats.fromExpr(resultExprs.get(i)));
      slotDesc.setIsMaterialized(true);
      SlotRef slotRef = new SlotRef(slotDesc);
      resultExprs.set(i, slotRef);
    }
    tupleDesc.computeMemLayout();
    // UnionNode.init() needs tupleDesc to have been initialized
    unionNode.init(analyzer);
    return unionNode;
  }

  /**
   * Transform '=', '<[=]' and '>[=]' comparisons for given slot into
   * ValueRange. Also removes those predicates which were used for the construction
   * of ValueRange from 'conjuncts_'. Only looks at comparisons w/ string constants
   * (ie, the bounds of the result can be evaluated with Expr::GetValue(NULL)).
   * HBase row key filtering works only if the row key is mapped to a string column and
   * the expression is a string constant expression.
   * If there are multiple competing comparison predicates that could be used
   * to construct a ValueRange, only the first one from each category is chosen.
   */
  private ValueRange createHBaseValueRange(SlotDescriptor d, List<Expr> conjuncts) {
    ListIterator<Expr> i = conjuncts.listIterator();
    ValueRange result = null;
    while (i.hasNext()) {
      Expr e = i.next();
      if (!(e instanceof BinaryPredicate)) continue;
      BinaryPredicate comp = (BinaryPredicate) e;
      // '!=' cannot contribute to a contiguous range
      if (comp.getOp() == BinaryPredicate.Operator.NE) continue;
      Expr slotBinding = comp.getSlotBinding(d.getId());
      if (slotBinding == null || !slotBinding.isConstant() ||
          !slotBinding.getType().equals(Type.STRING)) {
        continue;
      }

      if (comp.getOp() == BinaryPredicate.Operator.EQ) {
        // equality dominates any other bound; consume the predicate and stop
        i.remove();
        return ValueRange.createEqRange(slotBinding);
      }

      if (result == null) result = new ValueRange();

      // TODO: do we need copies here?
      if (comp.getOp() == BinaryPredicate.Operator.GT
          || comp.getOp() == BinaryPredicate.Operator.GE) {
        if (result.getLowerBound() == null) {
          result.setLowerBound(slotBinding);
          result.setLowerBoundInclusive(comp.getOp() == BinaryPredicate.Operator.GE);
          i.remove();
        }
      } else {
        if (result.getUpperBound() == null) {
          result.setUpperBound(slotBinding);
          result.setUpperBoundInclusive(comp.getOp() == BinaryPredicate.Operator.LE);
          i.remove();
        }
      }
    }
    return result;
  }

  /**
   * Returns plan tree for an inline view ref:
   * - predicates from the enclosing scope that can be evaluated directly within
   *   the inline-view plan are pushed down
   * - predicates that cannot be evaluated directly within the inline-view plan
   *   but only apply to the inline view are evaluated in a SelectNode placed
   *   on top of the inline view plan
   * - all slots that are referenced by predicates from the enclosing scope that cannot
   *   be pushed down are marked as materialized (so that when computeMemLayout() is
   *   called on the base table descriptors materialized by the inline view it has a
   *   complete picture)
   */
  private PlanNode createInlineViewPlan(Analyzer analyzer, InlineViewRef inlineViewRef)
      throws ImpalaException {
    // If possible, "push down" view predicates; this is needed in order to ensure
    // that predicates such as "x + y = 10" are evaluated in the view's plan tree
    // rather than a SelectNode grafted on top of that plan tree.
    // This doesn't prevent predicate propagation, because predicates like
    // "x = 10" that get pushed down are still connected to equivalent slots
    // via the equality predicates created for the view's select list.
    // Include outer join conjuncts here as well because predicates from the
    // On-clause of an outer join may be pushed into the inline view as well.
    //
    // Limitations on predicate propagation into inline views:
    // If the inline view computes analytic functions, we cannot push any
    // predicate into the inline view tree (see IMPALA-1243).
    // The reason is that
    // analytic functions compute aggregates over their entire input, and applying
    // filters from the enclosing scope *before* the aggregate computation would
    // alter the results. This is unlike regular aggregate computation, which only
    // makes the *output* of the computation visible to the enclosing scope, so that
    // filters from the enclosing scope can be safely applied (to the grouping cols, say)
    List<Expr> unassigned =
        analyzer.getUnassignedConjuncts(inlineViewRef.getId().asList(), true);
    if (!inlineViewRef.getViewStmt().hasLimit()
        && !inlineViewRef.getViewStmt().hasOffset()
        && (!(inlineViewRef.getViewStmt() instanceof SelectStmt)
            || !((SelectStmt)(inlineViewRef.getViewStmt())).hasAnalyticInfo())) {
      // check if we can evaluate them
      List<Expr> preds = Lists.newArrayList();
      for (Expr e: unassigned) {
        if (analyzer.canEvalPredicate(inlineViewRef.getId().asList(), e)) preds.add(e);
      }
      unassigned.removeAll(preds);

      // create new predicates against the inline view's unresolved result exprs, not
      // the resolved result exprs, in order to avoid skipping scopes (and ignoring
      // limit clauses on the way)
      List<Expr> viewPredicates =
          Expr.substituteList(preds, inlineViewRef.getSmap(), analyzer);

      // "migrate" conjuncts_ by marking them as assigned and re-registering them with
      // new ids.
      // Mark pre-substitution conjuncts as assigned, since the ids of the new exprs may
      // have changed.
      analyzer.markConjunctsAssigned(preds);
      analyzer.registerConjuncts(viewPredicates);
    }
    // mark (fully resolve) slots referenced by remaining unassigned conjuncts_ as
    // materialized
    List<Expr> substUnassigned = Expr.substituteList(unassigned,
        inlineViewRef.getBaseTblSmap(), analyzer);
    analyzer.materializeSlots(substUnassigned);

    // Turn a constant select into a UnionNode that materializes the exprs.
    // TODO: unify this with createConstantSelectPlan(), this is basically the
    // same thing
    QueryStmt viewStmt = inlineViewRef.getViewStmt();
    if (viewStmt instanceof SelectStmt) {
      SelectStmt selectStmt = (SelectStmt) viewStmt;
      if (selectStmt.getTableRefs().isEmpty()) {
        // Analysis should have generated a tuple id_ into which to materialize the
        // exprs.
        Preconditions.checkState(inlineViewRef.getMaterializedTupleIds().size() == 1);
        // we need to materialize all slots of our inline view tuple
        analyzer.getTupleDesc(inlineViewRef.getId()).materializeSlots();
        UnionNode unionNode = new UnionNode(nodeIdGenerator_.getNextId(),
            inlineViewRef.getMaterializedTupleIds().get(0));
        if (analyzer.hasEmptyResultSet()) return unionNode;
        unionNode.setTblRefIds(Lists.newArrayList(inlineViewRef.getId()));
        unionNode.addConstExprList(selectStmt.getBaseTblResultExprs());
        unionNode.init(analyzer);
        return unionNode;
      }
    }

    PlanNode rootNode =
        createQueryPlan(inlineViewRef.getViewStmt(), inlineViewRef.getAnalyzer(), false);
    // TODO: we should compute the "physical layout" of the view's descriptor, so that
    // the avg row size is available during optimization; however, that means we need to
    // select references to its resultExprs from the enclosing scope(s)
    rootNode.setTblRefIds(Lists.newArrayList(inlineViewRef.getId()));

    // Set smap *before* creating a SelectNode in order to allow proper resolution.
    // Analytics have an additional level of logical to physical slot remapping.
    // The composition creates a mapping from the logical output of the inline view
    // to the physical analytic output. In addition, it retains the logical to
    // physical analytic slot mappings which are needed to resolve exprs that already
    // reference the logical analytic tuple (and not the inline view tuple), e.g.,
    // the result exprs set in the coordinator fragment.
rootNode.setOutputSmap(ExprSubstitutionMap.compose(inlineViewRef.getBaseTblSmap(), rootNode.getOutputSmap(), analyzer)); // if the view has a limit we may have conjuncts_ from the enclosing scope left rootNode = addUnassignedConjuncts( analyzer, inlineViewRef.getDesc().getId().asList(), rootNode); return rootNode; } /** * Create node for scanning all data files of a particular table. */ private PlanNode createScanNode(Analyzer analyzer, TableRef tblRef) throws InternalException { ScanNode scanNode = null; if (tblRef.getTable() instanceof HdfsTable) { scanNode = new HdfsScanNode(nodeIdGenerator_.getNextId(), tblRef.getDesc(), (HdfsTable)tblRef.getTable()); scanNode.init(analyzer); return scanNode; } else if (tblRef.getTable() instanceof DataSourceTable) { scanNode = new DataSourceScanNode(nodeIdGenerator_.getNextId(), tblRef.getDesc()); scanNode.init(analyzer); return scanNode; } else if (tblRef.getTable() instanceof HBaseTable) { // HBase table scanNode = new HBaseScanNode(nodeIdGenerator_.getNextId(), tblRef.getDesc()); } else { throw new InternalException("Invalid table ref class: " + tblRef.getClass()); } // TODO: move this to HBaseScanNode.init(); Preconditions.checkState(scanNode instanceof HBaseScanNode); List<Expr> conjuncts = analyzer.getUnassignedConjuncts(scanNode); // mark conjuncts_ assigned here; they will either end up inside a // ValueRange or will be evaluated directly by the node analyzer.markConjunctsAssigned(conjuncts); List<ValueRange> keyRanges = Lists.newArrayList(); // determine scan predicates for clustering cols for (int i = 0; i < tblRef.getTable().getNumClusteringCols(); ++i) { SlotDescriptor slotDesc = analyzer.getColumnSlot( tblRef.getDesc(), tblRef.getTable().getColumns().get(i)); if (slotDesc == null || !slotDesc.getType().isStringType()) { // the hbase row key is mapped to a non-string type // (since it's stored in ascii it will be lexicographically ordered, // and non-string comparisons won't work) keyRanges.add(null); } else { // 
create ValueRange from conjuncts_ for slot; also removes conjuncts_ that were // used as input for filter keyRanges.add(createHBaseValueRange(slotDesc, conjuncts)); } } ((HBaseScanNode)scanNode).setKeyRanges(keyRanges); scanNode.addConjuncts(conjuncts); scanNode.init(analyzer); return scanNode; } /** * Return conjuncts for join between a plan tree and a single TableRef; the conjuncts * can be used for hash table lookups. * - for inner joins, those are equi-join predicates in which one side is fully bound * by planIds and the other by joinedTblRef.id_; * - for outer joins: same type of conjuncts_ as inner joins, but only from the JOIN * clause * Returns the conjuncts_ in 'joinConjuncts' (in which "<lhs> = <rhs>" is returned * as Pair(<lhs>, <rhs>)) and also in their original form in 'joinPredicates'. * Each lhs is bound by planIds, and each rhs by the tuple id of joinedTblRef. * If no conjuncts_ are found, constructs them based on equivalence classes, where * possible. In that case, they are still returned through joinConjuncts, but * joinPredicates would be empty. */ private void getHashLookupJoinConjuncts( Analyzer analyzer, List<TupleId> planIds, TableRef joinedTblRef, List<Pair<Expr, Expr>> joinConjuncts, List<Expr> joinPredicates) { joinConjuncts.clear(); joinPredicates.clear(); TupleId tblRefId = joinedTblRef.getId(); List<TupleId> tblRefIds = tblRefId.asList(); List<Expr> candidates; if (joinedTblRef.getJoinOp().isOuterJoin()) { // TODO: create test for this Preconditions.checkState(joinedTblRef.getOnClause() != null); candidates = analyzer.getEqJoinConjuncts(tblRefId, joinedTblRef); } else { candidates = analyzer.getEqJoinConjuncts(tblRefId, null); } if (candidates == null) return; // equivalence classes of eq predicates in joinPredicates Set<EquivalenceClassId> joinEquivClasses = Sets.newHashSet(); for (Expr e: candidates) { // Ignore predicate if one of its children is a constant. 
if (e.getChild(0).isConstant() || e.getChild(1).isConstant()) continue; Expr rhsExpr = null; if (e.getChild(0).isBoundByTupleIds(tblRefIds)) { rhsExpr = e.getChild(0); } else { Preconditions.checkState(e.getChild(1).isBoundByTupleIds(tblRefIds)); rhsExpr = e.getChild(1); } Expr lhsExpr = null; if (e.getChild(1).isBoundByTupleIds(planIds)) { lhsExpr = e.getChild(1); } else if (e.getChild(0).isBoundByTupleIds(planIds)) { lhsExpr = e.getChild(0); } else { // not an equi-join condition between lhsIds and rhsId continue; } // Ignore predicates that express a redundant equivalence relationship. We assume // that for each equivalence class, the equivalences between slots from only the // lhs or rhs are already enforced by predicates in the lhs/rhs plan trees, // respectively (see Analyzer.enforceSlotEquivalences()). Therefore, it is // sufficient to establish equivalence between the lhs and rhs slots by assigning // a single join predicate per equivalence class, i.e., any join predicates beyond // that are redundant. We still return those predicates in joinPredicates so they // get marked as assigned. 
Pair<SlotId, SlotId> joinSlots = BinaryPredicate.getEqSlots(e); if (joinSlots != null) { EquivalenceClassId id1 = analyzer.getEquivClassId(joinSlots.first); EquivalenceClassId id2 = analyzer.getEquivClassId(joinSlots.second); // both slots need not be in the same equiv class, due to outer joins // null check: we don't have equiv classes for anything in subqueries if (id1 != null && id2 != null && id1.equals(id2) && joinEquivClasses.contains(id1)) { // record this so it gets marked as assigned later joinPredicates.add(e); continue; } joinEquivClasses.add(id1); } // e is a non-redundant join predicate Preconditions.checkState(lhsExpr != rhsExpr); joinPredicates.add(e); joinConjuncts.add(Pair.create(lhsExpr, rhsExpr)); } if (!joinPredicates.isEmpty()) return; Preconditions.checkState(joinConjuncts.isEmpty()); // construct joinConjunct entries derived from equivalence class membership List<SlotId> lhsSlotIds = Lists.newArrayList(); for (SlotDescriptor slotDesc: joinedTblRef.getDesc().getSlots()) { analyzer.getEquivSlots(slotDesc.getId(), planIds, lhsSlotIds); if (!lhsSlotIds.isEmpty()) { // construct a BinaryPredicates in order to get correct casting; // we only do this for one of the equivalent slots, all the other implied // equalities are redundant Expr pred = analyzer.createEqPredicate(lhsSlotIds.get(0), slotDesc.getId()); joinConjuncts.add(new Pair<Expr, Expr>(pred.getChild(0), pred.getChild(1))); } } } /** * Create a node to join outer with inner. Either the outer or the inner may be a plan * created from a table ref (but not both), and the corresponding outer/innerRef * should be non-null. */ private PlanNode createJoinNode( Analyzer analyzer, PlanNode outer, PlanNode inner, TableRef outerRef, TableRef innerRef, boolean throwOnError) throws ImpalaException { Preconditions.checkState(innerRef != null ^ outerRef != null); TableRef tblRef = (innerRef != null) ? 
innerRef : outerRef; if (tblRef.getJoinOp() == JoinOperator.CROSS_JOIN) { // TODO If there are eq join predicates then we should construct a hash join CrossJoinNode result = new CrossJoinNode(outer, inner); result.init(analyzer); result.getChildren().get(1).setCompactData(true); return result; } List<Pair<Expr, Expr>> eqJoinConjuncts = Lists.newArrayList(); List<Expr> eqJoinPredicates = Lists.newArrayList(); // get eq join predicates for the TableRefs' ids (not the PlanNodes' ids, which // are materialized) if (innerRef != null) { getHashLookupJoinConjuncts( analyzer, outer.getTblRefIds(), innerRef, eqJoinConjuncts, eqJoinPredicates); } else { getHashLookupJoinConjuncts( analyzer, inner.getTblRefIds(), outerRef, eqJoinConjuncts, eqJoinPredicates); // Reverse the lhs/rhs of the join conjuncts. for (Pair<Expr, Expr> eqJoinConjunct: eqJoinConjuncts) { Expr swapTmp = eqJoinConjunct.first; eqJoinConjunct.first = eqJoinConjunct.second; eqJoinConjunct.second = swapTmp; } } if (eqJoinConjuncts.isEmpty()) { if (!throwOnError) return null; throw new NotImplementedException( String.format( "Join with '%s' requires at least one conjunctive equality predicate. To " + "perform a Cartesian product between two tables, use a CROSS JOIN.", innerRef.getAliasAsName())); } analyzer.markConjunctsAssigned(eqJoinPredicates); List<Expr> otherJoinConjuncts = Lists.newArrayList(); if (tblRef.getJoinOp().isOuterJoin()) { // Also assign conjuncts from On clause. All remaining unassigned conjuncts // that can be evaluated by this join are assigned in createSelectPlan(). otherJoinConjuncts = analyzer.getUnassignedOjConjuncts(tblRef); } else if (tblRef.getJoinOp().isSemiJoin()) { // Unassigned conjuncts bound by the rhs tuple id of a semi join must have come // from the join's On-clause, and therefore, must be added to the other join // conjuncts to produce correct results. 
      otherJoinConjuncts =
          analyzer.getUnassignedConjuncts(tblRef.getAllTupleIds(), false);
    }
    analyzer.markConjunctsAssigned(otherJoinConjuncts);
    HashJoinNode result =
        new HashJoinNode(outer, inner, tblRef, eqJoinConjuncts, otherJoinConjuncts);
    result.init(analyzer);
    // build side of join copies data to a compact representation in the tuple buffer
    result.getChildren().get(1).setCompactData(true);
    return result;
  }

  /**
   * Create a tree of PlanNodes for the given tblRef, which can be a BaseTableRef or a
   * InlineViewRef.
   *
   * @param analyzer analyzer of the statement the table ref belongs to
   * @param tblRef the table reference to turn into a plan subtree
   * @return a scan node for a BaseTableRef, or the inline view's plan tree for an
   *     InlineViewRef
   * @throws InternalException if tblRef is neither a BaseTableRef nor an InlineViewRef
   */
  private PlanNode createTableRefNode(Analyzer analyzer, TableRef tblRef)
      throws ImpalaException {
    if (tblRef instanceof BaseTableRef) {
      return createScanNode(analyzer, tblRef);
    }
    if (tblRef instanceof InlineViewRef) {
      return createInlineViewPlan(analyzer, (InlineViewRef) tblRef);
    }
    // Defensive: no other TableRef subclasses are expected here.
    throw new InternalException("unknown TableRef node");
  }

  /**
   * Create a plan tree corresponding to 'unionOperands' for the given unionStmt.
   * The individual operands' plan trees are attached to a single UnionNode.
   */
  private UnionNode createUnionPlan(
      Analyzer analyzer, UnionStmt unionStmt, List<UnionOperand> unionOperands)
      throws ImpalaException {
    UnionNode unionNode =
        new UnionNode(nodeIdGenerator_.getNextId(), unionStmt.getTupleId());
    for (UnionOperand op: unionOperands) {
      QueryStmt queryStmt = op.getQueryStmt();
      // Operands dropped earlier (see the caller) contribute no rows.
      if (op.isDropped()) continue;
      if (queryStmt instanceof SelectStmt) {
        SelectStmt selectStmt = (SelectStmt) queryStmt;
        if (selectStmt.getTableRefs().isEmpty()) {
          // Constant select (no FROM clause): materialize the result exprs directly
          // in the UnionNode rather than building a child plan tree.
          unionNode.addConstExprList(selectStmt.getBaseTblResultExprs());
          continue;
        }
      }
      PlanNode opPlan = createQueryPlan(queryStmt, analyzer, false);
      unionNode.addChild(opPlan, op.getQueryStmt().getBaseTblResultExprs());
    }
    unionNode.init(analyzer);
    return unionNode;
  }

  /**
   * Returns plan tree for unionStmt:
   * - distinctOperands' plan trees are collected in a single UnionNode
   *   and duplicates removed via distinct aggregation
   * - the output of that plus the allOperands' plan trees are collected in
   *   another UnionNode which materializes the result of unionStmt
   */
  private PlanNode createUnionPlan(UnionStmt unionStmt, Analyzer analyzer)
      throws ImpalaException {
    // Turn unassigned predicates for unionStmt's tupleId_ into predicates for
    // the individual operands.
    // Do this prior to creating the operands' plan trees so they get a chance to
    // pick up propagated predicates.
    List<Expr> conjuncts =
        analyzer.getUnassignedConjuncts(unionStmt.getTupleId().asList(), false);
    for (UnionOperand op: unionStmt.getOperands()) {
      List<Expr> opConjuncts =
          Expr.substituteList(conjuncts, op.getSmap(), analyzer);
      op.getAnalyzer().registerConjuncts(opConjuncts);
      // Some of the opConjuncts have become constant and eval'd to false, or an ancestor
      // block is already guaranteed to return empty results.
if (op.getAnalyzer().hasEmptyResultSet()) op.drop(); } analyzer.markConjunctsAssigned(conjuncts); // mark slots after predicate propagation but prior to plan tree generation unionStmt.materializeRequiredSlots(analyzer); PlanNode result = null; // create DISTINCT tree if (unionStmt.hasDistinctOps()) { result = createUnionPlan( analyzer, unionStmt, unionStmt.getDistinctOperands()); result = new AggregationNode( nodeIdGenerator_.getNextId(), result, unionStmt.getDistinctAggInfo()); result.init(analyzer); } // create ALL tree if (unionStmt.hasAllOps()) { UnionNode allMerge = createUnionPlan(analyzer, unionStmt, unionStmt.getAllOperands()); // for unionStmt, baseTblResultExprs = resultExprs if (result != null) allMerge.addChild(result, unionStmt.getDistinctAggInfo().getGroupingExprs()); result = allMerge; } return result; } /** * Represents a set of PlanNodes and DataSinks that execute and consume resources * concurrently. PlanNodes and DataSinks in such a pipelined plan node set may belong * to different plan fragments because data is streamed across fragments. * * For example, a series of left-deep joins consists of two plan node sets. The first * set contains all build-side nodes. The second set contains the leftmost * scan. Both sets contain all join nodes because they execute and consume * resources during the build and probe phases. Similarly, all nodes below a 'blocking' * node (e.g, an AggregationNode) are placed into a differnet plan node set than the * nodes above it, but the blocking node itself belongs to both sets. */ private class PipelinedPlanNodeSet { // Minimum per-host resource requirements to ensure that no plan node set can have // estimates of zero, even if the contained PlanNodes have estimates of zero. public static final long MIN_PER_HOST_MEM = 10 * 1024 * 1024; public static final int MIN_PER_HOST_VCORES = 1; // List of plan nodes that execute and consume resources concurrently. 
private final ArrayList<PlanNode> planNodes = Lists.newArrayList(); // DataSinks that execute and consume resources concurrently. // Primarily used for estimating the cost of insert queries. private final List<DataSink> dataSinks = Lists.newArrayList(); // Estimated per-host memory and CPU requirements. // Valid after computeResourceEstimates(). private long perHostMem = MIN_PER_HOST_MEM; private int perHostVcores = MIN_PER_HOST_VCORES; public void add(PlanNode node) { Preconditions.checkNotNull(node.getFragment()); planNodes.add(node); } public void addSink(DataSink sink) { Preconditions.checkNotNull(sink); dataSinks.add(sink); } /** * Computes the estimated per-host memory and CPU requirements of this plan node set. * Optionally excludes unpartitioned fragments from the estimation. * Returns true if at least one plan node was included in the estimation. * Otherwise returns false indicating the estimates are invalid. */ public boolean computeResourceEstimates(boolean excludeUnpartitionedFragments, TQueryOptions queryOptions) { Set<PlanFragment> uniqueFragments = Sets.newHashSet(); // Distinguish the per-host memory estimates for scan nodes and non-scan nodes to // get a tighter estimate on the amount of memory required by multiple concurrent // scans. The memory required by all concurrent scans of the same type (Hdfs/Hbase) // cannot exceed the per-host upper memory bound for that scan type. Intuitively, // the amount of I/O buffers is limited by the disk bandwidth. 
long perHostHbaseScanMem = 0L; long perHostHdfsScanMem = 0L; long perHostNonScanMem = 0L; for (int i = 0; i < planNodes.size(); ++i) { PlanNode node = planNodes.get(i); PlanFragment fragment = node.getFragment(); if (!fragment.isPartitioned() && excludeUnpartitionedFragments) continue; node.computeCosts(queryOptions); uniqueFragments.add(fragment); if (node.getPerHostMemCost() < 0) { LOG.warn(String.format("Invalid per-host memory requirement %s of node %s.\n" + "PlanNode stats are: numNodes_=%s ", node.getPerHostMemCost(), node.getClass().getSimpleName(), node.getNumNodes())); } if (node instanceof HBaseScanNode) { perHostHbaseScanMem += node.getPerHostMemCost(); } else if (node instanceof HdfsScanNode) { perHostHdfsScanMem += node.getPerHostMemCost(); } else { perHostNonScanMem += node.getPerHostMemCost(); } } // The memory required by concurrent scans cannot exceed the upper memory bound // for that scan type. // TODO: In the future, we may want to restrict scanner concurrency based on a // memory limit. This estimation will need to accoung for that as well. perHostHbaseScanMem = Math.min(perHostHbaseScanMem, HBaseScanNode.getPerHostMemUpperBound()); perHostHdfsScanMem = Math.min(perHostHdfsScanMem, HdfsScanNode.getPerHostMemUpperBound()); long perHostDataSinkMem = 0L; for (int i = 0; i < dataSinks.size(); ++i) { DataSink sink = dataSinks.get(i); PlanFragment fragment = sink.getFragment(); if (!fragment.isPartitioned() && excludeUnpartitionedFragments) continue; // Sanity check that this plan-node set has at least one PlanNode of fragment. Preconditions.checkState(uniqueFragments.contains(fragment)); sink.computeCosts(); if (sink.getPerHostMemCost() < 0) { LOG.warn(String.format("Invalid per-host memory requirement %s of sink %s.\n", sink.getPerHostMemCost(), sink.getClass().getSimpleName())); } perHostDataSinkMem += sink.getPerHostMemCost(); } // Combine the memory estimates of all sinks, scans nodes and non-scan nodes. 
long perHostMem = perHostHdfsScanMem + perHostHbaseScanMem + perHostNonScanMem + perHostDataSinkMem; // The backend needs at least one thread per fragment. int perHostVcores = uniqueFragments.size(); // This plan node set might only have unpartitioned fragments. // Only set estimates if they are valid. if (perHostMem >= 0 && perHostVcores >= 0) { this.perHostMem = perHostMem; this.perHostVcores = perHostVcores; return true; } return false; } public long getPerHostMem() { return perHostMem; } public int getPerHostVcores() { return perHostVcores; } } /** * Estimates the per-host memory and CPU requirements for the given plan fragments, * and sets the results in request. * Optionally excludes the requirements for unpartitioned fragments. * TODO: The LOG.warn() messages should eventually become Preconditions checks * once resource estimation is more robust. */ public void computeResourceReqs(List<PlanFragment> fragments, boolean excludeUnpartitionedFragments, TQueryOptions queryOptions, TQueryExecRequest request) { Preconditions.checkState(!fragments.isEmpty()); Preconditions.checkNotNull(request); // Maps from an ExchangeNode's PlanNodeId to the fragments feeding it. // TODO: This mapping is not necessary anymore. Remove it and clean up. Map<PlanNodeId, List<PlanFragment>> exchangeSources = Maps.newHashMap(); for (PlanFragment fragment: fragments) { if (fragment.getDestNode() == null) continue; List<PlanFragment> srcFragments = exchangeSources.get(fragment.getDestNode().getId()); if (srcFragments == null) { srcFragments = Lists.newArrayList(); exchangeSources.put(fragment.getDestNode().getId(), srcFragments); } srcFragments.add(fragment); } // Compute pipelined plan node sets. ArrayList<PipelinedPlanNodeSet> planNodeSets = Lists.newArrayList(new PipelinedPlanNodeSet()); computePlanNodeSets(fragments.get(0).getPlanRoot(), exchangeSources, planNodeSets.get(0), null, planNodeSets); // Compute the max of the per-host mem and vcores requirement. 
// Note that the max mem and vcores may come from different plan node sets. long maxPerHostMem = Long.MIN_VALUE; int maxPerHostVcores = Integer.MIN_VALUE; for (PipelinedPlanNodeSet planNodeSet: planNodeSets) { if (!planNodeSet.computeResourceEstimates( excludeUnpartitionedFragments, queryOptions)) { continue; } long perHostMem = planNodeSet.getPerHostMem(); int perHostVcores = planNodeSet.getPerHostVcores(); if (perHostMem > maxPerHostMem) maxPerHostMem = perHostMem; if (perHostVcores > maxPerHostVcores) maxPerHostVcores = perHostVcores; } // Do not ask for more cores than are in the RuntimeEnv. maxPerHostVcores = Math.min(maxPerHostVcores, RuntimeEnv.INSTANCE.getNumCores()); // Legitimately set costs to zero if there are only unpartitioned fragments // and excludeUnpartitionedFragments is true. if (maxPerHostMem == Long.MIN_VALUE || maxPerHostVcores == Integer.MIN_VALUE) { boolean allUnpartitioned = true; for (PlanFragment fragment: fragments) { if (fragment.isPartitioned()) { allUnpartitioned = false; break; } } if (allUnpartitioned && excludeUnpartitionedFragments) { maxPerHostMem = 0; maxPerHostVcores = 0; } } if (maxPerHostMem < 0 || maxPerHostMem == Long.MIN_VALUE) { LOG.warn("Invalid per-host memory requirement: " + maxPerHostMem); } if (maxPerHostVcores < 0 || maxPerHostVcores == Integer.MIN_VALUE) { LOG.warn("Invalid per-host virtual cores requirement: " + maxPerHostVcores); } request.setPer_host_mem_req(maxPerHostMem); request.setPer_host_vcores((short) maxPerHostVcores); LOG.debug("Estimated per-host peak memory requirement: " + maxPerHostMem); LOG.debug("Estimated per-host virtual cores requirement: " + maxPerHostVcores); } /** * Populates 'planNodeSets' by recursively traversing the plan tree rooted at 'node' * belonging to 'fragment'. The traversal spans fragments by resolving exchange nodes * to their feeding fragment via exchangeSources. * * The plan node sets are computed top-down. 
As a result, the plan node sets are added * in reverse order of their runtime execution. * * Nodes are generally added to lhsSet. Joins are treated specially in that their * left child is added to lhsSet and their right child to rhsSet to make sure * that concurrent join builds end up in the same plan node set. */ private void computePlanNodeSets(PlanNode node, Map<PlanNodeId, List<PlanFragment>> exchangeSources, PipelinedPlanNodeSet lhsSet, PipelinedPlanNodeSet rhsSet, ArrayList<PipelinedPlanNodeSet> planNodeSets) { lhsSet.add(node); if (node == node.getFragment().getPlanRoot() && node.getFragment().hasSink()) { lhsSet.addSink(node.getFragment().getSink()); } if (node instanceof HashJoinNode) { // Create a new set for the right-hand sides of joins if necessary. if (rhsSet == null) { rhsSet = new PipelinedPlanNodeSet(); planNodeSets.add(rhsSet); } // The join node itself is added to the lhsSet (above) and the rhsSet. rhsSet.add(node); computePlanNodeSets(node.getChild(1), exchangeSources, rhsSet, null, planNodeSets); computePlanNodeSets(node.getChild(0), exchangeSources, lhsSet, rhsSet, planNodeSets); return; } if (node instanceof ExchangeNode) { // Recurse into the plan roots of the fragments feeding this exchange. // Assume that all feeding fragments execute concurrently. List<PlanFragment> srcFragments = exchangeSources.get(node.getId()); Preconditions.checkNotNull(srcFragments); for (PlanFragment srcFragment: srcFragments) { computePlanNodeSets(srcFragment.getPlanRoot(), exchangeSources, lhsSet, null, planNodeSets); } return; } if (node.isBlockingNode()) { // We add blocking nodes to two plan node sets because they require resources while // consuming their input (execution of the preceding set) and while they // emit their output (execution of the following set). lhsSet = new PipelinedPlanNodeSet(); lhsSet.add(node); planNodeSets.add(lhsSet); // Join builds under this blocking node belong in a new rhsSet. 
rhsSet = null; } // Assume that non-join, non-blocking nodes with multiple children (e.g., UnionNode) // consume their inputs in an arbitrary order (i.e., all child subtrees execute // concurrently). for (PlanNode child: node.getChildren()) { computePlanNodeSets(child, exchangeSources, lhsSet, rhsSet, planNodeSets); } } }
apache-2.0
qq1588518/JRediClients
src/main/java/redis/clients/redisson/api/mapreduce/RMapReduce.java
3392
/**
 * Copyright 2016 Nikita Koksharov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package redis.clients.redisson.api.mapreduce;

import java.util.concurrent.TimeUnit;

/**
 * MapReduce allows processing of large amounts of data stored in a Redis map
 * using Mapper, Reducer and/or Collator tasks launched across Redisson Nodes.
 * <p>
 * Usage example:
 *
 * <pre>
 * public class WordMapper implements RMapper&lt;String, String, String, Integer&gt; {
 *
 *     public void map(String key, String value, RCollector&lt;String, Integer&gt; collector) {
 *         String[] words = value.split(&quot;[^a-zA-Z]&quot;);
 *         for (String word : words) {
 *             collector.emit(word, 1);
 *         }
 *     }
 *
 * }
 *
 * public class WordReducer implements RReducer&lt;String, Integer&gt; {
 *
 *     public Integer reduce(String reducedKey, Iterator&lt;Integer&gt; iter) {
 *         int sum = 0;
 *         while (iter.hasNext()) {
 *            Integer i = (Integer) iter.next();
 *            sum += i;
 *         }
 *         return sum;
 *     }
 *
 * }
 *
 * public class WordCollator implements RCollator&lt;String, Integer, Integer&gt; {
 *
 *     public Integer collate(Map&lt;String, Integer&gt; resultMap) {
 *         int result = 0;
 *         for (Integer count : resultMap.values()) {
 *             result += count;
 *         }
 *         return result;
 *     }
 *
 * }
 *
 * RMap&lt;String, String&gt; map = redisson.getMap(&quot;myWords&quot;);
 *
 * Map&lt;String, Integer&gt; wordsCount = map.&lt;String, Integer&gt;mapReduce()
 *     .mapper(new WordMapper())
 *     .reducer(new WordReducer())
 *     .execute();
 *
 * Integer totalCount = map.&lt;String, Integer&gt;mapReduce()
 *     .mapper(new WordMapper())
 *     .reducer(new WordReducer())
 *     .execute(new WordCollator());
 *
 * </pre>
 *
 * @author Nikita Koksharov
 *
 * @param <KIn> input key
 * @param <VIn> input value
 * @param <KOut> output key
 * @param <VOut> output value
 */
public interface RMapReduce<KIn, VIn, KOut, VOut> extends RMapReduceExecutor<VIn, KOut, VOut> {

    /**
     * Defines the timeout for the whole MapReduce process.
     * <code>0</code> means infinity timeout.
     *
     * @param timeout for process
     * @param unit of timeout
     * @return self instance
     */
    RMapReduce<KIn, VIn, KOut, VOut> timeout(long timeout, TimeUnit unit);

    /**
     * Setup Mapper object, which emits intermediate key/value pairs for each
     * source map entry (see the class-level example).
     *
     * @param mapper used during MapReduce
     * @return self instance
     */
    RMapReduce<KIn, VIn, KOut, VOut> mapper(RMapper<KIn, VIn, KOut, VOut> mapper);

    /**
     * Setup Reducer object, which folds all intermediate values emitted for the
     * same intermediate key into a single value (see the class-level example).
     *
     * @param reducer used during MapReduce
     * @return self instance
     */
    RMapReduce<KIn, VIn, KOut, VOut> reducer(RReducer<KOut, VOut> reducer);

}
apache-2.0
greenpeppersoftware/greenpepper3-maven-runner
src/main/java/com/greenpepper/maven/runner/resolver/FileResolver.java
1207
/**
 * Copyright (c) 2009 Pyxis Technologies inc.
 *
 * This is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA,
 * or see the FSF site: http://www.fsf.org.
 */
package com.greenpepper.maven.runner.resolver;

import java.io.File;

/**
 * Resolver that treats the input value as a plain filesystem path and accepts
 * it when it names an existing regular file.
 */
public class FileResolver implements ProjectFileResolver.Resolver {

    /**
     * Returns whether {@code value} names an existing regular file.
     * <p>
     * {@link File#isFile()} already returns {@code false} for paths that do not
     * exist, so the previous additional {@code exists()} check was redundant.
     *
     * @param value candidate filesystem path
     * @return true if the path denotes an existing normal file
     */
    public final boolean canResolve(String value) {
        return new File( value ).isFile();
    }

    /**
     * Resolves the value to a {@link File}; only meaningful when
     * {@link #canResolve(String)} returned true for the same value.
     *
     * @param value filesystem path
     * @return the corresponding File
     */
    public final File resolve(String value) {
        return new File( value );
    }
}
apache-2.0
buttermilk-crypto/buttermilk
buttermilk-core/src/main/java/com/cryptoregistry/formats/simplereader/pub/JSONC2KeyForPublicationReader.java
2745
/*
 * This file is part of Buttermilk
 * Copyright 2011-2014 David R. Smith All Rights Reserved.
 *
 */
package com.cryptoregistry.formats.simplereader.pub;

import java.io.Reader;
import java.io.StringReader;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;

import com.cryptoregistry.c2.key.C2KeyMetadata;
import com.cryptoregistry.c2.key.Curve25519KeyForPublication;
import com.cryptoregistry.c2.key.PublicKey;
import com.cryptoregistry.formats.EncodingHint;
import com.cryptoregistry.formats.KeyFormat;
import com.cryptoregistry.formats.Mode;
import com.cryptoregistry.util.ArmoredString;
import com.cryptoregistry.util.TimeUtil;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Given a map of key data in unlocked condition, create a C2 style key from that.
 * <p>
 * The input JSON is expected to contain a single top-level entry whose key is a
 * distinguished UUID ending in "-P" (for-publication mode) and whose value holds
 * "CreatedOn", "Encoding" (must be Base64url) and "P" (the public key bytes).
 *
 * @author Dave
 */
public class JSONC2KeyForPublicationReader {

	protected final ObjectMapper mapper;
	protected final Map<String,Object> map;

	/**
	 * Parses the JSON from the given reader into an internal map.
	 *
	 * @param in reader over the JSON key document
	 * @throws RuntimeException wrapping any parse/IO failure
	 */
	@SuppressWarnings("unchecked")
	public JSONC2KeyForPublicationReader(Reader in) {
		mapper = new ObjectMapper();
		try {
			map = mapper.readValue(in, Map.class);
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Parses the JSON from the given string.
	 * Delegates to the Reader constructor to avoid duplicating the parsing logic
	 * that was previously copy-pasted between the two constructors.
	 *
	 * @param in JSON key document as a string
	 * @throws RuntimeException wrapping any parse failure
	 */
	public JSONC2KeyForPublicationReader(String in) {
		this(new StringReader(in));
	}

	/**
	 * Builds the Curve25519 for-publication key from the parsed map.
	 *
	 * @return the reconstructed key
	 * @throws RuntimeException if the map is empty, the mode suffix is not "-P",
	 *         or the encoding is not Base64url
	 */
	@SuppressWarnings("unchecked")
	public Curve25519KeyForPublication parse() {
		Iterator<String> iter = map.keySet().iterator();
		if(iter.hasNext()){
			String distUUID = iter.next();
			Map<String,Object> inner = (Map<String,Object>) map.get(distUUID);
			if(distUUID.endsWith("-P")){
				Date createdOn = TimeUtil.getISO8601FormatDate(String.valueOf(inner.get("CreatedOn")));
				// at the moment these are always base64url
				EncodingHint enc = EncodingHint.valueOf(String.valueOf(inner.get("Encoding")));
				if(enc != EncodingHint.Base64url)
					throw new RuntimeException("Unexpected encoding, needs to be Base64url");
				ArmoredString P = new ArmoredString(String.valueOf(inner.get("P")));
				// strip the trailing "-P" mode suffix to recover the bare UUID
				C2KeyMetadata meta = new C2KeyMetadata(
						distUUID.substring(0,distUUID.length()-2),
						createdOn,
						new KeyFormat(enc,Mode.REQUEST_FOR_PUBLICATION,null)
				);
				PublicKey pKey = new PublicKey(P.decodeToBytes());
				return new Curve25519KeyForPublication(meta,pKey);
			}else{
				throw new RuntimeException("unexpected Mode, needs to be ForPublication");
			}
		}else{
			// fixed typo in the original message ("Count not find")
			throw new RuntimeException("Could not find the uuid, fail");
		}
	}
}
apache-2.0
sambalmueslie/WOT-API-LIB
wot_api_lib_generator/src/main/java/de/sambalmueslie/wot_api_definition/tankopedia_api/EncyclopediaTankEnginesRequestDefinition.java
832
package de.sambalmueslie.wot_api_definition.tankopedia_api;

import de.sambalmueslie.wot_api_definition.common.WotApiFieldDefinition;
import de.sambalmueslie.wot_api_definition.common.WotApiRequestDefinition;
import de.sambalmueslie.wot_api_definition.common.WotRequestDefinition;

/**
 * Request definition for the tankopedia "tank engines" API.
 * Method returns list of engines.
 *
 * @author Sambalmueslie
 */
@WotApiRequestDefinition(method = "wot/encyclopedia/tankengines/")
public class EncyclopediaTankEnginesRequestDefinition extends WotRequestDefinition {

	/** Module ID. Declared as a list-valued request field ({@code list = true}). */
	@WotApiFieldDefinition(list = true)
	private long module_id;

	/**
	 * Nation. Valid values: "ussr" — U.S.S.R. "germany" — Germany "usa" — U.S.A. "france" — France "uk" — U.K. "china" — China "japan" —
	 * Japan.
	 */
	@WotApiFieldDefinition
	private String nation;
}
apache-2.0
matrix-org/matrix-android-sdk
matrix-sdk/src/main/java/org/matrix/androidsdk/core/ContentUtils.java
1739
/*
 * Copyright 2014 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.matrix.androidsdk.core;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

import org.matrix.androidsdk.rest.model.message.ImageInfo;

import java.io.File;

/**
 * Static content utility methods.
 */
public class ContentUtils {
    // Fixed: previously copy-pasted as FileContentUtils.class.getSimpleName(),
    // which mislabelled this class's log lines.
    private static final String LOG_TAG = ContentUtils.class.getSimpleName();

    /**
     * Build an ImageInfo object based on the image at the given path.
     *
     * @param filePath the path to the image in storage
     * @return the image info; never null, but width/height are left unset when
     *         the file cannot be decoded as a bitmap
     */
    public static ImageInfo getImageInfoFromFile(String filePath) {
        ImageInfo imageInfo = new ImageInfo();
        try {
            // decodeFile returns null on failure; the original code would then
            // throw an uncaught NullPointerException on getWidth().
            Bitmap imageBitmap = BitmapFactory.decodeFile(filePath);
            if (imageBitmap != null) {
                imageInfo.w = imageBitmap.getWidth();
                imageInfo.h = imageBitmap.getHeight();
            }
            // Size and mime type do not depend on a successful decode.
            File file = new File(filePath);
            imageInfo.size = file.length();
            imageInfo.mimetype = FileContentUtils.getMimeType(filePath);
        } catch (OutOfMemoryError oom) {
            Log.e(LOG_TAG, "## getImageInfoFromFile() : oom", oom);
        }
        return imageInfo;
    }
}
apache-2.0
aehlig/bazel
src/main/java/com/google/devtools/build/lib/collect/nestedset/NestedSetFingerprintCache.java
5195
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.collect.nestedset;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import com.google.devtools.build.lib.actions.CommandLineItem;
import com.google.devtools.build.lib.actions.CommandLineItem.MapFn;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.vfs.DigestHashFunction;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/** Computes fingerprints for nested sets, reusing sub-computations from children. */
public class NestedSetFingerprintCache {
  // Sentinel digest added for empty nested sets (arbitrary prime constant).
  private static final int EMPTY_SET_DIGEST = 104_395_303;

  /** Memoize the subresults. We have to have one cache per type of command item map function. */
  private Map<CommandLineItem.MapFn<?>, DigestMap> mapFnToDigestMap = createMap();

  // Guards the "MapFn instances must be singletons" contract enforced in newDigestMap().
  private final Set<Class<?>> seenMapFns = new HashSet<>();
  // Counts instances per ParametrizedMapFn class, bounded by maxInstancesAllowed().
  private final Multiset<Class<?>> seenParametrizedMapFns = HashMultiset.create();

  /** Fingerprints {@code nestedSet} using the default map function. */
  public <T> void addNestedSetToFingerprint(Fingerprint fingerprint, NestedSet<T> nestedSet) {
    addNestedSetToFingerprint(CommandLineItem.MapFn.DEFAULT, fingerprint, nestedSet);
  }

  /**
   * Folds {@code nestedSet} into {@code fingerprint}, mapping each element through {@code mapFn}.
   * Sub-digests of shared children are cached per map function so repeated sets are cheap.
   * CapturingMapFn instances are not cacheable, so they take the uncached slow path.
   */
  public <T> void addNestedSetToFingerprint(
      CommandLineItem.MapFn<? super T> mapFn, Fingerprint fingerprint, NestedSet<T> nestedSet) {
    if (mapFn instanceof CommandLineItem.CapturingMapFn) {
      addNestedSetToFingerprintSlow(mapFn, fingerprint, nestedSet);
      return;
    }
    // Only top-level nested sets can be empty, so we can bail here
    if (nestedSet.isEmpty()) {
      fingerprint.addInt(EMPTY_SET_DIGEST);
      return;
    }
    DigestMap digestMap = mapFnToDigestMap.computeIfAbsent(mapFn, this::newDigestMap);
    // The traversal order affects the digest, so it is part of the fingerprint.
    fingerprint.addInt(nestedSet.getOrder().ordinal());
    Object children = nestedSet.getChildren();
    addToFingerprint(mapFn, fingerprint, digestMap, children);
  }

  // Uncached path: expand every element directly into the fingerprint.
  private <T> void addNestedSetToFingerprintSlow(
      MapFn<? super T> mapFn, Fingerprint fingerprint, NestedSet<T> nestedSet) {
    for (T object : nestedSet) {
      mapFn.expandToCommandLine(object, fingerprint);
    }
  }

  /** Drops all memoized digests and singleton-tracking state. */
  public void clear() {
    mapFnToDigestMap = createMap();
    seenMapFns.clear();
    seenParametrizedMapFns.clear();
  }

  /**
   * Recursively fingerprints a children node. An Object[] node is an internal (shared) node whose
   * digest is looked up in / inserted into {@code digestMap}; anything else is a leaf element.
   */
  @SuppressWarnings("unchecked")
  private <T> void addToFingerprint(
      CommandLineItem.MapFn<? super T> mapFn,
      Fingerprint fingerprint,
      DigestMap digestMap,
      Object children) {
    if (children instanceof Object[]) {
      if (!digestMap.readDigest(children, fingerprint)) {
        // Cache miss: compute the subtree digest once, then memoize it.
        Fingerprint childrenFingerprint = new Fingerprint();
        for (Object child : (Object[]) children) {
          addToFingerprint(mapFn, childrenFingerprint, digestMap, child);
        }
        digestMap.insertAndReadDigest(children, childrenFingerprint, fingerprint);
      }
    } else {
      addToFingerprint(mapFn, fingerprint, (T) children);
    }
  }

  /** Fingerprints a single leaf element via the map function. */
  @VisibleForTesting
  <T> void addToFingerprint(
      CommandLineItem.MapFn<? super T> mapFn, Fingerprint fingerprint, T object) {
    mapFn.expandToCommandLine(object, fingerprint);
  }

  private static Map<CommandLineItem.MapFn<?>, DigestMap> createMap() {
    return new ConcurrentHashMap<>();
  }

  /**
   * Creates the per-mapFn digest cache, enforcing that plain MapFns are singletons and that
   * ParametrizedMapFns stay within their declared instance budget (otherwise the cache would
   * grow without bound and digests could silently diverge between equal-looking instances).
   */
  private DigestMap newDigestMap(CommandLineItem.MapFn<?> mapFn) {
    Class<?> mapFnClass = mapFn.getClass();
    if (mapFn instanceof CommandLineItem.ParametrizedMapFn) {
      int occurrences = seenParametrizedMapFns.add(mapFnClass, 1) + 1;
      if (occurrences > ((CommandLineItem.ParametrizedMapFn) mapFn).maxInstancesAllowed()) {
        throw new IllegalArgumentException(
            String.format(
                "Too many instances of CommandLineItem.ParametrizedMapFn '%s' detected. "
                    + "Please construct fewer instances or use CommandLineItem.CapturingMapFn.",
                mapFnClass.getName()));
      }
    } else {
      if (!seenMapFns.add(mapFnClass)) {
        throw new IllegalArgumentException(
            String.format(
                "Illegal mapFn implementation: '%s'. "
                    + "CommandLineItem.MapFn instances must be singletons. "
                    + "Please see CommandLineItem.ParametrizedMapFn or "
                    + "CommandLineItem.CapturingMapFn for alternatives.",
                mapFnClass.getName()));
      }
    }
    // TODO(b/112460990): Use the value from DigestHashFunction.getDefault(), but check for
    // contention.
    return new DigestMap(DigestHashFunction.SHA256, 1024);
  }
}
apache-2.0
NovaOrdis/nort
main/src/test/java/io/novaordis/release/model/MockInLineXMLEditor.java
4187
/*
 * Copyright (c) 2016 Nova Ordis LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.novaordis.release.model;

import io.novaordis.utilities.xml.editor.InLineXMLEditor;
import io.novaordis.utilities.xml.editor.XMLElement;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Test double for {@link InLineXMLEditor}. Only {@link #getElements(String)} is functional,
 * backed by the path-to-elements map populated via {@link #setElements(String, XMLElement...)};
 * every other operation throws, which makes unexpected interactions fail loudly in tests.
 *
 * @author Ovidiu Feodorov <ovidiu@novaordis.com>
 * @since 11/28/16
 */
public class MockInLineXMLEditor implements InLineXMLEditor {

    // Constants -------------------------------------------------------------------------------------------------------

    // Static ----------------------------------------------------------------------------------------------------------

    // Attributes ------------------------------------------------------------------------------------------------------

    // Canned results: XML path -> elements returned by getElements().
    private Map<String, List<XMLElement>> paths;

    // Constructors ----------------------------------------------------------------------------------------------------

    public MockInLineXMLEditor() {
        this.paths = new HashMap<>();
    }

    // InLineXMLEditor implementation ----------------------------------------------------------------------------------

    @Override
    public File getFile() {
        throw new RuntimeException("getFile() NOT YET IMPLEMENTED");
    }

    @Override
    public int getLineCount() {
        throw new RuntimeException("getLineCount() NOT YET IMPLEMENTED");
    }

    @Override
    public boolean isDirty() {
        throw new RuntimeException("isDirty() NOT YET IMPLEMENTED");
    }

    @Override
    public String getContent() {
        throw new RuntimeException("getContent() NOT YET IMPLEMENTED");
    }

    @Override
    public String get(String path) {
        throw new RuntimeException("get() NOT YET IMPLEMENTED");
    }

    @Override
    public List<String> getList(String path) {
        throw new RuntimeException("getList() NOT YET IMPLEMENTED");
    }

    /**
     * @return the elements previously installed for this path via setElements(),
     *         or an empty list when none were installed (never null).
     */
    @Override
    public List<XMLElement> getElements(String path) {
        List<XMLElement> elements = paths.get(path);
        if (elements == null) {
            return Collections.emptyList();
        }
        return elements;
    }

    @Override
    public boolean set(String path, String newValue) {
        throw new RuntimeException("set() NOT YET IMPLEMENTED");
    }

    @Override
    public boolean save() throws IOException {
        throw new RuntimeException("save() NOT YET IMPLEMENTED");
    }

    @Override
    public boolean undo() throws IOException {
        throw new RuntimeException("undo() NOT YET IMPLEMENTED");
    }

    // Public ----------------------------------------------------------------------------------------------------------

    /**
     * Installs the elements to be returned by subsequent getElements(path) calls.
     * Multiple invocations for the same path accumulate. A null array is ignored.
     */
    public void setElements(String path, XMLElement... elements) {
        if (elements == null) {
            return;
        }
        // Idiomatic get-or-create instead of the manual null-check/put dance.
        List<XMLElement> storage = paths.computeIfAbsent(path, k -> new ArrayList<>());
        Collections.addAll(storage, elements);
    }

    // Package protected -----------------------------------------------------------------------------------------------

    // Protected -------------------------------------------------------------------------------------------------------

    // Private ---------------------------------------------------------------------------------------------------------

    // Inner classes ---------------------------------------------------------------------------------------------------
}
apache-2.0
java110/MicroCommunity
service-front/src/main/java/com/java110/front/components/carBlackWhite/ChooseCarBlackWhiteComponent.java
1104
package com.java110.front.components.carBlackWhite;

import com.java110.core.context.IPageData;
import com.java110.front.smo.carBlackWhite.IListCarBlackWhitesSMO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;

/**
 * View component for choosing car black/white list entries. All queries are
 * delegated to the injected {@link IListCarBlackWhitesSMO} service.
 * <p>
 * add by wuxw, 2019-06-29
 */
@Component("chooseCarBlackWhite")
public class ChooseCarBlackWhiteComponent {

    @Autowired
    private IListCarBlackWhitesSMO listCarBlackWhitesSMOImpl;

    /**
     * Queries the car black/white list.
     *
     * @param pd page data wrapper for the current request
     * @return the service's {@link ResponseEntity} carrying the list payload
     */
    public ResponseEntity<String> list(IPageData pd) {
        return listCarBlackWhitesSMOImpl.listCarBlackWhites(pd);
    }

    public IListCarBlackWhitesSMO getListCarBlackWhitesSMOImpl() {
        return listCarBlackWhitesSMOImpl;
    }

    public void setListCarBlackWhitesSMOImpl(IListCarBlackWhitesSMO listCarBlackWhitesSMOImpl) {
        this.listCarBlackWhitesSMOImpl = listCarBlackWhitesSMOImpl;
    }
}
apache-2.0
zhengzhou/LoadingLayout
loadingview.library/src/main/java/com/zayn/loadingview/library/behavior/UpBehavior.java
1050
package com.zayn.loadingview.library.behavior; import android.content.Context; import android.util.AttributeSet; import android.view.View; import com.zayn.loadingview.library.IBehavior; import com.zayn.loadingview.library.NestedLoadingLayout; /** * Created by zhou on 16-4-25. */ public class UpBehavior extends IBehavior<View> { private View target; public UpBehavior(Context context, AttributeSet attrs) { super(context, attrs); } @Override public int getZOrder() { return -1; } @Override public int getTotalOffset(View view) { return view.getMeasuredHeight(); } @Override public void onScrolled(View view, int offset) { } @Override public boolean onStateChange(View view, int state) { if(state == NestedLoadingLayout.SCROLL_STATE_DRAGGING){ target.setVisibility(View.VISIBLE); } else if(state == NestedLoadingLayout.SCROLL_STATE_IDLE){ target.setVisibility(View.INVISIBLE); } return false; } }
apache-2.0
aosp-mirror/platform_frameworks_support
car/src/main/java/androidx/car/drawer/DrawerItemClickListener.java
950
/* * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.car.drawer; /** * Listener for handling clicks on items/views managed by {@link DrawerItemViewHolder}. */ public interface DrawerItemClickListener { /** * Callback when item is clicked. * * @param position Adapter position of the clicked item. */ void onItemClick(int position); }
apache-2.0
dremio/dremio-oss
services/datastore/src/test/java/com/dremio/datastore/generator/supplier/fixed/FixedLengthByteContainerSupplier.java
1185
/*
 * Copyright (C) 2017-2019 Dremio Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dremio.datastore.generator.supplier.fixed;

import java.nio.charset.StandardCharsets;

import com.dremio.datastore.generator.ByteContainerStoreGenerator.ByteContainer;

/**
 * Fixed-length value supplier producing {@link ByteContainer} instances whose
 * payload is the UTF-8 encoding of the generated string.
 */
public class FixedLengthByteContainerSupplier extends FixedLengthSupplier<ByteContainer> {

  public FixedLengthByteContainerSupplier(String prefix) {
    super(prefix);
  }

  /** Wraps the generated string's UTF-8 bytes in a {@link ByteContainer}. */
  @Override
  ByteContainer convertToTarget(String generated) {
    final byte[] utf8Bytes = generated.getBytes(StandardCharsets.UTF_8);
    return new ByteContainer(utf8Bytes);
  }
}
apache-2.0
web3j/web3j
crypto/src/main/java/org/web3j/crypto/ECDSASignature.java
2476
/*
 * Copyright 2019 Web3 Labs Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.web3j.crypto;

import java.math.BigInteger;

/** An ECDSA Signature. */
public class ECDSASignature {
    public final BigInteger r;
    public final BigInteger s;

    public ECDSASignature(BigInteger r, BigInteger s) {
        this.r = r;
        this.s = s;
    }

    /**
     * @return true if the S component is "low", that means it is below {@link
     *     Sign#HALF_CURVE_ORDER}. See <a
     *     href="https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#Low_S_values_in_signatures">
     *     BIP62</a>.
     */
    public boolean isCanonical() {
        return s.compareTo(Sign.HALF_CURVE_ORDER) <= 0;
    }

    /**
     * Will automatically adjust the S component to be less than or equal to half the curve order,
     * if necessary. This is required because for every signature (r,s) the signature (r, -s (mod
     * N)) is a valid signature of the same message. However, we dislike the ability to modify the
     * bits of a Bitcoin transaction after it's been signed, as that violates various assumed
     * invariants. Thus in future only one of those forms will be considered legal and the other
     * will be banned.
     *
     * @return the signature in a canonicalised form.
     */
    public ECDSASignature toCanonicalised() {
        // Guard clause: already in the low-S form, nothing to do.
        if (isCanonical()) {
            return this;
        }
        // N is the order of the curve (the count of valid points). When S lies in
        // the upper half, N - S yields the equivalent lower-half solution, e.g.
        // with N = 10 and s = 8: (-8 % 10 == 2), so (r, 8) and (r, 2) both verify,
        // and 10 - 8 == 2 selects the canonical one.
        final BigInteger curveOrder = Sign.CURVE.getN();
        return new ECDSASignature(r, curveOrder.subtract(s));
    }
}
apache-2.0
justplay1/Shoppist
presentation/src/test/java/com/justplay1/shoppist/models/mappers/CurrencyModelDataMapperTest.java
3664
/*
 * Copyright (C) 2016 Mkhytar Mkhoian
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.justplay1.shoppist.models.mappers;

import com.justplay1.shoppist.models.CurrencyModel;
import com.justplay1.shoppist.models.CurrencyViewModel;

import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import static com.justplay1.shoppist.ViewModelUtil.FAKE_CURRENCY_NAME;
import static com.justplay1.shoppist.ViewModelUtil.FAKE_ID;
import static com.justplay1.shoppist.ViewModelUtil.FAKE_NAME;
import static com.justplay1.shoppist.ViewModelUtil.createFakeCurrencyModel;
import static com.justplay1.shoppist.ViewModelUtil.createFakeCurrencyViewModel;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;

/**
 * Unit tests for {@link CurrencyViewModelMapper}: verifies the view-model/model
 * mapping in both directions, for single instances and for collections.
 * <p>
 * Created by Mkhytar Mkhoian.
 */
public class CurrencyModelDataMapperTest {

    private CurrencyViewModelMapper mapper;

    @Before
    public void setUp() throws Exception {
        mapper = new CurrencyViewModelMapper();
    }

    // view model -> model: id and name carried over.
    @Test
    public void transformCurrencyViewModel() {
        CurrencyViewModel viewModel = createFakeCurrencyViewModel();

        CurrencyModel model = mapper.transform(viewModel);

        assertThat(model, is(instanceOf(CurrencyModel.class)));
        assertThat(model.getId(), is(FAKE_ID));
        assertThat(model.getName(), is(FAKE_CURRENCY_NAME));
    }

    // view model collection -> model collection: element types and size preserved.
    @Test
    public void transformCurrencyViewModelCollection() {
        CurrencyViewModel mockViewModelOne = mock(CurrencyViewModel.class);
        CurrencyViewModel mockViewModelTwo = mock(CurrencyViewModel.class);

        List<CurrencyViewModel> list = new ArrayList<>(5);
        list.add(mockViewModelOne);
        list.add(mockViewModelTwo);

        Collection<CurrencyModel> collection = mapper.transform(list);

        assertThat(collection.toArray()[0], is(instanceOf(CurrencyModel.class)));
        assertThat(collection.toArray()[1], is(instanceOf(CurrencyModel.class)));
        assertThat(collection.size(), is(2));
    }

    // model -> view model: id and name carried over.
    // NOTE(review): this direction asserts FAKE_NAME while the reverse direction
    // asserts FAKE_CURRENCY_NAME — confirm the fake factories intentionally use
    // different name constants.
    @Test
    public void transformCurrencyModel() {
        CurrencyModel model = createFakeCurrencyModel();

        CurrencyViewModel viewModel = mapper.transformToViewModel(model);

        assertThat(viewModel, is(instanceOf(CurrencyViewModel.class)));
        assertThat(viewModel.getId(), is(FAKE_ID));
        assertThat(viewModel.getName(), is(FAKE_NAME));
    }

    // model collection -> view model collection: element types and size preserved.
    @Test
    public void transformCurrencyModelCollection() {
        CurrencyModel mockModelOne = mock(CurrencyModel.class);
        CurrencyModel mockModelTwo = mock(CurrencyModel.class);

        List<CurrencyModel> list = new ArrayList<>(5);
        list.add(mockModelOne);
        list.add(mockModelTwo);

        Collection<CurrencyViewModel> collection = mapper.transformToViewModel(list);

        assertThat(collection.toArray()[0], is(instanceOf(CurrencyViewModel.class)));
        assertThat(collection.toArray()[1], is(instanceOf(CurrencyViewModel.class)));
        assertThat(collection.size(), is(2));
    }
}
apache-2.0
vmpalmamorales/Raider-Oasis
Proyecto/src/main/java/es/urjc/jer/Appservidor1Application.java
943
package es.urjc.jer; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.annotation.Bean; import org.springframework.web.socket.config.annotation.EnableWebSocket; import org.springframework.web.socket.config.annotation.WebSocketConfigurer; import org.springframework.web.socket.config.annotation.WebSocketHandlerRegistry; @SpringBootApplication @EnableWebSocket public class Appservidor1Application implements WebSocketConfigurer { @Override public void registerWebSocketHandlers(WebSocketHandlerRegistry registry) { registry.addHandler(createPlayerHandler(), "/game") .setAllowedOrigins("*"); } @Bean public PlayerHandler createPlayerHandler() { return new PlayerHandler(); } public static void main(String[] args) { SpringApplication.run(Appservidor1Application.class, args); } }
apache-2.0
bozimmerman/CoffeeMud
com/planet_ink/coffee_mud/Commands/Package.java
5561
package com.planet_ink.coffee_mud.Commands;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.util.*;

/*
   Copyright 2005-2022 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

	   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
/**
 * The PACKAGE command: gathers a number of identical items from the room and
 * bundles them into a single GenPackagedItems item, destroying the originals.
 */
public class Package extends StdCommand
{
	public Package()
	{
	}

	private final String[] access=I(new String[]{"PACKAGE"});

	@Override
	public String[] getAccessWords()
	{
		return access;
	}

	@Override
	public boolean execute(final MOB mob, final List<String> commands, final int metaFlags)
		throws java.io.IOException
	{
		// Keep a copy of the raw command words for failure messages.
		final Vector<String> origCmds=new XVector<String>(commands);
		if(commands.size()<2)
		{
			CMLib.commands().postCommandFail(mob,origCmds,L("Package what?"));
			return false;
		}
		// Drop the command word itself ("PACKAGE").
		commands.remove(0);
		String whatName="";
		if(commands.size()>0)
			whatName=commands.get(commands.size()-1);
		// Parse an optional leading count/ALL qualifier; negative means a parse error
		// already reported to the mob.
		final int maxToGet=CMLib.english().parseMaxToGive(mob,commands,true,mob,false);
		if(maxToGet<0)
			return false;
		String whatToGet=CMParms.combine(commands,0);
		boolean allFlag=(commands.size()>0)?commands.get(0).equalsIgnoreCase("all"):false;
		// Normalize "ALL.x" / "x.ALL" syntax into the "ALL x" form.
		if(whatToGet.toUpperCase().startsWith("ALL."))
		{
			allFlag=true;
			whatToGet="ALL "+whatToGet.substring(4);
		}
		if(whatToGet.toUpperCase().endsWith(".ALL"))
		{
			allFlag=true;
			whatToGet="ALL "+whatToGet.substring(0,whatToGet.length()-4);
		}
		// Collect matching, visible items from the room using the ".1", ".2", ...
		// addendum convention to walk duplicate names.
		final List<Item> itemsV=new ArrayList<Item>();
		int addendum=1;
		String addendumStr="";
		boolean packagingPackagesProblem=false;
		do
		{
			Environmental getThis=null;
			getThis=mob.location().fetchFromRoomFavorItems(null,whatToGet+addendumStr);
			if(getThis==null)
				break;
			// Existing packages may not be packaged again; remember we saw one
			// so the failure message can say so.
			if(getThis instanceof PackagedItems)
				packagingPackagesProblem=true;
			else
			{
				if((getThis instanceof Item)
				&&(CMLib.flags().canBeSeenBy(getThis,mob))
				&&((!allFlag)||CMLib.flags().isGettable(((Item)getThis))||(getThis.displayText().length()>0))
				&&(!itemsV.contains(getThis)))
					itemsV.add((Item)getThis);
			}
			addendumStr="."+(++addendum);
		}
		while((allFlag)&&(itemsV.size()<maxToGet));
		if(itemsV.size()==0)
		{
			if(packagingPackagesProblem)
				CMLib.commands().postCommandFail(mob,origCmds,L("You can't package up packages.",whatName));
			else
				CMLib.commands().postCommandFail(mob,origCmds,L("You don't see '@x1' here.",whatName));
			return false;
		}
		// Reject item categories that may never be packaged.
		for(int i=0;i<itemsV.size();i++)
		{
			final Item I=itemsV.get(i);
			if((I instanceof Coins)
			||(CMLib.flags().isEnspelled(I))
			||(CMLib.flags().isOnFire(I)))
			{
				CMLib.commands().postCommandFail(mob,origCmds,L("Items such as @x1 may not be packaged.",I.name(mob)));
				return false;
			}
		}
		final PackagedItems thePackage=(PackagedItems)CMClass.getItem("GenPackagedItems");
		if(thePackage==null)
			return false;
		// All packaged items must be identical to each other.
		if(!thePackage.isPackagable(itemsV))
		{
			CMLib.commands().postCommandFail(mob,origCmds,L("All items in a package must be absolutely identical. Some here are not."));
			return false;
		}
		// Pick up any items the mob does not already hold; a failed GET aborts.
		Item getThis=null;
		for(int i=0;i<itemsV.size();i++)
		{
			getThis=itemsV.get(i);
			if((!mob.isMine(getThis))&&(!Get.get(mob,null,getThis,true,"get",true)))
				return false;
		}
		if(getThis==null)
			return false;
		final String name=CMLib.english().removeArticleLead(getThis.name());
		final CMMsg msg=CMClass.getMsg(mob,getThis,null,CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> package(s) up @x1 <T-NAMENOART>(s).",""+itemsV.size()));
		if(mob.location().okMessage(mob,msg))
		{
			mob.location().send(mob,msg);
			thePackage.setName(name);
			// On success the originals are destroyed and replaced by the package.
			if(thePackage.packageMe(getThis,itemsV.size()))
			{
				for(int i=0;i<itemsV.size();i++)
					itemsV.get(i).destroy();
				mob.location().addItem(thePackage,ItemPossessor.Expire.Player_Drop);
				// NOTE(review): recoverRoomStats() is invoked twice in the original;
				// presumably intentional — confirm before collapsing to one call.
				mob.location().recoverRoomStats();
				mob.location().recoverRoomStats();
			}
		}
		return false;
	}

	@Override
	public double combatActionsCost(final MOB mob, final List<String> cmds)
	{
		return CMProps.getCommandCombatActionCost(ID());
	}

	@Override
	public double actionsCost(final MOB mob, final List<String> cmds)
	{
		return CMProps.getCommandActionCost(ID());
	}

	@Override
	public boolean canBeOrdered()
	{
		return true;
	}
}
apache-2.0
softctrl/hackerrank_codes
30_Days_of_Code/Day_02_Operators.java
883
import java.io.*;
import java.util.*;
import java.text.*;
import java.math.*;
import java.util.regex.*;

public class Arithmetic {

    /**
     * Computes the total meal cost, rounded to the nearest dollar.
     *
     * @param mealCost   base meal price in dollars
     * @param tipPercent tip as a percentage of the base price
     * @param taxPercent tax as a percentage of the base price
     * @return base price plus tip plus tax, rounded to the nearest whole dollar
     */
    static int calculateTotalCost(double mealCost, int tipPercent, int taxPercent) {
        double tip = mealCost * (tipPercent / 100.0);
        double tax = mealCost * (taxPercent / 100.0);
        return (int) Math.round(mealCost + tip + tax);
    }

    public static void main(String[] args) {
        Scanner scan = new Scanner(System.in);
        double mealCost = scan.nextDouble(); // original meal price
        int tipPercent = scan.nextInt();     // tip percentage
        int taxPercent = scan.nextInt();     // tax percentage
        scan.close();

        int totalCost = calculateTotalCost(mealCost, tipPercent, taxPercent);

        System.out.format("The total meal cost is %d dollars.", totalCost);
    }
}
apache-2.0
IHTSDO/snow-owl
commons/com.b2international.index/src/com/b2international/index/lucene/IntIndexField.java
2583
/* * Copyright 2011-2016 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.index.lucene; import static com.google.common.base.Preconditions.checkNotNull; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField.Type; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import com.google.common.collect.ImmutableSet; /** * @since 4.3 */ public class IntIndexField extends IndexFieldBase<Integer> { public IntIndexField(String fieldName) { this(fieldName, true); } public IntIndexField(String fieldName, boolean store) { super(fieldName, store); } @Override public Query toQuery(Integer value) { return IntPoint.newExactQuery(fieldName(), value); } @Override protected Query toSetQuery(Iterable<Integer> values) { return IntPoint.newSetQuery(fieldName(), ImmutableSet.copyOf(values)); } @Override public void addTo(Document doc, Integer value) { super.addTo(doc, value); if (Store.YES == isStored()) { doc.add(new StoredField(fieldName(), value)); } } @Override protected IndexableField toField(Integer value) { return new IntPoint(fieldName(), value); } @Override protected BytesRef toBytesRef(Integer value) { byte[] packed = new byte[Integer.BYTES]; 
NumericUtils.intToSortableBytes(value, packed, 0); return new BytesRef(packed); } @Override protected Type getSortFieldType() { return Type.INT; } @Override public Integer getValue(IndexableField field) { return getNumber(field).intValue(); } public short getShortValue(Document doc) { return getNumber(getField(doc)).shortValue(); } private Number getNumber(IndexableField field) { return checkNotNull(field.numericValue(), "Cannot get numeric value from field '%s'"); } }
apache-2.0
kieker-monitoring/kieker
kieker-analysis/test/kieker/analysis/junit/plugin/filter/record/delayfilter/TestRealtimeRecordDelayFilterNoAcceleration.java
1603
/***************************************************************************
 * Copyright 2021 Kieker Project (http://kieker-monitoring.net)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ***************************************************************************/

package kieker.analysis.junit.plugin.filter.record.delayfilter;

/**
 * Exercises the realtime record delay filter with an acceleration factor of 1,
 * i.e. records are expected to be replayed at their original speed.
 *
 * @author Andre van Hoorn
 *
 * @since 1.7
 */
public class TestRealtimeRecordDelayFilterNoAcceleration extends AbstractTestRealtimeRecordDelayFilter {

	// Record event timestamps, in seconds relative to the start time.
	private static final long[] EVENT_TIME_OFFSETS_SECONDS = { 0L, 1L, 2L, 7L, 17L, 19L }; // relative to the start time

	// Expected record counts per interval of length INTERVAL_SIZE_NANOS relative to start time.
	private static final long[] EXPECTED_THROUGHPUT_LIST_OFFSET_SECS_INTERVAL_5SECS = {
			3L, // i.e., in interval (0,5(
			1L, // i.e., in interval (5,10(
			0L, // i.e., in interval (10,15(
			2L, // i.e., in interval (15,20(
	};

	public TestRealtimeRecordDelayFilterNoAcceleration() {
		super(EVENT_TIME_OFFSETS_SECONDS, EXPECTED_THROUGHPUT_LIST_OFFSET_SECS_INTERVAL_5SECS, 1); // 1 = no acceleration/slow down
	}
}
apache-2.0
meggermo/jackrabbit-oak
oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java
7169
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.index.lucene;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import javax.annotation.CheckForNull;
import javax.annotation.Nullable;

import com.google.common.collect.Iterables;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndex;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.ReaderRefreshPolicy;
import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReader;
import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReaderFactory;
import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
import org.apache.lucene.store.Directory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Wraps the Lucene readers for one index, combining persisted readers with
 * optional near-real-time (NRT) readers into a single {@link IndexSearcher}.
 *
 * <p>Concurrency protocol (as implemented below): callers bracket every use of
 * {@link #getSearcher()} with {@link #acquire()} / {@link #release()}, which take
 * the read lock. {@link #close()} takes the write lock only to flip the
 * {@code closed} flag, so it cannot proceed while any reader holds the lock, and
 * subsequent {@code acquire()} calls return {@code false}. {@code indexSearcher}
 * is {@code volatile} because {@link #refreshReaders()} swaps it while other
 * threads may be reading it.
 */
public class IndexNode {
    // Monotonically increasing id source shared by all IndexNode instances.
    private static final AtomicInteger INDEX_NODE_COUNTER = new AtomicInteger();

    /**
     * Factory method: builds the definition, creates the persisted readers and
     * (optionally) an NRT index for it.
     *
     * @return a new IndexNode, or {@code null} if no persisted readers exist yet
     */
    static IndexNode open(String indexPath, NodeState root, NodeState defnNodeState,
                          LuceneIndexReaderFactory readerFactory, @Nullable NRTIndexFactory nrtFactory)
            throws IOException {
        IndexDefinition definition = new IndexDefinition(root, defnNodeState);
        List<LuceneIndexReader> readers = readerFactory.createReaders(definition, defnNodeState, indexPath);
        NRTIndex nrtIndex = nrtFactory != null ? nrtFactory.createIndex(definition) : null;
        if (!readers.isEmpty()){
            return new IndexNode(PathUtils.getName(indexPath), definition, readers, nrtIndex);
        }
        return null;
    }

    private static final Logger log = LoggerFactory.getLogger(IndexNode.class);

    // Persisted (on-disk) readers; immutable after construction, never empty.
    private final List<LuceneIndexReader> readers;

    private final String name;

    private final IndexDefinition definition;

    // Guards closed/indexSearcher handover; readers acquire(), close() writes.
    private final ReadWriteLock lock = new ReentrantReadWriteLock();

    // volatile: swapped by refreshReaders() while other threads read it.
    private volatile IndexSearcher indexSearcher;

    private final NRTIndex nrtIndex;

    private final ReaderRefreshPolicy refreshPolicy;

    // Callback handed to the refresh policy; simply re-creates the searcher.
    private final Runnable refreshCallback = new Runnable() {
        @Override
        public void run() {
            refreshReaders();
        }
    };

    // Written under the write lock in close(); read under the read lock in acquire().
    private boolean closed = false;

    // Current NRT reader list; identity-compared in refreshReaders() to detect updates.
    private List<LuceneIndexReader> nrtReaders;

    private final int indexNodeId = INDEX_NODE_COUNTER.incrementAndGet();

    IndexNode(String name, IndexDefinition definition, List<LuceneIndexReader> readers,
              @Nullable NRTIndex nrtIndex) throws IOException {
        checkArgument(!readers.isEmpty());
        this.name = name;
        this.definition = definition;
        this.readers = readers;
        this.nrtIndex = nrtIndex;
        this.nrtReaders = getNRTReaders();
        this.indexSearcher = new IndexSearcher(createReader(nrtReaders));
        // Without an NRT index there is nothing to refresh.
        this.refreshPolicy = nrtIndex != null ? nrtIndex.getRefreshPolicy() : ReaderRefreshPolicy.NEVER;
    }

    String getName() {
        return name;
    }

    IndexDefinition getDefinition() {
        return definition;
    }

    /** Only valid between a successful {@link #acquire()} and {@link #release()}. */
    public IndexSearcher getSearcher() {
        return indexSearcher;
    }

    Directory getSuggestDirectory() {
        return getDefaultReader().getSuggestDirectory();
    }

    AnalyzingInfixSuggester getLookup() {
        return getDefaultReader().getLookup();
    }

    /**
     * Takes the read lock and, if the node is still open, gives the refresh
     * policy a chance to refresh the readers.
     *
     * @return {@code true} if acquired (caller MUST later call {@link #release()});
     *         {@code false} if already closed (lock released here).
     */
    boolean acquire() {
        lock.readLock().lock();
        if (closed) {
            lock.readLock().unlock();
            return false;
        } else {
            refreshPolicy.refreshOnReadIfRequired(refreshCallback);
            return true;
        }
    }

    public void release() {
        lock.readLock().unlock();
    }

    public int getIndexNodeId() {
        return indexNodeId;
    }

    /**
     * Marks the node closed (under the write lock, so it waits for in-flight
     * readers) and then closes all Lucene readers. Fails if already closed.
     */
    void close() throws IOException {
        lock.writeLock().lock();
        try {
            checkState(!closed);
            closed = true;
        } finally {
            lock.writeLock().unlock();
        }

        //Do not close the NRTIndex here as it might be in use
        //by newer IndexNode. Just close the readers obtained from
        //them
        for (LuceneIndexReader reader : Iterables.concat(readers, nrtReaders)){
            reader.close();
        }
    }

    /** @return the NRT writer, or {@code null} when no NRT index is configured. */
    @CheckForNull
    public LuceneIndexWriter getLocalWriter() throws IOException{
        return nrtIndex != null ? nrtIndex.getWriter() : null;
    }

    public void refreshReadersOnWriteIfRequired() {
        refreshPolicy.refreshOnWriteIfRequired(refreshCallback);
    }

    private void refreshReaders(){
        List<LuceneIndexReader> newNRTReaders = getNRTReaders();
        //The list reference would differ if index got updated
        //so if they are same no need to reinitialize the searcher
        if (newNRTReaders != nrtReaders) {
            nrtReaders = newNRTReaders;
            indexSearcher = new IndexSearcher(createReader(nrtReaders));
            log.debug("Refreshed reader for index [{}]", definition);
        }
    }

    private LuceneIndexReader getDefaultReader(){
        //TODO This is still required to support Suggester, Spellcheck etc OAK-4643
        return readers.get(0);
    }

    /**
     * Combines persisted + NRT readers. Single persisted reader with no NRT
     * readers is returned directly; otherwise a MultiReader owning its
     * sub-readers (closeSubReaders = true) is built.
     */
    private IndexReader createReader(List<LuceneIndexReader> nrtReaders) {
        if (readers.size() == 1 && nrtReaders.isEmpty()){
            return readers.get(0).getReader();
        }
        IndexReader[] readerArr = new IndexReader[readers.size() + nrtReaders.size()];
        int i = 0;
        for (LuceneIndexReader r : Iterables.concat(readers, nrtReaders)){
            readerArr[i++] = r.getReader();
        }
        return new MultiReader(readerArr, true);
    }

    private List<LuceneIndexReader> getNRTReaders() {
        return nrtIndex != null ? nrtIndex.getReaders() : Collections.<LuceneIndexReader>emptyList();
    }
}
apache-2.0
de-graeuler/jtracapi
jtracapi/trunk/JTracRpcClient/src/main/java/de/graeuler/jtracapi/converter/TicketDynamicFieldTypeConverter.java
1913
package de.graeuler.jtracapi.converter; import java.util.HashMap; import java.util.Map; import org.apache.xmlrpc.common.TypeConverter; import de.graeuler.jtracapi.model.field.TicketAttributeField; public abstract class TicketDynamicFieldTypeConverter<T extends TicketAttributeField> implements TypeConverter { @Override public boolean isConvertable(Object pObject) { return pObject instanceof TicketAttributeField; } /** * Creates a new object that extends TicketDynamicField. This method creates * the object returned by this.convert * * @return T */ public abstract T newDynamicTicketField(); /** * Casts an Object instance to the generic TicketDynamicField * * @param object * @return object casted to T */ public abstract T castToDynamicField(Object object); /** * Sets all fields of T field using the values stored in attributeMap. This * method is called by the convert method. * * @param field * call the setters for field using the attribute Map * @param attributeMap * holds the values to initialize the field */ public abstract void setDynamicFieldAttributes(T field, Map<String, Object> attributeMap); /** * Puts the field values into the attribute Map for the backConvert Method * * @param attributeMap * @param field */ public abstract void setStructAttributes(Map<String, Object> attributeMap, T field); @Override public Object convert(Object pObject) { T f = newDynamicTicketField(); @SuppressWarnings("unchecked") Map<String, Object> m = (Map<String, Object>) pObject; setDynamicFieldAttributes(f, m); return f; } @Override public Object backConvert(Object result) { T c = castToDynamicField(result); Map<String, Object> m = new HashMap<String,Object>(); setStructAttributes(m, c); return m; } }
apache-2.0
maheshgaya/PopularMovies
app/src/main/java/com/maheshgaya/android/popularmovies/sync/MovieAuthenticator.java
2366
package com.maheshgaya.android.popularmovies.sync; import android.accounts.AbstractAccountAuthenticator; import android.accounts.Account; import android.accounts.AccountAuthenticatorResponse; import android.accounts.NetworkErrorException; import android.content.Context; import android.os.Bundle; /** * Created by Mahesh Gaya on 10/22/16. */ public class MovieAuthenticator extends AbstractAccountAuthenticator { public MovieAuthenticator(Context context){ super(context); } @Override public Bundle editProperties(AccountAuthenticatorResponse accountAuthenticatorResponse, String s) { throw new UnsupportedOperationException(); } @Override public Bundle addAccount(AccountAuthenticatorResponse accountAuthenticatorResponse, String s, String s1, String[] strings, Bundle bundle) throws NetworkErrorException { return null; } @Override public Bundle confirmCredentials(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, Bundle bundle) throws NetworkErrorException { return null; } @Override public Bundle getAuthToken(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String s, Bundle bundle) { throw new UnsupportedOperationException(); } @Override public String getAuthTokenLabel(String s) { throw new UnsupportedOperationException(); } @Override public Bundle updateCredentials(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String s, Bundle bundle) throws NetworkErrorException { throw new UnsupportedOperationException(); } @Override public Bundle hasFeatures(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String[] strings) throws NetworkErrorException { throw new UnsupportedOperationException(); } }
apache-2.0
jeaninevb/SwengGroup5
app/src/androidTest/java/soft/swenggroup5/DecoderUtilAutomatorTest.java
6503
package soft.swenggroup5;

import android.content.ContentProviderOperation;
import android.content.Context;
import android.content.Intent;
import android.provider.ContactsContract;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SdkSuppress;
import android.support.test.runner.AndroidJUnit4;
import android.support.test.uiautomator.UiDevice;
import android.support.test.uiautomator.UiObject;
import android.support.test.uiautomator.UiSelector;
import android.util.Log;

import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.File;
import java.util.ArrayList;

import static junit.framework.TestCase.assertEquals;

/**
 * DecoderUtilAutomatorTest
 *
 * Contains tests for DecoderUtils that uses auto-input code (UiAutomator).
 * The test drives the device's stock Contacts app, so the steps below are
 * order-dependent and rely on fixed sleeps rather than synchronization.
 * Tests are only compatible with the Nexus 4 at the moment.
 */
@RunWith(AndroidJUnit4.class)
@SdkSuppress(minSdkVersion = 18)
public class DecoderUtilAutomatorTest {

    //As ContactData opens the device's default Contact App to insert contacts, different
    //+ "resource_ids" must be used to identify the "save contact button" on the opened app.
    //+ These are needed to automate pushing the save button to write Contacts to the device.
    //  Index 0: Sony Xperia M2; index 1: Nexus 5. Tried in order until one matches.
    private static final String[] CONTACT_SAVE_BUTTON_ID = {
            "com.sonyericsson.android.socialphonebook:id/save_menu_item", //Sony Experia m2
            "com.android.contacts:id/menu_save" //Nexus 5
    };

    // Fixed delay (ms) used between UI steps in lieu of proper idling/synchronization.
    private static final int WAIT_TIME = 5000; //5 seconds

    // NOTE(review): currently unused in this class — presumably reserved for a
    // runtime-permission request flow; confirm before removing.
    final static private int REQUEST_CODE_ASK_PERMISSIONS = 8888;

    /**
     * test_saveData_Contact_valid
     *
     * Tests: ContactData.saveData()
     *
     * Tests if contact data can be correctly decoded and inserted on the running
     * Android device. Only tests with a single contact at the moment.
     * To test this, it creates A ContactData and then encodes it, we then decode
     * the encoded data and try to insert the decoded data onto the device. Finally
     * we check if the inserted data is the same as the data we originally encoded.
     *
     * Cleanup: the inserted contact ("Test Contact") is deleted again via a
     * ContentProviderOperation before the assertions run.
     */
    @Test
    public void test_saveData_contact_valid() throws Exception{
        // Initialize UiDevice instance, the object which will look at the current screen
        UiDevice mDevice = UiDevice.getInstance(InstrumentationRegistry.getInstrumentation());

        // Start from the home screen
        mDevice.pressHome();

        //context needed to start Activities
        Context context = InstrumentationRegistry.getContext();

        //Create a ContactData which will be encoded, for us to then decode.
        //Round-trip: ContactData -> file -> string -> decode -> ReceivedData.
        ContactData contactToDecode = DecoderUtilsTest.getExpectedValidContactData();
        File f = contactToDecode.toFile(context);
        String data = EncoderUtils.getFileContents(f);
        ReceivedData decodedContact = DecoderUtils.decodeFileData(data, ContactData.FILE_EXTENSION);
        Intent intent = decodedContact.TEST_saveData(context);

        //NEW_TASK flag needed to allow this Intent to act as a standalone app
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(intent);

        //Hackish method to have test wait until the Intent has fully started, hopefully this can
        //+be changed later to wait until Intent is ready for use
        Thread.sleep(WAIT_TIME);

        //UiObject to represent the "Save new Contact" button in the Contact App
        UiObject saveButton;
        int resourceIdIndex = 0;
        //The save button will have a different id for every Contact App.
        //+ keep trying ids until one works.
        do {
            saveButton = mDevice.findObject(new UiSelector()
                    .resourceId(CONTACT_SAVE_BUTTON_ID[resourceIdIndex]));
            resourceIdIndex++;
        } while (resourceIdIndex < CONTACT_SAVE_BUTTON_ID.length && !saveButton.exists());

        if (saveButton.exists()) {
            //Presses the button. Will also probably cause "back button" to be called and return
            //+ the device to the main "Contact App" activity
            saveButton.click();
        }
        Thread.sleep(WAIT_TIME);

        //Read back what was actually stored on the device for comparison.
        ContactData expected = DecoderUtilsTest.getExpectedValidContactData();
        expected.printData();
        ContactData result = DecoderUtilsTest.getResultValidContactData(context);
        result.printData();

        //close the Contacts App*/
        mDevice.pressRecentApps();
        Thread.sleep(WAIT_TIME);

        //Nexus 5 close Contacts app Code
        //Check if there is an element of this type, only exist in Nexus XMLs
        //TODO: Won't work if Contacts is not the only open app, will fix this soon
        UiObject app = mDevice.findObject(new UiSelector().resourceId(
                "com.android.systemui:id/task_view_content"
        ));
        if(app.exists()){
            app = mDevice.findObject(new UiSelector().descriptionContains("Contacts"));//find contact Element
            app.dragTo(0, app.getBounds().centerY(), 5); //drag left very quickly
        //Sony Experia close Contacts app code
        }else{
            app = mDevice.findObject(new UiSelector().descriptionContains("Contacts"));
            app.swipeLeft(100);//drags left like dragTo but sony reads dragTo as clicks and opens app incorrectly
        }

        //Code to delete the just inserted Contact
        //ContentProviderOperations are actions you can define to manipulate data on an
        //+ Android device. They must be held in ArrayLists as they're supposed to be used
        //+ in batches, even if we only use one here
        ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
        ops.add(ContentProviderOperation.newDelete(ContactsContract.RawContacts.CONTENT_URI) //delete anything in Contacts
                .withSelection(
                        ContactsContract.Data.DISPLAY_NAME + " = ?", //that matches this selection, i.e.
                        new String[]{"Test Contact"}) //+ anything with name = "Test Contact"
                .build());
        context.getContentResolver().applyBatch(ContactsContract.AUTHORITY, ops); //do operation

        Log.d("XXX","EXP");
        expected.printData();
        Log.d("XXX","RES");
        result.printData();
        Thread.sleep(WAIT_TIME);

        // TODO remove this redundant test when following test is solved
        assertEquals(true, true);
        assertEquals(true, expected.equals(result));
    }
}
apache-2.0
smb510/twitterbyName
twitter4j-stream/src/main/java/twitter4j/FilterQuery.java
6364
/*
 * Copyright 2007 Yusuke Yamamoto
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package twitter4j;

import twitter4j.internal.http.HttpParameter;
import twitter4j.internal.util.z_T4JInternalStringUtil;

import java.util.ArrayList;
import java.util.Arrays;

/**
 * Filter query for the streaming API: carries backlog count, followed user
 * ids, tracked keywords and location bounding boxes, and serializes them to
 * HTTP parameters.
 *
 * @author Yusuke Yamamoto - yusuke at mac.com
 * @since Twitter4J 2.1.2
 */
public final class FilterQuery implements java.io.Serializable {
    private static final long serialVersionUID = 430966623248982833L;
    private int count;
    private long[] follow;
    private String[] track;
    private double[][] locations;

    /**
     * Creates a new FilterQuery
     */
    public FilterQuery() {
        this.count = 0;
        this.follow = null;
        this.track = null;
        this.locations = null;
    }

    /**
     * Creates a new FilterQuery
     *
     * @param follow Specifies the users, by ID, to receive public tweets from.
     */
    public FilterQuery(long[] follow) {
        this();
        this.count = 0;
        this.follow = follow;
    }

    /**
     * Creates a new FilterQuery
     *
     * @param count  Indicates the number of previous statuses to stream before transitioning to the live stream.
     * @param follow Specifies the users, by ID, to receive public tweets from.
     */
    public FilterQuery(int count, long[] follow) {
        this();
        this.count = count;
        this.follow = follow;
    }

    /**
     * Creates a new FilterQuery
     *
     * @param count  Indicates the number of previous statuses to stream before transitioning to the live stream.
     * @param follow Specifies the users, by ID, to receive public tweets from.
     * @param track  Specifies keywords to track.
     */
    public FilterQuery(int count, long[] follow, String[] track) {
        this();
        this.count = count;
        this.follow = follow;
        this.track = track;
    }

    /**
     * Creates a new FilterQuery
     *
     * @param count     Indicates the number of previous statuses to stream before transitioning to the live stream.
     * @param follow    Specifies the users, by ID, to receive public tweets from.
     * @param track     Specifies keywords to track.
     * @param locations Specifies the locations to track. 2D array
     */
    public FilterQuery(int count, long[] follow, String[] track, double[][] locations) {
        this.count = count;
        this.follow = follow;
        this.track = track;
        this.locations = locations;
    }

    /**
     * Sets count
     *
     * @param count Indicates the number of previous statuses to stream before transitioning to the live stream.
     * @return this instance
     */
    public FilterQuery count(int count) {
        this.count = count;
        return this;
    }

    /**
     * Sets follow
     *
     * @param follow Specifies the users, by ID, to receive public tweets from.
     * @return this instance
     */
    public FilterQuery follow(long[] follow) {
        this.follow = follow;
        return this;
    }

    /**
     * Sets track
     *
     * @param track Specifies keywords to track.
     * @return this instance
     */
    public FilterQuery track(String[] track) {
        this.track = track;
        return this;
    }

    /**
     * Sets locations
     *
     * @param locations Specifies the locations to track. 2D array
     * @return this instance
     */
    public FilterQuery locations(double[][] locations) {
        this.locations = locations;
        return this;
    }

    /*package*/ HttpParameter[] asHttpParameterArray(HttpParameter stallWarningsParam) {
        ArrayList<HttpParameter> params = new ArrayList<HttpParameter>();

        params.add(new HttpParameter("count", count));
        if (follow != null && follow.length > 0) {
            params.add(new HttpParameter("follow"
                    , z_T4JInternalStringUtil.join(follow)));
        }
        if (track != null && track.length > 0) {
            params.add(new HttpParameter("track"
                    , z_T4JInternalStringUtil.join(track)));
        }
        if (locations != null && locations.length > 0) {
            params.add(new HttpParameter("locations"
                    , toLocationsString(locations)));
        }
        params.add(stallWarningsParam);

        HttpParameter[] paramArray = new HttpParameter[params.size()];
        return params.toArray(paramArray);
    }

    // Joins each (longitude, latitude) pair into the comma-separated form the
    // streaming API expects; assumes each inner array has at least 2 elements.
    private String toLocationsString(final double[][] keywords) {
        final StringBuilder buf = new StringBuilder(20 * keywords.length * 2);
        for (double[] keyword : keywords) {
            if (0 != buf.length()) {
                buf.append(",");
            }
            buf.append(keyword[0]);
            buf.append(",");
            buf.append(keyword[1]);
        }
        return buf.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        FilterQuery that = (FilterQuery) o;

        if (count != that.count) return false;
        if (!Arrays.equals(follow, that.follow)) return false;
        if (!Arrays.equals(track, that.track)) return false;
        // Fix: 'locations' was previously ignored, so queries differing only in
        // their bounding boxes compared equal. deepEquals is required because
        // Arrays.equals on a double[][] compares inner-array references only;
        // it also handles null on either side.
        if (!Arrays.deepEquals(locations, that.locations)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = count;
        result = 31 * result + (follow != null ? Arrays.hashCode(follow) : 0);
        result = 31 * result + (track != null ? Arrays.hashCode(track) : 0);
        // Fix: fold 'locations' into the hash, consistent with equals().
        // Arrays.deepHashCode(null) is defined as 0, so no null guard needed.
        result = 31 * result + Arrays.deepHashCode(locations);
        return result;
    }

    @Override
    public String toString() {
        return "FilterQuery{" +
                "count=" + count +
                ", follow=" + Arrays.toString(follow) +
                ", track=" + (track == null ? null : Arrays.asList(track)) +
                // Fix: Arrays.asList on a double[][] printed useless "[[D@..."
                // references; deepToString renders the nested values.
                ", locations=" + (locations == null ? null : Arrays.deepToString(locations)) +
                '}';
    }
}
apache-2.0
bengong/widget
src/core/widget/ThreeColorBorder.java
1462
package core.widget; import java.awt.Color; import java.awt.Component; import java.awt.Graphics; import java.awt.Insets; import javax.swing.border.AbstractBorder; public class ThreeColorBorder extends AbstractBorder { private Color normal = Colors.border; private Color focus = Colors.borderFocus; private Color disable = Colors.borderDisable; private Insets insets = new Insets(1, 1, 1, 1); public ThreeColorBorder() { super(); } public ThreeColorBorder(Color normal, Color focus, Color disable) { super(); this.normal = normal; this.focus = focus; this.disable = disable; } public ThreeColorBorder(Color normal, Color focus, Color disable, Insets insets) { super(); this.normal = normal; this.focus = focus; this.disable = disable; this.insets = insets; } @Override public Insets getBorderInsets(Component c, Insets insets) { return insets; } @Override public Insets getBorderInsets(Component c) { return insets; } @Override public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) { Color color = normal; if (!c.isEnabled()) { color = disable; } else if (c.isFocusOwner()) { color = focus; } Color oldColor = g.getColor(); g.setColor(color); // 僅畫下劃線。 // g.drawRect(x, y, width - 1, height - 1); g.drawLine(x, y+height-1, x+width, y+height-1); g.setColor(oldColor); } }
apache-2.0
blstream/AugumentedSzczecin_java
api/src/main/java/com/bls/resource/PlacesResource.java
1217
package com.bls.resource;

import java.util.Collection;

import javax.inject.Inject;
import javax.inject.Singleton;
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import com.bls.core.place.Place;
import com.bls.core.user.User;
import com.bls.dao.PlaceDao;
import com.codahale.metrics.annotation.ExceptionMetered;
import com.codahale.metrics.annotation.Timed;

import io.dropwizard.auth.Auth;
import io.dropwizard.hibernate.UnitOfWork;

/**
 * JAX-RS resource exposing {@code /places} (JSON in/out). All endpoints
 * require an authenticated {@link User} (via {@code @Auth}) and run inside a
 * Hibernate unit of work; timing and exception metrics are recorded.
 */
@Singleton
@Path("/places")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public class PlacesResource {

    private final PlaceDao<Place> placeDao;

    @Inject
    public PlacesResource(final PlaceDao placeDao) {
        this.placeDao = placeDao;
    }

    /**
     * GET /places — returns all places visible to the authenticated user.
     *
     * @param user the authenticated caller
     * @return the user's places
     */
    @GET
    @UnitOfWork
    @Timed
    @ExceptionMetered
    public Collection<Place> getAll(@Auth User user) {
        return placeDao.findAll(user);
    }

    /**
     * POST /places — validates the payload and creates the place for the
     * authenticated user, delegating duplicate detection to the DAO.
     *
     * @param user  the authenticated caller
     * @param place the place to create (bean-validated)
     * @return the created place
     */
    @POST
    @UnitOfWork
    @Timed
    @ExceptionMetered
    public Place add(@Auth User user, @Valid final Place place) {
        return placeDao.checkDuplicateAndCreate(place, user);
    }
}
apache-2.0
zhihu/Matisse
sample/src/main/java/com/zhihu/matisse/sample/SampleActivity.java
7981
/*
 * Copyright 2017 Zhihu Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.zhihu.matisse.sample;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;

import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;

import com.tbruyelle.rxpermissions2.RxPermissions;
import com.zhihu.matisse.Matisse;
import com.zhihu.matisse.MimeType;
import com.zhihu.matisse.engine.impl.GlideEngine;
import com.zhihu.matisse.engine.impl.PicassoEngine;
import com.zhihu.matisse.filter.Filter;
import com.zhihu.matisse.internal.entity.CaptureStrategy;

import java.util.List;

/**
 * Demo activity for the Matisse image picker. Each of the three buttons
 * launches Matisse with a different configuration (Zhihu theme with capture,
 * Dracula theme, GIF-only); the selected URIs and paths are shown in a
 * RecyclerView once the picker returns.
 */
public class SampleActivity extends AppCompatActivity implements View.OnClickListener {

    // Request code used to identify the Matisse result in onActivityResult.
    private static final int REQUEST_CODE_CHOOSE = 23;

    private UriAdapter mAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // All three demo buttons share this activity as the click listener.
        findViewById(R.id.zhihu).setOnClickListener(this);
        findViewById(R.id.dracula).setOnClickListener(this);
        findViewById(R.id.only_gif).setOnClickListener(this);

        RecyclerView recyclerView = (RecyclerView) findViewById(R.id.recyclerview);
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        recyclerView.setAdapter(mAdapter = new UriAdapter());
    }

    // <editor-fold defaultstate="collapsed" desc="onClick">
    @SuppressLint("CheckResult")
    @Override
    public void onClick(final View v) {
        // Ask for storage permission first; only launch the picker when granted.
        RxPermissions rxPermissions = new RxPermissions(this);
        rxPermissions.request(Manifest.permission.WRITE_EXTERNAL_STORAGE)
                .subscribe(aBoolean -> {
                    if (aBoolean) {
                        startAction(v);
                    } else {
                        Toast.makeText(SampleActivity.this, R.string.permission_request_denied, Toast.LENGTH_LONG)
                                .show();
                    }
                }, Throwable::printStackTrace);
    }
    // </editor-fold>

    /**
     * Launches Matisse with the configuration matching the clicked button,
     * then clears the previous results from the list.
     */
    private void startAction(View v) {
        switch (v.getId()) {
            case R.id.zhihu:
                // Image-only picker with camera capture, Zhihu default theme.
                Matisse.from(SampleActivity.this)
                        .choose(MimeType.ofImage(), false)
                        .countable(true)
                        .capture(true)
                        .captureStrategy(
                                new CaptureStrategy(true, "com.zhihu.matisse.sample.fileprovider", "test"))
                        .maxSelectable(9)
                        .addFilter(new GifSizeFilter(320, 320, 5 * Filter.K * Filter.K))
                        .gridExpectedSize(
                                getResources().getDimensionPixelSize(R.dimen.grid_expected_size))
                        .restrictOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT)
                        .thumbnailScale(0.85f)
                        .imageEngine(new GlideEngine())
                        .setOnSelectedListener((uriList, pathList) -> {
                            Log.e("onSelected", "onSelected: pathList=" + pathList);
                        })
                        .showSingleMediaType(true)
                        .originalEnable(true)
                        .maxOriginalSize(10)
                        .autoHideToolbarOnSingleTap(true)
                        .setOnCheckedListener(isChecked -> {
                            Log.e("isChecked", "onCheck: isChecked=" + isChecked);
                        })
                        .forResult(REQUEST_CODE_CHOOSE);
                break;
            case R.id.dracula:
                // Dracula theme, Picasso engine, no counter badges.
                Matisse.from(SampleActivity.this)
                        .choose(MimeType.ofImage())
                        .theme(R.style.Matisse_Dracula)
                        .countable(false)
                        .addFilter(new GifSizeFilter(320, 320, 5 * Filter.K * Filter.K))
                        .maxSelectable(9)
                        .originalEnable(true)
                        .maxOriginalSize(10)
                        .imageEngine(new PicassoEngine())
                        .forResult(REQUEST_CODE_CHOOSE);
                break;
            case R.id.only_gif:
                // Restrict selection to GIFs only.
                Matisse.from(SampleActivity.this)
                        .choose(MimeType.of(MimeType.GIF), false)
                        .countable(true)
                        .maxSelectable(9)
                        .addFilter(new GifSizeFilter(320, 320, 5 * Filter.K * Filter.K))
                        .gridExpectedSize(
                                getResources().getDimensionPixelSize(R.dimen.grid_expected_size))
                        .restrictOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT)
                        .thumbnailScale(0.85f)
                        .imageEngine(new GlideEngine())
                        .showSingleMediaType(true)
                        .originalEnable(true)
                        .maxOriginalSize(10)
                        .autoHideToolbarOnSingleTap(true)
                        .forResult(REQUEST_CODE_CHOOSE);
                break;
            default:
                break;
        }
        // Reset the displayed results while the picker is open.
        mAdapter.setData(null, null);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Show the URIs/paths Matisse returned, if the user confirmed a selection.
        if (requestCode == REQUEST_CODE_CHOOSE && resultCode == RESULT_OK) {
            mAdapter.setData(Matisse.obtainResult(data), Matisse.obtainPathResult(data));
            Log.e("OnActivityResult ", String.valueOf(Matisse.obtainOriginalState(data)));
        }
    }

    /**
     * Simple adapter rendering each selected item's URI and file path;
     * alternating rows are dimmed via alpha for readability.
     */
    private static class UriAdapter extends RecyclerView.Adapter<UriAdapter.UriViewHolder> {

        private List<Uri> mUris;
        private List<String> mPaths;

        // Pass null/null to clear the list.
        void setData(List<Uri> uris, List<String> paths) {
            mUris = uris;
            mPaths = paths;
            notifyDataSetChanged();
        }

        @Override
        public UriViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            return new UriViewHolder(
                    LayoutInflater.from(parent.getContext()).inflate(R.layout.uri_item, parent, false));
        }

        @Override
        public void onBindViewHolder(UriViewHolder holder, int position) {
            holder.mUri.setText(mUris.get(position).toString());
            holder.mPath.setText(mPaths.get(position));

            // Zebra striping: dim every odd row.
            holder.mUri.setAlpha(position % 2 == 0 ? 1.0f : 0.54f);
            holder.mPath.setAlpha(position % 2 == 0 ? 1.0f : 0.54f);
        }

        @Override
        public int getItemCount() {
            return mUris == null ? 0 : mUris.size();
        }

        static class UriViewHolder extends RecyclerView.ViewHolder {

            private TextView mUri;
            private TextView mPath;

            UriViewHolder(View contentView) {
                super(contentView);
                mUri = (TextView) contentView.findViewById(R.id.uri);
                mPath = (TextView) contentView.findViewById(R.id.path);
            }
        }
    }
}
apache-2.0
googleapis/java-talent
proto-google-cloud-talent-v4beta1/src/main/java/com/google/cloud/talent/v4beta1/ListProfilesRequest.java
46197
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/talent/v4beta1/profile_service.proto package com.google.cloud.talent.v4beta1; /** * * * <pre> * List profiles request. * </pre> * * Protobuf type {@code google.cloud.talent.v4beta1.ListProfilesRequest} */ public final class ListProfilesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.talent.v4beta1.ListProfilesRequest) ListProfilesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListProfilesRequest.newBuilder() to construct. 
private ListProfilesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListProfilesRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListProfilesRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListProfilesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); parent_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); pageToken_ = s; break; } case 24: { pageSize_ = input.readInt32(); break; } case 34: { com.google.protobuf.FieldMask.Builder subBuilder = null; if (readMask_ != null) { subBuilder = readMask_.toBuilder(); } readMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(readMask_); readMask_ = subBuilder.buildPartial(); } break; } case 42: { java.lang.String s = input.readStringRequireUtf8(); filter_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = 
unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4beta1.ProfileServiceProto .internal_static_google_cloud_talent_v4beta1_ListProfilesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4beta1.ProfileServiceProto .internal_static_google_cloud_talent_v4beta1_ListProfilesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4beta1.ListProfilesRequest.class, com.google.cloud.talent.v4beta1.ListProfilesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; private volatile java.lang.Object parent_; /** * * * <pre> * Required. The resource name of the tenant under which the profile is created. * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenants/bar". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the tenant under which the profile is created. * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenants/bar". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 5; private volatile java.lang.Object filter_; /** * * * <pre> * The filter string specifies the profiles to be enumerated. * Supported operator: =, AND * The field(s) eligible for filtering are: * * `externalId` * * `groupId` * externalId and groupId cannot be specified at the same time. If both * externalId and groupId are provided, the API will return a bad request * error. * Sample Query: * * externalId = "externalId-1" * * groupId = "groupId-1" * </pre> * * <code>string filter = 5;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * The filter string specifies the profiles to be enumerated. * Supported operator: =, AND * The field(s) eligible for filtering are: * * `externalId` * * `groupId` * externalId and groupId cannot be specified at the same time. If both * externalId and groupId are provided, the API will return a bad request * error. * Sample Query: * * externalId = "externalId-1" * * groupId = "groupId-1" * </pre> * * <code>string filter = 5;</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_TOKEN_FIELD_NUMBER = 2; private volatile java.lang.Object pageToken_; /** * * * <pre> * The token that specifies the current offset (that is, starting result). * Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to * continue the list. * </pre> * * <code>string page_token = 2;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * The token that specifies the current offset (that is, starting result). * Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to * continue the list. * </pre> * * <code>string page_token = 2;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_; /** * * * <pre> * The maximum number of profiles to be returned, at most 100. * Default is 100 unless a positive number smaller than 100 is specified. 
* </pre> * * <code>int32 page_size = 3;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int READ_MASK_FIELD_NUMBER = 4; private com.google.protobuf.FieldMask readMask_; /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> * * @return Whether the readMask field is set. */ @java.lang.Override public boolean hasReadMask() { return readMask_ != null; } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> * * @return The readMask. */ @java.lang.Override public com.google.protobuf.FieldMask getReadMask() { return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. 
* Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { return getReadMask(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (readMask_ != null) { output.writeMessage(4, getReadMask()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, filter_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (readMask_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getReadMask()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, filter_); } size += 
unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.talent.v4beta1.ListProfilesRequest)) { return super.equals(obj); } com.google.cloud.talent.v4beta1.ListProfilesRequest other = (com.google.cloud.talent.v4beta1.ListProfilesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (getPageSize() != other.getPageSize()) return false; if (hasReadMask() != other.hasReadMask()) return false; if (hasReadMask()) { if (!getReadMask().equals(other.getReadMask())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); if (hasReadMask()) { hash = (37 * hash) + READ_MASK_FIELD_NUMBER; hash = (53 * hash) + getReadMask().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.talent.v4beta1.ListProfilesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * List profiles request. 
* </pre> * * Protobuf type {@code google.cloud.talent.v4beta1.ListProfilesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4beta1.ListProfilesRequest) com.google.cloud.talent.v4beta1.ListProfilesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4beta1.ProfileServiceProto .internal_static_google_cloud_talent_v4beta1_ListProfilesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4beta1.ProfileServiceProto .internal_static_google_cloud_talent_v4beta1_ListProfilesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4beta1.ListProfilesRequest.class, com.google.cloud.talent.v4beta1.ListProfilesRequest.Builder.class); } // Construct using com.google.cloud.talent.v4beta1.ListProfilesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); parent_ = ""; filter_ = ""; pageToken_ = ""; pageSize_ = 0; if (readMaskBuilder_ == null) { readMask_ = null; } else { readMask_ = null; readMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.talent.v4beta1.ProfileServiceProto .internal_static_google_cloud_talent_v4beta1_ListProfilesRequest_descriptor; } @java.lang.Override public com.google.cloud.talent.v4beta1.ListProfilesRequest getDefaultInstanceForType() { return 
com.google.cloud.talent.v4beta1.ListProfilesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.talent.v4beta1.ListProfilesRequest build() { com.google.cloud.talent.v4beta1.ListProfilesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.talent.v4beta1.ListProfilesRequest buildPartial() { com.google.cloud.talent.v4beta1.ListProfilesRequest result = new com.google.cloud.talent.v4beta1.ListProfilesRequest(this); result.parent_ = parent_; result.filter_ = filter_; result.pageToken_ = pageToken_; result.pageSize_ = pageSize_; if (readMaskBuilder_ == null) { result.readMask_ = readMask_; } else { result.readMask_ = readMaskBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.talent.v4beta1.ListProfilesRequest) { return mergeFrom((com.google.cloud.talent.v4beta1.ListProfilesRequest) other); } else { super.mergeFrom(other); return this; } } 
public Builder mergeFrom(com.google.cloud.talent.v4beta1.ListProfilesRequest other) { if (other == com.google.cloud.talent.v4beta1.ListProfilesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; onChanged(); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (other.hasReadMask()) { mergeReadMask(other.getReadMask()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.talent.v4beta1.ListProfilesRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.talent.v4beta1.ListProfilesRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the tenant under which the profile is created. * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenants/bar". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the tenant under which the profile is created. * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenants/bar". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the tenant under which the profile is created. * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenants/bar". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the tenant under which the profile is created. * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenants/bar". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the tenant under which the profile is created. * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenants/bar". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * The filter string specifies the profiles to be enumerated. * Supported operator: =, AND * The field(s) eligible for filtering are: * * `externalId` * * `groupId` * externalId and groupId cannot be specified at the same time. If both * externalId and groupId are provided, the API will return a bad request * error. * Sample Query: * * externalId = "externalId-1" * * groupId = "groupId-1" * </pre> * * <code>string filter = 5;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The filter string specifies the profiles to be enumerated. * Supported operator: =, AND * The field(s) eligible for filtering are: * * `externalId` * * `groupId` * externalId and groupId cannot be specified at the same time. If both * externalId and groupId are provided, the API will return a bad request * error. 
* Sample Query: * * externalId = "externalId-1" * * groupId = "groupId-1" * </pre> * * <code>string filter = 5;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The filter string specifies the profiles to be enumerated. * Supported operator: =, AND * The field(s) eligible for filtering are: * * `externalId` * * `groupId` * externalId and groupId cannot be specified at the same time. If both * externalId and groupId are provided, the API will return a bad request * error. * Sample Query: * * externalId = "externalId-1" * * groupId = "groupId-1" * </pre> * * <code>string filter = 5;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); return this; } /** * * * <pre> * The filter string specifies the profiles to be enumerated. * Supported operator: =, AND * The field(s) eligible for filtering are: * * `externalId` * * `groupId` * externalId and groupId cannot be specified at the same time. If both * externalId and groupId are provided, the API will return a bad request * error. * Sample Query: * * externalId = "externalId-1" * * groupId = "groupId-1" * </pre> * * <code>string filter = 5;</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); onChanged(); return this; } /** * * * <pre> * The filter string specifies the profiles to be enumerated. * Supported operator: =, AND * The field(s) eligible for filtering are: * * `externalId` * * `groupId` * externalId and groupId cannot be specified at the same time. 
If both * externalId and groupId are provided, the API will return a bad request * error. * Sample Query: * * externalId = "externalId-1" * * groupId = "groupId-1" * </pre> * * <code>string filter = 5;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * The token that specifies the current offset (that is, starting result). * Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to * continue the list. * </pre> * * <code>string page_token = 2;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token that specifies the current offset (that is, starting result). * Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to * continue the list. * </pre> * * <code>string page_token = 2;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token that specifies the current offset (that is, starting result). 
* Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to * continue the list. * </pre> * * <code>string page_token = 2;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * * * <pre> * The token that specifies the current offset (that is, starting result). * Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to * continue the list. * </pre> * * <code>string page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * * * <pre> * The token that specifies the current offset (that is, starting result). * Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to * continue the list. * </pre> * * <code>string page_token = 2;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of profiles to be returned, at most 100. * Default is 100 unless a positive number smaller than 100 is specified. * </pre> * * <code>int32 page_size = 3;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of profiles to be returned, at most 100. * Default is 100 unless a positive number smaller than 100 is specified. 
* </pre> * * <code>int32 page_size = 3;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * * * <pre> * The maximum number of profiles to be returned, at most 100. * Default is 100 unless a positive number smaller than 100 is specified. * </pre> * * <code>int32 page_size = 3;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private com.google.protobuf.FieldMask readMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> readMaskBuilder_; /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> * * @return Whether the readMask field is set. */ public boolean hasReadMask() { return readMaskBuilder_ != null || readMask_ != null; } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> * * @return The readMask. */ public com.google.protobuf.FieldMask getReadMask() { if (readMaskBuilder_ == null) { return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; } else { return readMaskBuilder_.getMessage(); } } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. 
* Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ public Builder setReadMask(com.google.protobuf.FieldMask value) { if (readMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } readMask_ = value; onChanged(); } else { readMaskBuilder_.setMessage(value); } return this; } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ public Builder setReadMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (readMaskBuilder_ == null) { readMask_ = builderForValue.build(); onChanged(); } else { readMaskBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ public Builder mergeReadMask(com.google.protobuf.FieldMask value) { if (readMaskBuilder_ == null) { if (readMask_ != null) { readMask_ = com.google.protobuf.FieldMask.newBuilder(readMask_).mergeFrom(value).buildPartial(); } else { readMask_ = value; } onChanged(); } else { readMaskBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ public Builder clearReadMask() { if (readMaskBuilder_ == null) { readMask_ = null; onChanged(); } else { readMask_ = null; readMaskBuilder_ = null; } return this; } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. 
* Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ public com.google.protobuf.FieldMask.Builder getReadMaskBuilder() { onChanged(); return getReadMaskFieldBuilder().getBuilder(); } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { if (readMaskBuilder_ != null) { return readMaskBuilder_.getMessageOrBuilder(); } else { return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; } } /** * * * <pre> * A field mask to specify the profile fields to be listed in response. * All fields are listed if it is unset. * Valid values are: * * name * </pre> * * <code>.google.protobuf.FieldMask read_mask = 4;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getReadMaskFieldBuilder() { if (readMaskBuilder_ == null) { readMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getReadMask(), getParentForChildren(), isClean()); readMask_ = null; } return readMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4beta1.ListProfilesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ListProfilesRequest) private static final 
com.google.cloud.talent.v4beta1.ListProfilesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.talent.v4beta1.ListProfilesRequest(); } public static com.google.cloud.talent.v4beta1.ListProfilesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListProfilesRequest> PARSER = new com.google.protobuf.AbstractParser<ListProfilesRequest>() { @java.lang.Override public ListProfilesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListProfilesRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListProfilesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListProfilesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.talent.v4beta1.ListProfilesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
liwanzhang/learning-designpatterns
src/test/java/zwl/learning/designpattern/test/GeneralFactoryTest.java
502
package zwl.learning.designpattern.test;

import org.junit.Test;

import zwl.learning.designpattern.factorymethod.generalfactory.SendFactory;
import zwl.learning.designpattern.factorymethod.generalfactory.Sender;

/**
 * Exercises the "general factory" variant of the factory-method sample:
 * a single factory whose {@code produce(String)} method maps a type key
 * onto a concrete {@link Sender} implementation.
 *
 * @author zhangwanli
 * @date 2017-08-22 11:15 AM
 */
public class GeneralFactoryTest {

    @Test
    public void generalFactoryTest() {
        // Ask the factory for the SMS flavour of Sender and invoke it.
        // (Smoke test only — passes as long as creation and Send() do not throw.)
        Sender smsSender = new SendFactory().produce("sms");
        smsSender.Send();
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-lightsail/src/main/java/com/amazonaws/services/lightsail/AbstractAmazonLightsail.java
28999
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.lightsail; import javax.annotation.Generated; import com.amazonaws.services.lightsail.model.*; import com.amazonaws.*; /** * Abstract implementation of {@code AmazonLightsail}. Convenient method forms pass through to the corresponding * overload that takes a request object, which throws an {@code UnsupportedOperationException}. */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AbstractAmazonLightsail implements AmazonLightsail { protected AbstractAmazonLightsail() { } @Override public void setEndpoint(String endpoint) { throw new java.lang.UnsupportedOperationException(); } @Override public void setRegion(com.amazonaws.regions.Region region) { throw new java.lang.UnsupportedOperationException(); } @Override public AllocateStaticIpResult allocateStaticIp(AllocateStaticIpRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public AttachCertificateToDistributionResult attachCertificateToDistribution(AttachCertificateToDistributionRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public AttachDiskResult attachDisk(AttachDiskRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public AttachInstancesToLoadBalancerResult attachInstancesToLoadBalancer(AttachInstancesToLoadBalancerRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public 
AttachLoadBalancerTlsCertificateResult attachLoadBalancerTlsCertificate(AttachLoadBalancerTlsCertificateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public AttachStaticIpResult attachStaticIp(AttachStaticIpRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CloseInstancePublicPortsResult closeInstancePublicPorts(CloseInstancePublicPortsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CopySnapshotResult copySnapshot(CopySnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateBucketResult createBucket(CreateBucketRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateBucketAccessKeyResult createBucketAccessKey(CreateBucketAccessKeyRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateCertificateResult createCertificate(CreateCertificateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateCloudFormationStackResult createCloudFormationStack(CreateCloudFormationStackRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateContactMethodResult createContactMethod(CreateContactMethodRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateContainerServiceResult createContainerService(CreateContainerServiceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateContainerServiceDeploymentResult createContainerServiceDeployment(CreateContainerServiceDeploymentRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateContainerServiceRegistryLoginResult createContainerServiceRegistryLogin(CreateContainerServiceRegistryLoginRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateDiskResult 
createDisk(CreateDiskRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateDiskFromSnapshotResult createDiskFromSnapshot(CreateDiskFromSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateDiskSnapshotResult createDiskSnapshot(CreateDiskSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateDistributionResult createDistribution(CreateDistributionRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateDomainResult createDomain(CreateDomainRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateDomainEntryResult createDomainEntry(CreateDomainEntryRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateInstanceSnapshotResult createInstanceSnapshot(CreateInstanceSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateInstancesResult createInstances(CreateInstancesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateInstancesFromSnapshotResult createInstancesFromSnapshot(CreateInstancesFromSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateKeyPairResult createKeyPair(CreateKeyPairRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateLoadBalancerResult createLoadBalancer(CreateLoadBalancerRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateLoadBalancerTlsCertificateResult createLoadBalancerTlsCertificate(CreateLoadBalancerTlsCertificateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateRelationalDatabaseResult createRelationalDatabase(CreateRelationalDatabaseRequest request) { throw new java.lang.UnsupportedOperationException(); } 
@Override public CreateRelationalDatabaseFromSnapshotResult createRelationalDatabaseFromSnapshot(CreateRelationalDatabaseFromSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateRelationalDatabaseSnapshotResult createRelationalDatabaseSnapshot(CreateRelationalDatabaseSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteAlarmResult deleteAlarm(DeleteAlarmRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteAutoSnapshotResult deleteAutoSnapshot(DeleteAutoSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteBucketResult deleteBucket(DeleteBucketRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteBucketAccessKeyResult deleteBucketAccessKey(DeleteBucketAccessKeyRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteCertificateResult deleteCertificate(DeleteCertificateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteContactMethodResult deleteContactMethod(DeleteContactMethodRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteContainerImageResult deleteContainerImage(DeleteContainerImageRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteContainerServiceResult deleteContainerService(DeleteContainerServiceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteDiskResult deleteDisk(DeleteDiskRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteDiskSnapshotResult deleteDiskSnapshot(DeleteDiskSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteDistributionResult deleteDistribution(DeleteDistributionRequest request) { throw 
new java.lang.UnsupportedOperationException(); } @Override public DeleteDomainResult deleteDomain(DeleteDomainRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteDomainEntryResult deleteDomainEntry(DeleteDomainEntryRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteInstanceResult deleteInstance(DeleteInstanceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteInstanceSnapshotResult deleteInstanceSnapshot(DeleteInstanceSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteKeyPairResult deleteKeyPair(DeleteKeyPairRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteKnownHostKeysResult deleteKnownHostKeys(DeleteKnownHostKeysRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteLoadBalancerResult deleteLoadBalancer(DeleteLoadBalancerRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteLoadBalancerTlsCertificateResult deleteLoadBalancerTlsCertificate(DeleteLoadBalancerTlsCertificateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteRelationalDatabaseResult deleteRelationalDatabase(DeleteRelationalDatabaseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteRelationalDatabaseSnapshotResult deleteRelationalDatabaseSnapshot(DeleteRelationalDatabaseSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DetachCertificateFromDistributionResult detachCertificateFromDistribution(DetachCertificateFromDistributionRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DetachDiskResult detachDisk(DetachDiskRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public 
DetachInstancesFromLoadBalancerResult detachInstancesFromLoadBalancer(DetachInstancesFromLoadBalancerRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DetachStaticIpResult detachStaticIp(DetachStaticIpRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DisableAddOnResult disableAddOn(DisableAddOnRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DownloadDefaultKeyPairResult downloadDefaultKeyPair(DownloadDefaultKeyPairRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public EnableAddOnResult enableAddOn(EnableAddOnRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ExportSnapshotResult exportSnapshot(ExportSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetActiveNamesResult getActiveNames(GetActiveNamesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetAlarmsResult getAlarms(GetAlarmsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetAutoSnapshotsResult getAutoSnapshots(GetAutoSnapshotsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetBlueprintsResult getBlueprints(GetBlueprintsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetBucketAccessKeysResult getBucketAccessKeys(GetBucketAccessKeysRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetBucketBundlesResult getBucketBundles(GetBucketBundlesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetBucketMetricDataResult getBucketMetricData(GetBucketMetricDataRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetBucketsResult getBuckets(GetBucketsRequest request) { throw new 
java.lang.UnsupportedOperationException(); } @Override public GetBundlesResult getBundles(GetBundlesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetCertificatesResult getCertificates(GetCertificatesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetCloudFormationStackRecordsResult getCloudFormationStackRecords(GetCloudFormationStackRecordsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContactMethodsResult getContactMethods(GetContactMethodsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContainerAPIMetadataResult getContainerAPIMetadata(GetContainerAPIMetadataRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContainerImagesResult getContainerImages(GetContainerImagesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContainerLogResult getContainerLog(GetContainerLogRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContainerServiceDeploymentsResult getContainerServiceDeployments(GetContainerServiceDeploymentsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContainerServiceMetricDataResult getContainerServiceMetricData(GetContainerServiceMetricDataRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContainerServicePowersResult getContainerServicePowers(GetContainerServicePowersRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetContainerServicesResult getContainerServices(GetContainerServicesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDiskResult getDisk(GetDiskRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDiskSnapshotResult 
getDiskSnapshot(GetDiskSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDiskSnapshotsResult getDiskSnapshots(GetDiskSnapshotsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDisksResult getDisks(GetDisksRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDistributionBundlesResult getDistributionBundles(GetDistributionBundlesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDistributionLatestCacheResetResult getDistributionLatestCacheReset(GetDistributionLatestCacheResetRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDistributionMetricDataResult getDistributionMetricData(GetDistributionMetricDataRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDistributionsResult getDistributions(GetDistributionsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDomainResult getDomain(GetDomainRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetDomainsResult getDomains(GetDomainsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetExportSnapshotRecordsResult getExportSnapshotRecords(GetExportSnapshotRecordsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstanceResult getInstance(GetInstanceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstanceAccessDetailsResult getInstanceAccessDetails(GetInstanceAccessDetailsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstanceMetricDataResult getInstanceMetricData(GetInstanceMetricDataRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstancePortStatesResult 
getInstancePortStates(GetInstancePortStatesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstanceSnapshotResult getInstanceSnapshot(GetInstanceSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstanceSnapshotsResult getInstanceSnapshots(GetInstanceSnapshotsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstanceStateResult getInstanceState(GetInstanceStateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetInstancesResult getInstances(GetInstancesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetKeyPairResult getKeyPair(GetKeyPairRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetKeyPairsResult getKeyPairs(GetKeyPairsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLoadBalancerResult getLoadBalancer(GetLoadBalancerRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLoadBalancerMetricDataResult getLoadBalancerMetricData(GetLoadBalancerMetricDataRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLoadBalancerTlsCertificatesResult getLoadBalancerTlsCertificates(GetLoadBalancerTlsCertificatesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLoadBalancersResult getLoadBalancers(GetLoadBalancersRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetOperationResult getOperation(GetOperationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetOperationsResult getOperations(GetOperationsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetOperationsForResourceResult 
getOperationsForResource(GetOperationsForResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRegionsResult getRegions(GetRegionsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseResult getRelationalDatabase(GetRelationalDatabaseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseBlueprintsResult getRelationalDatabaseBlueprints(GetRelationalDatabaseBlueprintsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseBundlesResult getRelationalDatabaseBundles(GetRelationalDatabaseBundlesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseEventsResult getRelationalDatabaseEvents(GetRelationalDatabaseEventsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseLogEventsResult getRelationalDatabaseLogEvents(GetRelationalDatabaseLogEventsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseLogStreamsResult getRelationalDatabaseLogStreams(GetRelationalDatabaseLogStreamsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseMasterUserPasswordResult getRelationalDatabaseMasterUserPassword(GetRelationalDatabaseMasterUserPasswordRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseMetricDataResult getRelationalDatabaseMetricData(GetRelationalDatabaseMetricDataRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseParametersResult getRelationalDatabaseParameters(GetRelationalDatabaseParametersRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public 
GetRelationalDatabaseSnapshotResult getRelationalDatabaseSnapshot(GetRelationalDatabaseSnapshotRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabaseSnapshotsResult getRelationalDatabaseSnapshots(GetRelationalDatabaseSnapshotsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRelationalDatabasesResult getRelationalDatabases(GetRelationalDatabasesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetStaticIpResult getStaticIp(GetStaticIpRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetStaticIpsResult getStaticIps(GetStaticIpsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ImportKeyPairResult importKeyPair(ImportKeyPairRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public IsVpcPeeredResult isVpcPeered(IsVpcPeeredRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public OpenInstancePublicPortsResult openInstancePublicPorts(OpenInstancePublicPortsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public PeerVpcResult peerVpc(PeerVpcRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public PutAlarmResult putAlarm(PutAlarmRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public PutInstancePublicPortsResult putInstancePublicPorts(PutInstancePublicPortsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public RebootInstanceResult rebootInstance(RebootInstanceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public RebootRelationalDatabaseResult rebootRelationalDatabase(RebootRelationalDatabaseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public RegisterContainerImageResult 
registerContainerImage(RegisterContainerImageRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ReleaseStaticIpResult releaseStaticIp(ReleaseStaticIpRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ResetDistributionCacheResult resetDistributionCache(ResetDistributionCacheRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public SendContactMethodVerificationResult sendContactMethodVerification(SendContactMethodVerificationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public SetIpAddressTypeResult setIpAddressType(SetIpAddressTypeRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public SetResourceAccessForBucketResult setResourceAccessForBucket(SetResourceAccessForBucketRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public StartInstanceResult startInstance(StartInstanceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public StartRelationalDatabaseResult startRelationalDatabase(StartRelationalDatabaseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public StopInstanceResult stopInstance(StopInstanceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public StopRelationalDatabaseResult stopRelationalDatabase(StopRelationalDatabaseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public TagResourceResult tagResource(TagResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public TestAlarmResult testAlarm(TestAlarmRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UnpeerVpcResult unpeerVpc(UnpeerVpcRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UntagResourceResult untagResource(UntagResourceRequest 
request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateBucketResult updateBucket(UpdateBucketRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateBucketBundleResult updateBucketBundle(UpdateBucketBundleRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateContainerServiceResult updateContainerService(UpdateContainerServiceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateDistributionResult updateDistribution(UpdateDistributionRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateDistributionBundleResult updateDistributionBundle(UpdateDistributionBundleRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateDomainEntryResult updateDomainEntry(UpdateDomainEntryRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateLoadBalancerAttributeResult updateLoadBalancerAttribute(UpdateLoadBalancerAttributeRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateRelationalDatabaseResult updateRelationalDatabase(UpdateRelationalDatabaseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateRelationalDatabaseParametersResult updateRelationalDatabaseParameters(UpdateRelationalDatabaseParametersRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public void shutdown() { throw new java.lang.UnsupportedOperationException(); } @Override public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) { throw new java.lang.UnsupportedOperationException(); } }
apache-2.0
cloudnautique/cloud-cattle
code/iaas/logic-common/src/main/java/io/cattle/platform/process/common/handler/AbstractObjectProcessLogic.java
5191
package io.cattle.platform.process.common.handler;

import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.engine.handler.AbstractProcessLogic;
import io.cattle.platform.engine.process.ExitReason;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.impl.ProcessCancelException;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.process.ObjectProcessManager;
import io.cattle.platform.object.process.StandardProcess;
import io.cattle.platform.object.util.ObjectUtils;

import java.util.List;
import java.util.Map;

import javax.inject.Inject;

/**
 * Base class for process-logic handlers that operate on managed objects.
 * Provides convenience wrappers around {@link ObjectProcessManager} for
 * running the standard lifecycle processes (create/activate/deactivate/
 * remove/restore/allocate/deallocate) against a resource, plus delegate
 * methods to look up resources through the injected {@link ObjectManager}.
 */
public abstract class AbstractObjectProcessLogic extends AbstractProcessLogic {

    // Collaborators are injected via the setter methods below.
    protected ObjectManager objectManager;
    protected ObjectProcessManager objectProcessManager;
    protected ObjectMetaDataManager objectMetaDataManager;

    public ObjectManager getObjectManager() {
        return objectManager;
    }

    /** Runs the standard ACTIVATE process on {@code obj}. */
    protected ExitReason activate(Object obj, Map<String,Object> data) {
        return getObjectProcessManager().executeStandardProcess(StandardProcess.ACTIVATE, obj, data);
    }

    /** Runs the standard DEACTIVATE process on {@code obj}. */
    protected ExitReason deactivate(Object obj, Map<String,Object> data) {
        return getObjectProcessManager().executeStandardProcess(StandardProcess.DEACTIVATE, obj, data);
    }

    /**
     * Deactivates {@code obj} first (only if it is currently ACTIVE), then
     * removes it. Returns {@code null} without removing when the object was
     * already PURGED.
     */
    protected ExitReason deactivateThenRemove(Object obj, Map<String,Object> data) {
        Object state = ObjectUtils.getPropertyIgnoreErrors(obj, ObjectMetaDataManager.STATE_FIELD);

        if ( CommonStatesConstants.ACTIVE.equals(state) ) {
            getObjectProcessManager().executeStandardProcess(StandardProcess.DEACTIVATE, obj, data);
            // Re-read the object so the REMOVE below sees the post-deactivate row.
            obj = getObjectManager().reload(obj);
        }

        // NOTE(review): this PURGED check uses the state captured *before* the
        // deactivate/reload above, not the current state — confirm that is
        // intentional (an ACTIVE object can never be PURGED here, so the two
        // checks are mutually exclusive, but a concurrent purge would be missed).
        if ( CommonStatesConstants.PURGED.equals(state) ) {
            return null;
        }

        return getObjectProcessManager().executeStandardProcess(StandardProcess.REMOVE, obj, data);
    }

    /** Creates {@code obj} (ignoring a cancel of CREATE) and then activates it. */
    protected ExitReason createThenActivate(Object obj, Map<String,Object> data) {
        createIgnoreCancel(obj, data);
        return getObjectProcessManager().executeStandardProcess(StandardProcess.ACTIVATE, obj, data);
    }

    /**
     * Runs the standard CREATE process, treating a {@link ProcessCancelException}
     * as a no-op (returns {@code null}) rather than propagating it.
     */
    protected ExitReason createIgnoreCancel(Object obj, Map<String,Object> data) {
        try {
            return getObjectProcessManager().executeStandardProcess(StandardProcess.CREATE, obj, data);
        } catch ( ProcessCancelException e ) {
            // Cancellation of CREATE is expected (e.g. object already exists); swallow deliberately.
            return null;
        }
    }

    /** Runs the standard CREATE process on {@code obj}. */
    protected ExitReason create(Object obj, Map<String,Object> data) {
        return getObjectProcessManager().executeStandardProcess(StandardProcess.CREATE, obj, data);
    }

    /** Runs the standard REMOVE process on {@code obj}. */
    protected ExitReason remove(Object obj, Map<String,Object> data) {
        return getObjectProcessManager().executeStandardProcess(StandardProcess.REMOVE, obj, data);
    }

    /** Runs the standard RESTORE process on {@code obj}. */
    protected ExitReason restore(Object obj, Map<String,Object> data) {
        return getObjectProcessManager().executeStandardProcess(StandardProcess.RESTORE, obj, data);
    }

    /** Runs the standard DEALLOCATE process on {@code obj}. */
    protected ExitReason deallocate(Object obj, Map<String,Object> data) {
        return getObjectProcessManager().executeStandardProcess(StandardProcess.DEALLOCATE, obj, data);
    }

    /** Runs the standard ALLOCATE process on {@code obj}. */
    protected ExitReason allocate(Object obj, Map<String,Object> data) {
        return getObjectProcessManager().executeStandardProcess(StandardProcess.ALLOCATE, obj, data);
    }

    /**
     * Creates and synchronously executes an arbitrary (non-standard) process
     * identified by {@code processName} against {@code resource}.
     */
    protected ExitReason execute(String processName, Object resource, Map<String, Object> data) {
        ProcessInstance pi = getObjectProcessManager().createProcessInstance(processName, resource, data);
        return pi.execute();
    }

    @Inject
    public void setObjectManager(ObjectManager objectManager) {
        this.objectManager = objectManager;
    }

    public ObjectProcessManager getObjectProcessManager() {
        return objectProcessManager;
    }

    @Inject
    public void setObjectProcessManager(ObjectProcessManager objectProcessManager) {
        this.objectProcessManager = objectProcessManager;
    }

    public ObjectMetaDataManager getObjectMetaDataManager() {
        return objectMetaDataManager;
    }

    @Inject
    public void setObjectMetaDataManager(ObjectMetaDataManager objectMetaDataManager) {
        this.objectMetaDataManager = objectMetaDataManager;
    }

    /* Delegate Methods */

    public <T> T loadResource(Class<T> type, String resourceId) {
        return objectManager.loadResource(type, resourceId);
    }

    public <T> T loadResource(Class<T> type, Long resourceId) {
        return objectManager.loadResource(type, resourceId);
    }

    public <T> T loadResource(String resourceType, String resourceId) {
        return objectManager.loadResource(resourceType, resourceId);
    }

    public <T> T loadResource(String resourceType, Long resourceId) {
        return objectManager.loadResource(resourceType, resourceId);
    }

    public <T> List<T> children(Object obj, Class<T> type) {
        return objectManager.children(obj, type);
    }

    public <T> List<T> mappedChildren(Object obj, Class<T> type) {
        return objectManager.mappedChildren(obj, type);
    }

}
apache-2.0
vijayan007/java-tutorial
4-chapter/1-jdbc-intro/src/insert/InsertToDept.java
1839
/**
 *
 */
package insert;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Scanner;

/**
 * Console utility that prompts for the fields of one department/student row
 * and inserts it into the {@code department} table over JDBC.
 *
 * @author Vijayan Srinivasan
 * @author Janarthanan Shanmugam
 * @since Feb 15, 2017 11:44:40 AM
 */
public class InsertToDept {

    public static void main(String[] args) throws SQLException {
        // NOTE(review): credentials are hard-coded; move them to configuration
        // or environment variables before real use.
        String url = "jdbc:mysql://localhost:3306/jana-db";
        String user = "root";
        String password = "mysql";

        // JDBC 4+ drivers register themselves via the service-provider
        // mechanism, so the previous explicit `new com.mysql.jdbc.Driver()` /
        // DriverManager.registerDriver(...) call is unnecessary and only tied
        // this class to a specific driver implementation at compile time.
        //
        // try-with-resources guarantees the connection, statement and scanner
        // are closed even when an exception is thrown (the old code leaked all
        // three on any failure).
        try (Scanner scanner = new Scanner(System.in);
                Connection con = DriverManager.getConnection(url, user, password);
                PreparedStatement statement = con.prepareStatement(
                        "insert into department(department_name,Student_name,Course,Gender,dob,Date_of_join,Year_of_passout)values(?,?,?,?,?,?,?)")) {

            System.out.println("Enter your Department");
            String department = scanner.nextLine();
            System.out.println("Enter your student name");
            String studentName = scanner.nextLine();
            System.out.println("Enter your Course");
            String course = scanner.nextLine();
            System.out.println("Enter your Gender");
            String gender = scanner.nextLine();
            System.out.println("Enter your Date_of_birth");
            String dateOfBirth = scanner.nextLine();
            // NOTE(review): Date_of_join and Year_of_passout are read as ints
            // to match the existing table schema — confirm the column types.
            System.out.println("Enter your Date_of_join");
            int dateOfJoin = scanner.nextInt();
            System.out.println("Enter your Year of passout");
            int yearOfPassout = scanner.nextInt();

            statement.setString(1, department);
            statement.setString(2, studentName);
            statement.setString(3, course);
            statement.setString(4, gender);
            statement.setString(5, dateOfBirth);
            statement.setInt(6, dateOfJoin);
            statement.setInt(7, yearOfPassout);

            statement.executeUpdate();
        }
    }
}
apache-2.0
google/tink
java_src/src/test/java/com/google/crypto/tink/hybrid/EciesAeadHkdfPrivateKeyManagerTest.java
16165
// Copyright 2017 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////////// package com.google.crypto.tink.hybrid; import static com.google.common.truth.Truth.assertThat; import static com.google.crypto.tink.testing.KeyTypeManagerTestUtil.testKeyTemplateCompatible; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import com.google.crypto.tink.HybridDecrypt; import com.google.crypto.tink.HybridEncrypt; import com.google.crypto.tink.KeyTemplate; import com.google.crypto.tink.KeyTypeManager; import com.google.crypto.tink.aead.AeadConfig; import com.google.crypto.tink.aead.AeadKeyTemplates; import com.google.crypto.tink.aead.AesCtrHmacAeadKeyManager; import com.google.crypto.tink.proto.EcPointFormat; import com.google.crypto.tink.proto.EciesAeadHkdfKeyFormat; import com.google.crypto.tink.proto.EciesAeadHkdfParams; import com.google.crypto.tink.proto.EciesAeadHkdfPrivateKey; import com.google.crypto.tink.proto.EciesAeadHkdfPublicKey; import com.google.crypto.tink.proto.EciesHkdfKemParams; import com.google.crypto.tink.proto.EllipticCurveType; import com.google.crypto.tink.proto.HashType; import com.google.crypto.tink.proto.KeyData.KeyMaterialType; import com.google.crypto.tink.subtle.EciesAeadHkdfDemHelper; import com.google.crypto.tink.subtle.EciesAeadHkdfHybridEncrypt; import com.google.crypto.tink.subtle.EllipticCurves; import 
com.google.crypto.tink.subtle.Random; import com.google.crypto.tink.testing.TestUtil; import com.google.protobuf.ExtensionRegistryLite; import java.security.GeneralSecurityException; import java.security.interfaces.ECPublicKey; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for EciesAeadHkdfPrivateKeyManager. */ @RunWith(JUnit4.class) public class EciesAeadHkdfPrivateKeyManagerTest { @BeforeClass public static void setUp() throws Exception { AeadConfig.register(); } private final EciesAeadHkdfPrivateKeyManager manager = new EciesAeadHkdfPrivateKeyManager(); private final KeyTypeManager.KeyFactory<EciesAeadHkdfKeyFormat, EciesAeadHkdfPrivateKey> factory = manager.keyFactory(); @Test public void basics() throws Exception { assertThat(manager.getKeyType()) .isEqualTo("type.googleapis.com/google.crypto.tink.EciesAeadHkdfPrivateKey"); assertThat(manager.getVersion()).isEqualTo(0); assertThat(manager.keyMaterialType()).isEqualTo(KeyMaterialType.ASYMMETRIC_PRIVATE); } @Test public void validateKeyFormat_empty() throws Exception { assertThrows( GeneralSecurityException.class, () -> factory.validateKeyFormat(EciesAeadHkdfKeyFormat.getDefaultInstance())); } private static EciesAeadHkdfKeyFormat createKeyFormat( EllipticCurveType curve, HashType hashType, EcPointFormat ecPointFormat, KeyTemplate demKeyTemplate, byte[] salt) { return EciesAeadHkdfKeyFormat.newBuilder() .setParams( EciesAeadHkdfPrivateKeyManager.createParams( curve, hashType, ecPointFormat, demKeyTemplate, salt)) .build(); } @Test public void validateKeyFormat_valid() throws Exception { EciesAeadHkdfKeyFormat format = createKeyFormat( EllipticCurveType.NIST_P256, HashType.SHA256, EcPointFormat.UNCOMPRESSED, AesCtrHmacAeadKeyManager.aes128CtrHmacSha256Template(), TestUtil.hexDecode("aabbccddeeff")); factory.validateKeyFormat(format); } @Test public void validateKeyFormat_noPointFormat_throws() throws Exception { 
EciesAeadHkdfKeyFormat format = createKeyFormat( EllipticCurveType.NIST_P256, HashType.SHA256, EcPointFormat.UNKNOWN_FORMAT, AesCtrHmacAeadKeyManager.aes128CtrHmacSha256Template(), TestUtil.hexDecode("aabbccddeeff")); assertThrows(GeneralSecurityException.class, () -> factory.validateKeyFormat(format)); } @Test public void validateKeyFormat_noDem_throws() throws Exception { EciesAeadHkdfKeyFormat format = createKeyFormat( EllipticCurveType.NIST_P256, HashType.SHA256, EcPointFormat.UNCOMPRESSED, KeyTemplate.create("", new byte[0], KeyTemplate.OutputPrefixType.TINK), TestUtil.hexDecode("aabbccddeeff")); assertThrows(GeneralSecurityException.class, () -> factory.validateKeyFormat(format)); } @Test public void validateKeyFormat_noKemCurve_throws() throws Exception { EciesAeadHkdfKeyFormat format = createKeyFormat( EllipticCurveType.UNKNOWN_CURVE, HashType.SHA256, EcPointFormat.UNCOMPRESSED, AesCtrHmacAeadKeyManager.aes128CtrHmacSha256Template(), TestUtil.hexDecode("aabbccddeeff")); assertThrows(GeneralSecurityException.class, () -> factory.validateKeyFormat(format)); } @Test public void validateKeyFormat_noKemHash_throws() throws Exception { EciesAeadHkdfKeyFormat format = createKeyFormat( EllipticCurveType.NIST_P256, HashType.UNKNOWN_HASH, EcPointFormat.UNCOMPRESSED, AesCtrHmacAeadKeyManager.aes128CtrHmacSha256Template(), TestUtil.hexDecode("aabbccddeeff")); assertThrows(GeneralSecurityException.class, () -> factory.validateKeyFormat(format)); } @Test public void createKey_checkValues() throws Exception { EciesAeadHkdfKeyFormat format = createKeyFormat( EllipticCurveType.NIST_P256, HashType.SHA256, EcPointFormat.UNCOMPRESSED, AesCtrHmacAeadKeyManager.aes128CtrHmacSha256Template(), TestUtil.hexDecode("aabbccddeeff")); EciesAeadHkdfPrivateKey key = factory.createKey(format); assertThat(key.getPublicKey().getParams()).isEqualTo(format.getParams()); assertThat(key.getPublicKey().getX()).isNotEmpty(); assertThat(key.getPublicKey().getY()).isNotEmpty(); 
assertThat(key.getKeyValue()).isNotEmpty(); } private EciesAeadHkdfPrivateKey createValidKey() throws Exception { EciesAeadHkdfKeyFormat format = createKeyFormat( EllipticCurveType.NIST_P256, HashType.SHA256, EcPointFormat.UNCOMPRESSED, AesCtrHmacAeadKeyManager.aes128CtrHmacSha256Template(), TestUtil.hexDecode("aabbccddeeff")); return factory.createKey(format); } @Test public void validateKey_valid() throws Exception { manager.validateKey(createValidKey()); } @Test public void validateKey_invalidVersion_throws() throws Exception { EciesAeadHkdfPrivateKey key = EciesAeadHkdfPrivateKey.newBuilder(createValidKey()).setVersion(1).build(); assertThrows(GeneralSecurityException.class, () -> manager.validateKey(key)); } @Test public void getPublicKey_values() throws Exception { EciesAeadHkdfPrivateKey key = createValidKey(); EciesAeadHkdfPublicKey publicKey = manager.getPublicKey(key); assertThat(publicKey).isEqualTo(key.getPublicKey()); } @Test public void createPrimitive() throws Exception { EciesAeadHkdfPrivateKey key = createValidKey(); HybridDecrypt hybridDecrypt = manager.getPrimitive(key, HybridDecrypt.class); EciesAeadHkdfParams eciesParams = key.getPublicKey().getParams(); EciesHkdfKemParams kemParams = eciesParams.getKemParams(); ECPublicKey recipientPublicKey = EllipticCurves.getEcPublicKey( HybridUtil.toCurveType(kemParams.getCurveType()), key.getPublicKey().getX().toByteArray(), key.getPublicKey().getY().toByteArray()); EciesAeadHkdfDemHelper demHelper = new RegistryEciesAeadHkdfDemHelper(eciesParams.getDemParams().getAeadDem()); HybridEncrypt hybridEncrypt = new EciesAeadHkdfHybridEncrypt( recipientPublicKey, kemParams.getHkdfSalt().toByteArray(), HybridUtil.toHmacAlgo(kemParams.getHkdfHashType()), HybridUtil.toPointFormatType(eciesParams.getEcPointFormat()), demHelper); byte[] message = Random.randBytes(20); byte[] contextInfo = Random.randBytes(20); assertThat(hybridDecrypt.decrypt(hybridEncrypt.encrypt(message, contextInfo), contextInfo)) 
.isEqualTo(message); } @Test public void testEciesP256HkdfHmacSha256Aes128GcmTemplate() throws Exception { KeyTemplate template = EciesAeadHkdfPrivateKeyManager.eciesP256HkdfHmacSha256Aes128GcmTemplate(); assertEquals(new EciesAeadHkdfPrivateKeyManager().getKeyType(), template.getTypeUrl()); assertEquals(KeyTemplate.OutputPrefixType.TINK, template.getOutputPrefixType()); EciesAeadHkdfKeyFormat format = EciesAeadHkdfKeyFormat.parseFrom( template.getValue(), ExtensionRegistryLite.getEmptyRegistry()); assertThat(format.hasParams()).isTrue(); assertThat(format.getParams().hasKemParams()).isTrue(); assertThat(format.getParams().hasDemParams()).isTrue(); assertThat(format.getParams().getDemParams().hasAeadDem()).isTrue(); assertThat(format.getParams().getEcPointFormat()).isEqualTo(EcPointFormat.UNCOMPRESSED); EciesHkdfKemParams kemParams = format.getParams().getKemParams(); assertThat(kemParams.getCurveType()).isEqualTo(EllipticCurveType.NIST_P256); assertThat(kemParams.getHkdfHashType()).isEqualTo(HashType.SHA256); assertThat(kemParams.getHkdfSalt()).isEmpty(); assertThat(format.getParams().getDemParams().getAeadDem().toString()) .isEqualTo(AeadKeyTemplates.AES128_GCM.toString()); } @Test public void testRawEciesP256HkdfHmacSha256Aes128GcmCompressedTemplate() throws Exception { KeyTemplate template = EciesAeadHkdfPrivateKeyManager.rawEciesP256HkdfHmacSha256Aes128GcmCompressedTemplate(); assertEquals(new EciesAeadHkdfPrivateKeyManager().getKeyType(), template.getTypeUrl()); assertEquals(KeyTemplate.OutputPrefixType.RAW, template.getOutputPrefixType()); EciesAeadHkdfKeyFormat format = EciesAeadHkdfKeyFormat.parseFrom( template.getValue(), ExtensionRegistryLite.getEmptyRegistry()); assertThat(format.hasParams()).isTrue(); assertThat(format.getParams().hasKemParams()).isTrue(); assertThat(format.getParams().hasDemParams()).isTrue(); assertThat(format.getParams().getDemParams().hasAeadDem()).isTrue(); 
assertThat(format.getParams().getEcPointFormat()).isEqualTo(EcPointFormat.COMPRESSED); EciesHkdfKemParams kemParams = format.getParams().getKemParams(); assertThat(kemParams.getCurveType()).isEqualTo(EllipticCurveType.NIST_P256); assertThat(kemParams.getHkdfHashType()).isEqualTo(HashType.SHA256); assertThat(kemParams.getHkdfSalt()).isEmpty(); assertThat(format.getParams().getDemParams().getAeadDem().toString()) .isEqualTo(AeadKeyTemplates.AES128_GCM.toString()); } @Test public void testEciesP256HkdfHmacSha256Aes128CtrHmacSha256Template() throws Exception { KeyTemplate template = EciesAeadHkdfPrivateKeyManager.eciesP256HkdfHmacSha256Aes128CtrHmacSha256Template(); assertEquals(new EciesAeadHkdfPrivateKeyManager().getKeyType(), template.getTypeUrl()); assertEquals(KeyTemplate.OutputPrefixType.TINK, template.getOutputPrefixType()); EciesAeadHkdfKeyFormat format = EciesAeadHkdfKeyFormat.parseFrom( template.getValue(), ExtensionRegistryLite.getEmptyRegistry()); assertThat(format.hasParams()).isTrue(); assertThat(format.getParams().hasKemParams()).isTrue(); assertThat(format.getParams().hasDemParams()).isTrue(); assertThat(format.getParams().getDemParams().hasAeadDem()).isTrue(); assertThat(format.getParams().getEcPointFormat()).isEqualTo(EcPointFormat.UNCOMPRESSED); EciesHkdfKemParams kemParams = format.getParams().getKemParams(); assertThat(kemParams.getCurveType()).isEqualTo(EllipticCurveType.NIST_P256); assertThat(kemParams.getHkdfHashType()).isEqualTo(HashType.SHA256); assertThat(kemParams.getHkdfSalt()).isEmpty(); assertThat(format.getParams().getDemParams().getAeadDem().toString()) .isEqualTo(AeadKeyTemplates.AES128_CTR_HMAC_SHA256.toString()); } @Test public void testRawEciesP256HkdfHmacSha256Aes128CtrHmacSha256CompressedTemplate() throws Exception { KeyTemplate template = EciesAeadHkdfPrivateKeyManager .rawEciesP256HkdfHmacSha256Aes128CtrHmacSha256CompressedTemplate(); assertEquals(new EciesAeadHkdfPrivateKeyManager().getKeyType(), template.getTypeUrl()); 
assertEquals(KeyTemplate.OutputPrefixType.RAW, template.getOutputPrefixType()); EciesAeadHkdfKeyFormat format = EciesAeadHkdfKeyFormat.parseFrom( template.getValue(), ExtensionRegistryLite.getEmptyRegistry()); assertThat(format.hasParams()).isTrue(); assertThat(format.getParams().hasKemParams()).isTrue(); assertThat(format.getParams().hasDemParams()).isTrue(); assertThat(format.getParams().getDemParams().hasAeadDem()).isTrue(); assertThat(format.getParams().getEcPointFormat()).isEqualTo(EcPointFormat.COMPRESSED); EciesHkdfKemParams kemParams = format.getParams().getKemParams(); assertThat(kemParams.getCurveType()).isEqualTo(EllipticCurveType.NIST_P256); assertThat(kemParams.getHkdfHashType()).isEqualTo(HashType.SHA256); assertThat(kemParams.getHkdfSalt()).isEmpty(); assertThat(format.getParams().getDemParams().getAeadDem().toString()) .isEqualTo(AeadKeyTemplates.AES128_CTR_HMAC_SHA256.toString()); } @Test public void testKeyTemplateAndManagerCompatibility() throws Exception { EciesAeadHkdfPrivateKeyManager manager = new EciesAeadHkdfPrivateKeyManager(); testKeyTemplateCompatible( manager, EciesAeadHkdfPrivateKeyManager.eciesP256HkdfHmacSha256Aes128GcmTemplate()); testKeyTemplateCompatible( manager, EciesAeadHkdfPrivateKeyManager.rawEciesP256HkdfHmacSha256Aes128GcmCompressedTemplate()); testKeyTemplateCompatible( manager, EciesAeadHkdfPrivateKeyManager.eciesP256HkdfHmacSha256Aes128CtrHmacSha256Template()); testKeyTemplateCompatible( manager, EciesAeadHkdfPrivateKeyManager .rawEciesP256HkdfHmacSha256Aes128CtrHmacSha256CompressedTemplate()); } @Test public void testKeyFormats() throws Exception { factory.validateKeyFormat( factory.keyFormats().get("ECIES_P256_HKDF_HMAC_SHA256_AES128_GCM").keyFormat); factory.validateKeyFormat( factory.keyFormats().get("ECIES_P256_HKDF_HMAC_SHA256_AES128_GCM_RAW").keyFormat); factory.validateKeyFormat( factory.keyFormats().get("ECIES_P256_COMPRESSED_HKDF_HMAC_SHA256_AES128_GCM").keyFormat); factory.validateKeyFormat( factory 
.keyFormats() .get("ECIES_P256_COMPRESSED_HKDF_HMAC_SHA256_AES128_GCM_RAW") .keyFormat); factory.validateKeyFormat( factory.keyFormats().get("ECIES_P256_HKDF_HMAC_SHA256_AES128_CTR_HMAC_SHA256").keyFormat); factory.validateKeyFormat( factory .keyFormats() .get("ECIES_P256_HKDF_HMAC_SHA256_AES128_CTR_HMAC_SHA256_RAW") .keyFormat); factory.validateKeyFormat( factory .keyFormats() .get("ECIES_P256_COMPRESSED_HKDF_HMAC_SHA256_AES128_CTR_HMAC_SHA256") .keyFormat); factory.validateKeyFormat( factory .keyFormats() .get("ECIES_P256_COMPRESSED_HKDF_HMAC_SHA256_AES128_CTR_HMAC_SHA256_RAW") .keyFormat); } }
apache-2.0
ow2-chameleon/fuchsia
tools/grid/src/main/java/org/ow2/chameleon/fuchsia/tools/grid/data/query/ContentImporter.java
3891
/* * #%L * OW2 Chameleon - Fuchsia Framework * %% * Copyright (C) 2009 - 2014 OW2 Chameleon * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.ow2.chameleon.fuchsia.tools.grid.data.query; import org.apache.felix.ipojo.Factory; import org.apache.felix.ipojo.annotations.*; import org.codehaus.jackson.map.ObjectMapper; import org.osgi.framework.BundleContext; import org.osgi.service.http.HttpService; import org.osgi.service.http.NamespaceException; import org.ow2.chameleon.fuchsia.core.component.ImporterService; import org.ow2.chameleon.fuchsia.tools.grid.ContentHelper; import org.ow2.chameleon.fuchsia.tools.grid.model.LinkerNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.ArrayList; import java.util.List; @Component @Instantiate public class ContentImporter extends HttpServlet { private static final Logger LOG = LoggerFactory.getLogger(ContentImporter.class); private static final String URL = "/contentImporter"; public static final List<String> IMPORTER_SERVICE_INTERFACE = new ArrayList<String>(); public static final List<String> IMPORTER_SERVICE_PROPERTIES = new ArrayList<String>(); // initialize the static lists static { IMPORTER_SERVICE_INTERFACE.add(ImporterService.class.getName()); } @Requires HttpService web; @Requires 
ContentHelper content; BundleContext context; public ContentImporter(BundleContext context) { this.context = context; } @Validate public void validate() { try { web.registerServlet(URL, this, null, null); } catch (ServletException e) { LOG.error("Error while registering the servlet", e); } catch (NamespaceException e) { LOG.error("Error while registering the servlet", e); } } @Invalidate public void invalidate() { web.unregister(URL); } @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { List<LinkerNode> rootList = new ArrayList<LinkerNode>(); ObjectMapper mapper = new ObjectMapper(); for (Factory factory : content.getFuchsiaFactories(IMPORTER_SERVICE_INTERFACE, IMPORTER_SERVICE_PROPERTIES)) { rootList.add(new LinkerNode(factory.getName())); } /* try { ServiceReference[] references=context.getServiceReferences(Factory.class.getName(),null); if(references!=null) { for (ServiceReference sr : references) { for(String key:sr.getPropertyKeys()){ System.out.println(key+"----->" + sr.getProperty(key)); } System.out.println("######################################"); Factory fact=(Factory) context.getService(sr); } } } catch (InvalidSyntaxException e) { e.printStackTrace(); } */ //rootList.add(new ImportationLinkerNode("jander fact")); mapper.writeValue(resp.getWriter(), rootList); } }
apache-2.0
elminsterjimmy/java
EasyExpressionSystem/src/main/java/com/elminster/easyexpression/operator/GreaterEqualThanCompareOperator.java
884
package com.elminster.easyexpression.operator; import com.elminster.easyexpression.IVariable; public class GreaterEqualThanCompareOperator extends CompareOperator { public static final IOperator INSTANCE = new GreaterEqualThanCompareOperator(); @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected boolean compare(IVariable leftVariable, IVariable rightVariable) { boolean rst = false; Object leftValue = leftVariable.getVariableValue(); Object rightValue = rightVariable.getVariableValue(); int c = -1; if (leftValue instanceof Comparable) { c = ((Comparable)leftValue).compareTo(rightValue); } else { c = (leftValue.toString().compareTo(rightValue.toString())); } if (0 <= c) { rst = true; } return rst; } @Override public String getOperatorName() { return " >= "; } }
apache-2.0
liujia128/RocketMQ-Master-analyze
rocketmq-broker/src/main/java/com/alibaba/rocketmq/broker/processor/ForwardRequestProcessor.java
1702
/** * Copyright (C) 2010-2013 Alibaba Group Holding Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.rocketmq.broker.processor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.alibaba.rocketmq.broker.BrokerController; import com.alibaba.rocketmq.common.constant.LoggerName; import com.alibaba.rocketmq.remoting.netty.NettyRequestProcessor; import com.alibaba.rocketmq.remoting.protocol.RemotingCommand; import io.netty.channel.ChannelHandlerContext; /** * 向Client转发请求,通常用于管理、监控、统计目的 * * @author shijia.wxr<vintage.wang@gmail.com> * @since 2013-7-26 */ public class ForwardRequestProcessor implements NettyRequestProcessor { private static final Logger log = LoggerFactory.getLogger(LoggerName.BrokerLoggerName); private final BrokerController brokerController; public ForwardRequestProcessor(final BrokerController brokerController) { this.brokerController = brokerController; } @Override public RemotingCommand processRequest(ChannelHandlerContext ctx, RemotingCommand request) { // TODO Auto-generated method stub return null; } }
apache-2.0
tensorflow/java
tensorflow-core/tensorflow-core-api/src/gen/java/org/tensorflow/op/train/ApplyGradientDescent.java
5319
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =======================================================================*/ // This class has been generated, DO NOT EDIT! package org.tensorflow.op.train; import java.util.Arrays; import org.tensorflow.GraphOperation; import org.tensorflow.Operand; import org.tensorflow.Operation; import org.tensorflow.OperationBuilder; import org.tensorflow.Output; import org.tensorflow.op.RawOp; import org.tensorflow.op.RawOpInputs; import org.tensorflow.op.Scope; import org.tensorflow.op.annotation.Endpoint; import org.tensorflow.op.annotation.OpInputsMetadata; import org.tensorflow.op.annotation.OpMetadata; import org.tensorflow.op.annotation.Operator; import org.tensorflow.proto.framework.DataType; import org.tensorflow.types.family.TType; /** * Update '*var' by subtracting 'alpha' * 'delta' from it. 
* * @param <T> data type for {@code out} output */ @OpMetadata( opType = ApplyGradientDescent.OP_NAME, inputsClass = ApplyGradientDescent.Inputs.class ) @Operator( group = "train" ) public final class ApplyGradientDescent<T extends TType> extends RawOp implements Operand<T> { /** * The name of this op, as known by TensorFlow core engine */ public static final String OP_NAME = "ApplyGradientDescent"; private Output<T> out; public ApplyGradientDescent(Operation operation) { super(operation, OP_NAME); int outputIdx = 0; out = operation.output(outputIdx++); } /** * Factory method to create a class wrapping a new ApplyGradientDescent operation. * * @param scope current scope * @param var Should be from a Variable(). * @param alpha Scaling factor. Must be a scalar. * @param delta The change. * @param options carries optional attribute values * @param <T> data type for {@code ApplyGradientDescent} output and operands * @return a new instance of ApplyGradientDescent */ @Endpoint( describeByClass = true ) public static <T extends TType> ApplyGradientDescent<T> create(Scope scope, Operand<T> var, Operand<T> alpha, Operand<T> delta, Options... options) { OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "ApplyGradientDescent"); opBuilder.addInput(var.asOutput()); opBuilder.addInput(alpha.asOutput()); opBuilder.addInput(delta.asOutput()); if (options != null) { for (Options opts : options) { if (opts.useLocking != null) { opBuilder.setAttr("use_locking", opts.useLocking); } } } return new ApplyGradientDescent<>(opBuilder.build()); } /** * Sets the useLocking option. * * @param useLocking If {@code True}, the subtraction will be protected by a lock; * otherwise the behavior is undefined, but may exhibit less contention. * @return this Options instance. */ public static Options useLocking(Boolean useLocking) { return new Options().useLocking(useLocking); } /** * Gets out. * Same as &quot;var&quot;. * @return out. 
*/ public Output<T> out() { return out; } @Override public Output<T> asOutput() { return out; } /** * Optional attributes for {@link org.tensorflow.op.train.ApplyGradientDescent} */ public static class Options { private Boolean useLocking; private Options() { } /** * Sets the useLocking option. * * @param useLocking If {@code True}, the subtraction will be protected by a lock; * otherwise the behavior is undefined, but may exhibit less contention. * @return this Options instance. */ public Options useLocking(Boolean useLocking) { this.useLocking = useLocking; return this; } } @OpInputsMetadata( outputsClass = ApplyGradientDescent.class ) public static class Inputs<T extends TType> extends RawOpInputs<ApplyGradientDescent<T>> { /** * Should be from a Variable(). */ public final Operand<T> var; /** * Scaling factor. Must be a scalar. */ public final Operand<T> alpha; /** * The change. */ public final Operand<T> delta; /** * The T attribute */ public final DataType T; /** * If `True`, the subtraction will be protected by a lock; * otherwise the behavior is undefined, but may exhibit less contention. */ public final boolean useLocking; public Inputs(GraphOperation op) { super(new ApplyGradientDescent<>(op), op, Arrays.asList("T", "use_locking")); int inputIndex = 0; var = (Operand<T>) op.input(inputIndex++); alpha = (Operand<T>) op.input(inputIndex++); delta = (Operand<T>) op.input(inputIndex++); T = op.attributes().getAttrType("T"); useLocking = op.attributes().getAttrBool("use_locking"); } } }
apache-2.0
wu-sheng/sky-walking
apm-commons/apm-util/src/main/java/org/apache/skywalking/apm/util/ConfigInitializer.java
7970
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.skywalking.apm.util; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Properties; /** * Init a class's static fields by a {@link Properties}, including static fields and static inner classes. * <p> */ public class ConfigInitializer { public static void initialize(Properties properties, Class<?> rootConfigType) throws IllegalAccessException { initNextLevel(properties, rootConfigType, new ConfigDesc()); } private static void initNextLevel(Properties properties, Class<?> recentConfigType, ConfigDesc parentDesc) throws IllegalArgumentException, IllegalAccessException { for (Field field : recentConfigType.getFields()) { if (Modifier.isPublic(field.getModifiers()) && Modifier.isStatic(field.getModifiers())) { String configKey = (parentDesc + "." 
+ field.getName()).toLowerCase(); Class<?> type = field.getType(); if (type.equals(Map.class)) { /* * Map config format is, config_key[map_key]=map_value * Such as plugin.opgroup.resttemplate.rule[abc]=/url/path */ // Deduct two generic types of the map ParameterizedType genericType = (ParameterizedType) field.getGenericType(); Type[] argumentTypes = genericType.getActualTypeArguments(); Type keyType = null; Type valueType = null; if (argumentTypes != null && argumentTypes.length == 2) { // Get key type and value type of the map keyType = argumentTypes[0]; valueType = argumentTypes[1]; } Map map = (Map) field.get(null); // Set the map from config key and properties setForMapType(configKey, map, properties, keyType, valueType); } else { /* * Others typical field type */ String value = properties.getProperty(configKey); // Convert the value into real type final Length lengthDefine = field.getAnnotation(Length.class); if (lengthDefine != null) { if (value != null && value.length() > lengthDefine.value()) { value = value.substring(0, lengthDefine.value()); } } Object convertedValue = convertToTypicalType(type, value); if (convertedValue != null) { field.set(null, convertedValue); } } } } for (Class<?> innerConfiguration : recentConfigType.getClasses()) { parentDesc.append(innerConfiguration.getSimpleName()); initNextLevel(properties, innerConfiguration, parentDesc); parentDesc.removeLastDesc(); } } /** * Convert string value to typical type. 
* * @param type type to convert * @param value string value to be converted * @return converted value or null */ private static Object convertToTypicalType(Type type, String value) { if (value == null || type == null) { return null; } Object result = null; if (String.class.equals(type)) { result = value; } else if (int.class.equals(type) || Integer.class.equals(type)) { result = Integer.valueOf(value); } else if (long.class.equals(type) || Long.class.equals(type)) { result = Long.valueOf(value); } else if (boolean.class.equals(type) || Boolean.class.equals(type)) { result = Boolean.valueOf(value); } else if (float.class.equals(type) || Float.class.equals(type)) { result = Boolean.valueOf(value); } else if (double.class.equals(type) || Double.class.equals(type)) { result = Double.valueOf(value); } else if (List.class.equals(type)) { result = convert2List(value); } else if (type instanceof Class) { Class<?> clazz = (Class<?>) type; if (clazz.isEnum()) { result = Enum.valueOf((Class<Enum>) type, value.toUpperCase()); } } return result; } /** * Set map items. 
* * @param configKey config key must not be null * @param map map to set must not be null * @param properties properties must not be null * @param keyType key type of the map * @param valueType value type of the map */ private static void setForMapType(String configKey, Map<Object, Object> map, Properties properties, final Type keyType, final Type valueType) { Objects.requireNonNull(configKey); Objects.requireNonNull(map); Objects.requireNonNull(properties); String prefix = configKey + "["; String suffix = "]"; properties.forEach((propertyKey, propertyValue) -> { String propertyStringKey = propertyKey.toString(); if (propertyStringKey.startsWith(prefix) && propertyStringKey.endsWith(suffix)) { String itemKey = propertyStringKey.substring( prefix.length(), propertyStringKey.length() - suffix.length()); Object keyObj; Object valueObj; keyObj = convertToTypicalType(keyType, itemKey); valueObj = convertToTypicalType(valueType, propertyValue.toString()); if (keyObj == null) { keyObj = itemKey; } if (valueObj == null) { valueObj = propertyValue; } map.put(keyObj, valueObj); } }); } private static List<String> convert2List(String value) { if (StringUtil.isEmpty(value)) { return Collections.emptyList(); } List<String> result = new LinkedList<>(); String[] segments = value.split(","); for (String segment : segments) { String trimmedSegment = segment.trim(); if (StringUtil.isNotEmpty(trimmedSegment)) { result.add(trimmedSegment); } } return result; } } class ConfigDesc { private LinkedList<String> descs = new LinkedList<>(); void append(String currentDesc) { if (StringUtil.isNotEmpty(currentDesc)) { descs.addLast(currentDesc); } } void removeLastDesc() { descs.removeLast(); } @Override public String toString() { return String.join(".", descs); } }
apache-2.0
authorjapps/zerocode
kafka-testing/src/test/java/org/jsmart/zerocode/integration/tests/kafka/consume/KafkaConsumeJsonTest.java
547
package org.jsmart.zerocode.integration.tests.kafka.consume;

import org.jsmart.zerocode.core.domain.Scenario;
import org.jsmart.zerocode.core.domain.TargetEnv;
import org.jsmart.zerocode.core.runner.ZeroCodeUnitRunner;
import org.junit.Test;
import org.junit.runner.RunWith;

/**
 * Runs the declarative Kafka JSON-consume scenario through the ZeroCode
 * runner. The test steps live entirely in the referenced JSON scenario file;
 * the Java method body is intentionally empty.
 */
@TargetEnv("kafka_servers/kafka_test_server.properties")
@RunWith(ZeroCodeUnitRunner.class)
public class KafkaConsumeJsonTest {

    @Test
    @Scenario("kafka/consume/test_kafka_consume_json_msg.json")
    public void testKafkaConsume_json() throws Exception {
        // No body: ZeroCodeUnitRunner executes the JSON scenario above.
    }
}
apache-2.0
stagraqubole/presto
presto-kafka/src/main/java/com/facebook/presto/kafka/KafkaTableHandle.java
4500
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.kafka; import com.facebook.presto.spi.ConnectorTableHandle; import com.facebook.presto.spi.SchemaTableName; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Objects; import static com.google.common.base.Preconditions.checkNotNull; /** * Kafka specific {@link ConnectorTableHandle}. */ public final class KafkaTableHandle implements ConnectorTableHandle { /** * connector id */ private final String connectorId; /** * The schema name for this table. Is set through configuration and read * using {@link KafkaConnectorConfig#getDefaultSchema()}. Usually 'default'. */ private final String schemaName; /** * The table name used by presto. */ private final String tableName; /** * The topic name that is read from Kafka. 
*/ private final String topicName; private final String keyDataFormat; private final String messageDataFormat; @JsonCreator public KafkaTableHandle( @JsonProperty("connectorId") String connectorId, @JsonProperty("schemaName") String schemaName, @JsonProperty("tableName") String tableName, @JsonProperty("topicName") String topicName, @JsonProperty("keyDataFormat") String keyDataFormat, @JsonProperty("messageDataFormat") String messageDataFormat) { this.connectorId = checkNotNull(connectorId, "connectorId is null"); this.schemaName = checkNotNull(schemaName, "schemaName is null"); this.tableName = checkNotNull(tableName, "tableName is null"); this.topicName = checkNotNull(topicName, "topicName is null"); this.keyDataFormat = checkNotNull(keyDataFormat, "keyDataFormat is null"); this.messageDataFormat = checkNotNull(messageDataFormat, "messageDataFormat is null"); } @JsonProperty public String getConnectorId() { return connectorId; } @JsonProperty public String getSchemaName() { return schemaName; } @JsonProperty public String getTableName() { return tableName; } @JsonProperty public String getTopicName() { return topicName; } @JsonProperty public String getKeyDataFormat() { return keyDataFormat; } @JsonProperty public String getMessageDataFormat() { return messageDataFormat; } public SchemaTableName toSchemaTableName() { return new SchemaTableName(schemaName, tableName); } @Override public int hashCode() { return Objects.hashCode(connectorId, schemaName, tableName, topicName, keyDataFormat, messageDataFormat); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } KafkaTableHandle other = (KafkaTableHandle) obj; return Objects.equal(this.connectorId, other.connectorId) && Objects.equal(this.schemaName, other.schemaName) && Objects.equal(this.tableName, other.tableName) && Objects.equal(this.topicName, other.topicName) && Objects.equal(this.keyDataFormat, other.keyDataFormat) 
&& Objects.equal(this.messageDataFormat, other.messageDataFormat); } @Override public String toString() { return Objects.toStringHelper(this) .add("connectorId", connectorId) .add("schemaName", schemaName) .add("tableName", tableName) .add("topicName", topicName) .add("keyDataFormat", keyDataFormat) .add("messageDataFormat", messageDataFormat) .toString(); } }
apache-2.0
cyenyxe/eva-pipeline
src/test/java/uk/ac/ebi/eva/pipeline/io/writers/GeneWriterTest.java
3746
/*
 * Copyright 2016 EMBL - European Bioinformatics Institute
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package uk.ac.ebi.eva.pipeline.io.writers;

import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;

import uk.ac.ebi.eva.pipeline.configuration.MongoConfiguration;
import uk.ac.ebi.eva.pipeline.io.mappers.GeneLineMapper;
import uk.ac.ebi.eva.pipeline.model.FeatureCoordinates;
import uk.ac.ebi.eva.pipeline.parameters.MongoConnection;
import uk.ac.ebi.eva.test.data.GtfStaticTestData;
import uk.ac.ebi.eva.test.rules.TemporaryMongoRule;

import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * {@link GeneWriter}
 * input: a List of FeatureCoordinates to each call of `.write()`
 * output: the FeatureCoordinates get written in mongo, with at least: chromosome, start and end.
 */
@RunWith(SpringRunner.class)
@TestPropertySource({"classpath:test-mongo.properties"})
@ContextConfiguration(classes = {MongoConnection.class, MongoMappingContext.class})
public class GeneWriterTest {

    // Mongo collection the writer under test stores features into.
    private static final String COLLECTION_FEATURES_NAME = "features";

    @Autowired
    private MongoConnection mongoConnection;

    @Autowired
    private MongoMappingContext mongoMappingContext;

    // Provides a throwaway database per test run.
    @Rule
    public TemporaryMongoRule mongoRule = new TemporaryMongoRule();

    /**
     * Parses every non-comment line of the static GTF fixture into
     * FeatureCoordinates, writes them with GeneWriter, and then verifies that
     * each stored document carries chromosome, start and end, and that the
     * document count matches the number of parsed genes.
     */
    @Test
    public void shouldWriteAllFieldsIntoMongoDb() throws Exception {
        String databaseName = mongoRule.getRandomTemporaryDatabaseName();
        MongoOperations mongoOperations = MongoConfiguration.getMongoOperations(databaseName, mongoConnection,
                mongoMappingContext);

        GeneWriter geneWriter = new GeneWriter(mongoOperations, COLLECTION_FEATURES_NAME);

        // Build the input list from the GTF fixture, skipping comment lines.
        GeneLineMapper lineMapper = new GeneLineMapper();
        List<FeatureCoordinates> genes = new ArrayList<>();
        for (String gtfLine : GtfStaticTestData.GTF_CONTENT.split(GtfStaticTestData.GTF_LINE_SPLIT)) {
            if (!gtfLine.startsWith(GtfStaticTestData.GTF_COMMENT_LINE)) {
                genes.add(lineMapper.mapLine(gtfLine, 0));
            }
        }
        geneWriter.write(genes);

        DBCollection genesCollection = mongoRule.getCollection(databaseName, COLLECTION_FEATURES_NAME);

        // count documents in DB and check they have region (chr + start + end)
        DBCursor cursor = genesCollection.find();

        int count = 0;
        while (cursor.hasNext()) {
            count++;
            DBObject next = cursor.next();
            assertTrue(next.get("chromosome") != null);
            assertTrue(next.get("start") != null);
            assertTrue(next.get("end") != null);
        }
        assertEquals(genes.size(), count);
    }
}
apache-2.0
mouse3150/blooming
joss/src/main/java/org/javaswift/joss/client/website/AbstractFileObject.java
138
package org.javaswift.joss.client.website;

/**
 * Skeletal base class for {@code FileObject} implementations in the website
 * client. Currently empty; all behavior is left to concrete subclasses.
 * NOTE(review): the type bound uses the raw type {@code FileObject};
 * presumably {@code N extends FileObject<N>} was intended — confirm against
 * the FileObject interface declaration.
 */
public abstract class AbstractFileObject<N extends FileObject> implements FileObject<N> {
}
apache-2.0
pyj0918/study
demo_api_inst/src/main/java/com/ylink/utils/MessageUtils.java
2231
package com.ylink.utils;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;

/**
 * Helpers for preparing request-message parameter maps: filtering empty or
 * ignored entries, building the sorted sign string, and flattening bean
 * fields into a transmission map.
 */
public class MessageUtils {

    /**
     * Filters a parameter map, dropping entries whose value is null or empty
     * and entries whose key appears in the ignore list.
     *
     * @param sArray      source parameters (values are expected to be Strings)
     * @param ignoreFlied keys to exclude from the result
     * @return a new map containing only the surviving entries; empty when the
     *         source is null or empty
     */
    public static Map<String, Object> paraFilter(Map<String, Object> sArray, String... ignoreFlied) {
        Map<String, Object> result = new HashMap<>();
        if (sArray == null || sArray.isEmpty()) {
            return result;
        }
        for (String key : sArray.keySet()) {
            String value = (String) sArray.get(key);
            if (value == null || value.isEmpty()) {
                continue;
            }
            if (ArrayUtils.contains(ignoreFlied, key)) {
                continue;
            }
            result.put(key, value);
        }
        return result;
    }

    /**
     * Joins the parameters as "k1=v1&amp;k2=v2&amp;..." with keys in natural
     * sort order, for signing.
     *
     * @throws RuntimeException when any value is not a String
     */
    public static String createLinkString(Map<String, Object> params) {
        List<String> keys = new ArrayList<>(params.keySet());
        Collections.sort(keys);
        // StringBuilder avoids O(n^2) string concatenation in the loop.
        StringBuilder prestr = new StringBuilder();
        for (int i = 0; i < keys.size(); i++) {
            String key = keys.get(i);
            if (!(params.get(key) instanceof String)) {
                throw new RuntimeException("加签排序时发生数据异常.");
            }
            String value = (String) params.get(key);
            prestr.append(key).append('=').append(value);
            if (i < keys.size() - 1) {
                prestr.append('&');
            }
        }
        return prestr.toString();
    }

    /**
     * Flattens an object's declared fields (own class plus direct superclass)
     * into a name/value map via reflection, skipping empty values.
     */
    public static Map<String, Object> conversionDataTransmissionForMap(Object obj) throws Exception {
        List<Field> effectiveFields = findEffectiveFields(obj);
        Map<String, Object> datas = new HashMap<>();
        for (Field field : effectiveFields) {
            String key = field.getName();
            String value = (String) ReflectionUtil.reflectGetValue(obj, key);
            // FIX: the original had a stray ';' terminating the if-statement,
            // so empty values were always added despite the check.
            if (!StringUtils.isEmpty(value)) {
                datas.put(key, value);
            }
        }
        return datas;
    }

    /**
     * Collects the declared fields of the object's class and of its direct
     * superclass (one level only, as in the original contract).
     */
    protected static List<Field> findEffectiveFields(Object obj) throws Exception {
        Class<?> clazz = obj.getClass();
        List<Field> effectiveFields = new ArrayList<>();
        Collections.addAll(effectiveFields, clazz.getDeclaredFields());
        Class<?> superClass = clazz.getSuperclass();
        // Guard against a null superclass (e.g. when obj's class is Object).
        if (superClass != null) {
            Collections.addAll(effectiveFields, superClass.getDeclaredFields());
        }
        return effectiveFields;
    }
}
apache-2.0
mufaddalq/cloudstack-datera-driver
plugins/hypervisors/ucs/src/com/cloud/ucs/database/UcsBladeDao.java
949
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // package com.cloud.ucs.database; import com.cloud.utils.db.GenericDao; public interface UcsBladeDao extends GenericDao<UcsBladeVO, Long> { }
apache-2.0
youngmonkeys/ezyfox
ezyfox-bean/src/main/java/com/tvd12/ezyfox/bean/impl/EzySimplePrototypeFactory.java
5143
package com.tvd12.ezyfox.bean.impl;

import static com.tvd12.ezyfox.bean.impl.EzyBeanKey.of;
import static com.tvd12.ezyfox.reflect.EzyClasses.flatSuperAndInterfaceClasses;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;

import com.tvd12.ezyfox.annotation.EzyKeyValue;
import com.tvd12.ezyfox.bean.EzyPrototypeFactory;
import com.tvd12.ezyfox.bean.EzyPrototypeSupplier;
import com.tvd12.ezyfox.bean.annotation.EzyPrototype;
import com.tvd12.ezyfox.io.EzyMaps;

/**
 * Registry of prototype-bean suppliers. Suppliers are looked up by a
 * (name, type) key, by annotation, or by a property map captured from the
 * {@code @EzyPrototype} annotation at registration time.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public class EzySimplePrototypeFactory
    extends EzySimpleBeanFactory
    implements EzyPrototypeFactory {

    // All registered suppliers (deduplicated by identity/equals).
    protected final Set<EzyPrototypeSupplier> supplierSet = new HashSet<>();
    // Lookup table keyed by (bean name, bean type).
    protected final Map<EzyBeanKey, EzyPrototypeSupplier> supplierByKey = new ConcurrentHashMap<>();
    // Property map attached to each supplier when it was added.
    protected final Map<EzyPrototypeSupplier, Map> suppliersByProperties = new ConcurrentHashMap<>();

    /** Finds a supplier for the given type, using the type's derived bean name. */
    @Override
    public EzyPrototypeSupplier getSupplier(Class objectType) {
        return getSupplier(getBeanName(objectType), objectType);
    }

    /**
     * Finds a supplier by name and type. Falls back to the first registered
     * supplier whose key type is assignable to the requested type; the
     * fallback iteration order over the key set is unspecified.
     */
    @Override
    public EzyPrototypeSupplier getSupplier(String objectName, Class objectType) {
        String realname = translateBeanName(objectName, objectType);
        EzyPrototypeSupplier supplier = supplierByKey.get(of(realname, objectType));
        if(supplier == null) {
            for(EzyBeanKey key : supplierByKey.keySet()) {
                if(objectType.isAssignableFrom(key.getType())) {
                    supplier = supplierByKey.get(key);
                    break;
                }
            }
        }
        return supplier;
    }

    /** Returns the first supplier whose object type carries the annotation, or null. */
    @Override
    public EzyPrototypeSupplier getAnnotatedSupplier(Class annotationClass) {
        List<EzyPrototypeSupplier> list = getSuppliers(annotationClass);
        if(list.size() > 0)
            return list.get(0);
        return null;
    }

    /** Returns the first supplier whose registered properties contain all given entries, or null. */
    @Override
    public EzyPrototypeSupplier getSupplier(Map properties) {
        for(Entry<EzyPrototypeSupplier, Map> entry :
                suppliersByProperties.entrySet()) {
            if(EzyMaps.containsAll(entry.getValue(), properties))
                return entry.getKey();
        }
        return null;
    }

    /** Returns a snapshot of all registered suppliers. */
    @Override
    public List<EzyPrototypeSupplier> getSuppliers() {
        return new ArrayList<>(supplierSet);
    }

    /** Returns all suppliers whose registered properties contain all given entries. */
    @Override
    public List<EzyPrototypeSupplier> getSuppliers(Map properties) {
        Set<EzyPrototypeSupplier> set = new HashSet<>();
        for(Entry<EzyPrototypeSupplier, Map> entry : suppliersByProperties.entrySet()) {
            if(EzyMaps.containsAll(entry.getValue(), properties))
                set.add(entry.getKey());
        }
        return new ArrayList<>(set);
    }

    /** Returns suppliers whose object type is annotated with any of the given annotations. */
    @Override
    public List<EzyPrototypeSupplier> getSuppliers(Class... annotationClasses) {
        return getSuppliers(s -> {
            for(Class annotationClass : annotationClasses) {
                if(s.getObjectType().isAnnotationPresent(annotationClass))
                    return true;
            }
            return false;
        });
    }

    /** Returns all suppliers matching the predicate. */
    @Override
    public List<EzyPrototypeSupplier> getSuppliers(Predicate<EzyPrototypeSupplier> filter) {
        List<EzyPrototypeSupplier> list = new ArrayList<>();
        for(EzyPrototypeSupplier supplier : supplierSet) {
            if(filter.test(supplier))
                list.add(supplier);
        }
        return list;
    }

    /** Returns all suppliers whose object type extends/implements the parent class. */
    @Override
    public List<EzyPrototypeSupplier> getSuppliersOf(Class parentClass) {
        return getSuppliers(s ->
            parentClass.isAssignableFrom(s.getObjectType())
        );
    }

    /** Returns the property map that was registered with the supplier. */
    @Override
    public Map getProperties(EzyPrototypeSupplier supplier) {
        return suppliersByProperties.get(supplier);
    }

    /** Registers a supplier under its type-derived prototype bean name. */
    @Override
    public void addSupplier(EzyPrototypeSupplier supplier) {
        Class type = supplier.getObjectType();
        addSupplier(getBeanName(type), supplier);
    }

    /** Registers a supplier with properties taken from its @EzyPrototype annotation. */
    @Override
    public void addSupplier(String objectName, EzyPrototypeSupplier supplier) {
        Class<?> type = supplier.getObjectType();
        addSupplier(objectName, supplier, getProperties(type));
    }

    /**
     * Registers a supplier under (objectName, type) with the given properties.
     * A no-op when that exact key is already registered. Also registers the
     * supplier for every super class and interface of the type, without
     * overwriting existing registrations.
     */
    @Override
    public void addSupplier(
            String objectName, EzyPrototypeSupplier supplier, Map properties) {
        Class<?> type = supplier.getObjectType();
        EzyBeanKey key = of(objectName, type);
        if(supplierByKey.containsKey(key))
            return;
        supplierSet.add(supplier);
        supplierByKey.put(key, supplier);
        suppliersByProperties.put(supplier, properties);
        String defname = getDefaultBeanName(type);
        mapBeanName(defname, type, objectName);
        Set<Class> subTypes = flatSuperAndInterfaceClasses(type, true);
        for(Class<?> subType : subTypes)
            checkAndAddSupplier(objectName, subType, supplier);
    }

    // Registers the supplier under (objectName, type) only if the key is free.
    private void checkAndAddSupplier(
            String objectName, Class<?> type, EzyPrototypeSupplier supplier) {
        EzyBeanKey key = of(objectName, type);
        if(supplierByKey.containsKey(key))
            return;
        supplierByKey.put(key, supplier);
    }

    // Derives the prototype bean name from the class.
    private String getBeanName(Class<?> type) {
        return EzyBeanNameParser.getPrototypeName(type);
    }

    // Extracts the key/value properties from the type's @EzyPrototype
    // annotation; empty map when the annotation is absent.
    private Map getProperties(Class<?> type) {
        EzyPrototype ann = type.getAnnotation(EzyPrototype.class);
        Map properties = new HashMap<>();
        EzyKeyValue[] keyValues = ann != null
                ? ann.properties()
                : new EzyKeyValue[0];
        Arrays.stream(keyValues).forEach(kv -> properties.put(kv.key(), kv.value()));
        return properties;
    }
}
apache-2.0
opetrovski/development
oscm-app/javasrc/org/oscm/app/dao/TemplateFileDAO.java
2973
/*******************************************************************************
 *
 *  Copyright FUJITSU LIMITED 2017
 *
 *  Creation Date: Mar 31, 2017
 *
 *******************************************************************************/

package org.oscm.app.dao;

import java.util.Collections;
import java.util.Date;
import java.util.List;

import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;

import org.oscm.app.domain.TemplateFile;

/**
 * Data access object for template files.
 *
 * @author miethaner
 */
@Stateless
@LocalBean
public class TemplateFileDAO {

    @PersistenceContext(name = "persistence/em", unitName = "oscm-app")
    public EntityManager em;

    /**
     * Looks up the template file identified by its unique
     * (fileName, controllerId) pair.
     *
     * @return the matching entity, or null when none exists
     */
    public TemplateFile getTemplateFileByUnique(String fileName,
            String controllerId) {
        try {
            return em
                    .createNamedQuery("TemplateFile.getForFileAndControllerId",
                            TemplateFile.class)
                    .setParameter("fileName", fileName)
                    .setParameter("controllerId", controllerId)
                    .getSingleResult();
        } catch (NoResultException e) {
            return null;
        }
    }

    /**
     * Returns all template files registered for the given controller.
     */
    public List<TemplateFile> getTemplateFilesByControllerId(
            String controllerId) {
        TypedQuery<TemplateFile> query = em.createNamedQuery(
                "TemplateFile.getForControllerId", TemplateFile.class);
        query.setParameter("controllerId", controllerId);
        try {
            return query.getResultList();
        } catch (NoResultException e) {
            return Collections.emptyList();
        }
    }

    /**
     * Persists a new template file or merges it over the stored one,
     * stamping the current time as its last change.
     */
    public void saveTemplateFile(TemplateFile file) {
        file.setLastChange(new Date());
        TemplateFile existing = find(file);
        if (existing != null) {
            // Reuse the stored technical key so merge updates in place.
            file.setTkey(existing.getTkey());
            em.merge(file);
        } else {
            em.persist(file);
        }
        em.flush();
    }

    /**
     * Removes the stored counterpart of the given template file.
     */
    public void deleteTemplateFile(TemplateFile file) {
        em.remove(find(file));
        em.flush();
    }

    /**
     * Resolves the persistent entity for the given file: first by technical
     * key, then by the unique (fileName, controllerId) pair.
     */
    public TemplateFile find(TemplateFile file) {
        TemplateFile stored = em.find(TemplateFile.class,
                Long.valueOf(file.getTkey()));
        if (stored == null) {
            stored = getTemplateFileByUnique(file.getFileName(),
                    file.getControllerId());
        }
        return stored;
    }
}
apache-2.0
danielsavior/HelpDesk
src/helpdesk/controllers/OfficeController.java
1316
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package helpdesk.controllers;

import helpdesk.DAO.OfficeDAO;
import helpdesk.beans.Office;
import java.util.List;

/**
 * Controller that delegates Office CRUD operations to {@link OfficeDAO}.
 * A fresh DAO instance is created per call, matching the original behavior.
 *
 * @author daniel
 */
public class OfficeController implements ITodosController<Office> {

    private OfficeDAO dao;

    /** Inserts the office; failures are logged to stderr and swallowed. */
    @Override
    public void insert(Office objeto) {
        dao = new OfficeDAO();
        try {
            dao.insert(objeto);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Not implemented yet. */
    @Override
    public void delete(Office objeto) {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /** Updates the office; any DAO exception propagates to the caller. */
    @Override
    public void update(Office objeto) {
        dao = new OfficeDAO();
        dao.update(objeto);
    }

    /** Loads a single office by its id. */
    @Override
    public Office buscarPorID(long id) {
        dao = new OfficeDAO();
        return dao.buscaPorID(id);
    }

    /** Not implemented yet. */
    @Override
    public List<Office> buscarPorNome(String nome) {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /** Lists all offices; returns null when the lookup fails. */
    @Override
    public List<Office> listar() {
        try {
            dao = new OfficeDAO();
            return dao.listar("Office");
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}
apache-2.0
oehme/analysing-gradle-performance
my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p70/Production1400.java
2212
package org.gradle.test.performance.mediummonolithicjavaproject.p70;

import org.gradle.test.performance.mediummonolithicjavaproject.p69.Production1391;
import org.gradle.test.performance.mediummonolithicjavaproject.p69.Production1395;
import org.gradle.test.performance.mediummonolithicjavaproject.p69.Production1399;

/**
 * Generated bean used in Gradle build-performance test projects. Holds three
 * references to neighboring generated classes plus seven String properties,
 * each with a plain getter/setter pair and no further logic.
 */
public class Production1400 {

    private Production1391 property0;

    public Production1391 getProperty0() {
        return property0;
    }

    public void setProperty0(Production1391 value) {
        property0 = value;
    }

    private Production1395 property1;

    public Production1395 getProperty1() {
        return property1;
    }

    public void setProperty1(Production1395 value) {
        property1 = value;
    }

    private Production1399 property2;

    public Production1399 getProperty2() {
        return property2;
    }

    public void setProperty2(Production1399 value) {
        property2 = value;
    }

    private String property3;

    public String getProperty3() {
        return property3;
    }

    public void setProperty3(String value) {
        property3 = value;
    }

    private String property4;

    public String getProperty4() {
        return property4;
    }

    public void setProperty4(String value) {
        property4 = value;
    }

    private String property5;

    public String getProperty5() {
        return property5;
    }

    public void setProperty5(String value) {
        property5 = value;
    }

    private String property6;

    public String getProperty6() {
        return property6;
    }

    public void setProperty6(String value) {
        property6 = value;
    }

    private String property7;

    public String getProperty7() {
        return property7;
    }

    public void setProperty7(String value) {
        property7 = value;
    }

    private String property8;

    public String getProperty8() {
        return property8;
    }

    public void setProperty8(String value) {
        property8 = value;
    }

    private String property9;

    public String getProperty9() {
        return property9;
    }

    public void setProperty9(String value) {
        property9 = value;
    }
}
apache-2.0
shwetasshinde24/Panoply
patched-driver-sdk/customSDK/Linux_SGXEclipsePlugin/build_directory/plugins/com.intel.sgx/src/com/intel/sgx/handlers/AddEnclave.java
6667
///////////////////////////////////////////////////////////////////////////
// Copyright (c) 2016 Intel Corporation.				 //
// 									 //
// All rights reserved. This program and the accompanying materials	 //
// are made available under the terms of the Eclipse Public License v1.0 //
// which accompanies this distribution, and is available at		 //
// http://www.eclipse.org/legal/epl-v10.html				 //
// 									 //
// Contributors:							 //
//	Intel Corporation - initial implementation and documentation	 //
///////////////////////////////////////////////////////////////////////////


package com.intel.sgx.handlers;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Map;

import org.eclipse.cdt.core.templateengine.TemplateCore;
import org.eclipse.cdt.core.templateengine.TemplateEngine;
import org.eclipse.cdt.managedbuilder.core.ManagedBuildManager;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.commands.IHandler;
import org.eclipse.core.commands.IHandlerListener;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.jface.dialogs.InputDialog;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.handlers.HandlerUtil;

import com.intel.sgx.Activator;
import com.intel.sgx.dialogs.AddEnclaveFileDialog;
import com.intel.sgx.preferences.PreferenceConstants;

/**
 * Eclipse command handler that adds an SGX enclave to the selected project by
 * instantiating one of four CDT template bundles (C/C++ x minimal/sample),
 * driven by the values collected from {@link AddEnclaveFileDialog}.
 */
public class AddEnclave extends ModuleCreationBaseHandler implements IHandler {

    // Enclave EDL base name entered in the dialog (set via setFilename).
    public String edlFilename = "";
    // Path to the Makefile location chosen in the dialog.
    public String linuxMakePath = "";

    @Override
    public void addHandlerListener(IHandlerListener handlerListener) {
    }

    @Override
    public void dispose() {
    }

    /**
     * Shows the add-enclave dialog, derives template parameters from the
     * dialog input and the current selection, executes the matching CDT
     * template, and refreshes the project.
     *
     * @return always null (per IHandler convention for no result)
     */
    @Override
    public Object execute(ExecutionEvent event) throws ExecutionException {
        String edlBasename,linuxPath,enclaveBasename;
        IProject project = null;
        // Display display = Display.getCurrent();
        // Shell shell = new Shell(display);
        Shell shell = null;
        AddEnclaveFileDialog dialog = new AddEnclaveFileDialog(shell, this);
        if (dialog.open() != InputDialog.OK) {
            return null;
        }
        if((edlFilename.isEmpty())){
            System.err.println("No Enclave selected to Import.");
            return null;
        }
        edlBasename = edlFilename;
        // Resolve the target project from the current workbench selection.
        ISelection selection = HandlerUtil.getCurrentSelection(event);
        Object element = null;
        if (selection instanceof IStructuredSelection) {
            element = ((IStructuredSelection) selection).getFirstElement();
            if (element instanceof IResource) {
                project = ((IResource) element).getProject();
            }
        }
        // NOTE(review): if the selection is not an IStructuredSelection or the
        // element is not an IResource, 'project' stays null and the call below
        // throws NPE instead of printing the error — confirm and add a null check.
        if (!project.exists()) {
            System.err.println("Error: Project not found");
            return null;
        }
        // NOTE(review): this loop body is empty; presumably left over from
        // nature-checking code — confirm whether it can be removed.
        for (IConfigurationElement i : Platform.getExtensionRegistry().getConfigurationElementsFor("org.eclipse.core.resources.projectNature")){
        }
        // Derive the workspace-relative Makefile path and the enclave base
        // name, depending on whether the Makefile lives under an "sgx" folder.
        IPath linuxMkRelPath = (Path.fromOSString(linuxMakePath)).makeRelativeTo(project.getLocation().append("sgx").append("enclave_"+edlBasename));
        if(linuxMkRelPath.removeLastSegments(1).lastSegment().toString().equalsIgnoreCase("sgx")){
            linuxPath = linuxMkRelPath.removeLastSegments(3).toOSString();
            enclaveBasename = linuxMkRelPath.removeLastSegments(2).lastSegment().toString();
        }
        else{
            linuxPath = linuxMkRelPath.removeLastSegments(2).toOSString();
            enclaveBasename = linuxMkRelPath.removeLastSegments(1).lastSegment().toString();
        }
        IProgressMonitor monitor = new NullProgressMonitor();
        // Pick the template by project language (C vs C++) and whether the
        // user asked for a sample application.
        TemplateCore template = null;
        if(isCPProject(project))
            if(dialog.generateApp()) {
                template = TemplateEngine.getDefault().getTemplateById("SGXEnclaveC++WithSample");
            } else {
                template = TemplateEngine.getDefault().getTemplateById("SGXEnclaveC++Minimal");
            }
        else
            if(dialog.generateApp()) {
                template = TemplateEngine.getDefault().getTemplateById("SGXEnclaveCWithSample");
            } else {
                template = TemplateEngine.getDefault().getTemplateById("SGXEnclaveCMinimal");
            }
        // Feed the template variables consumed by the SGX template bundles.
        Map<String, String> valueStore = template.getValueStore();
        valueStore.put("projectName", project.getName());
        valueStore.put("workspacePath", linuxPath);
        valueStore.put("baseName", enclaveBasename);
        valueStore.put("enclaveName",edlFilename);
        valueStore.put("EnclaveName",capitalize(edlFilename));
        valueStore.put("ENCLAVENAME",edlFilename.toUpperCase());
        valueStore.put("SdkPathFromPlugin", Activator.getDefault().getPreferenceStore().getString(PreferenceConstants.SDK_PATH));
        IStatus[] statuses = template.executeTemplateProcesses(monitor, false);
        // NOTE(review): the returned statuses are iterated but ignored;
        // template failures are currently silent — confirm intended.
        for(IStatus e: statuses) {
        }
        ManagedBuildManager.saveBuildInfo(project, true);
        try {
            project.refreshLocal(IResource.DEPTH_INFINITE, null);
        } catch (CoreException e) {
            Activator.log(e);
            e.printStackTrace();
        }
        return null;
    }

    // Upper-cases the first character of the given string.
    private String capitalize(final String line) {
        return Character.toUpperCase(line.charAt(0)) + line.substring(1);
    }

    @Override
    public boolean isEnabled() {
        return true;
    }

    @Override
    public boolean isHandled() {
        return true;
    }

    @Override
    public void removeHandlerListener(IHandlerListener handlerListener) {
    }

    /** Callback used by the dialog to hand back the chosen EDL file name. */
    public void setFilename(String filename) {
        edlFilename = filename;
    }

    /**
     * Copies a regular file to the destination using a 4092-byte buffer.
     * Silently returns when either argument is null or the source is not a
     * regular file; I/O errors are logged and swallowed.
     */
    public static void copyFile(File source, File dest) throws IOException {
        byte[] bytes = new byte[4092];
        if (source != null && dest != null) {
            if (source.isFile()) {
                FileInputStream in = null;
                FileOutputStream out = null;
                try {
                    in = new FileInputStream(source);
                    out = new FileOutputStream(dest);
                    int len;
                    while ((len = in.read(bytes)) != -1) {
                        out.write(bytes, 0, len);
                    }
                } catch (Exception e) {
                    Activator.log(e);
                    System.err.println("Error: " + e.toString());
                } finally {
                    try {
                        if (in != null)
                            in.close();
                    } finally {
                        if (out != null)
                            out.close();
                    }
                }
            }
        }
    }
}
apache-2.0
OpenXIP/xip-app-tutorial
src/XipHostedAppFrame.java
2391
/*
Copyright (c) 2013, Washington University in St.Louis.
All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.awt.Dimension;

import javax.swing.JFrame;
import javax.swing.JPanel;

import org.nema.dicom.wg23.Rectangle;

/**
 * <font face="Tahoma" size="2">
 * <br></br>
 * @version May 2009
 * @author Lawrence Tarbox
 * </font>
 * This file is derived from ApplicationFrameTempl.java in the
 * edu.wustl.xipApplication.samples directory, coupled with
 * sections extracted out of rev. 216 on trunk of ImageAnnotation.java
 */
public class XipHostedAppFrame extends JFrame {

    private static final long serialVersionUID = 1L;

    // Content panel holding the hosted application's display.
    XipHostedAppPanel appPanel = new XipHostedAppPanel();
    // Back-reference to the hosting application (screen geometry, output dir).
    XipHostedApp mainApp;

    /**
     * Builds an undecorated frame sized to the screen area granted by the
     * hosting system, wires the main app as output listener, and shows it.
     */
    public XipHostedAppFrame (XipHostedApp mainAppIn){
        mainApp = mainAppIn;
        setUndecorated(true);
        /*Set application dimensions */
        Rectangle rect = mainApp.getClientToHost().getAvailableScreen(null);
        setBounds(rect.getRefPointX(), rect.getRefPointY(), rect.getWidth(), rect.getHeight());
        // previously in ImageAnnotation
        appPanel.setVisible(false);
        appPanel.addOutputAvailableListener(mainApp);
        setContentPane(appPanel);
        setVisible(true);
    }

    // NOTE(review): this returns the frame's preferred size, not the panel's
    // own size — confirm that is the intended contract.
    public Dimension getAppPanelDimension(){
        return getPreferredSize();
    }

    public void setAppPanelDimension(Dimension size){
        setPreferredSize(size);
    }

    /** Exposes the hosted application's display panel. */
    public JPanel getDisplayPanel(){
        return appPanel;
    }

    /**
     * Points the scene graph at the given DICOM inputs: sets the host-provided
     * output directory on the panel, loads the DICOM files into the Open
     * Inventor canvas, then makes the panel visible and repaints.
     */
    public void setSceneGraphInputs(String inputs) {
        String outDir = mainApp.getClientToHost().getOutputDir();
        appPanel.setOutputDir(outDir);
        //update scene graph
        if(appPanel.getIvCanvas().set("LoadDicom.name", inputs)){
            System.out.println("Load Dicom files Successfully");
            appPanel.getIvCanvas().processQueue();
        }
        appPanel.setVisible(true);
        appPanel.repaint();
    }
}
apache-2.0
NobleAries/MyQuotes
app/src/main/java/io/blacklagoonapps/myquotes/command/Command.java
207
package io.blacklagoonapps.myquotes.command;

/**
 * Minimal command abstraction: a single parameterless, result-less action.
 */
public interface Command {

    /** Executes the command's action. */
    void execute();

    /** Shared no-op instance, usable as a null-object default. */
    Command NO_OPERATION = new Command() {
        @Override
        public void execute() {
        }
    };
}
apache-2.0
maheshika/carbon-appmgt
components/appmgt/org.wso2.carbon.appmgt.usage/org.wso2.carbon.appmgt.usage.publisher/src/main/java/org/wso2/carbon/appmgt/usage/publisher/APIMgtFaultHandler.java
4062
package org.wso2.carbon.appmgt.usage.publisher;

import org.apache.synapse.MessageContext;
import org.apache.synapse.SynapseConstants;
import org.apache.synapse.mediators.AbstractMediator;
import org.wso2.carbon.appmgt.usage.publisher.dto.FaultPublisherDTO;
import org.wso2.carbon.appmgt.usage.publisher.internal.UsageComponent;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;

/**
 * Synapse mediator that publishes a fault event (via the configured
 * {@link APIMgtUsageDataPublisher}) whenever a message hits the fault sequence.
 * Publishing is a best-effort side channel: {@link #mediate(MessageContext)}
 * always returns {@code true} so the message flow is never interrupted.
 */
public class APIMgtFaultHandler extends AbstractMediator{

    // Read once from the usage-publisher configuration at instantiation time.
    private boolean enabled = UsageComponent.getApiMgtConfigReaderService().isEnabled();

    private volatile APIMgtUsageDataPublisher publisher;

    // Fully-qualified class name of the publisher implementation to load reflectively.
    private String publisherClass = UsageComponent.getApiMgtConfigReaderService().getPublisherClass();

    /**
     * Instantiates and initialises the configured publisher when usage
     * publishing is enabled; reflection failures are logged and leave
     * {@code publisher} null (mediate will then NPE into its catch-all).
     *
     * NOTE(review): the null-check / synchronized(this) / re-check here is
     * double-checked-locking boilerplate, but {@code publisher} is an
     * instance field being set from the constructor, so no other thread can
     * race on it — the locking is ineffective as written. If the intent was
     * one publisher per JVM, the field should be static; confirm against the
     * matching usage-handler classes in this module.
     */
    public APIMgtFaultHandler(){
        if (!enabled) {
            return;
        }
        if (publisher == null) {
            synchronized (this){
                if (publisher == null) {
                    try {
                        log.debug("Instantiating Data Publisher");
                        publisher = (APIMgtUsageDataPublisher)Class.forName(publisherClass).newInstance();
                        publisher.init();
                    } catch (ClassNotFoundException e) {
                        log.error("Class not found " + publisherClass);
                    } catch (InstantiationException e) {
                        log.error("Error instantiating " + publisherClass);
                    } catch (IllegalAccessException e) {
                        log.error("Illegal access to " + publisherClass);
                    }
                }
            }
        }
    }

    /**
     * Collects fault details from the message context into a
     * {@link FaultPublisherDTO} and publishes it.
     *
     * @param messageContext Synapse message context carrying the
     *        APIMgtUsagePublisherConstants.* properties set upstream
     * @return always {@code true} — publishing failures must never stop the
     *         message flow, hence the deliberate catch of {@link Throwable}
     */
    public boolean mediate(MessageContext messageContext) {
        try{
            if (!enabled) {
                return true;
            }
            long requestTime = ((Long) messageContext.getProperty(APIMgtUsagePublisherConstants.REQUEST_TIME)).longValue();
            FaultPublisherDTO faultPublisherDTO = new FaultPublisherDTO();
            faultPublisherDTO.setConsumerKey((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.CONSUMER_KEY));
            faultPublisherDTO.setContext((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.CONTEXT));
            faultPublisherDTO.setApi_version((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.API_VERSION));
            faultPublisherDTO.setApi((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.API));
            faultPublisherDTO.setResource((String) messageContext.getProperty(APIMgtUsagePublisherConstants.RESOURCE));
            faultPublisherDTO.setMethod((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.HTTP_METHOD));
            faultPublisherDTO.setVersion((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.VERSION));
            // Synapse error details describing the fault itself.
            faultPublisherDTO.setErrorCode(String.valueOf(messageContext.getProperty(
                    SynapseConstants.ERROR_CODE)));
            faultPublisherDTO.setErrorMessage((String) messageContext.getProperty(
                    SynapseConstants.ERROR_MESSAGE));
            faultPublisherDTO.setRequestTime(requestTime);
            faultPublisherDTO.setUsername((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.USER_ID));
            // Tenant is derived from the username (user@tenant.domain convention).
            faultPublisherDTO.setTenantDomain(MultitenantUtils.getTenantDomain(
                    faultPublisherDTO.getUsername()));
            faultPublisherDTO.setHostName((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.HOST_NAME));
            faultPublisherDTO.setApiPublisher((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.API_PUBLISHER));
            faultPublisherDTO.setApplicationName((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.APPLICATION_NAME));
            faultPublisherDTO.setApplicationId((String) messageContext.getProperty(
                    APIMgtUsagePublisherConstants.APPLICATION_ID));
            publisher.publishEvent(faultPublisherDTO);
        }catch (Throwable e){
            log.error("Cannot publish event. " + e.getMessage(), e);
        }
        return true; // Should never stop the message flow
    }
}
apache-2.0
huangxingzhe/lottery
src/main/java/com/tommy/model/Remark.java
893
package com.tommy.model; import java.io.Serializable; import java.util.Date; public class Remark implements Serializable { // private Integer id; // private Integer startNo; // private String content; // private Date createTime; static final long serialVersionUID = 1L; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public Integer getStartNo() { return startNo; } public void setStartNo(Integer startNo) { this.startNo = startNo; } public String getContent() { return content; } public void setContent(String content) { this.content = content; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } }
apache-2.0
trasa/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/DescribeVpcEndpointServicesResult.java
7445
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;

/**
 * Result of a DescribeVpcEndpointServices call: the names of the supported
 * AWS services plus a pagination token for fetching further pages.
 */
public class DescribeVpcEndpointServicesResult implements Serializable, Cloneable {

    /** Names of the supported AWS services; lazily auto-constructed on read. */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> serviceNames;

    /** Token for the next page of items; empty when nothing more remains. */
    private String nextToken;

    /**
     * Returns the list of supported AWS services. Never {@code null}: an
     * empty auto-construct list is created on first access.
     *
     * @return the (possibly empty) list of service names
     */
    public java.util.List<String> getServiceNames() {
        if (serviceNames == null) {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> fresh =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
            fresh.setAutoConstruct(true);
            serviceNames = fresh;
        }
        return serviceNames;
    }

    /**
     * Replaces the list of supported AWS services with a defensive copy of
     * the given collection ({@code null} clears it).
     *
     * @param serviceNames service names to store, or {@code null}
     */
    public void setServiceNames(java.util.Collection<String> serviceNames) {
        this.serviceNames = (serviceNames == null) ? null : copyOf(serviceNames);
    }

    /** Copies a caller-supplied collection into an internal list instance. */
    private static com.amazonaws.internal.ListWithAutoConstructFlag<String> copyOf(
            java.util.Collection<String> source) {
        com.amazonaws.internal.ListWithAutoConstructFlag<String> copy =
                new com.amazonaws.internal.ListWithAutoConstructFlag<String>(source.size());
        copy.addAll(source);
        return copy;
    }

    /**
     * Appends the given service names to the existing list (if any). Use
     * {@link #setServiceNames(java.util.Collection)} or
     * {@link #withServiceNames(java.util.Collection)} to overwrite instead.
     *
     * @param serviceNames service names to append
     * @return this object, for call chaining
     */
    public DescribeVpcEndpointServicesResult withServiceNames(String... serviceNames) {
        if (getServiceNames() == null) {
            setServiceNames(new java.util.ArrayList<String>(serviceNames.length));
        }
        java.util.List<String> target = getServiceNames();
        for (String name : serviceNames) {
            target.add(name);
        }
        return this;
    }

    /**
     * Replaces the list of supported AWS services.
     *
     * @param serviceNames service names to store, or {@code null}
     * @return this object, for call chaining
     */
    public DescribeVpcEndpointServicesResult withServiceNames(java.util.Collection<String> serviceNames) {
        setServiceNames(serviceNames);
        return this;
    }

    /**
     * Returns the token to use when requesting the next set of items, or an
     * empty string when there are no additional items.
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Sets the token to use when requesting the next set of items.
     *
     * @param nextToken pagination token; empty when no further items exist
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Sets the pagination token and returns this object for chaining.
     *
     * @param nextToken pagination token; empty when no further items exist
     * @return this object, for call chaining
     */
    public DescribeVpcEndpointServicesResult withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getServiceNames() != null) {
            text.append("ServiceNames: " + getServiceNames() + ",");
        }
        if (getNextToken() != null) {
            text.append("NextToken: " + getNextToken() );
        }
        return text.append("}").toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hash = 1;
        hash = prime * hash + (getServiceNames() == null ? 0 : getServiceNames().hashCode());
        hash = prime * hash + (getNextToken() == null ? 0 : getNextToken().hashCode());
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeVpcEndpointServicesResult)) {
            return false;
        }
        DescribeVpcEndpointServicesResult that = (DescribeVpcEndpointServicesResult) obj;
        if (!equalOrBothNull(this.getServiceNames(), that.getServiceNames())) {
            return false;
        }
        return equalOrBothNull(this.getNextToken(), that.getNextToken());
    }

    /** Null-safe equality helper for the equals contract above. */
    private static boolean equalOrBothNull(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public DescribeVpcEndpointServicesResult clone() {
        try {
            return (DescribeVpcEndpointServicesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }
}
apache-2.0
HuaweiBigData/carbondata
core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
7077
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.carbondata.core.metadata.schema.table;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.schema.BucketingInfo;
import org.apache.carbondata.core.metadata.schema.PartitionInfo;
import org.apache.carbondata.core.metadata.schema.SchemaEvolution;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;

/**
 * Persisting the table information.
 *
 * Identity (equals/hashCode) is defined by {@code tableId} + {@code tableName}
 * only. The {@link #write(DataOutput)} / {@link #readFields(DataInput)} pair
 * must keep their field order in sync — they are the wire format.
 */
public class TableSchema implements Serializable, Writable {

  /**
   * serialization version
   */
  private static final long serialVersionUID = -1928614587722507026L;

  /**
   * table id
   */
  private String tableId;

  /**
   * table Name
   */
  private String tableName;

  /**
   * Columns in the table
   */
  private List<ColumnSchema> listOfColumns;

  /**
   * History of schema evolution of this table
   * (field name keeps the historical "Evalution" spelling — part of the API)
   */
  private SchemaEvolution schemaEvalution;

  /**
   * contains all key value pairs for table properties set by user in create DDL
   */
  private Map<String, String> tableProperties;

  /**
   * Information about bucketing of fields and number of buckets
   */
  private BucketingInfo bucketingInfo;

  /**
   * Information about partition type, partition column, numbers
   */
  private PartitionInfo partitionInfo;

  public TableSchema() {
    // Pre-sized to the project-wide default to avoid early list growth.
    this.listOfColumns = new ArrayList<ColumnSchema>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
  }

  /**
   * @return the tableId
   */
  public String getTableId() {
    return tableId;
  }

  /**
   * @param tableId the tableId to set
   */
  public void setTableId(String tableId) {
    this.tableId = tableId;
  }

  /**
   * @return the listOfColumns
   */
  public List<ColumnSchema> getListOfColumns() {
    return listOfColumns;
  }

  /**
   * @param listOfColumns the listOfColumns to set
   */
  public void setListOfColumns(List<ColumnSchema> listOfColumns) {
    this.listOfColumns = listOfColumns;
  }

  /**
   * @return the schemaEvalution
   */
  public SchemaEvolution getSchemaEvalution() {
    return schemaEvalution;
  }

  /**
   * @param schemaEvalution the schemaEvalution to set
   */
  public void setSchemaEvalution(SchemaEvolution schemaEvalution) {
    this.schemaEvalution = schemaEvalution;
  }

  /**
   * @return the tableName
   */
  public String getTableName() {
    return tableName;
  }

  /**
   * @param tableName the tableName to set
   */
  public void setTableName(String tableName) {
    this.tableName = tableName;
  }

  // hashCode/equals consider only tableId and tableName, matching the
  // notion that those two uniquely identify a table schema.
  @Override public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
    result = prime * result + ((tableName == null) ? 0 : tableName.hashCode());
    return result;
  }

  @Override public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    TableSchema other = (TableSchema) obj;
    if (tableId == null) {
      if (other.tableId != null) {
        return false;
      }
    } else if (!tableId.equals(other.tableId)) {
      return false;
    }
    if (tableName == null) {
      if (other.tableName != null) {
        return false;
      }
    } else if (!tableName.equals(other.tableName)) {
      return false;
    }
    return true;
  }

  /**
   * @return user-set table properties from the create DDL (may be null)
   */
  public Map<String, String> getTableProperties() {
    return tableProperties;
  }

  /**
   * @param tableProperties table properties to set
   */
  public void setTableProperties(Map<String, String> tableProperties) {
    this.tableProperties = tableProperties;
  }

  public BucketingInfo getBucketingInfo() {
    return bucketingInfo;
  }

  public void setBucketingInfo(BucketingInfo bucketingInfo) {
    this.bucketingInfo = bucketingInfo;
  }

  public PartitionInfo getPartitionInfo() {
    return partitionInfo;
  }

  public void setPartitionInfo(PartitionInfo partitionInfo) {
    this.partitionInfo = partitionInfo;
  }

  /**
   * Serializes this schema. Order: tableId, tableName, column count +
   * columns, optional partitionInfo (presence flag first), optional
   * bucketingInfo (presence flag first). Must mirror readFields().
   */
  @Override public void write(DataOutput out) throws IOException {
    out.writeUTF(tableId);
    out.writeUTF(tableName);
    out.writeInt(listOfColumns.size());
    for (ColumnSchema column : listOfColumns) {
      column.write(out);
    }
    if (null != partitionInfo) {
      out.writeBoolean(true);
      partitionInfo.write(out);
    } else {
      out.writeBoolean(false);
    }
    if (null != bucketingInfo) {
      out.writeBoolean(true);
      bucketingInfo.write(out);
    } else {
      out.writeBoolean(false);
    }
  }

  /**
   * Deserializes this schema; field order must match write() exactly.
   * Note: schemaEvalution and tableProperties are intentionally not part of
   * the wire format here.
   */
  @Override public void readFields(DataInput in) throws IOException {
    this.tableId = in.readUTF();
    this.tableName = in.readUTF();
    int listSize = in.readInt();
    this.listOfColumns = new ArrayList<>(listSize);
    for (int i = 0; i < listSize; i++) {
      ColumnSchema schema = new ColumnSchema();
      schema.readFields(in);
      this.listOfColumns.add(schema);
    }
    boolean partitionExists = in.readBoolean();
    if (partitionExists) {
      this.partitionInfo = new PartitionInfo();
      this.partitionInfo.readFields(in);
    }
    boolean bucketingExists = in.readBoolean();
    if (bucketingExists) {
      this.bucketingInfo = new BucketingInfo();
      this.bucketingInfo.readFields(in);
    }
  }

  /**
   * Below method will be used to build child schema object which will be stored in
   * parent table
   *
   * @param dataMapName  name of the data map
   * @param className    data map provider class name
   * @param databaseName database the parent table lives in
   * @param queryString  child-select query text
   * @param queryType    query type tag stored alongside the query
   * @return a DataMapSchema whose child schema is this table schema
   */
  public DataMapSchema buildChildSchema(String dataMapName, String className, String databaseName,
      String queryString, String queryType) {
    RelationIdentifier relationIdentifier =
        new RelationIdentifier(databaseName, tableName, tableId);
    Map<String, String> properties = new HashMap<>();
    // NOTE(review): this key literally contains a space ("CHILD_SELECT QUERY");
    // readers must use the exact same literal.
    properties.put("CHILD_SELECT QUERY", queryString);
    properties.put("QUERYTYPE", queryType);
    DataMapSchema dataMapSchema = new DataMapSchema(dataMapName, className);
    dataMapSchema.setProperties(properties);
    dataMapSchema.setChildSchema(this);
    dataMapSchema.setRelationIdentifier(relationIdentifier);
    return dataMapSchema;
  }
}
apache-2.0
wangqi/gameserver
server/src/main/script/script/CraftCalDiamond.java
1829
package script; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.xinqihd.sns.gameserver.config.equip.WeaponPojo; import com.xinqihd.sns.gameserver.db.mongo.EquipManager; import com.xinqihd.sns.gameserver.entity.user.PropData; import com.xinqihd.sns.gameserver.entity.user.User; import com.xinqihd.sns.gameserver.forge.ComposeStatus; import com.xinqihd.sns.gameserver.script.ScriptManager; import com.xinqihd.sns.gameserver.script.ScriptResult; /** * 计算武器升级需要的钻石数量 * */ public class CraftCalDiamond { private static final Logger logger = LoggerFactory .getLogger(CraftCalDiamond.class); /** * 每提升10级,需要的钻石数量+10颗,颜色每提升1个等级,则需要1.2倍钻石数量 1 1 2 10 3 45 4 80 5 125 6 180 7 245 8 320 9 405 */ public static ScriptResult func(Object[] parameters) { ScriptResult result = ScriptManager.checkParameters(parameters, 2); if (result != null) { return result; } User user = (User) parameters[0]; PropData propData = (PropData) parameters[1]; int count = 0; WeaponPojo weapon = EquipManager.getInstance().getWeaponById(propData.getItemId()); if ( weapon != null ) { int userLevel = weapon.getUserLevel(); if ( userLevel == 0 ) { count = 1; } else if ( userLevel == 10 ) { count = 10; } else if ( userLevel < 90 ) { count = (userLevel/10+1)*(userLevel/10+1)*5; int colorIndex = propData.getWeaponColor().ordinal(); if ( colorIndex > 0 ) { count = Math.round(count * 1.2f * colorIndex); } } } List list = new ArrayList(); list.add(count); result = new ScriptResult(); result.setType(ScriptResult.Type.SUCCESS_RETURN); result.setResult(list); return result; } }
apache-2.0
iraupph/tictactoe-android
AndEngineGLES2/src/org/andengine/util/adt/data/operator/LongOperator.java
2261
package org.andengine.util.adt.data.operator; /** * (c) Zynga 2012 * * @author Nicolas Gramlich <ngramlich@zynga.com> * @since 14:24:15 - 02.02.2012 */ public enum LongOperator { // =========================================================== // Elements // =========================================================== EQUALS() { @Override public boolean check(final long pLongA, final long pLongB) { return pLongA == pLongB; } }, NOT_EQUALS() { @Override public boolean check(final long pLongA, final long pLongB) { return pLongA != pLongB; } }, LESS_THAN() { @Override public boolean check(final long pLongA, final long pLongB) { return pLongA < pLongB; } }, LESS_OR_EQUAL_THAN() { @Override public boolean check(final long pLongA, final long pLongB) { return pLongA <= pLongB; } }, MORE_THAN() { @Override public boolean check(final long pLongA, final long pLongB) { return pLongA > pLongB; } }, MORE_OR_EQUAL_THAN() { @Override public boolean check(final long pLongA, final long pLongB) { return pLongA >= pLongB; } }; // =========================================================== // Constants // =========================================================== // =========================================================== // Fields // =========================================================== // =========================================================== // Constructors // =========================================================== // =========================================================== // Getter & Setter // =========================================================== // =========================================================== // Methods for/from SuperClass/Interfaces // =========================================================== public abstract boolean check(final long pLongA, final long pLongB); // =========================================================== // Methods // =========================================================== // 
=========================================================== // Inner and Anonymous Classes // =========================================================== }
apache-2.0
agwlvssainokuni/springapp
foundation/src/main/java/cherry/foundation/mail/MailSendHandlerImpl.java
3927
/*
 * Copyright 2014,2015 agwlvssainokuni
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package cherry.foundation.mail;

import java.util.List;

import javax.mail.internet.MimeMessage;

import org.joda.time.LocalDateTime;
import org.springframework.mail.SimpleMailMessage;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.mail.javamail.MimeMessagePreparator;
import org.springframework.transaction.annotation.Transactional;

import cherry.foundation.bizdtm.BizDateTime;

/**
 * Default {@link MailSendHandler} implementation. Every message is first
 * persisted through {@link MessageStore}; "later" messages are only stored
 * (a scheduler is expected to pick them up via listMessage/sendMessage),
 * while "now" messages are stored, marked finished, and sent immediately
 * through the injected {@link JavaMailSender}.
 */
public class MailSendHandlerImpl implements MailSendHandler {

	// Business-clock abstraction: supplies "now" for immediate sends.
	private BizDateTime bizDateTime;

	// Persistence for queued/sent messages.
	private MessageStore messageStore;

	// Actual SMTP transport.
	private JavaMailSender mailSender;

	public void setBizDateTime(BizDateTime bizDateTime) {
		this.bizDateTime = bizDateTime;
	}

	public void setMessageStore(MessageStore messageStore) {
		this.messageStore = messageStore;
	}

	public void setMailSender(JavaMailSender mailSender) {
		this.mailSender = mailSender;
	}

	/**
	 * Stores a message for deferred delivery at {@code scheduledAt};
	 * nothing is sent here.
	 *
	 * @return the stored message's id
	 */
	@Transactional
	@Override
	public long sendLater(String launcherId, String messageName, String from,
			List<String> to, List<String> cc, List<String> bcc,
			String replyTo, String subject, String body,
			LocalDateTime scheduledAt) {
		return messageStore.createMessage(launcherId, messageName,
				scheduledAt, from, to, cc, bcc, replyTo, subject, body);
	}

	/**
	 * Stores and immediately sends a message without attachments.
	 * Delegates to the attachment-aware overload with a null preparator.
	 *
	 * @return the stored message's id
	 */
	@Transactional
	@Override
	public long sendNow(String launcherId, String messageName, String from,
			List<String> to, List<String> cc, List<String> bcc,
			String replyTo, String subject, String body) {
		return sendNow(launcherId, messageName, from, to, cc, bcc, replyTo,
				subject, body, null);
	}

	/**
	 * Stores and immediately sends a message, optionally with attachments
	 * supplied by {@code preparator}. The message is marked finished before
	 * the actual transport call.
	 *
	 * @param preparator attachment hook; null means plain-text send
	 * @return the stored message's id
	 */
	@Transactional
	@Override
	public long sendNow(String launcherId, String messageName, String from,
			List<String> to, List<String> cc, List<String> bcc,
			String replyTo, String subject, String body,
			AttachmentPreparator preparator) {
		LocalDateTime now = bizDateTime.now();
		long messageId = messageStore.createMessage(launcherId, messageName,
				now, from, to, cc, bcc, replyTo, subject, body);
		SimpleMailMessage msg = messageStore.getMessage(messageId);
		messageStore.finishMessage(messageId);
		send(msg, preparator);
		return messageId;
	}

	/**
	 * Lists ids of stored messages due at or before {@code dtm}
	 * (delegation to the store; exact cutoff semantics defined there).
	 */
	@Transactional
	@Override
	public List<Long> listMessage(LocalDateTime dtm) {
		return messageStore.listMessage(dtm);
	}

	/**
	 * Sends a previously stored message by id.
	 *
	 * @return false when the id is unknown; true after a send attempt
	 */
	@Transactional
	@Override
	public boolean sendMessage(long messageId) {
		SimpleMailMessage msg = messageStore.getMessage(messageId);
		if (msg == null) {
			return false;
		}
		messageStore.finishMessage(messageId);
		send(msg, null);
		return true;
	}

	// Transport helper: plain SimpleMailMessage when there are no attachments,
	// otherwise a multipart MIME message populated from msg plus the
	// preparator-provided attachments.
	private void send(final SimpleMailMessage msg,
			final AttachmentPreparator preparator) {
		if (preparator == null) {
			mailSender.send(msg);
		} else {
			mailSender.send(new MimeMessagePreparator() {
				@Override
				public void prepare(MimeMessage mimeMessage) throws Exception {
					// true => multipart, required for attachments.
					MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true);
					helper.setFrom(msg.getFrom());
					helper.setTo(msg.getTo());
					helper.setCc(msg.getCc());
					helper.setBcc(msg.getBcc());
					helper.setReplyTo(msg.getReplyTo());
					helper.setSubject(msg.getSubject());
					helper.setText(msg.getText());
					preparator.prepare(new Attachment(helper));
				}
			});
		}
	}
}
apache-2.0
millmanorama/autopsy
Core/src/org/sleuthkit/autopsy/imagewriter/ImageWriter.java
13376
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011-2018 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.imagewriter;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import org.netbeans.api.progress.ProgressHandle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisCompletedEvent;
import org.sleuthkit.datamodel.SleuthkitJNI;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * The ImageWriter class is used to complete VHD copies created from local disks
 * after the ingest process completes. The AddImageTask for this data source must have included
 * a non-empty imageWriterPath parameter to enable Image Writer.
 *
 * Most of the cancellation/cleanup is handled through ImageWriterService
 */
class ImageWriter implements PropertyChangeListener{

    private final Logger logger = Logger.getLogger(ImageWriter.class.getName());

    // Identifies which data source's analysis-completed events this writer reacts to.
    private final Long dataSourceId;
    private final ImageWriterSettings settings;

    private Long imageHandle = null;
    private Future<Integer> finishTask = null;
    private ProgressHandle progressHandle = null;
    private ScheduledFuture<?> progressUpdateTask = null;
    private boolean isCancelled = false;
    private boolean isStarted = false;
    private final Object currentTasksLock = new Object(); // Get this lock before accessing imageHandle, finishTask, progressHandle, progressUpdateTask,
                                                          // isCancelled, isStarted, or isFinished

    // Drives the periodic progress-bar updates; created lazily, only when running with a GUI.
    private ScheduledThreadPoolExecutor periodicTasksExecutor = null;
    private final boolean doUI;
    private SleuthkitCase caseDb = null;

    /**
     * Create the Image Writer object.
     * After creation, startListeners() should be called.
     * @param dataSourceId
     */
    ImageWriter(Long dataSourceId, ImageWriterSettings settings){
        this.dataSourceId = dataSourceId;
        this.settings = settings;
        doUI = RuntimeProperties.runningWithGUI();

        // We save the reference to the sleuthkit case here in case getOpenCase() is set to
        // null before Image Writer finishes. The user can still elect to wait for image writer
        // (in ImageWriterService.closeCaseResources) even though the case is closing.
        try{
            caseDb = Case.getCurrentCaseThrows().getSleuthkitCase();
        } catch (NoCurrentCaseException ex){
            logger.log(Level.SEVERE, "Unable to load case. Image writer will be cancelled.");
            this.isCancelled = true;
        }
    }

    /**
     * Add this ImageWriter object as a listener to the necessary events
     */
    void subscribeToEvents(){
        IngestManager.getInstance().addIngestJobEventListener(this);
    }

    /**
     * Deregister this object from the events. This is ok to call multiple times.
     */
    void unsubscribeFromEvents(){
        IngestManager.getInstance().removeIngestJobEventListener(this);
    }

    /**
     * Handle the events:
     * DATA_SOURCE_ANALYSIS_COMPLETED - start the finish image process and clean up after it is complete
     */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if(evt instanceof DataSourceAnalysisCompletedEvent){

            DataSourceAnalysisCompletedEvent event = (DataSourceAnalysisCompletedEvent)evt;

            if(event.getDataSource() != null){
                long imageId = event.getDataSource().getId();
                String name = event.getDataSource().getName();

                // Check that the event corresponds to this datasource
                if(imageId != dataSourceId){
                    return;
                }
                // Finishing runs on its own thread: this callback must not block the event dispatcher.
                new Thread(() -> {
                    startFinishImage(name);
                }).start();

            } else {
                logger.log(Level.SEVERE, "DataSourceAnalysisCompletedEvent did not contain a dataSource object"); //NON-NLS
            }
        }
    }

    @Messages({
        "# {0} - data source name",
        "ImageWriter.progressBar.message=Finishing acquisition of {0} (unplug device to cancel)"
    })
    private void startFinishImage(String dataSourceName){

        synchronized(currentTasksLock){
            if(isCancelled){
                return;
            }

            // If we've already started the finish process for this datasource, return.
            // Multiple DataSourceAnalysisCompletedEvent events can come from
            // the same image if more ingest modules are run later
            if(isStarted){
                return;
            }

            Image image;
            try{
                image = Case.getCurrentCaseThrows().getSleuthkitCase().getImageById(dataSourceId);
                imageHandle = image.getImageHandle();
            } catch (NoCurrentCaseException ex){
                // This exception means that getOpenCase() failed because no case was open.
                // This can happen when the user closes the case while ingest is ongoing - canceling
                // ingest fires off the DataSourceAnalysisCompletedEvent while the case is in the
                // process of closing.
                logger.log(Level.WARNING, String.format("Case closed before ImageWriter could start the finishing process for %s", dataSourceName));
                return;
            } catch (TskCoreException ex){
                logger.log(Level.SEVERE, "Error loading image", ex);
                return;
            }

            logger.log(Level.INFO, String.format("Finishing VHD image for %s", dataSourceName)); //NON-NLS

            if(doUI){
                periodicTasksExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().setNameFormat("image-writer-progress-update-%d").build()); //NON-NLS
                progressHandle = ProgressHandle.createHandle(Bundle.ImageWriter_progressBar_message(dataSourceName));
                progressHandle.start(100);
                progressUpdateTask = periodicTasksExecutor.scheduleWithFixedDelay(
                        new ProgressUpdateTask(progressHandle, imageHandle), 0, 250, TimeUnit.MILLISECONDS);
            }

            // The added complexity here with the Future is because we absolutely need to make sure
            // the call to finishImageWriter returns before allowing the TSK data structures to be freed
            // during case close.
            finishTask = Executors.newSingleThreadExecutor().submit(new Callable<Integer>(){
                @Override
                public Integer call() throws TskCoreException{
                    try{
                        int result = SleuthkitJNI.finishImageWriter(imageHandle);

                        // We've decided to always update the path to the VHD, even if it wasn't finished.
                        // This supports the case where an analyst has partially ingested a device
                        // but has to stop before completion. They will at least have part of the image.
                        if(settings.getUpdateDatabasePath()){
                            caseDb.updateImagePath(settings.getPath(), dataSourceId);
                        }
                        return result;
                    } catch (TskCoreException ex){
                        logger.log(Level.SEVERE, "Error finishing VHD image", ex); //NON-NLS
                        return -1;
                    }
                }
            });

            // Setting this means that finishTask and all the UI updaters are initialized (if running UI)
            isStarted = true;
        }

        // Wait for finishImageWriter to complete
        int result = 0;
        try{
            // The call to get() can happen multiple times if the user closes the case, which is ok
            result = finishTask.get();
        } catch (InterruptedException | ExecutionException ex){
            logger.log(Level.SEVERE, "Error finishing VHD image", ex); //NON-NLS
        }

        synchronized(currentTasksLock){
            if(doUI){
                // Some of these may be called twice if the user closes the case
                progressUpdateTask.cancel(true);
                progressHandle.finish();
                periodicTasksExecutor.shutdown();
            }
        }

        if(result == 0){
            logger.log(Level.INFO, String.format("Successfully finished writing VHD image for %s", dataSourceName)); //NON-NLS
        } else {
            logger.log(Level.INFO, String.format("Finished VHD image for %s with errors", dataSourceName)); //NON-NLS
        }
    }

    /**
     * If a task hasn't been started yet, set the cancel flag so it can no longer
     * start.
     * This is intended to be used in case close so a job doesn't suddenly start
     * up during cleanup.
     */
    void cancelIfNotStarted(){
        synchronized(currentTasksLock){
            if(! isStarted){
                isCancelled = true;
            }
        }
    }

    /**
     * Check if the finishTask process is running.
     * @return true if the finish task is still going on, false if it is finished or
     *         never started
     */
    boolean jobIsInProgress(){
        synchronized(currentTasksLock){
            return((isStarted) && (! finishTask.isDone()));
        }
    }

    /**
     * Cancels a single job.
     * Does not wait for the job to complete. Safe to call with Image Writer in any state.
     */
    void cancelJob(){
        synchronized(currentTasksLock){
            // All of the following is redundant but safe to call on a complete job
            isCancelled = true;

            if(isStarted){
                SleuthkitJNI.cancelFinishImage(imageHandle);

                // Stop the progress bar update task.
                // The thread from startFinishImage will also stop it
                // once the task completes, but we don't have a guarantee on
                // when that happens.
                // Since we've stopped the update task, we'll stop the associated progress
                // bar now, too.
                if(doUI){
                    progressUpdateTask.cancel(true);
                    progressHandle.finish();
                }
            }
        }
    }

    /**
     * Blocks while all finishImage tasks complete.
     * Also makes sure the progressUpdateTask is canceled.
     */
    void waitForJobToFinish(){
        synchronized(currentTasksLock){
            // Wait for the finish task to end
            if(isStarted){
                try{
                    finishTask.get();
                } catch (InterruptedException | ExecutionException ex){
                    Logger.getLogger(ImageWriter.class.getName()).log(Level.SEVERE, "Error finishing VHD image", ex); //NON-NLS
                }
                if(doUI){
                    progressUpdateTask.cancel(true);
                }
            }
        }
    }

    /**
     * Task to query the Sleuthkit processing to get the percentage done.
     */
    private final class ProgressUpdateTask implements Runnable {
        final long imageHandle;
        final ProgressHandle progressHandle;

        ProgressUpdateTask(ProgressHandle progressHandle, long imageHandle){
            this.imageHandle = imageHandle;
            this.progressHandle = progressHandle;
        }

        @Override
        public void run() {
            try {
                int progress = SleuthkitJNI.getFinishImageProgress(imageHandle);
                progressHandle.progress(progress);
            } catch (Exception ex) {
                // Catch-all: a failed progress poll must never kill the scheduled executor.
                logger.log(Level.SEVERE, "Unexpected exception in ProgressUpdateTask", ex); //NON-NLS
            }
        }
    }
}
apache-2.0
google-code/aliper
aliper-remote/src/main/java/com/alibaba/aliper/remote/proxy/protocol/HandShake.java
488
package com.alibaba.aliper.remote.proxy.protocol;

import java.io.InputStream;
import java.io.OutputStream;

/**
 * Handshake protocol helpers for the remote proxy.
 *
 * <p>All methods are currently unimplemented stubs: the writers emit nothing
 * and the readers return {@code null}. NOTE(review): callers must not yet rely
 * on any handshake bytes actually being exchanged.
 */
public class HandShake {

    /**
     * Writes the client's handshake request for the given service type.
     *
     * @param output stream to write the request to
     * @param type   service type the client requests; parameterized as
     *               {@code Class<?>} instead of the raw {@code Class}
     *               (source-compatible for all existing callers)
     */
    public static void writeClientRequest(OutputStream output, Class<?> type) {
        // Stub: no handshake bytes are written yet.
    }

    /**
     * Reads and discards the server's answer to a handshake request.
     *
     * @param inputStream stream to read the answer from
     */
    public static void readServerAnswer(InputStream inputStream) {
        // Stub: the answer is not consumed yet.
    }

    /**
     * Reads a client's handshake request.
     *
     * @param inputStream stream to read the request from
     * @return the parsed request; always {@code null} in this stub
     */
    public static String readClientRequest(InputStream inputStream) {
        return null;
    }

    /**
     * Writes a failure answer back to the client.
     *
     * @param outputStream stream to write the answer to
     * @param string       failure reason to report
     */
    public static void writeServerAnswerFailed(OutputStream outputStream, String string) {
        // Stub: failures are currently reported nowhere.
    }
}
apache-2.0
diwer/daily-simple
webSrc/src/main/java/cn/whatisee/web/config/SessionInitializer.java
327
package cn.whatisee.web.config;

import org.springframework.session.web.context.AbstractHttpSessionApplicationInitializer;

/**
 * Bootstraps Spring Session for the web application.
 *
 * <p>Registers {@code SessionConfig} (declared elsewhere in this package) with
 * Spring Session's servlet-container initializer, which installs the
 * session-repository filter at startup.
 *
 * Created by ming on 16/7/26.
 */
public class SessionInitializer extends AbstractHttpSessionApplicationInitializer {

    // Delegates to the superclass, which wires SessionConfig into the root context.
    public SessionInitializer() {
        super(SessionConfig.class);
    }
}
apache-2.0
emil-wcislo/sbql4j8
sbql4j8/src/main/openjdk/sbql4j8/com/sun/tools/doclets/formats/html/ProfileIndexFrameWriter.java
6757
/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sbql4j8.com.sun.tools.doclets.formats.html; import java.io.*; import sbql4j8.com.sun.tools.javac.sym.Profiles; import sbql4j8.com.sun.tools.doclets.formats.html.markup.*; import sbql4j8.com.sun.tools.doclets.internal.toolkit.*; import sbql4j8.com.sun.tools.doclets.internal.toolkit.util.*; import sbql4j8.com.sun.tools.javac.jvm.Profile; /** * Generate the profile index for the left-hand frame in the generated output. * A click on the profile name in this frame will update the page in the top * left hand frame with the listing of packages of the clicked profile. * * <p><b>This is NOT part of any supported API. * If you write code that depends on this, you do so at your own risk. 
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 *
 * @author Bhavesh Patel
 */
public class ProfileIndexFrameWriter extends AbstractProfileIndexWriter {

    /**
     * Construct the ProfileIndexFrameWriter object.
     *
     * @param configuration the configuration object
     * @param filename Name of the profile index file to be generated.
     */
    public ProfileIndexFrameWriter(ConfigurationImpl configuration,
                                   DocPath filename) throws IOException {
        super(configuration, filename);
    }

    /**
     * Generate the profile index file named "profile-overview-frame.html".
     * Any I/O failure is reported through the configuration's message handler
     * and then rethrown as a DocletAbortException.
     * @throws DocletAbortException
     * @param configuration the configuration object
     */
    public static void generate(ConfigurationImpl configuration) {
        ProfileIndexFrameWriter profilegen;
        DocPath filename = DocPaths.PROFILE_OVERVIEW_FRAME;
        try {
            profilegen = new ProfileIndexFrameWriter(configuration, filename);
            profilegen.buildProfileIndexFile("doclet.Window_Overview", false);
            profilegen.close();
        } catch (IOException exc) {
            configuration.standardmessage.error(
                    "doclet.exception_encountered",
                    exc.toString(), filename);
            throw new DocletAbortException(exc);
        }
    }

    /**
     * {@inheritDoc}
     */
    protected void addProfilesList(Profiles profiles, String text,
            String tableSummary, Content body) {
        Content heading = HtmlTree.HEADING(HtmlConstants.PROFILE_HEADING, true,
                profilesLabel);
        Content div = HtmlTree.DIV(HtmlStyle.indexContainer, heading);
        HtmlTree ul = new HtmlTree(HtmlTag.UL);
        ul.setTitle(profilesLabel);
        String profileName;
        // NOTE(review): iteration starts at 1 — profiles appear to be 1-indexed
        // in Profile.lookup; confirm against the Profiles API.
        for (int i = 1; i < profiles.getProfileCount(); i++) {
            profileName = (Profile.lookup(i)).name;
            // If the profile has valid packages to be documented, add it to the
            // left-frame generated for profile index.
            if (configuration.shouldDocumentProfile(profileName))
                ul.addContent(getProfile(profileName));
        }
        div.addContent(ul);
        body.addContent(div);
    }

    /**
     * Gets each profile name as a separate link.
     *
     * @param profileName the profile being documented
     * @return content for the profile link
     */
    protected Content getProfile(String profileName) {
        Content profileLinkContent;
        Content profileLabel;
        profileLabel = new StringContent(profileName);
        // The link targets the "packageListFrame" so clicking a profile updates
        // the package list in the adjacent frame.
        profileLinkContent = getHyperLink(DocPaths.profileFrame(profileName), profileLabel, "",
                "packageListFrame");
        Content li = HtmlTree.LI(profileLinkContent);
        return li;
    }

    /**
     * {@inheritDoc}
     */
    protected void addNavigationBarHeader(Content body) {
        Content headerContent;
        // Prefer the -packagesheader text when supplied; fall back to -header.
        if (configuration.packagesheader.length() > 0) {
            headerContent = new RawHtml(replaceDocRootDir(configuration.packagesheader));
        } else {
            headerContent = new RawHtml(replaceDocRootDir(configuration.header));
        }
        Content heading = HtmlTree.HEADING(HtmlConstants.TITLE_HEADING, true,
                HtmlStyle.bar, headerContent);
        body.addContent(heading);
    }

    /**
     * Do nothing as there is no overview information in this page.
     */
    protected void addOverviewHeader(Content body) {
    }

    /**
     * Adds "All Classes" link for the top of the left-hand frame page to the
     * documentation tree.
     *
     * @param div the Content object to which the all classes link should be added
     */
    protected void addAllClassesLink(Content div) {
        Content linkContent = getHyperLink(DocPaths.ALLCLASSES_FRAME,
                allclassesLabel, "", "packageFrame");
        Content span = HtmlTree.SPAN(linkContent);
        div.addContent(span);
    }

    /**
     * Adds "All Packages" link for the top of the left-hand frame page to the
     * documentation tree.
     *
     * @param div the Content object to which the all packages link should be added
     */
    protected void addAllPackagesLink(Content div) {
        Content linkContent = getHyperLink(DocPaths.OVERVIEW_FRAME,
                allpackagesLabel, "", "packageListFrame");
        Content span = HtmlTree.SPAN(linkContent);
        div.addContent(span);
    }

    /**
     * {@inheritDoc}
     */
    protected void addNavigationBarFooter(Content body) {
        Content p = HtmlTree.P(getSpace());
        body.addContent(p);
    }

    /**
     * No-op: this frame does not render a per-profile packages list.
     */
    protected void addProfilePackagesList(Profiles profiles, String text,
            String tableSummary, Content body, String profileName) {
    }
}
apache-2.0
GoodgameStudios/jawampa
src/test/java/ws/wamp/jawampa/roles/CalleeTest.java
10370
package ws.wamp.jawampa.roles;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.InOrder;
import org.mockito.Mock;

import rx.Observer;
import rx.subjects.PublishSubject;
import ws.wamp.jawampa.ApplicationError;
import ws.wamp.jawampa.Request;
import ws.wamp.jawampa.ids.RegistrationId;
import ws.wamp.jawampa.ids.RequestId;
import ws.wamp.jawampa.io.BaseClient;
import ws.wamp.jawampa.messages.ErrorMessage;
import ws.wamp.jawampa.messages.InvocationMessage;
import ws.wamp.jawampa.messages.RegisterMessage;
import ws.wamp.jawampa.messages.RegisteredMessage;
import ws.wamp.jawampa.messages.SubscribeMessage;
import ws.wamp.jawampa.messages.UnregisterMessage;
import ws.wamp.jawampa.messages.UnregisteredMessage;
import ws.wamp.jawampa.messages.WampMessage;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * Interaction tests for the WAMP {@code Callee} role: registering and
 * unregistering procedures, routing incoming INVOCATION messages to the
 * client-supplied subject, and surfacing router errors.
 */
public class CalleeTest {
    // Request ids handed out by the mocked BaseClient, in order.
    private static final RequestId REQUEST_ID = RequestId.of( 42L );
    private static final RequestId REQUEST_ID2 = RequestId.of( 57L );
    private static final RequestId REQUEST_ID3 = RequestId.of( 97L );
    private static final RegistrationId REGISTRATION_ID = RegistrationId.of( 23L );

    @Mock private BaseClient baseClient;
    private String procedure = "some_procedure";
    @Mock private ArrayNode arguments;
    @Mock private ObjectNode kwArguments;
    @Mock private ArrayNode replyArguments;
    @Mock private ObjectNode replyKwArguments;

    // Object under test.
    private Callee subject;

    // Subject/observer pair standing in for the client's call handler.
    private PublishSubject<Request> callSubject;
    @Mock private Observer<Request> callObserver;

    // Subject/observer pair observing unregistration completion.
    private PublishSubject<Void> unsubscribeSubject;
    @Mock private Observer<Void> unsubscriptionObserver;

    @Before public void setup() {
        initMocks( this );
        subject = new Callee( baseClient );
        callSubject = PublishSubject.create();
        callSubject.subscribe( callObserver );
        unsubscribeSubject = PublishSubject.create();
        unsubscribeSubject.subscribe( unsubscriptionObserver );

        // First two requests get fixed ids; a third request is a test bug.
        when( baseClient.getNewRequestId() ).thenReturn( REQUEST_ID )
                                            .thenReturn( REQUEST_ID2 )
                                            .thenThrow( new IllegalStateException( "No more request ids for you!" ) );
    }

    @Test
    public void testRegisterSendsRegisterMessage() {
        subject.register( procedure, callSubject );

        ArgumentMatcher<WampMessage> messageMatcher = new ArgumentMatcher<WampMessage>() {
            @Override
            public boolean matches( Object argument ) {
                RegisterMessage message = (RegisterMessage)argument;
                if ( !message.requestId.equals( REQUEST_ID ) ) return false;
                // NOTE(review): reference comparison — relies on the same String
                // instance flowing through Callee unchanged; confirm intended.
                if ( message.procedure != procedure ) return false;
                return true;
            }
        };
        verify( baseClient ).scheduleMessageToRouter( argThat( messageMatcher ) );
    }

    @Test
    public void testMethodIsCalledAfterRegistration() {
        subject.register( procedure, callSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );
        subject.onInvocation( new InvocationMessage( REQUEST_ID2, REGISTRATION_ID, null, arguments, kwArguments ) );

        ArgumentMatcher<Request> requestMatcher = new ArgumentMatcher<Request>() {
            @Override
            public boolean matches( Object argument ) {
                Request data = (Request)argument;
                if ( data.arguments() != arguments ) return false;
                if ( data.keywordArguments() != kwArguments ) return false;
                return true;
            }
        };
        verify( callObserver ).onNext( argThat( requestMatcher ) );
        verify( callObserver, never()).onCompleted();
        verify( callObserver, never()).onError( any( Throwable.class ) );
    }

    @Test
    public void testSubscriptionErrorIsDeliveredToClient() {
        subject.register( procedure, callSubject );
        subject.onRegisterError( new ErrorMessage( RegisterMessage.ID, REQUEST_ID, null, ApplicationError.INVALID_ARGUMENT, null, null ) );

        verify( callObserver, never() ).onNext( any( Request.class ) );
        verify( callObserver, never()).onCompleted();
        verify( callObserver).onError( any( Throwable.class ) );
    }

    @Test
    public void testUnsubscribeSendsUnsubscribeMessage() {
        subject.register( procedure, callSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );
        subject.unregister( procedure, unsubscribeSubject );

        ArgumentMatcher<WampMessage> messageMatcher = new ArgumentMatcher<WampMessage>() {
            @Override
            public boolean matches( Object argument ) {
                UnregisterMessage message = (UnregisterMessage)argument;
                if ( !message.requestId.equals( REQUEST_ID2 ) ) return false;
                if ( !message.registrationId.equals( REGISTRATION_ID ) ) return false;
                return true;
            }
        };
        // Ordering matters: REGISTER must reach the router before UNREGISTER.
        InOrder inOrder = inOrder( baseClient );
        inOrder.verify( baseClient ).scheduleMessageToRouter( any( WampMessage.class ) );
        inOrder.verify( baseClient ).scheduleMessageToRouter( argThat( messageMatcher ) );
    }

    @Test
    public void testSuccessfulUnsubscribeIsDeliveredToClient() {
        subject.register( procedure, callSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );
        subject.unregister( procedure, unsubscribeSubject );
        subject.onUnregistered( new UnregisteredMessage( REQUEST_ID2 ) );

        verify( unsubscriptionObserver, never() ).onNext( any( Void.class ) );
        verify( unsubscriptionObserver ).onCompleted();
        verify( unsubscriptionObserver, never() ).onError( any( Throwable.class ) );
    }

    @Test
    public void testUnsubscribeErrorIsDeliveredToClient() {
        subject.register( procedure, callSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );
        subject.unregister( procedure, unsubscribeSubject );
        subject.onUnregisterError( new ErrorMessage( SubscribeMessage.ID, REQUEST_ID2, null, ApplicationError.INVALID_ARGUMENT, null, null ) );

        verify( unsubscriptionObserver, never() ).onNext( any( Void.class ) );
        verify( unsubscriptionObserver, never() ).onCompleted();
        verify( unsubscriptionObserver ).onError( any( ApplicationError.class ) );
    }

    @Test
    public void testInvokeAfterUnregisteredIsAnError() {
        subject.register( procedure, callSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );
        subject.unregister( procedure, unsubscribeSubject );
        subject.onUnregistered( new UnregisteredMessage( REQUEST_ID2 ) );
        subject.onInvocation( new InvocationMessage( REQUEST_ID3, REGISTRATION_ID, null, arguments, kwArguments ) );

        verify( baseClient ).onProtocolError();
    }

    @Test
    public void testInvokeDuringUnregisteringMustStillWork() {
        subject.register( procedure, callSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );
        subject.unregister( procedure, unsubscribeSubject );
        // UNREGISTERED has not arrived yet, so invocations must still be routed.
        subject.onInvocation( new InvocationMessage( REQUEST_ID3, REGISTRATION_ID, null, arguments, kwArguments ) );

        ArgumentMatcher<Request> requestMatcher = new ArgumentMatcher<Request>() {
            @Override
            public boolean matches( Object argument ) {
                Request data = (Request)argument;
                if ( data.arguments() != arguments ) return false;
                if ( data.keywordArguments() != kwArguments ) return false;
                return true;
            }
        };
        verify( callObserver ).onNext( argThat( requestMatcher ) );
        verify( callObserver, never()).onCompleted();
        verify( callObserver, never()).onError( any( Throwable.class ) );
    }

    @Test
    public void testRegistrationSubjectIsCompletedWhenUnsubscribeIsSuccessful() {
        subject.register( procedure, callSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );
        subject.unregister( procedure, unsubscribeSubject );
        subject.onUnregistered( new UnregisteredMessage( REQUEST_ID2 ) );

        verify( callObserver ).onCompleted();
        verify( callObserver, never()).onError( any( Throwable.class ) );
    }

    @Test
    public void testUnregisterBeforeRegisteredMessage() {
        subject.register( procedure, callSubject );
        // Unregister is requested before the router confirms registration;
        // the UNREGISTER must still be sent once REGISTERED arrives.
        subject.unregister( procedure, unsubscribeSubject );
        subject.onRegistered( new RegisteredMessage( REQUEST_ID, REGISTRATION_ID ) );

        ArgumentMatcher<WampMessage> messageMatcher = new ArgumentMatcher<WampMessage>() {
            @Override
            public boolean matches( Object argument ) {
                UnregisterMessage message = (UnregisterMessage)argument;
                if ( !message.requestId.equals( REQUEST_ID2 ) ) return false;
                if ( !message.registrationId.equals( REGISTRATION_ID ) ) return false;
                return true;
            }
        };
        InOrder inOrder = inOrder( baseClient );
        inOrder.verify( baseClient ).scheduleMessageToRouter( any( WampMessage.class ) );
        inOrder.verify( baseClient ).scheduleMessageToRouter( argThat( messageMatcher ) );
    }
}
apache-2.0
Sellegit/j2objc
translator/src/test/java/com/google/devtools/j2objc/translate/OuterReferenceResolverTest.java
10745
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.j2objc.translate; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ListMultimap; import com.google.devtools.j2objc.GenerationTest; import com.google.devtools.j2objc.ast.AnonymousClassDeclaration; import com.google.devtools.j2objc.ast.CompilationUnit; import com.google.devtools.j2objc.ast.InfixExpression; import com.google.devtools.j2objc.ast.MethodInvocation; import com.google.devtools.j2objc.ast.PostfixExpression; import com.google.devtools.j2objc.ast.TreeConverter; import com.google.devtools.j2objc.ast.TreeNode; import com.google.devtools.j2objc.ast.TreeNode.Kind; import com.google.devtools.j2objc.ast.TreeVisitor; import com.google.devtools.j2objc.ast.TypeDeclaration; import com.google.devtools.j2objc.file.RegularInputFile; import com.google.devtools.j2objc.types.Types; import com.google.devtools.j2objc.util.BindingUtil; import org.eclipse.jdt.core.dom.ITypeBinding; import org.eclipse.jdt.core.dom.IVariableBinding; import java.util.List; /** * Unit tests for {@link OuterReferenceResolver}. 
 *
 * @author Keith Stanger
 */
public class OuterReferenceResolverTest extends GenerationTest {

  // Every AST node visited during resolveSource(), bucketed by node kind so
  // individual tests can pull out the nodes they assert on.
  private ListMultimap<Kind, TreeNode> nodesByType = ArrayListMultimap.create();

  @Override
  protected void tearDown() throws Exception {
    nodesByType.clear();
    OuterReferenceResolver.cleanup();
  }

  public void testOuterVarAccess() {
    resolveSource("Test", "class Test { int i; class Inner { void test() { i++; } } }");

    TypeDeclaration innerNode = (TypeDeclaration) nodesByType.get(Kind.TYPE_DECLARATION).get(1);
    assertTrue(OuterReferenceResolver.needsOuterReference(innerNode.getTypeBinding()));

    PostfixExpression increment =
        (PostfixExpression) nodesByType.get(Kind.POSTFIX_EXPRESSION).get(0);
    List<IVariableBinding> path = OuterReferenceResolver.getPath(increment.getOperand());
    assertNotNull(path);
    assertEquals(2, path.size());
    assertEquals("Test", path.get(0).getType().getName());
  }

  public void testInheritedOuterMethod() {
    resolveSource("Test",
        "class Test { class A { void foo() {} } class B extends A { "
        + "class Inner { void test() { foo(); } } } }");

    TypeDeclaration aNode = (TypeDeclaration) nodesByType.get(Kind.TYPE_DECLARATION).get(1);
    TypeDeclaration bNode = (TypeDeclaration) nodesByType.get(Kind.TYPE_DECLARATION).get(2);
    TypeDeclaration innerNode = (TypeDeclaration) nodesByType.get(Kind.TYPE_DECLARATION).get(3);
    assertFalse(OuterReferenceResolver.needsOuterReference(aNode.getTypeBinding()));
    assertFalse(OuterReferenceResolver.needsOuterReference(bNode.getTypeBinding()));
    assertTrue(OuterReferenceResolver.needsOuterReference(innerNode.getTypeBinding()));

    // B will need an outer reference to Test so it can initialize its
    // superclass A.
    List<IVariableBinding> bPath = OuterReferenceResolver.getPath(bNode);
    assertNotNull(bPath);
    assertEquals(1, bPath.size());
    assertEquals(OuterReferenceResolver.OUTER_PARAMETER, bPath.get(0));

    // foo() call will need to get to B's scope to call the inherited method.
    MethodInvocation fooCall = (MethodInvocation) nodesByType.get(Kind.METHOD_INVOCATION).get(0);
    List<IVariableBinding> fooPath = OuterReferenceResolver.getPath(fooCall);
    assertNotNull(fooPath);
    assertEquals(1, fooPath.size());
    assertEquals("B", fooPath.get(0).getType().getName());
  }

  public void testCapturedLocalVariable() {
    resolveSource("Test",
        "class Test { void test(final int i) { Runnable r = new Runnable() { "
        + "public void run() { int i2 = i + 1; } }; } }");

    AnonymousClassDeclaration runnableNode =
        (AnonymousClassDeclaration) nodesByType.get(Kind.ANONYMOUS_CLASS_DECLARATION).get(0);
    ITypeBinding runnableBinding = runnableNode.getTypeBinding();
    assertFalse(OuterReferenceResolver.needsOuterReference(runnableBinding));
    List<IVariableBinding> capturedVars = OuterReferenceResolver.getCapturedVars(runnableBinding);
    List<IVariableBinding> innerFields = OuterReferenceResolver.getInnerFields(runnableBinding);
    assertEquals(1, capturedVars.size());
    assertEquals(1, innerFields.size());
    assertEquals("i", capturedVars.get(0).getName());
    assertEquals("val$i", innerFields.get(0).getName());

    InfixExpression addition = (InfixExpression) nodesByType.get(Kind.INFIX_EXPRESSION).get(0);
    List<IVariableBinding> iPath = OuterReferenceResolver.getPath(addition.getLeftOperand());
    assertNotNull(iPath);
    assertEquals(1, iPath.size());
    assertEquals("val$i", iPath.get(0).getName());
  }

  public void testCapturedWeakLocalVariable() {
    resolveSource("Test",
        "import com.google.j2objc.annotations.Weak;"
        + "class Test { void test(@Weak final int i) { Runnable r = new Runnable() { "
        + "public void run() { int i2 = i + 1; } }; } }");

    AnonymousClassDeclaration runnableNode =
        (AnonymousClassDeclaration) nodesByType.get(Kind.ANONYMOUS_CLASS_DECLARATION).get(0);
    ITypeBinding runnableBinding = runnableNode.getTypeBinding();
    List<IVariableBinding> innerFields = OuterReferenceResolver.getInnerFields(runnableBinding);
    assertEquals(1, innerFields.size());
    assertTrue(BindingUtil.isWeakReference(innerFields.get(0)));
  }

  public void testAnonymousClassInheritsLocalClassInStaticMethod() {
    resolveSource("Test",
        "class Test { static void test() { class LocalClass {}; new LocalClass() {}; } }");

    AnonymousClassDeclaration decl =
        (AnonymousClassDeclaration) nodesByType.get(Kind.ANONYMOUS_CLASS_DECLARATION).get(0);
    ITypeBinding type = decl.getTypeBinding();
    assertFalse(OuterReferenceResolver.needsOuterParam(type));
  }

  // NOTE(review): "Anonymouse" is a typo but renaming test methods is a
  // behavioral change to the suite's reporting — left as-is.
  public void testAnonymouseClassCapturedWeakOuterField() {
    resolveSource("Test",
        "import com.google.j2objc.annotations.*; public class Test {"
        + "@WeakOuter private Runnable Delegate = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } };"
        + "private Runnable Delegate2 = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } };}");

    AnonymousClassDeclaration runnableNode =
        (AnonymousClassDeclaration) nodesByType.get(Kind.ANONYMOUS_CLASS_DECLARATION).get(0);
    ITypeBinding runnableBinding = runnableNode.getTypeBinding();
    IVariableBinding outerField = OuterReferenceResolver.getOuterField(runnableBinding);
    assertTrue(BindingUtil.isWeakReference(outerField));

    AnonymousClassDeclaration runnableNode2 =
        (AnonymousClassDeclaration) nodesByType.get(Kind.ANONYMOUS_CLASS_DECLARATION).get(1);
    ITypeBinding runnableBinding2 = runnableNode2.getTypeBinding();
    IVariableBinding outerField2 = OuterReferenceResolver.getOuterField(runnableBinding2);
    assertTrue(!BindingUtil.isWeakReference(outerField2));
  }

  public void testAnonymouseClassCapturedWeakOuterLocalVariable() {
    resolveSource("Test",
        "import com.google.j2objc.annotations.*; public class Test {"
        + "public void someMethod() { @WeakOuter Runnable var = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } }; }"
        + "public void someMethod2() { Runnable var2 = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } }; }}");

    AnonymousClassDeclaration runnableNode =
        (AnonymousClassDeclaration) nodesByType.get(Kind.ANONYMOUS_CLASS_DECLARATION).get(0);
    ITypeBinding runnableBinding = runnableNode.getTypeBinding();
    IVariableBinding outerField = OuterReferenceResolver.getOuterField(runnableBinding);
    assertTrue(BindingUtil.isWeakReference(outerField));

    AnonymousClassDeclaration runnableNode2 =
        (AnonymousClassDeclaration) nodesByType.get(Kind.ANONYMOUS_CLASS_DECLARATION).get(1);
    ITypeBinding runnableBinding2 = runnableNode2.getTypeBinding();
    IVariableBinding outerField2 = OuterReferenceResolver.getOuterField(runnableBinding2);
    assertTrue(!BindingUtil.isWeakReference(outerField2));
  }

  public void testAnonymouseClassWeakOuterVariableMustInitWhenDeclared() {
    resolveSource("Test",
        "import com.google.j2objc.annotations.*; public class Test {"
        + "@WeakOuter private Runnable Delegate = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } };"
        + "private Runnable Delegate2 = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } };"
        + "@WeakOuter private Runnable Delegate3;"
        + "private Runnable Delegate4;"
        + "public void someMethod() { @WeakOuter Runnable var = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } }; }"
        + "public void someMethod2() { Runnable var2 = new Runnable() {"
        + "@Override public void run() { System.out.println(\"asd\" + Test.this); } }; }"
        + "public void someMethod3() { @WeakOuter Runnable var3; }"
        + "public void someMethod4() { Runnable var4; }}");
    // Exactly the two uninitialized @WeakOuter declarations should be flagged.
    assertErrorCount(2);
  }

  /**
   * Compiles the given source, runs OuterReferenceResolver over the converted
   * AST, and records every visited node into {@link #nodesByType}.
   */
  private void resolveSource(String name, String source) {
    org.eclipse.jdt.core.dom.CompilationUnit jdtUnit = compileType(name + ".java", source);
    Types.initialize(jdtUnit);
    CompilationUnit unit = TreeConverter.convertCompilationUnit(
        jdtUnit, new RegularInputFile(name + ".java"), source);
    OuterReferenceResolver.resolve(unit);
    findTypeDeclarations(unit);
  }

  // Walks the whole tree, bucketing each node by its Kind.
  private void findTypeDeclarations(CompilationUnit unit) {
    unit.accept(new TreeVisitor() {
      @Override
      public boolean preVisit(TreeNode node) {
        nodesByType.put(node.getKind(), node);
        return true;
      }
    });
  }
}
apache-2.0
tfisher1226/ARIES
nam/nam-view/src/main/java/admin/permission/PermissionEventManager.java
696
package admin.permission;

import java.io.Serializable;

import javax.enterprise.context.SessionScoped;
import javax.inject.Inject;
import javax.inject.Named;

import admin.Permission;
import nam.ui.design.AbstractEventManager;
import nam.ui.design.SelectionContext;

/**
 * Session-scoped CDI bean that fires lifecycle events for the currently
 * selected {@link Permission}.
 */
@SessionScoped
@Named("permissionEventManager")
public class PermissionEventManager extends AbstractEventManager<Permission> implements Serializable {

    @Inject
    private SelectionContext selectionContext;

    /**
     * Returns the Permission currently selected in the session's selection
     * context (keyed "permission"); may be null if nothing is selected —
     * TODO confirm SelectionContext's contract for missing keys.
     */
    @Override
    public Permission getInstance() {
        return selectionContext.getSelection("permission");
    }

    /** Fires a remove event for the currently selected Permission. */
    public void removePermission() {
        Permission permission = getInstance();
        fireRemoveEvent(permission);
    }
}
apache-2.0
chirino/mop
mop-core/src/main/java/org/fusesource/mop/commands/ServiceMix.java
2930
/** * Copyright (C) 2009 Progress Software, Inc. All rights reserved. * http://fusesource.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.mop.commands; import java.io.File; import java.util.ArrayList; import java.util.List; import org.fusesource.mop.Command; public class ServiceMix extends AbstractContainerBase { @Command public void servicemix(List<String> params) throws Exception { installAndLaunch(params); } protected String getContainerName() { return "ServiceMix"; } protected String getArtefactId() { // fool MOP into not shading with leading org.fusesource.mop StringBuffer sb = new StringBuffer(); sb.append("org").append(".").append("apache").append("."); sb.append("servicemix").append(":apache-servicemix:"); return sb.toString(); } protected String getPrefix() { return "apache-servicemix-"; } protected String getCommandName() { return "servicemix"; } protected List<String> processArgs(List<String> command, List<String> params) { if (!version.startsWith("3")) { command.add("server"); } extractEnvironment(params); extractSecondaryCommands(params); return command; } protected String getInput() { return null; } protected File getDeployFolder(File root) { if (version.startsWith("3")) { return new File(root, "hotdeploy"); } else { return new File(root, "deploy"); } } @Override protected List<String> getSecondaryCommand(File root, List<String> params) { List<String> commands = null; if (!"".equals(secondaryArgs)) { commands = new ArrayList<String>(); 
commands.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + (isWindows() ? "java.exe" : "java")); commands.add("-jar"); commands.add(root + File.separator + "lib" + File.separator + "karaf-client.jar"); if (version.startsWith("4.1") || version.startsWith("4.2")) { commands.add("-r"); commands.add("10"); commands.add("-d"); commands.add("5"); } commands.add(secondaryArgs); } return commands; } }
apache-2.0
alidili/Demos
BleDemo/app/src/main/java/com/yl/ble/BleService.java
7829
package com.yl.ble;

import android.app.Service;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothProfile;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.text.TextUtils;

import java.util.UUID;

/**
 * Bluetooth Low Energy connection service.
 * <p>
 * Manages a single GATT connection: connect/disconnect, enabling
 * notifications on the read characteristic, and writing data to the
 * write characteristic. State changes and incoming data are published
 * to the rest of the app via broadcasts (ACTION_* constants below).
 * <p>
 * Created by yangle on 2018/7/5.
 * Website:http://www.yangle.tech
 */
public class BleService extends Service {

    private final String TAG = BleService.class.getSimpleName();

    private BluetoothGatt mBluetoothGatt;
    // Current connection state (one of the STATE_* values below)
    private int mConnectionState = 0;
    // Disconnected
    private final int STATE_DISCONNECTED = 0;
    // Connection in progress
    private final int STATE_CONNECTING = 1;
    // Connected
    private final int STATE_CONNECTED = 2;

    // Broadcast: GATT connected
    public final static String ACTION_GATT_CONNECTED = "com.yl.ble.ACTION_GATT_CONNECTED";
    // Broadcast: GATT disconnected
    public final static String ACTION_GATT_DISCONNECTED = "com.yl.ble.ACTION_GATT_DISCONNECTED";
    // Broadcast: GATT services discovered
    public final static String ACTION_GATT_SERVICES_DISCOVERED = "com.yl.ble.ACTION_GATT_SERVICES_DISCOVERED";
    // Broadcast: data received from the device
    public final static String ACTION_DATA_AVAILABLE = "com.yl.ble.ACTION_DATA_AVAILABLE";
    // Broadcast: connection setup failed
    public final static String ACTION_CONNECTING_FAIL = "com.yl.ble.ACTION_CONNECTING_FAIL";
    // Intent extra key carrying the received bytes
    public final static String EXTRA_DATA = "com.yl.ble.EXTRA_DATA";

    // Service UUID
    private final UUID SERVICE_UUID = UUID.fromString("0000ace0-0000-1000-8000-00805f9b34fb");
    // Characteristic UUID (notifications / reading data)
    private final UUID CHARACTERISTIC_READ_UUID = UUID.fromString("0000ace0-0001-1000-8000-00805f9b34fb");
    // Characteristic UUID (writing data)
    private final UUID CHARACTERISTIC_WRITE_UUID = UUID.fromString("0000ace0-0003-1000-8000-00805f9b34fb");
    // Client Characteristic Configuration Descriptor (CCCD) UUID
    private final UUID DESCRIPTOR_UUID = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");

    // Binder handed to bound clients
    private final IBinder mBinder = new LocalBinder();

    /** Local-process binder exposing this service instance to clients. */
    public class LocalBinder extends Binder {
        public BleService getService() {
            return BleService.this;
        }
    }

    @Override
    public IBinder onBind(Intent intent) {
        return mBinder;
    }

    @Override
    public boolean onUnbind(Intent intent) {
        // Last client went away: tear down the GATT connection.
        release();
        return super.onUnbind(intent);
    }

    /**
     * GATT callback; invoked by the Bluetooth stack on connection-state
     * changes, service discovery and characteristic notifications.
     */
    private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
        @Override
        public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
            if (newState == BluetoothProfile.STATE_CONNECTED) {
                mConnectionState = STATE_CONNECTED;
                sendBleBroadcast(ACTION_GATT_CONNECTED);
                // Kick off GATT service discovery; results arrive in
                // onServicesDiscovered.
                mBluetoothGatt.discoverServices();
            } else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
                mConnectionState = STATE_DISCONNECTED;
                sendBleBroadcast(ACTION_GATT_DISCONNECTED);
            }
        }

        @Override
        public void onServicesDiscovered(BluetoothGatt gatt, int status) {
            if (status == BluetoothGatt.GATT_SUCCESS) {
                // Services are known — enable notifications on the read
                // characteristic so the device can push data to us.
                setBleNotification();
            }
        }

        @Override
        public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
            // Data pushed by the device
            sendBleBroadcast(ACTION_DATA_AVAILABLE, characteristic);
        }
    };

    /**
     * Sends a state-change broadcast.
     *
     * @param action broadcast action
     */
    private void sendBleBroadcast(String action) {
        Intent intent = new Intent(action);
        sendBroadcast(intent);
    }

    /**
     * Sends a data broadcast; the payload is attached only when it comes
     * from the expected read characteristic.
     *
     * @param action         broadcast action
     * @param characteristic characteristic carrying the data
     */
    private void sendBleBroadcast(String action, BluetoothGattCharacteristic characteristic) {
        Intent intent = new Intent(action);
        if (CHARACTERISTIC_READ_UUID.equals(characteristic.getUuid())) {
            intent.putExtra(EXTRA_DATA, characteristic.getValue());
        }
        sendBroadcast(intent);
    }

    /**
     * Initiates a GATT connection to the device with the given address.
     *
     * @param bluetoothAdapter BluetoothAdapter
     * @param address          device MAC address
     * @return true if the connection attempt was started, false otherwise
     */
    public boolean connect(BluetoothAdapter bluetoothAdapter, String address) {
        if (bluetoothAdapter == null || TextUtils.isEmpty(address)) {
            return false;
        }
        BluetoothDevice device = bluetoothAdapter.getRemoteDevice(address);
        if (device == null) {
            return false;
        }
        // autoConnect=false: connect directly rather than waiting for the
        // device to become available.
        mBluetoothGatt = device.connectGatt(this, false, mGattCallback);
        mConnectionState = STATE_CONNECTING;
        return true;
    }

    /**
     * Disconnects the current GATT connection (keeps the client for reuse).
     */
    public void disconnect() {
        if (mBluetoothGatt == null) {
            return;
        }
        mBluetoothGatt.disconnect();
    }

    /**
     * Closes the GATT client and releases all associated resources.
     */
    public void release() {
        if (mBluetoothGatt == null) {
            return;
        }
        mBluetoothGatt.close();
        mBluetoothGatt = null;
    }

    /**
     * Enables device→app notifications on the read characteristic.
     * Broadcasts {@link #ACTION_CONNECTING_FAIL} if any required GATT
     * object is missing or registration fails.
     */
    public void setBleNotification() {
        if (mBluetoothGatt == null) {
            sendBleBroadcast(ACTION_CONNECTING_FAIL);
            return;
        }

        // Look up the service on the remote device
        BluetoothGattService gattService = mBluetoothGatt.getService(SERVICE_UUID);
        if (gattService == null) {
            sendBleBroadcast(ACTION_CONNECTING_FAIL);
            return;
        }

        // Look up the notify/read characteristic
        BluetoothGattCharacteristic gattCharacteristic = gattService.getCharacteristic(CHARACTERISTIC_READ_UUID);
        if (gattCharacteristic == null) {
            sendBleBroadcast(ACTION_CONNECTING_FAIL);
            return;
        }

        // FIX: register for local notifications BEFORE writing the CCCD.
        // The Android BLE docs require local registration first; the old
        // order could drop notifications that arrive before registration.
        if (!mBluetoothGatt.setCharacteristicNotification(gattCharacteristic, true)) {
            sendBleBroadcast(ACTION_CONNECTING_FAIL);
            return;
        }

        // FIX: getDescriptor may return null; the original code would NPE here.
        BluetoothGattDescriptor descriptor = gattCharacteristic.getDescriptor(DESCRIPTOR_UUID);
        if (descriptor == null) {
            sendBleBroadcast(ACTION_CONNECTING_FAIL);
            return;
        }

        // Write the CCCD so the remote device starts sending notifications;
        // incoming data then arrives via onCharacteristicChanged.
        descriptor.setValue(BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE);
        if (!mBluetoothGatt.writeDescriptor(descriptor)) {
            sendBleBroadcast(ACTION_CONNECTING_FAIL);
        }
    }

    /**
     * Writes data to the device's write characteristic.
     *
     * @param data payload bytes
     * @return true if the write was initiated, false otherwise
     */
    public boolean sendData(byte[] data) {
        // Look up the service (null when not connected)
        BluetoothGattService gattService = null;
        if (mBluetoothGatt != null) {
            gattService = mBluetoothGatt.getService(SERVICE_UUID);
        }
        if (gattService == null) {
            return false;
        }

        // Look up the write characteristic
        BluetoothGattCharacteristic gattCharacteristic = gattService.getCharacteristic(CHARACTERISTIC_WRITE_UUID);
        if (gattCharacteristic == null) {
            return false;
        }

        // Stage the payload and request the write
        gattCharacteristic.setValue(data);
        return mBluetoothGatt.writeCharacteristic(gattCharacteristic);
    }
}
apache-2.0
leleuj/pac4j
pac4j-oauth/src/test/java/org/pac4j/oauth/client/LinkedInClientIT.java
3989
/* Copyright 2012 - 2014 Jerome Leleu Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.pac4j.oauth.client; import org.apache.commons.lang3.StringUtils; import org.pac4j.core.client.Client; import org.pac4j.core.profile.Gender; import org.pac4j.core.profile.ProfileHelper; import org.pac4j.core.profile.UserProfile; import org.pac4j.oauth.profile.linkedin.LinkedInProfile; import com.esotericsoftware.kryo.Kryo; import com.gargoylesoftware.htmlunit.WebClient; import com.gargoylesoftware.htmlunit.html.HtmlForm; import com.gargoylesoftware.htmlunit.html.HtmlPage; import com.gargoylesoftware.htmlunit.html.HtmlPasswordInput; import com.gargoylesoftware.htmlunit.html.HtmlSubmitInput; import com.gargoylesoftware.htmlunit.html.HtmlTextInput; /** * This class tests the {@link LinkedInClient} class by simulating a complete authentication. 
* * @author Jerome Leleu * @since 1.0.0 */ public class LinkedInClientIT extends OAuthClientIT { @SuppressWarnings("rawtypes") @Override protected Client getClient() { final LinkedInClient linkedInClient = new LinkedInClient(); linkedInClient.setKey("gsqj8dn56ayn"); linkedInClient.setSecret("kUFAZ2oYvwMQ6HFl"); linkedInClient.setCallbackUrl(PAC4J_URL); return linkedInClient; } @Override protected String getCallbackUrl(final WebClient webClient, final HtmlPage authorizationPage) throws Exception { final HtmlForm form = authorizationPage.getFormByName("oauthAuthorizeForm"); final HtmlTextInput sessionKey = form.getInputByName("session_key"); sessionKey.setValueAttribute("testscribeup@gmail.com"); final HtmlPasswordInput sessionPassword = form.getInputByName("session_password"); sessionPassword.setValueAttribute("testpwdscribeup56"); final HtmlSubmitInput submit = form.getInputByName("authorize"); final HtmlPage callbackPage = submit.click(); final String callbackUrl = callbackPage.getUrl().toString(); logger.debug("callbackUrl : {}", callbackUrl); return callbackUrl; } @Override protected void registerForKryo(final Kryo kryo) { kryo.register(LinkedInProfile.class); } @Override protected void verifyProfile(final UserProfile userProfile) { final LinkedInProfile profile = (LinkedInProfile) userProfile; logger.debug("userProfile : {}", profile); assertEquals("167439971", profile.getId()); assertEquals(LinkedInProfile.class.getSimpleName() + UserProfile.SEPARATOR + "167439971", profile.getTypedId()); assertTrue(ProfileHelper.isTypedIdOf(profile.getTypedId(), LinkedInProfile.class)); assertTrue(StringUtils.isNotBlank(profile.getAccessToken())); assertCommonProfile(userProfile, null, "test", "scribeUp", "test scribeUp", null, Gender.UNSPECIFIED, null, null, "http://www.linkedin.com/profile/view?id=167439971&amp;authType=name&amp;authToken=_IWF&amp;trk=api*", null); assertEquals("ScribeUP développeur chez OpenSource", profile.getHeadline()); 
assertNotNull(profile.getAccessSecret()); assertEquals(6, profile.getAttributes().size()); } }
apache-2.0
rlugojr/incubator-trafodion
core/sqf/src/seatrans/tm/hbasetmlib2/src/main/java/org/trafodion/dtm/TmAuditTlog.java
58212
// @@@ START COPYRIGHT @@@ // // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // // @@@ END COPYRIGHT @@@ package org.trafodion.dtm; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import org.apache.log4j.PropertyConfigurator; import org.apache.log4j.Logger; import org.apache.commons.codec.binary.Hex; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.transactional.TransactionManager; 
import org.apache.hadoop.hbase.client.transactional.TransactionState; import org.apache.hadoop.hbase.client.transactional.CommitUnsuccessfulException; import org.apache.hadoop.hbase.client.transactional.UnknownTransactionException; import org.apache.hadoop.hbase.client.transactional.HBaseBackedTransactionLogger; import org.apache.hadoop.hbase.client.transactional.TransactionRegionLocation; import org.apache.hadoop.hbase.client.transactional.TransState; import org.apache.hadoop.hbase.client.transactional.UnknownTransactionException; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.coprocessor.transactional.generated.TrxRegionProtos.TlogDeleteRequest; import org.apache.hadoop.hbase.coprocessor.transactional.generated.TrxRegionProtos.TlogDeleteResponse; import org.apache.hadoop.hbase.coprocessor.transactional.generated.TrxRegionProtos.TrxRegionService; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.LocalHBaseCluster; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType; import org.apache.hadoop.hbase.regionserver.RegionSplitPolicy; import com.google.protobuf.ByteString; import com.google.protobuf.HBaseZeroCopyByteString; import java.util.Arrays; import java.util.ArrayList; import java.util.Collections; 
import java.util.ConcurrentModificationException; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Set; import java.util.StringTokenizer; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.Callable; import java.util.concurrent.CompletionService; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Executors; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.RejectedExecutionException; public class TmAuditTlog { static final Log LOG = LogFactory.getLog(TmAuditTlog.class); private Configuration config; private static String TLOG_TABLE_NAME; private static final byte[] TLOG_FAMILY = Bytes.toBytes("tf"); private static final byte[] ASN_STATE = Bytes.toBytes("as"); private static final byte[] QUAL_TX_STATE = Bytes.toBytes("tx"); private static HTable[] table; private static Connection connection; private static HBaseAuditControlPoint tLogControlPoint; private static long tLogControlPointNum; private static long tLogHashKey; private static int tLogHashShiftFactor; private int dtmid; // For performance metrics private static long[] startTimes; private static long[] endTimes; private static long[] synchTimes; private static long[] bufferSizes; private static AtomicInteger timeIndex; private static long totalWriteTime; private static long totalSynchTime; private static long totalPrepTime; private static AtomicLong totalWrites; private static AtomicLong totalRecords; private static long minWriteTime; private static long minWriteTimeBuffSize; private static long maxWriteTime; private static long maxWriteTimeBuffSize; private static double avgWriteTime; private 
static long minPrepTime; private static long maxPrepTime; private static double avgPrepTime; private static long minSynchTime; private static long maxSynchTime; private static double avgSynchTime; private static long minBufferSize; private static long maxBufferSize; private static double avgBufferSize; private static int versions; private static int tlogNumLogs; private boolean useAutoFlush; private static boolean ageCommitted; private static boolean forceControlPoint; private boolean disableBlockCache; private boolean controlPointDeferred; private int TlogRetryDelay; private int TlogRetryCount; private static AtomicLong asn; // Audit sequence number is the monotonic increasing value of the tLog write private static Object tlogAuditLock[]; // Lock for synchronizing access via regions. private static Object tablePutLock; // Lock for synchronizing table.put operations // to avoid ArrayIndexOutOfBoundsException private static byte filler[]; public static final int TLOG_SLEEP = 1000; // One second public static final int TLOG_SLEEP_INCR = 5000; // Five seconds public static final int TLOG_RETRY_ATTEMPTS = 5; /** * tlogThreadPool - pool of thread for asynchronous requests */ ExecutorService tlogThreadPool; private abstract class TlogCallable implements Callable<Integer>{ TransactionState transactionState; HRegionLocation location; HTable table; byte[] startKey; byte[] endKey_orig; byte[] endKey; TlogCallable(TransactionState txState, HRegionLocation location, Connection connection) throws IOException { transactionState = txState; this.location = location; table = new HTable(location.getRegionInfo().getTable(), connection, tlogThreadPool); startKey = location.getRegionInfo().getStartKey(); endKey_orig = location.getRegionInfo().getEndKey(); endKey = TransactionManager.binaryIncrementPos(endKey_orig, -1); } public Integer deleteEntriesOlderThanASNX(final byte[] regionName, final long auditSeqNum, final boolean pv_ageCommitted) throws IOException { long threadId = 
Thread.currentThread().getId(); if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX -- ENTRY auditSeqNum: " + auditSeqNum + ", thread " + threadId); boolean retry = false; boolean refresh = false; final Scan scan = new Scan(startKey, endKey); int retryCount = 0; int retrySleep = TLOG_SLEEP; do { if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX -- ENTRY ASN: " + auditSeqNum); Batch.Call<TrxRegionService, TlogDeleteResponse> callable = new Batch.Call<TrxRegionService, TlogDeleteResponse>() { ServerRpcController controller = new ServerRpcController(); BlockingRpcCallback<TlogDeleteResponse> rpcCallback = new BlockingRpcCallback<TlogDeleteResponse>(); @Override public TlogDeleteResponse call(TrxRegionService instance) throws IOException { org.apache.hadoop.hbase.coprocessor.transactional.generated.TrxRegionProtos.TlogDeleteRequest.Builder builder = TlogDeleteRequest.newBuilder(); builder.setAuditSeqNum(auditSeqNum); builder.setTransactionId(transactionState.getTransactionId()); builder.setScan(ProtobufUtil.toScan(scan)); builder.setRegionName(ByteString.copyFromUtf8(Bytes.toString(regionName))); //ByteString.copyFromUtf8(Bytes.toString(regionName))); builder.setAgeCommitted(pv_ageCommitted); instance.deleteTlogEntries(controller, builder.build(), rpcCallback); return rpcCallback.get(); } }; Map<byte[], TlogDeleteResponse> result = null; try { if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX -- before coprocessorService ASN: " + auditSeqNum + " startKey: " + new String(startKey, "UTF-8") + " endKey: " + new String(endKey, "UTF-8")); result = table.coprocessorService(TrxRegionService.class, startKey, endKey, callable); } catch (Throwable e) { String msg = new String("ERROR occurred while calling deleteTlogEntries coprocessor service in deleteEntriesOlderThanASNX: " + e); LOG.error(msg, e); throw new IOException(msg, e); } if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX -- after coprocessorService ASN: " + 
auditSeqNum + " startKey: " + new String(startKey, "UTF-8") + " result size: " + result.size()); if(result.size() != 1) { LOG.error("deleteEntriesOlderThanASNX, received incorrect result size: " + result.size() + " ASN: " + auditSeqNum); throw new IOException("Wrong result size in deleteEntriesOlderThanASNX"); } else { // size is 1 for (TlogDeleteResponse TD_response : result.values()){ if(TD_response.getHasException()) { if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX coprocessor exception: " + TD_response.getException()); throw new IOException(TD_response.getException()); } if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX coprocessor deleted count: " + TD_response.getCount()); } retry = false; } if (refresh) { HRegionLocation lv_hrl = table.getRegionLocation(startKey); HRegionInfo lv_hri = lv_hrl.getRegionInfo(); String lv_node = lv_hrl.getHostname(); int lv_length = lv_node.indexOf('.'); if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX -- location being refreshed : " + location.getRegionInfo().getRegionNameAsString() + "endKey: " + Hex.encodeHexString(location.getRegionInfo().getEndKey()) + " for ASN: " + auditSeqNum); if(retryCount == TLOG_RETRY_ATTEMPTS) { LOG.error("Exceeded retry attempts (" + retryCount + ") in deleteEntriesOlderThanASNX for ASN: " + auditSeqNum); // We have received our reply in the form of an exception, // so decrement outstanding count and wake up waiters to avoid // getting hung forever IOException ie = new IOException("Exceeded retry attempts (" + retryCount + ") in deleteEntriesOlderThanASNX for ASN: " + auditSeqNum); transactionState.requestPendingCountDec(ie); throw ie; } if (LOG.isWarnEnabled()) LOG.warn("deleteEntriesOlderThanASNX -- " + table.toString() + " location being refreshed"); if (LOG.isWarnEnabled()) LOG.warn("deleteEntriesOlderThanASNX -- lv_hri: " + lv_hri); if (LOG.isWarnEnabled()) LOG.warn("deleteEntriesOlderThanASNX -- location.getRegionInfo(): " + 
location.getRegionInfo()); table.getRegionLocation(startKey, true); if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX -- setting retry, count: " + retryCount); refresh = false; } retryCount++; if (retryCount < TLOG_RETRY_ATTEMPTS && retry == true) { try { Thread.sleep(retrySleep); } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } retrySleep += TLOG_SLEEP_INCR; } } while (retryCount < TLOG_RETRY_ATTEMPTS && retry == true); // We have received our reply so decrement outstanding count transactionState.requestPendingCountDec(null); if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASNX -- EXIT ASN: " + auditSeqNum); return 0; } //getTransactionStatesFromIntervalX } // TlogCallable private class AuditBuffer{ private ArrayList<Put> buffer; // Each Put is an audit record private AuditBuffer () { buffer = new ArrayList<Put>(); buffer.clear(); } private void bufferAdd(Put localPut) { long threadId = Thread.currentThread().getId(); if (LOG.isTraceEnabled()) LOG.trace("BufferAdd start in thread " + threadId ); buffer.add(localPut); if (LOG.isTraceEnabled()) LOG.trace("BufferAdd end in thread " + threadId ); } private int bufferSize() { int lvSize; long threadId = Thread.currentThread().getId(); if (LOG.isTraceEnabled()) LOG.trace("BufferSize start in thread " + threadId ); lvSize = buffer.size(); if (LOG.isTraceEnabled()) LOG.trace("AuditBuffer bufferSize end; returning " + lvSize + " in thread " + Thread.currentThread().getId()); return lvSize; } private void bufferClear() { long threadId = Thread.currentThread().getId(); if (LOG.isTraceEnabled()) LOG.trace("AuditBuffer bufferClear start in thread " + threadId); buffer.clear(); if (LOG.isTraceEnabled()) LOG.trace("AuditBuffer bufferClear end in thread " + threadId); } private ArrayList<Put> getBuffer() { long threadId = Thread.currentThread().getId(); if (LOG.isTraceEnabled()) LOG.trace("getBuffer start in thread " + threadId ); return this.buffer; } }// End of class AuditBuffer 
public class TmAuditTlogRegionSplitPolicy extends RegionSplitPolicy { @Override protected boolean shouldSplit(){ return false; } } public TmAuditTlog (Configuration config, Connection connection) throws IOException, RuntimeException { this.config = config; this.connection = connection; this.dtmid = Integer.parseInt(config.get("dtmid")); if (LOG.isTraceEnabled()) LOG.trace("Enter TmAuditTlog constructor for dtmid " + dtmid); TLOG_TABLE_NAME = config.get("TLOG_TABLE_NAME"); int fillerSize = 2; int intThreads = 16; String numThreads = System.getenv("TM_JAVA_THREAD_POOL_SIZE"); if (numThreads != null){ intThreads = Integer.parseInt(numThreads); } tlogThreadPool = Executors.newFixedThreadPool(intThreads); controlPointDeferred = false; forceControlPoint = false; try { String controlPointFlush = System.getenv("TM_TLOG_FLUSH_CONTROL_POINT"); if (controlPointFlush != null){ forceControlPoint = (Integer.parseInt(controlPointFlush) != 0); if (LOG.isTraceEnabled()) LOG.trace("controlPointFlush != null"); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_FLUSH_CONTROL_POINT is not valid in ms.env"); } LOG.info("forceControlPoint is " + forceControlPoint); useAutoFlush = true; try { String autoFlush = System.getenv("TM_TLOG_AUTO_FLUSH"); if (autoFlush != null){ useAutoFlush = (Integer.parseInt(autoFlush) != 0); if (LOG.isTraceEnabled()) LOG.trace("autoFlush != null"); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_AUTO_FLUSH is not valid in ms.env"); } LOG.info("useAutoFlush is " + useAutoFlush); ageCommitted = false; try { String ageCommittedRecords = System.getenv("TM_TLOG_AGE_COMMITTED_RECORDS"); if (ageCommittedRecords != null){ ageCommitted = (Integer.parseInt(ageCommittedRecords) != 0); if (LOG.isTraceEnabled()) LOG.trace("ageCommittedRecords != null"); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_AGE_COMMITTED_RECORDS is not valid in ms.env"); } LOG.info("ageCommitted is " + ageCommitted); versions = 10; try { String maxVersions = 
System.getenv("TM_TLOG_MAX_VERSIONS"); if (maxVersions != null){ versions = (Integer.parseInt(maxVersions) > versions ? Integer.parseInt(maxVersions) : versions); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_MAX_VERSIONS is not valid in ms.env"); } TlogRetryDelay = 5000; // 3 seconds try { String retryDelayS = System.getenv("TM_TLOG_RETRY_DELAY"); if (retryDelayS != null){ TlogRetryDelay = (Integer.parseInt(retryDelayS) > TlogRetryDelay ? Integer.parseInt(retryDelayS) : TlogRetryDelay); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_RETRY_DELAY is not valid in ms.env"); } TlogRetryCount = 60; try { String retryCountS = System.getenv("TM_TLOG_RETRY_COUNT"); if (retryCountS != null){ TlogRetryCount = (Integer.parseInt(retryCountS) > TlogRetryCount ? Integer.parseInt(retryCountS) : TlogRetryCount); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_RETRY_COUNT is not valid in ms.env"); } tlogNumLogs = 1; try { String numLogs = System.getenv("TM_TLOG_NUM_LOGS"); if (numLogs != null) { tlogNumLogs = Math.max( 1, Integer.parseInt(numLogs)); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_NUM_LOGS is not valid in ms.env"); } disableBlockCache = false; try { String blockCacheString = System.getenv("TM_TLOG_DISABLE_BLOCK_CACHE"); if (blockCacheString != null){ disableBlockCache = (Integer.parseInt(blockCacheString) != 0); if (LOG.isTraceEnabled()) LOG.trace("disableBlockCache != null"); } } catch (NumberFormatException e) { LOG.error("TM_TLOG_DISABLE_BLOCK_CACHE is not valid in ms.env"); } LOG.info("disableBlockCache is " + disableBlockCache); switch (tlogNumLogs) { case 1: tLogHashKey = 0; // 0b0; tLogHashShiftFactor = 63; break; case 2: tLogHashKey = 1; // 0b1; tLogHashShiftFactor = 63; break; case 4: tLogHashKey = 3; // 0b11; tLogHashShiftFactor = 62; break; case 8: tLogHashKey = 7; // 0b111; tLogHashShiftFactor = 61; break; case 16: tLogHashKey = 15; // 0b1111; tLogHashShiftFactor = 60; break; case 32: tLogHashKey = 31; // 0b11111; 
tLogHashShiftFactor = 59; break; default : { LOG.error("TM_TLOG_NUM_LOGS must be 1 or a power of 2 in the range 2-32"); throw new RuntimeException(); } } if (LOG.isDebugEnabled()) LOG.debug("TM_TLOG_NUM_LOGS is " + tlogNumLogs); HColumnDescriptor hcol = new HColumnDescriptor(TLOG_FAMILY); if (disableBlockCache) { hcol.setBlockCacheEnabled(false); } hcol.setMaxVersions(versions); filler = new byte[fillerSize]; Arrays.fill(filler, (byte) ' '); startTimes = new long[50]; endTimes = new long[50]; synchTimes = new long[50]; bufferSizes = new long[50]; totalWriteTime = 0; totalSynchTime = 0; totalPrepTime = 0; totalWrites = new AtomicLong(0); totalRecords = new AtomicLong(0); minWriteTime = 1000000000; minWriteTimeBuffSize = 0; maxWriteTime = 0; maxWriteTimeBuffSize = 0; avgWriteTime = 0; minPrepTime = 1000000000; maxPrepTime = 0; avgPrepTime = 0; minSynchTime = 1000000000; maxSynchTime = 0; avgSynchTime = 0; minBufferSize = 1000; maxBufferSize = 0; avgBufferSize = 0; timeIndex = new AtomicInteger(1); asn = new AtomicLong(); // Monotonically increasing count of write operations long lvAsn = 0; if (LOG.isTraceEnabled()) LOG.trace("try new HBaseAuditControlPoint"); tLogControlPoint = new HBaseAuditControlPoint(config, connection); tlogAuditLock = new Object[tlogNumLogs]; table = new HTable[tlogNumLogs]; // Get the asn from the last control point. This ignores // any asn increments between the last control point // write and a system crash and could result in asn numbers // being reused. However this would just mean that some old // records are held onto a bit longer before cleanup and is safe. 
asn.set(tLogControlPoint.getStartingAuditSeqNum()); Admin admin = connection.getAdmin(); for (int i = 0 ; i < tlogNumLogs; i++) { tlogAuditLock[i] = new Object(); String lv_tLogName = new String(TLOG_TABLE_NAME + "_LOG_" + Integer.toHexString(i)); boolean lvTlogExists = admin.tableExists(TableName.valueOf(lv_tLogName)); if (LOG.isTraceEnabled()) LOG.trace("Tlog table " + lv_tLogName + (lvTlogExists? " exists" : " does not exist" )); HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(lv_tLogName)); desc.addFamily(hcol); if (lvTlogExists == false) { // Need to prime the asn for future writes try { if (LOG.isTraceEnabled()) LOG.trace("Creating the table " + lv_tLogName); admin.createTable(desc); asn.set(1L); // TLOG didn't exist previously, so start asn at 1 } catch (TableExistsException e) { // TODO: ignoring the exception LOG.error("Table " + lv_tLogName + " already exists"); } } if (LOG.isTraceEnabled()) LOG.trace("try new HTable index " + i); table[i] = new HTable(config, desc.getName()); table[i].setAutoFlushTo(this.useAutoFlush); } admin.close(); lvAsn = asn.get(); // This control point write needs to be delayed until after recovery completes, // but is here as a placeholder if (LOG.isTraceEnabled()) LOG.trace("Starting a control point with asn value " + lvAsn); tLogControlPointNum = tLogControlPoint.doControlPoint(lvAsn); if (LOG.isTraceEnabled()) LOG.trace("Exit constructor()"); return; } public long getNextAuditSeqNum(int nid) throws IOException{ if (LOG.isTraceEnabled()) LOG.trace("getNextAuditSeqNum node: " + nid); return tLogControlPoint.getNextAuditSeqNum(nid); } public static long asnGetAndIncrement () { if (LOG.isTraceEnabled()) LOG.trace("asnGetAndIncrement"); return asn.getAndIncrement(); } public void putSingleRecord(final long lvTransid, final long lvCommitId, final String lvTxState, final Set<TransactionRegionLocation> regions, boolean forced) throws IOException { putSingleRecord(lvTransid, lvCommitId, lvTxState, regions, forced, -1); } 
/**
 * Writes a single audit record for one transaction into a TLOG table.
 *
 * The record value is a comma-separated string: ASN, transid, state, filler,
 * commitId, and (optionally) the list of participating table names.
 *
 * @param lvTransid   transaction id being recorded
 * @param lvCommitId  commit id stored in the record
 * @param lvTxState   transaction state string (e.g. "COMMITTED", "FORGOTTEN")
 * @param regions     participating regions; when non-null, their table names are
 *                    appended to the record so branches can be re-resolved after restart
 * @param forced      when true and auto-flush is disabled, flushCommits() is issued
 * @param recoveryASN -1 for a normal record (ASN taken from the shared counter);
 *                    otherwise the ASN to use, and the record is written to the
 *                    owning node's remote TLOG table rather than the local one
 * @throws IOException if the HBase put fails
 */
public void putSingleRecord(final long lvTransid, final long lvCommitId, final String lvTxState, final Set<TransactionRegionLocation> regions, boolean forced, long recoveryASN) throws IOException {
    long threadId = Thread.currentThread().getId();
    if (LOG.isTraceEnabled()) LOG.trace("putSingleRecord start in thread " + threadId);
    StringBuilder tableString = new StringBuilder();
    String transidString = new String(String.valueOf(lvTransid));      // NOTE(review): unused local
    String commitIdString = new String(String.valueOf(lvCommitId));    // NOTE(review): unused local
    boolean lvResult = true;                                           // NOTE(review): unused local
    long lvAsn;
    long startSynch = 0;
    long endSynch = 0;
    int lv_lockIndex = 0;
    // Timing statistics use a 50-slot rolling window indexed by lv_TimeIndex.
    int lv_TimeIndex = (timeIndex.getAndIncrement() % 50 );
    long lv_TotalWrites = totalWrites.incrementAndGet();
    long lv_TotalRecords = totalRecords.incrementAndGet();
    if (regions != null) {
        // Regions passed in indicate a state record where recovery might be needed
        // following a crash. To facilitate branch notification we translate the
        // regions into table names that can then be translated back into new region
        // names following a restart. This allows us to ensure all branches reply
        // prior to cleanup.
        Iterator<TransactionRegionLocation> it = regions.iterator();
        List<String> tableNameList = new ArrayList<String>();
        while (it.hasNext()) {
            String name = new String(it.next().getRegionInfo().getTable().getNameAsString());
            if ((name.length() > 0) && (tableNameList.contains(name) != true)) {
                // We have a table name not already in the list; de-duplicate.
                tableNameList.add(name);
                tableString.append(",");
                tableString.append(name);
            }
        }
        if (LOG.isTraceEnabled()) LOG.trace("table names: " + tableString.toString() + " in thread " + threadId);
    }
    // Create the Put as directed by the hashed key.
    // Build our own hashed key: low bits select the log/lock index, the rest keep
    // the transid so keys stay unique per transaction.
    long key = (((lvTransid & tLogHashKey) << tLogHashShiftFactor) + (lvTransid & 0xFFFFFFFF));
    lv_lockIndex = (int)(lvTransid & tLogHashKey);
    if (LOG.isTraceEnabled()) LOG.trace("key: " + key + ", hex: " + Long.toHexString(key) + ", transid: " + lvTransid);
    Put p = new Put(Bytes.toBytes(key));
    if (recoveryASN == -1) {
        // This is a normal audit record so we manage the ASN.
        lvAsn = asn.getAndIncrement();
    }
    else {
        // This is a recovery audit record so use the ASN passed in.
        lvAsn = recoveryASN;
    }
    if (LOG.isTraceEnabled()) LOG.trace("transid: " + lvTransid + " state: " + lvTxState + " ASN: " + lvAsn + " in thread " + threadId);
    p.add(TLOG_FAMILY, ASN_STATE, Bytes.toBytes(String.valueOf(lvAsn) + "," + String.valueOf(lvTransid) + "," + lvTxState
          + "," + Bytes.toString(filler)
          + "," + String.valueOf(lvCommitId)
          + "," + tableString.toString()));
    if (recoveryASN != -1) {
        // We need to send this to a remote Tlog, not our local one, so open the
        // appropriate table based on the owning node of the transaction.
        if (LOG.isTraceEnabled()) LOG.trace("putSingleRecord writing to remote Tlog for transid: " + lvTransid + " state: " + lvTxState + " ASN: " + lvAsn
                 + " in thread " + threadId);
        Table recoveryTable;
        int lv_ownerNid = (int)TransactionState.getNodeId(lvTransid);
        String lv_tLogName = new String("TRAFODION._DTM_.TLOG" + String.valueOf(lv_ownerNid) + "_LOG_" + Integer.toHexString(lv_lockIndex));
        recoveryTable = connection.getTable(TableName.valueOf(lv_tLogName));
        try {
            recoveryTable.put(p);
        }
        finally {
            // Always release the remote table handle.
            recoveryTable.close();
        }
    }
    else {
        // This goes to our local TLOG.
        if (LOG.isTraceEnabled()) LOG.trace("TLOG putSingleRecord synchronizing tlogAuditLock[" + lv_lockIndex + "] in thread " + threadId );
        startSynch = System.nanoTime();
        // Serialize writers on the same hash bucket; also measures lock-wait time.
        synchronized (tlogAuditLock[lv_lockIndex]) {
            endSynch = System.nanoTime();
            try {
                if (LOG.isTraceEnabled()) LOG.trace("try table.put " + p );
                startTimes[lv_TimeIndex] = System.nanoTime();
                table[lv_lockIndex].put(p);
                if ((forced) && (useAutoFlush == false)) {
                    if (LOG.isTraceEnabled()) LOG.trace("flushing commits");
                    table[lv_lockIndex].flushCommits();
                }
                endTimes[lv_TimeIndex] = System.nanoTime();
            }
            catch (IOException e2) {
                // Create a record of the exception, then propagate to the caller.
                LOG.error("putSingleRecord Exception ", e2);
                throw e2;
            }
        } // End global synchronization
        if (LOG.isTraceEnabled()) LOG.trace("TLOG putSingleRecord synchronization complete in thread " + threadId );
        // Update rolling min/max/total statistics for lock-wait and write latency.
        synchTimes[lv_TimeIndex] = endSynch - startSynch;
        totalSynchTime += synchTimes[lv_TimeIndex];
        totalWriteTime += (endTimes[lv_TimeIndex] - startTimes[lv_TimeIndex]);
        if (synchTimes[lv_TimeIndex] > maxSynchTime) {
            maxSynchTime = synchTimes[lv_TimeIndex];
        }
        if (synchTimes[lv_TimeIndex] < minSynchTime) {
            minSynchTime = synchTimes[lv_TimeIndex];
        }
        if ((endTimes[lv_TimeIndex] - startTimes[lv_TimeIndex]) > maxWriteTime) {
            maxWriteTime = (endTimes[lv_TimeIndex] - startTimes[lv_TimeIndex]);
        }
        if ((endTimes[lv_TimeIndex] - startTimes[lv_TimeIndex]) < minWriteTime) {
            minWriteTime = (endTimes[lv_TimeIndex] - startTimes[lv_TimeIndex]);
        }
        if (lv_TimeIndex == 49) {
            timeIndex.set(1);  // Start over so we don't exceed the array size
        }
        // Periodically (every ~60K writes) emit a latency report and reset the stats.
        if (lv_TotalWrites == 59999) {
            avgWriteTime = (double) (totalWriteTime/lv_TotalWrites);
            avgSynchTime = (double) (totalSynchTime/lv_TotalWrites);
            LOG.info("TLog Audit Write Report\n" +
                     " Total records: " + lv_TotalRecords + " in " + lv_TotalWrites + " write operations\n" +
                     " Write time:\n" +
                     " Min: " + minWriteTime / 1000 + " microseconds\n" +
                     " Max: " + maxWriteTime / 1000 + " microseconds\n" +
                     " Avg: " + avgWriteTime / 1000 + " microseconds\n" +
                     " Synch time:\n" +
                     " Min: " + minSynchTime / 1000 + " microseconds\n" +
                     " Max: " + maxSynchTime / 1000 + " microseconds\n" +
                     " Avg: " + avgSynchTime / 1000 + " microseconds\n");
            // Start at index 1 since there is no startTimes[0]
            timeIndex.set(1);
            endTimes[0] = System.nanoTime();
            totalWriteTime = 0;
            totalSynchTime = 0;
            totalPrepTime = 0;
            totalRecords.set(0);
            totalWrites.set(0);
            minWriteTime = 50000;   // Some arbitrary high value
            maxWriteTime = 0;
            minWriteTimeBuffSize = 0;
            maxWriteTimeBuffSize = 0;
            minSynchTime = 50000;   // Some arbitrary high value
            maxSynchTime = 0;
            minPrepTime = 50000;    // Some arbitrary high value
            maxPrepTime = 0;
            minBufferSize = 1000;   // Some arbitrary high value
            maxBufferSize = 0;
        }
    } // End else recoveryASN == -1
    if (LOG.isTraceEnabled()) LOG.trace("putSingleRecord exit");
}

/**
 * Looks up the state of a transaction by id.
 *
 * NOTE(review): this overload appears to be a stub — it computes the lock index
 * but never issues an HBase Get, so it always returns STATE_NOTX. Confirm whether
 * this is intentional before relying on it.
 *
 * @param lvTransid transaction id to look up
 * @return numeric value of the transaction state (currently always STATE_NOTX)
 * @throws IOException declared for interface symmetry; not thrown by the visible code
 */
public static int getRecord(final long lvTransid) throws IOException {
    if (LOG.isTraceEnabled()) LOG.trace("getRecord start");
    TransState lvTxState = TransState.STATE_NOTX;
    String stateString;                                  // NOTE(review): unused local
    int lv_lockIndex = (int)(lvTransid & tLogHashKey);   // NOTE(review): computed but unused
    if (LOG.isTraceEnabled()) LOG.trace("getRecord end; returning " + lvTxState);
    return lvTxState.getValue();
}

/**
 * Reads the latest audit record for a transaction and returns its state string.
 *
 * NOTE(review): value is a byte[], so value.toString() yields the array's identity
 * string (e.g. "[B@1a2b3c"), not the cell contents — Bytes.toString(value) is
 * presumably intended here; verify before use.
 *
 * @param transidString decimal string form of the transaction id
 * @return the state token from the record, or "NO RECORD" if parsing never completes
 * @throws IOException if the HBase get fails
 */
public static String getRecord(final String transidString) throws IOException {
    if (LOG.isTraceEnabled()) LOG.trace("getRecord start");
    long lvTransid = Long.parseLong(transidString, 10);
    int lv_lockIndex = (int)(lvTransid & tLogHashKey);
    String lvTxState = new String("NO RECORD");
    Get g;
    // Build our own hashed key (same scheme as putSingleRecord).
    long key = (((lvTransid & tLogHashKey) << tLogHashShiftFactor) + (lvTransid & 0xFFFFFFFF));
    if (LOG.isTraceEnabled()) LOG.trace("key: " + key + " hex: " + Long.toHexString(key));
    g = new Get(Bytes.toBytes(key));
    try {
        Result r = table[lv_lockIndex].get(g);
        byte [] value = r.getValue(TLOG_FAMILY, ASN_STATE);
        // Record layout: ASN, transid, state, ...
        StringTokenizer st = new StringTokenizer(value.toString(), ",");
        String asnToken = st.nextElement().toString();
        String transidToken = st.nextElement().toString();
        lvTxState = st.nextElement().toString();
        if (LOG.isTraceEnabled()) LOG.trace("transid: " + transidToken + " state: " + lvTxState);
    }
    catch (IOException e) {
        LOG.error("getRecord IOException: ", e);
        throw e;
    }
    if (LOG.isTraceEnabled()) LOG.trace("getRecord end; returning String:" + lvTxState);
    return lvTxState;
}

/**
 * Deletes the audit record for the given transaction from its TLOG table.
 *
 * @param lvTransid transaction id whose record is removed
 * @return always true (failures surface as IOException)
 * @throws IOException if the HBase delete fails
 */
public static boolean deleteRecord(final long lvTransid) throws IOException {
    if (LOG.isTraceEnabled()) LOG.trace("deleteRecord start " + lvTransid);
    String transidString = new String(String.valueOf(lvTransid));   // NOTE(review): unused local
    int lv_lockIndex = (int)(lvTransid & tLogHashKey);
    Delete d;
    // Build our own hashed key (same scheme as putSingleRecord).
    long key = (((lvTransid & tLogHashKey) << tLogHashShiftFactor) + (lvTransid & 0xFFFFFFFF));
    if (LOG.isTraceEnabled()) LOG.trace("key: " + key + " hex: " + Long.toHexString(key));
    d = new Delete(Bytes.toBytes(key));
    if (LOG.isTraceEnabled()) LOG.trace("deleteRecord (" + lvTransid + ") ");
    table[lv_lockIndex].delete(d);
    if (LOG.isTraceEnabled()) LOG.trace("deleteRecord - exit");
    return true;
}

/**
 * Scans every TLOG table for this node and deletes entries whose ASN is older
 * than the given value. FORGOTTEN entries are always eligible; COMMITTED/ABORTED
 * entries are deleted directly when ageCommitted is set, otherwise only when a
 * prior version of the same row shows FORGOTTEN.
 *
 * @param lvAsn entries with ASN strictly less than this may be removed
 * @return always true (failures surface as IOException)
 * @throws IOException if any scan, get, or delete fails
 */
public boolean deleteAgedEntries(final long lvAsn) throws IOException {
    if (LOG.isTraceEnabled()) LOG.trace("deleteAgedEntries start: Entries older than " + lvAsn + " will be removed");
    Table deleteTable;
    for (int i = 0; i < tlogNumLogs; i++) {
        String lv_tLogName = new String(TLOG_TABLE_NAME + "_LOG_" + Integer.toHexString(i));
        if (LOG.isTraceEnabled()) LOG.trace("delete table is: " + lv_tLogName);
        deleteTable = connection.getTable(TableName.valueOf(lv_tLogName));
        try {
            boolean scanComplete = false;   // NOTE(review): unused local
            Scan s = new Scan();
            s.setCaching(100);              // batch rows per RPC
            s.setCacheBlocks(false);        // full-table scan; don't pollute the block cache
            ArrayList<Delete> deleteList = new ArrayList<Delete>();
            ResultScanner ss = deleteTable.getScanner(s);
            try {
                for (Result r : ss) {
                    for (Cell cell : r.rawCells()) {
                        // Record layout: ASN, transid, state, ...
                        StringTokenizer st = new StringTokenizer(Bytes.toString(CellUtil.cloneValue(cell)), ",");
                        if (LOG.isTraceEnabled()) LOG.trace("string tokenizer success ");
                        if (st.hasMoreElements()) {
                            String asnToken = st.nextElement().toString() ;
                            String transidToken = st.nextElement().toString() ;
                            String stateToken = st.nextElement().toString() ;
                            if ((Long.parseLong(asnToken) < lvAsn) && (stateToken.equals("FORGOTTEN"))) {
                                // FORGOTTEN and old enough: delete unconditionally.
                                String rowKey = new String(r.getRow());   // NOTE(review): unused local
                                Delete del = new Delete(r.getRow());
                                if (LOG.isTraceEnabled()) LOG.trace("adding transid: " + transidToken + " to delete list");
                                deleteList.add(del);
                            }
                            else if ((Long.parseLong(asnToken) < lvAsn) && (stateToken.equals("COMMITTED") || stateToken.equals("ABORTED"))) {
                                if (ageCommitted) {
                                    // Configured to age out committed/aborted records directly.
                                    Delete del = new Delete(r.getRow());
                                    if (LOG.isTraceEnabled()) LOG.trace("adding transid: " + transidToken + " to delete list");
                                    deleteList.add(del);
                                }
                                else {
                                    // Secondary search: only delete if some older version of
                                    // this row already reached FORGOTTEN.
                                    Get get = new Get(r.getRow());
                                    get.setMaxVersions(versions);  // will return last n versions of row
                                    Result lvResult = deleteTable.get(get);
                                    List<Cell> list = lvResult.getColumnCells(TLOG_FAMILY, ASN_STATE);  // returns all versions of this column
                                    for (Cell element : list) {
                                        StringTokenizer stok = new StringTokenizer(Bytes.toString(CellUtil.cloneValue(element)), ",");
                                        if (stok.hasMoreElements()) {
                                            if (LOG.isTraceEnabled()) LOG.trace("Performing secondary search on (" + transidToken + ")");
                                            asnToken = stok.nextElement().toString() ;
                                            transidToken = stok.nextElement().toString() ;
                                            stateToken = stok.nextElement().toString() ;
                                            if ((Long.parseLong(asnToken) < lvAsn) && (stateToken.equals("FORGOTTEN"))) {
                                                Delete del = new Delete(r.getRow());
                                                if (LOG.isTraceEnabled()) LOG.trace("Secondary search found new delete - adding (" + transidToken + ") with asn: " + asnToken
                                                        + " to delete list");
                                                deleteList.add(del);
                                                break;
                                            }
                                            else {
                                                if (LOG.isTraceEnabled()) LOG.trace("Secondary search skipping entry with asn: " + asnToken
                                                        + ", state: " + stateToken + ", transid: " + transidToken );
                                            }
                                        }
                                    }
                                }
                            }
                            else {
                                // Too new, or an in-flight state: keep the record.
                                if (LOG.isTraceEnabled()) LOG.trace("deleteAgedEntries skipping asn: " + asnToken + ", transid: " + transidToken
                                        + ", state: " + stateToken);
                            }
                        }
                    }
                }
            }
            finally {
                if (LOG.isTraceEnabled()) LOG.trace("deleteAgedEntries closing ResultScanner");
                ss.close();
            }
            if (LOG.isTraceEnabled()) LOG.trace("attempting to delete list with " + deleteList.size() + " elements from table " + lv_tLogName);
            deleteTable.delete(deleteList);
        }
        finally {
            deleteTable.close();
        }
    }
    if (LOG.isTraceEnabled()) LOG.trace("deleteAgedEntries - exit");
    return true;
}

/**
 * Writes an audit record for every COMMITTED transaction in the map as part of a
 * control point.
 *
 * @param map transaction id to TransactionState; iterated without a snapshot, so a
 *            concurrent writer can trigger ConcurrentModificationException
 * @return -1 on success; on ConcurrentModificationException the control point is
 *         deferred and the previous control point number is returned instead
 * @throws IOException if writing any record fails
 */
public long writeControlPointRecords (final Map<Long, TransactionState> map) throws IOException {
    int lv_lockIndex;
    int cpWrites = 0;
    long startTime = System.nanoTime();
    long endTime;
    if (LOG.isTraceEnabled()) LOG.trace("Tlog " + getTlogTableNameBase() + " writeControlPointRecords start with map size " + map.size());
    try {
        for (Map.Entry<Long, TransactionState> e : map.entrySet()) {
            try {
                Long transid = e.getKey();
                lv_lockIndex = (int)(transid & tLogHashKey);
                TransactionState value = e.getValue();
                if (value.getStatus().equals("COMMITTED")) {
                    if (LOG.isTraceEnabled()) LOG.trace("writeControlPointRecords adding record for trans (" + transid + ") : state is " + value.getStatus());
                    cpWrites++;
                    putSingleRecord(transid, value.getCommitId(), value.getStatus(), value.getParticipatingRegions(), forceControlPoint);
                }
            }
            catch (IOException ex) {
                LOG.error("formatRecord Exception ", ex);
                throw ex;
            }
        }
    }
    catch (ConcurrentModificationException cme) {
        LOG.info("writeControlPointRecords ConcurrentModificationException; delaying control point ", cme);
        // Return the current value rather than incrementing this interval.
        controlPointDeferred = true;
        return tLogControlPoint.getCurrControlPt() - 1;
    }
    endTime = System.nanoTime();
    if (LOG.isDebugEnabled()) LOG.debug("TLog Control Point Write Report\n" +
            " Total records: " + map.size() + " in " + cpWrites + " write operations\n" +
            " Write time: " + (endTime - startTime) / 1000 + " microseconds\n" );
    if (LOG.isTraceEnabled()) LOG.trace("writeControlPointRecords exit ");
    return -1L;
}

/**
 * Performs a control point: writes records for committed transactions, advances
 * the ASN, records the new control point, and prunes audit older than the
 * 5th-previous control point.
 *
 * @param map transaction id to TransactionState for the current interval
 * @return the new control point number, or (on a deferred first attempt) the
 *         value returned by writeControlPointRecords
 * @throws IOException if control point bookkeeping or pruning fails
 */
public long addControlPoint (final Map<Long, TransactionState> map) throws IOException {
    if (LOG.isDebugEnabled()) LOG.debug("addControlPoint start with map size " + map.size());
    long lvCtrlPt = 0L;
    long agedAsn;            // Writes older than this audit seq num will be deleted
    long lvAsn;              // local copy of the asn
    long key;                // NOTE(review): unused local
    boolean success = false; // NOTE(review): unused local
    if (controlPointDeferred) {
        // We deferred the control point once already due to concurrency.
        // We'll synchronize on the map this time.
        synchronized (map) {
            if (LOG.isTraceEnabled()) LOG.trace("Writing synchronized control point records");
            lvAsn = writeControlPointRecords(map);
        }
        controlPointDeferred = false;
    }
    else {
        lvAsn = writeControlPointRecords(map);
        if (lvAsn != -1L) {
            // Record writing was deferred; propagate the prior control point number.
            return lvAsn;
        }
    }
    lvAsn = asn.getAndIncrement();
    if (LOG.isTraceEnabled()) LOG.trace("lvAsn reset to: " + lvAsn);
    // Write the control point interval and the ASN to the control point table.
    lvCtrlPt = tLogControlPoint.doControlPoint(lvAsn);
    if ((lvCtrlPt - 5) > 0) {  // We'll keep 5 control points of audit
        try {
            agedAsn = tLogControlPoint.getRecord(String.valueOf(lvCtrlPt - 5));
            if (agedAsn > 0) {
                try {
                    if (LOG.isTraceEnabled()) LOG.trace("Attempting to remove TLOG writes older than asn " + agedAsn);
                    // deleteAgedEntries(agedAsn);
                    deleteEntriesOlderThanASN(agedAsn, ageCommitted);
                }
                catch (IOException e) {
                    LOG.error("deleteAgedEntries Exception ", e);
                    throw e;
                }
            }
            try {
                tLogControlPoint.deleteAgedRecords(lvCtrlPt - 5);
            }
            catch (IOException e) {
                // TODO: ignoring the exception — a missing old control point record
                // is treated as non-fatal.
                LOG.error("addControlPoint - control point record not found ");
            }
        }
        catch (IOException e) {
            LOG.error("addControlPoint IOException ", e);
            throw e;
        }
    }
    if (LOG.isDebugEnabled()) LOG.debug("addControlPoint returning " + lvCtrlPt);
    return lvCtrlPt;
}

/**
 * Populates the given TransactionState from its TLOG record: status, commit id,
 * and the set of participating regions (re-resolved from the stored table names).
 * For FORGOTTEN records, earlier row versions are searched to recover the final
 * COMMITTED/ABORTED outcome.
 *
 * @param ts TransactionState to fill in; its transaction id selects the record
 * @throws IOException if the HBase reads fail
 */
public void getTransactionState (TransactionState ts) throws IOException {
    if (LOG.isTraceEnabled()) LOG.trace("getTransactionState start; transid: " + ts.getTransactionId());
    // This request might be for a transaction not originating on this node, so we
    // need to open the appropriate Tlog
    Table unknownTransactionTable;
    long lvTransid = ts.getTransactionId();
    int lv_ownerNid = (int)TransactionState.getNodeId(lvTransid);
    int lv_lockIndex = (int)(lvTransid & tLogHashKey);
    String lv_tLogName = new String("TRAFODION._DTM_.TLOG" + String.valueOf(lv_ownerNid) + "_LOG_" + Integer.toHexString(lv_lockIndex));
    if (LOG.isTraceEnabled()) LOG.trace("getTransactionState reading from: " + lv_tLogName);
    unknownTransactionTable = connection.getTable(TableName.valueOf(lv_tLogName));
    String transidString = new String(String.valueOf(lvTransid));
    Get g;
    // Same hashed-key scheme as putSingleRecord.
    long key = (((lvTransid & tLogHashKey) << tLogHashShiftFactor) + (lvTransid & 0xFFFFFFFF));
    if (LOG.isTraceEnabled()) LOG.trace("key: " + key + ", hexkey: " + Long.toHexString(key) + ", transid: " + lvTransid);
    g = new Get(Bytes.toBytes(key));
    TransState lvTxState = TransState.STATE_NOTX;
    String stateString = "";
    String transidToken = "";
    String commitIdToken = "";
    Result r = unknownTransactionTable.get(g);
    if (r == null) {
        // NOTE(review): only logged; the code below still dereferences r.
        if (LOG.isTraceEnabled()) LOG.trace("getTransactionState: tLog result is null: " + transidString);
    }
    if (r.isEmpty()) {
        if (LOG.isTraceEnabled()) LOG.trace("getTransactionState: tLog empty result: " + transidString);
    }
    byte [] value = r.getValue(TLOG_FAMILY, ASN_STATE);
    if (value == null) {
        // No record at all: report NOTX.
        ts.setStatus(TransState.STATE_NOTX);
        if (LOG.isTraceEnabled()) LOG.trace("getTransactionState: tLog value is null: " + transidString);
        return;
    }
    if (value.length == 0) {
        ts.setStatus(TransState.STATE_NOTX);
        if (LOG.isTraceEnabled()) LOG.trace("getTransactionState: tLog transaction not found: " + transidString);
        return;
    }
    ts.clearParticipatingRegions();
    // Record layout: ASN, transid, state, filler, commitId, tableName...
    String recordString = new String (Bytes.toString(value));
    StringTokenizer st = new StringTokenizer(recordString, ",");
    if (st.hasMoreElements()) {
        String asnToken = st.nextElement().toString();
        transidToken = st.nextElement().toString();
        stateString = st.nextElement().toString();
        if (LOG.isTraceEnabled()) LOG.trace("getTransactionState: transaction: " + transidToken + " stateString is: " + stateString);
    }
    // Map the state token to a TransState value.
    if (stateString.compareTo("COMMITTED") == 0) {
        lvTxState = TransState.STATE_COMMITTED;
    }
    else if (stateString.compareTo("ABORTED") == 0) {
        lvTxState = TransState.STATE_ABORTED;
    }
    else if (stateString.compareTo("ACTIVE") == 0) {
        lvTxState = TransState.STATE_ACTIVE;
    }
    else if (stateString.compareTo("PREPARED") == 0) {
        lvTxState = TransState.STATE_PREPARED;
    }
    else if (stateString.compareTo("NOTX") == 0) {
        lvTxState = TransState.STATE_NOTX;
    }
    else if (stateString.compareTo("FORGOTTEN") == 0) {
        // Need to get the previous state record so we know how to drive the regions.
        String keyS = new String(r.getRow());   // NOTE(review): unused local
        Get get = new Get(r.getRow());
        get.setMaxVersions(versions);  // will return last n versions of row
        Result lvResult = unknownTransactionTable.get(get);
        // byte[] b = lvResult.getValue(TLOG_FAMILY, ASN_STATE);  // returns current version of value
        List<Cell> list = lvResult.getColumnCells(TLOG_FAMILY, ASN_STATE);  // returns all versions of this column
        for (Cell element : list) {
            String stringValue = new String(CellUtil.cloneValue(element));
            st = new StringTokenizer(stringValue, ",");
            if (st.hasMoreElements()) {
                if (LOG.isTraceEnabled()) LOG.trace("Performing secondary search on (" + transidToken + ")");
                String asnToken = st.nextElement().toString() ;
                transidToken = st.nextElement().toString() ;
                String stateToken = st.nextElement().toString() ;
                if ((stateToken.compareTo("COMMITTED") == 0) || (stateToken.compareTo("ABORTED") == 0)) {
                    String rowKey = new String(r.getRow());   // NOTE(review): unused local
                    if (LOG.isTraceEnabled()) LOG.trace("Secondary search found record for (" + transidToken + ") with state: " + stateToken);
                    lvTxState = (stateToken.compareTo("COMMITTED") == 0 ) ? TransState.STATE_COMMITTED : TransState.STATE_ABORTED;
                    break;
                }
                else {
                    if (LOG.isTraceEnabled()) LOG.trace("Secondary search skipping entry for (" + transidToken + ") with state: " + stateToken );
                }
            }
        }
    }
    else if (stateString.compareTo("ABORTING") == 0) {
        lvTxState = TransState.STATE_ABORTING;
    }
    else if (stateString.compareTo("COMMITTING") == 0) {
        lvTxState = TransState.STATE_COMMITTING;
    }
    else if (stateString.compareTo("PREPARING") == 0) {
        lvTxState = TransState.STATE_PREPARING;
    }
    else if (stateString.compareTo("FORGETTING") == 0) {
        lvTxState = TransState.STATE_FORGETTING;
    }
    else if (stateString.compareTo("FORGETTING_HEUR") == 0) {
        lvTxState = TransState.STATE_FORGETTING_HEUR;
    }
    else if (stateString.compareTo("BEGINNING") == 0) {
        lvTxState = TransState.STATE_BEGINNING;
    }
    else if (stateString.compareTo("HUNGCOMMITTED") == 0) {
        lvTxState = TransState.STATE_HUNGCOMMITTED;
    }
    else if (stateString.compareTo("HUNGABORTED") == 0) {
        lvTxState = TransState.STATE_HUNGABORTED;
    }
    else if (stateString.compareTo("IDLE") == 0) {
        lvTxState = TransState.STATE_IDLE;
    }
    else if (stateString.compareTo("FORGOTTEN_HEUR") == 0) {
        lvTxState = TransState.STATE_FORGOTTEN_HEUR;
    }
    else if (stateString.compareTo("ABORTING_PART2") == 0) {
        lvTxState = TransState.STATE_ABORTING_PART2;
    }
    else if (stateString.compareTo("TERMINATING") == 0) {
        lvTxState = TransState.STATE_TERMINATING;
    }
    else {
        lvTxState = TransState.STATE_BAD;
    }
    // get past the filler
    st.nextElement();
    commitIdToken = st.nextElement().toString();
    ts.setCommitId(Long.parseLong(commitIdToken));
    // Load the TransactionState object up with regions; remaining tokens are
    // table names recorded by putSingleRecord.
    while (st.hasMoreElements()) {
        String tableNameToken = st.nextToken();
        HTable table = new HTable(config, tableNameToken);
        NavigableMap<HRegionInfo, ServerName> regions = table.getRegionLocations();
        Iterator<Map.Entry<HRegionInfo, ServerName>> it = regions.entrySet().iterator();
        while(it.hasNext()) { // iterate entries.
            NavigableMap.Entry<HRegionInfo, ServerName> pairs = it.next();
            HRegionInfo regionKey = pairs.getKey();
            if (LOG.isTraceEnabled()) LOG.trace("getTransactionState: transaction: " + transidToken + " adding region: " + regionKey.getRegionNameAsString());
            ServerName serverValue = regions.get(regionKey);
            String hostAndPort = new String(serverValue.getHostAndPort());
            StringTokenizer tok = new StringTokenizer(hostAndPort, ":");
            String hostName = new String(tok.nextElement().toString());          // NOTE(review): unused local
            int portNumber = Integer.parseInt(tok.nextElement().toString());     // NOTE(review): unused local
            TransactionRegionLocation loc = new TransactionRegionLocation(regionKey, serverValue);
            ts.addRegion(loc);
        }
    }
    ts.setStatus(lvTxState);
    if (LOG.isTraceEnabled()) LOG.trace("getTransactionState: returning transid: " + ts.getTransactionId() + " state: " + lvTxState);
    if (LOG.isTraceEnabled()) LOG.trace("getTransactionState end transid: " + ts.getTransactionId());
    return;
}

/** Returns the base name used to build this node's TLOG table names. */
public String getTlogTableNameBase() {
    return TLOG_TABLE_NAME;
}

/**
 * Method  : deleteEntriesOlderThanASN
 * Params  : pv_ASN - ASN before which all audit records will be deleted
 *           pv_ageCommitted - when true, COMMITTED/ABORTED records are aged directly
 * Return  : void
 * Purpose : Delete transaction records which are no longer needed. The work is
 *           pushed to each TLOG region via a coprocessor-style callable and the
 *           calls are awaited one at a time.
 */
public void deleteEntriesOlderThanASN(final long pv_ASN, final boolean pv_ageCommitted) throws IOException {
    int loopIndex = 0;
    long threadId = Thread.currentThread().getId();
    // This TransactionState object is just a mechanism to keep track of the asynch
    // rpc calls sent to regions in order to retrieve the desired set of transactions.
    TransactionState transactionState = new TransactionState(0);
    CompletionService<Integer> compPool = new ExecutorCompletionService<Integer>(tlogThreadPool);
    if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASN: " + pv_ASN + ", in thread: " + threadId);
    List<HRegionLocation> regionList;
    // For every Tlog table for this node
    for (int index = 0; index < tlogNumLogs; index++) {
        String lv_tLogName = new String("TRAFODION._DTM_.TLOG" + String.valueOf(this.dtmid) + "_LOG_" + Integer.toHexString(index));
        regionList = connection.getRegionLocator(TableName.valueOf(lv_tLogName)).getAllRegionLocations();
        loopIndex++;
        int regionIndex = 0;
        // For every region in this table
        for (HRegionLocation location : regionList) {
            regionIndex++;
            final byte[] regionName = location.getRegionInfo().getRegionName();
            compPool.submit(new TlogCallable(transactionState, location, connection) {
                public Integer call() throws IOException {
                    if (LOG.isTraceEnabled()) LOG.trace("before deleteEntriesOlderThanASNX() ASN: " + pv_ASN);
                    return deleteEntriesOlderThanASNX(regionName, pv_ASN, pv_ageCommitted);
                }
            });
            boolean loopBack = false;
            // Wait for this region's reply; retry the take() if interrupted.
            do {
                try {
                    loopBack = false;
                    int partialResult = compPool.take().get();
                    if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASN partial result: " + partialResult + " loopIndex " + loopIndex
                            + " regionIndex " + regionIndex);
                }
                catch (InterruptedException e2) {
                    LOG.error("exception retieving reply in deleteEntriesOlderThanASN for interval ASN: " + pv_ASN + " ", e2);
                    loopBack = true;
                }
                catch (ExecutionException ee) {
                    LOG.error("Execution exception", ee);
                    throw new IOException(ee);
                }
            } while (loopBack);
        }
    }
    if (LOG.isTraceEnabled()) LOG.trace("deleteEntriesOlderThanASN tlog callable requests completed in thread " + threadId);
    return;
}
}
apache-2.0
alump/Vibrate
vibrate-demo/src/main/java/org/vaadin/alump/vibrate/demo/VibrateDemoUI.java
4789
/** * VibrateDemoUI.java (Vibrate) * * Copyright 2013 Vaadin Ltd, Sami Viitanen <alump@vaadin.org> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.vaadin.alump.vibrate.demo; import com.vaadin.annotations.Push; import com.vaadin.annotations.Theme; import com.vaadin.annotations.VaadinServletConfiguration; import com.vaadin.server.VaadinRequest; import com.vaadin.server.VaadinServlet; import com.vaadin.ui.Button; import com.vaadin.ui.Label; import com.vaadin.ui.UI; import com.vaadin.ui.VerticalLayout; import org.vaadin.alump.fancylayouts.FancyNotifications; import org.vaadin.alump.vibrate.Vibrate; import com.vaadin.annotations.Title; import javax.servlet.annotation.WebServlet; @Title("Vibrate Demo") @Theme("demo") @Push public class VibrateDemoUI extends UI { @WebServlet(value = "/*") @VaadinServletConfiguration(productionMode = false, ui = VibrateDemoUI.class, widgetset = "org.vaadin.alump.vibrate.demo.gwt.VibrateDemoWidgetSet") public static class FancyLayoutsUIServlet extends VaadinServlet { } private FancyNotifications notifications; @Override protected void init(VaadinRequest vaadinRequest) { VerticalLayout layout = new VerticalLayout(); layout.setSizeFull(); setContent(layout); notifications = new FancyNotifications(); layout.addComponent(notifications); Label label = new Label("This demo works only with Android Chrome"); layout.addComponent(label); Button singleButton = new Button("Single vibrate"); singleButton.setWidth("100%"); 
layout.addComponent(singleButton); singleButton.addClickListener(singleClickListener); Button patternButton = new Button("Pattern vibrate"); patternButton.setWidth("100%"); layout.addComponent(patternButton); patternButton.addClickListener(patternClickListener); Button stopButton = new Button("End vibrate"); stopButton.setWidth("100%"); layout.addComponent(stopButton); stopButton.addClickListener(stopClickListener); Button notificationButton = new Button("Delayed notification vibrate"); notificationButton.setWidth("100%"); layout.addComponent(notificationButton); notificationButton.addClickListener(notificationClickListener); } private Button.ClickListener singleClickListener = new Button.ClickListener() { @Override public void buttonClick(Button.ClickEvent clickEvent) { Vibrate.vibrate(500); } }; private Button.ClickListener patternClickListener = new Button.ClickListener() { @Override public void buttonClick(Button.ClickEvent clickEvent) { Vibrate.vibrate(Vibrate.createPattern().vibrate(800).delay(400).vibrate(400).delay(200).vibrate(200).delay(100).vibrate(100)); } }; private Button.ClickListener stopClickListener = new Button.ClickListener() { @Override public void buttonClick(Button.ClickEvent clickEvent) { Vibrate.stopVibrate(); } }; private Button.ClickListener notificationClickListener = new Button.ClickListener() { @Override public void buttonClick(Button.ClickEvent clickEvent) { final int delay = (int)Math.ceil(Math.random() * 10000.0); Thread thread = new Thread(new DelayedNotification(delay)); thread.start(); } }; private class DelayedNotification implements Runnable { private int delayMs = 0; public DelayedNotification(int delayMs) { this.delayMs = delayMs; } @Override public void run() { try { Thread.sleep(delayMs); if(VibrateDemoUI.this.isAttached()) { VibrateDemoUI.this.access(new Runnable() { @Override public void run() { notifications.showNotification(null, "Hello Vibrating World"); Vibrate.vibrate(300); } }); } } catch (InterruptedException e) { 
e.printStackTrace(); } } }; }
apache-2.0
jirikoud/GurpsTable
app/src/main/java/cz/jksoftware/gurpstable/activity/MainActivity.java
7079
package cz.jksoftware.gurpstable.activity; import android.app.Activity; import android.content.Intent; import android.support.design.widget.TextInputEditText; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.Button; import android.widget.RadioButton; import cz.jksoftware.gurpstable.dialog.SpellEnergyDialog; import cz.jksoftware.gurpstable.dialog.SpellFatalDialog; import cz.jksoftware.gurpstable.R; public class MainActivity extends AppCompatActivity implements SpellEnergyDialog.ResultListener, SpellFatalDialog.ResultListener { public static final int REQUEST_SPELL_RESULT = 100; private TextInputEditText mTextEditLevel; private TextInputEditText mTextEditThaumatology; private TextInputEditText mTextEditRitual; private Button mButtonPlus; private Button mButtonMinus; private RadioButton mButtonThaumatology; private RadioButton mButtonRitual; private Button mButtonSpell; private Button mButtonTotal; private Button mButtonFatal; private int getLevel() { int level = 0; try { level = Integer.parseInt(mTextEditLevel.getText().toString()); } catch (Exception exc) { } return level; } private int getThaumatologyLevel() { return (18 - getLevel()); } private int getRitualLevel() { return (15 - getLevel()); } private void updateLevel(int level) { mTextEditLevel.setText(String.valueOf(level)); mTextEditThaumatology.setText(String.valueOf(getThaumatologyLevel())); mTextEditRitual.setText(String.valueOf(getRitualLevel())); } private void prepareButtons() { mButtonPlus.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { int level = getLevel(); level++; updateLevel(level); } }); mButtonMinus.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { int level = getLevel(); if (level > 0) { level--; } updateLevel(level); } }); mButtonThaumatology.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { 
mButtonRitual.setChecked(false); } }); mButtonRitual.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { mButtonThaumatology.setChecked(false); } }); mButtonTotal.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { int level = getLevel(); level -= level; if (level < 0) { level = 0; } updateLevel(level); } }); mButtonSpell.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { SpellEnergyDialog dialog = SpellEnergyDialog.newInstance(MainActivity.this); dialog.show(getSupportFragmentManager(), "energy"); } }); mButtonFatal.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { SpellFatalDialog dialog = SpellFatalDialog.newInstance(MainActivity.this); dialog.show(getSupportFragmentManager(), "energy"); } }); } @Override public void onSpellFailed() { Intent intent = new Intent(this, SpellResultActivity.class); intent.putExtra(SpellResultActivity.EXTRA_LIMIT, mButtonThaumatology.isChecked() ? getThaumatologyLevel() : getRitualLevel()); intent.putExtra(SpellResultActivity.EXTRA_SPELL_STATE, SpellResultActivity.SPELL_STATE_FAIL); startActivityForResult(intent, REQUEST_SPELL_RESULT); } @Override public void onSpellSuccess(int energy) { Intent intent = new Intent(this, SpellResultActivity.class); intent.putExtra(SpellResultActivity.EXTRA_LIMIT, mButtonThaumatology.isChecked() ? getThaumatologyLevel() : getRitualLevel()); intent.putExtra(SpellResultActivity.EXTRA_SPELL_STATE, SpellResultActivity.SPELL_STATE_SUCCESS); intent.putExtra(SpellResultActivity.EXTRA_ENERGY, energy); startActivityForResult(intent, REQUEST_SPELL_RESULT); } @Override public void onSpellFatal(int energy) { Intent intent = new Intent(this, SpellResultActivity.class); intent.putExtra(SpellResultActivity.EXTRA_LIMIT, mButtonThaumatology.isChecked() ? 
getThaumatologyLevel() : getRitualLevel()); intent.putExtra(SpellResultActivity.EXTRA_SPELL_STATE, SpellResultActivity.SPELL_STATE_FATAL); intent.putExtra(SpellResultActivity.EXTRA_ENERGY, energy); startActivityForResult(intent, REQUEST_SPELL_RESULT); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mTextEditLevel = (TextInputEditText) findViewById(R.id.text_edit_level); mTextEditThaumatology = (TextInputEditText) findViewById(R.id.text_edit_thauma); mTextEditRitual = (TextInputEditText) findViewById(R.id.text_edit_ritual); mButtonPlus = (Button) findViewById(R.id.button_plus); mButtonMinus = (Button) findViewById(R.id.button_minus); mButtonThaumatology = (RadioButton) findViewById(R.id.button_thaumatology); mButtonRitual = (RadioButton) findViewById(R.id.button_ritual); mButtonSpell = (Button) findViewById(R.id.button_spell); mButtonTotal = (Button) findViewById(R.id.button_total); mButtonFatal = (Button) findViewById(R.id.button_fatal); prepareButtons(); updateLevel(0); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == REQUEST_SPELL_RESULT) { if (resultCode == Activity.RESULT_OK) { boolean isFatal = data.getBooleanExtra(SpellResultActivity.EXTRA_IS_FATAL, false); if (!isFatal) { int level = getLevel(); boolean isEffect = data.getBooleanExtra(SpellResultActivity.EXTRA_IS_EFFECT, false); if (isEffect) { level -= 5; if (level < 0) { level = 0; } } else { level++; } updateLevel(level); } return; } } super.onActivityResult(requestCode, resultCode, data); } }
apache-2.0
dmagda/incubator-ignite
modules/core/src/test/java/org/apache/ignite/platform/PlatformComputeEchoTask.java
6275
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.platform;

import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteException;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeTaskAdapter;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.jetbrains.annotations.Nullable;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Test task that "echoes" back a fixed sample value whose kind is selected by
 * the integer task argument (one of the {@code TYPE_*} constants below). Used
 * by platform-interop tests to verify that each value type survives the
 * Java &lt;-&gt; platform round trip.
 */
public class PlatformComputeEchoTask extends ComputeTaskAdapter<Integer, Object> {
    /** Type: NULL. */
    private static final int TYPE_NULL = 0;

    /** Type: byte. */
    private static final int TYPE_BYTE = 1;

    /** Type: bool. */
    private static final int TYPE_BOOL = 2;

    /** Type: short. */
    private static final int TYPE_SHORT = 3;

    /** Type: char. */
    private static final int TYPE_CHAR = 4;

    /** Type: int. */
    private static final int TYPE_INT = 5;

    /** Type: long. */
    private static final int TYPE_LONG = 6;

    /** Type: float. */
    private static final int TYPE_FLOAT = 7;

    /** Type: double. */
    private static final int TYPE_DOUBLE = 8;

    /** Type: array. */
    private static final int TYPE_ARRAY = 9;

    /** Type: collection. */
    private static final int TYPE_COLLECTION = 10;

    /** Type: map. */
    private static final int TYPE_MAP = 11;

    /** Type: binary object which exists in all platforms. */
    private static final int TYPE_BINARY = 12;

    /** Type: binary object which exists only in Java. */
    private static final int TYPE_BINARY_JAVA = 13;

    /** Type: object array. */
    private static final int TYPE_OBJ_ARRAY = 14;

    /** Type: binary object array. */
    private static final int TYPE_BINARY_ARRAY = 15;

    /** Type: enum. */
    private static final int TYPE_ENUM = 16;

    /** Type: enum array. */
    private static final int TYPE_ENUM_ARRAY = 17;

    /** Type: enum read from a binary object's field (fetched from cache). */
    private static final int TYPE_ENUM_FIELD = 18;

    /** {@inheritDoc} */
    // Maps the single echo job onto the first node of the subgrid.
    @Nullable @Override public Map<? extends ComputeJob, ClusterNode> map(List<ClusterNode> subgrid,
        @Nullable Integer arg) {
        return Collections.singletonMap(new EchoJob(arg), F.first(subgrid));
    }

    /** {@inheritDoc} */
    // Single job, so the reduce step simply unwraps the one result.
    @Nullable @Override public Object reduce(List<ComputeJobResult> results) {
        return results.get(0).getData();
    }

    /**
     * Job that produces the sample value for the requested type code.
     */
    private static class EchoJob extends ComputeJobAdapter {
        /** Type code (one of the TYPE_* constants). */
        private Integer type;

        /** Ignite. */
        @IgniteInstanceResource
        private Ignite ignite;

        /**
         * Constructor.
         *
         * @param type Result type.
         */
        public EchoJob(Integer type) {
            this.type = type;
        }

        /** {@inheritDoc} */
        @Nullable @Override public Object execute() {
            switch (type) {
                case TYPE_NULL:
                    return null;

                case TYPE_BYTE:
                    return (byte)1;

                case TYPE_BOOL:
                    return true;

                case TYPE_SHORT:
                    return (short)1;

                case TYPE_CHAR:
                    return (char)1;

                case TYPE_INT:
                    return 1;

                case TYPE_LONG:
                    return (long)1;

                case TYPE_FLOAT:
                    return (float)1;

                case TYPE_DOUBLE:
                    return (double)1;

                case TYPE_ARRAY:
                    return new int[] { 1 };

                case TYPE_COLLECTION:
                    return Collections.singletonList(1);

                case TYPE_MAP:
                    return Collections.singletonMap(1, 1);

                case TYPE_BINARY:
                    return new PlatformComputeBinarizable(1);

                case TYPE_BINARY_JAVA:
                    return new PlatformComputeJavaBinarizable(1);

                case TYPE_OBJ_ARRAY:
                    return new String[] { "foo", "bar", "baz" };

                case TYPE_BINARY_ARRAY:
                    return new PlatformComputeBinarizable[] {
                        new PlatformComputeBinarizable(1),
                        new PlatformComputeBinarizable(2),
                        new PlatformComputeBinarizable(3)
                    };

                case TYPE_ENUM:
                    return PlatformComputeEnum.BAR;

                case TYPE_ENUM_ARRAY:
                    return new PlatformComputeEnum[] {
                        PlatformComputeEnum.BAR,
                        PlatformComputeEnum.BAZ,
                        PlatformComputeEnum.FOO
                    };

                case TYPE_ENUM_FIELD:
                    // Reads a pre-populated binary object from the cache and
                    // extracts its "interopEnum" field. NOTE(review): the null
                    // cache name presumably refers to the default cache of this
                    // Ignite version — confirm against the test setup.
                    IgniteCache<Integer, BinaryObject> cache = ignite.cache(null).withKeepBinary();
                    BinaryObject obj = cache.get(TYPE_ENUM_FIELD);
                    BinaryObject val = obj.field("interopEnum");

                    return val.deserialize();

                default:
                    throw new IgniteException("Unknown type: " + type);
            }
        }
    }
}
apache-2.0
gravitee-io/graviteeio-access-management
gravitee-am-gateway/gravitee-am-gateway-handler/gravitee-am-gateway-handler-oidc/src/main/java/io/gravitee/am/gateway/handler/oauth2/exception/BadClientCredentialsException.java
918
/** * Copyright (C) 2015 The Gravitee team (http://gravitee.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gravitee.am.gateway.handler.oauth2.exception; /** * @author David BRASSELY (david.brassely at graviteesource.com) * @author GraviteeSource Team */ public class BadClientCredentialsException extends io.gravitee.am.common.exception.oauth2.BadClientCredentialsException { }
apache-2.0
iritgo/iritgo-aktario
aktario-xp/src/main/java/de/iritgo/aktario/xp/CodeEditor.java
2600
/**
 * This file is part of the Iritgo/Aktario Framework.
 *
 * Copyright (C) 2005-2011 Iritgo Technologies.
 * Copyright (C) 2003-2005 BueroByte GbR.
 *
 * Iritgo licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package de.iritgo.aktario.xp;

import org.syntax.jedit.JEditTextArea;

import java.awt.event.AdjustmentListener;

/**
 * Source code editor based on the jEdit text area, exposing the scroll bars
 * for adjustment listeners and adding a bounds-checked select().
 *
 * @version $Id: CodeEditor.java,v 1.8 2006/09/25 10:34:32 grappendorf Exp $
 */
public class CodeEditor extends JEditTextArea
{
	/** Serialization version id. */
	private static final long serialVersionUID = 1L;

	/**
	 * Creates a new CodeEditor with the default settings.
	 */
	public CodeEditor()
	{
	}

	/**
	 * Add an adjustment listener to the horizontal scroll bar.
	 *
	 * @param listener The listener to add.
	 */
	public void addHorizontalAdjustmentListener(AdjustmentListener listener)
	{
		horizontal.addAdjustmentListener(listener);
	}

	/**
	 * Add an adjustment listener to the vertical scroll bar.
	 *
	 * @param listener The listener to add.
	 */
	public void addVerticalAdjustmentListener(AdjustmentListener listener)
	{
		vertical.addAdjustmentListener(listener);
	}

	/**
	 * Get the first visible line.
	 *
	 * @return The first visible line.
	 */
	public int getFirstVisibleLine()
	{
		return vertical.getValue();
	}

	/**
	 * Get the first visible column.
	 *
	 * @return The first visible column.
	 */
	public int getFirstVisibleColumn()
	{
		return horizontal.getValue();
	}

	/**
	 * Selects from the start offset to the end offset, silently ignoring the
	 * request when the (normalized) range falls outside the document. The
	 * actual selection — including caret placement for reversed ranges — is
	 * delegated unchanged to the superclass.
	 *
	 * @param start The start offset
	 * @param end The end offset
	 */
	public void select(int start, int end)
	{
		// Normalize only for the range check; the original computed newStart,
		// newEnd and an unused newBias flag and then discarded them all
		// (the dead newBias has been removed).
		int selectionStart = Math.min(start, end);
		int selectionEnd = Math.max(start, end);

		if (selectionStart < 0 || selectionEnd > getDocumentLength())
		{
			// Out-of-range selection: ignore, matching the original behavior.
			return;
		}

		super.select(start, end);
	}
}
apache-2.0
moley/leguan
leguan-plugins/leguan-plugin-scanclasspath/src/integrationtest/java/org/leguan/scanclasspath/ScanClasspathIntegrationTest.java
498
package org.leguan.scanclasspath; import org.junit.Test; import org.leguan.gradle.GradleIntegrationtestUtils; import java.io.IOException; public class ScanClasspathIntegrationTest { @Test public void start () throws IOException { GradleIntegrationtestUtils integrationtestUtils = new GradleIntegrationtestUtils(); integrationtestUtils.startTestProjectWithPlugin("testprojectUnusedDeps", "leguan-plugin-scanclasspath", ScanClasspathPlugin.SCAN_CLASSPATH_TASKNAME, getClass()); } }
apache-2.0
wuman/titan-android
src/main/java/com/thinkaurelius/titan/graphdb/vertices/StandardTitanVertex.java
4098
package com.thinkaurelius.titan.graphdb.vertices;

import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.thinkaurelius.titan.core.InvalidElementException;
import com.thinkaurelius.titan.graphdb.adjacencylist.AdjacencyList;
import com.thinkaurelius.titan.graphdb.adjacencylist.AdjacencyListFactory;
import com.thinkaurelius.titan.graphdb.adjacencylist.InitialAdjListFactory;
import com.thinkaurelius.titan.graphdb.adjacencylist.ModificationStatus;
import com.thinkaurelius.titan.graphdb.query.AtomicQuery;
import com.thinkaurelius.titan.graphdb.relations.EdgeDirection;
import com.thinkaurelius.titan.graphdb.relations.InternalRelation;
import com.thinkaurelius.titan.graphdb.transaction.InternalTitanTransaction;
import com.tinkerpop.blueprints.Direction;

import java.util.concurrent.locks.ReentrantLock;

/**
 * Vertex implementation that keeps its incident relations in two adjacency
 * lists, one per direction. The lists are immutable-on-write: mutation methods
 * return a (possibly new) list that is re-assigned to the volatile field while
 * holding {@code adjLock}.
 */
public class StandardTitanVertex extends AbstractTitanVertex {

    // Volatile so readers see the latest list without taking adjLock;
    // writes are serialized through adjLock below.
    private volatile AdjacencyList inEdges;
    private volatile AdjacencyList outEdges;

    // Guards the read-modify-write of both adjacency list fields.
    protected final ReentrantLock adjLock = new ReentrantLock();

    public StandardTitanVertex(InternalTitanTransaction g, AdjacencyListFactory adjList) {
        super(g);
        inEdges = adjList.emptyList();
        outEdges = adjList.emptyList();
    }

    /**
     * Adds the relation to the adjacency list(s) for each direction it implies
     * on this vertex. Returns whether the relation was newly added. For a
     * loop edge (both directions on this vertex) the IN and OUT additions must
     * agree — if only one side registered a change, the element is invalid.
     */
    @Override
    public boolean addRelation(InternalRelation e, boolean isNew) {
        assert isAvailable();
        Preconditions.checkArgument(e.isIncidentOn(this), "TitanRelation is not incident on this node!");
        boolean success = false;
        boolean loadIn = false;
        ModificationStatus status = new ModificationStatus();
        if (EdgeDirection.IN.impliedBy(e.getDirection(this))) {
            loadIn = true;
            adjLock.lock();
            try {
                inEdges = inEdges.addEdge(e,status);
            } finally {
                adjLock.unlock();
            }
            success = status.hasChanged();
        }
        if (EdgeDirection.OUT.impliedBy(e.getDirection(this))) {
            adjLock.lock();
            try {
                // OUT side additionally passes whether the edge type is
                // functional (at most one such edge per vertex).
                outEdges = outEdges.addEdge(e, e.getType().isFunctional(), status);
            } finally {
                adjLock.unlock();
            }
            if (status.hasChanged()) {
                // OUT changed; for a loop edge IN must have changed too.
                if (loadIn && !success) throw new InvalidElementException("Could only load one direction of loop-edge",e);
                success=true;
            } else {
                // OUT unchanged; for a loop edge IN must be unchanged as well.
                if (loadIn && success) throw new InvalidElementException("Could only load one direction of loop-edge",e);
                success=false;
            }
        }
        return success;
    }

    /**
     * Returns the relations matching the query, concatenating the per-direction
     * iterables allowed by the query and applying the query filter at the end.
     *
     * @param loadRemaining when true, missing edges are loaded from storage
     *        first via {@code ensureLoadedEdges}.
     */
    @Override
    public Iterable<InternalRelation> getRelations(AtomicQuery query, boolean loadRemaining) {
        assert isAvailable();
        if (loadRemaining) ensureLoadedEdges(query);

        Iterable<InternalRelation> iter=AdjacencyList.Empty;
        for (EdgeDirection dir : EdgeDirection.values()) {
            if (!query.isAllowedDirection(dir)) continue;
            Iterable<InternalRelation> siter;
            switch(dir) {
                case OUT:
                    siter = VertexUtil.getQuerySpecificIterable(outEdges, query);
                    break;
                case IN:
                    siter = VertexUtil.getQuerySpecificIterable(inEdges, query);
                    //if (query.isAllowedDirection(EdgeDirection.OUT)) siter = VertexUtil.filterLoopEdges(siter,this);
                    break;
                default: throw new AssertionError("Unrecognized direction: "+ dir);
            }
            // Avoid wrapping in concat() when one side is empty.
            if (iter==AdjacencyList.Empty) iter = siter;
            else if (siter!=AdjacencyList.Empty) iter = Iterables.concat(iter, siter);
        }
        iter = VertexUtil.filterByQuery(query, iter);
        return iter;
    }

    /**
     * Removes the relation from the adjacency list(s) of each direction it
     * implies on this vertex. NOTE(review): unlike addRelation, this mutates
     * without taking adjLock — presumably safe in the caller's context;
     * confirm before relying on concurrent removal.
     */
    @Override
    public void removeRelation(InternalRelation e) {
        Preconditions.checkArgument(isAvailable() && e.isIncidentOn(this));
        Direction dir = e.getDirection(this);
        if (EdgeDirection.IN.impliedBy(dir))
            inEdges.removeEdge(e,ModificationStatus.none);
        if (EdgeDirection.OUT.impliedBy(dir))
            outEdges.removeEdge(e,ModificationStatus.none);
    }

    /** Removes the vertex and drops both adjacency lists back to empty. */
    @Override
    public synchronized void remove() {
        super.remove();
        inEdges=InitialAdjListFactory.EmptyFactory.emptyList();
        outEdges=InitialAdjListFactory.EmptyFactory.emptyList();
    }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-iot/src/main/java/com/amazonaws/services/iot/model/transform/CancelAuditTaskResultJsonUnmarshaller.java
1615
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.iot.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.iot.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import static com.fasterxml.jackson.core.JsonToken.*; /** * CancelAuditTaskResult JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CancelAuditTaskResultJsonUnmarshaller implements Unmarshaller<CancelAuditTaskResult, JsonUnmarshallerContext> { public CancelAuditTaskResult unmarshall(JsonUnmarshallerContext context) throws Exception { CancelAuditTaskResult cancelAuditTaskResult = new CancelAuditTaskResult(); return cancelAuditTaskResult; } private static CancelAuditTaskResultJsonUnmarshaller instance; public static CancelAuditTaskResultJsonUnmarshaller getInstance() { if (instance == null) instance = new CancelAuditTaskResultJsonUnmarshaller(); return instance; } }
apache-2.0
hazendaz/assertj-core
src/test/java/org/assertj/core/util/Files_newTemporaryFile_Test.java
1142
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2021 the original author or authors. */ package org.assertj.core.util; import static org.assertj.core.api.Assertions.assertThat; import java.io.File; import org.junit.jupiter.api.Test; /** * Tests for <code>{@link Files#newTemporaryFolder()}</code>. * * @author Alex Ruiz * @author Yvonne Wang */ class Files_newTemporaryFile_Test extends Files_TestCase { @Test void should_create_new_temporary_file() { File f = null; try { f = Files.newTemporaryFile(); assertThat(f).isFile(); } finally { if (f != null) f.delete(); } } }
apache-2.0
rogerfanrui/dubbo-monitor
src/main/java/com/njwd/rpc/monitor/MonitorApplication.java
1005
package com.njwd.rpc.monitor; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.Banner.Mode; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.EnableAspectJAutoProxy; import org.springframework.scheduling.annotation.EnableAsync; import org.springframework.web.servlet.config.annotation.EnableWebMvc; @SpringBootApplication @ComponentScan({ "com.njwd.rpc.monitor.**" }) @EnableAutoConfiguration public class MonitorApplication { //main public static void main(String[] args) { SpringApplication app = new SpringApplication(MonitorApplication.class); app.setBannerMode(Mode.OFF); app.run(args); } }
apache-2.0
sankin/spark-starter
src/main/java/com/sankin/spark/words/service/WordCountService.java
124
package com.sankin.spark.words.service; public interface WordCountService { void process(String input, String output); }
apache-2.0
gingerwizard/elasticsearch
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicListener.java
1858
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.eql.execution.search; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; import org.elasticsearch.xpack.eql.execution.payload.SearchResponsePayload; import org.elasticsearch.xpack.eql.session.Payload; import static org.elasticsearch.xpack.eql.execution.search.RuntimeUtils.logSearchResponse; public class BasicListener implements ActionListener<SearchResponse> { private static final Logger log = RuntimeUtils.QUERY_LOG; private final ActionListener<Payload> listener; public BasicListener(ActionListener<Payload> listener) { this.listener = listener; } @Override public void onResponse(SearchResponse response) { try { ShardSearchFailure[] failures = response.getShardFailures(); if (CollectionUtils.isEmpty(failures) == false) { listener.onFailure(new EqlIllegalArgumentException(failures[0].reason(), failures[0].getCause())); } else { if (log.isTraceEnabled()) { logSearchResponse(response, log); } listener.onResponse(new SearchResponsePayload(response)); } } catch (Exception ex) { onFailure(ex); } } @Override public void onFailure(Exception ex) { listener.onFailure(ex); } }
apache-2.0
CODA-Masters/Little-Nibolas
LittleNibolas-Version72h/core/src/com/codamasters/screens/GameOverActual.java
5691
package com.codamasters.screens;

import static com.badlogic.gdx.scenes.scene2d.actions.Actions.moveTo;
import static com.badlogic.gdx.scenes.scene2d.actions.Actions.run;
import static com.badlogic.gdx.scenes.scene2d.actions.Actions.sequence;

import com.codamasters.LittleNibolas;
import com.codamasters.LNHelpers.AssetLoaderSpace;
import com.codamasters.LNHelpers.AssetsLoader;
import com.codamasters.LNHelpers.AssetsLoaderRome;
import com.codamasters.tween.ActorAccessor;
import com.codamasters.tween.SpriteAccessor;

import aurelienribon.tweenengine.BaseTween;
import aurelienribon.tweenengine.Timeline;
import aurelienribon.tweenengine.Tween;
import aurelienribon.tweenengine.TweenCallback;
import aurelienribon.tweenengine.TweenManager;

import com.badlogic.gdx.Game;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.Application.ApplicationType;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.scenes.scene2d.ui.TextButton;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import com.badlogic.gdx.utils.viewport.Viewport;

/**
 * Game-over screen: shows the score (or a new-record message), a "retry"
 * button that restarts the Rome level and a "back to menu" button, with
 * tween-animated fade-ins and a pulsing red heading.
 */
public class GameOverActual implements Screen {

    private Stage stage;
    private Skin skin;
    private Table table;
    private TweenManager tweenManager;
    // Label showing either the new record or the obtained score.
    private Label puntos;
    // NOTE(review): splash and batch appear never to be assigned or used in
    // this class — candidates for removal; confirm before deleting.
    private Sprite splash;
    private SpriteBatch batch;

    @Override
    public void render(float delta) {
        // Clear, advance and draw the scene graph, then step the animations.
        Gdx.gl.glClearColor(0, 0, 0, 0);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);

        stage.act(delta);
        stage.draw();

        tweenManager.update(delta);
    }

    @Override
    public void resize(int width, int height) {
        stage.getViewport().update(width, height, false);
        table.invalidateHierarchy();
    }

    @Override
    public void show() {
        stage = new Stage();
        //Gdx.graphics.setDisplayMode((int) (Gdx.graphics.getHeight() / 1.5f), Gdx.graphics.getHeight(), false);

        Gdx.input.setInputProcessor(stage);

        skin = new Skin(Gdx.files.internal("ui/menuSkin.json"), new TextureAtlas("ui/atlas.pack"));

        table = new Table(skin);
        table.setFillParent(true);

        // creating heading
        Label heading = new Label("Game Over", skin, "big");
        heading.setFontScale(3);

        // Score label: new-record message (and persisted high score) when the
        // score beats the stored high score, plain score otherwise.
        int score = ScreenRome.getScore();
        int highscore = ScreenRome.getHighScore();

        if (score > highscore) {
            puntos = new Label("Nuevo record:" + score + "!!!", skin);
            puntos.setFontScale(1);
            AssetLoaderSpace.setHighScore(score);
        } else {
            puntos = new Label("Puntuacion obtenida:" + score, skin);
            puntos.setFontScale(1);
        }

        // creating buttons
        // "Retry": slide the stage down, reload the player assets and restart
        // the Rome level.
        TextButton buttonPlay = new TextButton("Reintentar", skin, "big");
        buttonPlay.addListener(new ClickListener() {
            @Override
            public void clicked(InputEvent event, float x, float y) {
                stage.addAction(sequence(moveTo(0, -stage.getHeight(), .5f), run(new Runnable() {
                    @Override
                    public void run() {
                        AssetsLoaderRome.reloadNibolas();
                        ((Game) Gdx.app.getApplicationListener()).setScreen(new ScreenRome());
                    }
                })));
            }
        });
        buttonPlay.pad(10, 60, 10, 60);

        // "Back to menu": slide the stage down, restart the menu music and
        // return to the level menu.
        TextButton buttonSettings = new TextButton("Volver al menu", skin, "big");
        buttonSettings.addListener(new ClickListener() {
            @Override
            public void clicked(InputEvent event, float x, float y) {
                stage.addAction(sequence(moveTo(0, -stage.getHeight(), .5f), run(new Runnable() {
                    @Override
                    public void run() {
                        AssetLoaderSpace.music_menu.play();
                        //AssetLoaderSpace.estrellado.stop();
                        ((Game) Gdx.app.getApplicationListener()).setScreen(new LevelMenu());
                    }
                })));
            }
        });
        buttonSettings.pad(10);

        // putting stuff together
        table.add(heading).spaceBottom(100).row();
        table.add(puntos).spaceBottom(50).row();
        table.add(buttonPlay).spaceBottom(15).row();
        table.add(buttonSettings).spaceBottom(15).row();

        stage.addActor(table);

        // creating animations
        tweenManager = new TweenManager();
        Tween.registerAccessor(Actor.class, new ActorAccessor());

        // heading color animation
        Timeline.createSequence().beginSequence()
                .push(Tween.to(heading, ActorAccessor.RGB, .5f).target(1, 0, 0))
                .end().repeat(Tween.INFINITY, 0).start(tweenManager);

        // heading and buttons fade-in
        Timeline.createSequence().beginSequence()
                .push(Tween.set(buttonPlay, ActorAccessor.ALPHA).target(0))
                .push(Tween.set(buttonSettings, ActorAccessor.ALPHA).target(0))
                .push(Tween.from(heading, ActorAccessor.ALPHA, .25f).target(0))
                .push(Tween.to(buttonPlay, ActorAccessor.ALPHA, .25f).target(1))
                .push(Tween.to(buttonSettings, ActorAccessor.ALPHA, .25f).target(1))
                .end().start(tweenManager);

        // table fade-in
        Tween.from(table, ActorAccessor.ALPHA, .75f).target(0).start(tweenManager);
        Tween.from(table, ActorAccessor.Y, .75f).target(Gdx.graphics.getHeight() / 8).start(tweenManager);

        tweenManager.update(Gdx.graphics.getDeltaTime());
    }

    @Override
    public void hide() {
        dispose();
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }

    @Override
    public void dispose() {
        stage.dispose();
        skin.dispose();
    }
}
apache-2.0
deepakddixit/incubator-geode
geode-core/src/main/java/org/apache/geode/internal/cache/TXEntryUserAttrState.java
2448
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import org.apache.geode.cache.CacheRuntimeException; import org.apache.geode.cache.CommitConflictException; import org.apache.geode.cache.Region; import org.apache.geode.internal.i18n.LocalizedStrings; /** * TXEntryUserAttrState is the entity that tracks transactional changes to an entry user attribute. 
* * * @since GemFire 4.0 * */ public class TXEntryUserAttrState { private final Object originalValue; private Object pendingValue; public TXEntryUserAttrState(Object originalValue) { this.originalValue = originalValue; this.pendingValue = originalValue; } public Object getOriginalValue() { return this.originalValue; } public Object getPendingValue() { return this.pendingValue; } public Object setPendingValue(Object pv) { Object result = this.pendingValue; this.pendingValue = pv; return result; } void checkForConflict(InternalRegion r, Object key) throws CommitConflictException { Object curCmtValue = r.basicGetEntryUserAttribute(key); if (this.originalValue != curCmtValue) { throw new CommitConflictException( LocalizedStrings.TXEntryUserAttrState_ENTRY_USER_ATTRIBUTE_FOR_KEY_0_ON_REGION_1_HAD_ALREADY_BEEN_CHANGED_TO_2 .toLocalizedString(new Object[] {key, r.getFullPath(), curCmtValue})); } } void applyChanges(InternalRegion r, Object key) { try { Region.Entry re = r.getEntry(key); re.setUserAttribute(this.pendingValue); } catch (CacheRuntimeException ignore) { // ignore any exceptions since we have already locked and // found no conflicts. } } }
apache-2.0
opensim-org/opensim-gui
Gui/opensim/utils/src/org/opensim/version40/TheApp40.java
386
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package org.opensim.version40; /** * * @author Ayman-NMBL */ public class TheApp40 { // This is an empty class used as a tag to Preferences registry because we need a class to locate package }
apache-2.0
crowdcode-de/spring-cloud-performance-tuning
domain-services/product-service/src/main/java/io/crowdcode/flaschenhals/product/repository/ProductRepository.java
519
package io.crowdcode.flaschenhals.product.repository; import io.crowdcode.flaschenhals.product.model.Product; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; import java.util.List; public interface ProductRepository extends JpaRepository<Product, Long> { @Query("SELECT p FROM Product p LEFT JOIN p.tags t WHERE t in :tags") List<Product> findByTag(@Param("tags") String... tags); }
apache-2.0
leobm/teavm-jquery
utils/src/main/java/de/iterable/teavm/utils/jso/JSArrayUtils.java
3562
/*
 * Copyright 2015 Jan-Felix Wittmann.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.iterable.teavm.utils.jso;

import java.util.ArrayList;
import java.util.List;

import org.teavm.jso.JSObject;
import org.teavm.jso.core.JSArray;
import org.teavm.jso.core.JSNumber;
import org.teavm.jso.core.JSString;

/**
 * Conversion helpers between Java arrays/collections and TeaVM {@link JSArray}s.
 *
 * All factory methods are pure: they build a fresh JS array/list and never
 * mutate their input.
 *
 * @author Jan-Felix Wittmann
 */
public final class JSArrayUtils {

    private JSArrayUtils() {
        // utility class — no instances
    }

    /**
     * Maps a Java array into a pre-sized {@link JSArray} using {@code mapper}
     * for each element.
     */
    public static <V extends Object, S extends JSObject> JSArray<S> of(V[] items,
            JSFromObjectMapper<V, S> mapper) {
        final JSArray<S> array = JSArray.create(items.length);
        for (int i = 0; i < items.length; ++i) {
            array.set(i, mapper.apply(items[i]));
        }
        return array;
    }

    /** Wraps strings as {@link JSString}s. */
    @SafeVarargs
    public static JSArray<JSString> of(String... items) {
        return of(items, value -> JSString.valueOf(value));
    }

    /** Wraps integers as {@link JSNumber}s. */
    @SafeVarargs
    public static JSArray<JSNumber> of(Integer... items) {
        return of(items, value -> JSNumber.valueOf(value));
    }

    /** Wraps doubles as {@link JSNumber}s. */
    @SafeVarargs
    public static JSArray<JSNumber> of(Double... items) {
        return of(items, value -> JSNumber.valueOf(value));
    }

    /** Wraps floats as {@link JSNumber}s. */
    @SafeVarargs
    public static JSArray<JSNumber> of(Float... items) {
        return of(items, value -> JSNumber.valueOf(value));
    }

    /**
     * Maps an iterable into a {@link JSArray} by pushing each mapped element.
     */
    public static <V extends Object, S extends JSObject> JSArray<S> of(Iterable<V> items,
            JSFromObjectMapper<V, S> mapper) {
        final JSArray<S> array = JSArray.create();
        for (V item : items) {
            array.push(mapper.apply(item));
        }
        return array;
    }

    public static JSArray<JSString> ofStringIterable(Iterable<String> items) {
        return of(items, value -> JSString.valueOf(value));
    }

    public static JSArray<JSNumber> ofIntIterable(Iterable<Integer> items) {
        return of(items, value -> JSNumber.valueOf(value));
    }

    public static JSArray<JSNumber> ofFloatIterable(Iterable<Float> items) {
        return of(items, value -> JSNumber.valueOf(value));
    }

    /**
     * Wraps doubles as {@link JSNumber}s.
     *
     * BUG FIX: the parameter was declared {@code Iterable<Float>} (copy-paste
     * from {@link #ofFloatIterable}), making the method unusable for its named
     * purpose. Corrected to {@code Iterable<Double>}; the erased signature is
     * unchanged, so the fix is binary-compatible.
     */
    public static JSArray<JSNumber> ofDoubleIterable(Iterable<Double> items) {
        return of(items, value -> JSNumber.valueOf(value));
    }

    /**
     * Converts a {@link JSArray} into a Java {@link List} using {@code mapper}
     * for each element.
     */
    public static <V extends JSObject, S extends Object> List<S> asList(JSArray<V> arr,
            JSToObjectMapper<V, S> mapper) {
        final List<S> list = new ArrayList<>(arr.getLength());
        for (int i = 0; i < arr.getLength(); i++) {
            list.add(mapper.apply(arr.get(i)));
        }
        return list;
    }

    public static List<String> asStringList(JSArray<JSString> arr) {
        return asList(arr, value -> value.stringValue());
    }

    public static List<Integer> asIntList(JSArray<JSNumber> arr) {
        return asList(arr, value -> value.intValue());
    }

    public static List<Double> asDoubleList(JSArray<JSNumber> arr) {
        return asList(arr, value -> value.doubleValue());
    }

    public static List<Float> asFloatList(JSArray<JSNumber> arr) {
        return asList(arr, value -> value.floatValue());
    }
}
apache-2.0
Kiranprajapati2719/java
LargestInArray.java
812
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package array; /** * * @author Com 8 */ public class LargestInArray { /** * @param args the command line arguments */ public static void main(String []args){ //To do code double [] myList = {1.8,2.9,3.4,5.1,6.5}; double max=myList[0]; for(int i=1; i<myList.length; i++){ if(myList[i]>max){ max=myList[i]; } } System.out.println("Largest number is "+max); } }
apache-2.0
PlanetWaves/clockworkengine
branches/3.0/engine/src/core/com/clockwork/scene/shape/PQTorus.java
6887
// $Id: PQTorus.java 4131 2009-03-19 20:15:28Z blaine.dev $
package com.clockwork.scene.shape;

import com.clockwork.export.InputCapsule;
import com.clockwork.export.CWExporter;
import com.clockwork.export.CWImporter;
import com.clockwork.export.OutputCapsule;
import com.clockwork.math.FastMath;
import com.clockwork.math.Vector3f;
import com.clockwork.scene.Mesh;
import com.clockwork.scene.VertexBuffer.Type;
import static com.clockwork.util.BufferUtils.*;
import java.io.IOException;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

/**
 * A parameterized torus, also known as a <em>pq</em> torus.
 *
 * The mesh is a tube swept along a (p, q) torus-knot curve; an approximate
 * Frenet frame is built at each step to orient the tube's cross-section.
 *
 * @version $Revision: 4131 $, $Date: 2009-03-19 16:15:28 -0400 (Thu, 19 Mar 2009) $
 */
public class PQTorus extends Mesh {

    // Knot parameters: p = x/z oscillation, q = y oscillation.
    private float p, q;
    // radius = overall size of the knot path; width = tube thickness.
    private float radius, width;
    // Tessellation: steps along the path, radialSamples around the tube.
    private int steps, radialSamples;

    public PQTorus() {
    }

    /**
     * Creates a parameterized torus.
     *
     * Steps and radialSamples are both degree of accuracy values.
     *
     * @param p the x/z oscillation.
     * @param q the y oscillation.
     * @param radius the radius of the PQTorus.
     * @param width the width of the torus.
     * @param steps the steps along the torus.
     * @param radialSamples radial samples for the torus.
     */
    public PQTorus(float p, float q, float radius, float width,
            int steps, int radialSamples) {
        super();
        updateGeometry(p, q, radius, width, steps, radialSamples);
    }

    public float getP() {
        return p;
    }

    public float getQ() {
        return q;
    }

    public int getRadialSamples() {
        return radialSamples;
    }

    public float getRadius() {
        return radius;
    }

    public int getSteps() {
        return steps;
    }

    public float getWidth() {
        return width;
    }

    /**
     * Rebuilds this torus based on a new set of parameters.
     *
     * @param p the x/z oscillation.
     * @param q the y oscillation.
     * @param radius the radius of the PQTorus.
     * @param width the width of the torus.
     * @param steps the steps along the torus.
     * @param radialSamples radial samples for the torus.
     */
    public void updateGeometry(float p, float q, float radius, float width, int steps, int radialSamples) {
        this.p = p;
        this.q = q;
        this.radius = radius;
        this.width = width;
        this.steps = steps;
        this.radialSamples = radialSamples;

        final float thetaStep = (FastMath.TWO_PI / steps);
        final float betaStep = (FastMath.TWO_PI / radialSamples);
        Vector3f[] torusPoints = new Vector3f[steps];

        // Allocate all of the required buffers: positions, normals, texcoords.
        int vertCount = radialSamples * steps;
        FloatBuffer fpb = createVector3Buffer(vertCount);
        FloatBuffer fnb = createVector3Buffer(vertCount);
        FloatBuffer ftb = createVector2Buffer(vertCount);

        Vector3f pointB, T, N, B;
        Vector3f tempNorm = new Vector3f();
        float r, x, y, z, theta = 0.0f, beta;
        int nvertex = 0;

        // Move along the length of the pq torus.
        // NOTE(review): theta is incremented before use, so the first ring sits
        // at theta = thetaStep rather than 0 — harmless phase shift on a closed
        // curve, but worth confirming against texture-seam expectations.
        for (int i = 0; i < steps; i++) {
            theta += thetaStep;
            float circleFraction = ((float) i) / (float) steps;

            // Find the point on the torus-knot curve.
            // NOTE(review): r already contains a factor of `radius`, and x/y/z
            // multiply by `radius` again — the effective scale is radius^2.
            // Possibly intentional legacy behavior; confirm before "fixing".
            r = (0.5f * (2.0f + FastMath.sin(q * theta)) * radius);
            x = (r * FastMath.cos(p * theta) * radius);
            y = (r * FastMath.sin(p * theta) * radius);
            z = (r * FastMath.cos(q * theta) * radius);
            torusPoints[i] = new Vector3f(x, y, z);

            // Now find a point slightly farther along the torus (theta + 0.01)
            // to approximate the tangent direction.
            r = (0.5f * (2.0f + FastMath.sin(q * (theta + 0.01f))) * radius);
            x = (r * FastMath.cos(p * (theta + 0.01f)) * radius);
            y = (r * FastMath.sin(p * (theta + 0.01f)) * radius);
            z = (r * FastMath.cos(q * (theta + 0.01f)) * radius);
            pointB = new Vector3f(x, y, z);

            // Approximate the Frenet Frame: T = tangent, then build an
            // orthogonal pair (N, B) from it.
            T = pointB.subtract(torusPoints[i]);
            N = torusPoints[i].add(pointB);
            B = T.cross(N);
            N = B.cross(T);

            // Normalise the two vectors and then use them to create an
            // oriented circle (the tube cross-section).
            N = N.normalize();
            B = B.normalize();
            beta = 0.0f;

            for (int j = 0; j < radialSamples; j++, nvertex++) {
                beta += betaStep;
                float cx = FastMath.cos(beta) * width;
                float cy = FastMath.sin(beta) * width;
                float radialFraction = ((float) j) / radialSamples;
                // Offset within the N/B plane = outward normal direction.
                tempNorm.x = (cx * N.x + cy * B.x);
                tempNorm.y = (cx * N.y + cy * B.y);
                tempNorm.z = (cx * N.z + cy * B.z);

                fnb.put(tempNorm.x).put(tempNorm.y).put(tempNorm.z);

                // Position = curve point + cross-section offset.
                tempNorm.addLocal(torusPoints[i]);
                fpb.put(tempNorm.x).put(tempNorm.y).put(tempNorm.z);

                ftb.put(radialFraction).put(circleFraction);
            }
        }

        // Update the indices data: two triangles per quad between ring i-1 and i.
        ShortBuffer sib = createShortBuffer(6 * vertCount);
        for (int i = 0; i < vertCount; i++) {
            sib.put(new short[] {
                (short)(i), (short)(i - radialSamples), (short)(i + 1),
                (short)(i + 1), (short)(i - radialSamples), (short)(i - radialSamples + 1) });
        }
        // Wrap out-of-range indices so the first/last rings close the tube.
        for (int i = 0, len = sib.capacity(); i < len; i++) {
            int ind = sib.get(i);
            if (ind < 0) {
                ind += vertCount;
                sib.put(i, (short) ind);
            } else if (ind >= vertCount) {
                ind -= vertCount;
                sib.put(i, (short) ind);
            }
        }
        sib.rewind();

        setBuffer(Type.Position, 3, fpb);
        setBuffer(Type.Normal, 3, fnb);
        setBuffer(Type.TexCoord, 2, ftb);
        setBuffer(Type.Index, 3, sib);
    }

    /** Deserializes the knot parameters; does not rebuild the geometry buffers. */
    @Override
    public void read(CWImporter e) throws IOException {
        super.read(e);
        InputCapsule capsule = e.getCapsule(this);
        p = capsule.readFloat("p", 0);
        q = capsule.readFloat("q", 0);
        radius = capsule.readFloat("radius", 0);
        width = capsule.readFloat("width", 0);
        steps = capsule.readInt("steps", 0);
        radialSamples = capsule.readInt("radialSamples", 0);
    }

    /** Serializes only the knot parameters; geometry is reproducible from them. */
    @Override
    public void write(CWExporter e) throws IOException {
        super.write(e);
        OutputCapsule capsule = e.getCapsule(this);
        capsule.write(p, "p", 0);
        capsule.write(q, "q", 0);
        capsule.write(radius, "radius", 0);
        capsule.write(width, "width", 0);
        capsule.write(steps, "steps", 0);
        capsule.write(radialSamples, "radialSamples", 0);
    }
}
apache-2.0
shot/hadoop-source-reading
src/mapred/org/apache/hadoop/mapred/JobPriority.java
978
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; /** * Used to describe the priority of the running job. * */ public enum JobPriority { VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW; }
apache-2.0
m-m-m/music
core/src/main/java/net/sf/mmm/music/datatype/api/ClefSymbol.java
1195
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0 * http://www.apache.org/licenses/LICENSE-2.0 */ package net.sf.mmm.music.datatype.api; import net.sf.mmm.music.partiture.MusicStaff; /** * The clef is the initial symbol of a {@link MusicStaff staff} that indicates which line is identifying which * {@link TonePitch tone}. * * @author hohwille */ public enum ClefSymbol { /** * The G-clef which is also called treble-clef (violin-clef). This is the most common clef used in modern music. If * you have proper unicode support you can see it here: &#119070; */ G, /** * The F-clef which is also called bass-clef. Besides the G-clef this is also commonly used in modern music. If you * have proper unicode support you can see it here: &#119074; */ F, /** * The C-clef which is also called alto-clef (or tenor-clef according to placement). If you have proper unicode * support you can see it here: &#119073; */ C, /** * The neutral-clef is also called percussion-clef. It is actually not a clef in the same sense as the others. If you * have proper unicode support you can see it here: &#119077; */ N, }
apache-2.0
raulh82vlc/ShoppingList
domain/src/main/java/com/raulh82vlc/CheckoutShoppingList/domain/interactors/CheckoutShoppingListInteractorImpl.java
5113
/*
 * Copyright (C) 2017 Raul Hernandez Lopez @raulh82vlc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.raulh82vlc.CheckoutShoppingList.domain.interactors;

import com.raulh82vlc.CheckoutShoppingList.domain.ConstantsDomain;
import com.raulh82vlc.CheckoutShoppingList.domain.executors.Interactor;
import com.raulh82vlc.CheckoutShoppingList.domain.executors.InteractorExecutor;
import com.raulh82vlc.CheckoutShoppingList.domain.executors.MainThread;
import com.raulh82vlc.CheckoutShoppingList.domain.models.ProductDomain;
import com.raulh82vlc.CheckoutShoppingList.domain.models.ProductResponse;
import com.raulh82vlc.CheckoutShoppingList.domain.repository.ProductsRepository;

import java.util.List;
import java.util.Map;

import javax.inject.Inject;

/**
 * Implementation of Checkout of the Shopping List Interactor.
 *
 * Runs the checkout computation on a background executor (via
 * {@link InteractorExecutor}) and posts the result back on the main thread.
 *
 * @author Raul Hernandez Lopez
 */
public class CheckoutShoppingListInteractorImpl implements CheckoutShoppingListInteractor, Interactor {

    // Background executor that will run this interactor's run() method.
    final private InteractorExecutor executor;
    // Trampoline to post the result callback on the UI/main thread.
    final private MainThread mainThread;
    final private ProductsRepository<ProductResponse, ProductDomain> repository;
    // Set per-execute() call; read later from the background thread in run().
    private CheckoutShoppingListCallback callback;
    // Strategy to checkout with discounts or without any.
    private final CheckoutStrategy checkoutStrategy;

    // NOTE(review): the constructor parameter uses the raw type
    // ProductsRepository while the field is parameterized — this is an
    // unchecked assignment. Changing it would alter the Dagger binding type,
    // so it is only flagged here; confirm the DI module before fixing.
    @Inject
    public CheckoutShoppingListInteractorImpl(InteractorExecutor executor,
                                              MainThread mainThread,
                                              ProductsRepository repository,
                                              CheckoutStrategy checkoutStrategy) {
        this.executor = executor;
        this.mainThread = mainThread;
        this.repository = repository;
        this.checkoutStrategy = checkoutStrategy;
    }

    /** Stores the callback and schedules this interactor on the executor. */
    @Override
    public void execute(CheckoutShoppingListCallback callback) {
        this.callback = callback;
        executor.run(this);
    }

    @Override
    public void run() {
        // There is no error case: even an empty basket is a valid checkout
        // (it simply totals 0.00).
        notifySuccessfullyCheckedOut(
                getResultCheckOutSum(
                        repository.getShoppingListDictionary(),
                        repository.getProductsReferenceDictionary()),
                repository.getShoppingList());
    }

    /**
     * Checks proper business logic calculations with and without discounts.
     *
     * @param shoppingListDictionary product amounts per product type
     * @param referenceProductListDictionary product price per product type
     * @return total amount calculated
     */
    protected float getResultCheckOutSum(Map<String, Integer> shoppingListDictionary,
                                         Map<String, Float> referenceProductListDictionary) {
        float resultCheckOutSum = 0f;
        for (Map.Entry<String, Integer> setOfValues : shoppingListDictionary.entrySet()) {
            switch (setOfValues.getKey()) {
                case ConstantsDomain.VOUCHER_TYPE:
                    // Vouchers: "buy X get Y free" promotion.
                    resultCheckOutSum += checkoutStrategy.applyDiscountsToTypeXperY(setOfValues.getValue(),
                            referenceProductListDictionary.get(ConstantsDomain.VOUCHER_TYPE),
                            ConstantsDomain.BUY, ConstantsDomain.FREE);
                    break;
                case ConstantsDomain.TSHIRT_TYPE:
                    // T-shirts: reduced unit price when buying X or more.
                    resultCheckOutSum += checkoutStrategy.applyDiscountsToTypeXOrMore(setOfValues.getValue(),
                            referenceProductListDictionary.get(ConstantsDomain.TSHIRT_TYPE),
                            ConstantsDomain.LIMIT_FOR_APPLYING_DISCOUNT,
                            ConstantsDomain.DISCOUNT_PRICE_PER_UNIT);
                    break;
                case ConstantsDomain.MUG_TYPE:
                default:
                    // Mugs (and any unknown type) are charged at the mug's
                    // plain price — NOTE(review): the default branch looks up
                    // MUG_TYPE's price regardless of the actual key; confirm
                    // that unknown product types are intended to price as mugs.
                    resultCheckOutSum += checkoutStrategy.applyNoDiscounts(setOfValues.getValue(),
                            referenceProductListDictionary.get(ConstantsDomain.MUG_TYPE));
                    break;
            }
        }
        return resultCheckOutSum;
    }

    /**
     * <p>Notifies to the UI (main) thread the result of checkout,
     * and sends a callback the string</p>
     */
    private void notifySuccessfullyCheckedOut(final float shoppingListCalculated,
                                              final List<ProductDomain> shoppingList) {
        mainThread.post(new Runnable() {
            @Override
            public void run() {
                callback.onCheckoutOK(shoppingListCalculated, shoppingList);
            }
        });
    }
}
apache-2.0
Hangekk/coolweather
app/src/main/java/com/coolweather/android/WeatherActivity.java
9351
package com.coolweather.android;

import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.coolweather.android.gson.Forecast;
import com.coolweather.android.gson.Weather;
import com.coolweather.android.service.AutoUpdateService;
import com.coolweather.android.util.HttpUtil;
import com.coolweather.android.util.Utility;

import java.io.IOException;

import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;

/**
 * Main weather screen.
 *
 * Shows current conditions, the forecast list, AQI and lifestyle suggestions
 * for one city. Weather JSON is cached in default SharedPreferences under the
 * key "weather"; pull-to-refresh re-queries the server. A Bing daily picture
 * is used as the background.
 */
public class WeatherActivity extends AppCompatActivity {

    private ScrollView weatherLayout;
    private TextView titleCity;
    private TextView titleUpdateTime;
    private TextView degreeText;
    private TextView weatherInfoText;
    private LinearLayout forecastLayout;
    private TextView aqiText;
    private TextView pm25Text;
    private TextView comfortText;
    private TextView carWashText;
    private TextView sportText;
    private ImageView bingPicImg;
    public SwipeRefreshLayout swipeRefreshLayout;
    public DrawerLayout drawerLayout;
    private Button navButton;
    // Weather id of the city currently shown; reused by pull-to-refresh.
    private String mWeatherId;
    private static final String TAG = "WeatherActivity";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // On Lollipop+ draw behind a transparent status bar for the full-bleed
        // background image.
        if(Build.VERSION.SDK_INT>=21){
            View decorView=getWindow().getDecorView();
            decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN|View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
            getWindow().setStatusBarColor(Color.TRANSPARENT);
        }
        setContentView(R.layout.activity_weather);
        // Initialise the view widgets.
        weatherLayout = (ScrollView) findViewById(R.id.weather_layout);
        titleCity = (TextView) findViewById(R.id.title_city);
        titleUpdateTime = (TextView) findViewById(R.id.title_update_time);
        degreeText = (TextView) findViewById(R.id.degree_text);
        weatherInfoText = (TextView) findViewById(R.id.weather_info_text);
        forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);
        aqiText = (TextView) findViewById(R.id.aqi_text);
        pm25Text = (TextView) findViewById(R.id.pm25_text);
        comfortText = (TextView) findViewById(R.id.comfort_text);
        carWashText = (TextView) findViewById(R.id.car_wash_text);
        sportText = (TextView) findViewById(R.id.sport_text);
        bingPicImg=(ImageView)findViewById(R.id.bing_pic_img);
        drawerLayout=(DrawerLayout)findViewById(R.id.drawer_layout);
        navButton=(Button)findViewById(R.id.nav_button);
        swipeRefreshLayout=(SwipeRefreshLayout)findViewById(R.id.swipe_refresh);
        swipeRefreshLayout.setColorSchemeResources(R.color.colorPrimary);
        SharedPreferences prefs= PreferenceManager.getDefaultSharedPreferences(this);
        String weatherString =prefs.getString("weather",null);
        if(weatherString!=null){
            // A cached response exists: parse and show it without a network hit.
            Weather weather= Utility.handleWeatherResponse(weatherString);
            mWeatherId=weather.basic.weatherId;
            showWeatherInfo(weather);
        }else{
            // No cache: hide the layout and query the server for this city.
            mWeatherId=getIntent().getStringExtra("weather_id");
            weatherLayout.setVisibility(View.INVISIBLE);
            requestWeather(mWeatherId);
        }
        // Pull-to-refresh re-queries the currently shown city.
        swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                requestWeather(mWeatherId);
            }
        });
        // Hamburger button opens the city-chooser drawer.
        navButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                drawerLayout.openDrawer(GravityCompat.START);
            }
        });
        // Background image: use the cached URL if we have one, else fetch.
        String bingPic=prefs.getString("bing_pic",null);
        if(bingPic!=null){
            Glide.with(this).load(bingPic).into(bingPicImg);
        }else {
            loadBingPic();
        }
    }

    /**
     * Loads the Bing picture-of-the-day URL, caches it in SharedPreferences
     * ("bing_pic") and displays it as the background.
     *
     * NOTE(review): onFailure is silently ignored — the background simply
     * stays empty; confirm that is the intended behavior.
     */
    private void loadBingPic() {
        HttpUtil.sendOkHttpRequest("http://guolin.tech/api/bing_pic", new Callback() {
            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String bingPic=response.body().string();
                SharedPreferences.Editor edit=PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();
                edit.putString("bing_pic",bingPic);
                edit.apply();
                // OkHttp callbacks run on a worker thread; hop to the UI thread
                // before touching views.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);
                    }
                });
            }

            @Override
            public void onFailure(Call call, IOException e) {
            }
        });
    }

    /**
     * Requests the city's weather from the server by weather id, caches the
     * raw JSON on success and updates the UI.
     *
     * NOTE(review): the API key is hard-coded in the URL; consider moving it
     * out of source.
     *
     * @param weatherId server-side id of the city to query
     */
    public void requestWeather(String weatherId) {
        String weatherUrl="http://guolin.tech/api/weather?cityid="+weatherId+"&key=af593815a57c4fed90db0bd480b5e46d";
        HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String responseText=response.body().string();
                final Weather weather=Utility.handleWeatherResponse(responseText);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if(weather!=null&&"ok".equals(weather.status)){
                            // Cache the raw JSON so the next launch is instant.
                            SharedPreferences.Editor edit=PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();
                            edit.putString("weather",responseText);
                            edit.apply();
                            showWeatherInfo(weather);
                            // Kick off the background auto-update service.
                            startService(new Intent(WeatherActivity.this, AutoUpdateService.class));
                        }else {
                            Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
                        }
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
            }

            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
            }
        });
    }

    /**
     * Renders a parsed {@link Weather} object into the views: header, forecast
     * rows (inflated per day), AQI and lifestyle suggestions.
     *
     * @param weather parsed weather data; must have status "ok"
     */
    private void showWeatherInfo(Weather weather) {
        String cityName = weather.basic.cityName;
        // Update time arrives as "date time"; keep only the time part.
        String updateTime = weather.basic.update.updateTime.split(" ")[1];
        String degree = weather.now.temperature + "℃";
        String weatherInfo = weather.now.more.info;
        titleCity.setText(cityName);
        titleUpdateTime.setText(updateTime);
        degreeText.setText(degree);
        weatherInfoText.setText(weatherInfo);
        // Rebuild the forecast list from scratch on every refresh.
        forecastLayout.removeAllViews();
        for (Forecast forecast : weather.forecastList) {
            View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);
            TextView dateText = (TextView) view.findViewById(R.id.date_text);
            TextView infoText = (TextView) view.findViewById(R.id.info_text);
            TextView maxText = (TextView) view.findViewById(R.id.max_text);
            TextView minText = (TextView) view.findViewById(R.id.min_text);
            dateText.setText(forecast.date);
            infoText.setText(forecast.more.info);
            maxText.setText(forecast.temperature.max);
            minText.setText(forecast.temperature.min);
            forecastLayout.addView(view);
        }
        // AQI block is optional in the payload.
        if (weather.aqi != null) {
            aqiText.setText(weather.aqi.city.aqi);
            pm25Text.setText(weather.aqi.city.pm25);
        }
        String comfort = "舒适度:" + weather.suggestion.comfort.info;
        String carWash = "洗车指数:" + weather.suggestion.carWash.info;
        String sport = "运行建议:" + weather.suggestion.sport.info;
        comfortText.setText(comfort);
        carWashText.setText(carWash);
        sportText.setText(sport);
        weatherLayout.setVisibility(View.VISIBLE);
    }
}
apache-2.0
agileowl/tapestry-5
tapestry-ioc/src/test/java/org/apache/tapestry5/ioc/internal/DecorateByMarkerModule.java
3516
// Copyright 2010,, 2011 The Apache Software Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.ioc.internal; import org.apache.tapestry5.ioc.GreenMarker; import org.apache.tapestry5.ioc.Greeter; import org.apache.tapestry5.ioc.RedMarker; import org.apache.tapestry5.ioc.ServiceResources; import org.apache.tapestry5.ioc.annotations.Decorate; import org.apache.tapestry5.ioc.annotations.Local; import org.apache.tapestry5.ioc.annotations.Marker; import org.apache.tapestry5.ioc.annotations.Order; import org.apache.tapestry5.ioc.services.AspectDecorator; import org.apache.tapestry5.ioc.services.AspectInterceptorBuilder; import org.apache.tapestry5.plastic.MethodAdvice; import org.apache.tapestry5.plastic.MethodInvocation; public class DecorateByMarkerModule { @Decorate(serviceInterface = Greeter.class) @GreenMarker public static <T> T greeter(ServiceResources resources, T delegate, AspectDecorator aspectDecorator) { return doDecorate("foo", resources, delegate, aspectDecorator); } @Decorate(serviceInterface = Greeter.class, id = "bar") @GreenMarker @Order("after:Greeter") public static <T> T greeter2(ServiceResources resources, T delegate, AspectDecorator aspectDecorator) { return doDecorate("bar", resources, delegate, aspectDecorator); } @Decorate(serviceInterface = Greeter.class, id = "baz") @GreenMarker @Order({"after:Greeter", "before:bar"}) public static <T> T greeter3(ServiceResources resources, T delegate, AspectDecorator 
aspectDecorator) { return doDecorate("baz", resources, delegate, aspectDecorator); } @Decorate(serviceInterface = Greeter.class, id = "barney") @Local public static <T> T localAdvise(ServiceResources resources, T delegate, AspectDecorator aspectDecorator) { return doDecorate("barney", resources, delegate, aspectDecorator); } private static <T> T doDecorate(final String decoratorId, ServiceResources resources, T delegate, AspectDecorator aspectDecorator) { Class<T> serviceInterface = resources.getServiceInterface(); AspectInterceptorBuilder<T> builder = aspectDecorator.createBuilder(serviceInterface, delegate, String.format( "<Interceptor for %s(%s)>", resources.getServiceId(), serviceInterface.getName())); builder.adviseAllMethods(new MethodAdvice() { public void advise(MethodInvocation invocation) { invocation.proceed(); Object result = invocation.getReturnValue(); invocation.setReturnValue(String.format("Decorated by %s[%s]", decoratorId, result)); } }); return builder.build(); } @Marker(RedMarker.class) public Greeter buildRedGreeter() { return new Greeter() { public String getGreeting() { return "Red"; } }; } }
apache-2.0
BDizzle/lambda-refarch-webapp
lambda-functions/src/main/java/blog/configuration/ApplicationConfiguration.java
3639
package blog.configuration; import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBAttribute; import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey; import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTable; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @DynamoDBTable(tableName = "aws-serverless-config") public class ApplicationConfiguration { private String environment; private String userDynamoDBTableName; private String postDynamoDBTableName; private String latestPostDynamoDBTableName; private String commentDynamoDBTableName; private String forumDynamoDBTableName; private String cognitoIdentityPoolId; private String cognitoDeveloperId; private String encryptionKeyId; @DynamoDBHashKey(attributeName = "environment") public String getEnvironment() { return environment; } public void setEnvironment(String environment) { this.environment = environment; } @DynamoDBAttribute(attributeName = "user_ddb_table_name") public String getUserDynamoDBTableName() { return userDynamoDBTableName; } public void setUserDynamoDBTableName(String userDynamoDBTableName) { this.userDynamoDBTableName = userDynamoDBTableName; } @DynamoDBAttribute(attributeName = "post_ddb_table_name") public String getPostDynamoDBTableName() { return postDynamoDBTableName; } public void setPostDynamoDBTableName(String postDynamoDBTableName) { this.postDynamoDBTableName = postDynamoDBTableName; } @DynamoDBAttribute(attributeName = "latest_post_ddb_table_name") public String getLatestPostDynamoDBTableName() { return latestPostDynamoDBTableName; } public void setLatestPostDynamoDBTableName(String latestPostDynamoDBTableName) { this.latestPostDynamoDBTableName = latestPostDynamoDBTableName; } @DynamoDBAttribute(attributeName = "comment_ddb_table_name") public String getCommentDynamoDBTableName() { return commentDynamoDBTableName; } public void setCommentDynamoDBTableName(String commentDynamoDBTableName) { 
this.commentDynamoDBTableName = commentDynamoDBTableName; } @DynamoDBAttribute(attributeName = "cognito_identity_pool_id") public String getCognitoIdentityPoolId() { return cognitoIdentityPoolId; } public void setCognitoIdentityPoolId(String cognitoIdentityPoolId) { this.cognitoIdentityPoolId = cognitoIdentityPoolId; } @DynamoDBAttribute(attributeName = "cognito_developer_id") public String getCognitoDeveloperId() { return cognitoDeveloperId; } public void setCognitoDeveloperId(String cognitoDeveloperId) { this.cognitoDeveloperId = cognitoDeveloperId; } @DynamoDBAttribute(attributeName = "encryption_key_id") public String getEncryptionKeyId() { return encryptionKeyId; } public void setEncryptionKeyId(String encryptionKeyId) { this.encryptionKeyId = encryptionKeyId; } @DynamoDBAttribute(attributeName = "forum_ddb_table_name") public String getForumDynamoDBTableName() { return forumDynamoDBTableName; } public void setForumDynamoDBTableName(String forumDynamoDBTableName) { this.forumDynamoDBTableName = forumDynamoDBTableName; } @Override public String toString() { ObjectMapper mapper = new ObjectMapper(); try { return mapper.writeValueAsString(this); } catch (JsonProcessingException e) { return "error"; } } }
apache-2.0
abono/Kaya-CMS
KayaCMS-Core/src/main/java/com/aranya/kayacms/beans/webpagetemplate/WebPageTemplate.java
1413
package com.aranya.kayacms.beans.webpagetemplate; import com.aranya.kayacms.beans.website.WebSiteId; import com.aranya.kayacms.properties.DayAndTime; import java.io.Serializable; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; @Getter @AllArgsConstructor @ToString @EqualsAndHashCode @Builder public class WebPageTemplate implements Serializable { private static final long serialVersionUID = -3793488307058119687L; private WebPageTemplateId webPageTemplateId; private String name; private String content; private String nameEdits; private String contentEdits; private DayAndTime createDate; private DayAndTime modifyDate; private DayAndTime publishDate; private WebSiteId webSiteId; public static WebPageTemplate.WebPageTemplateBuilder builderClone( WebPageTemplate webPageTemplate) { return builder() .webPageTemplateId(webPageTemplate.getWebPageTemplateId()) .name(webPageTemplate.getName()) .content(webPageTemplate.getContent()) .nameEdits(webPageTemplate.nameEdits) .contentEdits(webPageTemplate.getContentEdits()) .createDate(webPageTemplate.getCreateDate()) .modifyDate(webPageTemplate.getModifyDate()) .publishDate(webPageTemplate.getPublishDate()) .webSiteId(webPageTemplate.getWebSiteId()); } }
apache-2.0
wuhighway/DailyStudy
app/src/main/java/com/highway/study/takephoto/ItemCallback.java
169
package com.highway.study.takephoto; /** * Created by JH * on 2017/1/19. */ public interface ItemCallback { void clickCallback (); void deleteCallback(); }
apache-2.0